@fluidframework/tree 2.63.0 → 2.70.0-360753
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/api-report/tree.alpha.api.md +31 -27
- package/api-report/tree.beta.api.md +36 -1
- package/api-report/tree.legacy.beta.api.md +36 -1
- package/dist/alpha.d.ts +4 -4
- package/dist/beta.d.ts +4 -0
- package/dist/feature-libraries/chunked-forest/codec/chunkDecoding.d.ts +2 -2
- package/dist/feature-libraries/chunked-forest/codec/chunkDecoding.d.ts.map +1 -1
- package/dist/feature-libraries/chunked-forest/codec/chunkDecoding.js +9 -10
- package/dist/feature-libraries/chunked-forest/codec/chunkDecoding.js.map +1 -1
- package/dist/feature-libraries/chunked-forest/codec/codecs.d.ts +5 -4
- package/dist/feature-libraries/chunked-forest/codec/codecs.d.ts.map +1 -1
- package/dist/feature-libraries/chunked-forest/codec/codecs.js.map +1 -1
- package/dist/feature-libraries/forest-summary/forestSummarizer.d.ts +1 -1
- package/dist/feature-libraries/forest-summary/forestSummarizer.d.ts.map +1 -1
- package/dist/feature-libraries/forest-summary/forestSummarizer.js +2 -2
- package/dist/feature-libraries/forest-summary/forestSummarizer.js.map +1 -1
- package/dist/feature-libraries/forest-summary/incrementalSummaryBuilder.d.ts +9 -7
- package/dist/feature-libraries/forest-summary/incrementalSummaryBuilder.d.ts.map +1 -1
- package/dist/feature-libraries/forest-summary/incrementalSummaryBuilder.js +34 -11
- package/dist/feature-libraries/forest-summary/incrementalSummaryBuilder.js.map +1 -1
- package/dist/legacy.d.ts +4 -0
- package/dist/packageVersion.d.ts +1 -1
- package/dist/packageVersion.d.ts.map +1 -1
- package/dist/packageVersion.js +1 -1
- package/dist/packageVersion.js.map +1 -1
- package/dist/serializableDomainSchema.d.ts +12 -10
- package/dist/serializableDomainSchema.d.ts.map +1 -1
- package/dist/serializableDomainSchema.js +7 -7
- package/dist/serializableDomainSchema.js.map +1 -1
- package/dist/shared-tree/sharedTree.d.ts +1 -1
- package/dist/shared-tree/sharedTree.d.ts.map +1 -1
- package/dist/shared-tree/sharedTree.js +2 -2
- package/dist/shared-tree/sharedTree.js.map +1 -1
- package/dist/simple-tree/api/schemaFactory.d.ts +1 -1
- package/dist/simple-tree/api/schemaFactory.d.ts.map +1 -1
- package/dist/simple-tree/api/schemaFactory.js +17 -13
- package/dist/simple-tree/api/schemaFactory.js.map +1 -1
- package/dist/simple-tree/api/schemaFactoryAlpha.d.ts.map +1 -1
- package/dist/simple-tree/api/schemaFactoryAlpha.js +8 -8
- package/dist/simple-tree/api/schemaFactoryAlpha.js.map +1 -1
- package/dist/simple-tree/api/schemaFactoryBeta.d.ts +2 -1
- package/dist/simple-tree/api/schemaFactoryBeta.d.ts.map +1 -1
- package/dist/simple-tree/api/schemaFactoryBeta.js +8 -7
- package/dist/simple-tree/api/schemaFactoryBeta.js.map +1 -1
- package/dist/simple-tree/api/schemaFactoryRecursive.d.ts +1 -1
- package/dist/simple-tree/api/schemaFactoryRecursive.js.map +1 -1
- package/dist/simple-tree/node-kinds/array/arrayNode.d.ts +3 -4
- package/dist/simple-tree/node-kinds/array/arrayNode.d.ts.map +1 -1
- package/dist/simple-tree/node-kinds/array/arrayNode.js +3 -2
- package/dist/simple-tree/node-kinds/array/arrayNode.js.map +1 -1
- package/dist/simple-tree/node-kinds/map/mapNode.d.ts +4 -3
- package/dist/simple-tree/node-kinds/map/mapNode.d.ts.map +1 -1
- package/dist/simple-tree/node-kinds/map/mapNode.js +3 -2
- package/dist/simple-tree/node-kinds/map/mapNode.js.map +1 -1
- package/dist/simple-tree/node-kinds/object/objectNode.d.ts +4 -3
- package/dist/simple-tree/node-kinds/object/objectNode.d.ts.map +1 -1
- package/dist/simple-tree/node-kinds/object/objectNode.js +5 -5
- package/dist/simple-tree/node-kinds/object/objectNode.js.map +1 -1
- package/dist/simple-tree/node-kinds/record/recordNode.d.ts +3 -10
- package/dist/simple-tree/node-kinds/record/recordNode.d.ts.map +1 -1
- package/dist/simple-tree/node-kinds/record/recordNode.js +3 -2
- package/dist/simple-tree/node-kinds/record/recordNode.js.map +1 -1
- package/dist/simple-tree/node-kinds/record/recordNodeTypes.d.ts +2 -2
- package/dist/simple-tree/node-kinds/record/recordNodeTypes.d.ts.map +1 -1
- package/dist/simple-tree/node-kinds/record/recordNodeTypes.js.map +1 -1
- package/dist/tableSchema.d.ts +37 -24
- package/dist/tableSchema.d.ts.map +1 -1
- package/dist/tableSchema.js +81 -54
- package/dist/tableSchema.js.map +1 -1
- package/dist/treeFactory.d.ts.map +1 -1
- package/dist/treeFactory.js +1 -1
- package/dist/treeFactory.js.map +1 -1
- package/dist/util/utils.d.ts +11 -2
- package/dist/util/utils.d.ts.map +1 -1
- package/dist/util/utils.js.map +1 -1
- package/lib/alpha.d.ts +4 -4
- package/lib/beta.d.ts +4 -0
- package/lib/feature-libraries/chunked-forest/codec/chunkDecoding.d.ts +2 -2
- package/lib/feature-libraries/chunked-forest/codec/chunkDecoding.d.ts.map +1 -1
- package/lib/feature-libraries/chunked-forest/codec/chunkDecoding.js +9 -10
- package/lib/feature-libraries/chunked-forest/codec/chunkDecoding.js.map +1 -1
- package/lib/feature-libraries/chunked-forest/codec/codecs.d.ts +5 -4
- package/lib/feature-libraries/chunked-forest/codec/codecs.d.ts.map +1 -1
- package/lib/feature-libraries/chunked-forest/codec/codecs.js.map +1 -1
- package/lib/feature-libraries/forest-summary/forestSummarizer.d.ts +1 -1
- package/lib/feature-libraries/forest-summary/forestSummarizer.d.ts.map +1 -1
- package/lib/feature-libraries/forest-summary/forestSummarizer.js +2 -2
- package/lib/feature-libraries/forest-summary/forestSummarizer.js.map +1 -1
- package/lib/feature-libraries/forest-summary/incrementalSummaryBuilder.d.ts +9 -7
- package/lib/feature-libraries/forest-summary/incrementalSummaryBuilder.d.ts.map +1 -1
- package/lib/feature-libraries/forest-summary/incrementalSummaryBuilder.js +34 -11
- package/lib/feature-libraries/forest-summary/incrementalSummaryBuilder.js.map +1 -1
- package/lib/legacy.d.ts +4 -0
- package/lib/packageVersion.d.ts +1 -1
- package/lib/packageVersion.d.ts.map +1 -1
- package/lib/packageVersion.js +1 -1
- package/lib/packageVersion.js.map +1 -1
- package/lib/serializableDomainSchema.d.ts +12 -10
- package/lib/serializableDomainSchema.d.ts.map +1 -1
- package/lib/serializableDomainSchema.js +8 -8
- package/lib/serializableDomainSchema.js.map +1 -1
- package/lib/shared-tree/sharedTree.d.ts +1 -1
- package/lib/shared-tree/sharedTree.d.ts.map +1 -1
- package/lib/shared-tree/sharedTree.js +2 -2
- package/lib/shared-tree/sharedTree.js.map +1 -1
- package/lib/simple-tree/api/schemaFactory.d.ts +1 -1
- package/lib/simple-tree/api/schemaFactory.d.ts.map +1 -1
- package/lib/simple-tree/api/schemaFactory.js +18 -14
- package/lib/simple-tree/api/schemaFactory.js.map +1 -1
- package/lib/simple-tree/api/schemaFactoryAlpha.d.ts.map +1 -1
- package/lib/simple-tree/api/schemaFactoryAlpha.js +8 -8
- package/lib/simple-tree/api/schemaFactoryAlpha.js.map +1 -1
- package/lib/simple-tree/api/schemaFactoryBeta.d.ts +2 -1
- package/lib/simple-tree/api/schemaFactoryBeta.d.ts.map +1 -1
- package/lib/simple-tree/api/schemaFactoryBeta.js +8 -7
- package/lib/simple-tree/api/schemaFactoryBeta.js.map +1 -1
- package/lib/simple-tree/api/schemaFactoryRecursive.d.ts +1 -1
- package/lib/simple-tree/api/schemaFactoryRecursive.js.map +1 -1
- package/lib/simple-tree/node-kinds/array/arrayNode.d.ts +3 -4
- package/lib/simple-tree/node-kinds/array/arrayNode.d.ts.map +1 -1
- package/lib/simple-tree/node-kinds/array/arrayNode.js +3 -2
- package/lib/simple-tree/node-kinds/array/arrayNode.js.map +1 -1
- package/lib/simple-tree/node-kinds/map/mapNode.d.ts +4 -3
- package/lib/simple-tree/node-kinds/map/mapNode.d.ts.map +1 -1
- package/lib/simple-tree/node-kinds/map/mapNode.js +3 -2
- package/lib/simple-tree/node-kinds/map/mapNode.js.map +1 -1
- package/lib/simple-tree/node-kinds/object/objectNode.d.ts +4 -3
- package/lib/simple-tree/node-kinds/object/objectNode.d.ts.map +1 -1
- package/lib/simple-tree/node-kinds/object/objectNode.js +5 -5
- package/lib/simple-tree/node-kinds/object/objectNode.js.map +1 -1
- package/lib/simple-tree/node-kinds/record/recordNode.d.ts +3 -10
- package/lib/simple-tree/node-kinds/record/recordNode.d.ts.map +1 -1
- package/lib/simple-tree/node-kinds/record/recordNode.js +3 -2
- package/lib/simple-tree/node-kinds/record/recordNode.js.map +1 -1
- package/lib/simple-tree/node-kinds/record/recordNodeTypes.d.ts +2 -2
- package/lib/simple-tree/node-kinds/record/recordNodeTypes.d.ts.map +1 -1
- package/lib/simple-tree/node-kinds/record/recordNodeTypes.js.map +1 -1
- package/lib/tableSchema.d.ts +37 -24
- package/lib/tableSchema.d.ts.map +1 -1
- package/lib/tableSchema.js +81 -54
- package/lib/tableSchema.js.map +1 -1
- package/lib/treeFactory.d.ts.map +1 -1
- package/lib/treeFactory.js +1 -1
- package/lib/treeFactory.js.map +1 -1
- package/lib/util/utils.d.ts +11 -2
- package/lib/util/utils.d.ts.map +1 -1
- package/lib/util/utils.js.map +1 -1
- package/package.json +21 -21
- package/src/feature-libraries/chunked-forest/codec/chunkDecoding.ts +16 -14
- package/src/feature-libraries/chunked-forest/codec/codecs.ts +8 -4
- package/src/feature-libraries/forest-summary/forestSummarizer.ts +2 -0
- package/src/feature-libraries/forest-summary/incrementalSummaryBuilder.ts +65 -13
- package/src/packageVersion.ts +1 -1
- package/src/serializableDomainSchema.ts +11 -11
- package/src/shared-tree/sharedTree.ts +2 -0
- package/src/simple-tree/api/schemaFactory.ts +38 -22
- package/src/simple-tree/api/schemaFactoryAlpha.ts +8 -17
- package/src/simple-tree/api/schemaFactoryBeta.ts +17 -23
- package/src/simple-tree/api/schemaFactoryRecursive.ts +1 -1
- package/src/simple-tree/node-kinds/array/arrayNode.ts +5 -4
- package/src/simple-tree/node-kinds/map/mapNode.ts +5 -3
- package/src/simple-tree/node-kinds/object/objectNode.ts +8 -7
- package/src/simple-tree/node-kinds/record/recordNode.ts +6 -18
- package/src/simple-tree/node-kinds/record/recordNodeTypes.ts +2 -2
- package/src/tableSchema.ts +143 -81
- package/src/treeFactory.ts +1 -0
- package/src/util/utils.ts +11 -2
@@ -1 +1 @@
-
{"version":3,"file":"codecs.js","sourceRoot":"","sources":["../../../../src/feature-libraries/chunked-forest/codec/codecs.ts"],"names":[],"mappings":";AAAA;;;GAGG;;;AAEH,kEAA8E;AAG9E,sDAKiC;AAOjC,qDAIgC;AAChC,2EAIuC;AAEvC,yDAA4C;AAE5C,2CAA6F;AAC7F,iEAAgE;AAChE,mEAA6D;AAQ7D,MAAM,gBAAgB,GAAG,IAAA,4BAAiB,EAAmB,EAAE,UAAU,EAAE,CAAC,EAAE,OAAO,EAAE,CAAC,EAAE,CAAC,CAAC;
+
{"version":3,"file":"codecs.js","sourceRoot":"","sources":["../../../../src/feature-libraries/chunked-forest/codec/codecs.ts"],"names":[],"mappings":";AAAA;;;GAGG;;;AAEH,kEAA8E;AAG9E,sDAKiC;AAOjC,qDAIgC;AAChC,2EAIuC;AAEvC,yDAA4C;AAE5C,2CAA6F;AAC7F,iEAAgE;AAChE,mEAA6D;AAQ7D,MAAM,gBAAgB,GAAG,IAAA,4BAAiB,EAAmB,EAAE,UAAU,EAAE,CAAC,EAAE,OAAO,EAAE,CAAC,EAAE,CAAC,CAAC;AA6E5F;;;;GAIG;AACH,SAAgB,yCAAyC,CACxD,mBAA4C;IAE5C,wCAAwC;IACxC,OAAO,CAAC,CAAC;AACV,CAAC;AALD,8FAKC;AAED,SAAgB,mBAAmB,CAClC,OAAsB,EACtB,YAAoB;IAEpB,6GAA6G;IAC7G,8GAA8G;IAC9G,iHAAiH;IACjH,+BAA+B;IAC/B,IAAA,iBAAM,EACL,yBAAa,CAAC,GAAG,CAAC,YAAY,CAAC,EAC/B,KAAK,CAAC,gDAAgD,CACtD,CAAC;IAEF,uFAAuF;IACvF,OAAO,IAAA,sCAA2B,EAAC,OAAO,EAAE,yBAAa,EAAE,6BAAiB,EAAE;QAC7E,MAAM,EAAE,CAAC,IAAgB,EAAE,OAAkC,EAAqB,EAAE;YACnF,KAAK,MAAM,MAAM,IAAI,IAAI,EAAE,CAAC;gBAC3B,IAAA,iBAAM,EACL,MAAM,CAAC,IAAI,sCAA8B,EACzC,KAAK,CAAC,uCAAuC,CAC7C,CAAC;YACH,CAAC;YACD,IAAI,OAA0B,CAAC;YAC/B,QAAQ,OAAO,CAAC,UAAU,EAAE,CAAC;gBAC5B,KAAK,iDAAuB,CAAC,YAAY;oBACxC,OAAO,GAAG,IAAA,0CAAkB,EAAC,IAAI,CAAC,CAAC;oBACnC,MAAM;gBACP,KAAK,yDAA+B,CAAC,qBAAqB,CAAC;gBAC3D,KAAK,iDAAuB,CAAC,UAAU;oBACtC,kDAAkD;oBAClD,IAAI,OAAO,CAAC,MAAM,KAAK,SAAS,EAAE,CAAC;wBAClC,OAAO,GAAG,IAAA,6CAAsB,EAC/B,OAAO,CAAC,MAAM,CAAC,MAAM,EACrB,OAAO,CAAC,MAAM,CAAC,MAAM,EACrB,IAAI,EACJ,OAAO,CAAC,YAAY;wBACpB,oEAAoE;wBACpE,OAAO,CAAC,UAAU,KAAK,yDAA+B,CAAC,qBAAqB;4BAC3E,CAAC,CAAC,OAAO,CAAC,yBAAyB;4BACnC,CAAC,CAAC,SAAS,CACZ,CAAC;oBACH,CAAC;yBAAM,CAAC;wBACP,mFAAmF;wBACnF,OAAO,GAAG,IAAA,0CAAkB,EAAC,IAAI,CAAC,CAAC;oBACpC,CAAC;oBAED,MAAM;gBACP;oBACC,IAAA,0BAAe,EAAC,OAAO,CAAC,UAAU,CAAC,CAAC;YACtC,CAAC;YAED,oDAAoD;YACpD,OAAO,OAAO,CAAC;QAChB,CAAC;QACD,MAAM,EAAE,CAAC,IAAuB,EAAE,OAAkC,EAAc,EAAE;YACnF,6CAA6C;YAC7C,OAAO,IAAA,yBAAM,EACZ,IAAI,EACJ;gBACC,YAAY,EAAE,OAAO,CAAC,YAAY;gBAClC,YAAY,EAAE,OAAO,CAAC,YAAY;aAClC,EACD,OAAO,CAAC,yBAAyB,CACjC,CAAC,GAAG,CAAC,CAAC,KAAK,EAAE,EAAE,CAAC,KAAK,CAAC,MAAM,EAAE,CAAC,CAAC;QAClC,CAAC;KACD,CAAC,CAAC;AACJ,CAAC;AAlED,kDAkEC;AAED,SAAgB,+BAA+B,CAAC,OAAgC;IAC/E,OAAO,EAAE,IAAI,EAAE,YAAY,EAAE,OAAO,EAAE,CAAC;AACxC,CAAC;AAFD,0EAEC","sourcesContent":["/*!\n * Copyright (c) Microsoft Corporation and contributors. 
All rights reserved.\n * Licensed under the MIT License.\n */\n\nimport { assert, unreachableCase } from \"@fluidframework/core-utils/internal\";\nimport type { IIdCompressor, SessionId } from \"@fluidframework/id-compressor\";\n\nimport {\n\ttype CodecTree,\n\ttype ICodecOptions,\n\ttype IJsonCodec,\n\tmakeVersionedValidatedCodec,\n} from \"../../../codec/index.js\";\nimport {\n\tCursorLocationType,\n\ttype ITreeCursorSynchronous,\n\ttype SchemaAndPolicy,\n\ttype TreeChunk,\n} from \"../../../core/index.js\";\nimport {\n\tbrandedNumberType,\n\ttype Brand,\n\ttype JsonCompatibleReadOnly,\n} from \"../../../util/index.js\";\nimport {\n\tTreeCompressionStrategy,\n\tTreeCompressionStrategyExtended,\n\ttype TreeCompressionStrategyPrivate,\n} from \"../../treeCompressionUtils.js\";\n\nimport { decode } from \"./chunkDecoding.js\";\nimport type { FieldBatch } from \"./fieldBatch.js\";\nimport { EncodedFieldBatch, validVersions, type FieldBatchFormatVersion } from \"./format.js\";\nimport { schemaCompressedEncode } from \"./schemaBasedEncode.js\";\nimport { uncompressedEncode } from \"./uncompressedEncode.js\";\nimport type { MinimumVersionForCollab } from \"@fluidframework/runtime-definitions/internal\";\nimport type { IncrementalEncodingPolicy } from \"./incrementalEncodingPolicy.js\";\n\n/**\n * Reference ID for a chunk that is incrementally encoded.\n */\nexport type ChunkReferenceId = Brand<number, \"forest.ChunkReferenceId\">;\nconst ChunkReferenceId = brandedNumberType<ChunkReferenceId>({ multipleOf: 1, minimum: 0 });\n\n/**\n * Properties for incremental encoding.\n * Fields that support incremental encoding will encode their chunks separately by calling `encodeIncrementalField`.\n * @remarks\n * This supports features like incremental summarization where the summary from these fields can be re-used if\n * unchanged between summaries.\n * Note that each of these chunks that are incrementally encoded is fully self-describing (contain its own shapes\n * list and identifier table) and does not rely on context from its parent.\n */\nexport interface IncrementalEncoder {\n\t/**\n\t * Returns whether a node / field should be incrementally encoded.\n\t * @remarks See {@link IncrementalEncodingPolicy}.\n\t */\n\tshouldEncodeIncrementally: IncrementalEncodingPolicy;\n\t/**\n\t * Called to encode an incremental field at the cursor.\n\t * The chunks for this field are encoded separately from the main buffer.\n\t * @param cursor - The cursor pointing to the field to encode.\n\t * @param chunkEncoder - A function that encodes the contents of the passed chunk in the field.\n\t * @returns The reference IDs of the encoded chunks in the field.\n\t * This is used to retrieve the encoded chunks later.\n\t */\n\tencodeIncrementalField(\n\t\tcursor: ITreeCursorSynchronous,\n\t\tchunkEncoder: (chunk: TreeChunk) => EncodedFieldBatch,\n\t): ChunkReferenceId[];\n}\n\n/**\n * Properties for incremental decoding.\n *\n * Fields that had their chunks incrementally encoded will retrieve them by calling `getEncodedIncrementalChunk`.\n * @remarks\n * See {@link IncrementalEncoder} for more details.\n */\nexport interface IncrementalDecoder {\n\t/**\n\t * Called to decode an incremental chunk with the given reference ID.\n\t * @param referenceId - The reference ID of the chunk to decode.\n\t * @param chunkDecoder - A function that decodes the chunk.\n\t * @returns The decoded chunk.\n\t */\n\tdecodeIncrementalChunk(\n\t\treferenceId: ChunkReferenceId,\n\t\tchunkDecoder: (encoded: EncodedFieldBatch) => TreeChunk,\n\t): 
TreeChunk;\n}\n/**\n * Combines the properties of {@link IncrementalEncoder} and {@link IncrementalDecoder}.\n */\nexport interface IncrementalEncoderDecoder extends IncrementalEncoder, IncrementalDecoder {}\n\nexport interface FieldBatchEncodingContext {\n\treadonly encodeType: TreeCompressionStrategyPrivate;\n\treadonly idCompressor: IIdCompressor;\n\treadonly originatorId: SessionId;\n\treadonly schema?: SchemaAndPolicy;\n\t/**\n\t * An encoder / decoder for encoding and decoding of incremental fields.\n\t * This will be defined if incremental encoding is supported and enabled.\n\t */\n\treadonly incrementalEncoderDecoder?: IncrementalEncoderDecoder;\n}\n/**\n * @remarks\n * Fields in this batch currently don't have field schema for the root, which limits optimizations.\n */\nexport type FieldBatchCodec = IJsonCodec<\n\tFieldBatch,\n\tEncodedFieldBatch,\n\tJsonCompatibleReadOnly,\n\tFieldBatchEncodingContext\n>;\n\n/**\n * Get the write version for {@link makeFieldBatchCodec} based on the `minVersionForCollab` version.\n * @privateRemarks\n * TODO: makeFieldBatchCodec (and makeVersionDispatchingCodec transitively) should bake in this versionToFormat logic and the resulting codec can then support use with FluidClientVersion directly.\n */\nexport function fluidVersionToFieldBatchCodecWriteVersion(\n\tminVersionForCollab: MinimumVersionForCollab,\n): number {\n\t// There is currently on only 1 version.\n\treturn 1;\n}\n\nexport function makeFieldBatchCodec(\n\toptions: ICodecOptions,\n\twriteVersion: number,\n): FieldBatchCodec {\n\t// Note: it's important that the decode function is schema-agnostic for this strategy/layering to work, since\n\t// the schema that an op was encoded in doesn't necessarily match the current schema for the document (e.g. if\n\t// decode is being run on a client that just submitted a schema change, but the op is from another client who has\n\t// yet to receive that change).\n\tassert(\n\t\tvalidVersions.has(writeVersion),\n\t\t0x935 /* Invalid write version for FieldBatch codec */,\n\t);\n\n\t// TODO: use makeVersionDispatchingCodec to support adding more versions in the future.\n\treturn makeVersionedValidatedCodec(options, validVersions, EncodedFieldBatch, {\n\t\tencode: (data: FieldBatch, context: FieldBatchEncodingContext): EncodedFieldBatch => {\n\t\t\tfor (const cursor of data) {\n\t\t\t\tassert(\n\t\t\t\t\tcursor.mode === CursorLocationType.Fields,\n\t\t\t\t\t0x8a3 /* FieldBatch expects fields cursors */,\n\t\t\t\t);\n\t\t\t}\n\t\t\tlet encoded: EncodedFieldBatch;\n\t\t\tswitch (context.encodeType) {\n\t\t\t\tcase TreeCompressionStrategy.Uncompressed:\n\t\t\t\t\tencoded = uncompressedEncode(data);\n\t\t\t\t\tbreak;\n\t\t\t\tcase TreeCompressionStrategyExtended.CompressedIncremental:\n\t\t\t\tcase TreeCompressionStrategy.Compressed:\n\t\t\t\t\t// eslint-disable-next-line unicorn/prefer-ternary\n\t\t\t\t\tif (context.schema !== undefined) {\n\t\t\t\t\t\tencoded = schemaCompressedEncode(\n\t\t\t\t\t\t\tcontext.schema.schema,\n\t\t\t\t\t\t\tcontext.schema.policy,\n\t\t\t\t\t\t\tdata,\n\t\t\t\t\t\t\tcontext.idCompressor,\n\t\t\t\t\t\t\t// Incremental encoding is only supported for CompressedIncremental.\n\t\t\t\t\t\t\tcontext.encodeType === TreeCompressionStrategyExtended.CompressedIncremental\n\t\t\t\t\t\t\t\t? 
context.incrementalEncoderDecoder\n\t\t\t\t\t\t\t\t: undefined,\n\t\t\t\t\t\t);\n\t\t\t\t\t} else {\n\t\t\t\t\t\t// TODO: consider enabling a somewhat compressed but not schema accelerated encode.\n\t\t\t\t\t\tencoded = uncompressedEncode(data);\n\t\t\t\t\t}\n\n\t\t\t\t\tbreak;\n\t\t\t\tdefault:\n\t\t\t\t\tunreachableCase(context.encodeType);\n\t\t\t}\n\n\t\t\t// TODO: consider checking input data was in schema.\n\t\t\treturn encoded;\n\t\t},\n\t\tdecode: (data: EncodedFieldBatch, context: FieldBatchEncodingContext): FieldBatch => {\n\t\t\t// TODO: consider checking data is in schema.\n\t\t\treturn decode(\n\t\t\t\tdata,\n\t\t\t\t{\n\t\t\t\t\tidCompressor: context.idCompressor,\n\t\t\t\t\toriginatorId: context.originatorId,\n\t\t\t\t},\n\t\t\t\tcontext.incrementalEncoderDecoder,\n\t\t\t).map((chunk) => chunk.cursor());\n\t\t},\n\t});\n}\n\nexport function getCodecTreeForFieldBatchFormat(version: FieldBatchFormatVersion): CodecTree {\n\treturn { name: \"FieldBatch\", version };\n}\n"]}
@@ -29,7 +29,7 @@ export declare class ForestSummarizer implements Summarizable {
     /**
      * @param encoderContext - The schema if provided here must be mutated by the caller to keep it up to date.
      */
-    constructor(forest: IEditableForest, revisionTagCodec: RevisionTagCodec, fieldBatchCodec: FieldBatchCodec, encoderContext: FieldBatchEncodingContext, options: CodecWriteOptions, idCompressor: IIdCompressor, shouldEncodeIncrementally?: IncrementalEncodingPolicy);
+    constructor(forest: IEditableForest, revisionTagCodec: RevisionTagCodec, fieldBatchCodec: FieldBatchCodec, encoderContext: FieldBatchEncodingContext, options: CodecWriteOptions, idCompressor: IIdCompressor, initialSequenceNumber: number, shouldEncodeIncrementally?: IncrementalEncodingPolicy);
     /**
      * Summarization of the forest's tree content.
      * @returns a summary tree containing the forest's tree content.
@@ -1 +1 @@
-
{"version":3,"file":"forestSummarizer.d.ts","sourceRoot":"","sources":["../../../src/feature-libraries/forest-summary/forestSummarizer.ts"],"names":[],"mappings":"AAAA;;;GAGG;AAIH,OAAO,KAAK,EAAE,sBAAsB,EAAE,MAAM,gDAAgD,CAAC;AAC7F,OAAO,KAAK,EAAE,aAAa,EAAE,MAAM,+BAA+B,CAAC;AACnE,OAAO,KAAK,EACX,sCAAsC,EACtC,qBAAqB,EACrB,iBAAiB,EACjB,MAAM,8CAA8C,CAAC;AAEtD,OAAO,KAAK,EAAE,iBAAiB,EAAE,MAAM,sBAAsB,CAAC;AAC9D,OAAO,EAIN,KAAK,eAAe,EAGpB,KAAK,gBAAgB,EAKrB,MAAM,qBAAqB,CAAC;AAC7B,OAAO,KAAK,EACX,YAAY,EACZ,oBAAoB,EACpB,yBAAyB,EACzB,MAAM,iCAAiC,CAAC;AAIzC,OAAO,EAEN,KAAK,eAAe,EACpB,KAAK,yBAAyB,EAC9B,KAAK,yBAAyB,EAC9B,MAAM,4BAA4B,CAAC;AAWpC;;;;GAIG;AACH,eAAO,MAAM,gBAAgB,WAAW,CAAC;AAEzC;;GAEG;AACH,qBAAa,gBAAiB,YAAW,YAAY;IAWnD,OAAO,CAAC,QAAQ,CAAC,MAAM;IACvB,OAAO,CAAC,QAAQ,CAAC,gBAAgB;IAEjC,OAAO,CAAC,QAAQ,CAAC,cAAc;IAE/B,OAAO,CAAC,QAAQ,CAAC,YAAY;IAf9B,SAAgB,GAAG,YAAoB;IAEvC,OAAO,CAAC,QAAQ,CAAC,KAAK,CAAc;IAEpC,OAAO,CAAC,QAAQ,CAAC,yBAAyB,CAAkC;IAE5E;;OAEG;gBAEe,MAAM,EAAE,eAAe,EACvB,gBAAgB,EAAE,gBAAgB,EACnD,eAAe,EAAE,eAAe,EACf,cAAc,EAAE,yBAAyB,EAC1D,OAAO,EAAE,iBAAiB,EACT,YAAY,EAAE,aAAa,EAC5C,yBAAyB,GAAE,yBAA4D;
+
{"version":3,"file":"forestSummarizer.d.ts","sourceRoot":"","sources":["../../../src/feature-libraries/forest-summary/forestSummarizer.ts"],"names":[],"mappings":"AAAA;;;GAGG;AAIH,OAAO,KAAK,EAAE,sBAAsB,EAAE,MAAM,gDAAgD,CAAC;AAC7F,OAAO,KAAK,EAAE,aAAa,EAAE,MAAM,+BAA+B,CAAC;AACnE,OAAO,KAAK,EACX,sCAAsC,EACtC,qBAAqB,EACrB,iBAAiB,EACjB,MAAM,8CAA8C,CAAC;AAEtD,OAAO,KAAK,EAAE,iBAAiB,EAAE,MAAM,sBAAsB,CAAC;AAC9D,OAAO,EAIN,KAAK,eAAe,EAGpB,KAAK,gBAAgB,EAKrB,MAAM,qBAAqB,CAAC;AAC7B,OAAO,KAAK,EACX,YAAY,EACZ,oBAAoB,EACpB,yBAAyB,EACzB,MAAM,iCAAiC,CAAC;AAIzC,OAAO,EAEN,KAAK,eAAe,EACpB,KAAK,yBAAyB,EAC9B,KAAK,yBAAyB,EAC9B,MAAM,4BAA4B,CAAC;AAWpC;;;;GAIG;AACH,eAAO,MAAM,gBAAgB,WAAW,CAAC;AAEzC;;GAEG;AACH,qBAAa,gBAAiB,YAAW,YAAY;IAWnD,OAAO,CAAC,QAAQ,CAAC,MAAM;IACvB,OAAO,CAAC,QAAQ,CAAC,gBAAgB;IAEjC,OAAO,CAAC,QAAQ,CAAC,cAAc;IAE/B,OAAO,CAAC,QAAQ,CAAC,YAAY;IAf9B,SAAgB,GAAG,YAAoB;IAEvC,OAAO,CAAC,QAAQ,CAAC,KAAK,CAAc;IAEpC,OAAO,CAAC,QAAQ,CAAC,yBAAyB,CAAkC;IAE5E;;OAEG;gBAEe,MAAM,EAAE,eAAe,EACvB,gBAAgB,EAAE,gBAAgB,EACnD,eAAe,EAAE,eAAe,EACf,cAAc,EAAE,yBAAyB,EAC1D,OAAO,EAAE,iBAAiB,EACT,YAAY,EAAE,aAAa,EAC5C,qBAAqB,EAAE,MAAM,EAC7B,yBAAyB,GAAE,yBAA4D;IAaxF;;;;;;;;;OASG;IACI,SAAS,CAAC,KAAK,EAAE;QACvB,SAAS,EAAE,yBAAyB,CAAC;QACrC,QAAQ,CAAC,EAAE,OAAO,CAAC;QACnB,UAAU,CAAC,EAAE,OAAO,CAAC;QACrB,gBAAgB,CAAC,EAAE,iBAAiB,CAAC;QACrC,yBAAyB,CAAC,EAAE,sCAAsC,CAAC;KACnE,GAAG,qBAAqB;IAyCZ,IAAI,CAChB,QAAQ,EAAE,sBAAsB,EAChC,KAAK,EAAE,oBAAoB,GACzB,OAAO,CAAC,IAAI,CAAC;CAsDhB"}
@@ -28,7 +28,7 @@ class ForestSummarizer {
     /**
      * @param encoderContext - The schema if provided here must be mutated by the caller to keep it up to date.
      */
-    constructor(forest, revisionTagCodec, fieldBatchCodec, encoderContext, options, idCompressor, shouldEncodeIncrementally = index_js_3.defaultIncrementalEncodingPolicy) {
+    constructor(forest, revisionTagCodec, fieldBatchCodec, encoderContext, options, idCompressor, initialSequenceNumber, shouldEncodeIncrementally = index_js_3.defaultIncrementalEncodingPolicy) {
         this.forest = forest;
         this.revisionTagCodec = revisionTagCodec;
         this.encoderContext = encoderContext;
@@ -37,7 +37,7 @@ class ForestSummarizer {
         // TODO: this should take in CodecWriteOptions, and use it to pick the write version.
         this.codec = (0, codec_js_1.makeForestSummarizerCodec)(options, fieldBatchCodec);
         this.incrementalSummaryBuilder = new incrementalSummaryBuilder_js_1.ForestIncrementalSummaryBuilder(encoderContext.encodeType ===
-            treeCompressionUtils_js_1.TreeCompressionStrategyExtended.CompressedIncremental /* enableIncrementalSummary */, (cursor) => this.forest.chunkField(cursor), shouldEncodeIncrementally);
+            treeCompressionUtils_js_1.TreeCompressionStrategyExtended.CompressedIncremental /* enableIncrementalSummary */, (cursor) => this.forest.chunkField(cursor), shouldEncodeIncrementally, initialSequenceNumber);
     }
     /**
      * Summarization of the forest's tree content.
@@ -1 +1 @@
-
{"version":3,"file":"forestSummarizer.js","sourceRoot":"","sources":["../../../src/feature-libraries/forest-summary/forestSummarizer.ts"],"names":[],"mappings":";AAAA;;;GAGG;;;AAEH,+DAA8D;AAC9D,kEAA6D;AAU7D,kDAY6B;AAM7B,kDAAgF;AAChF,sDAAsD;AACtD,iEAAsF;AACtF,yDAKoC;AAEpC,yCAAyE;AACzE,iFAIwC;AACxC,wEAA6E;AAG7E;;;;GAIG;AACU,QAAA,gBAAgB,GAAG,QAAQ,CAAC;AAEzC;;GAEG;AACH,MAAa,gBAAgB;IAO5B;;OAEG;IACH,YACkB,MAAuB,EACvB,gBAAkC,EACnD,eAAgC,EACf,cAAyC,EAC1D,OAA0B,EACT,YAA2B,EAC5C,4BAAuD,2CAAgC;QANtE,WAAM,GAAN,MAAM,CAAiB;QACvB,qBAAgB,GAAhB,gBAAgB,CAAkB;QAElC,mBAAc,GAAd,cAAc,CAA2B;QAEzC,iBAAY,GAAZ,YAAY,CAAe;QAf7B,QAAG,GAAG,wBAAgB,CAAC;QAkBtC,qFAAqF;QACrF,IAAI,CAAC,KAAK,GAAG,IAAA,oCAAyB,EAAC,OAAO,EAAE,eAAe,CAAC,CAAC;QACjE,IAAI,CAAC,yBAAyB,GAAG,IAAI,8DAA+B,CACnE,cAAc,CAAC,UAAU;YACxB,yDAA+B,CAAC,qBAAqB,CAAC,8BAA8B,EACrF,CAAC,MAA8B,EAAE,EAAE,CAAC,IAAI,CAAC,MAAM,CAAC,UAAU,CAAC,MAAM,CAAC,EAClE,yBAAyB,CACzB,CAAC;IACH,CAAC;IAED;;;;;;;;;OASG;IACI,SAAS,CAAC,KAMhB;QACA,MAAM,EAAE,SAAS,EAAE,QAAQ,GAAG,KAAK,EAAE,yBAAyB,EAAE,GAAG,KAAK,CAAC;QAEzE,MAAM,UAAU,GAAG,IAAI,CAAC,MAAM,CAAC,4BAA4B,EAAE,CAAC;QAC9D,MAAM,QAAQ,GACb,IAAI,GAAG,EAAE,CAAC;QACX,2FAA2F;QAC3F,IAAA,uBAAY,EAAC,UAAU,EAAE,CAAC,MAAM,EAAE,EAAE;YACnC,MAAM,GAAG,GAAG,MAAM,CAAC,WAAW,EAAE,CAAC;YACjC,MAAM,WAAW,GAAG,IAAI,CAAC,MAAM,CAAC,cAAc,CAAC,eAAe,CAAC,CAAC;YAChE,IAAA,iBAAM,EACL,IAAI,CAAC,MAAM,CAAC,oBAAoB,CAAC,EAAE,QAAQ,EAAE,GAAG,EAAE,MAAM,EAAE,SAAS,EAAE,EAAE,WAAW,CAAC;+CAC3D,EACxB,KAAK,CAAC,iCAAiC,CACvC,CAAC;YACF,QAAQ,CAAC,GAAG,CAAC,GAAG,EAAE,WAA+D,CAAC,CAAC;QACpF,CAAC,CAAC,CAAC;QAEH,+EAA+E;QAC/E,sDAAsD;QACtD,MAAM,0BAA0B,GAAG,IAAI,CAAC,yBAAyB,CAAC,YAAY,CAAC;YAC9E,QAAQ;YACR,yBAAyB;YACzB,SAAS;SACT,CAAC,CAAC;QACH,MAAM,cAAc,GAA8B;YACjD,GAAG,IAAI,CAAC,cAAc;YACtB,yBAAyB,EACxB,0BAA0B,KAAK,+DAAgC,CAAC,WAAW;gBAC1E,CAAC,CAAC,IAAI,CAAC,yBAAyB;gBAChC,CAAC,CAAC,SAAS;SACb,CAAC;QACF,MAAM,OAAO,GAAG,IAAI,CAAC,KAAK,CAAC,MAAM,CAAC,QAAQ,EAAE,cAAc,CAAC,CAAC;QAC5D,QAAQ,CAAC,OAAO,CAAC,CAAC,KAAK,EAAE,EAAE,CAAC,KAAK,CAAC,IAAI,EAAE,CAAC,CAAC;QAE1C,OAAO,IAAI,CAAC,yBAAyB,CAAC,eAAe,CAAC;YACrD,yBAAyB;YACzB,oBAAoB,EAAE,SAAS,CAAC,OAAO,CAAC;SACxC,CAAC,CAAC;IACJ,CAAC;IAEM,KAAK,CAAC,IAAI,CAChB,QAAgC,EAChC,KAA2B;QAE3B,6GAA6G;QAC7G,4GAA4G;QAC5G,wBAAwB;QACxB,8GAA8G;QAC9G,wCAAwC;QACxC,gHAAgH;QAChH,IAAA,iBAAM,EACL,MAAM,QAAQ,CAAC,QAAQ,CAAC,sDAAuB,CAAC,EAChD,KAAK,CAAC,gDAAgD,CACtD,CAAC;QAEF,MAAM,gBAAgB,GAAG,KAAK,EAC7B,EAAU,EACG,EAAE;YACf,MAAM,UAAU,GAAG,MAAM,QAAQ,CAAC,QAAQ,CAAC,EAAE,CAAC,CAAC;YAC/C,MAAM,gBAAgB,GAAG,IAAA,6BAAc,EAAC,UAAU,EAAE,MAAM,CAAC,CAAC;YAC5D,OAAO,KAAK,CAAC,gBAAgB,CAAM,CAAC;QACrC,CAAC,CAAC;QAEF,6FAA6F;QAC7F,YAAY;QACZ,MAAM,IAAI,CAAC,yBAAyB,CAAC,IAAI,CAAC,QAAQ,EAAE,gBAAgB,CAAC,CAAC;QAEtE,2HAA2H;QAC3H,yBAAyB;QACzB,MAAM,MAAM,GAAG,IAAI,CAAC,KAAK,CAAC,MAAM,CAAC,MAAM,gBAAgB,CAAC,sDAAuB,CAAC,EAAE;YACjF,GAAG,IAAI,CAAC,cAAc;YACtB,yBAAyB,EAAE,IAAI,CAAC,yBAAyB;SACzD,CAAC,CAAC;QACH,MAAM,SAAS,GAAG,IAAA,+BAAoB,GAAE,CAAC;QACzC,MAAM,YAAY,GAAoC,EAAE,CAAC;QACzD,MAAM,KAAK,GAA6B,EAAE,CAAC;QAC3C,KAAK,MAAM,CAAC,QAAQ,EAAE,KAAK,CAAC,IAAI,MAAM,EAAE,CAAC;YACxC,MAAM,OAAO,GAAG,IAAA,+BAAgB,EAAC,KAAK,EAAE;gBACvC,MAAM,EAAE,iCAAkB;gBAC1B,YAAY,EAAE,IAAI,CAAC,YAAY;aAC/B,CAAC,CAAC;YACH,MAAM,OAAO,GAAG,EAAE,KAAK,EAAE,SAAS,CAAC,QAAQ,CAAC,OAAO,CAAC,cAAc,CAAC,EAAE,CAAC;YACtE,KAAK,CAAC,IAAI,CAAC;gBACV,EAAE,EAAE,OAAO;gBACX,KAAK,EAAE,OAAO;aACd,CAAC,CAAC;YACH,YAAY,CAAC,IAAI,CAAC,CAAC,QAAQ,EAAE,CAAC,EAAE,KAAK,EAAE,OAAO,CAAC,cAAc,EAAE,MAAM,EAAE,OAAO,EAAE,CAAC,CAAC,CAAC,CAAC;QACrF,CAAC;QAED,IAAA,iBAAM,EAAC,IAAI,CAAC,MAAM,CAAC,OAAO,EAAE,KAAK,CAAC,0BAA0B,CAAC,CAAC;QAC9D,IAAA,qBAAU,EACT,EAAE,KAAK,EAAE,MAAM,EAAE,IAAI,GAAG,CAAC,YAAY,CAAC,EAA
E,EACxC,SAAS,EACT,IAAI,CAAC,MAAM,EACX,IAAA,iCAAsB,EAAC,MAAM,EAAE,IAAI,CAAC,gBAAgB,EAAE,IAAI,CAAC,YAAY,CAAC,CACxE,CAAC;IACH,CAAC;CACD;AA/ID,4CA+IC","sourcesContent":["/*!\n * Copyright (c) Microsoft Corporation and contributors. All rights reserved.\n * Licensed under the MIT License.\n */\n\nimport { bufferToString } from \"@fluid-internal/client-utils\";\nimport { assert } from \"@fluidframework/core-utils/internal\";\nimport type { IChannelStorageService } from \"@fluidframework/datastore-definitions/internal\";\nimport type { IIdCompressor } from \"@fluidframework/id-compressor\";\nimport type {\n\tIExperimentalIncrementalSummaryContext,\n\tISummaryTreeWithStats,\n\tITelemetryContext,\n} from \"@fluidframework/runtime-definitions/internal\";\n\nimport type { CodecWriteOptions } from \"../../codec/index.js\";\nimport {\n\ttype DeltaDetachedNodeBuild,\n\ttype DeltaFieldChanges,\n\ttype FieldKey,\n\ttype IEditableForest,\n\ttype ITreeCursorSynchronous,\n\ttype ITreeSubscriptionCursor,\n\ttype RevisionTagCodec,\n\tTreeNavigationResult,\n\tapplyDelta,\n\tforEachField,\n\tmakeDetachedFieldIndex,\n} from \"../../core/index.js\";\nimport type {\n\tSummarizable,\n\tSummaryElementParser,\n\tSummaryElementStringifier,\n} from \"../../shared-tree-core/index.js\";\nimport { idAllocatorFromMaxId, type JsonCompatible } from \"../../util/index.js\";\n// eslint-disable-next-line import/no-internal-modules\nimport { chunkFieldSingle, defaultChunkPolicy } from \"../chunked-forest/chunkTree.js\";\nimport {\n\tdefaultIncrementalEncodingPolicy,\n\ttype FieldBatchCodec,\n\ttype FieldBatchEncodingContext,\n\ttype IncrementalEncodingPolicy,\n} from \"../chunked-forest/index.js\";\n\nimport { type ForestCodec, makeForestSummarizerCodec } from \"./codec.js\";\nimport {\n\tForestIncrementalSummaryBehavior,\n\tForestIncrementalSummaryBuilder,\n\tforestSummaryContentKey,\n} from \"./incrementalSummaryBuilder.js\";\nimport { TreeCompressionStrategyExtended } from \"../treeCompressionUtils.js\";\nimport type { IFluidHandle } from \"@fluidframework/core-interfaces\";\n\n/**\n * The key for the tree that contains the overall forest's summary tree.\n * This tree is added by the parent of the forest summarizer.\n * See {@link ForestIncrementalSummaryBuilder} for details on the summary structure.\n */\nexport const forestSummaryKey = \"Forest\";\n\n/**\n * Provides methods for summarizing and loading a forest.\n */\nexport class ForestSummarizer implements Summarizable {\n\tpublic readonly key = forestSummaryKey;\n\n\tprivate readonly codec: ForestCodec;\n\n\tprivate readonly incrementalSummaryBuilder: ForestIncrementalSummaryBuilder;\n\n\t/**\n\t * @param encoderContext - The schema if provided here must be mutated by the caller to keep it up to date.\n\t */\n\tpublic constructor(\n\t\tprivate readonly forest: IEditableForest,\n\t\tprivate readonly revisionTagCodec: RevisionTagCodec,\n\t\tfieldBatchCodec: FieldBatchCodec,\n\t\tprivate readonly encoderContext: FieldBatchEncodingContext,\n\t\toptions: CodecWriteOptions,\n\t\tprivate readonly idCompressor: IIdCompressor,\n\t\tshouldEncodeIncrementally: IncrementalEncodingPolicy = defaultIncrementalEncodingPolicy,\n\t) {\n\t\t// TODO: this should take in CodecWriteOptions, and use it to pick the write version.\n\t\tthis.codec = makeForestSummarizerCodec(options, fieldBatchCodec);\n\t\tthis.incrementalSummaryBuilder = new ForestIncrementalSummaryBuilder(\n\t\t\tencoderContext.encodeType ===\n\t\t\t\tTreeCompressionStrategyExtended.CompressedIncremental /* 
enableIncrementalSummary */,\n\t\t\t(cursor: ITreeCursorSynchronous) => this.forest.chunkField(cursor),\n\t\t\tshouldEncodeIncrementally,\n\t\t);\n\t}\n\n\t/**\n\t * Summarization of the forest's tree content.\n\t * @returns a summary tree containing the forest's tree content.\n\t * @remarks\n\t * If incremental summary is disabled, all the content will be added to a single summary blob.\n\t * If incremental summary is enabled, the summary will be a tree.\n\t * See {@link ForestIncrementalSummaryBuilder} for details of what this tree looks like.\n\t *\n\t * TODO: when perf matters, this should be replaced with a chunked async version using a binary format.\n\t */\n\tpublic summarize(props: {\n\t\tstringify: SummaryElementStringifier;\n\t\tfullTree?: boolean;\n\t\ttrackState?: boolean;\n\t\ttelemetryContext?: ITelemetryContext;\n\t\tincrementalSummaryContext?: IExperimentalIncrementalSummaryContext;\n\t}): ISummaryTreeWithStats {\n\t\tconst { stringify, fullTree = false, incrementalSummaryContext } = props;\n\n\t\tconst rootCursor = this.forest.getCursorAboveDetachedFields();\n\t\tconst fieldMap: Map<FieldKey, ITreeCursorSynchronous & ITreeSubscriptionCursor> =\n\t\t\tnew Map();\n\t\t// TODO: Encode all detached fields in one operation for better performance and compression\n\t\tforEachField(rootCursor, (cursor) => {\n\t\t\tconst key = cursor.getFieldKey();\n\t\t\tconst innerCursor = this.forest.allocateCursor(\"getTreeString\");\n\t\t\tassert(\n\t\t\t\tthis.forest.tryMoveCursorToField({ fieldKey: key, parent: undefined }, innerCursor) ===\n\t\t\t\t\tTreeNavigationResult.Ok,\n\t\t\t\t0x892 /* failed to navigate to field */,\n\t\t\t);\n\t\t\tfieldMap.set(key, innerCursor as ITreeCursorSynchronous & ITreeSubscriptionCursor);\n\t\t});\n\n\t\t// Let the incremental summary builder know that we are starting a new summary.\n\t\t// It returns whether incremental encoding is enabled.\n\t\tconst incrementalSummaryBehavior = this.incrementalSummaryBuilder.startSummary({\n\t\t\tfullTree,\n\t\t\tincrementalSummaryContext,\n\t\t\tstringify,\n\t\t});\n\t\tconst encoderContext: FieldBatchEncodingContext = {\n\t\t\t...this.encoderContext,\n\t\t\tincrementalEncoderDecoder:\n\t\t\t\tincrementalSummaryBehavior === ForestIncrementalSummaryBehavior.Incremental\n\t\t\t\t\t? 
this.incrementalSummaryBuilder\n\t\t\t\t\t: undefined,\n\t\t};\n\t\tconst encoded = this.codec.encode(fieldMap, encoderContext);\n\t\tfieldMap.forEach((value) => value.free());\n\n\t\treturn this.incrementalSummaryBuilder.completeSummary({\n\t\t\tincrementalSummaryContext,\n\t\t\tforestSummaryContent: stringify(encoded),\n\t\t});\n\t}\n\n\tpublic async load(\n\t\tservices: IChannelStorageService,\n\t\tparse: SummaryElementParser,\n\t): Promise<void> {\n\t\t// The contents of the top-level forest must be present under a summary blob named `forestSummaryContentKey`.\n\t\t// If the summary was generated as `ForestIncrementalSummaryBehavior.SingleBlob`, this blob will contain all\n\t\t// of forest's contents.\n\t\t// If the summary was generated as `ForestIncrementalSummaryBehavior.Incremental`, this blob will contain only\n\t\t// the top-level forest node's contents.\n\t\t// The contents of the incremental chunks will be in separate tree nodes and will be read later during decoding.\n\t\tassert(\n\t\t\tawait services.contains(forestSummaryContentKey),\n\t\t\t0xc21 /* Forest summary content missing in snapshot */,\n\t\t);\n\n\t\tconst readAndParseBlob = async <T extends JsonCompatible<IFluidHandle>>(\n\t\t\tid: string,\n\t\t): Promise<T> => {\n\t\t\tconst treeBuffer = await services.readBlob(id);\n\t\t\tconst treeBufferString = bufferToString(treeBuffer, \"utf8\");\n\t\t\treturn parse(treeBufferString) as T;\n\t\t};\n\n\t\t// Load the incremental summary builder so that it can download any incremental chunks in the\n\t\t// snapshot.\n\t\tawait this.incrementalSummaryBuilder.load(services, readAndParseBlob);\n\n\t\t// TODO: this code is parsing data without an optional validator, this should be defined in a typebox schema as part of the\n\t\t// forest summary format.\n\t\tconst fields = this.codec.decode(await readAndParseBlob(forestSummaryContentKey), {\n\t\t\t...this.encoderContext,\n\t\t\tincrementalEncoderDecoder: this.incrementalSummaryBuilder,\n\t\t});\n\t\tconst allocator = idAllocatorFromMaxId();\n\t\tconst fieldChanges: [FieldKey, DeltaFieldChanges][] = [];\n\t\tconst build: DeltaDetachedNodeBuild[] = [];\n\t\tfor (const [fieldKey, field] of fields) {\n\t\t\tconst chunked = chunkFieldSingle(field, {\n\t\t\t\tpolicy: defaultChunkPolicy,\n\t\t\t\tidCompressor: this.idCompressor,\n\t\t\t});\n\t\t\tconst buildId = { minor: allocator.allocate(chunked.topLevelLength) };\n\t\t\tbuild.push({\n\t\t\t\tid: buildId,\n\t\t\t\ttrees: chunked,\n\t\t\t});\n\t\t\tfieldChanges.push([fieldKey, [{ count: chunked.topLevelLength, attach: buildId }]]);\n\t\t}\n\n\t\tassert(this.forest.isEmpty, 0x797 /* forest must be empty */);\n\t\tapplyDelta(\n\t\t\t{ build, fields: new Map(fieldChanges) },\n\t\t\tundefined,\n\t\t\tthis.forest,\n\t\t\tmakeDetachedFieldIndex(\"init\", this.revisionTagCodec, this.idCompressor),\n\t\t);\n\t}\n}\n"]}
+
{"version":3,"file":"forestSummarizer.js","sourceRoot":"","sources":["../../../src/feature-libraries/forest-summary/forestSummarizer.ts"],"names":[],"mappings":";AAAA;;;GAGG;;;AAEH,+DAA8D;AAC9D,kEAA6D;AAU7D,kDAY6B;AAM7B,kDAAgF;AAChF,sDAAsD;AACtD,iEAAsF;AACtF,yDAKoC;AAEpC,yCAAyE;AACzE,iFAIwC;AACxC,wEAA6E;AAG7E;;;;GAIG;AACU,QAAA,gBAAgB,GAAG,QAAQ,CAAC;AAEzC;;GAEG;AACH,MAAa,gBAAgB;IAO5B;;OAEG;IACH,YACkB,MAAuB,EACvB,gBAAkC,EACnD,eAAgC,EACf,cAAyC,EAC1D,OAA0B,EACT,YAA2B,EAC5C,qBAA6B,EAC7B,4BAAuD,2CAAgC;QAPtE,WAAM,GAAN,MAAM,CAAiB;QACvB,qBAAgB,GAAhB,gBAAgB,CAAkB;QAElC,mBAAc,GAAd,cAAc,CAA2B;QAEzC,iBAAY,GAAZ,YAAY,CAAe;QAf7B,QAAG,GAAG,wBAAgB,CAAC;QAmBtC,qFAAqF;QACrF,IAAI,CAAC,KAAK,GAAG,IAAA,oCAAyB,EAAC,OAAO,EAAE,eAAe,CAAC,CAAC;QACjE,IAAI,CAAC,yBAAyB,GAAG,IAAI,8DAA+B,CACnE,cAAc,CAAC,UAAU;YACxB,yDAA+B,CAAC,qBAAqB,CAAC,8BAA8B,EACrF,CAAC,MAA8B,EAAE,EAAE,CAAC,IAAI,CAAC,MAAM,CAAC,UAAU,CAAC,MAAM,CAAC,EAClE,yBAAyB,EACzB,qBAAqB,CACrB,CAAC;IACH,CAAC;IAED;;;;;;;;;OASG;IACI,SAAS,CAAC,KAMhB;QACA,MAAM,EAAE,SAAS,EAAE,QAAQ,GAAG,KAAK,EAAE,yBAAyB,EAAE,GAAG,KAAK,CAAC;QAEzE,MAAM,UAAU,GAAG,IAAI,CAAC,MAAM,CAAC,4BAA4B,EAAE,CAAC;QAC9D,MAAM,QAAQ,GACb,IAAI,GAAG,EAAE,CAAC;QACX,2FAA2F;QAC3F,IAAA,uBAAY,EAAC,UAAU,EAAE,CAAC,MAAM,EAAE,EAAE;YACnC,MAAM,GAAG,GAAG,MAAM,CAAC,WAAW,EAAE,CAAC;YACjC,MAAM,WAAW,GAAG,IAAI,CAAC,MAAM,CAAC,cAAc,CAAC,eAAe,CAAC,CAAC;YAChE,IAAA,iBAAM,EACL,IAAI,CAAC,MAAM,CAAC,oBAAoB,CAAC,EAAE,QAAQ,EAAE,GAAG,EAAE,MAAM,EAAE,SAAS,EAAE,EAAE,WAAW,CAAC;+CAC3D,EACxB,KAAK,CAAC,iCAAiC,CACvC,CAAC;YACF,QAAQ,CAAC,GAAG,CAAC,GAAG,EAAE,WAA+D,CAAC,CAAC;QACpF,CAAC,CAAC,CAAC;QAEH,+EAA+E;QAC/E,sDAAsD;QACtD,MAAM,0BAA0B,GAAG,IAAI,CAAC,yBAAyB,CAAC,YAAY,CAAC;YAC9E,QAAQ;YACR,yBAAyB;YACzB,SAAS;SACT,CAAC,CAAC;QACH,MAAM,cAAc,GAA8B;YACjD,GAAG,IAAI,CAAC,cAAc;YACtB,yBAAyB,EACxB,0BAA0B,KAAK,+DAAgC,CAAC,WAAW;gBAC1E,CAAC,CAAC,IAAI,CAAC,yBAAyB;gBAChC,CAAC,CAAC,SAAS;SACb,CAAC;QACF,MAAM,OAAO,GAAG,IAAI,CAAC,KAAK,CAAC,MAAM,CAAC,QAAQ,EAAE,cAAc,CAAC,CAAC;QAC5D,QAAQ,CAAC,OAAO,CAAC,CAAC,KAAK,EAAE,EAAE,CAAC,KAAK,CAAC,IAAI,EAAE,CAAC,CAAC;QAE1C,OAAO,IAAI,CAAC,yBAAyB,CAAC,eAAe,CAAC;YACrD,yBAAyB;YACzB,oBAAoB,EAAE,SAAS,CAAC,OAAO,CAAC;SACxC,CAAC,CAAC;IACJ,CAAC;IAEM,KAAK,CAAC,IAAI,CAChB,QAAgC,EAChC,KAA2B;QAE3B,6GAA6G;QAC7G,4GAA4G;QAC5G,wBAAwB;QACxB,8GAA8G;QAC9G,wCAAwC;QACxC,gHAAgH;QAChH,IAAA,iBAAM,EACL,MAAM,QAAQ,CAAC,QAAQ,CAAC,sDAAuB,CAAC,EAChD,KAAK,CAAC,gDAAgD,CACtD,CAAC;QAEF,MAAM,gBAAgB,GAAG,KAAK,EAC7B,EAAU,EACG,EAAE;YACf,MAAM,UAAU,GAAG,MAAM,QAAQ,CAAC,QAAQ,CAAC,EAAE,CAAC,CAAC;YAC/C,MAAM,gBAAgB,GAAG,IAAA,6BAAc,EAAC,UAAU,EAAE,MAAM,CAAC,CAAC;YAC5D,OAAO,KAAK,CAAC,gBAAgB,CAAM,CAAC;QACrC,CAAC,CAAC;QAEF,6FAA6F;QAC7F,YAAY;QACZ,MAAM,IAAI,CAAC,yBAAyB,CAAC,IAAI,CAAC,QAAQ,EAAE,gBAAgB,CAAC,CAAC;QAEtE,2HAA2H;QAC3H,yBAAyB;QACzB,MAAM,MAAM,GAAG,IAAI,CAAC,KAAK,CAAC,MAAM,CAAC,MAAM,gBAAgB,CAAC,sDAAuB,CAAC,EAAE;YACjF,GAAG,IAAI,CAAC,cAAc;YACtB,yBAAyB,EAAE,IAAI,CAAC,yBAAyB;SACzD,CAAC,CAAC;QACH,MAAM,SAAS,GAAG,IAAA,+BAAoB,GAAE,CAAC;QACzC,MAAM,YAAY,GAAoC,EAAE,CAAC;QACzD,MAAM,KAAK,GAA6B,EAAE,CAAC;QAC3C,KAAK,MAAM,CAAC,QAAQ,EAAE,KAAK,CAAC,IAAI,MAAM,EAAE,CAAC;YACxC,MAAM,OAAO,GAAG,IAAA,+BAAgB,EAAC,KAAK,EAAE;gBACvC,MAAM,EAAE,iCAAkB;gBAC1B,YAAY,EAAE,IAAI,CAAC,YAAY;aAC/B,CAAC,CAAC;YACH,MAAM,OAAO,GAAG,EAAE,KAAK,EAAE,SAAS,CAAC,QAAQ,CAAC,OAAO,CAAC,cAAc,CAAC,EAAE,CAAC;YACtE,KAAK,CAAC,IAAI,CAAC;gBACV,EAAE,EAAE,OAAO;gBACX,KAAK,EAAE,OAAO;aACd,CAAC,CAAC;YACH,YAAY,CAAC,IAAI,CAAC,CAAC,QAAQ,EAAE,CAAC,EAAE,KAAK,EAAE,OAAO,CAAC,cAAc,EAAE,MAAM,EAAE,OAAO,EAAE,CAAC,CAAC,CAAC,CAAC;QACrF,CAAC;QAED,IAAA,iBAAM,EAAC,IAAI,CAAC,MAAM,CAAC,OAAO,EAAE,KAAK,CAAC,0BAA0B,CAAC,CAAC;QAC9D,IAAA,qBAAU,EACT,EAAE,KAAK,EAAE,MAAM,EAAE,IA
AI,GAAG,CAAC,YAAY,CAAC,EAAE,EACxC,SAAS,EACT,IAAI,CAAC,MAAM,EACX,IAAA,iCAAsB,EAAC,MAAM,EAAE,IAAI,CAAC,gBAAgB,EAAE,IAAI,CAAC,YAAY,CAAC,CACxE,CAAC;IACH,CAAC;CACD;AAjJD,4CAiJC","sourcesContent":["/*!\n * Copyright (c) Microsoft Corporation and contributors. All rights reserved.\n * Licensed under the MIT License.\n */\n\nimport { bufferToString } from \"@fluid-internal/client-utils\";\nimport { assert } from \"@fluidframework/core-utils/internal\";\nimport type { IChannelStorageService } from \"@fluidframework/datastore-definitions/internal\";\nimport type { IIdCompressor } from \"@fluidframework/id-compressor\";\nimport type {\n\tIExperimentalIncrementalSummaryContext,\n\tISummaryTreeWithStats,\n\tITelemetryContext,\n} from \"@fluidframework/runtime-definitions/internal\";\n\nimport type { CodecWriteOptions } from \"../../codec/index.js\";\nimport {\n\ttype DeltaDetachedNodeBuild,\n\ttype DeltaFieldChanges,\n\ttype FieldKey,\n\ttype IEditableForest,\n\ttype ITreeCursorSynchronous,\n\ttype ITreeSubscriptionCursor,\n\ttype RevisionTagCodec,\n\tTreeNavigationResult,\n\tapplyDelta,\n\tforEachField,\n\tmakeDetachedFieldIndex,\n} from \"../../core/index.js\";\nimport type {\n\tSummarizable,\n\tSummaryElementParser,\n\tSummaryElementStringifier,\n} from \"../../shared-tree-core/index.js\";\nimport { idAllocatorFromMaxId, type JsonCompatible } from \"../../util/index.js\";\n// eslint-disable-next-line import/no-internal-modules\nimport { chunkFieldSingle, defaultChunkPolicy } from \"../chunked-forest/chunkTree.js\";\nimport {\n\tdefaultIncrementalEncodingPolicy,\n\ttype FieldBatchCodec,\n\ttype FieldBatchEncodingContext,\n\ttype IncrementalEncodingPolicy,\n} from \"../chunked-forest/index.js\";\n\nimport { type ForestCodec, makeForestSummarizerCodec } from \"./codec.js\";\nimport {\n\tForestIncrementalSummaryBehavior,\n\tForestIncrementalSummaryBuilder,\n\tforestSummaryContentKey,\n} from \"./incrementalSummaryBuilder.js\";\nimport { TreeCompressionStrategyExtended } from \"../treeCompressionUtils.js\";\nimport type { IFluidHandle } from \"@fluidframework/core-interfaces\";\n\n/**\n * The key for the tree that contains the overall forest's summary tree.\n * This tree is added by the parent of the forest summarizer.\n * See {@link ForestIncrementalSummaryBuilder} for details on the summary structure.\n */\nexport const forestSummaryKey = \"Forest\";\n\n/**\n * Provides methods for summarizing and loading a forest.\n */\nexport class ForestSummarizer implements Summarizable {\n\tpublic readonly key = forestSummaryKey;\n\n\tprivate readonly codec: ForestCodec;\n\n\tprivate readonly incrementalSummaryBuilder: ForestIncrementalSummaryBuilder;\n\n\t/**\n\t * @param encoderContext - The schema if provided here must be mutated by the caller to keep it up to date.\n\t */\n\tpublic constructor(\n\t\tprivate readonly forest: IEditableForest,\n\t\tprivate readonly revisionTagCodec: RevisionTagCodec,\n\t\tfieldBatchCodec: FieldBatchCodec,\n\t\tprivate readonly encoderContext: FieldBatchEncodingContext,\n\t\toptions: CodecWriteOptions,\n\t\tprivate readonly idCompressor: IIdCompressor,\n\t\tinitialSequenceNumber: number,\n\t\tshouldEncodeIncrementally: IncrementalEncodingPolicy = defaultIncrementalEncodingPolicy,\n\t) {\n\t\t// TODO: this should take in CodecWriteOptions, and use it to pick the write version.\n\t\tthis.codec = makeForestSummarizerCodec(options, fieldBatchCodec);\n\t\tthis.incrementalSummaryBuilder = new ForestIncrementalSummaryBuilder(\n\t\t\tencoderContext.encodeType 
===\n\t\t\t\tTreeCompressionStrategyExtended.CompressedIncremental /* enableIncrementalSummary */,\n\t\t\t(cursor: ITreeCursorSynchronous) => this.forest.chunkField(cursor),\n\t\t\tshouldEncodeIncrementally,\n\t\t\tinitialSequenceNumber,\n\t\t);\n\t}\n\n\t/**\n\t * Summarization of the forest's tree content.\n\t * @returns a summary tree containing the forest's tree content.\n\t * @remarks\n\t * If incremental summary is disabled, all the content will be added to a single summary blob.\n\t * If incremental summary is enabled, the summary will be a tree.\n\t * See {@link ForestIncrementalSummaryBuilder} for details of what this tree looks like.\n\t *\n\t * TODO: when perf matters, this should be replaced with a chunked async version using a binary format.\n\t */\n\tpublic summarize(props: {\n\t\tstringify: SummaryElementStringifier;\n\t\tfullTree?: boolean;\n\t\ttrackState?: boolean;\n\t\ttelemetryContext?: ITelemetryContext;\n\t\tincrementalSummaryContext?: IExperimentalIncrementalSummaryContext;\n\t}): ISummaryTreeWithStats {\n\t\tconst { stringify, fullTree = false, incrementalSummaryContext } = props;\n\n\t\tconst rootCursor = this.forest.getCursorAboveDetachedFields();\n\t\tconst fieldMap: Map<FieldKey, ITreeCursorSynchronous & ITreeSubscriptionCursor> =\n\t\t\tnew Map();\n\t\t// TODO: Encode all detached fields in one operation for better performance and compression\n\t\tforEachField(rootCursor, (cursor) => {\n\t\t\tconst key = cursor.getFieldKey();\n\t\t\tconst innerCursor = this.forest.allocateCursor(\"getTreeString\");\n\t\t\tassert(\n\t\t\t\tthis.forest.tryMoveCursorToField({ fieldKey: key, parent: undefined }, innerCursor) ===\n\t\t\t\t\tTreeNavigationResult.Ok,\n\t\t\t\t0x892 /* failed to navigate to field */,\n\t\t\t);\n\t\t\tfieldMap.set(key, innerCursor as ITreeCursorSynchronous & ITreeSubscriptionCursor);\n\t\t});\n\n\t\t// Let the incremental summary builder know that we are starting a new summary.\n\t\t// It returns whether incremental encoding is enabled.\n\t\tconst incrementalSummaryBehavior = this.incrementalSummaryBuilder.startSummary({\n\t\t\tfullTree,\n\t\t\tincrementalSummaryContext,\n\t\t\tstringify,\n\t\t});\n\t\tconst encoderContext: FieldBatchEncodingContext = {\n\t\t\t...this.encoderContext,\n\t\t\tincrementalEncoderDecoder:\n\t\t\t\tincrementalSummaryBehavior === ForestIncrementalSummaryBehavior.Incremental\n\t\t\t\t\t? 
this.incrementalSummaryBuilder\n\t\t\t\t\t: undefined,\n\t\t};\n\t\tconst encoded = this.codec.encode(fieldMap, encoderContext);\n\t\tfieldMap.forEach((value) => value.free());\n\n\t\treturn this.incrementalSummaryBuilder.completeSummary({\n\t\t\tincrementalSummaryContext,\n\t\t\tforestSummaryContent: stringify(encoded),\n\t\t});\n\t}\n\n\tpublic async load(\n\t\tservices: IChannelStorageService,\n\t\tparse: SummaryElementParser,\n\t): Promise<void> {\n\t\t// The contents of the top-level forest must be present under a summary blob named `forestSummaryContentKey`.\n\t\t// If the summary was generated as `ForestIncrementalSummaryBehavior.SingleBlob`, this blob will contain all\n\t\t// of forest's contents.\n\t\t// If the summary was generated as `ForestIncrementalSummaryBehavior.Incremental`, this blob will contain only\n\t\t// the top-level forest node's contents.\n\t\t// The contents of the incremental chunks will be in separate tree nodes and will be read later during decoding.\n\t\tassert(\n\t\t\tawait services.contains(forestSummaryContentKey),\n\t\t\t0xc21 /* Forest summary content missing in snapshot */,\n\t\t);\n\n\t\tconst readAndParseBlob = async <T extends JsonCompatible<IFluidHandle>>(\n\t\t\tid: string,\n\t\t): Promise<T> => {\n\t\t\tconst treeBuffer = await services.readBlob(id);\n\t\t\tconst treeBufferString = bufferToString(treeBuffer, \"utf8\");\n\t\t\treturn parse(treeBufferString) as T;\n\t\t};\n\n\t\t// Load the incremental summary builder so that it can download any incremental chunks in the\n\t\t// snapshot.\n\t\tawait this.incrementalSummaryBuilder.load(services, readAndParseBlob);\n\n\t\t// TODO: this code is parsing data without an optional validator, this should be defined in a typebox schema as part of the\n\t\t// forest summary format.\n\t\tconst fields = this.codec.decode(await readAndParseBlob(forestSummaryContentKey), {\n\t\t\t...this.encoderContext,\n\t\t\tincrementalEncoderDecoder: this.incrementalSummaryBuilder,\n\t\t});\n\t\tconst allocator = idAllocatorFromMaxId();\n\t\tconst fieldChanges: [FieldKey, DeltaFieldChanges][] = [];\n\t\tconst build: DeltaDetachedNodeBuild[] = [];\n\t\tfor (const [fieldKey, field] of fields) {\n\t\t\tconst chunked = chunkFieldSingle(field, {\n\t\t\t\tpolicy: defaultChunkPolicy,\n\t\t\t\tidCompressor: this.idCompressor,\n\t\t\t});\n\t\t\tconst buildId = { minor: allocator.allocate(chunked.topLevelLength) };\n\t\t\tbuild.push({\n\t\t\t\tid: buildId,\n\t\t\t\ttrees: chunked,\n\t\t\t});\n\t\t\tfieldChanges.push([fieldKey, [{ count: chunked.topLevelLength, attach: buildId }]]);\n\t\t}\n\n\t\tassert(this.forest.isEmpty, 0x797 /* forest must be empty */);\n\t\tapplyDelta(\n\t\t\t{ build, fields: new Map(fieldChanges) },\n\t\t\tundefined,\n\t\t\tthis.forest,\n\t\t\tmakeDetachedFieldIndex(\"init\", this.revisionTagCodec, this.idCompressor),\n\t\t);\n\t}\n}\n"]}
@@ -93,6 +93,7 @@ export declare class ForestIncrementalSummaryBuilder implements IncrementalEncod
     private readonly enableIncrementalSummary;
     private readonly getChunkAtCursor;
     readonly shouldEncodeIncrementally: IncrementalEncodingPolicy;
+    private readonly initialSequenceNumber;
     /**
      * The next reference ID to use for a chunk.
      */
@@ -119,8 +120,12 @@ export declare class ForestIncrementalSummaryBuilder implements IncrementalEncod
      * A map of chunk reference IDs to their encoded contents. This is typically used during the loading of the
      * forest to retrieve the contents of the chunks that were summarized incrementally.
      */
-
-
+    /**
+     * A map of chunk reference IDs to their {@link ChunkLoadProperties}.
+     * This is used during the loading of the forest to track each chunk that is retrieved and decoded.
+     */
+    private readonly loadedChunksMap;
+    constructor(enableIncrementalSummary: boolean, getChunkAtCursor: (cursor: ITreeCursorSynchronous) => TreeChunk, shouldEncodeIncrementally: IncrementalEncodingPolicy, initialSequenceNumber: number);
     /**
      * Must be called when the forest is loaded to download the encoded contents of incremental chunks.
      * @param services - The channel storage service to use to access the snapshot tree and download the
@@ -161,11 +166,8 @@ export declare class ForestIncrementalSummaryBuilder implements IncrementalEncod
         forestSummaryContent: string;
     }): ISummaryTreeWithStats;
     /**
-     *
-     * This is typically used when loading the forest to retrieve the contents of incremental chunks.
-     * @param referenceId - The reference ID of the chunk to retrieve.
-     * @returns The encoded contents of the chunk.
+     * {@link IncrementalEncoder.decodeIncrementalChunk}
      */
-
+    decodeIncrementalChunk(referenceId: ChunkReferenceId, chunkDecoder: (encoded: EncodedFieldBatch) => TreeChunk): TreeChunk;
 }
 //# sourceMappingURL=incrementalSummaryBuilder.d.ts.map
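The declaration changes above describe a load-time lookup table (`loadedChunksMap`) plus a `decodeIncrementalChunk(referenceId, chunkDecoder)` method that decodes an entry on demand through a caller-supplied callback. The sketch below illustrates that general pattern in isolation; the `ReferenceId`, `EncodedChunk`, `DecodedChunk`, and `ChunkStore` names are hypothetical stand-ins, not types from this package.

```typescript
// Illustrative sketch only: a reference-ID-keyed store of encoded chunk
// contents, decoded lazily via a caller-supplied decoder callback.
type ReferenceId = number;

interface EncodedChunk {
	readonly contents: string;
	readonly summaryPath: string;
}

interface DecodedChunk {
	readonly size: number;
}

class ChunkStore {
	// Encoded chunks captured while loading a summary, keyed by reference ID.
	private readonly loadedChunks = new Map<ReferenceId, EncodedChunk>();

	// Record an encoded chunk found in the snapshot.
	public addLoadedChunk(id: ReferenceId, chunk: EncodedChunk): void {
		this.loadedChunks.set(id, chunk);
	}

	// Decode a previously loaded chunk, mirroring the
	// decodeIncrementalChunk(referenceId, chunkDecoder) shape.
	public decodeChunk(
		id: ReferenceId,
		decoder: (encoded: EncodedChunk) => DecodedChunk,
	): DecodedChunk {
		const encoded = this.loadedChunks.get(id);
		if (encoded === undefined) {
			throw new Error(`No encoded contents for chunk ${id}`);
		}
		return decoder(encoded);
	}
}

// Usage: store a chunk at load time, decode it later on demand.
const store = new ChunkStore();
store.addLoadedChunk(0, { contents: "...", summaryPath: "Forest/0" });
const decoded = store.decodeChunk(0, (encoded) => ({ size: encoded.contents.length }));
```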
@@ -1 +1 @@
-
{"version":3,"file":"incrementalSummaryBuilder.d.ts","sourceRoot":"","sources":["../../../src/feature-libraries/forest-summary/incrementalSummaryBuilder.ts"],"names":[],"mappings":"AAAA;;;GAGG;AAGH,OAAO,KAAK,EACX,sCAAsC,EACtC,qBAAqB,EACrB,MAAM,8CAA8C,CAAC;AAEtD,OAAO,EAIN,KAAK,cAAc,EAEnB,MAAM,qBAAqB,CAAC;AAC7B,OAAO,KAAK,EACX,gBAAgB,EAChB,iBAAiB,EACjB,yBAAyB,EACzB,yBAAyB,EACzB,SAAS,EACT,MAAM,4BAA4B,CAAC;AACpC,OAAO,KAAK,EAAE,sBAAsB,EAAE,MAAM,qBAAqB,CAAC;AAElE,OAAO,KAAK,EAAE,sBAAsB,EAAE,MAAM,gDAAgD,CAAC;AAG7F,OAAO,KAAK,EAAE,YAAY,EAAE,MAAM,iCAAiC,CAAC;AACpE,OAAO,KAAK,EAAE,yBAAyB,EAAE,MAAM,iCAAiC,CAAC;AAEjF;;;;GAIG;AACH,eAAO,MAAM,uBAAuB,eAAe,CAAC;AAUpD;;GAEG;AACH,eAAO,MAAM,0BAA0B;IACtC,4CAA4C;;IAE5C,wCAAwC;;CAE/B,CAAC;AACX,MAAM,MAAM,0BAA0B,GACrC,CAAC,OAAO,0BAA0B,CAAC,CAAC,MAAM,OAAO,0BAA0B,CAAC,CAAC;
+
{"version":3,"file":"incrementalSummaryBuilder.d.ts","sourceRoot":"","sources":["../../../src/feature-libraries/forest-summary/incrementalSummaryBuilder.ts"],"names":[],"mappings":"AAAA;;;GAGG;AAGH,OAAO,KAAK,EACX,sCAAsC,EACtC,qBAAqB,EACrB,MAAM,8CAA8C,CAAC;AAEtD,OAAO,EAIN,KAAK,cAAc,EAEnB,MAAM,qBAAqB,CAAC;AAC7B,OAAO,KAAK,EACX,gBAAgB,EAChB,iBAAiB,EACjB,yBAAyB,EACzB,yBAAyB,EACzB,SAAS,EACT,MAAM,4BAA4B,CAAC;AACpC,OAAO,KAAK,EAAE,sBAAsB,EAAE,MAAM,qBAAqB,CAAC;AAElE,OAAO,KAAK,EAAE,sBAAsB,EAAE,MAAM,gDAAgD,CAAC;AAG7F,OAAO,KAAK,EAAE,YAAY,EAAE,MAAM,iCAAiC,CAAC;AACpE,OAAO,KAAK,EAAE,yBAAyB,EAAE,MAAM,iCAAiC,CAAC;AAEjF;;;;GAIG;AACH,eAAO,MAAM,uBAAuB,eAAe,CAAC;AAUpD;;GAEG;AACH,eAAO,MAAM,0BAA0B;IACtC,4CAA4C;;IAE5C,wCAAwC;;CAE/B,CAAC;AACX,MAAM,MAAM,0BAA0B,GACrC,CAAC,OAAO,0BAA0B,CAAC,CAAC,MAAM,OAAO,0BAA0B,CAAC,CAAC;AAkF9E;;GAEG;AACH,oBAAY,gCAAgC;IAC3C;;;;OAIG;IACH,WAAW,IAAA;IACX;;;;;;;OAOG;IACH,UAAU,IAAA;CACV;AAyCD;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;GA2CG;AAEH,qBAAa,+BAAgC,YAAW,yBAAyB;IA4C/E,OAAO,CAAC,QAAQ,CAAC,wBAAwB;IACzC,OAAO,CAAC,QAAQ,CAAC,gBAAgB;aACjB,yBAAyB,EAAE,yBAAyB;IACpE,OAAO,CAAC,QAAQ,CAAC,qBAAqB;IA9CvC;;OAEG;IACH,OAAO,CAAC,eAAe,CAA8B;IAErD;;;OAGG;IACH,OAAO,CAAC,QAAQ,CAAC,0BAA0B,CAI7B;IAEd;;OAEG;IACI,kBAAkB,EAAE,0BAA0B,CACZ;IAEzC;;OAEG;IACH,OAAO,CAAC,2BAA2B,CAAc;IAEjD;;;OAGG;IACH,OAAO,CAAC,wBAAwB,CAAuC;IAEvE;;;OAGG;IACH;;;OAGG;IACH,OAAO,CAAC,QAAQ,CAAC,eAAe,CAA+C;gBAG7D,wBAAwB,EAAE,OAAO,EACjC,gBAAgB,EAAE,CAAC,MAAM,EAAE,sBAAsB,KAAK,SAAS,EAChE,yBAAyB,EAAE,yBAAyB,EACnD,qBAAqB,EAAE,MAAM;IAG/C;;;;;OAKG;IACU,IAAI,CAChB,QAAQ,EAAE,sBAAsB,EAChC,iBAAiB,EAAE,CAAC,CAAC,SAAS,cAAc,CAAC,YAAY,CAAC,EAAE,EAAE,EAAE,MAAM,KAAK,OAAO,CAAC,CAAC,CAAC,GACnF,OAAO,CAAC,IAAI,CAAC;IA0ChB;;;;;;;;OAQG;IACI,YAAY,CAAC,IAAI,EAAE;QACzB,QAAQ,EAAE,OAAO,CAAC;QAClB,yBAAyB,EAAE,sCAAsC,GAAG,SAAS,CAAC;QAC9E,SAAS,EAAE,yBAAyB,CAAC;KACrC,GAAG,gCAAgC;IAwBpC;;;OAGG;IACI,sBAAsB,CAC5B,MAAM,EAAE,sBAAsB,EAC9B,YAAY,EAAE,CAAC,KAAK,EAAE,SAAS,KAAK,iBAAiB,GACnD,gBAAgB,EAAE;IAgFrB;;;;;;;;OAQG;IACI,eAAe,CAAC,IAAI,EAAE;QAC5B,yBAAyB,EAAE,sCAAsC,GAAG,SAAS,CAAC;QAC9E,oBAAoB,EAAE,MAAM,CAAC;KAC7B,GAAG,qBAAqB;IA+CzB;;OAEG;IACI,sBAAsB,CAC5B,WAAW,EAAE,gBAAgB,EAC7B,YAAY,EAAE,CAAC,OAAO,EAAE,iBAAiB,KAAK,SAAS,GACrD,SAAS;CAkBZ"}
@@ -118,10 +118,11 @@ function validateReadyToTrackSummary(forestSummaryState, trackedSummaryPropertie
      */
     /* eslint-enable jsdoc/check-indentation */
 class ForestIncrementalSummaryBuilder {
-    constructor(enableIncrementalSummary, getChunkAtCursor, shouldEncodeIncrementally) {
+    constructor(enableIncrementalSummary, getChunkAtCursor, shouldEncodeIncrementally, initialSequenceNumber) {
         this.enableIncrementalSummary = enableIncrementalSummary;
         this.getChunkAtCursor = getChunkAtCursor;
         this.shouldEncodeIncrementally = shouldEncodeIncrementally;
+        this.initialSequenceNumber = initialSequenceNumber;
         /**
          * The next reference ID to use for a chunk.
          */
@@ -143,7 +144,11 @@ class ForestIncrementalSummaryBuilder {
          * A map of chunk reference IDs to their encoded contents. This is typically used during the loading of the
          * forest to retrieve the contents of the chunks that were summarized incrementally.
          */
-
+        /**
+         * A map of chunk reference IDs to their {@link ChunkLoadProperties}.
+         * This is used during the loading of the forest to track each chunk that is retrieved and decoded.
+         */
+        this.loadedChunksMap = new Map();
     }
     /**
      * Must be called when the forest is loaded to download the encoded contents of incremental chunks.
@@ -170,7 +175,12 @@ class ForestIncrementalSummaryBuilder {
                     throw new internal_3.LoggingError(`SharedTree: Cannot find contents for incremental chunk ${chunkContentsPath}`);
                 }
                 const chunkContents = await readAndParseChunk(chunkContentsPath);
-                this.encodedChunkContentsMap.set(chunkReferenceId, chunkContents);
+                this.loadedChunksMap.set(chunkReferenceId, {
+                    encodedContents: chunkContents,
+                    summaryPath: chunkSubTreePath,
+                });
+                const chunkReferenceIdNumber = Number(chunkReferenceId);
+                this.nextReferenceId = (0, index_js_1.brand)(Math.max(this.nextReferenceId, chunkReferenceIdNumber + 1));
                 // Recursively download the contents of chunks in this chunk's sub tree.
                 await downloadChunkContentsInTree(chunkSnapshotTree, `${chunkSubTreePath}/`);
             }
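The load-path hunk above does two things: it records where each incremental chunk's encoded contents came from (loadedChunksMap), and it advances nextReferenceId past every reference ID found in the loaded summary, which reads as collision avoidance for IDs this client may hand out later. A minimal TypeScript sketch of that bookkeeping, with plain types standing in for the package's brand/ChunkReferenceId machinery and the helper name recordLoadedChunk invented for illustration:

interface ChunkLoadProperties {
    // EncodedFieldBatch in the real code; left loose here.
    readonly encodedContents: unknown;
    readonly summaryPath: string;
}

const loadedChunksMap = new Map<string, ChunkLoadProperties>();
let nextReferenceId = 0;

function recordLoadedChunk(chunkReferenceId: string, encodedContents: unknown, summaryPath: string): void {
    loadedChunksMap.set(chunkReferenceId, { encodedContents, summaryPath });
    // Never hand out a reference ID that the loaded summary already uses for an existing subtree.
    nextReferenceId = Math.max(nextReferenceId, Number(chunkReferenceId) + 1);
}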
@@ -219,6 +229,10 @@ class ForestIncrementalSummaryBuilder {
         }
         let chunkReferenceId;
         let chunkProperties;
+        // An additional ref-count must be added to these chunks representing a reference from the summary tree to the chunk.
+        // This will ensure that the blob's content never change and thus the reference stays accurate: instead of modifying it,
+        // a copy will be created without the blob reference.
+        // The "getChunkAtCursor" adds this additional ref-count.
         const chunk = this.getChunkAtCursor(cursor);
         // Try and get the properties of the chunk from the latest successful summary.
         // If it exists and the summary is not a full tree, use the properties to generate a summary handle.
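The comment block added above relies on chunk reference counting: once the summary tracking map also holds a chunk, anything that wants to edit that part of the tree should copy the chunk rather than mutate it in place, so the blob the summary points at keeps matching the tracked chunk. A rough TypeScript sketch of that copy-on-write idea; the class and the withValue helper are illustrative stand-ins, not the package's TreeChunk API:

class RefCountedChunk {
    private refCount = 1;
    public constructor(public readonly values: readonly number[]) {}
    public referenceAdded(): void {
        this.refCount += 1;
    }
    // Mutate in place only when uniquely owned; otherwise copy and leave the shared original intact.
    public withValue(index: number, value: number): RefCountedChunk {
        if (this.refCount === 1) {
            (this.values as number[])[index] = value;
            return this;
        }
        const copy = [...this.values];
        copy[index] = value;
        return new RefCountedChunk(copy);
    }
}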
@@ -303,15 +317,24 @@ class ForestIncrementalSummaryBuilder {
         return summaryTree;
     }
     /**
-     * Called to get the encoded contents of an incremental chunk with the given reference ID.
-     * This is typically used when loading the forest to retrieve the contents of incremental chunks.
-     * @param referenceId - The reference ID of the chunk to retrieve.
-     * @returns The encoded contents of the chunk.
+     * {@link IncrementalEncoder.decodeIncrementalChunk}
      */
-    getEncodedIncrementalChunk(referenceId) {
-        const chunkEncodedContents = this.encodedChunkContentsMap.get(`${referenceId}`);
-        (0, internal_1.assert)(chunkEncodedContents !== undefined, 0xc26 /* Incremental chunk contents not found */);
-        return chunkEncodedContents;
+    decodeIncrementalChunk(referenceId, chunkDecoder) {
+        const ChunkLoadProperties = this.loadedChunksMap.get(`${referenceId}`);
+        (0, internal_1.assert)(ChunkLoadProperties !== undefined, "Encoded incremental chunk not found");
+        const chunk = chunkDecoder(ChunkLoadProperties.encodedContents);
+        // Account for the reference about to be added in `chunkTrackingPropertiesMap`
+        // to ensure that no other users of this chunk think they have unique ownership.
+        // This prevents prevent whoever this chunk is returned to from modifying it in-place.
+        chunk.referenceAdded();
+        // Track the decoded chunk. This will recreate the tracking state when the summary that this client
+        // is loaded from was generated. This is needed to ensure that incremental summaries work correctly
+        // when a new client starts to summarize.
+        (0, index_js_1.setInNestedMap)(this.chunkTrackingPropertiesMap, this.initialSequenceNumber, chunk, {
+            referenceId,
+            summaryPath: ChunkLoadProperties.summaryPath,
+        });
+        return chunk;
     }
 }
 exports.ForestIncrementalSummaryBuilder = ForestIncrementalSummaryBuilder;
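Together with the new constructor parameter, decodeIncrementalChunk rebuilds at load time the tracking entries that existed when the loaded summary was produced, keyed by initialSequenceNumber, so a client that later starts summarizing can still emit handles for unchanged chunks. A condensed TypeScript sketch of that flow; plain Maps and the name decodeIncrementalChunkSketch stand in for the package's NestedMap, brand, and assert helpers:

interface LoadedChunk {
    readonly encodedContents: unknown;
    readonly summaryPath: string;
}
interface DecodedChunk {
    referenceAdded(): void;
}

function decodeIncrementalChunkSketch(
    referenceId: string,
    loadedChunksMap: ReadonlyMap<string, LoadedChunk>,
    chunkDecoder: (encoded: unknown) => DecodedChunk,
    chunkTracking: Map<number, Map<DecodedChunk, { referenceId: string; summaryPath: string }>>,
    initialSequenceNumber: number,
): DecodedChunk {
    const loaded = loadedChunksMap.get(referenceId);
    if (loaded === undefined) {
        throw new Error("Encoded incremental chunk not found");
    }
    const chunk = chunkDecoder(loaded.encodedContents);
    // The tracking map below keeps a reference to the chunk, so count it to block in-place edits.
    chunk.referenceAdded();
    let perSequence = chunkTracking.get(initialSequenceNumber);
    if (perSequence === undefined) {
        perSequence = new Map();
        chunkTracking.set(initialSequenceNumber, perSequence);
    }
    perSequence.set(chunk, { referenceId, summaryPath: loaded.summaryPath });
    return chunk;
}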
@@ -1 +1 @@
-
{"version":3,"file":"incrementalSummaryBuilder.js","sourceRoot":"","sources":["../../../src/feature-libraries/forest-summary/incrementalSummaryBuilder.ts"],"names":[],"mappings":";AAAA;;;GAGG;;;AAEH,kEAA6D;AAK7D,qEAA4E;AAC5E,kDAM6B;AAS7B,2EAAiE;AAGjE,uEAAwE;AAIxE;;;;GAIG;AACU,QAAA,uBAAuB,GAAG,YAAY,CAAC;AAEpD;;;;;GAKG;AACH,MAAM,oBAAoB,GAAG,UAAU,CAAC;AAExC;;GAEG;AACU,QAAA,0BAA0B,GAAG;IACzC,4CAA4C;IAC5C,QAAQ,EAAE,UAAU;IACpB,wCAAwC;IACxC,YAAY,EAAE,cAAc;CACnB,CAAC;AA2DX;;GAEG;AACH,IAAY,gCAgBX;AAhBD,WAAY,gCAAgC;IAC3C;;;;OAIG;IACH,qGAAW,CAAA;IACX;;;;;;;OAOG;IACH,mGAAU,CAAA;AACX,CAAC,EAhBW,gCAAgC,gDAAhC,gCAAgC,QAgB3C;AAED;;;;GAIG;AACH,SAAS,uBAAuB,CAC/B,kBAA8C,EAC9C,wBAA8D;IAE9D,IAAA,iBAAM,EACL,kBAAkB,KAAK,kCAA0B,CAAC,QAAQ,EAC1D,KAAK,CAAC,4BAA4B,CAClC,CAAC;IACF,IAAA,iBAAM,EACL,wBAAwB,KAAK,SAAS,EACtC,KAAK,CAAC,0EAA0E,CAChF,CAAC;AACH,CAAC;AAED;;;;GAIG;AACH,SAAS,2BAA2B,CACnC,kBAA8C,EAC9C,wBAA8D;IAE9D,IAAA,iBAAM,EACL,kBAAkB,KAAK,kCAA0B,CAAC,YAAY,EAC9D,KAAK,CAAC,gCAAgC,CACtC,CAAC;IACF,IAAA,iBAAM,EACL,wBAAwB,KAAK,SAAS,EACtC,KAAK,CAAC,0EAA0E,CAChF,CAAC;AACH,CAAC;AAED,4CAA4C;AAC5C;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;GA2CG;AACH,2CAA2C;AAC3C,MAAa,+BAA+B;IAuC3C,YACkB,wBAAiC,EACjC,gBAA+D,EAChE,yBAAoD;QAFnD,6BAAwB,GAAxB,wBAAwB,CAAS;QACjC,qBAAgB,GAAhB,gBAAgB,CAA+C;QAChE,8BAAyB,GAAzB,yBAAyB,CAA2B;QAzCrE;;WAEG;QACK,oBAAe,GAAqB,IAAA,gBAAK,EAAC,CAAC,CAAC,CAAC;QAErD;;;WAGG;QACc,+BAA0B,GAIvC,IAAI,GAAG,EAAE,CAAC;QAEd;;WAEG;QACI,uBAAkB,GACxB,kCAA0B,CAAC,YAAY,CAAC;QAEzC;;WAEG;QACK,gCAA2B,GAAW,CAAC,CAAC,CAAC;QAQjD;;;WAGG;QACc,4BAAuB,GAAmC,IAAI,GAAG,EAAE,CAAC;IAMlF,CAAC;IAEJ;;;;;OAKG;IACI,KAAK,CAAC,IAAI,CAChB,QAAgC,EAChC,iBAAqF;QAErF,MAAM,UAAU,GAAG,QAAQ,CAAC,eAAe,EAAE,EAAE,CAAC;QAChD,wGAAwG;QACxG,qDAAqD;QACrD,IAAI,UAAU,KAAK,SAAS,EAAE,CAAC;YAC9B,OAAO;QACR,CAAC;QAED,uGAAuG;QACvG,uDAAuD;QACvD,MAAM,2BAA2B,GAAG,KAAK,EACxC,YAA2B,EAC3B,aAAqB,EACL,EAAE;YAClB,iGAAiG;YACjG,oDAAoD;YACpD,KAAK,MAAM,CAAC,gBAAgB,EAAE,iBAAiB,CAAC,IAAI,MAAM,CAAC,OAAO,CAAC,YAAY,CAAC,KAAK,CAAC,EAAE,CAAC;gBACxF,MAAM,gBAAgB,GAAG,GAAG,aAAa,GAAG,gBAAgB,EAAE,CAAC;gBAC/D,MAAM,iBAAiB,GAAG,GAAG,gBAAgB,IAAI,oBAAoB,EAAE,CAAC;gBACxE,IAAI,CAAC,CAAC,MAAM,QAAQ,CAAC,QAAQ,CAAC,iBAAiB,CAAC,CAAC,EAAE,CAAC;oBACnD,MAAM,IAAI,uBAAY,CACrB,0DAA0D,iBAAiB,EAAE,CAC7E,CAAC;gBACH,CAAC;gBACD,MAAM,aAAa,GAAG,MAAM,iBAAiB,CAAoB,iBAAiB,CAAC,CAAC;gBACpF,IAAI,CAAC,uBAAuB,CAAC,GAAG,CAAC,gBAAgB,EAAE,aAAa,CAAC,CAAC;gBAElE,wEAAwE;gBACxE,MAAM,2BAA2B,CAAC,iBAAiB,EAAE,GAAG,gBAAgB,GAAG,CAAC,CAAC;YAC9E,CAAC;QACF,CAAC,CAAC;QACF,MAAM,2BAA2B,CAAC,UAAU,EAAE,EAAE,CAAC,CAAC;IACnD,CAAC;IAED;;;;;;;;OAQG;IACI,YAAY,CAAC,IAInB;QACA,MAAM,EAAE,QAAQ,EAAE,yBAAyB,EAAE,SAAS,EAAE,GAAG,IAAI,CAAC;QAChE,6GAA6G;QAC7G,0EAA0E;QAC1E,0GAA0G;QAC1G,IAAI,CAAC,IAAI,CAAC,wBAAwB,IAAI,yBAAyB,KAAK,SAAS,EAAE,CAAC;YAC/E,OAAO,gCAAgC,CAAC,UAAU,CAAC;QACpD,CAAC;QAED,2BAA2B,CAAC,IAAI,CAAC,kBAAkB,EAAE,IAAI,CAAC,wBAAwB,CAAC,CAAC;QAEpF,IAAI,CAAC,kBAAkB,GAAG,kCAA0B,CAAC,QAAQ,CAAC;QAC9D,IAAI,CAAC,2BAA2B,GAAG,yBAAyB,CAAC,2BAA2B,CAAC;QACzF,IAAI,CAAC,wBAAwB,GAAG;YAC/B,qBAAqB,EAAE,yBAAyB,CAAC,qBAAqB;YACtE,qBAAqB,EAAE,yBAAyB,CAAC,WAAW;YAC5D,gBAAgB,EAAE,EAAE;YACpB,oBAAoB,EAAE,IAAI,6BAAkB,EAAE;YAC9C,QAAQ;YACR,SAAS;SACT,CAAC;QACF,OAAO,gCAAgC,CAAC,WAAW,CAAC;IACrD,CAAC;IAED;;;OAGG;IACI,sBAAsB,CAC5B,MAA8B,EAC9B,YAAqD;QAErD,0GAA0G;QAC1G,uBAAuB,CAAC,IAAI,CAAC,kBAAkB,EAAE,IAAI,CAAC,wBAAwB,CAAC,CAAC;QAEhF,IAAI,MAAM,CAAC,cAAc,EAAE,KAAK,CAAC,EAAE,CAAC;YACnC,OAAO,EAAE,CAAC;QACX,CAAC;QAED,IAAI,gBAAkC,CAAC;QACvC,IAAI,eAAuC,CAAC;QAE5C,MAAM,KAAK,GAAG,IAAI,CAAC,gBAAgB,CAAC,MAAM,CAAC,CAAC;QAE5C,8EAA8E;QAC9E,oGAAoG;QACpG,6EAA6E;QAC7E,MAAM,uBAAuB,GAAG,IAAA,8BAAmB,EAClD
,IAAI,CAAC,0BAA0B,EAC/B,IAAI,CAAC,2BAA2B,EAChC,KAAK,CACL,CAAC;QACF,IAAI,uBAAuB,KAAK,SAAS,IAAI,CAAC,IAAI,CAAC,wBAAwB,CAAC,QAAQ,EAAE,CAAC;YACtF,eAAe,GAAG,uBAAuB,CAAC;YAC1C,gBAAgB,GAAG,uBAAuB,CAAC,WAAW,CAAC;YACvD,IAAI,CAAC,wBAAwB,CAAC,oBAAoB,CAAC,SAAS,CAC3D,GAAG,gBAAgB,EAAE,EACrB,gCAAW,CAAC,IAAI,EAChB,GAAG,IAAI,CAAC,wBAAwB,CAAC,qBAAqB,IAAI,uBAAuB,CAAC,WAAW,EAAE,CAC/F,CAAC;QACH,CAAC;aAAM,CAAC;YACP,6CAA6C;YAC7C,gBAAgB,GAAG,IAAA,gBAAK,EAAC,IAAI,CAAC,eAAe,EAAE,CAAC,CAAC;YACjD,oGAAoG;YACpG,2CAA2C;YAC3C,qGAAqG;YACrG,oDAAoD;YACpD,IAAI,CAAC,wBAAwB,CAAC,gBAAgB,CAAC,IAAI,CAAC,gBAAgB,CAAC,CAAC;YAEtE,eAAe,GAAG;gBACjB,WAAW,EAAE,gBAAgB;gBAC7B,WAAW,EAAE,IAAI,CAAC,wBAAwB,CAAC,gBAAgB,CAAC,IAAI,CAAC,GAAG,CAAC;aACrE,CAAC;YAEF,MAAM,oBAAoB,GAAG,IAAI,CAAC,wBAAwB,CAAC,oBAAoB,CAAC;YAChF,oGAAoG;YACpG,gDAAgD;YAChD,oGAAoG;YACpG,qGAAqG;YACrG,MAAM,mBAAmB,GAAG,IAAI,6BAAkB,EAAE,CAAC;YACrD,IAAI,CAAC,wBAAwB,CAAC,oBAAoB,GAAG,mBAAmB,CAAC;YACzE,mBAAmB,CAAC,OAAO,CAC1B,oBAAoB,EACpB,IAAI,CAAC,wBAAwB,CAAC,SAAS,CAAC,YAAY,CAAC,KAAK,CAAC,CAAC,CAC5D,CAAC;YAEF,oGAAoG;YACpG,qEAAqE;YACrE,oBAAoB,CAAC,YAAY,CAChC,GAAG,gBAAgB,EAAE,EACrB,mBAAmB,CAAC,cAAc,EAAE,CACpC,CAAC;YAEF,6DAA6D;YAC7D,IAAI,CAAC,wBAAwB,CAAC,oBAAoB,GAAG,oBAAoB,CAAC;YAC1E,IAAI,CAAC,wBAAwB,CAAC,gBAAgB,CAAC,GAAG,EAAE,CAAC;QACtD,CAAC;QAED,IAAA,yBAAc,EACb,IAAI,CAAC,0BAA0B,EAC/B,IAAI,CAAC,wBAAwB,CAAC,qBAAqB,EACnD,KAAK,EACL,eAAe,CACf,CAAC;QACF,OAAO,CAAC,gBAAgB,CAAC,CAAC;IAC3B,CAAC;IAED;;;;;;;;OAQG;IACI,eAAe,CAAC,IAGtB;QACA,MAAM,EAAE,yBAAyB,EAAE,oBAAoB,EAAE,GAAG,IAAI,CAAC;QACjE,IAAI,CAAC,IAAI,CAAC,wBAAwB,IAAI,yBAAyB,KAAK,SAAS,EAAE,CAAC;YAC/E,MAAM,cAAc,GAAG,IAAI,6BAAkB,EAAE,CAAC;YAChD,cAAc,CAAC,OAAO,CAAC,+BAAuB,EAAE,oBAAoB,CAAC,CAAC;YACtE,OAAO,cAAc,CAAC,cAAc,EAAE,CAAC;QACxC,CAAC;QAED,uBAAuB,CAAC,IAAI,CAAC,kBAAkB,EAAE,IAAI,CAAC,wBAAwB,CAAC,CAAC;QAEhF,IAAI,CAAC,wBAAwB,CAAC,oBAAoB,CAAC,OAAO,CACzD,+BAAuB,EACvB,oBAAoB,CACpB,CAAC;QAEF,wEAAwE;QACxE,2GAA2G;QAC3G,2GAA2G;QAC3G,kEAAkE;QAClE,MAAM,wBAAwB,GAAG,IAAI,CAAC,0BAA0B,CAAC,GAAG,CACnE,IAAI,CAAC,2BAA2B,CAChC,CAAC;QACF,MAAM,yBAAyB,GAAG,IAAI,CAAC,0BAA0B,CAAC,GAAG,CACpE,IAAI,CAAC,wBAAwB,CAAC,qBAAqB,CACnD,CAAC;QACF,IAAI,wBAAwB,KAAK,SAAS,IAAI,yBAAyB,KAAK,SAAS,EAAE,CAAC;YACvF,KAAK,MAAM,CAAC,KAAK,EAAE,eAAe,CAAC,IAAI,wBAAwB,CAAC,OAAO,EAAE,EAAE,CAAC;gBAC3E,IAAI,CAAC,yBAAyB,CAAC,GAAG,CAAC,KAAK,CAAC,EAAE,CAAC;oBAC3C,yBAAyB,CAAC,GAAG,CAAC,KAAK,EAAE,eAAe,CAAC,CAAC;gBACvD,CAAC;YACF,CAAC;QACF,CAAC;QAED,oGAAoG;QACpG,4DAA4D;QAC5D,KAAK,MAAM,cAAc,IAAI,IAAI,CAAC,0BAA0B,CAAC,IAAI,EAAE,EAAE,CAAC;YACrE,IAAI,cAAc,GAAG,IAAI,CAAC,2BAA2B,EAAE,CAAC;gBACvD,IAAI,CAAC,0BAA0B,CAAC,MAAM,CAAC,cAAc,CAAC,CAAC;YACxD,CAAC;QACF,CAAC;QAED,IAAI,CAAC,kBAAkB,GAAG,kCAA0B,CAAC,YAAY,CAAC;QAClE,MAAM,WAAW,GAAG,IAAI,CAAC,wBAAwB,CAAC,oBAAoB,CAAC,cAAc,EAAE,CAAC;QACxF,IAAI,CAAC,wBAAwB,GAAG,SAAS,CAAC;QAC1C,OAAO,WAAW,CAAC;IACpB,CAAC;IAED;;;;;OAKG;IACI,0BAA0B,CAAC,WAA6B;QAC9D,MAAM,oBAAoB,GAAG,IAAI,CAAC,uBAAuB,CAAC,GAAG,CAAC,GAAG,WAAW,EAAE,CAAC,CAAC;QAChF,IAAA,iBAAM,EACL,oBAAoB,KAAK,SAAS,EAClC,KAAK,CAAC,0CAA0C,CAChD,CAAC;QACF,OAAO,oBAAoB,CAAC;IAC7B,CAAC;CACD;AAzRD,0EAyRC","sourcesContent":["/*!\n * Copyright (c) Microsoft Corporation and contributors. 
All rights reserved.\n * Licensed under the MIT License.\n */\n\nimport { assert } from \"@fluidframework/core-utils/internal\";\nimport type {\n\tIExperimentalIncrementalSummaryContext,\n\tISummaryTreeWithStats,\n} from \"@fluidframework/runtime-definitions/internal\";\nimport { SummaryTreeBuilder } from \"@fluidframework/runtime-utils/internal\";\nimport {\n\tbrand,\n\tsetInNestedMap,\n\ttryGetFromNestedMap,\n\ttype JsonCompatible,\n\ttype NestedMap,\n} from \"../../util/index.js\";\nimport type {\n\tChunkReferenceId,\n\tEncodedFieldBatch,\n\tIncrementalEncoderDecoder,\n\tIncrementalEncodingPolicy,\n\tTreeChunk,\n} from \"../chunked-forest/index.js\";\nimport type { ITreeCursorSynchronous } from \"../../core/index.js\";\nimport { SummaryType } from \"@fluidframework/driver-definitions\";\nimport type { IChannelStorageService } from \"@fluidframework/datastore-definitions/internal\";\nimport type { ISnapshotTree } from \"@fluidframework/driver-definitions/internal\";\nimport { LoggingError } from \"@fluidframework/telemetry-utils/internal\";\nimport type { IFluidHandle } from \"@fluidframework/core-interfaces\";\nimport type { SummaryElementStringifier } from \"../../shared-tree-core/index.js\";\n\n/**\n * The key for the blob under ForestSummarizer's root.\n * This blob contains the ForestCodec's output.\n * See {@link ForestIncrementalSummaryBuilder} for details on the summary structure.\n */\nexport const forestSummaryContentKey = \"ForestTree\";\n\n/**\n * The contents of an incremental chunk is under a summary tree node with its {@link ChunkReferenceId} as the key.\n * The inline portion of the chunk content is encoded with the forest codec is stored in a blob with this key.\n * The rest of the chunk contents is stored in the summary tree under the summary tree node.\n * See the summary format in {@link ForestIncrementalSummaryBuilder} for more details.\n */\nconst chunkContentsBlobKey = \"contents\";\n\n/**\n * State that tells whether a summary is currently being tracked.\n */\nexport const ForestSummaryTrackingState = {\n\t/** A summary is currently being tracked. */\n\tTracking: \"Tracking\",\n\t/** A summary is ready to be tracked. 
*/\n\tReadyToTrack: \"ReadyToTrack\",\n} as const;\nexport type ForestSummaryTrackingState =\n\t(typeof ForestSummaryTrackingState)[keyof typeof ForestSummaryTrackingState];\n\n/**\n * The properties of a chunk that is tracked for every summary.\n * If a chunk doesn't change between summaries,\n * these properties will be used to generate a summary handle for the chunk.\n */\ninterface ChunkSummaryProperties {\n\t/**\n\t * The reference ID of the chunk which uniquely identifies it under its parent's summary tree.\n\t * The summary for this chunk will be stored against this reference ID as key in the summary tree.\n\t */\n\treadonly referenceId: ChunkReferenceId;\n\t/**\n\t * The path for this chunk's summary in the summary tree relative to the forest's summary tree.\n\t * This path is used to generate a summary handle for the chunk if it doesn't change between summaries.\n\t */\n\treadonly summaryPath: string;\n}\n\n/**\n * The properties of a summary being tracked.\n */\ninterface TrackedSummaryProperties {\n\t/**\n\t * The sequence number of the summary in progress.\n\t */\n\treadonly summarySequenceNumber: number;\n\t/**\n\t * The base path for the latest summary that was successful.\n\t * This is used to generate summary handles.\n\t */\n\treadonly latestSummaryBasePath: string;\n\t/**\n\t * Whether the summary being tracked is a full tree summary.\n\t * If true, the summary will not contain any summary handles. All chunks must be summarized in full.\n\t */\n\treadonly fullTree: boolean;\n\t/**\n\t * Represents the path of a chunk in the summary tree relative to the forest's summary tree.\n\t * Each item in the array is the {@link ChunkReferenceId} of a chunk in the summary tree starting\n\t * from the chunk under forest summary tree.\n\t * When a chunk is summarized, this array will be used to generate the path for the chunk's summary in the\n\t * summary tree.\n\t */\n\treadonly chunkSummaryPath: ChunkReferenceId[];\n\t/**\n\t * The parent summary builder to use to build the incremental summary tree.\n\t * When a chunk is being summarized, it will add its summary to this builder against its reference ID.\n\t */\n\tparentSummaryBuilder: SummaryTreeBuilder;\n\t/**\n\t * Serializes content (including {@link (IFluidHandle:interface)}s) for adding to a summary blob.\n\t */\n\tstringify: SummaryElementStringifier;\n}\n\n/**\n * The behavior of the forest's incremental summary - whether the summary should be a single blob or incremental.\n */\nexport enum ForestIncrementalSummaryBehavior {\n\t/**\n\t * The forest can encode chunks incrementally, i.e., chunks that support incremental encoding will be encoded\n\t * separately - they will be added to a separate tree.\n\t * The incremental summary format is described in {@link ForestIncrementalSummaryBuilder}.\n\t */\n\tIncremental,\n\t/**\n\t * The forest should encode all of it's data in a single summary blob.\n\t * @remarks\n\t * The format of the summary will be the same as the old format (pre-incremental summaries) and is fully\n\t * backwards compatible with the old format. 
The summary will basically look like an incremental summary\n\t * with no incremental fields - it will only contain the \"ForestTree\" blob in the summary format described\n\t * in {@link ForestIncrementalSummaryBuilder}.\n\t */\n\tSingleBlob,\n}\n\n/**\n * Validates that a summary is currently being tracked and that the tracked summary properties are defined.\n * @param forestSummaryState - The current state of the forest summary tracking.\n * @param trackedSummaryProperties - The properties of the tracked summary, which must be available.\n */\nfunction validateTrackingSummary(\n\tforestSummaryState: ForestSummaryTrackingState,\n\ttrackedSummaryProperties: TrackedSummaryProperties | undefined,\n): asserts trackedSummaryProperties is TrackedSummaryProperties {\n\tassert(\n\t\tforestSummaryState === ForestSummaryTrackingState.Tracking,\n\t\t0xc22 /* Not tracking a summary */,\n\t);\n\tassert(\n\t\ttrackedSummaryProperties !== undefined,\n\t\t0xc23 /* Tracked summary properties must be available when tracking a summary */,\n\t);\n}\n\n/**\n * Validates that a summary is ready to be tracked and that the tracked summary properties are undefined.\n * @param forestSummaryState - The current state of the forest summary tracking.\n * @param trackedSummaryProperties - The properties of the tracked summary, which must be undefined.\n */\nfunction validateReadyToTrackSummary(\n\tforestSummaryState: ForestSummaryTrackingState,\n\ttrackedSummaryProperties: TrackedSummaryProperties | undefined,\n): asserts trackedSummaryProperties is undefined {\n\tassert(\n\t\tforestSummaryState === ForestSummaryTrackingState.ReadyToTrack,\n\t\t0xc24 /* Already tracking a summary */,\n\t);\n\tassert(\n\t\ttrackedSummaryProperties === undefined,\n\t\t0xc25 /* Tracked summary properties must not be available when ready to track */,\n\t);\n}\n\n/* eslint-disable jsdoc/check-indentation */\n/**\n * Tracks and builds the incremental summary tree for a forest where chunks that support incremental encoding are\n * stored in a separate tree in the summary under its {@link ChunkReferenceId}.\n * The summary tree for a chunk is self-sufficient and can be independently loaded and used to reconstruct the\n * chunk's contents without any additional context from its parent.\n *\n * An example summary tree with incremental summary:\n * Forest\n * ├── ForestTree\n * ├── 0\n * | ├── contents\n * | ├── 1\n * | | ├── contents\n * | | ├── 2\n * | | | ├── contents\n * | ├── 3 - \".../Forest/ForestTree/0/1/3\"\n * ├── 4\n * | ├── contents\n * | ├── ...\n * ├── 5 - \"/.../Forest/ForestTree/5\"\n * - Forest is a summary tree node added by the shared tree and contains the following:\n * - The inline portion of the top-level forest content is stored in a summary blob called \"ForestTree\".\n * It also contains the {@link ChunkReferenceId}s of the incremental chunks under it.\n * - The summary for each incremental chunk under it is stored against its {@link ChunkReferenceId}.\n * - For each chunk, the structure of the summary tree is the same as the Forest. 
It contains the following:\n * - The inline portion of the chunk content is stored in a blob called \"contents\".\n * It also contains the {@link ChunkReferenceId}s of the incremental chunks under it.\n * - The summary for each incremental chunk under it is stored against its {@link ChunkReferenceId}.\n * - Chunks that do not change between summaries are summarized as handles in the summary tree.\n * @remarks\n * It may seem inconsistent that although the structure for the top-level forest tree is similar to that of\n * an incremental chunk, its content is stored in a summary blob called \"ForestTree\" while the content for\n * the incremental chunks are stored in a summary blob called \"contents\".\n * This is to keep this summary backwards compatible with old format (before incremental summaries were added)\n * where the entire forest content was in a summary blob called \"ForestTree\". So, if incremental summaries were\n * disabled, the forest content will be fully backwards compatible.\n * Note that this limits reusing the root node in a location other than root and a non-root node in the root.\n * We could phase this out by switching to write the top-level contents under \"contents\" if we want to support\n * the above. However, there is no plan to do that for now.\n *\n * TODO: AB#46752\n * Add strong types for the summary structure to document it better. It will help make it super clear what the actual\n * format is in a way that can easily be linked to, documented and inspected.\n */\n/* eslint-enable jsdoc/check-indentation */\nexport class ForestIncrementalSummaryBuilder implements IncrementalEncoderDecoder {\n\t/**\n\t * The next reference ID to use for a chunk.\n\t */\n\tprivate nextReferenceId: ChunkReferenceId = brand(0);\n\n\t/**\n\t * For a given summary sequence number, keeps track of a chunk's properties that will be used to generate\n\t * a summary handle for the chunk if it does not change between summaries.\n\t */\n\tprivate readonly chunkTrackingPropertiesMap: NestedMap<\n\t\tnumber,\n\t\tTreeChunk,\n\t\tChunkSummaryProperties\n\t> = new Map();\n\n\t/**\n\t * The state indicating whether a summary is currently being tracked or not.\n\t */\n\tpublic forestSummaryState: ForestSummaryTrackingState =\n\t\tForestSummaryTrackingState.ReadyToTrack;\n\n\t/**\n\t * The sequence number of the latest summary that was successful.\n\t */\n\tprivate latestSummarySequenceNumber: number = -1;\n\n\t/**\n\t * The current state of the summary being tracked.\n\t * This is undefined if no summary is currently being tracked.\n\t */\n\tprivate trackedSummaryProperties: TrackedSummaryProperties | undefined;\n\n\t/**\n\t * A map of chunk reference IDs to their encoded contents. 
This is typically used during the loading of the\n\t * forest to retrieve the contents of the chunks that were summarized incrementally.\n\t */\n\tprivate readonly encodedChunkContentsMap: Map<string, EncodedFieldBatch> = new Map();\n\n\tpublic constructor(\n\t\tprivate readonly enableIncrementalSummary: boolean,\n\t\tprivate readonly getChunkAtCursor: (cursor: ITreeCursorSynchronous) => TreeChunk,\n\t\tpublic readonly shouldEncodeIncrementally: IncrementalEncodingPolicy,\n\t) {}\n\n\t/**\n\t * Must be called when the forest is loaded to download the encoded contents of incremental chunks.\n\t * @param services - The channel storage service to use to access the snapshot tree and download the\n\t * contents of the chunks.\n\t * @param readAndParse - A function that reads and parses a blob from the storage service.\n\t */\n\tpublic async load(\n\t\tservices: IChannelStorageService,\n\t\treadAndParseChunk: <T extends JsonCompatible<IFluidHandle>>(id: string) => Promise<T>,\n\t): Promise<void> {\n\t\tconst forestTree = services.getSnapshotTree?.();\n\t\t// Snapshot tree should be available when loading forest's contents. However, it is an optional function\n\t\t// and may not be implemented by the storage service.\n\t\tif (forestTree === undefined) {\n\t\t\treturn;\n\t\t}\n\n\t\t// Downloads the contents of incremental chunks in the given snapshot tree. Also, recursively downloads\n\t\t// the contents of incremental chunks in any sub-trees.\n\t\tconst downloadChunkContentsInTree = async (\n\t\t\tsnapshotTree: ISnapshotTree,\n\t\t\tparentTreeKey: string,\n\t\t): Promise<void> => {\n\t\t\t// All trees in the snapshot tree are for incremental chunks. The key is the chunk's reference ID\n\t\t\t// and the value is the snapshot tree for the chunk.\n\t\t\tfor (const [chunkReferenceId, chunkSnapshotTree] of Object.entries(snapshotTree.trees)) {\n\t\t\t\tconst chunkSubTreePath = `${parentTreeKey}${chunkReferenceId}`;\n\t\t\t\tconst chunkContentsPath = `${chunkSubTreePath}/${chunkContentsBlobKey}`;\n\t\t\t\tif (!(await services.contains(chunkContentsPath))) {\n\t\t\t\t\tthrow new LoggingError(\n\t\t\t\t\t\t`SharedTree: Cannot find contents for incremental chunk ${chunkContentsPath}`,\n\t\t\t\t\t);\n\t\t\t\t}\n\t\t\t\tconst chunkContents = await readAndParseChunk<EncodedFieldBatch>(chunkContentsPath);\n\t\t\t\tthis.encodedChunkContentsMap.set(chunkReferenceId, chunkContents);\n\n\t\t\t\t// Recursively download the contents of chunks in this chunk's sub tree.\n\t\t\t\tawait downloadChunkContentsInTree(chunkSnapshotTree, `${chunkSubTreePath}/`);\n\t\t\t}\n\t\t};\n\t\tawait downloadChunkContentsInTree(forestTree, \"\");\n\t}\n\n\t/**\n\t * Must be called when starting a new forest summary to track it.\n\t * @param fullTree - Whether the summary is a full tree summary. If true, the summary will not contain\n\t * any summary handles. 
All chunks must be summarized in full.\n\t * @param incrementalSummaryContext - The context for the incremental summary that contains the sequence numbers\n\t * for the current and latest summaries.\n\t * @param stringify - Serializes content (including {@link (IFluidHandle:interface)}s) for adding to a summary blob.\n\t * @returns the behavior of the forest's incremental summary.\n\t */\n\tpublic startSummary(args: {\n\t\tfullTree: boolean;\n\t\tincrementalSummaryContext: IExperimentalIncrementalSummaryContext | undefined;\n\t\tstringify: SummaryElementStringifier;\n\t}): ForestIncrementalSummaryBehavior {\n\t\tconst { fullTree, incrementalSummaryContext, stringify } = args;\n\t\t// If there is no incremental summary context, do not summarize incrementally. This happens in two scenarios:\n\t\t// 1. When summarizing a detached container, i.e., the first ever summary.\n\t\t// 2. When running GC, the default behavior is to call summarize on DDS without incrementalSummaryContext.\n\t\tif (!this.enableIncrementalSummary || incrementalSummaryContext === undefined) {\n\t\t\treturn ForestIncrementalSummaryBehavior.SingleBlob;\n\t\t}\n\n\t\tvalidateReadyToTrackSummary(this.forestSummaryState, this.trackedSummaryProperties);\n\n\t\tthis.forestSummaryState = ForestSummaryTrackingState.Tracking;\n\t\tthis.latestSummarySequenceNumber = incrementalSummaryContext.latestSummarySequenceNumber;\n\t\tthis.trackedSummaryProperties = {\n\t\t\tsummarySequenceNumber: incrementalSummaryContext.summarySequenceNumber,\n\t\t\tlatestSummaryBasePath: incrementalSummaryContext.summaryPath,\n\t\t\tchunkSummaryPath: [],\n\t\t\tparentSummaryBuilder: new SummaryTreeBuilder(),\n\t\t\tfullTree,\n\t\t\tstringify,\n\t\t};\n\t\treturn ForestIncrementalSummaryBehavior.Incremental;\n\t}\n\n\t/**\n\t * {@link IncrementalEncoder.encodeIncrementalField}\n\t * @remarks Returns an empty array if the field has no content.\n\t */\n\tpublic encodeIncrementalField(\n\t\tcursor: ITreeCursorSynchronous,\n\t\tchunkEncoder: (chunk: TreeChunk) => EncodedFieldBatch,\n\t): ChunkReferenceId[] {\n\t\t// Validate that a summary is currently being tracked and that the tracked summary properties are defined.\n\t\tvalidateTrackingSummary(this.forestSummaryState, this.trackedSummaryProperties);\n\n\t\tif (cursor.getFieldLength() === 0) {\n\t\t\treturn [];\n\t\t}\n\n\t\tlet chunkReferenceId: ChunkReferenceId;\n\t\tlet chunkProperties: ChunkSummaryProperties;\n\n\t\tconst chunk = this.getChunkAtCursor(cursor);\n\n\t\t// Try and get the properties of the chunk from the latest successful summary.\n\t\t// If it exists and the summary is not a full tree, use the properties to generate a summary handle.\n\t\t// If it does not exist, encode the chunk and generate new properties for it.\n\t\tconst previousChunkProperties = tryGetFromNestedMap(\n\t\t\tthis.chunkTrackingPropertiesMap,\n\t\t\tthis.latestSummarySequenceNumber,\n\t\t\tchunk,\n\t\t);\n\t\tif (previousChunkProperties !== undefined && !this.trackedSummaryProperties.fullTree) {\n\t\t\tchunkProperties = previousChunkProperties;\n\t\t\tchunkReferenceId = previousChunkProperties.referenceId;\n\t\t\tthis.trackedSummaryProperties.parentSummaryBuilder.addHandle(\n\t\t\t\t`${chunkReferenceId}`,\n\t\t\t\tSummaryType.Tree,\n\t\t\t\t`${this.trackedSummaryProperties.latestSummaryBasePath}/${previousChunkProperties.summaryPath}`,\n\t\t\t);\n\t\t} else {\n\t\t\t// Generate a new reference ID for the chunk.\n\t\t\tchunkReferenceId = brand(this.nextReferenceId++);\n\t\t\t// Add the reference ID of this chunk to the 
chunk summary path and use the path as the summary path\n\t\t\t// for the chunk in its summary properties.\n\t\t\t// This is done before encoding the chunk so that the summary path is updated correctly when encoding\n\t\t\t// any incremental chunks that are under this chunk.\n\t\t\tthis.trackedSummaryProperties.chunkSummaryPath.push(chunkReferenceId);\n\n\t\t\tchunkProperties = {\n\t\t\t\treferenceId: chunkReferenceId,\n\t\t\t\tsummaryPath: this.trackedSummaryProperties.chunkSummaryPath.join(\"/\"),\n\t\t\t};\n\n\t\t\tconst parentSummaryBuilder = this.trackedSummaryProperties.parentSummaryBuilder;\n\t\t\t// Create a new summary builder for this chunk to build its summary tree which will be stored in the\n\t\t\t// parent's summary tree under its reference ID.\n\t\t\t// Before encoding the chunk, set the parent summary builder to this chunk's summary builder so that\n\t\t\t// any incremental chunks in the subtree of this chunk will use that as their parent summary builder.\n\t\t\tconst chunkSummaryBuilder = new SummaryTreeBuilder();\n\t\t\tthis.trackedSummaryProperties.parentSummaryBuilder = chunkSummaryBuilder;\n\t\t\tchunkSummaryBuilder.addBlob(\n\t\t\t\tchunkContentsBlobKey,\n\t\t\t\tthis.trackedSummaryProperties.stringify(chunkEncoder(chunk)),\n\t\t\t);\n\n\t\t\t// Add this chunk's summary tree to the parent's summary tree. The summary tree contains its encoded\n\t\t\t// contents and the summary trees of any incremental chunks under it.\n\t\t\tparentSummaryBuilder.addWithStats(\n\t\t\t\t`${chunkReferenceId}`,\n\t\t\t\tchunkSummaryBuilder.getSummaryTree(),\n\t\t\t);\n\n\t\t\t// Restore the parent summary builder and chunk summary path.\n\t\t\tthis.trackedSummaryProperties.parentSummaryBuilder = parentSummaryBuilder;\n\t\t\tthis.trackedSummaryProperties.chunkSummaryPath.pop();\n\t\t}\n\n\t\tsetInNestedMap(\n\t\t\tthis.chunkTrackingPropertiesMap,\n\t\t\tthis.trackedSummaryProperties.summarySequenceNumber,\n\t\t\tchunk,\n\t\t\tchunkProperties,\n\t\t);\n\t\treturn [chunkReferenceId];\n\t}\n\n\t/**\n\t * Must be called after summary generation is complete to finish tracking the summary.\n\t * It clears any tracking state and deletes the tracking properties for summaries that are older than the\n\t * latest successful summary.\n\t * @param incrementalSummaryContext - The context for the incremental summary that contains the sequence numbers.\n\t * If this is undefined, the summary tree will only contain a summary blob for `forestSummaryContent`.\n\t * @param forestSummaryContent - The stringified ForestCodec output of top-level Forest content.\n\t * @returns the Forest's summary tree.\n\t */\n\tpublic completeSummary(args: {\n\t\tincrementalSummaryContext: IExperimentalIncrementalSummaryContext | undefined;\n\t\tforestSummaryContent: string;\n\t}): ISummaryTreeWithStats {\n\t\tconst { incrementalSummaryContext, forestSummaryContent } = args;\n\t\tif (!this.enableIncrementalSummary || incrementalSummaryContext === undefined) {\n\t\t\tconst summaryBuilder = new SummaryTreeBuilder();\n\t\t\tsummaryBuilder.addBlob(forestSummaryContentKey, forestSummaryContent);\n\t\t\treturn summaryBuilder.getSummaryTree();\n\t\t}\n\n\t\tvalidateTrackingSummary(this.forestSummaryState, this.trackedSummaryProperties);\n\n\t\tthis.trackedSummaryProperties.parentSummaryBuilder.addBlob(\n\t\t\tforestSummaryContentKey,\n\t\t\tforestSummaryContent,\n\t\t);\n\n\t\t// Copy over the entries from the latest summary to the current summary.\n\t\t// In the current summary, there can be fields that haven't changed since the 
latest summary and the chunks\n\t\t// in these fields and in any of its children weren't encoded. So, we need get the entries for these chunks\n\t\t// to be able to incrementally summarize them in the next summary.\n\t\tconst latestSummaryTrackingMap = this.chunkTrackingPropertiesMap.get(\n\t\t\tthis.latestSummarySequenceNumber,\n\t\t);\n\t\tconst currentSummaryTrackingMap = this.chunkTrackingPropertiesMap.get(\n\t\t\tthis.trackedSummaryProperties.summarySequenceNumber,\n\t\t);\n\t\tif (latestSummaryTrackingMap !== undefined && currentSummaryTrackingMap !== undefined) {\n\t\t\tfor (const [chunk, chunkProperties] of latestSummaryTrackingMap.entries()) {\n\t\t\t\tif (!currentSummaryTrackingMap.has(chunk)) {\n\t\t\t\t\tcurrentSummaryTrackingMap.set(chunk, chunkProperties);\n\t\t\t\t}\n\t\t\t}\n\t\t}\n\n\t\t// Delete tracking for summaries that are older than the latest successful summary because they will\n\t\t// never be referenced again for generating summary handles.\n\t\tfor (const sequenceNumber of this.chunkTrackingPropertiesMap.keys()) {\n\t\t\tif (sequenceNumber < this.latestSummarySequenceNumber) {\n\t\t\t\tthis.chunkTrackingPropertiesMap.delete(sequenceNumber);\n\t\t\t}\n\t\t}\n\n\t\tthis.forestSummaryState = ForestSummaryTrackingState.ReadyToTrack;\n\t\tconst summaryTree = this.trackedSummaryProperties.parentSummaryBuilder.getSummaryTree();\n\t\tthis.trackedSummaryProperties = undefined;\n\t\treturn summaryTree;\n\t}\n\n\t/**\n\t * Called to get the encoded contents of an incremental chunk with the given reference ID.\n\t * This is typically used when loading the forest to retrieve the contents of incremental chunks.\n\t * @param referenceId - The reference ID of the chunk to retrieve.\n\t * @returns The encoded contents of the chunk.\n\t */\n\tpublic getEncodedIncrementalChunk(referenceId: ChunkReferenceId): EncodedFieldBatch {\n\t\tconst chunkEncodedContents = this.encodedChunkContentsMap.get(`${referenceId}`);\n\t\tassert(\n\t\t\tchunkEncodedContents !== undefined,\n\t\t\t0xc26 /* Incremental chunk contents not found */,\n\t\t);\n\t\treturn chunkEncodedContents;\n\t}\n}\n"]}
+
{"version":3,"file":"incrementalSummaryBuilder.js","sourceRoot":"","sources":["../../../src/feature-libraries/forest-summary/incrementalSummaryBuilder.ts"],"names":[],"mappings":";AAAA;;;GAGG;;;AAEH,kEAA6D;AAK7D,qEAA4E;AAC5E,kDAM6B;AAS7B,2EAAiE;AAGjE,uEAAwE;AAIxE;;;;GAIG;AACU,QAAA,uBAAuB,GAAG,YAAY,CAAC;AAEpD;;;;;GAKG;AACH,MAAM,oBAAoB,GAAG,UAAU,CAAC;AAExC;;GAEG;AACU,QAAA,0BAA0B,GAAG;IACzC,4CAA4C;IAC5C,QAAQ,EAAE,UAAU;IACpB,wCAAwC;IACxC,YAAY,EAAE,cAAc;CACnB,CAAC;AAoFX;;GAEG;AACH,IAAY,gCAgBX;AAhBD,WAAY,gCAAgC;IAC3C;;;;OAIG;IACH,qGAAW,CAAA;IACX;;;;;;;OAOG;IACH,mGAAU,CAAA;AACX,CAAC,EAhBW,gCAAgC,gDAAhC,gCAAgC,QAgB3C;AAED;;;;GAIG;AACH,SAAS,uBAAuB,CAC/B,kBAA8C,EAC9C,wBAA8D;IAE9D,IAAA,iBAAM,EACL,kBAAkB,KAAK,kCAA0B,CAAC,QAAQ,EAC1D,KAAK,CAAC,4BAA4B,CAClC,CAAC;IACF,IAAA,iBAAM,EACL,wBAAwB,KAAK,SAAS,EACtC,KAAK,CAAC,0EAA0E,CAChF,CAAC;AACH,CAAC;AAED;;;;GAIG;AACH,SAAS,2BAA2B,CACnC,kBAA8C,EAC9C,wBAA8D;IAE9D,IAAA,iBAAM,EACL,kBAAkB,KAAK,kCAA0B,CAAC,YAAY,EAC9D,KAAK,CAAC,gCAAgC,CACtC,CAAC;IACF,IAAA,iBAAM,EACL,wBAAwB,KAAK,SAAS,EACtC,KAAK,CAAC,0EAA0E,CAChF,CAAC;AACH,CAAC;AAED,4CAA4C;AAC5C;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;GA2CG;AACH,2CAA2C;AAC3C,MAAa,+BAA+B;IA2C3C,YACkB,wBAAiC,EACjC,gBAA+D,EAChE,yBAAoD,EACnD,qBAA6B;QAH7B,6BAAwB,GAAxB,wBAAwB,CAAS;QACjC,qBAAgB,GAAhB,gBAAgB,CAA+C;QAChE,8BAAyB,GAAzB,yBAAyB,CAA2B;QACnD,0BAAqB,GAArB,qBAAqB,CAAQ;QA9C/C;;WAEG;QACK,oBAAe,GAAqB,IAAA,gBAAK,EAAC,CAAC,CAAC,CAAC;QAErD;;;WAGG;QACc,+BAA0B,GAIvC,IAAI,GAAG,EAAE,CAAC;QAEd;;WAEG;QACI,uBAAkB,GACxB,kCAA0B,CAAC,YAAY,CAAC;QAEzC;;WAEG;QACK,gCAA2B,GAAW,CAAC,CAAC,CAAC;QAQjD;;;WAGG;QACH;;;WAGG;QACc,oBAAe,GAAqC,IAAI,GAAG,EAAE,CAAC;IAO5E,CAAC;IAEJ;;;;;OAKG;IACI,KAAK,CAAC,IAAI,CAChB,QAAgC,EAChC,iBAAqF;QAErF,MAAM,UAAU,GAAG,QAAQ,CAAC,eAAe,EAAE,EAAE,CAAC;QAChD,wGAAwG;QACxG,qDAAqD;QACrD,IAAI,UAAU,KAAK,SAAS,EAAE,CAAC;YAC9B,OAAO;QACR,CAAC;QAED,uGAAuG;QACvG,uDAAuD;QACvD,MAAM,2BAA2B,GAAG,KAAK,EACxC,YAA2B,EAC3B,aAAqB,EACL,EAAE;YAClB,iGAAiG;YACjG,oDAAoD;YACpD,KAAK,MAAM,CAAC,gBAAgB,EAAE,iBAAiB,CAAC,IAAI,MAAM,CAAC,OAAO,CAAC,YAAY,CAAC,KAAK,CAAC,EAAE,CAAC;gBACxF,MAAM,gBAAgB,GAAG,GAAG,aAAa,GAAG,gBAAgB,EAAE,CAAC;gBAC/D,MAAM,iBAAiB,GAAG,GAAG,gBAAgB,IAAI,oBAAoB,EAAE,CAAC;gBACxE,IAAI,CAAC,CAAC,MAAM,QAAQ,CAAC,QAAQ,CAAC,iBAAiB,CAAC,CAAC,EAAE,CAAC;oBACnD,MAAM,IAAI,uBAAY,CACrB,0DAA0D,iBAAiB,EAAE,CAC7E,CAAC;gBACH,CAAC;gBACD,MAAM,aAAa,GAAG,MAAM,iBAAiB,CAAoB,iBAAiB,CAAC,CAAC;gBACpF,IAAI,CAAC,eAAe,CAAC,GAAG,CAAC,gBAAgB,EAAE;oBAC1C,eAAe,EAAE,aAAa;oBAC9B,WAAW,EAAE,gBAAgB;iBAC7B,CAAC,CAAC;gBAEH,MAAM,sBAAsB,GAAG,MAAM,CAAC,gBAAgB,CAAC,CAAC;gBACxD,IAAI,CAAC,eAAe,GAAG,IAAA,gBAAK,EAC3B,IAAI,CAAC,GAAG,CAAC,IAAI,CAAC,eAAe,EAAE,sBAAsB,GAAG,CAAC,CAAC,CAC1D,CAAC;gBAEF,wEAAwE;gBACxE,MAAM,2BAA2B,CAAC,iBAAiB,EAAE,GAAG,gBAAgB,GAAG,CAAC,CAAC;YAC9E,CAAC;QACF,CAAC,CAAC;QACF,MAAM,2BAA2B,CAAC,UAAU,EAAE,EAAE,CAAC,CAAC;IACnD,CAAC;IAED;;;;;;;;OAQG;IACI,YAAY,CAAC,IAInB;QACA,MAAM,EAAE,QAAQ,EAAE,yBAAyB,EAAE,SAAS,EAAE,GAAG,IAAI,CAAC;QAChE,6GAA6G;QAC7G,0EAA0E;QAC1E,0GAA0G;QAC1G,IAAI,CAAC,IAAI,CAAC,wBAAwB,IAAI,yBAAyB,KAAK,SAAS,EAAE,CAAC;YAC/E,OAAO,gCAAgC,CAAC,UAAU,CAAC;QACpD,CAAC;QAED,2BAA2B,CAAC,IAAI,CAAC,kBAAkB,EAAE,IAAI,CAAC,wBAAwB,CAAC,CAAC;QAEpF,IAAI,CAAC,kBAAkB,GAAG,kCAA0B,CAAC,QAAQ,CAAC;QAC9D,IAAI,CAAC,2BAA2B,GAAG,yBAAyB,CAAC,2BAA2B,CAAC;QACzF,IAAI,CAAC,wBAAwB,GAAG;YAC/B,qBAAqB,EAAE,yBAAyB,CAAC,qBAAqB;YACtE,qBAAqB,EAAE,yBAAyB,CAAC,WAAW;YAC5D,gBAAgB,EAAE,EAAE;YACpB,oBAAoB,EAAE,IAAI,6BAAkB,EAAE;YAC9C,QAAQ;YACR,SAAS;SACT,CAAC;QACF,OAAO,gCAAgC,CAAC,WAAW,CAAC;IACrD,CAAC;IAED;;;OAGG;IACI,sBAAsB,CAC5B,MAA8B,EAC9B,YAAqD;QAErD,0GAA0G;QAC1G,uBAAuB,CAAC,IAAI,CAAC,kBAAkB,EAAE,IAAI,CAAC,wBAAwB,CAAC,C
AAC;QAEhF,IAAI,MAAM,CAAC,cAAc,EAAE,KAAK,CAAC,EAAE,CAAC;YACnC,OAAO,EAAE,CAAC;QACX,CAAC;QAED,IAAI,gBAAkC,CAAC;QACvC,IAAI,eAAuC,CAAC;QAE5C,qHAAqH;QACrH,wHAAwH;QACxH,qDAAqD;QACrD,yDAAyD;QACzD,MAAM,KAAK,GAAG,IAAI,CAAC,gBAAgB,CAAC,MAAM,CAAC,CAAC;QAE5C,8EAA8E;QAC9E,oGAAoG;QACpG,6EAA6E;QAC7E,MAAM,uBAAuB,GAAG,IAAA,8BAAmB,EAClD,IAAI,CAAC,0BAA0B,EAC/B,IAAI,CAAC,2BAA2B,EAChC,KAAK,CACL,CAAC;QACF,IAAI,uBAAuB,KAAK,SAAS,IAAI,CAAC,IAAI,CAAC,wBAAwB,CAAC,QAAQ,EAAE,CAAC;YACtF,eAAe,GAAG,uBAAuB,CAAC;YAC1C,gBAAgB,GAAG,uBAAuB,CAAC,WAAW,CAAC;YACvD,IAAI,CAAC,wBAAwB,CAAC,oBAAoB,CAAC,SAAS,CAC3D,GAAG,gBAAgB,EAAE,EACrB,gCAAW,CAAC,IAAI,EAChB,GAAG,IAAI,CAAC,wBAAwB,CAAC,qBAAqB,IAAI,uBAAuB,CAAC,WAAW,EAAE,CAC/F,CAAC;QACH,CAAC;aAAM,CAAC;YACP,6CAA6C;YAC7C,gBAAgB,GAAG,IAAA,gBAAK,EAAC,IAAI,CAAC,eAAe,EAAE,CAAC,CAAC;YACjD,oGAAoG;YACpG,2CAA2C;YAC3C,qGAAqG;YACrG,oDAAoD;YACpD,IAAI,CAAC,wBAAwB,CAAC,gBAAgB,CAAC,IAAI,CAAC,gBAAgB,CAAC,CAAC;YAEtE,eAAe,GAAG;gBACjB,WAAW,EAAE,gBAAgB;gBAC7B,WAAW,EAAE,IAAI,CAAC,wBAAwB,CAAC,gBAAgB,CAAC,IAAI,CAAC,GAAG,CAAC;aACrE,CAAC;YAEF,MAAM,oBAAoB,GAAG,IAAI,CAAC,wBAAwB,CAAC,oBAAoB,CAAC;YAChF,oGAAoG;YACpG,gDAAgD;YAChD,oGAAoG;YACpG,qGAAqG;YACrG,MAAM,mBAAmB,GAAG,IAAI,6BAAkB,EAAE,CAAC;YACrD,IAAI,CAAC,wBAAwB,CAAC,oBAAoB,GAAG,mBAAmB,CAAC;YACzE,mBAAmB,CAAC,OAAO,CAC1B,oBAAoB,EACpB,IAAI,CAAC,wBAAwB,CAAC,SAAS,CAAC,YAAY,CAAC,KAAK,CAAC,CAAC,CAC5D,CAAC;YAEF,oGAAoG;YACpG,qEAAqE;YACrE,oBAAoB,CAAC,YAAY,CAChC,GAAG,gBAAgB,EAAE,EACrB,mBAAmB,CAAC,cAAc,EAAE,CACpC,CAAC;YAEF,6DAA6D;YAC7D,IAAI,CAAC,wBAAwB,CAAC,oBAAoB,GAAG,oBAAoB,CAAC;YAC1E,IAAI,CAAC,wBAAwB,CAAC,gBAAgB,CAAC,GAAG,EAAE,CAAC;QACtD,CAAC;QAED,IAAA,yBAAc,EACb,IAAI,CAAC,0BAA0B,EAC/B,IAAI,CAAC,wBAAwB,CAAC,qBAAqB,EACnD,KAAK,EACL,eAAe,CACf,CAAC;QACF,OAAO,CAAC,gBAAgB,CAAC,CAAC;IAC3B,CAAC;IAED;;;;;;;;OAQG;IACI,eAAe,CAAC,IAGtB;QACA,MAAM,EAAE,yBAAyB,EAAE,oBAAoB,EAAE,GAAG,IAAI,CAAC;QACjE,IAAI,CAAC,IAAI,CAAC,wBAAwB,IAAI,yBAAyB,KAAK,SAAS,EAAE,CAAC;YAC/E,MAAM,cAAc,GAAG,IAAI,6BAAkB,EAAE,CAAC;YAChD,cAAc,CAAC,OAAO,CAAC,+BAAuB,EAAE,oBAAoB,CAAC,CAAC;YACtE,OAAO,cAAc,CAAC,cAAc,EAAE,CAAC;QACxC,CAAC;QAED,uBAAuB,CAAC,IAAI,CAAC,kBAAkB,EAAE,IAAI,CAAC,wBAAwB,CAAC,CAAC;QAEhF,IAAI,CAAC,wBAAwB,CAAC,oBAAoB,CAAC,OAAO,CACzD,+BAAuB,EACvB,oBAAoB,CACpB,CAAC;QAEF,wEAAwE;QACxE,2GAA2G;QAC3G,2GAA2G;QAC3G,kEAAkE;QAClE,MAAM,wBAAwB,GAAG,IAAI,CAAC,0BAA0B,CAAC,GAAG,CACnE,IAAI,CAAC,2BAA2B,CAChC,CAAC;QACF,MAAM,yBAAyB,GAAG,IAAI,CAAC,0BAA0B,CAAC,GAAG,CACpE,IAAI,CAAC,wBAAwB,CAAC,qBAAqB,CACnD,CAAC;QACF,IAAI,wBAAwB,KAAK,SAAS,IAAI,yBAAyB,KAAK,SAAS,EAAE,CAAC;YACvF,KAAK,MAAM,CAAC,KAAK,EAAE,eAAe,CAAC,IAAI,wBAAwB,CAAC,OAAO,EAAE,EAAE,CAAC;gBAC3E,IAAI,CAAC,yBAAyB,CAAC,GAAG,CAAC,KAAK,CAAC,EAAE,CAAC;oBAC3C,yBAAyB,CAAC,GAAG,CAAC,KAAK,EAAE,eAAe,CAAC,CAAC;gBACvD,CAAC;YACF,CAAC;QACF,CAAC;QAED,oGAAoG;QACpG,4DAA4D;QAC5D,KAAK,MAAM,cAAc,IAAI,IAAI,CAAC,0BAA0B,CAAC,IAAI,EAAE,EAAE,CAAC;YACrE,IAAI,cAAc,GAAG,IAAI,CAAC,2BAA2B,EAAE,CAAC;gBACvD,IAAI,CAAC,0BAA0B,CAAC,MAAM,CAAC,cAAc,CAAC,CAAC;YACxD,CAAC;QACF,CAAC;QAED,IAAI,CAAC,kBAAkB,GAAG,kCAA0B,CAAC,YAAY,CAAC;QAClE,MAAM,WAAW,GAAG,IAAI,CAAC,wBAAwB,CAAC,oBAAoB,CAAC,cAAc,EAAE,CAAC;QACxF,IAAI,CAAC,wBAAwB,GAAG,SAAS,CAAC;QAC1C,OAAO,WAAW,CAAC;IACpB,CAAC;IAED;;OAEG;IACI,sBAAsB,CAC5B,WAA6B,EAC7B,YAAuD;QAEvD,MAAM,mBAAmB,GAAG,IAAI,CAAC,eAAe,CAAC,GAAG,CAAC,GAAG,WAAW,EAAE,CAAC,CAAC;QACvE,IAAA,iBAAM,EAAC,mBAAmB,KAAK,SAAS,EAAE,qCAAqC,CAAC,CAAC;QACjF,MAAM,KAAK,GAAG,YAAY,CAAC,mBAAmB,CAAC,eAAe,CAAC,CAAC;QAEhE,8EAA8E;QAC9E,gFAAgF;QAChF,sFAAsF;QACtF,KAAK,CAAC,cAAc,EAAE,CAAC;QACvB,mGAAmG;QACnG,mGAAmG;QACnG,yCAAyC;QACzC,IAAA,yBAAc,EAAC,IAAI,CAAC,0BAA0B,EAAE,IAAI,CAAC,qBAAqB,EAAE,KAAK,EAAE;YAClF,WAAW;YACX,WAAW,EAAE,mBA
AmB,CAAC,WAAW;SAC5C,CAAC,CAAC;QACH,OAAO,KAAK,CAAC;IACd,CAAC;CACD;AApTD,0EAoTC","sourcesContent":["/*!\n * Copyright (c) Microsoft Corporation and contributors. All rights reserved.\n * Licensed under the MIT License.\n */\n\nimport { assert } from \"@fluidframework/core-utils/internal\";\nimport type {\n\tIExperimentalIncrementalSummaryContext,\n\tISummaryTreeWithStats,\n} from \"@fluidframework/runtime-definitions/internal\";\nimport { SummaryTreeBuilder } from \"@fluidframework/runtime-utils/internal\";\nimport {\n\tbrand,\n\tsetInNestedMap,\n\ttryGetFromNestedMap,\n\ttype JsonCompatible,\n\ttype NestedMap,\n} from \"../../util/index.js\";\nimport type {\n\tChunkReferenceId,\n\tEncodedFieldBatch,\n\tIncrementalEncoderDecoder,\n\tIncrementalEncodingPolicy,\n\tTreeChunk,\n} from \"../chunked-forest/index.js\";\nimport type { ITreeCursorSynchronous } from \"../../core/index.js\";\nimport { SummaryType } from \"@fluidframework/driver-definitions\";\nimport type { IChannelStorageService } from \"@fluidframework/datastore-definitions/internal\";\nimport type { ISnapshotTree } from \"@fluidframework/driver-definitions/internal\";\nimport { LoggingError } from \"@fluidframework/telemetry-utils/internal\";\nimport type { IFluidHandle } from \"@fluidframework/core-interfaces\";\nimport type { SummaryElementStringifier } from \"../../shared-tree-core/index.js\";\n\n/**\n * The key for the blob under ForestSummarizer's root.\n * This blob contains the ForestCodec's output.\n * See {@link ForestIncrementalSummaryBuilder} for details on the summary structure.\n */\nexport const forestSummaryContentKey = \"ForestTree\";\n\n/**\n * The contents of an incremental chunk is under a summary tree node with its {@link ChunkReferenceId} as the key.\n * The inline portion of the chunk content is encoded with the forest codec is stored in a blob with this key.\n * The rest of the chunk contents is stored in the summary tree under the summary tree node.\n * See the summary format in {@link ForestIncrementalSummaryBuilder} for more details.\n */\nconst chunkContentsBlobKey = \"contents\";\n\n/**\n * State that tells whether a summary is currently being tracked.\n */\nexport const ForestSummaryTrackingState = {\n\t/** A summary is currently being tracked. */\n\tTracking: \"Tracking\",\n\t/** A summary is ready to be tracked. 
*/\n\tReadyToTrack: \"ReadyToTrack\",\n} as const;\nexport type ForestSummaryTrackingState =\n\t(typeof ForestSummaryTrackingState)[keyof typeof ForestSummaryTrackingState];\n\n/**\n * The properties of a chunk tracked during the loading process.\n * These are used to identify a chunk when it is decoded and recreate the tracking state\n * as it was when the summary that the client is loading from was generated.\n *\n * An encoded chunk, paired with a location it can be reused / reloaded from.\n * @remarks\n * This identifies a location in a specific summary where `encodedContents` was loaded from.\n *\n * When summarizing, Fluid always ensures the summary that the summary client is allowed to reuse content from\n * is the one it loaded from, so tracking this on load is sufficient for now:\n * there is no need to track the equivalent data when summarizing.\n */\ninterface ChunkLoadProperties {\n\t/**\n\t * The encoded contents of the chunk.\n\t */\n\treadonly encodedContents: EncodedFieldBatch;\n\t/**\n\t * The path for this chunk's contents in the summary tree relative to the forest's summary tree.\n\t * This path is used to generate a summary handle for the chunk if it doesn't change between summaries.\n\t */\n\treadonly summaryPath: string;\n}\n\n/**\n * The properties of a chunk that is tracked for every summary.\n * If a chunk doesn't change between summaries,\n * these properties will be used to generate a summary handle for the chunk.\n */\ninterface ChunkSummaryProperties {\n\t/**\n\t * The reference ID of the chunk which uniquely identifies it under its parent's summary tree.\n\t * The summary for this chunk will be stored against this reference ID as key in the summary tree.\n\t */\n\treadonly referenceId: ChunkReferenceId;\n\t/**\n\t * The path for this chunk's summary in the summary tree relative to the forest's summary tree.\n\t * This path is used to generate a summary handle for the chunk if it doesn't change between summaries.\n\t */\n\treadonly summaryPath: string;\n}\n\n/**\n * The properties of a summary being tracked.\n */\ninterface TrackedSummaryProperties {\n\t/**\n\t * The sequence number of the summary in progress.\n\t */\n\treadonly summarySequenceNumber: number;\n\t/**\n\t * The base path for the latest summary that was successful.\n\t * This is used to generate summary handles.\n\t */\n\treadonly latestSummaryBasePath: string;\n\t/**\n\t * Whether the summary being tracked is a full tree summary.\n\t * If true, the summary will not contain any summary handles. 
All chunks must be summarized in full.\n\t */\n\treadonly fullTree: boolean;\n\t/**\n\t * Represents the path of a chunk in the summary tree relative to the forest's summary tree.\n\t * Each item in the array is the {@link ChunkReferenceId} of a chunk in the summary tree starting\n\t * from the chunk under forest summary tree.\n\t * When a chunk is summarized, this array will be used to generate the path for the chunk's summary in the\n\t * summary tree.\n\t */\n\treadonly chunkSummaryPath: ChunkReferenceId[];\n\t/**\n\t * The parent summary builder to use to build the incremental summary tree.\n\t * When a chunk is being summarized, it will add its summary to this builder against its reference ID.\n\t */\n\tparentSummaryBuilder: SummaryTreeBuilder;\n\t/**\n\t * Serializes content (including {@link (IFluidHandle:interface)}s) for adding to a summary blob.\n\t */\n\tstringify: SummaryElementStringifier;\n}\n\n/**\n * The behavior of the forest's incremental summary - whether the summary should be a single blob or incremental.\n */\nexport enum ForestIncrementalSummaryBehavior {\n\t/**\n\t * The forest can encode chunks incrementally, i.e., chunks that support incremental encoding will be encoded\n\t * separately - they will be added to a separate tree.\n\t * The incremental summary format is described in {@link ForestIncrementalSummaryBuilder}.\n\t */\n\tIncremental,\n\t/**\n\t * The forest should encode all of it's data in a single summary blob.\n\t * @remarks\n\t * The format of the summary will be the same as the old format (pre-incremental summaries) and is fully\n\t * backwards compatible with the old format. The summary will basically look like an incremental summary\n\t * with no incremental fields - it will only contain the \"ForestTree\" blob in the summary format described\n\t * in {@link ForestIncrementalSummaryBuilder}.\n\t */\n\tSingleBlob,\n}\n\n/**\n * Validates that a summary is currently being tracked and that the tracked summary properties are defined.\n * @param forestSummaryState - The current state of the forest summary tracking.\n * @param trackedSummaryProperties - The properties of the tracked summary, which must be available.\n */\nfunction validateTrackingSummary(\n\tforestSummaryState: ForestSummaryTrackingState,\n\ttrackedSummaryProperties: TrackedSummaryProperties | undefined,\n): asserts trackedSummaryProperties is TrackedSummaryProperties {\n\tassert(\n\t\tforestSummaryState === ForestSummaryTrackingState.Tracking,\n\t\t0xc22 /* Not tracking a summary */,\n\t);\n\tassert(\n\t\ttrackedSummaryProperties !== undefined,\n\t\t0xc23 /* Tracked summary properties must be available when tracking a summary */,\n\t);\n}\n\n/**\n * Validates that a summary is ready to be tracked and that the tracked summary properties are undefined.\n * @param forestSummaryState - The current state of the forest summary tracking.\n * @param trackedSummaryProperties - The properties of the tracked summary, which must be undefined.\n */\nfunction validateReadyToTrackSummary(\n\tforestSummaryState: ForestSummaryTrackingState,\n\ttrackedSummaryProperties: TrackedSummaryProperties | undefined,\n): asserts trackedSummaryProperties is undefined {\n\tassert(\n\t\tforestSummaryState === ForestSummaryTrackingState.ReadyToTrack,\n\t\t0xc24 /* Already tracking a summary */,\n\t);\n\tassert(\n\t\ttrackedSummaryProperties === undefined,\n\t\t0xc25 /* Tracked summary properties must not be available when ready to track */,\n\t);\n}\n\n/* eslint-disable jsdoc/check-indentation */\n/**\n * Tracks and 
builds the incremental summary tree for a forest where chunks that support incremental encoding are\n * stored in a separate tree in the summary under its {@link ChunkReferenceId}.\n * The summary tree for a chunk is self-sufficient and can be independently loaded and used to reconstruct the\n * chunk's contents without any additional context from its parent.\n *\n * An example summary tree with incremental summary:\n * Forest\n * ├── ForestTree\n * ├── 0\n * | ├── contents\n * | ├── 1\n * | | ├── contents\n * | | ├── 2\n * | | | ├── contents\n * | ├── 3 - \".../Forest/ForestTree/0/1/3\"\n * ├── 4\n * | ├── contents\n * | ├── ...\n * ├── 5 - \"/.../Forest/ForestTree/5\"\n * - Forest is a summary tree node added by the shared tree and contains the following:\n * - The inline portion of the top-level forest content is stored in a summary blob called \"ForestTree\".\n * It also contains the {@link ChunkReferenceId}s of the incremental chunks under it.\n * - The summary for each incremental chunk under it is stored against its {@link ChunkReferenceId}.\n * - For each chunk, the structure of the summary tree is the same as the Forest. It contains the following:\n * - The inline portion of the chunk content is stored in a blob called \"contents\".\n * It also contains the {@link ChunkReferenceId}s of the incremental chunks under it.\n * - The summary for each incremental chunk under it is stored against its {@link ChunkReferenceId}.\n * - Chunks that do not change between summaries are summarized as handles in the summary tree.\n * @remarks\n * It may seem inconsistent that although the structure for the top-level forest tree is similar to that of\n * an incremental chunk, its content is stored in a summary blob called \"ForestTree\" while the content for\n * the incremental chunks are stored in a summary blob called \"contents\".\n * This is to keep this summary backwards compatible with old format (before incremental summaries were added)\n * where the entire forest content was in a summary blob called \"ForestTree\". So, if incremental summaries were\n * disabled, the forest content will be fully backwards compatible.\n * Note that this limits reusing the root node in a location other than root and a non-root node in the root.\n * We could phase this out by switching to write the top-level contents under \"contents\" if we want to support\n * the above. However, there is no plan to do that for now.\n *\n * TODO: AB#46752\n * Add strong types for the summary structure to document it better. 
It will help make it super clear what the actual\n * format is in a way that can easily be linked to, documented and inspected.\n */\n/* eslint-enable jsdoc/check-indentation */\nexport class ForestIncrementalSummaryBuilder implements IncrementalEncoderDecoder {\n\t/**\n\t * The next reference ID to use for a chunk.\n\t */\n\tprivate nextReferenceId: ChunkReferenceId = brand(0);\n\n\t/**\n\t * For a given summary sequence number, keeps track of a chunk's properties that will be used to generate\n\t * a summary handle for the chunk if it does not change between summaries.\n\t */\n\tprivate readonly chunkTrackingPropertiesMap: NestedMap<\n\t\tnumber,\n\t\tTreeChunk,\n\t\tChunkSummaryProperties\n\t> = new Map();\n\n\t/**\n\t * The state indicating whether a summary is currently being tracked or not.\n\t */\n\tpublic forestSummaryState: ForestSummaryTrackingState =\n\t\tForestSummaryTrackingState.ReadyToTrack;\n\n\t/**\n\t * The sequence number of the latest summary that was successful.\n\t */\n\tprivate latestSummarySequenceNumber: number = -1;\n\n\t/**\n\t * The current state of the summary being tracked.\n\t * This is undefined if no summary is currently being tracked.\n\t */\n\tprivate trackedSummaryProperties: TrackedSummaryProperties | undefined;\n\n\t/**\n\t * A map of chunk reference IDs to their encoded contents. This is typically used during the loading of the\n\t * forest to retrieve the contents of the chunks that were summarized incrementally.\n\t */\n\t/**\n\t * A map of chunk reference IDs to their {@link ChunkLoadProperties}.\n\t * This is used during the loading of the forest to track each chunk that is retrieved and decoded.\n\t */\n\tprivate readonly loadedChunksMap: Map<string, ChunkLoadProperties> = new Map();\n\n\tpublic constructor(\n\t\tprivate readonly enableIncrementalSummary: boolean,\n\t\tprivate readonly getChunkAtCursor: (cursor: ITreeCursorSynchronous) => TreeChunk,\n\t\tpublic readonly shouldEncodeIncrementally: IncrementalEncodingPolicy,\n\t\tprivate readonly initialSequenceNumber: number,\n\t) {}\n\n\t/**\n\t * Must be called when the forest is loaded to download the encoded contents of incremental chunks.\n\t * @param services - The channel storage service to use to access the snapshot tree and download the\n\t * contents of the chunks.\n\t * @param readAndParse - A function that reads and parses a blob from the storage service.\n\t */\n\tpublic async load(\n\t\tservices: IChannelStorageService,\n\t\treadAndParseChunk: <T extends JsonCompatible<IFluidHandle>>(id: string) => Promise<T>,\n\t): Promise<void> {\n\t\tconst forestTree = services.getSnapshotTree?.();\n\t\t// Snapshot tree should be available when loading forest's contents. However, it is an optional function\n\t\t// and may not be implemented by the storage service.\n\t\tif (forestTree === undefined) {\n\t\t\treturn;\n\t\t}\n\n\t\t// Downloads the contents of incremental chunks in the given snapshot tree. Also, recursively downloads\n\t\t// the contents of incremental chunks in any sub-trees.\n\t\tconst downloadChunkContentsInTree = async (\n\t\t\tsnapshotTree: ISnapshotTree,\n\t\t\tparentTreeKey: string,\n\t\t): Promise<void> => {\n\t\t\t// All trees in the snapshot tree are for incremental chunks. 
The key is the chunk's reference ID\n\t\t\t// and the value is the snapshot tree for the chunk.\n\t\t\tfor (const [chunkReferenceId, chunkSnapshotTree] of Object.entries(snapshotTree.trees)) {\n\t\t\t\tconst chunkSubTreePath = `${parentTreeKey}${chunkReferenceId}`;\n\t\t\t\tconst chunkContentsPath = `${chunkSubTreePath}/${chunkContentsBlobKey}`;\n\t\t\t\tif (!(await services.contains(chunkContentsPath))) {\n\t\t\t\t\tthrow new LoggingError(\n\t\t\t\t\t\t`SharedTree: Cannot find contents for incremental chunk ${chunkContentsPath}`,\n\t\t\t\t\t);\n\t\t\t\t}\n\t\t\t\tconst chunkContents = await readAndParseChunk<EncodedFieldBatch>(chunkContentsPath);\n\t\t\t\tthis.loadedChunksMap.set(chunkReferenceId, {\n\t\t\t\t\tencodedContents: chunkContents,\n\t\t\t\t\tsummaryPath: chunkSubTreePath,\n\t\t\t\t});\n\n\t\t\t\tconst chunkReferenceIdNumber = Number(chunkReferenceId);\n\t\t\t\tthis.nextReferenceId = brand(\n\t\t\t\t\tMath.max(this.nextReferenceId, chunkReferenceIdNumber + 1),\n\t\t\t\t);\n\n\t\t\t\t// Recursively download the contents of chunks in this chunk's sub tree.\n\t\t\t\tawait downloadChunkContentsInTree(chunkSnapshotTree, `${chunkSubTreePath}/`);\n\t\t\t}\n\t\t};\n\t\tawait downloadChunkContentsInTree(forestTree, \"\");\n\t}\n\n\t/**\n\t * Must be called when starting a new forest summary to track it.\n\t * @param fullTree - Whether the summary is a full tree summary. If true, the summary will not contain\n\t * any summary handles. All chunks must be summarized in full.\n\t * @param incrementalSummaryContext - The context for the incremental summary that contains the sequence numbers\n\t * for the current and latest summaries.\n\t * @param stringify - Serializes content (including {@link (IFluidHandle:interface)}s) for adding to a summary blob.\n\t * @returns the behavior of the forest's incremental summary.\n\t */\n\tpublic startSummary(args: {\n\t\tfullTree: boolean;\n\t\tincrementalSummaryContext: IExperimentalIncrementalSummaryContext | undefined;\n\t\tstringify: SummaryElementStringifier;\n\t}): ForestIncrementalSummaryBehavior {\n\t\tconst { fullTree, incrementalSummaryContext, stringify } = args;\n\t\t// If there is no incremental summary context, do not summarize incrementally. This happens in two scenarios:\n\t\t// 1. When summarizing a detached container, i.e., the first ever summary.\n\t\t// 2. 
When running GC, the default behavior is to call summarize on DDS without incrementalSummaryContext.\n\t\tif (!this.enableIncrementalSummary || incrementalSummaryContext === undefined) {\n\t\t\treturn ForestIncrementalSummaryBehavior.SingleBlob;\n\t\t}\n\n\t\tvalidateReadyToTrackSummary(this.forestSummaryState, this.trackedSummaryProperties);\n\n\t\tthis.forestSummaryState = ForestSummaryTrackingState.Tracking;\n\t\tthis.latestSummarySequenceNumber = incrementalSummaryContext.latestSummarySequenceNumber;\n\t\tthis.trackedSummaryProperties = {\n\t\t\tsummarySequenceNumber: incrementalSummaryContext.summarySequenceNumber,\n\t\t\tlatestSummaryBasePath: incrementalSummaryContext.summaryPath,\n\t\t\tchunkSummaryPath: [],\n\t\t\tparentSummaryBuilder: new SummaryTreeBuilder(),\n\t\t\tfullTree,\n\t\t\tstringify,\n\t\t};\n\t\treturn ForestIncrementalSummaryBehavior.Incremental;\n\t}\n\n\t/**\n\t * {@link IncrementalEncoder.encodeIncrementalField}\n\t * @remarks Returns an empty array if the field has no content.\n\t */\n\tpublic encodeIncrementalField(\n\t\tcursor: ITreeCursorSynchronous,\n\t\tchunkEncoder: (chunk: TreeChunk) => EncodedFieldBatch,\n\t): ChunkReferenceId[] {\n\t\t// Validate that a summary is currently being tracked and that the tracked summary properties are defined.\n\t\tvalidateTrackingSummary(this.forestSummaryState, this.trackedSummaryProperties);\n\n\t\tif (cursor.getFieldLength() === 0) {\n\t\t\treturn [];\n\t\t}\n\n\t\tlet chunkReferenceId: ChunkReferenceId;\n\t\tlet chunkProperties: ChunkSummaryProperties;\n\n\t\t// An additional ref-count must be added to these chunks representing a reference from the summary tree to the chunk.\n\t\t// This will ensure that the blob's content never change and thus the reference stays accurate: instead of modifying it,\n\t\t// a copy will be created without the blob reference.\n\t\t// The \"getChunkAtCursor\" adds this additional ref-count.\n\t\tconst chunk = this.getChunkAtCursor(cursor);\n\n\t\t// Try and get the properties of the chunk from the latest successful summary.\n\t\t// If it exists and the summary is not a full tree, use the properties to generate a summary handle.\n\t\t// If it does not exist, encode the chunk and generate new properties for it.\n\t\tconst previousChunkProperties = tryGetFromNestedMap(\n\t\t\tthis.chunkTrackingPropertiesMap,\n\t\t\tthis.latestSummarySequenceNumber,\n\t\t\tchunk,\n\t\t);\n\t\tif (previousChunkProperties !== undefined && !this.trackedSummaryProperties.fullTree) {\n\t\t\tchunkProperties = previousChunkProperties;\n\t\t\tchunkReferenceId = previousChunkProperties.referenceId;\n\t\t\tthis.trackedSummaryProperties.parentSummaryBuilder.addHandle(\n\t\t\t\t`${chunkReferenceId}`,\n\t\t\t\tSummaryType.Tree,\n\t\t\t\t`${this.trackedSummaryProperties.latestSummaryBasePath}/${previousChunkProperties.summaryPath}`,\n\t\t\t);\n\t\t} else {\n\t\t\t// Generate a new reference ID for the chunk.\n\t\t\tchunkReferenceId = brand(this.nextReferenceId++);\n\t\t\t// Add the reference ID of this chunk to the chunk summary path and use the path as the summary path\n\t\t\t// for the chunk in its summary properties.\n\t\t\t// This is done before encoding the chunk so that the summary path is updated correctly when encoding\n\t\t\t// any incremental chunks that are under this chunk.\n\t\t\tthis.trackedSummaryProperties.chunkSummaryPath.push(chunkReferenceId);\n\n\t\t\tchunkProperties = {\n\t\t\t\treferenceId: chunkReferenceId,\n\t\t\t\tsummaryPath: 
this.trackedSummaryProperties.chunkSummaryPath.join(\"/\"),\n\t\t\t};\n\n\t\t\tconst parentSummaryBuilder = this.trackedSummaryProperties.parentSummaryBuilder;\n\t\t\t// Create a new summary builder for this chunk to build its summary tree which will be stored in the\n\t\t\t// parent's summary tree under its reference ID.\n\t\t\t// Before encoding the chunk, set the parent summary builder to this chunk's summary builder so that\n\t\t\t// any incremental chunks in the subtree of this chunk will use that as their parent summary builder.\n\t\t\tconst chunkSummaryBuilder = new SummaryTreeBuilder();\n\t\t\tthis.trackedSummaryProperties.parentSummaryBuilder = chunkSummaryBuilder;\n\t\t\tchunkSummaryBuilder.addBlob(\n\t\t\t\tchunkContentsBlobKey,\n\t\t\t\tthis.trackedSummaryProperties.stringify(chunkEncoder(chunk)),\n\t\t\t);\n\n\t\t\t// Add this chunk's summary tree to the parent's summary tree. The summary tree contains its encoded\n\t\t\t// contents and the summary trees of any incremental chunks under it.\n\t\t\tparentSummaryBuilder.addWithStats(\n\t\t\t\t`${chunkReferenceId}`,\n\t\t\t\tchunkSummaryBuilder.getSummaryTree(),\n\t\t\t);\n\n\t\t\t// Restore the parent summary builder and chunk summary path.\n\t\t\tthis.trackedSummaryProperties.parentSummaryBuilder = parentSummaryBuilder;\n\t\t\tthis.trackedSummaryProperties.chunkSummaryPath.pop();\n\t\t}\n\n\t\tsetInNestedMap(\n\t\t\tthis.chunkTrackingPropertiesMap,\n\t\t\tthis.trackedSummaryProperties.summarySequenceNumber,\n\t\t\tchunk,\n\t\t\tchunkProperties,\n\t\t);\n\t\treturn [chunkReferenceId];\n\t}\n\n\t/**\n\t * Must be called after summary generation is complete to finish tracking the summary.\n\t * It clears any tracking state and deletes the tracking properties for summaries that are older than the\n\t * latest successful summary.\n\t * @param incrementalSummaryContext - The context for the incremental summary that contains the sequence numbers.\n\t * If this is undefined, the summary tree will only contain a summary blob for `forestSummaryContent`.\n\t * @param forestSummaryContent - The stringified ForestCodec output of top-level Forest content.\n\t * @returns the Forest's summary tree.\n\t */\n\tpublic completeSummary(args: {\n\t\tincrementalSummaryContext: IExperimentalIncrementalSummaryContext | undefined;\n\t\tforestSummaryContent: string;\n\t}): ISummaryTreeWithStats {\n\t\tconst { incrementalSummaryContext, forestSummaryContent } = args;\n\t\tif (!this.enableIncrementalSummary || incrementalSummaryContext === undefined) {\n\t\t\tconst summaryBuilder = new SummaryTreeBuilder();\n\t\t\tsummaryBuilder.addBlob(forestSummaryContentKey, forestSummaryContent);\n\t\t\treturn summaryBuilder.getSummaryTree();\n\t\t}\n\n\t\tvalidateTrackingSummary(this.forestSummaryState, this.trackedSummaryProperties);\n\n\t\tthis.trackedSummaryProperties.parentSummaryBuilder.addBlob(\n\t\t\tforestSummaryContentKey,\n\t\t\tforestSummaryContent,\n\t\t);\n\n\t\t// Copy over the entries from the latest summary to the current summary.\n\t\t// In the current summary, there can be fields that haven't changed since the latest summary and the chunks\n\t\t// in these fields and in any of its children weren't encoded. 
So, we need get the entries for these chunks\n\t\t// to be able to incrementally summarize them in the next summary.\n\t\tconst latestSummaryTrackingMap = this.chunkTrackingPropertiesMap.get(\n\t\t\tthis.latestSummarySequenceNumber,\n\t\t);\n\t\tconst currentSummaryTrackingMap = this.chunkTrackingPropertiesMap.get(\n\t\t\tthis.trackedSummaryProperties.summarySequenceNumber,\n\t\t);\n\t\tif (latestSummaryTrackingMap !== undefined && currentSummaryTrackingMap !== undefined) {\n\t\t\tfor (const [chunk, chunkProperties] of latestSummaryTrackingMap.entries()) {\n\t\t\t\tif (!currentSummaryTrackingMap.has(chunk)) {\n\t\t\t\t\tcurrentSummaryTrackingMap.set(chunk, chunkProperties);\n\t\t\t\t}\n\t\t\t}\n\t\t}\n\n\t\t// Delete tracking for summaries that are older than the latest successful summary because they will\n\t\t// never be referenced again for generating summary handles.\n\t\tfor (const sequenceNumber of this.chunkTrackingPropertiesMap.keys()) {\n\t\t\tif (sequenceNumber < this.latestSummarySequenceNumber) {\n\t\t\t\tthis.chunkTrackingPropertiesMap.delete(sequenceNumber);\n\t\t\t}\n\t\t}\n\n\t\tthis.forestSummaryState = ForestSummaryTrackingState.ReadyToTrack;\n\t\tconst summaryTree = this.trackedSummaryProperties.parentSummaryBuilder.getSummaryTree();\n\t\tthis.trackedSummaryProperties = undefined;\n\t\treturn summaryTree;\n\t}\n\n\t/**\n\t * {@link IncrementalEncoder.decodeIncrementalChunk}\n\t */\n\tpublic decodeIncrementalChunk(\n\t\treferenceId: ChunkReferenceId,\n\t\tchunkDecoder: (encoded: EncodedFieldBatch) => TreeChunk,\n\t): TreeChunk {\n\t\tconst ChunkLoadProperties = this.loadedChunksMap.get(`${referenceId}`);\n\t\tassert(ChunkLoadProperties !== undefined, \"Encoded incremental chunk not found\");\n\t\tconst chunk = chunkDecoder(ChunkLoadProperties.encodedContents);\n\n\t\t// Account for the reference about to be added in `chunkTrackingPropertiesMap`\n\t\t// to ensure that no other users of this chunk think they have unique ownership.\n\t\t// This prevents prevent whoever this chunk is returned to from modifying it in-place.\n\t\tchunk.referenceAdded();\n\t\t// Track the decoded chunk. This will recreate the tracking state when the summary that this client\n\t\t// is loaded from was generated. This is needed to ensure that incremental summaries work correctly\n\t\t// when a new client starts to summarize.\n\t\tsetInNestedMap(this.chunkTrackingPropertiesMap, this.initialSequenceNumber, chunk, {\n\t\t\treferenceId,\n\t\t\tsummaryPath: ChunkLoadProperties.summaryPath,\n\t\t});\n\t\treturn chunk;\n\t}\n}\n"]}
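The embedded source above documents the incremental summary flow: startSummary begins tracking, encodeIncrementalField either re-encodes a chunk or emits a summary handle pointing at the latest successful summary, and completeSummary carries the tracking entries forward. The sketch below models only the per-chunk reuse decision described in that source; the helper name summarizeChunk and its parameters are illustrative and are not part of the package's API.

// Illustrative model of the reuse check in encodeIncrementalField (not the package's API).
// A chunk that already appeared in the latest successful summary, and is not part of a
// full-tree summary, is written as a handle pointing at its previous summary path;
// otherwise it is re-encoded under a freshly allocated reference id.
interface ChunkSummaryProperties {
	referenceId: number;
	summaryPath: string;
}

type ChunkSummaryResult =
	| { kind: "handle"; path: string }
	| { kind: "tree"; referenceId: number };

function summarizeChunk(
	chunk: object,
	fullTree: boolean,
	latestSummaryBasePath: string,
	latestSummaryChunks: Map<object, ChunkSummaryProperties>,
	allocateReferenceId: () => number,
): ChunkSummaryResult {
	const previous = latestSummaryChunks.get(chunk);
	if (previous !== undefined && !fullTree) {
		// Unchanged since the latest summary: reference the old subtree instead of re-uploading it.
		return { kind: "handle", path: `${latestSummaryBasePath}/${previous.summaryPath}` };
	}
	// New or changed chunk (or a full-tree summary was requested): encode it under a new id.
	return { kind: "tree", referenceId: allocateReferenceId() };
}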
package/dist/legacy.d.ts
CHANGED
@@ -94,11 +94,15 @@ export {

  // #region @beta APIs
  ConciseTree,
+ FixRecursiveArraySchema,
+ FluidSerializableAsTree,
  ForestOptions,
  ForestType,
  ForestTypeExpensiveDebug,
  ForestTypeOptimized,
  ForestTypeReference,
+ JsonCompatible,
+ JsonCompatibleObject,
  KeyEncodingOptions,
  NodeChangedData,
  ObjectSchemaOptions,
package/dist/packageVersion.d.ts
CHANGED
@@ -5,5 +5,5 @@
  * THIS IS AN AUTOGENERATED FILE. DO NOT EDIT THIS FILE DIRECTLY
  */
  export declare const pkgName = "@fluidframework/tree";
- export declare const pkgVersion = "2.63.0";
+ export declare const pkgVersion = "2.70.0-360753";
  //# sourceMappingURL=packageVersion.d.ts.map
package/dist/packageVersion.d.ts.map
CHANGED
@@ -1 +1 @@
- {"version":3,"file":"packageVersion.d.ts","sourceRoot":"","sources":["../src/packageVersion.ts"],"names":[],"mappings":"AAAA;;;;;GAKG;AAEH,eAAO,MAAM,OAAO,yBAAyB,CAAC;AAC9C,eAAO,MAAM,UAAU,
+ {"version":3,"file":"packageVersion.d.ts","sourceRoot":"","sources":["../src/packageVersion.ts"],"names":[],"mappings":"AAAA;;;;;GAKG;AAEH,eAAO,MAAM,OAAO,yBAAyB,CAAC;AAC9C,eAAO,MAAM,UAAU,kBAAkB,CAAC"}
package/dist/packageVersion.js
CHANGED
@@ -8,5 +8,5 @@
  Object.defineProperty(exports, "__esModule", { value: true });
  exports.pkgVersion = exports.pkgName = void 0;
  exports.pkgName = "@fluidframework/tree";
- exports.pkgVersion = "2.63.0";
+ exports.pkgVersion = "2.70.0-360753";
  //# sourceMappingURL=packageVersion.js.map
package/dist/packageVersion.js.map
CHANGED
@@ -1 +1 @@
- {"version":3,"file":"packageVersion.js","sourceRoot":"","sources":["../src/packageVersion.ts"],"names":[],"mappings":";AAAA;;;;;GAKG;;;AAEU,QAAA,OAAO,GAAG,sBAAsB,CAAC;AACjC,QAAA,UAAU,GAAG,
+ {"version":3,"file":"packageVersion.js","sourceRoot":"","sources":["../src/packageVersion.ts"],"names":[],"mappings":";AAAA;;;;;GAKG;;;AAEU,QAAA,OAAO,GAAG,sBAAsB,CAAC;AACjC,QAAA,UAAU,GAAG,eAAe,CAAC","sourcesContent":["/*!\n * Copyright (c) Microsoft Corporation and contributors. All rights reserved.\n * Licensed under the MIT License.\n *\n * THIS IS AN AUTOGENERATED FILE. DO NOT EDIT THIS FILE DIRECTLY\n */\n\nexport const pkgName = \"@fluidframework/tree\";\nexport const pkgVersion = \"2.70.0-360753\";\n"]}
package/dist/serializableDomainSchema.d.ts
CHANGED
@@ -13,14 +13,14 @@ import type { JsonCompatible } from "./util/index.js";
  * Schema which replicate the Fluid Serializable data model with {@link TreeNode}s.
  *
  * Fluid Serializable data can be imported from the {@link FluidSerializableAsTree.Data|Fluid Serializable format} into this format using {@link (TreeBeta:interface).importConcise} with the {@link FluidSerializableAsTree.(Tree:variable)} schema.
- * @
+ * @beta
  */
  export declare namespace FluidSerializableAsTree {
  /**
  * Data which can be serialized by Fluid.
  * @remarks
  * Can be encoded as a {@link FluidSerializableAsTree.(Tree:type)} using {@link (TreeBeta:interface).importConcise}.
- * @
+ * @beta
  */
  type Data = JsonCompatible<IFluidHandle>;
  /**
@@ -29,16 +29,16 @@ export declare namespace FluidSerializableAsTree {
  * ```typescript
  * const tree = TreeBeta.importConcise(FluidSerializableAsTree.Tree, { example: { nested: true }, value: 5 });
  * ```
- * @
+ * @beta
  */
  const Tree: readonly [() => typeof FluidSerializableObject, () => typeof Array, import("./simple-tree/leafNodeSchema.js").LeafSchema<"string", string>, import("./simple-tree/leafNodeSchema.js").LeafSchema<"number", number>, import("./simple-tree/leafNodeSchema.js").LeafSchema<"boolean", boolean>, import("./simple-tree/leafNodeSchema.js").LeafSchema<"null", null>, import("./simple-tree/leafNodeSchema.js").LeafSchema<"handle", IFluidHandle<unknown>>];
  /**
- * @
+ * @beta
  */
  type Tree = TreeNodeFromImplicitAllowedTypes<typeof Tree>;
  /**
  * Do not use. Exists only as a workaround for {@link https://github.com/microsoft/TypeScript/issues/59550} and {@link https://github.com/microsoft/rushstack/issues/4429}.
- * @system @
+ * @system @beta
  */
  const _APIExtractorWorkaroundObjectBase: import("./simple-tree/index.js").TreeNodeSchemaClass<"com.fluidframework.serializable.object", import("./simple-tree/index.js").NodeKind.Record, import("./simple-tree/index.js").TreeRecordNodeUnsafe<readonly [() => typeof FluidSerializableObject, () => typeof Array, import("./simple-tree/leafNodeSchema.js").LeafSchema<"string", string>, import("./simple-tree/leafNodeSchema.js").LeafSchema<"number", number>, import("./simple-tree/leafNodeSchema.js").LeafSchema<"boolean", boolean>, import("./simple-tree/leafNodeSchema.js").LeafSchema<"null", null>, import("./simple-tree/leafNodeSchema.js").LeafSchema<"handle", IFluidHandle<unknown>>]> & import("./simple-tree/index.js").WithType<"com.fluidframework.serializable.object", import("./simple-tree/index.js").NodeKind.Record, unknown>, {
  readonly [x: string]: string | number | IFluidHandle<unknown> | import("./simple-tree/index.js").System_Unsafe.InsertableTypedNodeUnsafe<import("./simple-tree/leafNodeSchema.js").LeafSchema<"boolean", boolean>, import("./simple-tree/leafNodeSchema.js").LeafSchema<"boolean", boolean>> | FluidSerializableObject | Array | null;
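The doc comment in the hunk above already shows the intended entry point. A minimal consumer-side sketch, assuming the schema and TreeBeta are both available from the package's ./beta export (the import path is an assumption, not something this diff states):

// Illustrative usage of the newly @beta FluidSerializableAsTree schema.
import { TreeBeta, FluidSerializableAsTree } from "@fluidframework/tree/beta";

// Import arbitrary Fluid-serializable data (FluidSerializableAsTree.Data) as TreeNodes.
const tree = TreeBeta.importConcise(FluidSerializableAsTree.Tree, {
	example: { nested: true },
	value: 5,
});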
@@ -58,7 +58,7 @@ export declare namespace FluidSerializableAsTree {
  * ```
  * @privateRemarks
  * Due to https://github.com/microsoft/TypeScript/issues/61270 this can't be named `Object`.
- * @sealed @
+ * @sealed @beta
  */
  class FluidSerializableObject extends _APIExtractorWorkaroundObjectBase {
  }
@@ -67,14 +67,16 @@ export declare namespace FluidSerializableAsTree {
  * @privateRemarks
  * In the past this this had to reference the base type (_APIExtractorWorkaroundArrayBase).
  * Testing for this in examples/utils/import-testing now shows it has to reference FluidSerializableAsTree.Array instead.
- * @system @
+ * @system @beta
  */
  type _RecursiveArrayWorkaroundJsonArray = FixRecursiveArraySchema<typeof Array>;
  /**
  * Do not use. Exists only as a workaround for {@link https://github.com/microsoft/TypeScript/issues/59550} and {@link https://github.com/microsoft/rushstack/issues/4429}.
- * @system @
+ * @system @beta
  */
- const _APIExtractorWorkaroundArrayBase: import("./simple-tree/index.js").
+ const _APIExtractorWorkaroundArrayBase: import("./simple-tree/index.js").TreeNodeSchemaClass<"com.fluidframework.serializable.array", import("./simple-tree/index.js").NodeKind.Array, import("./simple-tree/index.js").System_Unsafe.TreeArrayNodeUnsafe<readonly [() => typeof FluidSerializableObject, () => typeof Array, import("./simple-tree/leafNodeSchema.js").LeafSchema<"string", string>, import("./simple-tree/leafNodeSchema.js").LeafSchema<"number", number>, import("./simple-tree/leafNodeSchema.js").LeafSchema<"boolean", boolean>, import("./simple-tree/leafNodeSchema.js").LeafSchema<"null", null>, import("./simple-tree/leafNodeSchema.js").LeafSchema<"handle", IFluidHandle<unknown>>]> & import("./simple-tree/index.js").WithType<"com.fluidframework.serializable.array", import("./simple-tree/index.js").NodeKind.Array, unknown>, {
+ [Symbol.iterator](): Iterator<string | number | IFluidHandle<unknown> | import("./simple-tree/index.js").System_Unsafe.InsertableTypedNodeUnsafe<import("./simple-tree/leafNodeSchema.js").LeafSchema<"boolean", boolean>, import("./simple-tree/leafNodeSchema.js").LeafSchema<"boolean", boolean>> | FluidSerializableObject | Array | null, any, undefined>;
+ }, false, readonly [() => typeof FluidSerializableObject, () => typeof Array, import("./simple-tree/leafNodeSchema.js").LeafSchema<"string", string>, import("./simple-tree/leafNodeSchema.js").LeafSchema<"number", number>, import("./simple-tree/leafNodeSchema.js").LeafSchema<"boolean", boolean>, import("./simple-tree/leafNodeSchema.js").LeafSchema<"null", null>, import("./simple-tree/leafNodeSchema.js").LeafSchema<"handle", IFluidHandle<unknown>>], undefined>;
  /**
  * Arbitrary Fluid Serializable array as a {@link TreeNode}.
  * @remarks
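The hunk above applies the documented FixRecursiveArraySchema workaround to this domain's Array schema. The same pattern in user code might look like the sketch below; the schema scope, class name, and the ./beta import path are assumptions for illustration.

// Hypothetical recursive array schema using the FixRecursiveArraySchema pattern shown above.
import { SchemaFactory, type FixRecursiveArraySchema } from "@fluidframework/tree/beta";

const factory = new SchemaFactory("com.example.demo");

// A self-referential array: each entry is either a string or another RecursiveList.
class RecursiveList extends factory.arrayRecursive("RecursiveList", [
	factory.string,
	() => RecursiveList,
]) {}

// Mirrors _RecursiveArrayWorkaroundJsonArray above: keeps the recursive array schema
// usable when the declaration is emitted into .d.ts files.
type _RecursiveListWorkaround = FixRecursiveArraySchema<typeof RecursiveList>;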
@@ -90,7 +92,7 @@ export declare namespace FluidSerializableAsTree {
  * assert(Tree.is(inner, FluidSerializableAsTree.Array));
  * const leaf = inner[0];
  * ```
- * @sealed @
+ * @sealed @beta
  */
  class Array extends _APIExtractorWorkaroundArrayBase {
  }
package/dist/serializableDomainSchema.d.ts.map
CHANGED
@@ -1 +1 @@
- old mapping (shown truncated):
{"version":3,"file":"serializableDomainSchema.d.ts","sourceRoot":"","sources":["../src/serializableDomainSchema.ts"],"names":[],"mappings":"AAAA;;;GAGG;AAEH,OAAO,KAAK,EAAE,YAAY,EAAE,MAAM,iCAAiC,CAAC;AAEpE,OAAO,EAGN,KAAK,uBAAuB,EAC5B,KAAK,gCAAgC,EAErC,MAAM,wBAAwB,CAAC;AAChC,OAAO,KAAK,EAAE,cAAc,EAAE,MAAM,iBAAiB,CAAC;AAItD;;;;;;;;;GASG;AACH,yBAAiB,uBAAuB,CAAC;IACxC;;;;;OAKG;IACH,KAAY,IAAI,GAAG,cAAc,CAAC,YAAY,CAAC,CAAC;IAEhD;;;;;;;OAOG;IACI,MAAM,IAAI,0bAIP,CAAC;IAEX;;OAEG;IACH,KAAY,IAAI,GAAG,gCAAgC,CAAC,OAAO,IAAI,CAAC,CAAC;IAEjE;;;OAGG;IACI,MAAM,iCAAiC;;2dAAqC,CAAC;IAEpF;;;;;;;;;;;;;;;;OAgBG;IACH,MAAa,uBAAwB,SAAQ,iCAAiC;KAAG;IAKjF;;;;;;OAMG;IACH,KAAoB,kCAAkC,GAAG,uBAAuB,CAC/E,OAAO,KAAK,CACZ,CAAC;IAEF;;;OAGG;IACI,MAAM,gCAAgC,
+ new mapping:
{"version":3,"file":"serializableDomainSchema.d.ts","sourceRoot":"","sources":["../src/serializableDomainSchema.ts"],"names":[],"mappings":"AAAA;;;GAGG;AAEH,OAAO,KAAK,EAAE,YAAY,EAAE,MAAM,iCAAiC,CAAC;AAEpE,OAAO,EAGN,KAAK,uBAAuB,EAC5B,KAAK,gCAAgC,EAErC,MAAM,wBAAwB,CAAC;AAChC,OAAO,KAAK,EAAE,cAAc,EAAE,MAAM,iBAAiB,CAAC;AAItD;;;;;;;;;GASG;AACH,yBAAiB,uBAAuB,CAAC;IACxC;;;;;OAKG;IACH,KAAY,IAAI,GAAG,cAAc,CAAC,YAAY,CAAC,CAAC;IAEhD;;;;;;;OAOG;IACI,MAAM,IAAI,0bAIP,CAAC;IAEX;;OAEG;IACH,KAAY,IAAI,GAAG,gCAAgC,CAAC,OAAO,IAAI,CAAC,CAAC;IAEjE;;;OAGG;IACI,MAAM,iCAAiC;;2dAAqC,CAAC;IAEpF;;;;;;;;;;;;;;;;OAgBG;IACH,MAAa,uBAAwB,SAAQ,iCAAiC;KAAG;IAKjF;;;;;;OAMG;IACH,KAAoB,kCAAkC,GAAG,uBAAuB,CAC/E,OAAO,KAAK,CACZ,CAAC;IAEF;;;OAGG;IACI,MAAM,gCAAgC;;kdAAmC,CAAC;IAEjF;;;;;;;;;;;;;;;;OAgBG;IACH,MAAa,KAAM,SAAQ,gCAAgC;KAAG;CAI9D"}