@azure/search-documents 12.3.0-beta.1 → 13.0.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +1 -1
- package/dist/browser/base64.d.ts +1 -1
- package/dist/browser/base64.d.ts.map +1 -0
- package/dist/browser/base64.js +4 -3
- package/dist/browser/base64.js.map +1 -0
- package/dist/browser/index.d.ts +14 -12
- package/dist/browser/index.d.ts.map +1 -1
- package/dist/browser/index.js +3 -4
- package/dist/browser/index.js.map +1 -1
- package/dist/browser/indexModels.d.ts +16 -114
- package/dist/browser/indexModels.d.ts.map +1 -1
- package/dist/browser/indexModels.js.map +1 -1
- package/dist/browser/knowledgeBaseModels.d.ts +6 -18
- package/dist/browser/knowledgeBaseModels.d.ts.map +1 -1
- package/dist/browser/knowledgeBaseModels.js.map +1 -1
- package/dist/browser/knowledgeBaseRetrieval/api/index.d.ts +5 -0
- package/dist/browser/knowledgeBaseRetrieval/api/index.d.ts.map +1 -0
- package/dist/browser/knowledgeBaseRetrieval/api/index.js +5 -0
- package/dist/browser/knowledgeBaseRetrieval/api/index.js.map +1 -0
- package/dist/browser/knowledgeBaseRetrieval/api/knowledgeBaseRetrievalContext.d.ts +17 -0
- package/dist/browser/knowledgeBaseRetrieval/api/knowledgeBaseRetrievalContext.d.ts.map +1 -0
- package/dist/browser/knowledgeBaseRetrieval/api/knowledgeBaseRetrievalContext.js +25 -0
- package/dist/browser/knowledgeBaseRetrieval/api/knowledgeBaseRetrievalContext.js.map +1 -0
- package/dist/browser/knowledgeBaseRetrieval/api/operations.d.ts +9 -0
- package/dist/browser/knowledgeBaseRetrieval/api/operations.d.ts.map +1 -0
- package/dist/browser/knowledgeBaseRetrieval/api/operations.js +45 -0
- package/dist/browser/knowledgeBaseRetrieval/api/operations.js.map +1 -0
- package/dist/browser/knowledgeBaseRetrieval/api/options.d.ts +9 -0
- package/dist/browser/knowledgeBaseRetrieval/api/options.d.ts.map +1 -0
- package/dist/browser/knowledgeBaseRetrieval/api/options.js +4 -0
- package/dist/browser/knowledgeBaseRetrieval/api/options.js.map +1 -0
- package/dist/browser/knowledgeBaseRetrieval/index.d.ts +3 -0
- package/dist/browser/knowledgeBaseRetrieval/index.d.ts.map +1 -0
- package/dist/browser/knowledgeBaseRetrieval/index.js +4 -0
- package/dist/browser/knowledgeBaseRetrieval/index.js.map +1 -0
- package/dist/browser/knowledgeBaseRetrieval/knowledgeBaseRetrievalClient.d.ts +15 -0
- package/dist/browser/knowledgeBaseRetrieval/knowledgeBaseRetrievalClient.d.ts.map +1 -0
- package/dist/browser/knowledgeBaseRetrieval/knowledgeBaseRetrievalClient.js +25 -0
- package/dist/browser/knowledgeBaseRetrieval/knowledgeBaseRetrievalClient.js.map +1 -0
- package/dist/browser/knowledgeRetrievalClient.d.ts +5 -5
- package/dist/browser/knowledgeRetrievalClient.d.ts.map +1 -1
- package/dist/browser/knowledgeRetrievalClient.js +11 -19
- package/dist/browser/knowledgeRetrievalClient.js.map +1 -1
- package/dist/browser/models/azure/search/documents/index.d.ts +3 -0
- package/dist/browser/models/azure/search/documents/index.d.ts.map +1 -0
- package/dist/browser/models/azure/search/documents/index.js +4 -0
- package/dist/browser/models/azure/search/documents/index.js.map +1 -0
- package/dist/browser/models/azure/search/documents/indexes/index.d.ts +3 -0
- package/dist/browser/models/azure/search/documents/indexes/index.d.ts.map +1 -0
- package/dist/browser/models/azure/search/documents/indexes/index.js +4 -0
- package/dist/browser/models/azure/search/documents/indexes/index.js.map +1 -0
- package/dist/browser/models/azure/search/documents/indexes/models.d.ts +5170 -0
- package/dist/browser/models/azure/search/documents/indexes/models.d.ts.map +1 -0
- package/dist/browser/models/azure/search/documents/indexes/models.js +6380 -0
- package/dist/browser/models/azure/search/documents/indexes/models.js.map +1 -0
- package/dist/browser/models/azure/search/documents/knowledgeBases/index.d.ts +3 -0
- package/dist/browser/models/azure/search/documents/knowledgeBases/index.d.ts.map +1 -0
- package/dist/browser/models/azure/search/documents/knowledgeBases/index.js +4 -0
- package/dist/browser/models/azure/search/documents/knowledgeBases/index.js.map +1 -0
- package/dist/browser/models/azure/search/documents/knowledgeBases/models.d.ts +492 -0
- package/dist/browser/models/azure/search/documents/knowledgeBases/models.d.ts.map +1 -0
- package/dist/browser/models/azure/search/documents/knowledgeBases/models.js +592 -0
- package/dist/browser/models/azure/search/documents/knowledgeBases/models.js.map +1 -0
- package/dist/browser/models/azure/search/documents/models.d.ts +537 -0
- package/dist/browser/models/azure/search/documents/models.d.ts.map +1 -0
- package/dist/browser/models/azure/search/documents/models.js +593 -0
- package/dist/browser/models/azure/search/documents/models.js.map +1 -0
- package/dist/browser/models/index.d.ts +3 -0
- package/dist/browser/models/index.d.ts.map +1 -0
- package/dist/browser/models/index.js +4 -0
- package/dist/browser/models/index.js.map +1 -0
- package/dist/browser/models/models.d.ts +17 -0
- package/dist/browser/models/models.d.ts.map +1 -0
- package/dist/browser/models/models.js +15 -0
- package/dist/browser/models/models.js.map +1 -0
- package/dist/browser/search/api/index.d.ts +5 -0
- package/dist/browser/search/api/index.d.ts.map +1 -0
- package/dist/browser/search/api/index.js +5 -0
- package/dist/browser/search/api/index.js.map +1 -0
- package/dist/browser/search/api/operations.d.ts +41 -0
- package/dist/browser/search/api/operations.d.ts.map +1 -0
- package/dist/browser/search/api/operations.js +456 -0
- package/dist/browser/search/api/operations.js.map +1 -0
- package/dist/browser/search/api/options.d.ts +244 -0
- package/dist/browser/search/api/options.d.ts.map +1 -0
- package/dist/browser/search/api/options.js +4 -0
- package/dist/browser/search/api/options.js.map +1 -0
- package/dist/browser/search/api/searchContext.d.ts +17 -0
- package/dist/browser/search/api/searchContext.d.ts.map +1 -0
- package/dist/browser/search/api/searchContext.js +25 -0
- package/dist/browser/search/api/searchContext.js.map +1 -0
- package/dist/browser/search/index.d.ts +3 -0
- package/dist/browser/search/index.d.ts.map +1 -0
- package/dist/browser/search/index.js +4 -0
- package/dist/browser/search/index.js.map +1 -0
- package/dist/browser/search/searchClient.d.ts +31 -0
- package/dist/browser/search/searchClient.d.ts.map +1 -0
- package/dist/browser/search/searchClient.js +57 -0
- package/dist/browser/search/searchClient.js.map +1 -0
- package/dist/browser/searchClient.d.ts +4 -6
- package/dist/browser/searchClient.d.ts.map +1 -1
- package/dist/browser/searchClient.js +93 -238
- package/dist/browser/searchClient.js.map +1 -1
- package/dist/browser/searchIndex/api/index.d.ts +5 -0
- package/dist/browser/searchIndex/api/index.d.ts.map +1 -0
- package/dist/browser/searchIndex/api/index.js +5 -0
- package/dist/browser/searchIndex/api/index.js.map +1 -0
- package/dist/browser/searchIndex/api/operations.d.ts +128 -0
- package/dist/browser/searchIndex/api/operations.d.ts.map +1 -0
- package/dist/browser/searchIndex/api/operations.js +1123 -0
- package/dist/browser/searchIndex/api/operations.js.map +1 -0
- package/dist/browser/searchIndex/api/options.d.ts +258 -0
- package/dist/browser/searchIndex/api/options.d.ts.map +1 -0
- package/dist/browser/searchIndex/api/options.js +4 -0
- package/dist/browser/searchIndex/api/options.js.map +1 -0
- package/dist/browser/searchIndex/api/searchIndexContext.d.ts +15 -0
- package/dist/browser/searchIndex/api/searchIndexContext.d.ts.map +1 -0
- package/dist/browser/searchIndex/api/searchIndexContext.js +25 -0
- package/dist/browser/searchIndex/api/searchIndexContext.js.map +1 -0
- package/dist/browser/searchIndex/index.d.ts +3 -0
- package/dist/browser/searchIndex/index.d.ts.map +1 -0
- package/dist/browser/searchIndex/index.js +4 -0
- package/dist/browser/searchIndex/index.js.map +1 -0
- package/dist/browser/searchIndex/searchIndexClient.d.ts +75 -0
- package/dist/browser/searchIndex/searchIndexClient.d.ts.map +1 -0
- package/dist/browser/searchIndex/searchIndexClient.js +141 -0
- package/dist/browser/searchIndex/searchIndexClient.js.map +1 -0
- package/dist/browser/searchIndexClient.d.ts +6 -22
- package/dist/browser/searchIndexClient.d.ts.map +1 -1
- package/dist/browser/searchIndexClient.js +108 -616
- package/dist/browser/searchIndexClient.js.map +1 -1
- package/dist/browser/searchIndexer/api/index.d.ts +5 -0
- package/dist/browser/searchIndexer/api/index.d.ts.map +1 -0
- package/dist/browser/searchIndexer/api/index.js +5 -0
- package/dist/browser/searchIndexer/api/index.js.map +1 -0
- package/dist/browser/searchIndexer/api/operations.d.ts +77 -0
- package/dist/browser/searchIndexer/api/operations.d.ts.map +1 -0
- package/dist/browser/searchIndexer/api/operations.js +679 -0
- package/dist/browser/searchIndexer/api/operations.js.map +1 -0
- package/dist/browser/searchIndexer/api/options.d.ts +158 -0
- package/dist/browser/searchIndexer/api/options.d.ts.map +1 -0
- package/dist/browser/searchIndexer/api/options.js +4 -0
- package/dist/browser/searchIndexer/api/options.js.map +1 -0
- package/dist/browser/searchIndexer/api/searchIndexerContext.d.ts +15 -0
- package/dist/browser/searchIndexer/api/searchIndexerContext.d.ts.map +1 -0
- package/dist/browser/searchIndexer/api/searchIndexerContext.js +25 -0
- package/dist/browser/searchIndexer/api/searchIndexerContext.js.map +1 -0
- package/dist/browser/searchIndexer/index.d.ts +3 -0
- package/dist/browser/searchIndexer/index.d.ts.map +1 -0
- package/dist/browser/searchIndexer/index.js +4 -0
- package/dist/browser/searchIndexer/index.js.map +1 -0
- package/dist/browser/searchIndexer/searchIndexerClient.d.ts +49 -0
- package/dist/browser/searchIndexer/searchIndexerClient.d.ts.map +1 -0
- package/dist/browser/searchIndexer/searchIndexerClient.js +93 -0
- package/dist/browser/searchIndexer/searchIndexerClient.js.map +1 -0
- package/dist/browser/searchIndexerClient.d.ts +4 -17
- package/dist/browser/searchIndexerClient.d.ts.map +1 -1
- package/dist/browser/searchIndexerClient.js +74 -350
- package/dist/browser/searchIndexerClient.js.map +1 -1
- package/dist/browser/searchIndexingBufferedSender.d.ts +1 -1
- package/dist/browser/searchIndexingBufferedSender.d.ts.map +1 -1
- package/dist/browser/searchIndexingBufferedSender.js +3 -7
- package/dist/browser/searchIndexingBufferedSender.js.map +1 -1
- package/dist/browser/serviceModels.d.ts +80 -199
- package/dist/browser/serviceModels.d.ts.map +1 -1
- package/dist/browser/serviceModels.js.map +1 -1
- package/dist/browser/serviceUtils.d.ts +52 -10
- package/dist/browser/serviceUtils.d.ts.map +1 -1
- package/dist/browser/serviceUtils.js +210 -167
- package/dist/browser/serviceUtils.js.map +1 -1
- package/dist/browser/static-helpers/pagingHelpers.d.ts +74 -0
- package/dist/browser/static-helpers/pagingHelpers.d.ts.map +1 -0
- package/dist/browser/static-helpers/pagingHelpers.js +143 -0
- package/dist/browser/static-helpers/pagingHelpers.js.map +1 -0
- package/dist/browser/static-helpers/serialization/build-csv-collection.d.ts +2 -0
- package/dist/browser/static-helpers/serialization/build-csv-collection.d.ts.map +1 -0
- package/dist/browser/static-helpers/serialization/build-csv-collection.js +6 -0
- package/dist/browser/static-helpers/serialization/build-csv-collection.js.map +1 -0
- package/dist/browser/static-helpers/serialization/build-newline-collection.d.ts +2 -0
- package/dist/browser/static-helpers/serialization/build-newline-collection.d.ts.map +1 -0
- package/dist/browser/static-helpers/serialization/build-newline-collection.js +6 -0
- package/dist/browser/static-helpers/serialization/build-newline-collection.js.map +1 -0
- package/dist/browser/static-helpers/serialization/build-pipe-collection.d.ts +2 -0
- package/dist/browser/static-helpers/serialization/build-pipe-collection.d.ts.map +1 -0
- package/dist/browser/static-helpers/serialization/build-pipe-collection.js +6 -0
- package/dist/browser/static-helpers/serialization/build-pipe-collection.js.map +1 -0
- package/dist/browser/static-helpers/serialization/check-prop-undefined.d.ts +8 -0
- package/dist/browser/static-helpers/serialization/check-prop-undefined.d.ts.map +1 -0
- package/dist/browser/static-helpers/serialization/check-prop-undefined.js +17 -0
- package/dist/browser/static-helpers/serialization/check-prop-undefined.js.map +1 -0
- package/dist/browser/static-helpers/serialization/parse-csv-collection.d.ts +2 -0
- package/dist/browser/static-helpers/serialization/parse-csv-collection.d.ts.map +1 -0
- package/dist/browser/static-helpers/serialization/parse-csv-collection.js +6 -0
- package/dist/browser/static-helpers/serialization/parse-csv-collection.js.map +1 -0
- package/dist/browser/static-helpers/serialization/parse-newline-collection.d.ts +2 -0
- package/dist/browser/static-helpers/serialization/parse-newline-collection.d.ts.map +1 -0
- package/dist/browser/static-helpers/serialization/parse-newline-collection.js +6 -0
- package/dist/browser/static-helpers/serialization/parse-newline-collection.js.map +1 -0
- package/dist/browser/static-helpers/serialization/parse-pipe-collection.d.ts +2 -0
- package/dist/browser/static-helpers/serialization/parse-pipe-collection.d.ts.map +1 -0
- package/dist/browser/static-helpers/serialization/parse-pipe-collection.js +6 -0
- package/dist/browser/static-helpers/serialization/parse-pipe-collection.js.map +1 -0
- package/dist/browser/static-helpers/serialization/serialize-record.d.ts +2 -0
- package/dist/browser/static-helpers/serialization/serialize-record.d.ts.map +1 -0
- package/dist/browser/static-helpers/serialization/serialize-record.js +19 -0
- package/dist/browser/static-helpers/serialization/serialize-record.js.map +1 -0
- package/dist/browser/static-helpers/urlTemplate.d.ts +5 -0
- package/dist/browser/static-helpers/urlTemplate.d.ts.map +1 -0
- package/dist/browser/static-helpers/urlTemplate.js +200 -0
- package/dist/browser/static-helpers/urlTemplate.js.map +1 -0
- package/dist/browser/synonymMapHelper-browser.d.mts +11 -0
- package/dist/browser/synonymMapHelper-browser.mjs +14 -0
- package/dist/browser/synonymMapHelper-browser.mjs.map +1 -1
- package/dist/browser/timers-browser.d.mts +8 -0
- package/dist/browser/timers-browser.d.mts.map +1 -0
- package/dist/browser/timers-browser.mjs +13 -0
- package/dist/browser/timers-browser.mjs.map +1 -0
- package/dist/browser/tracing.d.ts +5 -0
- package/dist/browser/tracing.d.ts.map +1 -1
- package/dist/browser/tracing.js +1 -1
- package/dist/browser/tracing.js.map +1 -1
- package/dist/browser/walk.js +1 -1
- package/dist/browser/walk.js.map +1 -1
- package/dist/commonjs/base64.d.ts.map +1 -1
- package/dist/commonjs/base64.js +32 -17
- package/dist/commonjs/base64.js.map +7 -1
- package/dist/commonjs/geographyPoint.js +51 -37
- package/dist/commonjs/geographyPoint.js.map +7 -1
- package/dist/commonjs/index.d.ts +13 -11
- package/dist/commonjs/index.d.ts.map +1 -1
- package/dist/commonjs/index.js +197 -109
- package/dist/commonjs/index.js.map +7 -1
- package/dist/commonjs/indexDocumentsBatch.js +96 -81
- package/dist/commonjs/indexDocumentsBatch.js.map +7 -1
- package/dist/commonjs/indexModels.d.ts +16 -114
- package/dist/commonjs/indexModels.d.ts.map +1 -1
- package/dist/commonjs/indexModels.js +16 -5
- package/dist/commonjs/indexModels.js.map +7 -1
- package/dist/commonjs/knowledgeBaseModels.d.ts +6 -18
- package/dist/commonjs/knowledgeBaseModels.d.ts.map +1 -1
- package/dist/commonjs/knowledgeBaseModels.js +16 -5
- package/dist/commonjs/knowledgeBaseModels.js.map +7 -1
- package/dist/commonjs/knowledgeBaseRetrieval/api/index.d.ts +5 -0
- package/dist/commonjs/knowledgeBaseRetrieval/api/index.d.ts.map +1 -0
- package/dist/commonjs/knowledgeBaseRetrieval/api/index.js +31 -0
- package/dist/commonjs/knowledgeBaseRetrieval/api/index.js.map +7 -0
- package/dist/commonjs/knowledgeBaseRetrieval/api/knowledgeBaseRetrievalContext.d.ts +17 -0
- package/dist/commonjs/knowledgeBaseRetrieval/api/knowledgeBaseRetrievalContext.d.ts.map +1 -0
- package/dist/commonjs/knowledgeBaseRetrieval/api/knowledgeBaseRetrievalContext.js +47 -0
- package/dist/commonjs/knowledgeBaseRetrieval/api/knowledgeBaseRetrievalContext.js.map +7 -0
- package/dist/commonjs/knowledgeBaseRetrieval/api/operations.d.ts +9 -0
- package/dist/commonjs/knowledgeBaseRetrieval/api/operations.d.ts.map +1 -0
- package/dist/commonjs/knowledgeBaseRetrieval/api/operations.js +72 -0
- package/dist/commonjs/knowledgeBaseRetrieval/api/operations.js.map +7 -0
- package/dist/commonjs/knowledgeBaseRetrieval/api/options.d.ts +9 -0
- package/dist/commonjs/knowledgeBaseRetrieval/api/options.d.ts.map +1 -0
- package/dist/commonjs/knowledgeBaseRetrieval/api/options.js +16 -0
- package/dist/commonjs/knowledgeBaseRetrieval/api/options.js.map +7 -0
- package/dist/commonjs/knowledgeBaseRetrieval/index.d.ts +3 -0
- package/dist/commonjs/knowledgeBaseRetrieval/index.d.ts.map +1 -0
- package/dist/commonjs/knowledgeBaseRetrieval/index.js +28 -0
- package/dist/commonjs/knowledgeBaseRetrieval/index.js.map +7 -0
- package/dist/commonjs/knowledgeBaseRetrieval/knowledgeBaseRetrievalClient.d.ts +15 -0
- package/dist/commonjs/knowledgeBaseRetrieval/knowledgeBaseRetrievalClient.d.ts.map +1 -0
- package/dist/commonjs/knowledgeBaseRetrieval/knowledgeBaseRetrievalClient.js +47 -0
- package/dist/commonjs/knowledgeBaseRetrieval/knowledgeBaseRetrievalClient.js.map +7 -0
- package/dist/commonjs/knowledgeRetrievalClient.d.ts +5 -5
- package/dist/commonjs/knowledgeRetrievalClient.d.ts.map +1 -1
- package/dist/commonjs/knowledgeRetrievalClient.js +134 -109
- package/dist/commonjs/knowledgeRetrievalClient.js.map +7 -1
- package/dist/commonjs/logger.js +29 -11
- package/dist/commonjs/logger.js.map +7 -1
- package/dist/commonjs/models/azure/search/documents/index.d.ts +3 -0
- package/dist/commonjs/models/azure/search/documents/index.d.ts.map +1 -0
- package/dist/commonjs/models/azure/search/documents/index.js +44 -0
- package/dist/commonjs/models/azure/search/documents/index.js.map +7 -0
- package/dist/commonjs/models/azure/search/documents/indexes/index.d.ts +3 -0
- package/dist/commonjs/models/azure/search/documents/indexes/index.d.ts.map +1 -0
- package/dist/commonjs/models/azure/search/documents/indexes/index.js +134 -0
- package/dist/commonjs/models/azure/search/documents/indexes/index.js.map +7 -0
- package/dist/commonjs/models/azure/search/documents/indexes/models.d.ts +5170 -0
- package/dist/commonjs/models/azure/search/documents/indexes/models.d.ts.map +1 -0
- package/dist/commonjs/models/azure/search/documents/indexes/models.js +6342 -0
- package/dist/commonjs/models/azure/search/documents/indexes/models.js.map +7 -0
- package/dist/commonjs/models/azure/search/documents/knowledgeBases/index.d.ts +3 -0
- package/dist/commonjs/models/azure/search/documents/knowledgeBases/index.d.ts.map +1 -0
- package/dist/commonjs/models/azure/search/documents/knowledgeBases/index.js +36 -0
- package/dist/commonjs/models/azure/search/documents/knowledgeBases/index.js.map +7 -0
- package/dist/commonjs/models/azure/search/documents/knowledgeBases/models.d.ts +492 -0
- package/dist/commonjs/models/azure/search/documents/knowledgeBases/models.d.ts.map +1 -0
- package/dist/commonjs/models/azure/search/documents/knowledgeBases/models.js +689 -0
- package/dist/commonjs/models/azure/search/documents/knowledgeBases/models.js.map +7 -0
- package/dist/commonjs/models/azure/search/documents/models.d.ts +537 -0
- package/dist/commonjs/models/azure/search/documents/models.d.ts.map +1 -0
- package/dist/commonjs/models/azure/search/documents/models.js +699 -0
- package/dist/commonjs/models/azure/search/documents/models.js.map +7 -0
- package/dist/commonjs/models/index.d.ts +3 -0
- package/dist/commonjs/models/index.d.ts.map +1 -0
- package/dist/commonjs/models/index.js +28 -0
- package/dist/commonjs/models/index.js.map +7 -0
- package/dist/commonjs/models/models.d.ts +17 -0
- package/dist/commonjs/models/models.d.ts.map +1 -0
- package/dist/commonjs/models/models.js +39 -0
- package/dist/commonjs/models/models.js.map +7 -0
- package/dist/commonjs/odata.js +49 -45
- package/dist/commonjs/odata.js.map +7 -1
- package/dist/commonjs/odataMetadataPolicy.js +34 -17
- package/dist/commonjs/odataMetadataPolicy.js.map +7 -1
- package/dist/commonjs/search/api/index.d.ts +5 -0
- package/dist/commonjs/search/api/index.d.ts.map +1 -0
- package/dist/commonjs/search/api/index.js +47 -0
- package/dist/commonjs/search/api/index.js.map +7 -0
- package/dist/commonjs/search/api/operations.d.ts +41 -0
- package/dist/commonjs/search/api/operations.d.ts.map +1 -0
- package/dist/commonjs/search/api/operations.js +509 -0
- package/dist/commonjs/search/api/operations.js.map +7 -0
- package/dist/commonjs/search/api/options.d.ts +244 -0
- package/dist/commonjs/search/api/options.d.ts.map +1 -0
- package/dist/commonjs/search/api/options.js +16 -0
- package/dist/commonjs/search/api/options.js.map +7 -0
- package/dist/commonjs/search/api/searchContext.d.ts +17 -0
- package/dist/commonjs/search/api/searchContext.d.ts.map +1 -0
- package/dist/commonjs/search/api/searchContext.js +47 -0
- package/dist/commonjs/search/api/searchContext.js.map +7 -0
- package/dist/commonjs/search/index.d.ts +3 -0
- package/dist/commonjs/search/index.d.ts.map +1 -0
- package/dist/commonjs/search/index.js +28 -0
- package/dist/commonjs/search/index.js.map +7 -0
- package/dist/commonjs/search/searchClient.d.ts +31 -0
- package/dist/commonjs/search/searchClient.d.ts.map +1 -0
- package/dist/commonjs/search/searchClient.js +79 -0
- package/dist/commonjs/search/searchClient.js.map +7 -0
- package/dist/commonjs/searchApiKeyCredentialPolicy.js +36 -19
- package/dist/commonjs/searchApiKeyCredentialPolicy.js.map +7 -1
- package/dist/commonjs/searchAudience.js +33 -24
- package/dist/commonjs/searchAudience.js.map +7 -1
- package/dist/commonjs/searchClient.d.ts +4 -6
- package/dist/commonjs/searchClient.d.ts.map +1 -1
- package/dist/commonjs/searchClient.js +648 -702
- package/dist/commonjs/searchClient.js.map +7 -1
- package/dist/commonjs/searchIndex/api/index.d.ts +5 -0
- package/dist/commonjs/searchIndex/api/index.d.ts.map +1 -0
- package/dist/commonjs/searchIndex/api/index.js +89 -0
- package/dist/commonjs/searchIndex/api/index.js.map +7 -0
- package/dist/commonjs/searchIndex/api/operations.d.ts +128 -0
- package/dist/commonjs/searchIndex/api/operations.d.ts.map +1 -0
- package/dist/commonjs/searchIndex/api/operations.js +1325 -0
- package/dist/commonjs/searchIndex/api/operations.js.map +7 -0
- package/dist/commonjs/searchIndex/api/options.d.ts +258 -0
- package/dist/commonjs/searchIndex/api/options.d.ts.map +1 -0
- package/dist/commonjs/searchIndex/api/options.js +16 -0
- package/dist/commonjs/searchIndex/api/options.js.map +7 -0
- package/dist/commonjs/searchIndex/api/searchIndexContext.d.ts +15 -0
- package/dist/commonjs/searchIndex/api/searchIndexContext.d.ts.map +1 -0
- package/dist/commonjs/searchIndex/api/searchIndexContext.js +47 -0
- package/dist/commonjs/searchIndex/api/searchIndexContext.js.map +7 -0
- package/dist/commonjs/searchIndex/index.d.ts +3 -0
- package/dist/commonjs/searchIndex/index.d.ts.map +1 -0
- package/dist/commonjs/searchIndex/index.js +28 -0
- package/dist/commonjs/searchIndex/index.js.map +7 -0
- package/dist/commonjs/searchIndex/searchIndexClient.d.ts +75 -0
- package/dist/commonjs/searchIndex/searchIndexClient.d.ts.map +1 -0
- package/dist/commonjs/searchIndex/searchIndexClient.js +163 -0
- package/dist/commonjs/searchIndex/searchIndexClient.js.map +7 -0
- package/dist/commonjs/searchIndexClient.d.ts +6 -22
- package/dist/commonjs/searchIndexClient.d.ts.map +1 -1
- package/dist/commonjs/searchIndexClient.js +628 -977
- package/dist/commonjs/searchIndexClient.js.map +7 -1
- package/dist/commonjs/searchIndexer/api/index.d.ts +5 -0
- package/dist/commonjs/searchIndexer/api/index.d.ts.map +1 -0
- package/dist/commonjs/searchIndexer/api/index.js +65 -0
- package/dist/commonjs/searchIndexer/api/index.js.map +7 -0
- package/dist/commonjs/searchIndexer/api/operations.d.ts +77 -0
- package/dist/commonjs/searchIndexer/api/operations.d.ts.map +1 -0
- package/dist/commonjs/searchIndexer/api/operations.js +791 -0
- package/dist/commonjs/searchIndexer/api/operations.js.map +7 -0
- package/dist/commonjs/searchIndexer/api/options.d.ts +158 -0
- package/dist/commonjs/searchIndexer/api/options.d.ts.map +1 -0
- package/dist/commonjs/searchIndexer/api/options.js +16 -0
- package/dist/commonjs/searchIndexer/api/options.js.map +7 -0
- package/dist/commonjs/searchIndexer/api/searchIndexerContext.d.ts +15 -0
- package/dist/commonjs/searchIndexer/api/searchIndexerContext.d.ts.map +1 -0
- package/dist/commonjs/searchIndexer/api/searchIndexerContext.js +47 -0
- package/dist/commonjs/searchIndexer/api/searchIndexerContext.js.map +7 -0
- package/dist/commonjs/searchIndexer/index.d.ts +3 -0
- package/dist/commonjs/searchIndexer/index.d.ts.map +1 -0
- package/dist/commonjs/searchIndexer/index.js +28 -0
- package/dist/commonjs/searchIndexer/index.js.map +7 -0
- package/dist/commonjs/searchIndexer/searchIndexerClient.d.ts +49 -0
- package/dist/commonjs/searchIndexer/searchIndexerClient.d.ts.map +1 -0
- package/dist/commonjs/searchIndexer/searchIndexerClient.js +115 -0
- package/dist/commonjs/searchIndexer/searchIndexerClient.js.map +7 -0
- package/dist/commonjs/searchIndexerClient.d.ts +4 -17
- package/dist/commonjs/searchIndexerClient.d.ts.map +1 -1
- package/dist/commonjs/searchIndexerClient.js +479 -642
- package/dist/commonjs/searchIndexerClient.js.map +7 -1
- package/dist/commonjs/searchIndexingBufferedSender.d.ts +1 -1
- package/dist/commonjs/searchIndexingBufferedSender.d.ts.map +1 -1
- package/dist/commonjs/searchIndexingBufferedSender.js +320 -313
- package/dist/commonjs/searchIndexingBufferedSender.js.map +7 -1
- package/dist/commonjs/serialization.js +146 -117
- package/dist/commonjs/serialization.js.map +7 -1
- package/dist/commonjs/serviceModels.d.ts +80 -199
- package/dist/commonjs/serviceModels.d.ts.map +1 -1
- package/dist/commonjs/serviceModels.js +189 -657
- package/dist/commonjs/serviceModels.js.map +7 -1
- package/dist/commonjs/serviceUtils.d.ts +52 -10
- package/dist/commonjs/serviceUtils.d.ts.map +1 -1
- package/dist/commonjs/serviceUtils.js +822 -720
- package/dist/commonjs/serviceUtils.js.map +7 -1
- package/dist/commonjs/static-helpers/pagingHelpers.d.ts +74 -0
- package/dist/commonjs/static-helpers/pagingHelpers.d.ts.map +1 -0
- package/dist/commonjs/static-helpers/pagingHelpers.js +143 -0
- package/dist/commonjs/static-helpers/pagingHelpers.js.map +7 -0
- package/dist/commonjs/static-helpers/serialization/build-csv-collection.d.ts +2 -0
- package/dist/commonjs/static-helpers/serialization/build-csv-collection.d.ts.map +1 -0
- package/dist/commonjs/static-helpers/serialization/build-csv-collection.js +30 -0
- package/dist/commonjs/static-helpers/serialization/build-csv-collection.js.map +7 -0
- package/dist/commonjs/static-helpers/serialization/build-newline-collection.d.ts +2 -0
- package/dist/commonjs/static-helpers/serialization/build-newline-collection.d.ts.map +1 -0
- package/dist/commonjs/static-helpers/serialization/build-newline-collection.js +30 -0
- package/dist/commonjs/static-helpers/serialization/build-newline-collection.js.map +7 -0
- package/dist/commonjs/static-helpers/serialization/build-pipe-collection.d.ts +2 -0
- package/dist/commonjs/static-helpers/serialization/build-pipe-collection.d.ts.map +1 -0
- package/dist/commonjs/static-helpers/serialization/build-pipe-collection.js +30 -0
- package/dist/commonjs/static-helpers/serialization/build-pipe-collection.js.map +7 -0
- package/dist/commonjs/static-helpers/serialization/check-prop-undefined.d.ts +8 -0
- package/dist/commonjs/static-helpers/serialization/check-prop-undefined.d.ts.map +1 -0
- package/dist/commonjs/static-helpers/serialization/check-prop-undefined.js +35 -0
- package/dist/commonjs/static-helpers/serialization/check-prop-undefined.js.map +7 -0
- package/dist/commonjs/static-helpers/serialization/parse-csv-collection.d.ts +2 -0
- package/dist/commonjs/static-helpers/serialization/parse-csv-collection.d.ts.map +1 -0
- package/dist/commonjs/static-helpers/serialization/parse-csv-collection.js +30 -0
- package/dist/commonjs/static-helpers/serialization/parse-csv-collection.js.map +7 -0
- package/dist/commonjs/static-helpers/serialization/parse-newline-collection.d.ts +2 -0
- package/dist/commonjs/static-helpers/serialization/parse-newline-collection.d.ts.map +1 -0
- package/dist/commonjs/static-helpers/serialization/parse-newline-collection.js +30 -0
- package/dist/commonjs/static-helpers/serialization/parse-newline-collection.js.map +7 -0
- package/dist/commonjs/static-helpers/serialization/parse-pipe-collection.d.ts +2 -0
- package/dist/commonjs/static-helpers/serialization/parse-pipe-collection.d.ts.map +1 -0
- package/dist/commonjs/static-helpers/serialization/parse-pipe-collection.js +30 -0
- package/dist/commonjs/static-helpers/serialization/parse-pipe-collection.js.map +7 -0
- package/dist/commonjs/static-helpers/serialization/serialize-record.d.ts +2 -0
- package/dist/commonjs/static-helpers/serialization/serialize-record.d.ts.map +1 -0
- package/dist/commonjs/static-helpers/serialization/serialize-record.js +42 -0
- package/dist/commonjs/static-helpers/serialization/serialize-record.js.map +7 -0
- package/dist/commonjs/static-helpers/urlTemplate.d.ts +5 -0
- package/dist/commonjs/static-helpers/urlTemplate.d.ts.map +1 -0
- package/dist/commonjs/static-helpers/urlTemplate.js +197 -0
- package/dist/commonjs/static-helpers/urlTemplate.js.map +7 -0
- package/dist/commonjs/synonymMapHelper.js +45 -26
- package/dist/commonjs/synonymMapHelper.js.map +7 -1
- package/dist/commonjs/timers.d.ts +9 -0
- package/dist/commonjs/timers.d.ts.map +1 -0
- package/dist/commonjs/timers.js +32 -0
- package/dist/commonjs/timers.js.map +7 -0
- package/dist/commonjs/tracing.d.ts +5 -0
- package/dist/commonjs/tracing.d.ts.map +1 -1
- package/dist/commonjs/tracing.js +34 -15
- package/dist/commonjs/tracing.js.map +7 -1
- package/dist/commonjs/tsdoc-metadata.json +1 -1
- package/dist/commonjs/walk.js +76 -67
- package/dist/commonjs/walk.js.map +7 -1
- package/dist/esm/base64.d.ts.map +1 -1
- package/dist/esm/base64.js +3 -2
- package/dist/esm/base64.js.map +1 -1
- package/dist/esm/index.d.ts +13 -11
- package/dist/esm/index.d.ts.map +1 -1
- package/dist/esm/index.js +2 -3
- package/dist/esm/index.js.map +1 -1
- package/dist/esm/indexModels.d.ts +16 -114
- package/dist/esm/indexModels.d.ts.map +1 -1
- package/dist/esm/indexModels.js.map +1 -1
- package/dist/esm/knowledgeBaseModels.d.ts +6 -18
- package/dist/esm/knowledgeBaseModels.d.ts.map +1 -1
- package/dist/esm/knowledgeBaseModels.js.map +1 -1
- package/dist/esm/knowledgeBaseRetrieval/api/index.d.ts +5 -0
- package/dist/esm/knowledgeBaseRetrieval/api/index.d.ts.map +1 -0
- package/dist/esm/knowledgeBaseRetrieval/api/index.js +5 -0
- package/dist/esm/knowledgeBaseRetrieval/api/index.js.map +1 -0
- package/dist/esm/knowledgeBaseRetrieval/api/knowledgeBaseRetrievalContext.d.ts +17 -0
- package/dist/esm/knowledgeBaseRetrieval/api/knowledgeBaseRetrievalContext.d.ts.map +1 -0
- package/dist/esm/knowledgeBaseRetrieval/api/knowledgeBaseRetrievalContext.js +25 -0
- package/dist/esm/knowledgeBaseRetrieval/api/knowledgeBaseRetrievalContext.js.map +1 -0
- package/dist/esm/knowledgeBaseRetrieval/api/operations.d.ts +9 -0
- package/dist/esm/knowledgeBaseRetrieval/api/operations.d.ts.map +1 -0
- package/dist/esm/knowledgeBaseRetrieval/api/operations.js +45 -0
- package/dist/esm/knowledgeBaseRetrieval/api/operations.js.map +1 -0
- package/dist/esm/knowledgeBaseRetrieval/api/options.d.ts +9 -0
- package/dist/esm/knowledgeBaseRetrieval/api/options.d.ts.map +1 -0
- package/dist/esm/knowledgeBaseRetrieval/api/options.js +4 -0
- package/dist/esm/knowledgeBaseRetrieval/api/options.js.map +1 -0
- package/dist/esm/knowledgeBaseRetrieval/index.d.ts +3 -0
- package/dist/esm/knowledgeBaseRetrieval/index.d.ts.map +1 -0
- package/dist/esm/knowledgeBaseRetrieval/index.js +4 -0
- package/dist/esm/knowledgeBaseRetrieval/index.js.map +1 -0
- package/dist/esm/knowledgeBaseRetrieval/knowledgeBaseRetrievalClient.d.ts +15 -0
- package/dist/esm/knowledgeBaseRetrieval/knowledgeBaseRetrievalClient.d.ts.map +1 -0
- package/dist/esm/knowledgeBaseRetrieval/knowledgeBaseRetrievalClient.js +25 -0
- package/dist/esm/knowledgeBaseRetrieval/knowledgeBaseRetrievalClient.js.map +1 -0
- package/dist/esm/knowledgeRetrievalClient.d.ts +5 -5
- package/dist/esm/knowledgeRetrievalClient.d.ts.map +1 -1
- package/dist/esm/knowledgeRetrievalClient.js +11 -19
- package/dist/esm/knowledgeRetrievalClient.js.map +1 -1
- package/dist/esm/models/azure/search/documents/index.d.ts +3 -0
- package/dist/esm/models/azure/search/documents/index.d.ts.map +1 -0
- package/dist/esm/models/azure/search/documents/index.js +4 -0
- package/dist/esm/models/azure/search/documents/index.js.map +1 -0
- package/dist/esm/models/azure/search/documents/indexes/index.d.ts +3 -0
- package/dist/esm/models/azure/search/documents/indexes/index.d.ts.map +1 -0
- package/dist/esm/models/azure/search/documents/indexes/index.js +4 -0
- package/dist/esm/models/azure/search/documents/indexes/index.js.map +1 -0
- package/dist/esm/models/azure/search/documents/indexes/models.d.ts +5170 -0
- package/dist/esm/models/azure/search/documents/indexes/models.d.ts.map +1 -0
- package/dist/esm/models/azure/search/documents/indexes/models.js +6380 -0
- package/dist/esm/models/azure/search/documents/indexes/models.js.map +1 -0
- package/dist/esm/models/azure/search/documents/knowledgeBases/index.d.ts +3 -0
- package/dist/esm/models/azure/search/documents/knowledgeBases/index.d.ts.map +1 -0
- package/dist/esm/models/azure/search/documents/knowledgeBases/index.js +4 -0
- package/dist/esm/models/azure/search/documents/knowledgeBases/index.js.map +1 -0
- package/dist/esm/models/azure/search/documents/knowledgeBases/models.d.ts +492 -0
- package/dist/esm/models/azure/search/documents/knowledgeBases/models.d.ts.map +1 -0
- package/dist/esm/models/azure/search/documents/knowledgeBases/models.js +592 -0
- package/dist/esm/models/azure/search/documents/knowledgeBases/models.js.map +1 -0
- package/dist/esm/models/azure/search/documents/models.d.ts +537 -0
- package/dist/esm/models/azure/search/documents/models.d.ts.map +1 -0
- package/dist/esm/models/azure/search/documents/models.js +593 -0
- package/dist/esm/models/azure/search/documents/models.js.map +1 -0
- package/dist/esm/models/index.d.ts +3 -0
- package/dist/esm/models/index.d.ts.map +1 -0
- package/dist/esm/models/index.js +4 -0
- package/dist/esm/models/index.js.map +1 -0
- package/dist/esm/models/models.d.ts +17 -0
- package/dist/esm/models/models.d.ts.map +1 -0
- package/dist/esm/models/models.js +15 -0
- package/dist/esm/models/models.js.map +1 -0
- package/dist/esm/search/api/index.d.ts +5 -0
- package/dist/esm/search/api/index.d.ts.map +1 -0
- package/dist/esm/search/api/index.js +5 -0
- package/dist/esm/search/api/index.js.map +1 -0
- package/dist/esm/search/api/operations.d.ts +41 -0
- package/dist/esm/search/api/operations.d.ts.map +1 -0
- package/dist/esm/search/api/operations.js +456 -0
- package/dist/esm/search/api/operations.js.map +1 -0
- package/dist/esm/search/api/options.d.ts +244 -0
- package/dist/esm/search/api/options.d.ts.map +1 -0
- package/dist/esm/search/api/options.js +4 -0
- package/dist/esm/search/api/options.js.map +1 -0
- package/dist/esm/search/api/searchContext.d.ts +17 -0
- package/dist/esm/search/api/searchContext.d.ts.map +1 -0
- package/dist/esm/search/api/searchContext.js +25 -0
- package/dist/esm/search/api/searchContext.js.map +1 -0
- package/dist/esm/search/index.d.ts +3 -0
- package/dist/esm/search/index.d.ts.map +1 -0
- package/dist/esm/search/index.js +4 -0
- package/dist/esm/search/index.js.map +1 -0
- package/dist/esm/search/searchClient.d.ts +31 -0
- package/dist/esm/search/searchClient.d.ts.map +1 -0
- package/dist/esm/search/searchClient.js +57 -0
- package/dist/esm/search/searchClient.js.map +1 -0
- package/dist/esm/searchClient.d.ts +4 -6
- package/dist/esm/searchClient.d.ts.map +1 -1
- package/dist/esm/searchClient.js +93 -238
- package/dist/esm/searchClient.js.map +1 -1
- package/dist/esm/searchIndex/api/index.d.ts +5 -0
- package/dist/esm/searchIndex/api/index.d.ts.map +1 -0
- package/dist/esm/searchIndex/api/index.js +5 -0
- package/dist/esm/searchIndex/api/index.js.map +1 -0
- package/dist/esm/searchIndex/api/operations.d.ts +128 -0
- package/dist/esm/searchIndex/api/operations.d.ts.map +1 -0
- package/dist/esm/searchIndex/api/operations.js +1123 -0
- package/dist/esm/searchIndex/api/operations.js.map +1 -0
- package/dist/esm/searchIndex/api/options.d.ts +258 -0
- package/dist/esm/searchIndex/api/options.d.ts.map +1 -0
- package/dist/esm/searchIndex/api/options.js +4 -0
- package/dist/esm/searchIndex/api/options.js.map +1 -0
- package/dist/esm/searchIndex/api/searchIndexContext.d.ts +15 -0
- package/dist/esm/searchIndex/api/searchIndexContext.d.ts.map +1 -0
- package/dist/esm/searchIndex/api/searchIndexContext.js +25 -0
- package/dist/esm/searchIndex/api/searchIndexContext.js.map +1 -0
- package/dist/esm/searchIndex/index.d.ts +3 -0
- package/dist/esm/searchIndex/index.d.ts.map +1 -0
- package/dist/esm/searchIndex/index.js +4 -0
- package/dist/esm/searchIndex/index.js.map +1 -0
- package/dist/esm/searchIndex/searchIndexClient.d.ts +75 -0
- package/dist/esm/searchIndex/searchIndexClient.d.ts.map +1 -0
- package/dist/esm/searchIndex/searchIndexClient.js +141 -0
- package/dist/esm/searchIndex/searchIndexClient.js.map +1 -0
- package/dist/esm/searchIndexClient.d.ts +6 -22
- package/dist/esm/searchIndexClient.d.ts.map +1 -1
- package/dist/esm/searchIndexClient.js +108 -616
- package/dist/esm/searchIndexClient.js.map +1 -1
- package/dist/esm/searchIndexer/api/index.d.ts +5 -0
- package/dist/esm/searchIndexer/api/index.d.ts.map +1 -0
- package/dist/esm/searchIndexer/api/index.js +5 -0
- package/dist/esm/searchIndexer/api/index.js.map +1 -0
- package/dist/esm/searchIndexer/api/operations.d.ts +77 -0
- package/dist/esm/searchIndexer/api/operations.d.ts.map +1 -0
- package/dist/esm/searchIndexer/api/operations.js +679 -0
- package/dist/esm/searchIndexer/api/operations.js.map +1 -0
- package/dist/esm/searchIndexer/api/options.d.ts +158 -0
- package/dist/esm/searchIndexer/api/options.d.ts.map +1 -0
- package/dist/esm/searchIndexer/api/options.js +4 -0
- package/dist/esm/searchIndexer/api/options.js.map +1 -0
- package/dist/esm/searchIndexer/api/searchIndexerContext.d.ts +15 -0
- package/dist/esm/searchIndexer/api/searchIndexerContext.d.ts.map +1 -0
- package/dist/esm/searchIndexer/api/searchIndexerContext.js +25 -0
- package/dist/esm/searchIndexer/api/searchIndexerContext.js.map +1 -0
- package/dist/esm/searchIndexer/index.d.ts +3 -0
- package/dist/esm/searchIndexer/index.d.ts.map +1 -0
- package/dist/esm/searchIndexer/index.js +4 -0
- package/dist/esm/searchIndexer/index.js.map +1 -0
- package/dist/esm/searchIndexer/searchIndexerClient.d.ts +49 -0
- package/dist/esm/searchIndexer/searchIndexerClient.d.ts.map +1 -0
- package/dist/esm/searchIndexer/searchIndexerClient.js +93 -0
- package/dist/esm/searchIndexer/searchIndexerClient.js.map +1 -0
- package/dist/esm/searchIndexerClient.d.ts +4 -17
- package/dist/esm/searchIndexerClient.d.ts.map +1 -1
- package/dist/esm/searchIndexerClient.js +74 -350
- package/dist/esm/searchIndexerClient.js.map +1 -1
- package/dist/esm/searchIndexingBufferedSender.d.ts +1 -1
- package/dist/esm/searchIndexingBufferedSender.d.ts.map +1 -1
- package/dist/esm/searchIndexingBufferedSender.js +3 -7
- package/dist/esm/searchIndexingBufferedSender.js.map +1 -1
- package/dist/esm/serviceModels.d.ts +80 -199
- package/dist/esm/serviceModels.d.ts.map +1 -1
- package/dist/esm/serviceModels.js.map +1 -1
- package/dist/esm/serviceUtils.d.ts +52 -10
- package/dist/esm/serviceUtils.d.ts.map +1 -1
- package/dist/esm/serviceUtils.js +210 -167
- package/dist/esm/serviceUtils.js.map +1 -1
- package/dist/esm/static-helpers/pagingHelpers.d.ts +74 -0
- package/dist/esm/static-helpers/pagingHelpers.d.ts.map +1 -0
- package/dist/esm/static-helpers/pagingHelpers.js +143 -0
- package/dist/esm/static-helpers/pagingHelpers.js.map +1 -0
- package/dist/esm/static-helpers/serialization/build-csv-collection.d.ts +2 -0
- package/dist/esm/static-helpers/serialization/build-csv-collection.d.ts.map +1 -0
- package/dist/esm/static-helpers/serialization/build-csv-collection.js +6 -0
- package/dist/esm/static-helpers/serialization/build-csv-collection.js.map +1 -0
- package/dist/esm/static-helpers/serialization/build-newline-collection.d.ts +2 -0
- package/dist/esm/static-helpers/serialization/build-newline-collection.d.ts.map +1 -0
- package/dist/esm/static-helpers/serialization/build-newline-collection.js +6 -0
- package/dist/esm/static-helpers/serialization/build-newline-collection.js.map +1 -0
- package/dist/esm/static-helpers/serialization/build-pipe-collection.d.ts +2 -0
- package/dist/esm/static-helpers/serialization/build-pipe-collection.d.ts.map +1 -0
- package/dist/esm/static-helpers/serialization/build-pipe-collection.js +6 -0
- package/dist/esm/static-helpers/serialization/build-pipe-collection.js.map +1 -0
- package/dist/esm/static-helpers/serialization/check-prop-undefined.d.ts +8 -0
- package/dist/esm/static-helpers/serialization/check-prop-undefined.d.ts.map +1 -0
- package/dist/esm/static-helpers/serialization/check-prop-undefined.js +17 -0
- package/dist/esm/static-helpers/serialization/check-prop-undefined.js.map +1 -0
- package/dist/esm/static-helpers/serialization/parse-csv-collection.d.ts +2 -0
- package/dist/esm/static-helpers/serialization/parse-csv-collection.d.ts.map +1 -0
- package/dist/esm/static-helpers/serialization/parse-csv-collection.js +6 -0
- package/dist/esm/static-helpers/serialization/parse-csv-collection.js.map +1 -0
- package/dist/esm/static-helpers/serialization/parse-newline-collection.d.ts +2 -0
- package/dist/esm/static-helpers/serialization/parse-newline-collection.d.ts.map +1 -0
- package/dist/esm/static-helpers/serialization/parse-newline-collection.js +6 -0
- package/dist/esm/static-helpers/serialization/parse-newline-collection.js.map +1 -0
- package/dist/esm/static-helpers/serialization/parse-pipe-collection.d.ts +2 -0
- package/dist/esm/static-helpers/serialization/parse-pipe-collection.d.ts.map +1 -0
- package/dist/esm/static-helpers/serialization/parse-pipe-collection.js +6 -0
- package/dist/esm/static-helpers/serialization/parse-pipe-collection.js.map +1 -0
- package/dist/esm/static-helpers/serialization/serialize-record.d.ts +2 -0
- package/dist/esm/static-helpers/serialization/serialize-record.d.ts.map +1 -0
- package/dist/esm/static-helpers/serialization/serialize-record.js +19 -0
- package/dist/esm/static-helpers/serialization/serialize-record.js.map +1 -0
- package/dist/esm/static-helpers/urlTemplate.d.ts +5 -0
- package/dist/esm/static-helpers/urlTemplate.d.ts.map +1 -0
- package/dist/esm/static-helpers/urlTemplate.js +200 -0
- package/dist/esm/static-helpers/urlTemplate.js.map +1 -0
- package/dist/esm/timers.d.ts +9 -0
- package/dist/esm/timers.d.ts.map +1 -0
- package/dist/esm/timers.js +15 -0
- package/dist/esm/timers.js.map +1 -0
- package/dist/esm/tracing.d.ts +5 -0
- package/dist/esm/tracing.d.ts.map +1 -1
- package/dist/esm/tracing.js +1 -1
- package/dist/esm/tracing.js.map +1 -1
- package/dist/esm/walk.js +1 -1
- package/dist/esm/walk.js.map +1 -1
- package/dist/react-native/base64.d.ts.map +1 -1
- package/dist/react-native/base64.js +3 -2
- package/dist/react-native/base64.js.map +1 -1
- package/dist/react-native/index.d.ts +14 -12
- package/dist/react-native/index.d.ts.map +1 -1
- package/dist/react-native/index.js +3 -4
- package/dist/react-native/index.js.map +1 -1
- package/dist/react-native/indexModels.d.ts +16 -114
- package/dist/react-native/indexModels.d.ts.map +1 -1
- package/dist/react-native/indexModels.js.map +1 -1
- package/dist/react-native/knowledgeBaseModels.d.ts +6 -18
- package/dist/react-native/knowledgeBaseModels.d.ts.map +1 -1
- package/dist/react-native/knowledgeBaseModels.js.map +1 -1
- package/dist/react-native/knowledgeBaseRetrieval/api/index.d.ts +5 -0
- package/dist/react-native/knowledgeBaseRetrieval/api/index.d.ts.map +1 -0
- package/dist/react-native/knowledgeBaseRetrieval/api/index.js +5 -0
- package/dist/react-native/knowledgeBaseRetrieval/api/index.js.map +1 -0
- package/dist/react-native/knowledgeBaseRetrieval/api/knowledgeBaseRetrievalContext.d.ts +17 -0
- package/dist/react-native/knowledgeBaseRetrieval/api/knowledgeBaseRetrievalContext.d.ts.map +1 -0
- package/dist/react-native/knowledgeBaseRetrieval/api/knowledgeBaseRetrievalContext.js +25 -0
- package/dist/react-native/knowledgeBaseRetrieval/api/knowledgeBaseRetrievalContext.js.map +1 -0
- package/dist/react-native/knowledgeBaseRetrieval/api/operations.d.ts +9 -0
- package/dist/react-native/knowledgeBaseRetrieval/api/operations.d.ts.map +1 -0
- package/dist/react-native/knowledgeBaseRetrieval/api/operations.js +45 -0
- package/dist/react-native/knowledgeBaseRetrieval/api/operations.js.map +1 -0
- package/dist/react-native/knowledgeBaseRetrieval/api/options.d.ts +9 -0
- package/dist/react-native/knowledgeBaseRetrieval/api/options.d.ts.map +1 -0
- package/dist/react-native/knowledgeBaseRetrieval/api/options.js +4 -0
- package/dist/react-native/knowledgeBaseRetrieval/api/options.js.map +1 -0
- package/dist/react-native/knowledgeBaseRetrieval/index.d.ts +3 -0
- package/dist/react-native/knowledgeBaseRetrieval/index.d.ts.map +1 -0
- package/dist/react-native/knowledgeBaseRetrieval/index.js +4 -0
- package/dist/react-native/knowledgeBaseRetrieval/index.js.map +1 -0
- package/dist/react-native/knowledgeBaseRetrieval/knowledgeBaseRetrievalClient.d.ts +15 -0
- package/dist/react-native/knowledgeBaseRetrieval/knowledgeBaseRetrievalClient.d.ts.map +1 -0
- package/dist/react-native/knowledgeBaseRetrieval/knowledgeBaseRetrievalClient.js +25 -0
- package/dist/react-native/knowledgeBaseRetrieval/knowledgeBaseRetrievalClient.js.map +1 -0
- package/dist/react-native/knowledgeRetrievalClient.d.ts +5 -5
- package/dist/react-native/knowledgeRetrievalClient.d.ts.map +1 -1
- package/dist/react-native/knowledgeRetrievalClient.js +11 -19
- package/dist/react-native/knowledgeRetrievalClient.js.map +1 -1
- package/dist/react-native/models/azure/search/documents/index.d.ts +3 -0
- package/dist/react-native/models/azure/search/documents/index.d.ts.map +1 -0
- package/dist/react-native/models/azure/search/documents/index.js +4 -0
- package/dist/react-native/models/azure/search/documents/index.js.map +1 -0
- package/dist/react-native/models/azure/search/documents/indexes/index.d.ts +3 -0
- package/dist/react-native/models/azure/search/documents/indexes/index.d.ts.map +1 -0
- package/dist/react-native/models/azure/search/documents/indexes/index.js +4 -0
- package/dist/react-native/models/azure/search/documents/indexes/index.js.map +1 -0
- package/dist/react-native/models/azure/search/documents/indexes/models.d.ts +5170 -0
- package/dist/react-native/models/azure/search/documents/indexes/models.d.ts.map +1 -0
- package/dist/react-native/models/azure/search/documents/indexes/models.js +6380 -0
- package/dist/react-native/models/azure/search/documents/indexes/models.js.map +1 -0
- package/dist/react-native/models/azure/search/documents/knowledgeBases/index.d.ts +3 -0
- package/dist/react-native/models/azure/search/documents/knowledgeBases/index.d.ts.map +1 -0
- package/dist/react-native/models/azure/search/documents/knowledgeBases/index.js +4 -0
- package/dist/react-native/models/azure/search/documents/knowledgeBases/index.js.map +1 -0
- package/dist/react-native/models/azure/search/documents/knowledgeBases/models.d.ts +492 -0
- package/dist/react-native/models/azure/search/documents/knowledgeBases/models.d.ts.map +1 -0
- package/dist/react-native/models/azure/search/documents/knowledgeBases/models.js +592 -0
- package/dist/react-native/models/azure/search/documents/knowledgeBases/models.js.map +1 -0
- package/dist/react-native/models/azure/search/documents/models.d.ts +537 -0
- package/dist/react-native/models/azure/search/documents/models.d.ts.map +1 -0
- package/dist/react-native/models/azure/search/documents/models.js +593 -0
- package/dist/react-native/models/azure/search/documents/models.js.map +1 -0
- package/dist/react-native/models/index.d.ts +3 -0
- package/dist/react-native/models/index.d.ts.map +1 -0
- package/dist/react-native/models/index.js +4 -0
- package/dist/react-native/models/index.js.map +1 -0
- package/dist/react-native/models/models.d.ts +17 -0
- package/dist/react-native/models/models.d.ts.map +1 -0
- package/dist/react-native/models/models.js +15 -0
- package/dist/react-native/models/models.js.map +1 -0
- package/dist/react-native/search/api/index.d.ts +5 -0
- package/dist/react-native/search/api/index.d.ts.map +1 -0
- package/dist/react-native/search/api/index.js +5 -0
- package/dist/react-native/search/api/index.js.map +1 -0
- package/dist/react-native/search/api/operations.d.ts +41 -0
- package/dist/react-native/search/api/operations.d.ts.map +1 -0
- package/dist/react-native/search/api/operations.js +456 -0
- package/dist/react-native/search/api/operations.js.map +1 -0
- package/dist/react-native/search/api/options.d.ts +244 -0
- package/dist/react-native/search/api/options.d.ts.map +1 -0
- package/dist/react-native/search/api/options.js +4 -0
- package/dist/react-native/search/api/options.js.map +1 -0
- package/dist/react-native/search/api/searchContext.d.ts +17 -0
- package/dist/react-native/search/api/searchContext.d.ts.map +1 -0
- package/dist/react-native/search/api/searchContext.js +25 -0
- package/dist/react-native/search/api/searchContext.js.map +1 -0
- package/dist/react-native/search/index.d.ts +3 -0
- package/dist/react-native/search/index.d.ts.map +1 -0
- package/dist/react-native/search/index.js +4 -0
- package/dist/react-native/search/index.js.map +1 -0
- package/dist/react-native/search/searchClient.d.ts +31 -0
- package/dist/react-native/search/searchClient.d.ts.map +1 -0
- package/dist/react-native/search/searchClient.js +57 -0
- package/dist/react-native/search/searchClient.js.map +1 -0
- package/dist/react-native/searchClient.d.ts +4 -6
- package/dist/react-native/searchClient.d.ts.map +1 -1
- package/dist/react-native/searchClient.js +93 -238
- package/dist/react-native/searchClient.js.map +1 -1
- package/dist/react-native/searchIndex/api/index.d.ts +5 -0
- package/dist/react-native/searchIndex/api/index.d.ts.map +1 -0
- package/dist/react-native/searchIndex/api/index.js +5 -0
- package/dist/react-native/searchIndex/api/index.js.map +1 -0
- package/dist/react-native/searchIndex/api/operations.d.ts +128 -0
- package/dist/react-native/searchIndex/api/operations.d.ts.map +1 -0
- package/dist/react-native/searchIndex/api/operations.js +1123 -0
- package/dist/react-native/searchIndex/api/operations.js.map +1 -0
- package/dist/react-native/searchIndex/api/options.d.ts +258 -0
- package/dist/react-native/searchIndex/api/options.d.ts.map +1 -0
- package/dist/react-native/searchIndex/api/options.js +4 -0
- package/dist/react-native/searchIndex/api/options.js.map +1 -0
- package/dist/react-native/searchIndex/api/searchIndexContext.d.ts +15 -0
- package/dist/react-native/searchIndex/api/searchIndexContext.d.ts.map +1 -0
- package/dist/react-native/searchIndex/api/searchIndexContext.js +25 -0
- package/dist/react-native/searchIndex/api/searchIndexContext.js.map +1 -0
- package/dist/react-native/searchIndex/index.d.ts +3 -0
- package/dist/react-native/searchIndex/index.d.ts.map +1 -0
- package/dist/react-native/searchIndex/index.js +4 -0
- package/dist/react-native/searchIndex/index.js.map +1 -0
- package/dist/react-native/searchIndex/searchIndexClient.d.ts +75 -0
- package/dist/react-native/searchIndex/searchIndexClient.d.ts.map +1 -0
- package/dist/react-native/searchIndex/searchIndexClient.js +141 -0
- package/dist/react-native/searchIndex/searchIndexClient.js.map +1 -0
- package/dist/react-native/searchIndexClient.d.ts +6 -22
- package/dist/react-native/searchIndexClient.d.ts.map +1 -1
- package/dist/react-native/searchIndexClient.js +108 -616
- package/dist/react-native/searchIndexClient.js.map +1 -1
- package/dist/react-native/searchIndexer/api/index.d.ts +5 -0
- package/dist/react-native/searchIndexer/api/index.d.ts.map +1 -0
- package/dist/react-native/searchIndexer/api/index.js +5 -0
- package/dist/react-native/searchIndexer/api/index.js.map +1 -0
- package/dist/react-native/searchIndexer/api/operations.d.ts +77 -0
- package/dist/react-native/searchIndexer/api/operations.d.ts.map +1 -0
- package/dist/react-native/searchIndexer/api/operations.js +679 -0
- package/dist/react-native/searchIndexer/api/operations.js.map +1 -0
- package/dist/react-native/searchIndexer/api/options.d.ts +158 -0
- package/dist/react-native/searchIndexer/api/options.d.ts.map +1 -0
- package/dist/react-native/searchIndexer/api/options.js +4 -0
- package/dist/react-native/searchIndexer/api/options.js.map +1 -0
- package/dist/react-native/searchIndexer/api/searchIndexerContext.d.ts +15 -0
- package/dist/react-native/searchIndexer/api/searchIndexerContext.d.ts.map +1 -0
- package/dist/react-native/searchIndexer/api/searchIndexerContext.js +25 -0
- package/dist/react-native/searchIndexer/api/searchIndexerContext.js.map +1 -0
- package/dist/react-native/searchIndexer/index.d.ts +3 -0
- package/dist/react-native/searchIndexer/index.d.ts.map +1 -0
- package/dist/react-native/searchIndexer/index.js +4 -0
- package/dist/react-native/searchIndexer/index.js.map +1 -0
- package/dist/react-native/searchIndexer/searchIndexerClient.d.ts +49 -0
- package/dist/react-native/searchIndexer/searchIndexerClient.d.ts.map +1 -0
- package/dist/react-native/searchIndexer/searchIndexerClient.js +93 -0
- package/dist/react-native/searchIndexer/searchIndexerClient.js.map +1 -0
- package/dist/react-native/searchIndexerClient.d.ts +4 -17
- package/dist/react-native/searchIndexerClient.d.ts.map +1 -1
- package/dist/react-native/searchIndexerClient.js +74 -350
- package/dist/react-native/searchIndexerClient.js.map +1 -1
- package/dist/react-native/searchIndexingBufferedSender.d.ts +1 -1
- package/dist/react-native/searchIndexingBufferedSender.d.ts.map +1 -1
- package/dist/react-native/searchIndexingBufferedSender.js +3 -7
- package/dist/react-native/searchIndexingBufferedSender.js.map +1 -1
- package/dist/react-native/serviceModels.d.ts +80 -199
- package/dist/react-native/serviceModels.d.ts.map +1 -1
- package/dist/react-native/serviceModels.js.map +1 -1
- package/dist/react-native/serviceUtils.d.ts +52 -10
- package/dist/react-native/serviceUtils.d.ts.map +1 -1
- package/dist/react-native/serviceUtils.js +210 -167
- package/dist/react-native/serviceUtils.js.map +1 -1
- package/dist/react-native/static-helpers/pagingHelpers.d.ts +74 -0
- package/dist/react-native/static-helpers/pagingHelpers.d.ts.map +1 -0
- package/dist/react-native/static-helpers/pagingHelpers.js +143 -0
- package/dist/react-native/static-helpers/pagingHelpers.js.map +1 -0
- package/dist/react-native/static-helpers/serialization/build-csv-collection.d.ts +2 -0
- package/dist/react-native/static-helpers/serialization/build-csv-collection.d.ts.map +1 -0
- package/dist/react-native/static-helpers/serialization/build-csv-collection.js +6 -0
- package/dist/react-native/static-helpers/serialization/build-csv-collection.js.map +1 -0
- package/dist/react-native/static-helpers/serialization/build-newline-collection.d.ts +2 -0
- package/dist/react-native/static-helpers/serialization/build-newline-collection.d.ts.map +1 -0
- package/dist/react-native/static-helpers/serialization/build-newline-collection.js +6 -0
- package/dist/react-native/static-helpers/serialization/build-newline-collection.js.map +1 -0
- package/dist/react-native/static-helpers/serialization/build-pipe-collection.d.ts +2 -0
- package/dist/react-native/static-helpers/serialization/build-pipe-collection.d.ts.map +1 -0
- package/dist/react-native/static-helpers/serialization/build-pipe-collection.js +6 -0
- package/dist/react-native/static-helpers/serialization/build-pipe-collection.js.map +1 -0
- package/dist/react-native/static-helpers/serialization/check-prop-undefined.d.ts +8 -0
- package/dist/react-native/static-helpers/serialization/check-prop-undefined.d.ts.map +1 -0
- package/dist/react-native/static-helpers/serialization/check-prop-undefined.js +17 -0
- package/dist/react-native/static-helpers/serialization/check-prop-undefined.js.map +1 -0
- package/dist/react-native/static-helpers/serialization/parse-csv-collection.d.ts +2 -0
- package/dist/react-native/static-helpers/serialization/parse-csv-collection.d.ts.map +1 -0
- package/dist/react-native/static-helpers/serialization/parse-csv-collection.js +6 -0
- package/dist/react-native/static-helpers/serialization/parse-csv-collection.js.map +1 -0
- package/dist/react-native/static-helpers/serialization/parse-newline-collection.d.ts +2 -0
- package/dist/react-native/static-helpers/serialization/parse-newline-collection.d.ts.map +1 -0
- package/dist/react-native/static-helpers/serialization/parse-newline-collection.js +6 -0
- package/dist/react-native/static-helpers/serialization/parse-newline-collection.js.map +1 -0
- package/dist/react-native/static-helpers/serialization/parse-pipe-collection.d.ts +2 -0
- package/dist/react-native/static-helpers/serialization/parse-pipe-collection.d.ts.map +1 -0
- package/dist/react-native/static-helpers/serialization/parse-pipe-collection.js +6 -0
- package/dist/react-native/static-helpers/serialization/parse-pipe-collection.js.map +1 -0
- package/dist/react-native/static-helpers/serialization/serialize-record.d.ts +2 -0
- package/dist/react-native/static-helpers/serialization/serialize-record.d.ts.map +1 -0
- package/dist/react-native/static-helpers/serialization/serialize-record.js +19 -0
- package/dist/react-native/static-helpers/serialization/serialize-record.js.map +1 -0
- package/dist/react-native/static-helpers/urlTemplate.d.ts +5 -0
- package/dist/react-native/static-helpers/urlTemplate.d.ts.map +1 -0
- package/dist/react-native/static-helpers/urlTemplate.js +200 -0
- package/dist/react-native/static-helpers/urlTemplate.js.map +1 -0
- package/dist/react-native/synonymMapHelper-browser.d.mts +11 -0
- package/dist/react-native/synonymMapHelper-browser.d.mts.map +1 -0
- package/dist/react-native/synonymMapHelper-browser.mjs +14 -0
- package/dist/react-native/synonymMapHelper-browser.mjs.map +1 -0
- package/dist/react-native/synonymMapHelper-react-native.d.mts +2 -0
- package/dist/react-native/synonymMapHelper-react-native.d.mts.map +1 -0
- package/dist/react-native/synonymMapHelper-react-native.mjs +4 -0
- package/dist/react-native/synonymMapHelper-react-native.mjs.map +1 -0
- package/dist/react-native/timers-browser.d.mts +8 -0
- package/dist/react-native/timers-browser.d.mts.map +1 -0
- package/dist/react-native/timers-browser.mjs +13 -0
- package/dist/react-native/timers-browser.mjs.map +1 -0
- package/dist/react-native/timers-react-native.d.mts +2 -0
- package/dist/react-native/timers-react-native.d.mts.map +1 -0
- package/dist/react-native/timers-react-native.mjs +4 -0
- package/dist/react-native/timers-react-native.mjs.map +1 -0
- package/dist/react-native/tracing.d.ts +5 -0
- package/dist/react-native/tracing.d.ts.map +1 -1
- package/dist/react-native/tracing.js +1 -1
- package/dist/react-native/tracing.js.map +1 -1
- package/dist/react-native/walk.js +1 -1
- package/dist/react-native/walk.js.map +1 -1
- package/package.json +260 -55
- package/dist/browser/base64-browser.d.mts.map +0 -1
- package/dist/browser/base64-browser.mjs.map +0 -1
- package/dist/browser/errorModels.d.ts +0 -56
- package/dist/browser/errorModels.d.ts.map +0 -1
- package/dist/browser/errorModels.js +0 -4
- package/dist/browser/errorModels.js.map +0 -1
- package/dist/browser/generated/data/index.d.ts +0 -4
- package/dist/browser/generated/data/index.d.ts.map +0 -1
- package/dist/browser/generated/data/index.js +0 -11
- package/dist/browser/generated/data/index.js.map +0 -1
- package/dist/browser/generated/data/models/index.d.ts +0 -1356
- package/dist/browser/generated/data/models/index.d.ts.map +0 -1
- package/dist/browser/generated/data/models/index.js +0 -290
- package/dist/browser/generated/data/models/index.js.map +0 -1
- package/dist/browser/generated/data/models/mappers.d.ts +0 -52
- package/dist/browser/generated/data/models/mappers.d.ts.map +0 -1
- package/dist/browser/generated/data/models/mappers.js +0 -1547
- package/dist/browser/generated/data/models/mappers.js.map +0 -1
- package/dist/browser/generated/data/models/parameters.d.ts +0 -64
- package/dist/browser/generated/data/models/parameters.d.ts.map +0 -1
- package/dist/browser/generated/data/models/parameters.js +0 -637
- package/dist/browser/generated/data/models/parameters.js.map +0 -1
- package/dist/browser/generated/data/operations/documents.d.ts +0 -70
- package/dist/browser/generated/data/operations/documents.d.ts.map +0 -1
- package/dist/browser/generated/data/operations/documents.js +0 -332
- package/dist/browser/generated/data/operations/documents.js.map +0 -1
- package/dist/browser/generated/data/operations/index.d.ts +0 -2
- package/dist/browser/generated/data/operations/index.d.ts.map +0 -1
- package/dist/browser/generated/data/operations/index.js +0 -9
- package/dist/browser/generated/data/operations/index.js.map +0 -1
- package/dist/browser/generated/data/operationsInterfaces/documents.d.ts +0 -62
- package/dist/browser/generated/data/operationsInterfaces/documents.d.ts.map +0 -1
- package/dist/browser/generated/data/operationsInterfaces/documents.js +0 -9
- package/dist/browser/generated/data/operationsInterfaces/documents.js.map +0 -1
- package/dist/browser/generated/data/operationsInterfaces/index.d.ts +0 -2
- package/dist/browser/generated/data/operationsInterfaces/index.d.ts.map +0 -1
- package/dist/browser/generated/data/operationsInterfaces/index.js +0 -9
- package/dist/browser/generated/data/operationsInterfaces/index.js.map +0 -1
- package/dist/browser/generated/data/searchClient.d.ts +0 -21
- package/dist/browser/generated/data/searchClient.d.ts.map +0 -1
- package/dist/browser/generated/data/searchClient.js +0 -88
- package/dist/browser/generated/data/searchClient.js.map +0 -1
- package/dist/browser/generated/knowledgeBase/index.d.ts +0 -4
- package/dist/browser/generated/knowledgeBase/index.d.ts.map +0 -1
- package/dist/browser/generated/knowledgeBase/index.js +0 -11
- package/dist/browser/generated/knowledgeBase/index.js.map +0 -1
- package/dist/browser/generated/knowledgeBase/models/index.d.ts +0 -570
- package/dist/browser/generated/knowledgeBase/models/index.d.ts.map +0 -1
- package/dist/browser/generated/knowledgeBase/models/index.js +0 -62
- package/dist/browser/generated/knowledgeBase/models/index.js.map +0 -1
- package/dist/browser/generated/knowledgeBase/models/mappers.d.ts +0 -89
- package/dist/browser/generated/knowledgeBase/models/mappers.d.ts.map +0 -1
- package/dist/browser/generated/knowledgeBase/models/mappers.js +0 -1325
- package/dist/browser/generated/knowledgeBase/models/mappers.js.map +0 -1
- package/dist/browser/generated/knowledgeBase/models/parameters.d.ts +0 -9
- package/dist/browser/generated/knowledgeBase/models/parameters.d.ts.map +0 -1
- package/dist/browser/generated/knowledgeBase/models/parameters.js +0 -75
- package/dist/browser/generated/knowledgeBase/models/parameters.js.map +0 -1
- package/dist/browser/generated/knowledgeBase/operations/index.d.ts +0 -2
- package/dist/browser/generated/knowledgeBase/operations/index.d.ts.map +0 -1
- package/dist/browser/generated/knowledgeBase/operations/index.js +0 -9
- package/dist/browser/generated/knowledgeBase/operations/index.js.map +0 -1
- package/dist/browser/generated/knowledgeBase/operations/knowledgeRetrieval.d.ts +0 -19
- package/dist/browser/generated/knowledgeBase/operations/knowledgeRetrieval.d.ts.map +0 -1
- package/dist/browser/generated/knowledgeBase/operations/knowledgeRetrieval.js +0 -57
- package/dist/browser/generated/knowledgeBase/operations/knowledgeRetrieval.js.map +0 -1
- package/dist/browser/generated/knowledgeBase/operationsInterfaces/index.d.ts +0 -2
- package/dist/browser/generated/knowledgeBase/operationsInterfaces/index.d.ts.map +0 -1
- package/dist/browser/generated/knowledgeBase/operationsInterfaces/index.js +0 -9
- package/dist/browser/generated/knowledgeBase/operationsInterfaces/index.js.map +0 -1
- package/dist/browser/generated/knowledgeBase/operationsInterfaces/knowledgeRetrieval.d.ts +0 -11
- package/dist/browser/generated/knowledgeBase/operationsInterfaces/knowledgeRetrieval.d.ts.map +0 -1
- package/dist/browser/generated/knowledgeBase/operationsInterfaces/knowledgeRetrieval.js +0 -9
- package/dist/browser/generated/knowledgeBase/operationsInterfaces/knowledgeRetrieval.js.map +0 -1
- package/dist/browser/generated/knowledgeBase/searchClient.d.ts +0 -21
- package/dist/browser/generated/knowledgeBase/searchClient.d.ts.map +0 -1
- package/dist/browser/generated/knowledgeBase/searchClient.js +0 -88
- package/dist/browser/generated/knowledgeBase/searchClient.js.map +0 -1
- package/dist/browser/generated/service/index.d.ts +0 -4
- package/dist/browser/generated/service/index.d.ts.map +0 -1
- package/dist/browser/generated/service/index.js +0 -11
- package/dist/browser/generated/service/index.js.map +0 -1
- package/dist/browser/generated/service/models/index.d.ts +0 -5693
- package/dist/browser/generated/service/models/index.d.ts.map +0 -1
- package/dist/browser/generated/service/models/index.js +0 -1706
- package/dist/browser/generated/service/models/index.js.map +0 -1
- package/dist/browser/generated/service/models/mappers.d.ts +0 -358
- package/dist/browser/generated/service/models/mappers.d.ts.map +0 -1
- package/dist/browser/generated/service/models/mappers.js +0 -8157
- package/dist/browser/generated/service/models/mappers.js.map +0 -1
- package/dist/browser/generated/service/models/parameters.d.ts +0 -34
- package/dist/browser/generated/service/models/parameters.d.ts.map +0 -1
- package/dist/browser/generated/service/models/parameters.js +0 -255
- package/dist/browser/generated/service/models/parameters.js.map +0 -1
- package/dist/browser/generated/service/operations/aliases.d.ts +0 -44
- package/dist/browser/generated/service/operations/aliases.d.ts.map +0 -1
- package/dist/browser/generated/service/operations/aliases.js +0 -161
- package/dist/browser/generated/service/operations/aliases.js.map +0 -1
- package/dist/browser/generated/service/operations/dataSources.d.ts +0 -43
- package/dist/browser/generated/service/operations/dataSources.d.ts.map +0 -1
- package/dist/browser/generated/service/operations/dataSources.js +0 -163
- package/dist/browser/generated/service/operations/dataSources.js.map +0 -1
- package/dist/browser/generated/service/operations/index.d.ts +0 -9
- package/dist/browser/generated/service/operations/index.d.ts.map +0 -1
- package/dist/browser/generated/service/operations/index.js +0 -16
- package/dist/browser/generated/service/operations/index.js.map +0 -1
- package/dist/browser/generated/service/operations/indexers.d.ts +0 -74
- package/dist/browser/generated/service/operations/indexers.d.ts.map +0 -1
- package/dist/browser/generated/service/operations/indexers.js +0 -281
- package/dist/browser/generated/service/operations/indexers.js.map +0 -1
- package/dist/browser/generated/service/operations/indexes.d.ts +0 -58
- package/dist/browser/generated/service/operations/indexes.d.ts.map +0 -1
- package/dist/browser/generated/service/operations/indexes.js +0 -213
- package/dist/browser/generated/service/operations/indexes.js.map +0 -1
- package/dist/browser/generated/service/operations/knowledgeBases.d.ts +0 -43
- package/dist/browser/generated/service/operations/knowledgeBases.d.ts.map +0 -1
- package/dist/browser/generated/service/operations/knowledgeBases.js +0 -160
- package/dist/browser/generated/service/operations/knowledgeBases.js.map +0 -1
- package/dist/browser/generated/service/operations/knowledgeSources.d.ts +0 -49
- package/dist/browser/generated/service/operations/knowledgeSources.d.ts.map +0 -1
- package/dist/browser/generated/service/operations/knowledgeSources.js +0 -184
- package/dist/browser/generated/service/operations/knowledgeSources.js.map +0 -1
- package/dist/browser/generated/service/operations/skillsets.d.ts +0 -50
- package/dist/browser/generated/service/operations/skillsets.d.ts.map +0 -1
- package/dist/browser/generated/service/operations/skillsets.js +0 -189
- package/dist/browser/generated/service/operations/skillsets.js.map +0 -1
- package/dist/browser/generated/service/operations/synonymMaps.d.ts +0 -43
- package/dist/browser/generated/service/operations/synonymMaps.d.ts.map +0 -1
- package/dist/browser/generated/service/operations/synonymMaps.js +0 -160
- package/dist/browser/generated/service/operations/synonymMaps.js.map +0 -1
- package/dist/browser/generated/service/operationsInterfaces/aliases.d.ts +0 -36
- package/dist/browser/generated/service/operationsInterfaces/aliases.d.ts.map +0 -1
- package/dist/browser/generated/service/operationsInterfaces/aliases.js +0 -9
- package/dist/browser/generated/service/operationsInterfaces/aliases.js.map +0 -1
- package/dist/browser/generated/service/operationsInterfaces/dataSources.d.ts +0 -35
- package/dist/browser/generated/service/operationsInterfaces/dataSources.d.ts.map +0 -1
- package/dist/browser/generated/service/operationsInterfaces/dataSources.js +0 -9
- package/dist/browser/generated/service/operationsInterfaces/dataSources.js.map +0 -1
- package/dist/browser/generated/service/operationsInterfaces/index.d.ts +0 -9
- package/dist/browser/generated/service/operationsInterfaces/index.d.ts.map +0 -1
- package/dist/browser/generated/service/operationsInterfaces/index.js +0 -16
- package/dist/browser/generated/service/operationsInterfaces/index.js.map +0 -1
- package/dist/browser/generated/service/operationsInterfaces/indexers.d.ts +0 -66
- package/dist/browser/generated/service/operationsInterfaces/indexers.d.ts.map +0 -1
- package/dist/browser/generated/service/operationsInterfaces/indexers.js +0 -9
- package/dist/browser/generated/service/operationsInterfaces/indexers.js.map +0 -1
- package/dist/browser/generated/service/operationsInterfaces/indexes.d.ts +0 -50
- package/dist/browser/generated/service/operationsInterfaces/indexes.d.ts.map +0 -1
- package/dist/browser/generated/service/operationsInterfaces/indexes.js +0 -9
- package/dist/browser/generated/service/operationsInterfaces/indexes.js.map +0 -1
- package/dist/browser/generated/service/operationsInterfaces/knowledgeBases.d.ts +0 -35
- package/dist/browser/generated/service/operationsInterfaces/knowledgeBases.d.ts.map +0 -1
- package/dist/browser/generated/service/operationsInterfaces/knowledgeBases.js +0 -9
- package/dist/browser/generated/service/operationsInterfaces/knowledgeBases.js.map +0 -1
- package/dist/browser/generated/service/operationsInterfaces/knowledgeSources.d.ts +0 -41
- package/dist/browser/generated/service/operationsInterfaces/knowledgeSources.d.ts.map +0 -1
- package/dist/browser/generated/service/operationsInterfaces/knowledgeSources.js +0 -9
- package/dist/browser/generated/service/operationsInterfaces/knowledgeSources.js.map +0 -1
- package/dist/browser/generated/service/operationsInterfaces/skillsets.d.ts +0 -42
- package/dist/browser/generated/service/operationsInterfaces/skillsets.d.ts.map +0 -1
- package/dist/browser/generated/service/operationsInterfaces/skillsets.js +0 -9
- package/dist/browser/generated/service/operationsInterfaces/skillsets.js.map +0 -1
- package/dist/browser/generated/service/operationsInterfaces/synonymMaps.d.ts +0 -35
- package/dist/browser/generated/service/operationsInterfaces/synonymMaps.d.ts.map +0 -1
- package/dist/browser/generated/service/operationsInterfaces/synonymMaps.js +0 -9
- package/dist/browser/generated/service/operationsInterfaces/synonymMaps.js.map +0 -1
- package/dist/browser/generated/service/searchServiceClient.d.ts +0 -36
- package/dist/browser/generated/service/searchServiceClient.d.ts.map +0 -1
- package/dist/browser/generated/service/searchServiceClient.js +0 -145
- package/dist/browser/generated/service/searchServiceClient.js.map +0 -1
- package/dist/browser/synonymMapHelper.d.ts +0 -11
- package/dist/browser/synonymMapHelper.js +0 -14
- package/dist/commonjs/errorModels.d.ts +0 -56
- package/dist/commonjs/errorModels.d.ts.map +0 -1
- package/dist/commonjs/errorModels.js +0 -5
- package/dist/commonjs/errorModels.js.map +0 -1
- package/dist/commonjs/generated/data/index.d.ts +0 -4
- package/dist/commonjs/generated/data/index.d.ts.map +0 -1
- package/dist/commonjs/generated/data/index.js +0 -16
- package/dist/commonjs/generated/data/index.js.map +0 -1
- package/dist/commonjs/generated/data/models/index.d.ts +0 -1356
- package/dist/commonjs/generated/data/models/index.d.ts.map +0 -1
- package/dist/commonjs/generated/data/models/index.js +0 -293
- package/dist/commonjs/generated/data/models/index.js.map +0 -1
- package/dist/commonjs/generated/data/models/mappers.d.ts +0 -52
- package/dist/commonjs/generated/data/models/mappers.d.ts.map +0 -1
- package/dist/commonjs/generated/data/models/mappers.js +0 -1550
- package/dist/commonjs/generated/data/models/mappers.js.map +0 -1
- package/dist/commonjs/generated/data/models/parameters.d.ts +0 -64
- package/dist/commonjs/generated/data/models/parameters.d.ts.map +0 -1
- package/dist/commonjs/generated/data/models/parameters.js +0 -641
- package/dist/commonjs/generated/data/models/parameters.js.map +0 -1
- package/dist/commonjs/generated/data/operations/documents.d.ts +0 -70
- package/dist/commonjs/generated/data/operations/documents.d.ts.map +0 -1
- package/dist/commonjs/generated/data/operations/documents.js +0 -337
- package/dist/commonjs/generated/data/operations/documents.js.map +0 -1
- package/dist/commonjs/generated/data/operations/index.d.ts +0 -2
- package/dist/commonjs/generated/data/operations/index.d.ts.map +0 -1
- package/dist/commonjs/generated/data/operations/index.js +0 -12
- package/dist/commonjs/generated/data/operations/index.js.map +0 -1
- package/dist/commonjs/generated/data/operationsInterfaces/documents.d.ts +0 -62
- package/dist/commonjs/generated/data/operationsInterfaces/documents.d.ts.map +0 -1
- package/dist/commonjs/generated/data/operationsInterfaces/documents.js +0 -10
- package/dist/commonjs/generated/data/operationsInterfaces/documents.js.map +0 -1
- package/dist/commonjs/generated/data/operationsInterfaces/index.d.ts +0 -2
- package/dist/commonjs/generated/data/operationsInterfaces/index.d.ts.map +0 -1
- package/dist/commonjs/generated/data/operationsInterfaces/index.js +0 -12
- package/dist/commonjs/generated/data/operationsInterfaces/index.js.map +0 -1
- package/dist/commonjs/generated/data/searchClient.d.ts +0 -21
- package/dist/commonjs/generated/data/searchClient.d.ts.map +0 -1
- package/dist/commonjs/generated/data/searchClient.js +0 -93
- package/dist/commonjs/generated/data/searchClient.js.map +0 -1
- package/dist/commonjs/generated/knowledgeBase/index.d.ts +0 -4
- package/dist/commonjs/generated/knowledgeBase/index.d.ts.map +0 -1
- package/dist/commonjs/generated/knowledgeBase/index.js +0 -16
- package/dist/commonjs/generated/knowledgeBase/index.js.map +0 -1
- package/dist/commonjs/generated/knowledgeBase/models/index.d.ts +0 -570
- package/dist/commonjs/generated/knowledgeBase/models/index.d.ts.map +0 -1
- package/dist/commonjs/generated/knowledgeBase/models/index.js +0 -65
- package/dist/commonjs/generated/knowledgeBase/models/index.js.map +0 -1
- package/dist/commonjs/generated/knowledgeBase/models/mappers.d.ts +0 -89
- package/dist/commonjs/generated/knowledgeBase/models/mappers.d.ts.map +0 -1
- package/dist/commonjs/generated/knowledgeBase/models/mappers.js +0 -1329
- package/dist/commonjs/generated/knowledgeBase/models/mappers.js.map +0 -1
- package/dist/commonjs/generated/knowledgeBase/models/parameters.d.ts +0 -9
- package/dist/commonjs/generated/knowledgeBase/models/parameters.d.ts.map +0 -1
- package/dist/commonjs/generated/knowledgeBase/models/parameters.js +0 -78
- package/dist/commonjs/generated/knowledgeBase/models/parameters.js.map +0 -1
- package/dist/commonjs/generated/knowledgeBase/operations/index.d.ts +0 -2
- package/dist/commonjs/generated/knowledgeBase/operations/index.d.ts.map +0 -1
- package/dist/commonjs/generated/knowledgeBase/operations/index.js +0 -12
- package/dist/commonjs/generated/knowledgeBase/operations/index.js.map +0 -1
- package/dist/commonjs/generated/knowledgeBase/operations/knowledgeRetrieval.d.ts +0 -19
- package/dist/commonjs/generated/knowledgeBase/operations/knowledgeRetrieval.d.ts.map +0 -1
- package/dist/commonjs/generated/knowledgeBase/operations/knowledgeRetrieval.js +0 -62
- package/dist/commonjs/generated/knowledgeBase/operations/knowledgeRetrieval.js.map +0 -1
- package/dist/commonjs/generated/knowledgeBase/operationsInterfaces/index.d.ts +0 -2
- package/dist/commonjs/generated/knowledgeBase/operationsInterfaces/index.d.ts.map +0 -1
- package/dist/commonjs/generated/knowledgeBase/operationsInterfaces/index.js +0 -12
- package/dist/commonjs/generated/knowledgeBase/operationsInterfaces/index.js.map +0 -1
- package/dist/commonjs/generated/knowledgeBase/operationsInterfaces/knowledgeRetrieval.d.ts +0 -11
- package/dist/commonjs/generated/knowledgeBase/operationsInterfaces/knowledgeRetrieval.d.ts.map +0 -1
- package/dist/commonjs/generated/knowledgeBase/operationsInterfaces/knowledgeRetrieval.js +0 -10
- package/dist/commonjs/generated/knowledgeBase/operationsInterfaces/knowledgeRetrieval.js.map +0 -1
- package/dist/commonjs/generated/knowledgeBase/searchClient.d.ts +0 -21
- package/dist/commonjs/generated/knowledgeBase/searchClient.d.ts.map +0 -1
- package/dist/commonjs/generated/knowledgeBase/searchClient.js +0 -93
- package/dist/commonjs/generated/knowledgeBase/searchClient.js.map +0 -1
- package/dist/commonjs/generated/service/index.d.ts +0 -4
- package/dist/commonjs/generated/service/index.d.ts.map +0 -1
- package/dist/commonjs/generated/service/index.js +0 -16
- package/dist/commonjs/generated/service/index.js.map +0 -1
- package/dist/commonjs/generated/service/models/index.d.ts +0 -5693
- package/dist/commonjs/generated/service/models/index.d.ts.map +0 -1
- package/dist/commonjs/generated/service/models/index.js +0 -1710
- package/dist/commonjs/generated/service/models/index.js.map +0 -1
- package/dist/commonjs/generated/service/models/mappers.d.ts +0 -358
- package/dist/commonjs/generated/service/models/mappers.d.ts.map +0 -1
- package/dist/commonjs/generated/service/models/mappers.js +0 -8164
- package/dist/commonjs/generated/service/models/mappers.js.map +0 -1
- package/dist/commonjs/generated/service/models/parameters.d.ts +0 -34
- package/dist/commonjs/generated/service/models/parameters.d.ts.map +0 -1
- package/dist/commonjs/generated/service/models/parameters.js +0 -258
- package/dist/commonjs/generated/service/models/parameters.js.map +0 -1
- package/dist/commonjs/generated/service/operations/aliases.d.ts +0 -44
- package/dist/commonjs/generated/service/operations/aliases.d.ts.map +0 -1
- package/dist/commonjs/generated/service/operations/aliases.js +0 -166
- package/dist/commonjs/generated/service/operations/aliases.js.map +0 -1
- package/dist/commonjs/generated/service/operations/dataSources.d.ts +0 -43
- package/dist/commonjs/generated/service/operations/dataSources.d.ts.map +0 -1
- package/dist/commonjs/generated/service/operations/dataSources.js +0 -168
- package/dist/commonjs/generated/service/operations/dataSources.js.map +0 -1
- package/dist/commonjs/generated/service/operations/index.d.ts +0 -9
- package/dist/commonjs/generated/service/operations/index.d.ts.map +0 -1
- package/dist/commonjs/generated/service/operations/index.js +0 -19
- package/dist/commonjs/generated/service/operations/index.js.map +0 -1
- package/dist/commonjs/generated/service/operations/indexers.d.ts +0 -74
- package/dist/commonjs/generated/service/operations/indexers.d.ts.map +0 -1
- package/dist/commonjs/generated/service/operations/indexers.js +0 -286
- package/dist/commonjs/generated/service/operations/indexers.js.map +0 -1
- package/dist/commonjs/generated/service/operations/indexes.d.ts +0 -58
- package/dist/commonjs/generated/service/operations/indexes.d.ts.map +0 -1
- package/dist/commonjs/generated/service/operations/indexes.js +0 -218
- package/dist/commonjs/generated/service/operations/indexes.js.map +0 -1
- package/dist/commonjs/generated/service/operations/knowledgeBases.d.ts +0 -43
- package/dist/commonjs/generated/service/operations/knowledgeBases.d.ts.map +0 -1
- package/dist/commonjs/generated/service/operations/knowledgeBases.js +0 -165
- package/dist/commonjs/generated/service/operations/knowledgeBases.js.map +0 -1
- package/dist/commonjs/generated/service/operations/knowledgeSources.d.ts +0 -49
- package/dist/commonjs/generated/service/operations/knowledgeSources.d.ts.map +0 -1
- package/dist/commonjs/generated/service/operations/knowledgeSources.js +0 -189
- package/dist/commonjs/generated/service/operations/knowledgeSources.js.map +0 -1
- package/dist/commonjs/generated/service/operations/skillsets.d.ts +0 -50
- package/dist/commonjs/generated/service/operations/skillsets.d.ts.map +0 -1
- package/dist/commonjs/generated/service/operations/skillsets.js +0 -194
- package/dist/commonjs/generated/service/operations/skillsets.js.map +0 -1
- package/dist/commonjs/generated/service/operations/synonymMaps.d.ts +0 -43
- package/dist/commonjs/generated/service/operations/synonymMaps.d.ts.map +0 -1
- package/dist/commonjs/generated/service/operations/synonymMaps.js +0 -165
- package/dist/commonjs/generated/service/operations/synonymMaps.js.map +0 -1
- package/dist/commonjs/generated/service/operationsInterfaces/aliases.d.ts +0 -36
- package/dist/commonjs/generated/service/operationsInterfaces/aliases.d.ts.map +0 -1
- package/dist/commonjs/generated/service/operationsInterfaces/aliases.js +0 -10
- package/dist/commonjs/generated/service/operationsInterfaces/aliases.js.map +0 -1
- package/dist/commonjs/generated/service/operationsInterfaces/dataSources.d.ts +0 -35
- package/dist/commonjs/generated/service/operationsInterfaces/dataSources.d.ts.map +0 -1
- package/dist/commonjs/generated/service/operationsInterfaces/dataSources.js +0 -10
- package/dist/commonjs/generated/service/operationsInterfaces/dataSources.js.map +0 -1
- package/dist/commonjs/generated/service/operationsInterfaces/index.d.ts +0 -9
- package/dist/commonjs/generated/service/operationsInterfaces/index.d.ts.map +0 -1
- package/dist/commonjs/generated/service/operationsInterfaces/index.js +0 -19
- package/dist/commonjs/generated/service/operationsInterfaces/index.js.map +0 -1
- package/dist/commonjs/generated/service/operationsInterfaces/indexers.d.ts +0 -66
- package/dist/commonjs/generated/service/operationsInterfaces/indexers.d.ts.map +0 -1
- package/dist/commonjs/generated/service/operationsInterfaces/indexers.js +0 -10
- package/dist/commonjs/generated/service/operationsInterfaces/indexers.js.map +0 -1
- package/dist/commonjs/generated/service/operationsInterfaces/indexes.d.ts +0 -50
- package/dist/commonjs/generated/service/operationsInterfaces/indexes.d.ts.map +0 -1
- package/dist/commonjs/generated/service/operationsInterfaces/indexes.js +0 -10
- package/dist/commonjs/generated/service/operationsInterfaces/indexes.js.map +0 -1
- package/dist/commonjs/generated/service/operationsInterfaces/knowledgeBases.d.ts +0 -35
- package/dist/commonjs/generated/service/operationsInterfaces/knowledgeBases.d.ts.map +0 -1
- package/dist/commonjs/generated/service/operationsInterfaces/knowledgeBases.js +0 -10
- package/dist/commonjs/generated/service/operationsInterfaces/knowledgeBases.js.map +0 -1
- package/dist/commonjs/generated/service/operationsInterfaces/knowledgeSources.d.ts +0 -41
- package/dist/commonjs/generated/service/operationsInterfaces/knowledgeSources.d.ts.map +0 -1
- package/dist/commonjs/generated/service/operationsInterfaces/knowledgeSources.js +0 -10
- package/dist/commonjs/generated/service/operationsInterfaces/knowledgeSources.js.map +0 -1
- package/dist/commonjs/generated/service/operationsInterfaces/skillsets.d.ts +0 -42
- package/dist/commonjs/generated/service/operationsInterfaces/skillsets.d.ts.map +0 -1
- package/dist/commonjs/generated/service/operationsInterfaces/skillsets.js +0 -10
- package/dist/commonjs/generated/service/operationsInterfaces/skillsets.js.map +0 -1
- package/dist/commonjs/generated/service/operationsInterfaces/synonymMaps.d.ts +0 -35
- package/dist/commonjs/generated/service/operationsInterfaces/synonymMaps.d.ts.map +0 -1
- package/dist/commonjs/generated/service/operationsInterfaces/synonymMaps.js +0 -10
- package/dist/commonjs/generated/service/operationsInterfaces/synonymMaps.js.map +0 -1
- package/dist/commonjs/generated/service/searchServiceClient.d.ts +0 -36
- package/dist/commonjs/generated/service/searchServiceClient.d.ts.map +0 -1
- package/dist/commonjs/generated/service/searchServiceClient.js +0 -150
- package/dist/commonjs/generated/service/searchServiceClient.js.map +0 -1
- package/dist/esm/errorModels.d.ts +0 -56
- package/dist/esm/errorModels.d.ts.map +0 -1
- package/dist/esm/errorModels.js +0 -4
- package/dist/esm/errorModels.js.map +0 -1
- package/dist/esm/generated/data/index.d.ts +0 -4
- package/dist/esm/generated/data/index.d.ts.map +0 -1
- package/dist/esm/generated/data/index.js +0 -11
- package/dist/esm/generated/data/index.js.map +0 -1
- package/dist/esm/generated/data/models/index.d.ts +0 -1356
- package/dist/esm/generated/data/models/index.d.ts.map +0 -1
- package/dist/esm/generated/data/models/index.js +0 -290
- package/dist/esm/generated/data/models/index.js.map +0 -1
- package/dist/esm/generated/data/models/mappers.d.ts +0 -52
- package/dist/esm/generated/data/models/mappers.d.ts.map +0 -1
- package/dist/esm/generated/data/models/mappers.js +0 -1547
- package/dist/esm/generated/data/models/mappers.js.map +0 -1
- package/dist/esm/generated/data/models/parameters.d.ts +0 -64
- package/dist/esm/generated/data/models/parameters.d.ts.map +0 -1
- package/dist/esm/generated/data/models/parameters.js +0 -637
- package/dist/esm/generated/data/models/parameters.js.map +0 -1
- package/dist/esm/generated/data/operations/documents.d.ts +0 -70
- package/dist/esm/generated/data/operations/documents.d.ts.map +0 -1
- package/dist/esm/generated/data/operations/documents.js +0 -332
- package/dist/esm/generated/data/operations/documents.js.map +0 -1
- package/dist/esm/generated/data/operations/index.d.ts +0 -2
- package/dist/esm/generated/data/operations/index.d.ts.map +0 -1
- package/dist/esm/generated/data/operations/index.js +0 -9
- package/dist/esm/generated/data/operations/index.js.map +0 -1
- package/dist/esm/generated/data/operationsInterfaces/documents.d.ts +0 -62
- package/dist/esm/generated/data/operationsInterfaces/documents.d.ts.map +0 -1
- package/dist/esm/generated/data/operationsInterfaces/documents.js +0 -9
- package/dist/esm/generated/data/operationsInterfaces/documents.js.map +0 -1
- package/dist/esm/generated/data/operationsInterfaces/index.d.ts +0 -2
- package/dist/esm/generated/data/operationsInterfaces/index.d.ts.map +0 -1
- package/dist/esm/generated/data/operationsInterfaces/index.js +0 -9
- package/dist/esm/generated/data/operationsInterfaces/index.js.map +0 -1
- package/dist/esm/generated/data/searchClient.d.ts +0 -21
- package/dist/esm/generated/data/searchClient.d.ts.map +0 -1
- package/dist/esm/generated/data/searchClient.js +0 -88
- package/dist/esm/generated/data/searchClient.js.map +0 -1
- package/dist/esm/generated/knowledgeBase/index.d.ts +0 -4
- package/dist/esm/generated/knowledgeBase/index.d.ts.map +0 -1
- package/dist/esm/generated/knowledgeBase/index.js +0 -11
- package/dist/esm/generated/knowledgeBase/index.js.map +0 -1
- package/dist/esm/generated/knowledgeBase/models/index.d.ts +0 -570
- package/dist/esm/generated/knowledgeBase/models/index.d.ts.map +0 -1
- package/dist/esm/generated/knowledgeBase/models/index.js +0 -62
- package/dist/esm/generated/knowledgeBase/models/index.js.map +0 -1
- package/dist/esm/generated/knowledgeBase/models/mappers.d.ts +0 -89
- package/dist/esm/generated/knowledgeBase/models/mappers.d.ts.map +0 -1
- package/dist/esm/generated/knowledgeBase/models/mappers.js +0 -1325
- package/dist/esm/generated/knowledgeBase/models/mappers.js.map +0 -1
- package/dist/esm/generated/knowledgeBase/models/parameters.d.ts +0 -9
- package/dist/esm/generated/knowledgeBase/models/parameters.d.ts.map +0 -1
- package/dist/esm/generated/knowledgeBase/models/parameters.js +0 -75
- package/dist/esm/generated/knowledgeBase/models/parameters.js.map +0 -1
- package/dist/esm/generated/knowledgeBase/operations/index.d.ts +0 -2
- package/dist/esm/generated/knowledgeBase/operations/index.d.ts.map +0 -1
- package/dist/esm/generated/knowledgeBase/operations/index.js +0 -9
- package/dist/esm/generated/knowledgeBase/operations/index.js.map +0 -1
- package/dist/esm/generated/knowledgeBase/operations/knowledgeRetrieval.d.ts +0 -19
- package/dist/esm/generated/knowledgeBase/operations/knowledgeRetrieval.d.ts.map +0 -1
- package/dist/esm/generated/knowledgeBase/operations/knowledgeRetrieval.js +0 -57
- package/dist/esm/generated/knowledgeBase/operations/knowledgeRetrieval.js.map +0 -1
- package/dist/esm/generated/knowledgeBase/operationsInterfaces/index.d.ts +0 -2
- package/dist/esm/generated/knowledgeBase/operationsInterfaces/index.d.ts.map +0 -1
- package/dist/esm/generated/knowledgeBase/operationsInterfaces/index.js +0 -9
- package/dist/esm/generated/knowledgeBase/operationsInterfaces/index.js.map +0 -1
- package/dist/esm/generated/knowledgeBase/operationsInterfaces/knowledgeRetrieval.d.ts +0 -11
- package/dist/esm/generated/knowledgeBase/operationsInterfaces/knowledgeRetrieval.d.ts.map +0 -1
- package/dist/esm/generated/knowledgeBase/operationsInterfaces/knowledgeRetrieval.js +0 -9
- package/dist/esm/generated/knowledgeBase/operationsInterfaces/knowledgeRetrieval.js.map +0 -1
- package/dist/esm/generated/knowledgeBase/searchClient.d.ts +0 -21
- package/dist/esm/generated/knowledgeBase/searchClient.d.ts.map +0 -1
- package/dist/esm/generated/knowledgeBase/searchClient.js +0 -88
- package/dist/esm/generated/knowledgeBase/searchClient.js.map +0 -1
- package/dist/esm/generated/service/index.d.ts +0 -4
- package/dist/esm/generated/service/index.d.ts.map +0 -1
- package/dist/esm/generated/service/index.js +0 -11
- package/dist/esm/generated/service/index.js.map +0 -1
- package/dist/esm/generated/service/models/index.d.ts +0 -5693
- package/dist/esm/generated/service/models/index.d.ts.map +0 -1
- package/dist/esm/generated/service/models/index.js +0 -1706
- package/dist/esm/generated/service/models/index.js.map +0 -1
- package/dist/esm/generated/service/models/mappers.d.ts +0 -358
- package/dist/esm/generated/service/models/mappers.d.ts.map +0 -1
- package/dist/esm/generated/service/models/mappers.js +0 -8157
- package/dist/esm/generated/service/models/mappers.js.map +0 -1
- package/dist/esm/generated/service/models/parameters.d.ts +0 -34
- package/dist/esm/generated/service/models/parameters.d.ts.map +0 -1
- package/dist/esm/generated/service/models/parameters.js +0 -255
- package/dist/esm/generated/service/models/parameters.js.map +0 -1
- package/dist/esm/generated/service/operations/aliases.d.ts +0 -44
- package/dist/esm/generated/service/operations/aliases.d.ts.map +0 -1
- package/dist/esm/generated/service/operations/aliases.js +0 -161
- package/dist/esm/generated/service/operations/aliases.js.map +0 -1
- package/dist/esm/generated/service/operations/dataSources.d.ts +0 -43
- package/dist/esm/generated/service/operations/dataSources.d.ts.map +0 -1
- package/dist/esm/generated/service/operations/dataSources.js +0 -163
- package/dist/esm/generated/service/operations/dataSources.js.map +0 -1
- package/dist/esm/generated/service/operations/index.d.ts +0 -9
- package/dist/esm/generated/service/operations/index.d.ts.map +0 -1
- package/dist/esm/generated/service/operations/index.js +0 -16
- package/dist/esm/generated/service/operations/index.js.map +0 -1
- package/dist/esm/generated/service/operations/indexers.d.ts +0 -74
- package/dist/esm/generated/service/operations/indexers.d.ts.map +0 -1
- package/dist/esm/generated/service/operations/indexers.js +0 -281
- package/dist/esm/generated/service/operations/indexers.js.map +0 -1
- package/dist/esm/generated/service/operations/indexes.d.ts +0 -58
- package/dist/esm/generated/service/operations/indexes.d.ts.map +0 -1
- package/dist/esm/generated/service/operations/indexes.js +0 -213
- package/dist/esm/generated/service/operations/indexes.js.map +0 -1
- package/dist/esm/generated/service/operations/knowledgeBases.d.ts +0 -43
- package/dist/esm/generated/service/operations/knowledgeBases.d.ts.map +0 -1
- package/dist/esm/generated/service/operations/knowledgeBases.js +0 -160
- package/dist/esm/generated/service/operations/knowledgeBases.js.map +0 -1
- package/dist/esm/generated/service/operations/knowledgeSources.d.ts +0 -49
- package/dist/esm/generated/service/operations/knowledgeSources.d.ts.map +0 -1
- package/dist/esm/generated/service/operations/knowledgeSources.js +0 -184
- package/dist/esm/generated/service/operations/knowledgeSources.js.map +0 -1
- package/dist/esm/generated/service/operations/skillsets.d.ts +0 -50
- package/dist/esm/generated/service/operations/skillsets.d.ts.map +0 -1
- package/dist/esm/generated/service/operations/skillsets.js +0 -189
- package/dist/esm/generated/service/operations/skillsets.js.map +0 -1
- package/dist/esm/generated/service/operations/synonymMaps.d.ts +0 -43
- package/dist/esm/generated/service/operations/synonymMaps.d.ts.map +0 -1
- package/dist/esm/generated/service/operations/synonymMaps.js +0 -160
- package/dist/esm/generated/service/operations/synonymMaps.js.map +0 -1
- package/dist/esm/generated/service/operationsInterfaces/aliases.d.ts +0 -36
- package/dist/esm/generated/service/operationsInterfaces/aliases.d.ts.map +0 -1
- package/dist/esm/generated/service/operationsInterfaces/aliases.js +0 -9
- package/dist/esm/generated/service/operationsInterfaces/aliases.js.map +0 -1
- package/dist/esm/generated/service/operationsInterfaces/dataSources.d.ts +0 -35
- package/dist/esm/generated/service/operationsInterfaces/dataSources.d.ts.map +0 -1
- package/dist/esm/generated/service/operationsInterfaces/dataSources.js +0 -9
- package/dist/esm/generated/service/operationsInterfaces/dataSources.js.map +0 -1
- package/dist/esm/generated/service/operationsInterfaces/index.d.ts +0 -9
- package/dist/esm/generated/service/operationsInterfaces/index.d.ts.map +0 -1
- package/dist/esm/generated/service/operationsInterfaces/index.js +0 -16
- package/dist/esm/generated/service/operationsInterfaces/index.js.map +0 -1
- package/dist/esm/generated/service/operationsInterfaces/indexers.d.ts +0 -66
- package/dist/esm/generated/service/operationsInterfaces/indexers.d.ts.map +0 -1
- package/dist/esm/generated/service/operationsInterfaces/indexers.js +0 -9
- package/dist/esm/generated/service/operationsInterfaces/indexers.js.map +0 -1
- package/dist/esm/generated/service/operationsInterfaces/indexes.d.ts +0 -50
- package/dist/esm/generated/service/operationsInterfaces/indexes.d.ts.map +0 -1
- package/dist/esm/generated/service/operationsInterfaces/indexes.js +0 -9
- package/dist/esm/generated/service/operationsInterfaces/indexes.js.map +0 -1
- package/dist/esm/generated/service/operationsInterfaces/knowledgeBases.d.ts +0 -35
- package/dist/esm/generated/service/operationsInterfaces/knowledgeBases.d.ts.map +0 -1
- package/dist/esm/generated/service/operationsInterfaces/knowledgeBases.js +0 -9
- package/dist/esm/generated/service/operationsInterfaces/knowledgeBases.js.map +0 -1
- package/dist/esm/generated/service/operationsInterfaces/knowledgeSources.d.ts +0 -41
- package/dist/esm/generated/service/operationsInterfaces/knowledgeSources.d.ts.map +0 -1
- package/dist/esm/generated/service/operationsInterfaces/knowledgeSources.js +0 -9
- package/dist/esm/generated/service/operationsInterfaces/knowledgeSources.js.map +0 -1
- package/dist/esm/generated/service/operationsInterfaces/skillsets.d.ts +0 -42
- package/dist/esm/generated/service/operationsInterfaces/skillsets.d.ts.map +0 -1
- package/dist/esm/generated/service/operationsInterfaces/skillsets.js +0 -9
- package/dist/esm/generated/service/operationsInterfaces/skillsets.js.map +0 -1
- package/dist/esm/generated/service/operationsInterfaces/synonymMaps.d.ts +0 -35
- package/dist/esm/generated/service/operationsInterfaces/synonymMaps.d.ts.map +0 -1
- package/dist/esm/generated/service/operationsInterfaces/synonymMaps.js +0 -9
- package/dist/esm/generated/service/operationsInterfaces/synonymMaps.js.map +0 -1
- package/dist/esm/generated/service/searchServiceClient.d.ts +0 -36
- package/dist/esm/generated/service/searchServiceClient.d.ts.map +0 -1
- package/dist/esm/generated/service/searchServiceClient.js +0 -145
- package/dist/esm/generated/service/searchServiceClient.js.map +0 -1
- package/dist/react-native/errorModels.d.ts +0 -56
- package/dist/react-native/errorModels.d.ts.map +0 -1
- package/dist/react-native/errorModels.js +0 -4
- package/dist/react-native/errorModels.js.map +0 -1
- package/dist/react-native/generated/data/index.d.ts +0 -4
- package/dist/react-native/generated/data/index.d.ts.map +0 -1
- package/dist/react-native/generated/data/index.js +0 -11
- package/dist/react-native/generated/data/index.js.map +0 -1
- package/dist/react-native/generated/data/models/index.d.ts +0 -1356
- package/dist/react-native/generated/data/models/index.d.ts.map +0 -1
- package/dist/react-native/generated/data/models/index.js +0 -290
- package/dist/react-native/generated/data/models/index.js.map +0 -1
- package/dist/react-native/generated/data/models/mappers.d.ts +0 -52
- package/dist/react-native/generated/data/models/mappers.d.ts.map +0 -1
- package/dist/react-native/generated/data/models/mappers.js +0 -1547
- package/dist/react-native/generated/data/models/mappers.js.map +0 -1
- package/dist/react-native/generated/data/models/parameters.d.ts +0 -64
- package/dist/react-native/generated/data/models/parameters.d.ts.map +0 -1
- package/dist/react-native/generated/data/models/parameters.js +0 -637
- package/dist/react-native/generated/data/models/parameters.js.map +0 -1
- package/dist/react-native/generated/data/operations/documents.d.ts +0 -70
- package/dist/react-native/generated/data/operations/documents.d.ts.map +0 -1
- package/dist/react-native/generated/data/operations/documents.js +0 -332
- package/dist/react-native/generated/data/operations/documents.js.map +0 -1
- package/dist/react-native/generated/data/operations/index.d.ts +0 -2
- package/dist/react-native/generated/data/operations/index.d.ts.map +0 -1
- package/dist/react-native/generated/data/operations/index.js +0 -9
- package/dist/react-native/generated/data/operations/index.js.map +0 -1
- package/dist/react-native/generated/data/operationsInterfaces/documents.d.ts +0 -62
- package/dist/react-native/generated/data/operationsInterfaces/documents.d.ts.map +0 -1
- package/dist/react-native/generated/data/operationsInterfaces/documents.js +0 -9
- package/dist/react-native/generated/data/operationsInterfaces/documents.js.map +0 -1
- package/dist/react-native/generated/data/operationsInterfaces/index.d.ts +0 -2
- package/dist/react-native/generated/data/operationsInterfaces/index.d.ts.map +0 -1
- package/dist/react-native/generated/data/operationsInterfaces/index.js +0 -9
- package/dist/react-native/generated/data/operationsInterfaces/index.js.map +0 -1
- package/dist/react-native/generated/data/searchClient.d.ts +0 -21
- package/dist/react-native/generated/data/searchClient.d.ts.map +0 -1
- package/dist/react-native/generated/data/searchClient.js +0 -88
- package/dist/react-native/generated/data/searchClient.js.map +0 -1
- package/dist/react-native/generated/knowledgeBase/index.d.ts +0 -4
- package/dist/react-native/generated/knowledgeBase/index.d.ts.map +0 -1
- package/dist/react-native/generated/knowledgeBase/index.js +0 -11
- package/dist/react-native/generated/knowledgeBase/index.js.map +0 -1
- package/dist/react-native/generated/knowledgeBase/models/index.d.ts +0 -570
- package/dist/react-native/generated/knowledgeBase/models/index.d.ts.map +0 -1
- package/dist/react-native/generated/knowledgeBase/models/index.js +0 -62
- package/dist/react-native/generated/knowledgeBase/models/index.js.map +0 -1
- package/dist/react-native/generated/knowledgeBase/models/mappers.d.ts +0 -89
- package/dist/react-native/generated/knowledgeBase/models/mappers.d.ts.map +0 -1
- package/dist/react-native/generated/knowledgeBase/models/mappers.js +0 -1325
- package/dist/react-native/generated/knowledgeBase/models/mappers.js.map +0 -1
- package/dist/react-native/generated/knowledgeBase/models/parameters.d.ts +0 -9
- package/dist/react-native/generated/knowledgeBase/models/parameters.d.ts.map +0 -1
- package/dist/react-native/generated/knowledgeBase/models/parameters.js +0 -75
- package/dist/react-native/generated/knowledgeBase/models/parameters.js.map +0 -1
- package/dist/react-native/generated/knowledgeBase/operations/index.d.ts +0 -2
- package/dist/react-native/generated/knowledgeBase/operations/index.d.ts.map +0 -1
- package/dist/react-native/generated/knowledgeBase/operations/index.js +0 -9
- package/dist/react-native/generated/knowledgeBase/operations/index.js.map +0 -1
- package/dist/react-native/generated/knowledgeBase/operations/knowledgeRetrieval.d.ts +0 -19
- package/dist/react-native/generated/knowledgeBase/operations/knowledgeRetrieval.d.ts.map +0 -1
- package/dist/react-native/generated/knowledgeBase/operations/knowledgeRetrieval.js +0 -57
- package/dist/react-native/generated/knowledgeBase/operations/knowledgeRetrieval.js.map +0 -1
- package/dist/react-native/generated/knowledgeBase/operationsInterfaces/index.d.ts +0 -2
- package/dist/react-native/generated/knowledgeBase/operationsInterfaces/index.d.ts.map +0 -1
- package/dist/react-native/generated/knowledgeBase/operationsInterfaces/index.js +0 -9
- package/dist/react-native/generated/knowledgeBase/operationsInterfaces/index.js.map +0 -1
- package/dist/react-native/generated/knowledgeBase/operationsInterfaces/knowledgeRetrieval.d.ts +0 -11
- package/dist/react-native/generated/knowledgeBase/operationsInterfaces/knowledgeRetrieval.d.ts.map +0 -1
- package/dist/react-native/generated/knowledgeBase/operationsInterfaces/knowledgeRetrieval.js +0 -9
- package/dist/react-native/generated/knowledgeBase/operationsInterfaces/knowledgeRetrieval.js.map +0 -1
- package/dist/react-native/generated/knowledgeBase/searchClient.d.ts +0 -21
- package/dist/react-native/generated/knowledgeBase/searchClient.d.ts.map +0 -1
- package/dist/react-native/generated/knowledgeBase/searchClient.js +0 -88
- package/dist/react-native/generated/knowledgeBase/searchClient.js.map +0 -1
- package/dist/react-native/generated/service/index.d.ts +0 -4
- package/dist/react-native/generated/service/index.d.ts.map +0 -1
- package/dist/react-native/generated/service/index.js +0 -11
- package/dist/react-native/generated/service/index.js.map +0 -1
- package/dist/react-native/generated/service/models/index.d.ts +0 -5693
- package/dist/react-native/generated/service/models/index.d.ts.map +0 -1
- package/dist/react-native/generated/service/models/index.js +0 -1706
- package/dist/react-native/generated/service/models/index.js.map +0 -1
- package/dist/react-native/generated/service/models/mappers.d.ts +0 -358
- package/dist/react-native/generated/service/models/mappers.d.ts.map +0 -1
- package/dist/react-native/generated/service/models/mappers.js +0 -8157
- package/dist/react-native/generated/service/models/mappers.js.map +0 -1
- package/dist/react-native/generated/service/models/parameters.d.ts +0 -34
- package/dist/react-native/generated/service/models/parameters.d.ts.map +0 -1
- package/dist/react-native/generated/service/models/parameters.js +0 -255
- package/dist/react-native/generated/service/models/parameters.js.map +0 -1
- package/dist/react-native/generated/service/operations/aliases.d.ts +0 -44
- package/dist/react-native/generated/service/operations/aliases.d.ts.map +0 -1
- package/dist/react-native/generated/service/operations/aliases.js +0 -161
- package/dist/react-native/generated/service/operations/aliases.js.map +0 -1
- package/dist/react-native/generated/service/operations/dataSources.d.ts +0 -43
- package/dist/react-native/generated/service/operations/dataSources.d.ts.map +0 -1
- package/dist/react-native/generated/service/operations/dataSources.js +0 -163
- package/dist/react-native/generated/service/operations/dataSources.js.map +0 -1
- package/dist/react-native/generated/service/operations/index.d.ts +0 -9
- package/dist/react-native/generated/service/operations/index.d.ts.map +0 -1
- package/dist/react-native/generated/service/operations/index.js +0 -16
- package/dist/react-native/generated/service/operations/index.js.map +0 -1
- package/dist/react-native/generated/service/operations/indexers.d.ts +0 -74
- package/dist/react-native/generated/service/operations/indexers.d.ts.map +0 -1
- package/dist/react-native/generated/service/operations/indexers.js +0 -281
- package/dist/react-native/generated/service/operations/indexers.js.map +0 -1
- package/dist/react-native/generated/service/operations/indexes.d.ts +0 -58
- package/dist/react-native/generated/service/operations/indexes.d.ts.map +0 -1
- package/dist/react-native/generated/service/operations/indexes.js +0 -213
- package/dist/react-native/generated/service/operations/indexes.js.map +0 -1
- package/dist/react-native/generated/service/operations/knowledgeBases.d.ts +0 -43
- package/dist/react-native/generated/service/operations/knowledgeBases.d.ts.map +0 -1
- package/dist/react-native/generated/service/operations/knowledgeBases.js +0 -160
- package/dist/react-native/generated/service/operations/knowledgeBases.js.map +0 -1
- package/dist/react-native/generated/service/operations/knowledgeSources.d.ts +0 -49
- package/dist/react-native/generated/service/operations/knowledgeSources.d.ts.map +0 -1
- package/dist/react-native/generated/service/operations/knowledgeSources.js +0 -184
- package/dist/react-native/generated/service/operations/knowledgeSources.js.map +0 -1
- package/dist/react-native/generated/service/operations/skillsets.d.ts +0 -50
- package/dist/react-native/generated/service/operations/skillsets.d.ts.map +0 -1
- package/dist/react-native/generated/service/operations/skillsets.js +0 -189
- package/dist/react-native/generated/service/operations/skillsets.js.map +0 -1
- package/dist/react-native/generated/service/operations/synonymMaps.d.ts +0 -43
- package/dist/react-native/generated/service/operations/synonymMaps.d.ts.map +0 -1
- package/dist/react-native/generated/service/operations/synonymMaps.js +0 -160
- package/dist/react-native/generated/service/operations/synonymMaps.js.map +0 -1
- package/dist/react-native/generated/service/operationsInterfaces/aliases.d.ts +0 -36
- package/dist/react-native/generated/service/operationsInterfaces/aliases.d.ts.map +0 -1
- package/dist/react-native/generated/service/operationsInterfaces/aliases.js +0 -9
- package/dist/react-native/generated/service/operationsInterfaces/aliases.js.map +0 -1
- package/dist/react-native/generated/service/operationsInterfaces/dataSources.d.ts +0 -35
- package/dist/react-native/generated/service/operationsInterfaces/dataSources.d.ts.map +0 -1
- package/dist/react-native/generated/service/operationsInterfaces/dataSources.js +0 -9
- package/dist/react-native/generated/service/operationsInterfaces/dataSources.js.map +0 -1
- package/dist/react-native/generated/service/operationsInterfaces/index.d.ts +0 -9
- package/dist/react-native/generated/service/operationsInterfaces/index.d.ts.map +0 -1
- package/dist/react-native/generated/service/operationsInterfaces/index.js +0 -16
- package/dist/react-native/generated/service/operationsInterfaces/index.js.map +0 -1
- package/dist/react-native/generated/service/operationsInterfaces/indexers.d.ts +0 -66
- package/dist/react-native/generated/service/operationsInterfaces/indexers.d.ts.map +0 -1
- package/dist/react-native/generated/service/operationsInterfaces/indexers.js +0 -9
- package/dist/react-native/generated/service/operationsInterfaces/indexers.js.map +0 -1
- package/dist/react-native/generated/service/operationsInterfaces/indexes.d.ts +0 -50
- package/dist/react-native/generated/service/operationsInterfaces/indexes.d.ts.map +0 -1
- package/dist/react-native/generated/service/operationsInterfaces/indexes.js +0 -9
- package/dist/react-native/generated/service/operationsInterfaces/indexes.js.map +0 -1
- package/dist/react-native/generated/service/operationsInterfaces/knowledgeBases.d.ts +0 -35
- package/dist/react-native/generated/service/operationsInterfaces/knowledgeBases.d.ts.map +0 -1
- package/dist/react-native/generated/service/operationsInterfaces/knowledgeBases.js +0 -9
- package/dist/react-native/generated/service/operationsInterfaces/knowledgeBases.js.map +0 -1
- package/dist/react-native/generated/service/operationsInterfaces/knowledgeSources.d.ts +0 -41
- package/dist/react-native/generated/service/operationsInterfaces/knowledgeSources.d.ts.map +0 -1
- package/dist/react-native/generated/service/operationsInterfaces/knowledgeSources.js +0 -9
- package/dist/react-native/generated/service/operationsInterfaces/knowledgeSources.js.map +0 -1
- package/dist/react-native/generated/service/operationsInterfaces/skillsets.d.ts +0 -42
- package/dist/react-native/generated/service/operationsInterfaces/skillsets.d.ts.map +0 -1
- package/dist/react-native/generated/service/operationsInterfaces/skillsets.js +0 -9
- package/dist/react-native/generated/service/operationsInterfaces/skillsets.js.map +0 -1
- package/dist/react-native/generated/service/operationsInterfaces/synonymMaps.d.ts +0 -35
- package/dist/react-native/generated/service/operationsInterfaces/synonymMaps.d.ts.map +0 -1
- package/dist/react-native/generated/service/operationsInterfaces/synonymMaps.js +0 -9
- package/dist/react-native/generated/service/operationsInterfaces/synonymMaps.js.map +0 -1
- package/dist/react-native/generated/service/searchServiceClient.d.ts +0 -36
- package/dist/react-native/generated/service/searchServiceClient.d.ts.map +0 -1
- package/dist/react-native/generated/service/searchServiceClient.js +0 -145
- package/dist/react-native/generated/service/searchServiceClient.js.map +0 -1
- package/dist/react-native/synonymMapHelper.d.ts +0 -10
- package/dist/react-native/synonymMapHelper.d.ts.map +0 -1
- package/dist/react-native/synonymMapHelper.js +0 -24
- package/dist/react-native/synonymMapHelper.js.map +0 -1
|
@@ -0,0 +1,7 @@
|
|
|
1
|
+
{
|
|
2
|
+
"version": 3,
|
|
3
|
+
"sources": ["../../../../../../../src/models/azure/search/documents/indexes/models.ts"],
|
|
4
|
+
"sourcesContent": ["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT License.\n\nimport { buildNewlineCollection } from \"../../../../../static-helpers/serialization/build-newline-collection.js\";\nimport { buildPipeCollection } from \"../../../../../static-helpers/serialization/build-pipe-collection.js\";\nimport { areAllPropsUndefined } from \"../../../../../static-helpers/serialization/check-prop-undefined.js\";\nimport { parseNewlineCollection } from \"../../../../../static-helpers/serialization/parse-newline-collection.js\";\nimport { parsePipeCollection } from \"../../../../../static-helpers/serialization/parse-pipe-collection.js\";\nimport { serializeRecord } from \"../../../../../static-helpers/serialization/serialize-record.js\";\nimport type { KnowledgeSourceIngestionParameters } from \"../knowledgeBases/models.js\";\nimport {\n knowledgeSourceIngestionParametersSerializer,\n knowledgeSourceIngestionParametersDeserializer,\n} from \"../knowledgeBases/models.js\";\n\n/**\n * This file contains only generated model types and their (de)serializers.\n * Disable the following rules for internal models with '_' prefix and deserializers which require 'any' for raw JSON input.\n */\n/* eslint-disable @typescript-eslint/naming-convention */\n/* eslint-disable @typescript-eslint/explicit-module-boundary-types */\n/** Represents a synonym map definition. */\nexport interface SynonymMap {\n /** The name of the synonym map. */\n name: string;\n /** The format of the synonym map. Only the 'solr' format is currently supported. */\n format: \"solr\";\n /** A series of synonym rules in the specified synonym map format. The rules must be separated by newlines. */\n synonyms: string[];\n /** A description of an encryption key that you create in Azure Key Vault. This key is used to provide an additional level of encryption-at-rest for your data when you want full assurance that no one, not even Microsoft, can decrypt your data. 
Once you have encrypted your data, it will always remain encrypted. The search service will ignore attempts to set this property to null. You can change this property as needed if you want to rotate your encryption key; Your data will be unaffected. Encryption with customer-managed keys is not available for free search services, and is only available for paid services created on or after January 1, 2019. */\n encryptionKey?: SearchResourceEncryptionKey;\n /** The ETag of the synonym map. */\n eTag?: string;\n}\n\nexport function synonymMapSerializer(item: SynonymMap): any {\n return {\n name: item[\"name\"],\n format: item[\"format\"],\n synonyms: buildNewlineCollection(\n item[\"synonyms\"].map((p: any) => {\n return p;\n }),\n ),\n encryptionKey: !item[\"encryptionKey\"]\n ? item[\"encryptionKey\"]\n : searchResourceEncryptionKeySerializer(item[\"encryptionKey\"]),\n \"@odata.etag\": item[\"eTag\"],\n };\n}\n\nexport function synonymMapDeserializer(item: any): SynonymMap {\n return {\n name: item[\"name\"],\n format: item[\"format\"],\n synonyms: parseNewlineCollection(item[\"synonyms\"]),\n encryptionKey: !item[\"encryptionKey\"]\n ? item[\"encryptionKey\"]\n : searchResourceEncryptionKeyDeserializer(item[\"encryptionKey\"]),\n eTag: item[\"@odata.etag\"],\n };\n}\n\n/** A customer-managed encryption key in Azure Key Vault. Keys that you create and manage can be used to encrypt or decrypt data-at-rest, such as indexes and synonym maps. */\nexport interface SearchResourceEncryptionKey {\n /** The name of your Azure Key Vault key to be used to encrypt your data at rest. */\n keyName: string;\n /** The version of your Azure Key Vault key to be used to encrypt your data at rest. */\n keyVersion?: string;\n /** The URI of your Azure Key Vault, also referred to as DNS name, that contains the key to be used to encrypt your data at rest. An example URI might be `https://my-keyvault-name.vault.azure.net`. 
*/\n vaultUri: string;\n /** An explicit managed identity to use for this encryption key. If not specified and the access credentials property is null, the system-assigned managed identity is used. On update to the resource, if the explicit identity is unspecified, it remains unchanged. If \"none\" is specified, the value of this property is cleared. */\n identity?: SearchIndexerDataIdentityUnion;\n /** An AAD Application ID that was granted the required access permissions to the Azure Key Vault that is to be used when encrypting your data at rest. The Application ID should not be confused with the Object ID for your AAD Application. */\n applicationId?: string;\n /** The authentication key of the specified AAD application. */\n applicationSecret?: string;\n}\n\nexport function searchResourceEncryptionKeySerializer(item: SearchResourceEncryptionKey): any {\n return {\n keyVaultKeyName: item[\"keyName\"],\n keyVaultKeyVersion: item[\"keyVersion\"],\n keyVaultUri: item[\"vaultUri\"],\n accessCredentials: areAllPropsUndefined(item, [\"applicationId\", \"applicationSecret\"])\n ? undefined\n : _searchResourceEncryptionKeyAccessCredentialsSerializer(item),\n identity: !item[\"identity\"]\n ? item[\"identity\"]\n : searchIndexerDataIdentityUnionSerializer(item[\"identity\"]),\n };\n}\n\nexport function searchResourceEncryptionKeyDeserializer(item: any): SearchResourceEncryptionKey {\n return {\n keyName: item[\"keyVaultKeyName\"],\n keyVersion: item[\"keyVaultKeyVersion\"],\n vaultUri: item[\"keyVaultUri\"],\n ...(!item[\"accessCredentials\"]\n ? item[\"accessCredentials\"]\n : _searchResourceEncryptionKeyAccessCredentialsDeserializer(item[\"accessCredentials\"])),\n identity: !item[\"identity\"]\n ? item[\"identity\"]\n : searchIndexerDataIdentityUnionDeserializer(item[\"identity\"]),\n };\n}\n\n/** Credentials of a registered application created for your search service, used for authenticated access to the encryption keys stored in Azure Key Vault. 
*/\nexport interface AzureActiveDirectoryApplicationCredentials {\n /** An AAD Application ID that was granted the required access permissions to the Azure Key Vault that is to be used when encrypting your data at rest. The Application ID should not be confused with the Object ID for your AAD Application. */\n applicationId: string;\n /** The authentication key of the specified AAD application. */\n applicationSecret?: string;\n}\n\nexport function azureActiveDirectoryApplicationCredentialsSerializer(\n item: AzureActiveDirectoryApplicationCredentials,\n): any {\n return { applicationId: item[\"applicationId\"], applicationSecret: item[\"applicationSecret\"] };\n}\n\nexport function azureActiveDirectoryApplicationCredentialsDeserializer(\n item: any,\n): AzureActiveDirectoryApplicationCredentials {\n return {\n applicationId: item[\"applicationId\"],\n applicationSecret: item[\"applicationSecret\"],\n };\n}\n\n/** Abstract base type for data identities. */\nexport interface SearchIndexerDataIdentity {\n /** A URI fragment specifying the type of identity. 
*/\n /** The discriminator possible values: #Microsoft.Azure.Search.DataNoneIdentity, #Microsoft.Azure.Search.DataUserAssignedIdentity */\n odatatype: string;\n}\n\nexport function searchIndexerDataIdentitySerializer(item: SearchIndexerDataIdentity): any {\n return { \"@odata.type\": item[\"odatatype\"] };\n}\n\nexport function searchIndexerDataIdentityDeserializer(item: any): SearchIndexerDataIdentity {\n return {\n odatatype: item[\"@odata.type\"],\n };\n}\n\n/** Alias for SearchIndexerDataIdentityUnion */\nexport type SearchIndexerDataIdentityUnion =\n | SearchIndexerDataNoneIdentity\n | SearchIndexerDataUserAssignedIdentity\n | SearchIndexerDataIdentity;\n\nexport function searchIndexerDataIdentityUnionSerializer(\n item: SearchIndexerDataIdentityUnion,\n): any {\n switch (item.odatatype) {\n case \"#Microsoft.Azure.Search.DataNoneIdentity\":\n return searchIndexerDataNoneIdentitySerializer(item as SearchIndexerDataNoneIdentity);\n\n case \"#Microsoft.Azure.Search.DataUserAssignedIdentity\":\n return searchIndexerDataUserAssignedIdentitySerializer(\n item as SearchIndexerDataUserAssignedIdentity,\n );\n\n default:\n return searchIndexerDataIdentitySerializer(item);\n }\n}\n\nexport function searchIndexerDataIdentityUnionDeserializer(\n item: any,\n): SearchIndexerDataIdentityUnion {\n switch (item[\"@odata.type\"]) {\n case \"#Microsoft.Azure.Search.DataNoneIdentity\":\n return searchIndexerDataNoneIdentityDeserializer(item as SearchIndexerDataNoneIdentity);\n\n case \"#Microsoft.Azure.Search.DataUserAssignedIdentity\":\n return searchIndexerDataUserAssignedIdentityDeserializer(\n item as SearchIndexerDataUserAssignedIdentity,\n );\n\n default:\n return searchIndexerDataIdentityDeserializer(item);\n }\n}\n\n/** Clears the identity property of a datasource. */\nexport interface SearchIndexerDataNoneIdentity extends SearchIndexerDataIdentity {\n /** The discriminator for derived types. 
*/\n odatatype: \"#Microsoft.Azure.Search.DataNoneIdentity\";\n}\n\nexport function searchIndexerDataNoneIdentitySerializer(item: SearchIndexerDataNoneIdentity): any {\n return { \"@odata.type\": item[\"odatatype\"] };\n}\n\nexport function searchIndexerDataNoneIdentityDeserializer(\n item: any,\n): SearchIndexerDataNoneIdentity {\n return {\n odatatype: item[\"@odata.type\"],\n };\n}\n\n/** Specifies the identity for a datasource to use. */\nexport interface SearchIndexerDataUserAssignedIdentity extends SearchIndexerDataIdentity {\n /** The fully qualified Azure resource Id of a user assigned managed identity typically in the form \"/subscriptions/12345678-1234-1234-1234-1234567890ab/resourceGroups/rg/providers/Microsoft.ManagedIdentity/userAssignedIdentities/myId\" that should have been assigned to the search service. */\n resourceId: string;\n /** A URI fragment specifying the type of identity. */\n odatatype: \"#Microsoft.Azure.Search.DataUserAssignedIdentity\";\n}\n\nexport function searchIndexerDataUserAssignedIdentitySerializer(\n item: SearchIndexerDataUserAssignedIdentity,\n): any {\n return { \"@odata.type\": item[\"odatatype\"], userAssignedIdentity: item[\"resourceId\"] };\n}\n\nexport function searchIndexerDataUserAssignedIdentityDeserializer(\n item: any,\n): SearchIndexerDataUserAssignedIdentity {\n return {\n odatatype: item[\"@odata.type\"],\n resourceId: item[\"userAssignedIdentity\"],\n };\n}\n\n/** Response from a List SynonymMaps request. If successful, it includes the full definitions of all synonym maps. */\nexport interface ListSynonymMapsResult {\n /** The synonym maps in the Search service. 
*/\n readonly synonymMaps: SynonymMap[];\n}\n\nexport function listSynonymMapsResultDeserializer(item: any): ListSynonymMapsResult {\n return {\n synonymMaps: synonymMapArrayDeserializer(item[\"value\"]),\n };\n}\n\nexport function synonymMapArraySerializer(result: Array<SynonymMap>): any[] {\n return result.map((item) => {\n return synonymMapSerializer(item);\n });\n}\n\nexport function synonymMapArrayDeserializer(result: Array<SynonymMap>): any[] {\n return result.map((item) => {\n return synonymMapDeserializer(item);\n });\n}\n\n/** Represents a search index definition, which describes the fields and search behavior of an index. */\nexport interface SearchIndex {\n /** The name of the index. */\n name: string;\n /** The description of the index. */\n description?: string;\n /** The fields of the index. */\n fields: SearchField[];\n /** The scoring profiles for the index. */\n scoringProfiles?: ScoringProfile[];\n /** The name of the scoring profile to use if none is specified in the query. If this property is not set and no scoring profile is specified in the query, then default scoring (tf-idf) will be used. */\n defaultScoringProfile?: string;\n /** Options to control Cross-Origin Resource Sharing (CORS) for the index. */\n corsOptions?: CorsOptions;\n /** The suggesters for the index. */\n suggesters?: SearchSuggester[];\n /** The analyzers for the index. */\n analyzers?: LexicalAnalyzerUnion[];\n /** The tokenizers for the index. */\n tokenizers?: LexicalTokenizerUnion[];\n /** The token filters for the index. */\n tokenFilters?: TokenFilterUnion[];\n /** The character filters for the index. */\n charFilters?: CharFilterUnion[];\n /** The normalizers for the index. */\n normalizers?: LexicalNormalizerUnion[];\n /** A description of an encryption key that you create in Azure Key Vault. This key is used to provide an additional level of encryption-at-rest for your data when you want full assurance that no one, not even Microsoft, can decrypt your data. 
Once you have encrypted your data, it will always remain encrypted. The search service will ignore attempts to set this property to null. You can change this property as needed if you want to rotate your encryption key; Your data will be unaffected. Encryption with customer-managed keys is not available for free search services, and is only available for paid services created on or after January 1, 2019. */\n encryptionKey?: SearchResourceEncryptionKey;\n /** The type of similarity algorithm to be used when scoring and ranking the documents matching a search query. The similarity algorithm can only be defined at index creation time and cannot be modified on existing indexes. If null, the ClassicSimilarity algorithm is used. */\n similarity?: SimilarityAlgorithmUnion;\n /** Defines parameters for a search index that influence semantic capabilities. */\n semanticSearch?: SemanticSearch;\n /** Contains configuration options related to vector search. */\n vectorSearch?: VectorSearch;\n /** The ETag of the index. */\n eTag?: string;\n}\n\nexport function searchIndexSerializer(item: SearchIndex): any {\n return {\n name: item[\"name\"],\n description: item[\"description\"],\n fields: searchFieldArraySerializer(item[\"fields\"]),\n scoringProfiles: !item[\"scoringProfiles\"]\n ? item[\"scoringProfiles\"]\n : scoringProfileArraySerializer(item[\"scoringProfiles\"]),\n defaultScoringProfile: item[\"defaultScoringProfile\"],\n corsOptions: !item[\"corsOptions\"]\n ? item[\"corsOptions\"]\n : corsOptionsSerializer(item[\"corsOptions\"]),\n suggesters: !item[\"suggesters\"]\n ? item[\"suggesters\"]\n : searchSuggesterArraySerializer(item[\"suggesters\"]),\n analyzers: !item[\"analyzers\"]\n ? item[\"analyzers\"]\n : lexicalAnalyzerUnionArraySerializer(item[\"analyzers\"]),\n tokenizers: !item[\"tokenizers\"]\n ? item[\"tokenizers\"]\n : lexicalTokenizerUnionArraySerializer(item[\"tokenizers\"]),\n tokenFilters: !item[\"tokenFilters\"]\n ? 
item[\"tokenFilters\"]\n : tokenFilterUnionArraySerializer(item[\"tokenFilters\"]),\n charFilters: !item[\"charFilters\"]\n ? item[\"charFilters\"]\n : charFilterUnionArraySerializer(item[\"charFilters\"]),\n normalizers: !item[\"normalizers\"]\n ? item[\"normalizers\"]\n : lexicalNormalizerUnionArraySerializer(item[\"normalizers\"]),\n encryptionKey: !item[\"encryptionKey\"]\n ? item[\"encryptionKey\"]\n : searchResourceEncryptionKeySerializer(item[\"encryptionKey\"]),\n similarity: !item[\"similarity\"]\n ? item[\"similarity\"]\n : similarityAlgorithmUnionSerializer(item[\"similarity\"]),\n semantic: !item[\"semanticSearch\"]\n ? item[\"semanticSearch\"]\n : semanticSearchSerializer(item[\"semanticSearch\"]),\n vectorSearch: !item[\"vectorSearch\"]\n ? item[\"vectorSearch\"]\n : vectorSearchSerializer(item[\"vectorSearch\"]),\n \"@odata.etag\": item[\"eTag\"],\n };\n}\n\nexport function searchIndexDeserializer(item: any): SearchIndex {\n return {\n name: item[\"name\"],\n description: item[\"description\"],\n fields: searchFieldArrayDeserializer(item[\"fields\"]),\n scoringProfiles: !item[\"scoringProfiles\"]\n ? item[\"scoringProfiles\"]\n : scoringProfileArrayDeserializer(item[\"scoringProfiles\"]),\n defaultScoringProfile: item[\"defaultScoringProfile\"],\n corsOptions: !item[\"corsOptions\"]\n ? item[\"corsOptions\"]\n : corsOptionsDeserializer(item[\"corsOptions\"]),\n suggesters: !item[\"suggesters\"]\n ? item[\"suggesters\"]\n : searchSuggesterArrayDeserializer(item[\"suggesters\"]),\n analyzers: !item[\"analyzers\"]\n ? item[\"analyzers\"]\n : lexicalAnalyzerUnionArrayDeserializer(item[\"analyzers\"]),\n tokenizers: !item[\"tokenizers\"]\n ? item[\"tokenizers\"]\n : lexicalTokenizerUnionArrayDeserializer(item[\"tokenizers\"]),\n tokenFilters: !item[\"tokenFilters\"]\n ? item[\"tokenFilters\"]\n : tokenFilterUnionArrayDeserializer(item[\"tokenFilters\"]),\n charFilters: !item[\"charFilters\"]\n ? 
item[\"charFilters\"]\n : charFilterUnionArrayDeserializer(item[\"charFilters\"]),\n normalizers: !item[\"normalizers\"]\n ? item[\"normalizers\"]\n : lexicalNormalizerUnionArrayDeserializer(item[\"normalizers\"]),\n encryptionKey: !item[\"encryptionKey\"]\n ? item[\"encryptionKey\"]\n : searchResourceEncryptionKeyDeserializer(item[\"encryptionKey\"]),\n similarity: !item[\"similarity\"]\n ? item[\"similarity\"]\n : similarityAlgorithmUnionDeserializer(item[\"similarity\"]),\n semanticSearch: !item[\"semantic\"]\n ? item[\"semantic\"]\n : semanticSearchDeserializer(item[\"semantic\"]),\n vectorSearch: !item[\"vectorSearch\"]\n ? item[\"vectorSearch\"]\n : vectorSearchDeserializer(item[\"vectorSearch\"]),\n eTag: item[\"@odata.etag\"],\n };\n}\n\nexport function searchFieldArraySerializer(result: Array<SearchField>): any[] {\n return result.map((item) => {\n return searchFieldSerializer(item);\n });\n}\n\nexport function searchFieldArrayDeserializer(result: Array<SearchField>): any[] {\n return result.map((item) => {\n return searchFieldDeserializer(item);\n });\n}\n\n/** Represents a field in an index definition, which describes the name, data type, and search behavior of a field. */\nexport interface SearchField {\n /** The name of the field, which must be unique within the fields collection of the index or parent field. */\n name: string;\n /** The data type of the field. */\n type: SearchFieldDataType;\n /** A value indicating whether the field uniquely identifies documents in the index. Exactly one top-level field in each index must be chosen as the key field and it must be of type Edm.String. Key fields can be used to look up documents directly and update or delete specific documents. Default is false for simple fields and null for complex fields. */\n key?: boolean;\n /** A value indicating whether the field can be returned in a search result. 
You can disable this option if you want to use a field (for example, margin) as a filter, sorting, or scoring mechanism but do not want the field to be visible to the end user. This property must be true for key fields, and it must be null for complex fields. This property can be changed on existing fields. Enabling this property does not cause any increase in index storage requirements. Default is true for simple fields, false for vector fields, and null for complex fields. */\n retrievable?: boolean;\n /** An immutable value indicating whether the field will be persisted separately on disk to be returned in a search result. You can disable this option if you don't plan to return the field contents in a search response to save on storage overhead. This can only be set during index creation and only for vector fields. This property cannot be changed for existing fields or set as false for new fields. If this property is set as false, the property 'retrievable' must also be set to false. This property must be true or unset for key fields, for new fields, and for non-vector fields, and it must be null for complex fields. Disabling this property will reduce index storage requirements. The default is true for vector fields. */\n stored?: boolean;\n /** A value indicating whether the field is full-text searchable. This means it will undergo analysis such as word-breaking during indexing. If you set a searchable field to a value like \"sunny day\", internally it will be split into the individual tokens \"sunny\" and \"day\". This enables full-text searches for these terms. Fields of type Edm.String or Collection(Edm.String) are searchable by default. This property must be false for simple fields of other non-string data types, and it must be null for complex fields. Note: searchable fields consume extra space in your index to accommodate additional tokenized versions of the field value for full-text searches. 
If you want to save space in your index and you don't need a field to be included in searches, set searchable to false. */\n searchable?: boolean;\n /** A value indicating whether to enable the field to be referenced in $filter queries. filterable differs from searchable in how strings are handled. Fields of type Edm.String or Collection(Edm.String) that are filterable do not undergo word-breaking, so comparisons are for exact matches only. For example, if you set such a field f to \"sunny day\", $filter=f eq 'sunny' will find no matches, but $filter=f eq 'sunny day' will. This property must be null for complex fields. Default is true for simple fields and null for complex fields. */\n filterable?: boolean;\n /** A value indicating whether to enable the field to be referenced in $orderby expressions. By default, the search engine sorts results by score, but in many experiences users will want to sort by fields in the documents. A simple field can be sortable only if it is single-valued (it has a single value in the scope of the parent document). Simple collection fields cannot be sortable, since they are multi-valued. Simple sub-fields of complex collections are also multi-valued, and therefore cannot be sortable. This is true whether it's an immediate parent field, or an ancestor field, that's the complex collection. Complex fields cannot be sortable and the sortable property must be null for such fields. The default for sortable is true for single-valued simple fields, false for multi-valued simple fields, and null for complex fields. */\n sortable?: boolean;\n /** A value indicating whether to enable the field to be referenced in facet queries. Typically used in a presentation of search results that includes hit count by category (for example, search for digital cameras and see hits by brand, by megapixels, by price, and so on). This property must be null for complex fields. Fields of type Edm.GeographyPoint or Collection(Edm.GeographyPoint) cannot be facetable. 
Default is true for all other simple fields. */\n facetable?: boolean;\n /** The name of the analyzer to use for the field. This option can be used only with searchable fields and it can't be set together with either searchAnalyzer or indexAnalyzer. Once the analyzer is chosen, it cannot be changed for the field. Must be null for complex fields. */\n analyzerName?: LexicalAnalyzerName;\n /** The name of the analyzer used at search time for the field. This option can be used only with searchable fields. It must be set together with indexAnalyzer and it cannot be set together with the analyzer option. This property cannot be set to the name of a language analyzer; use the analyzer property instead if you need a language analyzer. This analyzer can be updated on an existing field. Must be null for complex fields. */\n searchAnalyzerName?: LexicalAnalyzerName;\n /** The name of the analyzer used at indexing time for the field. This option can be used only with searchable fields. It must be set together with searchAnalyzer and it cannot be set together with the analyzer option. This property cannot be set to the name of a language analyzer; use the analyzer property instead if you need a language analyzer. Once the analyzer is chosen, it cannot be changed for the field. Must be null for complex fields. */\n indexAnalyzerName?: LexicalAnalyzerName;\n /** The name of the normalizer to use for the field. This option can be used only with fields with filterable, sortable, or facetable enabled. Once the normalizer is chosen, it cannot be changed for the field. Must be null for complex fields. */\n normalizerName?: LexicalNormalizerName;\n /** The dimensionality of the vector field. */\n vectorSearchDimensions?: number;\n /** The name of the vector search profile that specifies the algorithm and vectorizer to use when searching the vector field. */\n vectorSearchProfileName?: string;\n /** The encoding format to interpret the field contents. 
*/\n vectorEncodingFormat?: VectorEncodingFormat;\n /** A list of the names of synonym maps to associate with this field. This option can be used only with searchable fields. Currently only one synonym map per field is supported. Assigning a synonym map to a field ensures that query terms targeting that field are expanded at query-time using the rules in the synonym map. This attribute can be changed on existing fields. Must be null or an empty collection for complex fields. */\n synonymMapNames?: string[];\n /** A list of sub-fields if this is a field of type Edm.ComplexType or Collection(Edm.ComplexType). Must be null or empty for simple fields. */\n fields?: SearchField[];\n}\n\nexport function searchFieldSerializer(item: SearchField): any {\n return {\n name: item[\"name\"],\n type: item[\"type\"],\n key: item[\"key\"],\n retrievable: item[\"retrievable\"],\n stored: item[\"stored\"],\n searchable: item[\"searchable\"],\n filterable: item[\"filterable\"],\n sortable: item[\"sortable\"],\n facetable: item[\"facetable\"],\n analyzer: item[\"analyzerName\"],\n searchAnalyzer: item[\"searchAnalyzerName\"],\n indexAnalyzer: item[\"indexAnalyzerName\"],\n normalizer: item[\"normalizerName\"],\n dimensions: item[\"vectorSearchDimensions\"],\n vectorSearchProfile: item[\"vectorSearchProfileName\"],\n vectorEncoding: item[\"vectorEncodingFormat\"],\n synonymMaps: !item[\"synonymMapNames\"]\n ? item[\"synonymMapNames\"]\n : item[\"synonymMapNames\"].map((p: any) => {\n return p;\n }),\n fields: !item[\"fields\"] ? 
item[\"fields\"] : searchFieldArraySerializer(item[\"fields\"]),\n };\n}\n\nexport function searchFieldDeserializer(item: any): SearchField {\n return {\n name: item[\"name\"],\n type: item[\"type\"],\n key: item[\"key\"],\n retrievable: item[\"retrievable\"],\n stored: item[\"stored\"],\n searchable: item[\"searchable\"],\n filterable: item[\"filterable\"],\n sortable: item[\"sortable\"],\n facetable: item[\"facetable\"],\n analyzerName: item[\"analyzer\"],\n searchAnalyzerName: item[\"searchAnalyzer\"],\n indexAnalyzerName: item[\"indexAnalyzer\"],\n normalizerName: item[\"normalizer\"],\n vectorSearchDimensions: item[\"dimensions\"],\n vectorSearchProfileName: item[\"vectorSearchProfile\"],\n vectorEncodingFormat: item[\"vectorEncoding\"],\n synonymMapNames: !item[\"synonymMaps\"]\n ? item[\"synonymMaps\"]\n : item[\"synonymMaps\"].map((p: any) => {\n return p;\n }),\n fields: !item[\"fields\"] ? item[\"fields\"] : searchFieldArrayDeserializer(item[\"fields\"]),\n };\n}\n\n/** Defines the data type of a field in a search index. */\nexport enum KnownSearchFieldDataType {\n /** Indicates that a field contains a string. */\n String = \"Edm.String\",\n /** Indicates that a field contains a 32-bit signed integer. */\n Int32 = \"Edm.Int32\",\n /** Indicates that a field contains a 64-bit signed integer. */\n Int64 = \"Edm.Int64\",\n /** Indicates that a field contains an IEEE double-precision floating point number. */\n Double = \"Edm.Double\",\n /** Indicates that a field contains a Boolean value (true or false). */\n Boolean = \"Edm.Boolean\",\n /** Indicates that a field contains a date/time value, including timezone information. */\n DateTimeOffset = \"Edm.DateTimeOffset\",\n /** Indicates that a field contains a geo-location in terms of longitude and latitude. */\n GeographyPoint = \"Edm.GeographyPoint\",\n /** Indicates that a field contains one or more complex objects that in turn have sub-fields of other types. 
*/\n Complex = \"Edm.ComplexType\",\n /** Indicates that a field contains a single-precision floating point number. This is only valid when used with Collection(Edm.Single). */\n Single = \"Edm.Single\",\n /** Indicates that a field contains a half-precision floating point number. This is only valid when used with Collection(Edm.Half). */\n Half = \"Edm.Half\",\n /** Indicates that a field contains a 16-bit signed integer. This is only valid when used with Collection(Edm.Int16). */\n Int16 = \"Edm.Int16\",\n /** Indicates that a field contains a 8-bit signed integer. This is only valid when used with Collection(Edm.SByte). */\n SByte = \"Edm.SByte\",\n /** Indicates that a field contains a 8-bit unsigned integer. This is only valid when used with Collection(Edm.Byte). */\n Byte = \"Edm.Byte\",\n}\n\n/**\n * Defines the data type of a field in a search index. \\\n * {@link KnownSearchFieldDataType} can be used interchangeably with SearchFieldDataType,\n * this enum contains the known values that the service supports.\n * ### Known values supported by the service\n * **Edm.String**: Indicates that a field contains a string. \\\n * **Edm.Int32**: Indicates that a field contains a 32-bit signed integer. \\\n * **Edm.Int64**: Indicates that a field contains a 64-bit signed integer. \\\n * **Edm.Double**: Indicates that a field contains an IEEE double-precision floating point number. \\\n * **Edm.Boolean**: Indicates that a field contains a Boolean value (true or false). \\\n * **Edm.DateTimeOffset**: Indicates that a field contains a date\\/time value, including timezone information. \\\n * **Edm.GeographyPoint**: Indicates that a field contains a geo-location in terms of longitude and latitude. \\\n * **Edm.ComplexType**: Indicates that a field contains one or more complex objects that in turn have sub-fields of other types. \\\n * **Edm.Single**: Indicates that a field contains a single-precision floating point number. 
This is only valid when used with Collection(Edm.Single). \\\n * **Edm.Half**: Indicates that a field contains a half-precision floating point number. This is only valid when used with Collection(Edm.Half). \\\n * **Edm.Int16**: Indicates that a field contains a 16-bit signed integer. This is only valid when used with Collection(Edm.Int16). \\\n * **Edm.SByte**: Indicates that a field contains a 8-bit signed integer. This is only valid when used with Collection(Edm.SByte). \\\n * **Edm.Byte**: Indicates that a field contains a 8-bit unsigned integer. This is only valid when used with Collection(Edm.Byte).\n */\nexport type SearchFieldDataType = string;\n\n/** Defines the names of all text analyzers supported by the search engine. */\nexport enum KnownLexicalAnalyzerName {\n /** Microsoft analyzer for Arabic. */\n ArMicrosoft = \"ar.microsoft\",\n /** Lucene analyzer for Arabic. */\n ArLucene = \"ar.lucene\",\n /** Lucene analyzer for Armenian. */\n HyLucene = \"hy.lucene\",\n /** Microsoft analyzer for Bangla. */\n BnMicrosoft = \"bn.microsoft\",\n /** Lucene analyzer for Basque. */\n EuLucene = \"eu.lucene\",\n /** Microsoft analyzer for Bulgarian. */\n BgMicrosoft = \"bg.microsoft\",\n /** Lucene analyzer for Bulgarian. */\n BgLucene = \"bg.lucene\",\n /** Microsoft analyzer for Catalan. */\n CaMicrosoft = \"ca.microsoft\",\n /** Lucene analyzer for Catalan. */\n CaLucene = \"ca.lucene\",\n /** Microsoft analyzer for Chinese (Simplified). */\n ZhHansMicrosoft = \"zh-Hans.microsoft\",\n /** Lucene analyzer for Chinese (Simplified). */\n ZhHansLucene = \"zh-Hans.lucene\",\n /** Microsoft analyzer for Chinese (Traditional). */\n ZhHantMicrosoft = \"zh-Hant.microsoft\",\n /** Lucene analyzer for Chinese (Traditional). */\n ZhHantLucene = \"zh-Hant.lucene\",\n /** Microsoft analyzer for Croatian. */\n HrMicrosoft = \"hr.microsoft\",\n /** Microsoft analyzer for Czech. */\n CsMicrosoft = \"cs.microsoft\",\n /** Lucene analyzer for Czech. 
*/\n CsLucene = \"cs.lucene\",\n /** Microsoft analyzer for Danish. */\n DaMicrosoft = \"da.microsoft\",\n /** Lucene analyzer for Danish. */\n DaLucene = \"da.lucene\",\n /** Microsoft analyzer for Dutch. */\n NlMicrosoft = \"nl.microsoft\",\n /** Lucene analyzer for Dutch. */\n NlLucene = \"nl.lucene\",\n /** Microsoft analyzer for English. */\n EnMicrosoft = \"en.microsoft\",\n /** Lucene analyzer for English. */\n EnLucene = \"en.lucene\",\n /** Microsoft analyzer for Estonian. */\n EtMicrosoft = \"et.microsoft\",\n /** Microsoft analyzer for Finnish. */\n FiMicrosoft = \"fi.microsoft\",\n /** Lucene analyzer for Finnish. */\n FiLucene = \"fi.lucene\",\n /** Microsoft analyzer for French. */\n FrMicrosoft = \"fr.microsoft\",\n /** Lucene analyzer for French. */\n FrLucene = \"fr.lucene\",\n /** Lucene analyzer for Galician. */\n GlLucene = \"gl.lucene\",\n /** Microsoft analyzer for German. */\n DeMicrosoft = \"de.microsoft\",\n /** Lucene analyzer for German. */\n DeLucene = \"de.lucene\",\n /** Microsoft analyzer for Greek. */\n ElMicrosoft = \"el.microsoft\",\n /** Lucene analyzer for Greek. */\n ElLucene = \"el.lucene\",\n /** Microsoft analyzer for Gujarati. */\n GuMicrosoft = \"gu.microsoft\",\n /** Microsoft analyzer for Hebrew. */\n HeMicrosoft = \"he.microsoft\",\n /** Microsoft analyzer for Hindi. */\n HiMicrosoft = \"hi.microsoft\",\n /** Lucene analyzer for Hindi. */\n HiLucene = \"hi.lucene\",\n /** Microsoft analyzer for Hungarian. */\n HuMicrosoft = \"hu.microsoft\",\n /** Lucene analyzer for Hungarian. */\n HuLucene = \"hu.lucene\",\n /** Microsoft analyzer for Icelandic. */\n IsMicrosoft = \"is.microsoft\",\n /** Microsoft analyzer for Indonesian (Bahasa). */\n IdMicrosoft = \"id.microsoft\",\n /** Lucene analyzer for Indonesian. */\n IdLucene = \"id.lucene\",\n /** Lucene analyzer for Irish. */\n GaLucene = \"ga.lucene\",\n /** Microsoft analyzer for Italian. */\n ItMicrosoft = \"it.microsoft\",\n /** Lucene analyzer for Italian. 
*/\n ItLucene = \"it.lucene\",\n /** Microsoft analyzer for Japanese. */\n JaMicrosoft = \"ja.microsoft\",\n /** Lucene analyzer for Japanese. */\n JaLucene = \"ja.lucene\",\n /** Microsoft analyzer for Kannada. */\n KnMicrosoft = \"kn.microsoft\",\n /** Microsoft analyzer for Korean. */\n KoMicrosoft = \"ko.microsoft\",\n /** Lucene analyzer for Korean. */\n KoLucene = \"ko.lucene\",\n /** Microsoft analyzer for Latvian. */\n LvMicrosoft = \"lv.microsoft\",\n /** Lucene analyzer for Latvian. */\n LvLucene = \"lv.lucene\",\n /** Microsoft analyzer for Lithuanian. */\n LtMicrosoft = \"lt.microsoft\",\n /** Microsoft analyzer for Malayalam. */\n MlMicrosoft = \"ml.microsoft\",\n /** Microsoft analyzer for Malay (Latin). */\n MsMicrosoft = \"ms.microsoft\",\n /** Microsoft analyzer for Marathi. */\n MrMicrosoft = \"mr.microsoft\",\n /** Microsoft analyzer for Norwegian (Bokmål). */\n NbMicrosoft = \"nb.microsoft\",\n /** Lucene analyzer for Norwegian. */\n NoLucene = \"no.lucene\",\n /** Lucene analyzer for Persian. */\n FaLucene = \"fa.lucene\",\n /** Microsoft analyzer for Polish. */\n PlMicrosoft = \"pl.microsoft\",\n /** Lucene analyzer for Polish. */\n PlLucene = \"pl.lucene\",\n /** Microsoft analyzer for Portuguese (Brazil). */\n PtBrMicrosoft = \"pt-BR.microsoft\",\n /** Lucene analyzer for Portuguese (Brazil). */\n PtBrLucene = \"pt-BR.lucene\",\n /** Microsoft analyzer for Portuguese (Portugal). */\n PtPtMicrosoft = \"pt-PT.microsoft\",\n /** Lucene analyzer for Portuguese (Portugal). */\n PtPtLucene = \"pt-PT.lucene\",\n /** Microsoft analyzer for Punjabi. */\n PaMicrosoft = \"pa.microsoft\",\n /** Microsoft analyzer for Romanian. */\n RoMicrosoft = \"ro.microsoft\",\n /** Lucene analyzer for Romanian. */\n RoLucene = \"ro.lucene\",\n /** Microsoft analyzer for Russian. */\n RuMicrosoft = \"ru.microsoft\",\n /** Lucene analyzer for Russian. */\n RuLucene = \"ru.lucene\",\n /** Microsoft analyzer for Serbian (Cyrillic). 
*/\n SrCyrillicMicrosoft = \"sr-cyrillic.microsoft\",\n /** Microsoft analyzer for Serbian (Latin). */\n SrLatinMicrosoft = \"sr-latin.microsoft\",\n /** Microsoft analyzer for Slovak. */\n SkMicrosoft = \"sk.microsoft\",\n /** Microsoft analyzer for Slovenian. */\n SlMicrosoft = \"sl.microsoft\",\n /** Microsoft analyzer for Spanish. */\n EsMicrosoft = \"es.microsoft\",\n /** Lucene analyzer for Spanish. */\n EsLucene = \"es.lucene\",\n /** Microsoft analyzer for Swedish. */\n SvMicrosoft = \"sv.microsoft\",\n /** Lucene analyzer for Swedish. */\n SvLucene = \"sv.lucene\",\n /** Microsoft analyzer for Tamil. */\n TaMicrosoft = \"ta.microsoft\",\n /** Microsoft analyzer for Telugu. */\n TeMicrosoft = \"te.microsoft\",\n /** Microsoft analyzer for Thai. */\n ThMicrosoft = \"th.microsoft\",\n /** Lucene analyzer for Thai. */\n ThLucene = \"th.lucene\",\n /** Microsoft analyzer for Turkish. */\n TrMicrosoft = \"tr.microsoft\",\n /** Lucene analyzer for Turkish. */\n TrLucene = \"tr.lucene\",\n /** Microsoft analyzer for Ukrainian. */\n UkMicrosoft = \"uk.microsoft\",\n /** Microsoft analyzer for Urdu. */\n UrMicrosoft = \"ur.microsoft\",\n /** Microsoft analyzer for Vietnamese. */\n ViMicrosoft = \"vi.microsoft\",\n /** Standard Lucene analyzer. */\n StandardLucene = \"standard.lucene\",\n /** Standard ASCII Folding Lucene analyzer. See https://learn.microsoft.com/rest/api/searchservice/Custom-analyzers-in-Azure-Search#Analyzers */\n StandardAsciiFoldingLucene = \"standardasciifolding.lucene\",\n /** Treats the entire content of a field as a single token. This is useful for data like zip codes, ids, and some product names. See http://lucene.apache.org/core/4_10_3/analyzers-common/org/apache/lucene/analysis/core/KeywordAnalyzer.html */\n Keyword = \"keyword\",\n /** Flexibly separates text into terms via a regular expression pattern. 
See http://lucene.apache.org/core/4_10_3/analyzers-common/org/apache/lucene/analysis/miscellaneous/PatternAnalyzer.html */\n Pattern = \"pattern\",\n /** Divides text at non-letters and converts them to lower case. See http://lucene.apache.org/core/4_10_3/analyzers-common/org/apache/lucene/analysis/core/SimpleAnalyzer.html */\n Simple = \"simple\",\n /** Divides text at non-letters; Applies the lowercase and stopword token filters. See http://lucene.apache.org/core/4_10_3/analyzers-common/org/apache/lucene/analysis/core/StopAnalyzer.html */\n Stop = \"stop\",\n /** An analyzer that uses the whitespace tokenizer. See http://lucene.apache.org/core/4_10_3/analyzers-common/org/apache/lucene/analysis/core/WhitespaceAnalyzer.html */\n Whitespace = \"whitespace\",\n}\n\n/**\n * Defines the names of all text analyzers supported by the search engine. \\\n * {@link KnownLexicalAnalyzerName} can be used interchangeably with LexicalAnalyzerName,\n * this enum contains the known values that the service supports.\n * ### Known values supported by the service\n * **ar.microsoft**: Microsoft analyzer for Arabic. \\\n * **ar.lucene**: Lucene analyzer for Arabic. \\\n * **hy.lucene**: Lucene analyzer for Armenian. \\\n * **bn.microsoft**: Microsoft analyzer for Bangla. \\\n * **eu.lucene**: Lucene analyzer for Basque. \\\n * **bg.microsoft**: Microsoft analyzer for Bulgarian. \\\n * **bg.lucene**: Lucene analyzer for Bulgarian. \\\n * **ca.microsoft**: Microsoft analyzer for Catalan. \\\n * **ca.lucene**: Lucene analyzer for Catalan. \\\n * **zh-Hans.microsoft**: Microsoft analyzer for Chinese (Simplified). \\\n * **zh-Hans.lucene**: Lucene analyzer for Chinese (Simplified). \\\n * **zh-Hant.microsoft**: Microsoft analyzer for Chinese (Traditional). \\\n * **zh-Hant.lucene**: Lucene analyzer for Chinese (Traditional). \\\n * **hr.microsoft**: Microsoft analyzer for Croatian. \\\n * **cs.microsoft**: Microsoft analyzer for Czech. \\\n * **cs.lucene**: Lucene analyzer for Czech. 
\\\n * **da.microsoft**: Microsoft analyzer for Danish. \\\n * **da.lucene**: Lucene analyzer for Danish. \\\n * **nl.microsoft**: Microsoft analyzer for Dutch. \\\n * **nl.lucene**: Lucene analyzer for Dutch. \\\n * **en.microsoft**: Microsoft analyzer for English. \\\n * **en.lucene**: Lucene analyzer for English. \\\n * **et.microsoft**: Microsoft analyzer for Estonian. \\\n * **fi.microsoft**: Microsoft analyzer for Finnish. \\\n * **fi.lucene**: Lucene analyzer for Finnish. \\\n * **fr.microsoft**: Microsoft analyzer for French. \\\n * **fr.lucene**: Lucene analyzer for French. \\\n * **gl.lucene**: Lucene analyzer for Galician. \\\n * **de.microsoft**: Microsoft analyzer for German. \\\n * **de.lucene**: Lucene analyzer for German. \\\n * **el.microsoft**: Microsoft analyzer for Greek. \\\n * **el.lucene**: Lucene analyzer for Greek. \\\n * **gu.microsoft**: Microsoft analyzer for Gujarati. \\\n * **he.microsoft**: Microsoft analyzer for Hebrew. \\\n * **hi.microsoft**: Microsoft analyzer for Hindi. \\\n * **hi.lucene**: Lucene analyzer for Hindi. \\\n * **hu.microsoft**: Microsoft analyzer for Hungarian. \\\n * **hu.lucene**: Lucene analyzer for Hungarian. \\\n * **is.microsoft**: Microsoft analyzer for Icelandic. \\\n * **id.microsoft**: Microsoft analyzer for Indonesian (Bahasa). \\\n * **id.lucene**: Lucene analyzer for Indonesian. \\\n * **ga.lucene**: Lucene analyzer for Irish. \\\n * **it.microsoft**: Microsoft analyzer for Italian. \\\n * **it.lucene**: Lucene analyzer for Italian. \\\n * **ja.microsoft**: Microsoft analyzer for Japanese. \\\n * **ja.lucene**: Lucene analyzer for Japanese. \\\n * **kn.microsoft**: Microsoft analyzer for Kannada. \\\n * **ko.microsoft**: Microsoft analyzer for Korean. \\\n * **ko.lucene**: Lucene analyzer for Korean. \\\n * **lv.microsoft**: Microsoft analyzer for Latvian. \\\n * **lv.lucene**: Lucene analyzer for Latvian. \\\n * **lt.microsoft**: Microsoft analyzer for Lithuanian. 
\\\n * **ml.microsoft**: Microsoft analyzer for Malayalam. \\\n * **ms.microsoft**: Microsoft analyzer for Malay (Latin). \\\n * **mr.microsoft**: Microsoft analyzer for Marathi. \\\n * **nb.microsoft**: Microsoft analyzer for Norwegian (Bokmål). \\\n * **no.lucene**: Lucene analyzer for Norwegian. \\\n * **fa.lucene**: Lucene analyzer for Persian. \\\n * **pl.microsoft**: Microsoft analyzer for Polish. \\\n * **pl.lucene**: Lucene analyzer for Polish. \\\n * **pt-BR.microsoft**: Microsoft analyzer for Portuguese (Brazil). \\\n * **pt-BR.lucene**: Lucene analyzer for Portuguese (Brazil). \\\n * **pt-PT.microsoft**: Microsoft analyzer for Portuguese (Portugal). \\\n * **pt-PT.lucene**: Lucene analyzer for Portuguese (Portugal). \\\n * **pa.microsoft**: Microsoft analyzer for Punjabi. \\\n * **ro.microsoft**: Microsoft analyzer for Romanian. \\\n * **ro.lucene**: Lucene analyzer for Romanian. \\\n * **ru.microsoft**: Microsoft analyzer for Russian. \\\n * **ru.lucene**: Lucene analyzer for Russian. \\\n * **sr-cyrillic.microsoft**: Microsoft analyzer for Serbian (Cyrillic). \\\n * **sr-latin.microsoft**: Microsoft analyzer for Serbian (Latin). \\\n * **sk.microsoft**: Microsoft analyzer for Slovak. \\\n * **sl.microsoft**: Microsoft analyzer for Slovenian. \\\n * **es.microsoft**: Microsoft analyzer for Spanish. \\\n * **es.lucene**: Lucene analyzer for Spanish. \\\n * **sv.microsoft**: Microsoft analyzer for Swedish. \\\n * **sv.lucene**: Lucene analyzer for Swedish. \\\n * **ta.microsoft**: Microsoft analyzer for Tamil. \\\n * **te.microsoft**: Microsoft analyzer for Telugu. \\\n * **th.microsoft**: Microsoft analyzer for Thai. \\\n * **th.lucene**: Lucene analyzer for Thai. \\\n * **tr.microsoft**: Microsoft analyzer for Turkish. \\\n * **tr.lucene**: Lucene analyzer for Turkish. \\\n * **uk.microsoft**: Microsoft analyzer for Ukrainian. \\\n * **ur.microsoft**: Microsoft analyzer for Urdu. \\\n * **vi.microsoft**: Microsoft analyzer for Vietnamese. 
\\\n * **standard.lucene**: Standard Lucene analyzer. \\\n * **standardasciifolding.lucene**: Standard ASCII Folding Lucene analyzer. See https:\\//learn.microsoft.com\\/rest\\/api\\/searchservice\\/Custom-analyzers-in-Azure-Search#Analyzers \\\n * **keyword**: Treats the entire content of a field as a single token. This is useful for data like zip codes, ids, and some product names. See http:\\//lucene.apache.org\\/core\\/4_10_3\\/analyzers-common\\/org\\/apache\\/lucene\\/analysis\\/core\\/KeywordAnalyzer.html \\\n * **pattern**: Flexibly separates text into terms via a regular expression pattern. See http:\\//lucene.apache.org\\/core\\/4_10_3\\/analyzers-common\\/org\\/apache\\/lucene\\/analysis\\/miscellaneous\\/PatternAnalyzer.html \\\n * **simple**: Divides text at non-letters and converts them to lower case. See http:\\//lucene.apache.org\\/core\\/4_10_3\\/analyzers-common\\/org\\/apache\\/lucene\\/analysis\\/core\\/SimpleAnalyzer.html \\\n * **stop**: Divides text at non-letters; Applies the lowercase and stopword token filters. See http:\\//lucene.apache.org\\/core\\/4_10_3\\/analyzers-common\\/org\\/apache\\/lucene\\/analysis\\/core\\/StopAnalyzer.html \\\n * **whitespace**: An analyzer that uses the whitespace tokenizer. See http:\\//lucene.apache.org\\/core\\/4_10_3\\/analyzers-common\\/org\\/apache\\/lucene\\/analysis\\/core\\/WhitespaceAnalyzer.html\n */\nexport type LexicalAnalyzerName = string;\n\n/** Defines the names of all text normalizers supported by the search engine. */\nexport enum KnownLexicalNormalizerName {\n /** Converts alphabetic, numeric, and symbolic Unicode characters which are not in the first 127 ASCII characters (the \"Basic Latin\" Unicode block) into their ASCII equivalents, if such equivalents exist. See http://lucene.apache.org/core/4_10_3/analyzers-common/org/apache/lucene/analysis/miscellaneous/ASCIIFoldingFilter.html */\n AsciiFolding = \"asciifolding\",\n /** Removes elisions. 
For example, \"l'avion\" (the plane) will be converted to \"avion\" (plane). See http://lucene.apache.org/core/4_10_3/analyzers-common/org/apache/lucene/analysis/util/ElisionFilter.html */\n Elision = \"elision\",\n /** Normalizes token text to lowercase. See https://lucene.apache.org/core/6_6_1/analyzers-common/org/apache/lucene/analysis/core/LowerCaseFilter.html */\n Lowercase = \"lowercase\",\n /** Standard normalizer, which consists of lowercase and asciifolding. See http://lucene.apache.org/core/4_10_3/analyzers-common/org/apache/lucene/analysis/reverse/ReverseStringFilter.html */\n Standard = \"standard\",\n /** Normalizes token text to uppercase. See https://lucene.apache.org/core/6_6_1/analyzers-common/org/apache/lucene/analysis/core/UpperCaseFilter.html */\n Uppercase = \"uppercase\",\n}\n\n/**\n * Defines the names of all text normalizers supported by the search engine. \\\n * {@link KnownLexicalNormalizerName} can be used interchangeably with LexicalNormalizerName,\n * this enum contains the known values that the service supports.\n * ### Known values supported by the service\n * **asciifolding**: Converts alphabetic, numeric, and symbolic Unicode characters which are not in the first 127 ASCII characters (the \"Basic Latin\" Unicode block) into their ASCII equivalents, if such equivalents exist. See http:\\//lucene.apache.org\\/core\\/4_10_3\\/analyzers-common\\/org\\/apache\\/lucene\\/analysis\\/miscellaneous\\/ASCIIFoldingFilter.html \\\n * **elision**: Removes elisions. For example, \"l'avion\" (the plane) will be converted to \"avion\" (plane). See http:\\//lucene.apache.org\\/core\\/4_10_3\\/analyzers-common\\/org\\/apache\\/lucene\\/analysis\\/util\\/ElisionFilter.html \\\n * **lowercase**: Normalizes token text to lowercase. See https:\\//lucene.apache.org\\/core\\/6_6_1\\/analyzers-common\\/org\\/apache\\/lucene\\/analysis\\/core\\/LowerCaseFilter.html \\\n * **standard**: Standard normalizer, which consists of lowercase and asciifolding. 
See http://lucene.apache.org/core/4_10_3/analyzers-common/org/apache/lucene/analysis/reverse/ReverseStringFilter.html \
 * **uppercase**: Normalizes token text to uppercase. See https://lucene.apache.org/core/6_6_1/analyzers-common/org/apache/lucene/analysis/core/UpperCaseFilter.html
 */
export type LexicalNormalizerName = string;

/** The encoding format for interpreting vector field contents. */
export enum KnownVectorEncodingFormat {
  /** Encoding format representing bits packed into a wider data type. */
  PackedBit = "packedBit",
}

/**
 * The encoding format for interpreting vector field contents. \
 * {@link KnownVectorEncodingFormat} can be used interchangeably with VectorEncodingFormat,
 * this enum contains the known values that the service supports.
 * ### Known values supported by the service
 * **packedBit**: Encoding format representing bits packed into a wider data type.
 */
export type VectorEncodingFormat = string;

/** Serializes each ScoringProfile in the array into its REST wire shape. */
export function scoringProfileArraySerializer(result: Array<ScoringProfile>): any[] {
  return result.map((item) => {
    return scoringProfileSerializer(item);
  });
}

/** Deserializes each element of a wire-format array into a ScoringProfile model. */
export function scoringProfileArrayDeserializer(result: Array<ScoringProfile>): any[] {
  return result.map((item) => {
    return scoringProfileDeserializer(item);
  });
}

/** Defines parameters for a search index that influence scoring in search queries. */
export interface ScoringProfile {
  /** The name of the scoring profile. */
  name: string;
  /** Parameters that boost scoring based on text matches in certain index fields. */
  textWeights?: TextWeights;
  /** The collection of functions that influence the scoring of documents. */
  functions?: ScoringFunctionUnion[];
  /** A value indicating how the results of individual scoring functions should be combined. Defaults to "Sum". Ignored if there are no scoring functions. */
  functionAggregation?: ScoringFunctionAggregation;
}

/**
 * Serializes a ScoringProfile into its REST wire shape.
 * NOTE: the client-side `textWeights` property is written to the wire key `text`.
 */
export function scoringProfileSerializer(item: ScoringProfile): any {
  return {
    name: item["name"],
    text: !item["textWeights"] ? item["textWeights"] : textWeightsSerializer(item["textWeights"]),
    functions: !item["functions"]
      ? item["functions"]
      : scoringFunctionUnionArraySerializer(item["functions"]),
    functionAggregation: item["functionAggregation"],
  };
}

/**
 * Deserializes a REST wire object into a ScoringProfile.
 * NOTE: the wire key `text` is mapped back to the client-side `textWeights` property.
 */
export function scoringProfileDeserializer(item: any): ScoringProfile {
  return {
    name: item["name"],
    textWeights: !item["text"] ? item["text"] : textWeightsDeserializer(item["text"]),
    functions: !item["functions"]
      ? item["functions"]
      : scoringFunctionUnionArrayDeserializer(item["functions"]),
    functionAggregation: item["functionAggregation"],
  };
}

/** Defines weights on index fields for which matches should boost scoring in search queries. */
export interface TextWeights {
  /** The dictionary of per-field weights to boost document scoring. The keys are field names and the values are the weights for each field. */
  weights: Record<string, number>;
}

export function textWeightsSerializer(item: TextWeights): any {
  return { weights: item["weights"] };
}

export function textWeightsDeserializer(item: any): TextWeights {
  return {
    // Rebuilds the map entry-by-entry; effectively a shallow copy of the wire object.
    weights: Object.fromEntries(
      Object.entries(item["weights"]).map(([k, p]: [string, any]) => [k, p]),
    ),
  };
}

export function scoringFunctionUnionArraySerializer(result: Array<ScoringFunctionUnion>): any[] {
  return result.map((item) => {
    return scoringFunctionUnionSerializer(item);
  });
}

export function scoringFunctionUnionArrayDeserializer(result: Array<ScoringFunctionUnion>): any[] {
  return result.map((item) => {
    return scoringFunctionUnionDeserializer(item);
  });
}

/** Base type for functions that can modify document scores during ranking. */
export interface ScoringFunction {
  /** The name of the field used as input to the scoring function. */
  fieldName: string;
  /** A multiplier for the raw score. Must be a positive number not equal to 1.0. */
  boost: number;
  /** A value indicating how boosting will be interpolated across document scores; defaults to "Linear". */
  interpolation?: ScoringFunctionInterpolation;
  /** Type of ScoringFunction. */
  /** The discriminator possible values: distance, freshness, magnitude, tag */
  type: string;
}

export function scoringFunctionSerializer(item: ScoringFunction): any {
  return {
    fieldName: item["fieldName"],
    boost: item["boost"],
    interpolation: item["interpolation"],
    type: item["type"],
  };
}

export function scoringFunctionDeserializer(item: any): ScoringFunction {
  return {
    fieldName: item["fieldName"],
    boost: item["boost"],
    interpolation: item["interpolation"],
    type: item["type"],
  };
}

/** Alias for ScoringFunctionUnion */
export type ScoringFunctionUnion =
  | DistanceScoringFunction
  | FreshnessScoringFunction
  | MagnitudeScoringFunction
  | TagScoringFunction
  | ScoringFunction;

/** Dispatches on the `type` discriminator to the matching concrete serializer; falls back to the base serializer for unknown values. */
export function scoringFunctionUnionSerializer(item: ScoringFunctionUnion): any {
  switch (item.type) {
    case "distance":
      return distanceScoringFunctionSerializer(item as DistanceScoringFunction);

    case "freshness":
      return freshnessScoringFunctionSerializer(item as FreshnessScoringFunction);

    case "magnitude":
      return magnitudeScoringFunctionSerializer(item as MagnitudeScoringFunction);

    case "tag":
      return tagScoringFunctionSerializer(item as TagScoringFunction);

    default:
      return scoringFunctionSerializer(item);
  }
}

/** Dispatches on the wire `type` discriminator to the matching concrete deserializer; falls back to the base deserializer for unknown values. */
export function scoringFunctionUnionDeserializer(item: any): ScoringFunctionUnion {
  switch (item["type"]) {
    case "distance":
      return distanceScoringFunctionDeserializer(item as DistanceScoringFunction);

    case "freshness":
      return
  freshnessScoringFunctionDeserializer(item as FreshnessScoringFunction);

    case "magnitude":
      return magnitudeScoringFunctionDeserializer(item as MagnitudeScoringFunction);

    case "tag":
      return tagScoringFunctionDeserializer(item as TagScoringFunction);

    default:
      return scoringFunctionDeserializer(item);
  }
}

/** Defines the function used to interpolate score boosting across a range of documents. */
export type ScoringFunctionInterpolation = "linear" | "constant" | "quadratic" | "logarithmic";

/** Defines a function that boosts scores based on distance from a geographic location. */
export interface DistanceScoringFunction extends ScoringFunction {
  /** Parameter values for the distance scoring function. */
  parameters: DistanceScoringParameters;
  /** Indicates the type of function to use. Valid values include magnitude, freshness, distance, and tag. The function type must be lower case. */
  type: "distance";
}

/**
 * Serializes a DistanceScoringFunction into its REST wire shape.
 * NOTE: the client-side `parameters` property is written to the wire key `distance`.
 */
export function distanceScoringFunctionSerializer(item: DistanceScoringFunction): any {
  return {
    fieldName: item["fieldName"],
    boost: item["boost"],
    interpolation: item["interpolation"],
    type: item["type"],
    distance: distanceScoringParametersSerializer(item["parameters"]),
  };
}

/**
 * Deserializes a REST wire object into a DistanceScoringFunction.
 * NOTE: the wire key `distance` is mapped back to the client-side `parameters` property.
 */
export function distanceScoringFunctionDeserializer(item: any): DistanceScoringFunction {
  return {
    fieldName: item["fieldName"],
    boost: item["boost"],
    interpolation: item["interpolation"],
    type: item["type"],
    parameters: distanceScoringParametersDeserializer(item["distance"]),
  };
}

/** Provides parameter values to a distance scoring function. */
export interface DistanceScoringParameters {
  /** The name of the parameter passed in search queries to specify the reference location. */
  referencePointParameter: string;
  /** The distance in kilometers from the reference location where the boosting range ends. */
  boostingDistance: number;
}

export function distanceScoringParametersSerializer(item: DistanceScoringParameters): any {
  return {
    referencePointParameter: item["referencePointParameter"],
    boostingDistance: item["boostingDistance"],
  };
}

export function distanceScoringParametersDeserializer(item: any): DistanceScoringParameters {
  return {
    referencePointParameter: item["referencePointParameter"],
    boostingDistance: item["boostingDistance"],
  };
}

/** Defines a function that boosts scores based on the value of a date-time field. */
export interface FreshnessScoringFunction extends ScoringFunction {
  /** Parameter values for the freshness scoring function. */
  parameters: FreshnessScoringParameters;
  /** Indicates the type of function to use. Valid values include magnitude, freshness, distance, and tag. The function type must be lower case. */
  type: "freshness";
}

/**
 * Serializes a FreshnessScoringFunction into its REST wire shape.
 * NOTE: the client-side `parameters` property is written to the wire key `freshness`.
 */
export function freshnessScoringFunctionSerializer(item: FreshnessScoringFunction): any {
  return {
    fieldName: item["fieldName"],
    boost: item["boost"],
    interpolation: item["interpolation"],
    type: item["type"],
    freshness: freshnessScoringParametersSerializer(item["parameters"]),
  };
}

/**
 * Deserializes a REST wire object into a FreshnessScoringFunction.
 * NOTE: the wire key `freshness` is mapped back to the client-side `parameters` property.
 */
export function freshnessScoringFunctionDeserializer(item: any): FreshnessScoringFunction {
  return {
    fieldName: item["fieldName"],
    boost: item["boost"],
    interpolation: item["interpolation"],
    type: item["type"],
    parameters: freshnessScoringParametersDeserializer(item["freshness"]),
  };
}

/** Provides parameter values to a freshness scoring function. */
export interface FreshnessScoringParameters {
  /** The expiration period after which boosting will stop for a particular document.
 */
  boostingDuration: string;
}

export function freshnessScoringParametersSerializer(item: FreshnessScoringParameters): any {
  return { boostingDuration: item["boostingDuration"] };
}

export function freshnessScoringParametersDeserializer(item: any): FreshnessScoringParameters {
  return {
    boostingDuration: item["boostingDuration"],
  };
}

/** Defines a function that boosts scores based on the magnitude of a numeric field. */
export interface MagnitudeScoringFunction extends ScoringFunction {
  /** Parameter values for the magnitude scoring function. */
  parameters: MagnitudeScoringParameters;
  /** Indicates the type of function to use. Valid values include magnitude, freshness, distance, and tag. The function type must be lower case. */
  type: "magnitude";
}

/**
 * Serializes a MagnitudeScoringFunction into its REST wire shape.
 * NOTE: the client-side `parameters` property is written to the wire key `magnitude`.
 */
export function magnitudeScoringFunctionSerializer(item: MagnitudeScoringFunction): any {
  return {
    fieldName: item["fieldName"],
    boost: item["boost"],
    interpolation: item["interpolation"],
    type: item["type"],
    magnitude: magnitudeScoringParametersSerializer(item["parameters"]),
  };
}

/**
 * Deserializes a REST wire object into a MagnitudeScoringFunction.
 * NOTE: the wire key `magnitude` is mapped back to the client-side `parameters` property.
 */
export function magnitudeScoringFunctionDeserializer(item: any): MagnitudeScoringFunction {
  return {
    fieldName: item["fieldName"],
    boost: item["boost"],
    interpolation: item["interpolation"],
    type: item["type"],
    parameters: magnitudeScoringParametersDeserializer(item["magnitude"]),
  };
}

/** Provides parameter values to a magnitude scoring function. */
export interface MagnitudeScoringParameters {
  /** The field value at which boosting starts. */
  boostingRangeStart: number;
  /** The field value at which boosting ends. */
  boostingRangeEnd: number;
  /** A value indicating whether to apply a constant boost for field values beyond the range end value; default is false. */
  shouldBoostBeyondRangeByConstant?: boolean;
}

/**
 * Serializes MagnitudeScoringParameters into its REST wire shape.
 * NOTE: the client-side `shouldBoostBeyondRangeByConstant` property is written to the
 * wire key `constantBoostBeyondRange`.
 */
export function magnitudeScoringParametersSerializer(item: MagnitudeScoringParameters): any {
  return {
    boostingRangeStart: item["boostingRangeStart"],
    boostingRangeEnd: item["boostingRangeEnd"],
    constantBoostBeyondRange: item["shouldBoostBeyondRangeByConstant"],
  };
}

/**
 * Deserializes a REST wire object into MagnitudeScoringParameters.
 * NOTE: the wire key `constantBoostBeyondRange` is mapped back to the client-side
 * `shouldBoostBeyondRangeByConstant` property.
 */
export function magnitudeScoringParametersDeserializer(item: any): MagnitudeScoringParameters {
  return {
    boostingRangeStart: item["boostingRangeStart"],
    boostingRangeEnd: item["boostingRangeEnd"],
    shouldBoostBeyondRangeByConstant: item["constantBoostBeyondRange"],
  };
}

/** Defines a function that boosts scores of documents with string values matching a given list of tags. */
export interface TagScoringFunction extends ScoringFunction {
  /** Parameter values for the tag scoring function. */
  parameters: TagScoringParameters;
  /** Indicates the type of function to use. Valid values include magnitude, freshness, distance, and tag. The function type must be lower case. */
  type: "tag";
}

/**
 * Serializes a TagScoringFunction into its REST wire shape.
 * NOTE: the client-side `parameters` property is written to the wire key `tag`.
 */
export function tagScoringFunctionSerializer(item: TagScoringFunction): any {
  return {
    fieldName: item["fieldName"],
    boost: item["boost"],
    interpolation: item["interpolation"],
    type: item["type"],
    tag: tagScoringParametersSerializer(item["parameters"]),
  };
}

/**
 * Deserializes a REST wire object into a TagScoringFunction.
 * NOTE: the wire key `tag` is mapped back to the client-side `parameters` property.
 */
export function tagScoringFunctionDeserializer(item: any): TagScoringFunction {
  return {
    fieldName: item["fieldName"],
    boost: item["boost"],
    interpolation: item["interpolation"],
    type: item["type"],
    parameters: tagScoringParametersDeserializer(item["tag"]),
  };
}

/** Provides parameter values to a tag scoring function. */
export interface TagScoringParameters {
  /** The name of the parameter passed in search queries to specify the list of tags to compare against the target field.
 */
  tagsParameter: string;
}

export function tagScoringParametersSerializer(item: TagScoringParameters): any {
  return { tagsParameter: item["tagsParameter"] };
}

export function tagScoringParametersDeserializer(item: any): TagScoringParameters {
  return {
    tagsParameter: item["tagsParameter"],
  };
}

/** Defines the aggregation function used to combine the results of all the scoring functions in a scoring profile. */
export type ScoringFunctionAggregation =
  | "sum"
  | "average"
  | "minimum"
  | "maximum"
  | "firstMatching"
  | "product";

/** Defines options to control Cross-Origin Resource Sharing (CORS) for an index. */
export interface CorsOptions {
  /** The list of origins from which JavaScript code will be granted access to your index. Can contain a list of hosts of the form {protocol}://{fully-qualified-domain-name}[:{port#}], or a single '*' to allow all origins (not recommended). */
  allowedOrigins: string[];
  /** The duration for which browsers should cache CORS preflight responses. Defaults to 5 minutes. */
  maxAgeInSeconds?: number;
}

export function corsOptionsSerializer(item: CorsOptions): any {
  return {
    // Element-wise copy decouples the wire payload from the caller's array instance.
    allowedOrigins: item["allowedOrigins"].map((p: any) => {
      return p;
    }),
    maxAgeInSeconds: item["maxAgeInSeconds"],
  };
}

export function corsOptionsDeserializer(item: any): CorsOptions {
  return {
    allowedOrigins: item["allowedOrigins"].map((p: any) => {
      return p;
    }),
    maxAgeInSeconds: item["maxAgeInSeconds"],
  };
}

export function searchSuggesterArraySerializer(result: Array<SearchSuggester>): any[] {
  return result.map((item) => {
    return searchSuggesterSerializer(item);
  });
}

export function searchSuggesterArrayDeserializer(result: Array<SearchSuggester>): any[] {
  return result.map((item) => {
    return searchSuggesterDeserializer(item);
  });
}

/** Defines how the Suggest API should apply to a group of fields in the index. */
export interface SearchSuggester {
  /** The name of the suggester. */
  name: string;
  /** A value indicating the capabilities of the suggester. */
  searchMode: "analyzingInfixMatching";
  /** The list of field names to which the suggester applies. Each field must be searchable. */
  sourceFields: string[];
}

export function searchSuggesterSerializer(item: SearchSuggester): any {
  return {
    name: item["name"],
    searchMode: item["searchMode"],
    sourceFields: item["sourceFields"].map((p: any) => {
      return p;
    }),
  };
}

export function searchSuggesterDeserializer(item: any): SearchSuggester {
  return {
    name: item["name"],
    searchMode: item["searchMode"],
    sourceFields: item["sourceFields"].map((p: any) => {
      return p;
    }),
  };
}

export function lexicalAnalyzerUnionArraySerializer(result: Array<LexicalAnalyzerUnion>): any[] {
  return result.map((item) => {
    return lexicalAnalyzerUnionSerializer(item);
  });
}

export function lexicalAnalyzerUnionArrayDeserializer(result: Array<LexicalAnalyzerUnion>): any[] {
  return result.map((item) => {
    return lexicalAnalyzerUnionDeserializer(item);
  });
}

/** Base type for analyzers. */
export interface LexicalAnalyzer {
  /** The discriminator for derived types. */
  /** The discriminator possible values: #Microsoft.Azure.Search.CustomAnalyzer, #Microsoft.Azure.Search.PatternAnalyzer, #Microsoft.Azure.Search.StandardAnalyzer, #Microsoft.Azure.Search.StopAnalyzer */
  odatatype: string;
  /** The name of the analyzer. It must only contain letters, digits, spaces, dashes or underscores, can only start and end with alphanumeric characters, and is limited to 128 characters.
 */
  name: string;
}

/**
 * Serializes the base LexicalAnalyzer.
 * NOTE: the client-side `odatatype` property is written to the wire key `@odata.type`.
 */
export function lexicalAnalyzerSerializer(item: LexicalAnalyzer): any {
  return { "@odata.type": item["odatatype"], name: item["name"] };
}

/**
 * Deserializes the base LexicalAnalyzer.
 * NOTE: the wire key `@odata.type` is mapped back to the client-side `odatatype` property.
 */
export function lexicalAnalyzerDeserializer(item: any): LexicalAnalyzer {
  return {
    odatatype: item["@odata.type"],
    name: item["name"],
  };
}

/** Alias for LexicalAnalyzerUnion */
export type LexicalAnalyzerUnion =
  | CustomAnalyzer
  | PatternAnalyzer
  | LuceneStandardAnalyzer
  | StopAnalyzer
  | LexicalAnalyzer;

/** Dispatches on the `odatatype` discriminator to the matching concrete serializer; falls back to the base serializer for unknown values. */
export function lexicalAnalyzerUnionSerializer(item: LexicalAnalyzerUnion): any {
  switch (item.odatatype) {
    case "#Microsoft.Azure.Search.CustomAnalyzer":
      return customAnalyzerSerializer(item as CustomAnalyzer);

    case "#Microsoft.Azure.Search.PatternAnalyzer":
      return patternAnalyzerSerializer(item as PatternAnalyzer);

    case "#Microsoft.Azure.Search.StandardAnalyzer":
      return luceneStandardAnalyzerSerializer(item as LuceneStandardAnalyzer);

    case "#Microsoft.Azure.Search.StopAnalyzer":
      return stopAnalyzerSerializer(item as StopAnalyzer);

    default:
      return lexicalAnalyzerSerializer(item);
  }
}

/** Dispatches on the wire `@odata.type` discriminator to the matching concrete deserializer; falls back to the base deserializer for unknown values. */
export function lexicalAnalyzerUnionDeserializer(item: any): LexicalAnalyzerUnion {
  switch (item["@odata.type"]) {
    case "#Microsoft.Azure.Search.CustomAnalyzer":
      return customAnalyzerDeserializer(item as CustomAnalyzer);

    case "#Microsoft.Azure.Search.PatternAnalyzer":
      return patternAnalyzerDeserializer(item as PatternAnalyzer);

    case "#Microsoft.Azure.Search.StandardAnalyzer":
      return luceneStandardAnalyzerDeserializer(item as LuceneStandardAnalyzer);

    case "#Microsoft.Azure.Search.StopAnalyzer":
      return stopAnalyzerDeserializer(item as StopAnalyzer);

    default:
      return lexicalAnalyzerDeserializer(item);
  }
}

/** Allows you to take control over the process of converting text into indexable/searchable tokens. It's a user-defined configuration consisting of a single predefined tokenizer and one or more filters. The tokenizer is responsible for breaking text into tokens, and the filters for modifying tokens emitted by the tokenizer. */
export interface CustomAnalyzer extends LexicalAnalyzer {
  /** The name of the tokenizer to use to divide continuous text into a sequence of tokens, such as breaking a sentence into words. */
  tokenizer: LexicalTokenizerName;
  /** A list of token filters used to filter out or modify the tokens generated by a tokenizer. For example, you can specify a lowercase filter that converts all characters to lowercase. The filters are run in the order in which they are listed. */
  tokenFilters?: TokenFilterName[];
  /** A list of character filters used to prepare input text before it is processed by the tokenizer. For instance, they can replace certain characters or symbols. The filters are run in the order in which they are listed. */
  charFilters?: CharFilterName[];
  /** A URI fragment specifying the type of analyzer. */
  odatatype: "#Microsoft.Azure.Search.CustomAnalyzer";
}

/**
 * Serializes a CustomAnalyzer into its REST wire shape.
 * NOTE: the client-side `odatatype` property is written to the wire key `@odata.type`.
 */
export function customAnalyzerSerializer(item: CustomAnalyzer): any {
  return {
    "@odata.type": item["odatatype"],
    name: item["name"],
    tokenizer: item["tokenizer"],
    tokenFilters: !item["tokenFilters"]
      ? item["tokenFilters"]
      : item["tokenFilters"].map((p: any) => {
          return p;
        }),
    charFilters: !item["charFilters"]
      ? item["charFilters"]
      : item["charFilters"].map((p: any) => {
          return p;
        }),
  };
}

/** Deserializes a REST wire object into a CustomAnalyzer. */
export function customAnalyzerDeserializer(item: any): CustomAnalyzer {
  return {
    odatatype: item["@odata.type"],
    name: item["name"],
    tokenizer: item["tokenizer"],
    tokenFilters: !item["tokenFilters"]
      ? item["tokenFilters"]
      : item["tokenFilters"].map((p: any) => {
          return p;
        }),
    charFilters: !item["charFilters"]
      ? item["charFilters"]
      : item["charFilters"].map((p: any) => {
          return p;
        }),
  };
}

/** Defines the names of all tokenizers supported by the search engine. */
export enum KnownLexicalTokenizerName {
  /** Grammar-based tokenizer that is suitable for processing most European-language documents. See http://lucene.apache.org/core/4_10_3/analyzers-common/org/apache/lucene/analysis/standard/ClassicTokenizer.html */
  Classic = "classic",
  /** Tokenizes the input from an edge into n-grams of the given size(s). See https://lucene.apache.org/core/4_10_3/analyzers-common/org/apache/lucene/analysis/ngram/EdgeNGramTokenizer.html */
  EdgeNGram = "edgeNGram",
  /** Emits the entire input as a single token. See http://lucene.apache.org/core/4_10_3/analyzers-common/org/apache/lucene/analysis/core/KeywordTokenizer.html */
  Keyword = "keyword_v2",
  /** Divides text at non-letters. See http://lucene.apache.org/core/4_10_3/analyzers-common/org/apache/lucene/analysis/core/LetterTokenizer.html */
  Letter = "letter",
  /** Divides text at non-letters and converts them to lower case. See http://lucene.apache.org/core/4_10_3/analyzers-common/org/apache/lucene/analysis/core/LowerCaseTokenizer.html */
  Lowercase = "lowercase",
  /** Divides text using language-specific rules. */
  MicrosoftLanguageTokenizer = "microsoft_language_tokenizer",
  /** Divides text using language-specific rules and reduces words to their base forms. */
  MicrosoftLanguageStemmingTokenizer = "microsoft_language_stemming_tokenizer",
  /** Tokenizes the input into n-grams of the given size(s). See http://lucene.apache.org/core/4_10_3/analyzers-common/org/apache/lucene/analysis/ngram/NGramTokenizer.html */
  NGram = "nGram",
  /** Tokenizer for path-like hierarchies.
See http://lucene.apache.org/core/4_10_3/analyzers-common/org/apache/lucene/analysis/path/PathHierarchyTokenizer.html */
  PathHierarchy = "path_hierarchy_v2",
  /** Tokenizer that uses regex pattern matching to construct distinct tokens. See http://lucene.apache.org/core/4_10_3/analyzers-common/org/apache/lucene/analysis/pattern/PatternTokenizer.html */
  Pattern = "pattern",
  /** Standard Lucene analyzer; Composed of the standard tokenizer, lowercase filter and stop filter. See http://lucene.apache.org/core/4_10_3/analyzers-common/org/apache/lucene/analysis/standard/StandardTokenizer.html */
  Standard = "standard_v2",
  /** Tokenizes urls and emails as one token. See http://lucene.apache.org/core/4_10_3/analyzers-common/org/apache/lucene/analysis/standard/UAX29URLEmailTokenizer.html */
  UaxUrlEmail = "uax_url_email",
  /** Divides text at whitespace. See http://lucene.apache.org/core/4_10_3/analyzers-common/org/apache/lucene/analysis/core/WhitespaceTokenizer.html */
  Whitespace = "whitespace",
}

/**
 * Defines the names of all tokenizers supported by the search engine. \
 * {@link KnownLexicalTokenizerName} can be used interchangeably with LexicalTokenizerName,
 * this enum contains the known values that the service supports.
 * ### Known values supported by the service
 * **classic**: Grammar-based tokenizer that is suitable for processing most European-language documents. See http://lucene.apache.org/core/4_10_3/analyzers-common/org/apache/lucene/analysis/standard/ClassicTokenizer.html \
 * **edgeNGram**: Tokenizes the input from an edge into n-grams of the given size(s). See https://lucene.apache.org/core/4_10_3/analyzers-common/org/apache/lucene/analysis/ngram/EdgeNGramTokenizer.html \
 * **keyword_v2**: Emits the entire input as a single token. See http://lucene.apache.org/core/4_10_3/analyzers-common/org/apache/lucene/analysis/core/KeywordTokenizer.html \
 * **letter**: Divides text at non-letters. See http://lucene.apache.org/core/4_10_3/analyzers-common/org/apache/lucene/analysis/core/LetterTokenizer.html \
 * **lowercase**: Divides text at non-letters and converts them to lower case. See http://lucene.apache.org/core/4_10_3/analyzers-common/org/apache/lucene/analysis/core/LowerCaseTokenizer.html \
 * **microsoft_language_tokenizer**: Divides text using language-specific rules. \
 * **microsoft_language_stemming_tokenizer**: Divides text using language-specific rules and reduces words to their base forms. \
 * **nGram**: Tokenizes the input into n-grams of the given size(s). See http://lucene.apache.org/core/4_10_3/analyzers-common/org/apache/lucene/analysis/ngram/NGramTokenizer.html \
 * **path_hierarchy_v2**: Tokenizer for path-like hierarchies. See http://lucene.apache.org/core/4_10_3/analyzers-common/org/apache/lucene/analysis/path/PathHierarchyTokenizer.html \
 * **pattern**: Tokenizer that uses regex pattern matching to construct distinct tokens. See http://lucene.apache.org/core/4_10_3/analyzers-common/org/apache/lucene/analysis/pattern/PatternTokenizer.html \
 * **standard_v2**: Standard Lucene analyzer; Composed of the standard tokenizer, lowercase filter and stop filter. See http://lucene.apache.org/core/4_10_3/analyzers-common/org/apache/lucene/analysis/standard/StandardTokenizer.html \
 * **uax_url_email**: Tokenizes urls and emails as one token. See http://lucene.apache.org/core/4_10_3/analyzers-common/org/apache/lucene/analysis/standard/UAX29URLEmailTokenizer.html \
 * **whitespace**: Divides text at whitespace. See http://lucene.apache.org/core/4_10_3/analyzers-common/org/apache/lucene/analysis/core/WhitespaceTokenizer.html
 */
export type LexicalTokenizerName = string;

/** Defines the names of all token filters supported by the search engine. */
export enum KnownTokenFilterName {
  /** A token filter that applies the Arabic normalizer to normalize the orthography. See http://lucene.apache.org/core/4_10_3/analyzers-common/org/apache/lucene/analysis/ar/ArabicNormalizationFilter.html */
  ArabicNormalization = "arabic_normalization",
  /** Strips all characters after an apostrophe (including the apostrophe itself). See http://lucene.apache.org/core/4_10_3/analyzers-common/org/apache/lucene/analysis/tr/ApostropheFilter.html */
  Apostrophe = "apostrophe",
  /** Converts alphabetic, numeric, and symbolic Unicode characters which are not in the first 127 ASCII characters (the "Basic Latin" Unicode block) into their ASCII equivalents, if such equivalents exist. See http://lucene.apache.org/core/4_10_3/analyzers-common/org/apache/lucene/analysis/miscellaneous/ASCIIFoldingFilter.html */
  AsciiFolding = "asciifolding",
  /** Forms bigrams of CJK terms that are generated from the standard tokenizer. See http://lucene.apache.org/core/4_10_3/analyzers-common/org/apache/lucene/analysis/cjk/CJKBigramFilter.html */
  CjkBigram = "cjk_bigram",
  /** Normalizes CJK width differences. Folds full-width ASCII variants into the equivalent basic Latin, and half-width Katakana variants into the equivalent Kana. See http://lucene.apache.org/core/4_10_3/analyzers-common/org/apache/lucene/analysis/cjk/CJKWidthFilter.html */
  CjkWidth = "cjk_width",
  /** Removes English possessives, and dots from acronyms. See http://lucene.apache.org/core/4_10_3/analyzers-common/org/apache/lucene/analysis/standard/ClassicFilter.html */
  Classic = "classic",
  /** Construct bigrams for frequently occurring terms while indexing. Single terms are still indexed too, with bigrams overlaid. See http://lucene.apache.org/core/4_10_3/analyzers-common/org/apache/lucene/analysis/commongrams/CommonGramsFilter.html */
  CommonGram = "common_grams",
  /** Generates n-grams of the given size(s) starting from the front or the back of an input token. See http://lucene.apache.org/core/4_10_3/analyzers-common/org/apache/lucene/analysis/ngram/EdgeNGramTokenFilter.html */
  EdgeNGram = "edgeNGram_v2",
  /** Removes elisions. For example, "l'avion" (the plane) will be converted to "avion" (plane). See http://lucene.apache.org/core/4_10_3/analyzers-common/org/apache/lucene/analysis/util/ElisionFilter.html */
  Elision = "elision",
  /** Normalizes German characters according to the heuristics of the German2 snowball algorithm. See http://lucene.apache.org/core/4_10_3/analyzers-common/org/apache/lucene/analysis/de/GermanNormalizationFilter.html */
  GermanNormalization = "german_normalization",
  /** Normalizes text in Hindi to remove some differences in spelling variations. See http://lucene.apache.org/core/4_10_3/analyzers-common/org/apache/lucene/analysis/hi/HindiNormalizationFilter.html */
  HindiNormalization = "hindi_normalization",
  /** Normalizes the Unicode representation of text in Indian languages. See http://lucene.apache.org/core/4_10_3/analyzers-common/org/apache/lucene/analysis/in/IndicNormalizationFilter.html */
  IndicNormalization = "indic_normalization",
  /** Emits each incoming token twice, once as keyword and once as non-keyword. See http://lucene.apache.org/core/4_10_3/analyzers-common/org/apache/lucene/analysis/miscellaneous/KeywordRepeatFilter.html */
  KeywordRepeat = "keyword_repeat",
  /** A high-performance kstem filter for English. See http://lucene.apache.org/core/4_10_3/analyzers-common/org/apache/lucene/analysis/en/KStemFilter.html */
  KStem = "kstem",
  /** Removes words that are too long or too short. See http://lucene.apache.org/core/4_10_3/analyzers-common/org/apache/lucene/analysis/miscellaneous/LengthFilter.html */
  Length = "length",
  /** Limits the number of tokens while indexing. See http://lucene.apache.org/core/4_10_3/analyzers-common/org/apache/lucene/analysis/miscellaneous/LimitTokenCountFilter.html */
  Limit = "limit",
  /** Normalizes token text to lower case. See https://lucene.apache.org/core/6_6_1/analyzers-common/org/apache/lucene/analysis/core/LowerCaseFilter.html */
  Lowercase = "lowercase",
  /** Generates n-grams of the given size(s). See http://lucene.apache.org/core/4_10_3/analyzers-common/org/apache/lucene/analysis/ngram/NGramTokenFilter.html */
  NGram = "nGram_v2",
  /** Applies normalization for Persian. See http://lucene.apache.org/core/4_10_3/analyzers-common/org/apache/lucene/analysis/fa/PersianNormalizationFilter.html */
  PersianNormalization = "persian_normalization",
  /** Create tokens for phonetic matches. See https://lucene.apache.org/core/4_10_3/analyzers-phonetic/org/apache/lucene/analysis/phonetic/package-tree.html */
  Phonetic = "phonetic",
  /** Uses the Porter stemming algorithm to transform the token stream. See http://tartarus.org/~martin/PorterStemmer */
  PorterStem = "porter_stem",
  /** Reverses the token string. See http://lucene.apache.org/core/4_10_3/analyzers-common/org/apache/lucene/analysis/reverse/ReverseStringFilter.html */
  Reverse = "reverse",
  /** Normalizes use of the interchangeable Scandinavian characters. See http://lucene.apache.org/core/4_10_3/analyzers-common/org/apache/lucene/analysis/miscellaneous/ScandinavianNormalizationFilter.html */
  ScandinavianNormalization = "scandinavian_normalization",
  /** Folds Scandinavian characters åÅäæÄÆ->a and öÖøØ->o. It also discriminates against use of double vowels aa, ae, ao, oe and oo, leaving just the first one. See http://lucene.apache.org/core/4_10_3/analyzers-common/org/apache/lucene/analysis/miscellaneous/ScandinavianFoldingFilter.html */
  ScandinavianFoldingNormalization = "scandinavian_folding",
  /** Creates combinations of tokens as a single token. See http://lucene.apache.org/core/4_10_3/analyzers-common/org/apache/lucene/analysis/shingle/ShingleFilter.html */
  Shingle = "shingle",
  /** A filter that stems words using a Snowball-generated stemmer. See http://lucene.apache.org/core/4_10_3/analyzers-common/org/apache/lucene/analysis/snowball/SnowballFilter.html */
  Snowball = "snowball",
  /** Normalizes the Unicode representation of Sorani text. See http://lucene.apache.org/core/4_10_3/analyzers-common/org/apache/lucene/analysis/ckb/SoraniNormalizationFilter.html */
  SoraniNormalization = "sorani_normalization",
  /** Language specific stemming filter. See https://learn.microsoft.com/rest/api/searchservice/Custom-analyzers-in-Azure-Search#TokenFilters */
  Stemmer = "stemmer",
  /** Removes stop words from a token stream. See http://lucene.apache.org/core/4_10_3/analyzers-common/org/apache/lucene/analysis/core/StopFilter.html */
  Stopwords = "stopwords",
  /** Trims leading and trailing whitespace from tokens. See http://lucene.apache.org/core/4_10_3/analyzers-common/org/apache/lucene/analysis/miscellaneous/TrimFilter.html */
  Trim = "trim",
  /** Truncates the terms to a specific length. See http://lucene.apache.org/core/4_10_3/analyzers-common/org/apache/lucene/analysis/miscellaneous/TruncateTokenFilter.html */
  Truncate = "truncate",
  /** Filters out tokens with same text as the previous token. See http://lucene.apache.org/core/4_10_3/analyzers-common/org/apache/lucene/analysis/miscellaneous/RemoveDuplicatesTokenFilter.html */
  Unique = "unique",
  /** Normalizes token text to upper case. See https://lucene.apache.org/core/6_6_1/analyzers-common/org/apache/lucene/analysis/core/UpperCaseFilter.html */
  Uppercase = "uppercase",
  /** Splits words into subwords and performs optional transformations on subword groups. */
  WordDelimiter = "word_delimiter",
}

/**
 * Defines the names of all token filters supported by the search engine. \
 * {@link KnownTokenFilterName} can be used interchangeably with TokenFilterName,
 * this enum contains the known values that the service supports.
 * ### Known values supported by the service
 * **arabic_normalization**: A token filter that applies the Arabic normalizer to normalize the orthography. See http://lucene.apache.org/core/4_10_3/analyzers-common/org/apache/lucene/analysis/ar/ArabicNormalizationFilter.html \
 * **apostrophe**: Strips all characters after an apostrophe (including the apostrophe itself). See http://lucene.apache.org/core/4_10_3/analyzers-common/org/apache/lucene/analysis/tr/ApostropheFilter.html \
 * **asciifolding**: Converts alphabetic, numeric, and symbolic Unicode characters which are not in the first 127 ASCII characters (the "Basic Latin" Unicode block) into their ASCII equivalents, if such equivalents exist. See http://lucene.apache.org/core/4_10_3/analyzers-common/org/apache/lucene/analysis/miscellaneous/ASCIIFoldingFilter.html \
 * **cjk_bigram**: Forms bigrams of CJK terms that are generated from the standard tokenizer. See http://lucene.apache.org/core/4_10_3/analyzers-common/org/apache/lucene/analysis/cjk/CJKBigramFilter.html \
 * **cjk_width**: Normalizes CJK width differences. Folds full-width ASCII variants into the equivalent basic Latin, and half-width Katakana variants into the equivalent Kana.
See http:\\//lucene.apache.org\\/core\\/4_10_3\\/analyzers-common\\/org\\/apache\\/lucene\\/analysis\\/cjk\\/CJKWidthFilter.html \\\n * **classic**: Removes English possessives, and dots from acronyms. See http:\\//lucene.apache.org\\/core\\/4_10_3\\/analyzers-common\\/org\\/apache\\/lucene\\/analysis\\/standard\\/ClassicFilter.html \\\n * **common_grams**: Construct bigrams for frequently occurring terms while indexing. Single terms are still indexed too, with bigrams overlaid. See http:\\//lucene.apache.org\\/core\\/4_10_3\\/analyzers-common\\/org\\/apache\\/lucene\\/analysis\\/commongrams\\/CommonGramsFilter.html \\\n * **edgeNGram_v2**: Generates n-grams of the given size(s) starting from the front or the back of an input token. See http:\\//lucene.apache.org\\/core\\/4_10_3\\/analyzers-common\\/org\\/apache\\/lucene\\/analysis\\/ngram\\/EdgeNGramTokenFilter.html \\\n * **elision**: Removes elisions. For example, \"l'avion\" (the plane) will be converted to \"avion\" (plane). See http:\\//lucene.apache.org\\/core\\/4_10_3\\/analyzers-common\\/org\\/apache\\/lucene\\/analysis\\/util\\/ElisionFilter.html \\\n * **german_normalization**: Normalizes German characters according to the heuristics of the German2 snowball algorithm. See http:\\//lucene.apache.org\\/core\\/4_10_3\\/analyzers-common\\/org\\/apache\\/lucene\\/analysis\\/de\\/GermanNormalizationFilter.html \\\n * **hindi_normalization**: Normalizes text in Hindi to remove some differences in spelling variations. See http:\\//lucene.apache.org\\/core\\/4_10_3\\/analyzers-common\\/org\\/apache\\/lucene\\/analysis\\/hi\\/HindiNormalizationFilter.html \\\n * **indic_normalization**: Normalizes the Unicode representation of text in Indian languages. See http:\\//lucene.apache.org\\/core\\/4_10_3\\/analyzers-common\\/org\\/apache\\/lucene\\/analysis\\/in\\/IndicNormalizationFilter.html \\\n * **keyword_repeat**: Emits each incoming token twice, once as keyword and once as non-keyword. 
See http:\\//lucene.apache.org\\/core\\/4_10_3\\/analyzers-common\\/org\\/apache\\/lucene\\/analysis\\/miscellaneous\\/KeywordRepeatFilter.html \\\n * **kstem**: A high-performance kstem filter for English. See http:\\//lucene.apache.org\\/core\\/4_10_3\\/analyzers-common\\/org\\/apache\\/lucene\\/analysis\\/en\\/KStemFilter.html \\\n * **length**: Removes words that are too long or too short. See http:\\//lucene.apache.org\\/core\\/4_10_3\\/analyzers-common\\/org\\/apache\\/lucene\\/analysis\\/miscellaneous\\/LengthFilter.html \\\n * **limit**: Limits the number of tokens while indexing. See http:\\//lucene.apache.org\\/core\\/4_10_3\\/analyzers-common\\/org\\/apache\\/lucene\\/analysis\\/miscellaneous\\/LimitTokenCountFilter.html \\\n * **lowercase**: Normalizes token text to lower case. See https:\\//lucene.apache.org\\/core\\/6_6_1\\/analyzers-common\\/org\\/apache\\/lucene\\/analysis\\/core\\/LowerCaseFilter.html \\\n * **nGram_v2**: Generates n-grams of the given size(s). See http:\\//lucene.apache.org\\/core\\/4_10_3\\/analyzers-common\\/org\\/apache\\/lucene\\/analysis\\/ngram\\/NGramTokenFilter.html \\\n * **persian_normalization**: Applies normalization for Persian. See http:\\//lucene.apache.org\\/core\\/4_10_3\\/analyzers-common\\/org\\/apache\\/lucene\\/analysis\\/fa\\/PersianNormalizationFilter.html \\\n * **phonetic**: Create tokens for phonetic matches. See https:\\//lucene.apache.org\\/core\\/4_10_3\\/analyzers-phonetic\\/org\\/apache\\/lucene\\/analysis\\/phonetic\\/package-tree.html \\\n * **porter_stem**: Uses the Porter stemming algorithm to transform the token stream. See http:\\//tartarus.org\\/~martin\\/PorterStemmer \\\n * **reverse**: Reverses the token string. See http:\\//lucene.apache.org\\/core\\/4_10_3\\/analyzers-common\\/org\\/apache\\/lucene\\/analysis\\/reverse\\/ReverseStringFilter.html \\\n * **scandinavian_normalization**: Normalizes use of the interchangeable Scandinavian characters. 
See http:\\//lucene.apache.org\\/core\\/4_10_3\\/analyzers-common\\/org\\/apache\\/lucene\\/analysis\\/miscellaneous\\/ScandinavianNormalizationFilter.html \\\n * **scandinavian_folding**: Folds Scandinavian characters \u00C3\u00A5\u00C3\u2026\u00C3\u00A4\u00C3\u00A6\u00C3\u201E\u00C3\u2020->a and \u00C3\u00B6\u00C3\u2013\u00C3\u00B8\u00C3\u02DC->o. It also discriminates against use of double vowels aa, ae, ao, oe and oo, leaving just the first one. See http:\\//lucene.apache.org\\/core\\/4_10_3\\/analyzers-common\\/org\\/apache\\/lucene\\/analysis\\/miscellaneous\\/ScandinavianFoldingFilter.html \\\n * **shingle**: Creates combinations of tokens as a single token. See http:\\//lucene.apache.org\\/core\\/4_10_3\\/analyzers-common\\/org\\/apache\\/lucene\\/analysis\\/shingle\\/ShingleFilter.html \\\n * **snowball**: A filter that stems words using a Snowball-generated stemmer. See http:\\//lucene.apache.org\\/core\\/4_10_3\\/analyzers-common\\/org\\/apache\\/lucene\\/analysis\\/snowball\\/SnowballFilter.html \\\n * **sorani_normalization**: Normalizes the Unicode representation of Sorani text. See http:\\//lucene.apache.org\\/core\\/4_10_3\\/analyzers-common\\/org\\/apache\\/lucene\\/analysis\\/ckb\\/SoraniNormalizationFilter.html \\\n * **stemmer**: Language specific stemming filter. See https:\\//learn.microsoft.com\\/rest\\/api\\/searchservice\\/Custom-analyzers-in-Azure-Search#TokenFilters \\\n * **stopwords**: Removes stop words from a token stream. See http:\\//lucene.apache.org\\/core\\/4_10_3\\/analyzers-common\\/org\\/apache\\/lucene\\/analysis\\/core\\/StopFilter.html \\\n * **trim**: Trims leading and trailing whitespace from tokens. See http:\\//lucene.apache.org\\/core\\/4_10_3\\/analyzers-common\\/org\\/apache\\/lucene\\/analysis\\/miscellaneous\\/TrimFilter.html \\\n * **truncate**: Truncates the terms to a specific length. 
See http:\\//lucene.apache.org\\/core\\/4_10_3\\/analyzers-common\\/org\\/apache\\/lucene\\/analysis\\/miscellaneous\\/TruncateTokenFilter.html \\\n * **unique**: Filters out tokens with same text as the previous token. See http:\\//lucene.apache.org\\/core\\/4_10_3\\/analyzers-common\\/org\\/apache\\/lucene\\/analysis\\/miscellaneous\\/RemoveDuplicatesTokenFilter.html \\\n * **uppercase**: Normalizes token text to upper case. See https:\\//lucene.apache.org\\/core\\/6_6_1\\/analyzers-common\\/org\\/apache\\/lucene\\/analysis\\/core\\/UpperCaseFilter.html \\\n * **word_delimiter**: Splits words into subwords and performs optional transformations on subword groups.\n */\nexport type TokenFilterName = string;\n\n/** Defines the names of all character filters supported by the search engine. */\nexport enum KnownCharFilterName {\n /** A character filter that attempts to strip out HTML constructs. See https://lucene.apache.org/core/4_10_3/analyzers-common/org/apache/lucene/analysis/charfilter/HTMLStripCharFilter.html */\n HtmlStrip = \"html_strip\",\n}\n\n/**\n * Defines the names of all character filters supported by the search engine. \\\n * {@link KnownCharFilterName} can be used interchangeably with CharFilterName,\n * this enum contains the known values that the service supports.\n * ### Known values supported by the service\n * **html_strip**: A character filter that attempts to strip out HTML constructs. See https:\\//lucene.apache.org\\/core\\/4_10_3\\/analyzers-common\\/org\\/apache\\/lucene\\/analysis\\/charfilter\\/HTMLStripCharFilter.html\n */\nexport type CharFilterName = string;\n\n/** Flexibly separates text into terms via a regular expression pattern. This analyzer is implemented using Apache Lucene. */\nexport interface PatternAnalyzer extends LexicalAnalyzer {\n /** A value indicating whether terms should be lower-cased. Default is true. */\n lowerCaseTerms?: boolean;\n /** A regular expression pattern to match token separators. 
Default is an expression that matches one or more non-word characters. */\n pattern?: string;\n /** Regular expression flags, specified as a '|' separated string of RegexFlags values. */\n flags?: RegexFlags[];\n /** A list of stopwords. */\n stopwords?: string[];\n /** A URI fragment specifying the type of analyzer. */\n odatatype: \"#Microsoft.Azure.Search.PatternAnalyzer\";\n}\n\nexport function patternAnalyzerSerializer(item: PatternAnalyzer): any {\n return {\n \"@odata.type\": item[\"odatatype\"],\n name: item[\"name\"],\n lowercase: item[\"lowerCaseTerms\"],\n pattern: item[\"pattern\"],\n flags: !item[\"flags\"]\n ? item[\"flags\"]\n : buildPipeCollection(\n item[\"flags\"].map((p: any) => {\n return p;\n }),\n ),\n stopwords: !item[\"stopwords\"]\n ? item[\"stopwords\"]\n : item[\"stopwords\"].map((p: any) => {\n return p;\n }),\n };\n}\n\nexport function patternAnalyzerDeserializer(item: any): PatternAnalyzer {\n return {\n odatatype: item[\"@odata.type\"],\n name: item[\"name\"],\n lowerCaseTerms: item[\"lowercase\"],\n pattern: item[\"pattern\"],\n flags:\n item[\"flags\"] === null || item[\"flags\"] === undefined\n ? item[\"flags\"]\n : parsePipeCollection(item[\"flags\"]),\n stopwords: !item[\"stopwords\"]\n ? item[\"stopwords\"]\n : item[\"stopwords\"].map((p: any) => {\n return p;\n }),\n };\n}\n\n/** Defines a regular expression flag that can be used in the pattern analyzer and pattern tokenizer. */\nexport enum KnownRegexFlags {\n /** Enables canonical equivalence. */\n CanonEq = \"CANON_EQ\",\n /** Enables case-insensitive matching. */\n CaseInsensitive = \"CASE_INSENSITIVE\",\n /** Permits whitespace and comments in the pattern. */\n Comments = \"COMMENTS\",\n /** Enables dotall mode. */\n DotAll = \"DOTALL\",\n /** Enables literal parsing of the pattern. */\n Literal = \"LITERAL\",\n /** Enables multiline mode. */\n Multiline = \"MULTILINE\",\n /** Enables Unicode-aware case folding. 
*/\n UnicodeCase = \"UNICODE_CASE\",\n /** Enables Unix lines mode. */\n UnixLines = \"UNIX_LINES\",\n}\n\n/**\n * Defines a regular expression flag that can be used in the pattern analyzer and pattern tokenizer. \\\n * {@link KnownRegexFlags} can be used interchangeably with RegexFlags,\n * this enum contains the known values that the service supports.\n * ### Known values supported by the service\n * **CANON_EQ**: Enables canonical equivalence. \\\n * **CASE_INSENSITIVE**: Enables case-insensitive matching. \\\n * **COMMENTS**: Permits whitespace and comments in the pattern. \\\n * **DOTALL**: Enables dotall mode. \\\n * **LITERAL**: Enables literal parsing of the pattern. \\\n * **MULTILINE**: Enables multiline mode. \\\n * **UNICODE_CASE**: Enables Unicode-aware case folding. \\\n * **UNIX_LINES**: Enables Unix lines mode.\n */\nexport type RegexFlags = string;\n\n/** Standard Apache Lucene analyzer; Composed of the standard tokenizer, lowercase filter and stop filter. */\nexport interface LuceneStandardAnalyzer extends LexicalAnalyzer {\n /** The maximum token length. Default is 255. Tokens longer than the maximum length are split. The maximum token length that can be used is 300 characters. */\n maxTokenLength?: number;\n /** A list of stopwords. */\n stopwords?: string[];\n /** A URI fragment specifying the type of analyzer. */\n odatatype: \"#Microsoft.Azure.Search.StandardAnalyzer\";\n}\n\nexport function luceneStandardAnalyzerSerializer(item: LuceneStandardAnalyzer): any {\n return {\n \"@odata.type\": item[\"odatatype\"],\n name: item[\"name\"],\n maxTokenLength: item[\"maxTokenLength\"],\n stopwords: !item[\"stopwords\"]\n ? item[\"stopwords\"]\n : item[\"stopwords\"].map((p: any) => {\n return p;\n }),\n };\n}\n\nexport function luceneStandardAnalyzerDeserializer(item: any): LuceneStandardAnalyzer {\n return {\n odatatype: item[\"@odata.type\"],\n name: item[\"name\"],\n maxTokenLength: item[\"maxTokenLength\"],\n stopwords: !item[\"stopwords\"]\n ? 
item[\"stopwords\"]\n : item[\"stopwords\"].map((p: any) => {\n return p;\n }),\n };\n}\n\n/** Divides text at non-letters; Applies the lowercase and stopword token filters. This analyzer is implemented using Apache Lucene. */\nexport interface StopAnalyzer extends LexicalAnalyzer {\n /** A list of stopwords. */\n stopwords?: string[];\n /** A URI fragment specifying the type of analyzer. */\n odatatype: \"#Microsoft.Azure.Search.StopAnalyzer\";\n}\n\nexport function stopAnalyzerSerializer(item: StopAnalyzer): any {\n return {\n \"@odata.type\": item[\"odatatype\"],\n name: item[\"name\"],\n stopwords: !item[\"stopwords\"]\n ? item[\"stopwords\"]\n : item[\"stopwords\"].map((p: any) => {\n return p;\n }),\n };\n}\n\nexport function stopAnalyzerDeserializer(item: any): StopAnalyzer {\n return {\n odatatype: item[\"@odata.type\"],\n name: item[\"name\"],\n stopwords: !item[\"stopwords\"]\n ? item[\"stopwords\"]\n : item[\"stopwords\"].map((p: any) => {\n return p;\n }),\n };\n}\n\nexport function lexicalTokenizerUnionArraySerializer(result: Array<LexicalTokenizerUnion>): any[] {\n return result.map((item) => {\n return lexicalTokenizerUnionSerializer(item);\n });\n}\n\nexport function lexicalTokenizerUnionArrayDeserializer(\n result: Array<LexicalTokenizerUnion>,\n): any[] {\n return result.map((item) => {\n return lexicalTokenizerUnionDeserializer(item);\n });\n}\n\n/** Base type for tokenizers. */\nexport interface LexicalTokenizer {\n /** The discriminator for derived types. 
*/\n /** The discriminator possible values: #Microsoft.Azure.Search.ClassicTokenizer, #Microsoft.Azure.Search.EdgeNGramTokenizer, #Microsoft.Azure.Search.KeywordTokenizer, #Microsoft.Azure.Search.KeywordTokenizerV2, #Microsoft.Azure.Search.MicrosoftLanguageTokenizer, #Microsoft.Azure.Search.MicrosoftLanguageStemmingTokenizer, #Microsoft.Azure.Search.NGramTokenizer, #Microsoft.Azure.Search.PathHierarchyTokenizerV2, #Microsoft.Azure.Search.PatternTokenizer, #Microsoft.Azure.Search.StandardTokenizer, #Microsoft.Azure.Search.StandardTokenizerV2, #Microsoft.Azure.Search.UaxUrlEmailTokenizer */\n odatatype: string;\n /** The name of the tokenizer. It must only contain letters, digits, spaces, dashes or underscores, can only start and end with alphanumeric characters, and is limited to 128 characters. */\n name: string;\n}\n\nexport function lexicalTokenizerSerializer(item: LexicalTokenizer): any {\n return { \"@odata.type\": item[\"odatatype\"], name: item[\"name\"] };\n}\n\nexport function lexicalTokenizerDeserializer(item: any): LexicalTokenizer {\n return {\n odatatype: item[\"@odata.type\"],\n name: item[\"name\"],\n };\n}\n\n/** Alias for LexicalTokenizerUnion */\nexport type LexicalTokenizerUnion =\n | ClassicTokenizer\n | EdgeNGramTokenizer\n | KeywordTokenizer\n | KeywordTokenizerV2\n | MicrosoftLanguageTokenizer\n | MicrosoftLanguageStemmingTokenizer\n | NGramTokenizer\n | PathHierarchyTokenizerV2\n | PatternTokenizer\n | LuceneStandardTokenizer\n | LuceneStandardTokenizerV2\n | UaxUrlEmailTokenizer\n | LexicalTokenizer;\n\nexport function lexicalTokenizerUnionSerializer(item: LexicalTokenizerUnion): any {\n switch (item.odatatype) {\n case \"#Microsoft.Azure.Search.ClassicTokenizer\":\n return classicTokenizerSerializer(item as ClassicTokenizer);\n\n case \"#Microsoft.Azure.Search.EdgeNGramTokenizer\":\n return edgeNGramTokenizerSerializer(item as EdgeNGramTokenizer);\n\n case \"#Microsoft.Azure.Search.KeywordTokenizer\":\n return 
keywordTokenizerSerializer(item as KeywordTokenizer);\n\n case \"#Microsoft.Azure.Search.KeywordTokenizerV2\":\n return keywordTokenizerV2Serializer(item as KeywordTokenizerV2);\n\n case \"#Microsoft.Azure.Search.MicrosoftLanguageTokenizer\":\n return microsoftLanguageTokenizerSerializer(item as MicrosoftLanguageTokenizer);\n\n case \"#Microsoft.Azure.Search.MicrosoftLanguageStemmingTokenizer\":\n return microsoftLanguageStemmingTokenizerSerializer(\n item as MicrosoftLanguageStemmingTokenizer,\n );\n\n case \"#Microsoft.Azure.Search.NGramTokenizer\":\n return nGramTokenizerSerializer(item as NGramTokenizer);\n\n case \"#Microsoft.Azure.Search.PathHierarchyTokenizerV2\":\n return pathHierarchyTokenizerV2Serializer(item as PathHierarchyTokenizerV2);\n\n case \"#Microsoft.Azure.Search.PatternTokenizer\":\n return patternTokenizerSerializer(item as PatternTokenizer);\n\n case \"#Microsoft.Azure.Search.StandardTokenizer\":\n return luceneStandardTokenizerSerializer(item as LuceneStandardTokenizer);\n\n case \"#Microsoft.Azure.Search.StandardTokenizerV2\":\n return luceneStandardTokenizerV2Serializer(item as LuceneStandardTokenizerV2);\n\n case \"#Microsoft.Azure.Search.UaxUrlEmailTokenizer\":\n return uaxUrlEmailTokenizerSerializer(item as UaxUrlEmailTokenizer);\n\n default:\n return lexicalTokenizerSerializer(item);\n }\n}\n\nexport function lexicalTokenizerUnionDeserializer(item: any): LexicalTokenizerUnion {\n switch (item[\"@odata.type\"]) {\n case \"#Microsoft.Azure.Search.ClassicTokenizer\":\n return classicTokenizerDeserializer(item as ClassicTokenizer);\n\n case \"#Microsoft.Azure.Search.EdgeNGramTokenizer\":\n return edgeNGramTokenizerDeserializer(item as EdgeNGramTokenizer);\n\n case \"#Microsoft.Azure.Search.KeywordTokenizer\":\n return keywordTokenizerDeserializer(item as KeywordTokenizer);\n\n case \"#Microsoft.Azure.Search.KeywordTokenizerV2\":\n return keywordTokenizerV2Deserializer(item as KeywordTokenizerV2);\n\n case 
\"#Microsoft.Azure.Search.MicrosoftLanguageTokenizer\":\n return microsoftLanguageTokenizerDeserializer(item as MicrosoftLanguageTokenizer);\n\n case \"#Microsoft.Azure.Search.MicrosoftLanguageStemmingTokenizer\":\n return microsoftLanguageStemmingTokenizerDeserializer(\n item as MicrosoftLanguageStemmingTokenizer,\n );\n\n case \"#Microsoft.Azure.Search.NGramTokenizer\":\n return nGramTokenizerDeserializer(item as NGramTokenizer);\n\n case \"#Microsoft.Azure.Search.PathHierarchyTokenizerV2\":\n return pathHierarchyTokenizerV2Deserializer(item as PathHierarchyTokenizerV2);\n\n case \"#Microsoft.Azure.Search.PatternTokenizer\":\n return patternTokenizerDeserializer(item as PatternTokenizer);\n\n case \"#Microsoft.Azure.Search.StandardTokenizer\":\n return luceneStandardTokenizerDeserializer(item as LuceneStandardTokenizer);\n\n case \"#Microsoft.Azure.Search.StandardTokenizerV2\":\n return luceneStandardTokenizerV2Deserializer(item as LuceneStandardTokenizerV2);\n\n case \"#Microsoft.Azure.Search.UaxUrlEmailTokenizer\":\n return uaxUrlEmailTokenizerDeserializer(item as UaxUrlEmailTokenizer);\n\n default:\n return lexicalTokenizerDeserializer(item);\n }\n}\n\n/** Grammar-based tokenizer that is suitable for processing most European-language documents. This tokenizer is implemented using Apache Lucene. */\nexport interface ClassicTokenizer extends LexicalTokenizer {\n /** The maximum token length. Default is 255. Tokens longer than the maximum length are split. The maximum token length that can be used is 300 characters. */\n maxTokenLength?: number;\n /** A URI fragment specifying the type of tokenizer. 
*/\n odatatype: \"#Microsoft.Azure.Search.ClassicTokenizer\";\n}\n\nexport function classicTokenizerSerializer(item: ClassicTokenizer): any {\n return {\n \"@odata.type\": item[\"odatatype\"],\n name: item[\"name\"],\n maxTokenLength: item[\"maxTokenLength\"],\n };\n}\n\nexport function classicTokenizerDeserializer(item: any): ClassicTokenizer {\n return {\n odatatype: item[\"@odata.type\"],\n name: item[\"name\"],\n maxTokenLength: item[\"maxTokenLength\"],\n };\n}\n\n/** Tokenizes the input from an edge into n-grams of the given size(s). This tokenizer is implemented using Apache Lucene. */\nexport interface EdgeNGramTokenizer extends LexicalTokenizer {\n /** The minimum n-gram length. Default is 1. Maximum is 300. Must be less than the value of maxGram. */\n minGram?: number;\n /** The maximum n-gram length. Default is 2. Maximum is 300. */\n maxGram?: number;\n /** Character classes to keep in the tokens. */\n tokenChars?: TokenCharacterKind[];\n /** A URI fragment specifying the type of tokenizer. */\n odatatype: \"#Microsoft.Azure.Search.EdgeNGramTokenizer\";\n}\n\nexport function edgeNGramTokenizerSerializer(item: EdgeNGramTokenizer): any {\n return {\n \"@odata.type\": item[\"odatatype\"],\n name: item[\"name\"],\n minGram: item[\"minGram\"],\n maxGram: item[\"maxGram\"],\n tokenChars: !item[\"tokenChars\"]\n ? item[\"tokenChars\"]\n : item[\"tokenChars\"].map((p: any) => {\n return p;\n }),\n };\n}\n\nexport function edgeNGramTokenizerDeserializer(item: any): EdgeNGramTokenizer {\n return {\n odatatype: item[\"@odata.type\"],\n name: item[\"name\"],\n minGram: item[\"minGram\"],\n maxGram: item[\"maxGram\"],\n tokenChars: !item[\"tokenChars\"]\n ? item[\"tokenChars\"]\n : item[\"tokenChars\"].map((p: any) => {\n return p;\n }),\n };\n}\n\n/** Represents classes of characters on which a token filter can operate. 
*/\nexport type TokenCharacterKind = \"letter\" | \"digit\" | \"whitespace\" | \"punctuation\" | \"symbol\";\n\n/** Emits the entire input as a single token. This tokenizer is implemented using Apache Lucene. */\nexport interface KeywordTokenizer extends LexicalTokenizer {\n /** The read buffer size in bytes. Default is 256. */\n bufferSize?: number;\n /** A URI fragment specifying the type of tokenizer. */\n odatatype: \"#Microsoft.Azure.Search.KeywordTokenizer\";\n}\n\nexport function keywordTokenizerSerializer(item: KeywordTokenizer): any {\n return { \"@odata.type\": item[\"odatatype\"], name: item[\"name\"], bufferSize: item[\"bufferSize\"] };\n}\n\nexport function keywordTokenizerDeserializer(item: any): KeywordTokenizer {\n return {\n odatatype: item[\"@odata.type\"],\n name: item[\"name\"],\n bufferSize: item[\"bufferSize\"],\n };\n}\n\n/** Emits the entire input as a single token. This tokenizer is implemented using Apache Lucene. */\nexport interface KeywordTokenizerV2 extends LexicalTokenizer {\n /** The maximum token length. Default is 256. Tokens longer than the maximum length are split. The maximum token length that can be used is 300 characters. */\n maxTokenLength?: number;\n /** A URI fragment specifying the type of tokenizer. */\n odatatype: \"#Microsoft.Azure.Search.KeywordTokenizerV2\";\n}\n\nexport function keywordTokenizerV2Serializer(item: KeywordTokenizerV2): any {\n return {\n \"@odata.type\": item[\"odatatype\"],\n name: item[\"name\"],\n maxTokenLength: item[\"maxTokenLength\"],\n };\n}\n\nexport function keywordTokenizerV2Deserializer(item: any): KeywordTokenizerV2 {\n return {\n odatatype: item[\"@odata.type\"],\n name: item[\"name\"],\n maxTokenLength: item[\"maxTokenLength\"],\n };\n}\n\n/** Divides text using language-specific rules. */\nexport interface MicrosoftLanguageTokenizer extends LexicalTokenizer {\n /** The maximum token length. Tokens longer than the maximum length are split. 
Maximum token length that can be used is 300 characters. Tokens longer than 300 characters are first split into tokens of length 300 and then each of those tokens is split based on the max token length set. Default is 255. */\n maxTokenLength?: number;\n /** A value indicating how the tokenizer is used. Set to true if used as the search tokenizer, set to false if used as the indexing tokenizer. Default is false. */\n isSearchTokenizer?: boolean;\n /** The language to use. The default is English. */\n language?: MicrosoftTokenizerLanguage;\n /** A URI fragment specifying the type of tokenizer. */\n odatatype: \"#Microsoft.Azure.Search.MicrosoftLanguageTokenizer\";\n}\n\nexport function microsoftLanguageTokenizerSerializer(item: MicrosoftLanguageTokenizer): any {\n return {\n \"@odata.type\": item[\"odatatype\"],\n name: item[\"name\"],\n maxTokenLength: item[\"maxTokenLength\"],\n isSearchTokenizer: item[\"isSearchTokenizer\"],\n language: item[\"language\"],\n };\n}\n\nexport function microsoftLanguageTokenizerDeserializer(item: any): MicrosoftLanguageTokenizer {\n return {\n odatatype: item[\"@odata.type\"],\n name: item[\"name\"],\n maxTokenLength: item[\"maxTokenLength\"],\n isSearchTokenizer: item[\"isSearchTokenizer\"],\n language: item[\"language\"],\n };\n}\n\n/** Lists the languages supported by the Microsoft language tokenizer. 
*/\nexport type MicrosoftTokenizerLanguage =\n | \"bangla\"\n | \"bulgarian\"\n | \"catalan\"\n | \"chineseSimplified\"\n | \"chineseTraditional\"\n | \"croatian\"\n | \"czech\"\n | \"danish\"\n | \"dutch\"\n | \"english\"\n | \"french\"\n | \"german\"\n | \"greek\"\n | \"gujarati\"\n | \"hindi\"\n | \"icelandic\"\n | \"indonesian\"\n | \"italian\"\n | \"japanese\"\n | \"kannada\"\n | \"korean\"\n | \"malay\"\n | \"malayalam\"\n | \"marathi\"\n | \"norwegianBokmaal\"\n | \"polish\"\n | \"portuguese\"\n | \"portugueseBrazilian\"\n | \"punjabi\"\n | \"romanian\"\n | \"russian\"\n | \"serbianCyrillic\"\n | \"serbianLatin\"\n | \"slovenian\"\n | \"spanish\"\n | \"swedish\"\n | \"tamil\"\n | \"telugu\"\n | \"thai\"\n | \"ukrainian\"\n | \"urdu\"\n | \"vietnamese\";\n\n/** Divides text using language-specific rules and reduces words to their base forms. */\nexport interface MicrosoftLanguageStemmingTokenizer extends LexicalTokenizer {\n /** The maximum token length. Tokens longer than the maximum length are split. Maximum token length that can be used is 300 characters. Tokens longer than 300 characters are first split into tokens of length 300 and then each of those tokens is split based on the max token length set. Default is 255. */\n maxTokenLength?: number;\n /** A value indicating how the tokenizer is used. Set to true if used as the search tokenizer, set to false if used as the indexing tokenizer. Default is false. */\n isSearchTokenizer?: boolean;\n /** The language to use. The default is English. */\n language?: MicrosoftStemmingTokenizerLanguage;\n /** A URI fragment specifying the type of tokenizer. 
*/\n odatatype: \"#Microsoft.Azure.Search.MicrosoftLanguageStemmingTokenizer\";\n}\n\nexport function microsoftLanguageStemmingTokenizerSerializer(\n item: MicrosoftLanguageStemmingTokenizer,\n): any {\n return {\n \"@odata.type\": item[\"odatatype\"],\n name: item[\"name\"],\n maxTokenLength: item[\"maxTokenLength\"],\n isSearchTokenizer: item[\"isSearchTokenizer\"],\n language: item[\"language\"],\n };\n}\n\nexport function microsoftLanguageStemmingTokenizerDeserializer(\n item: any,\n): MicrosoftLanguageStemmingTokenizer {\n return {\n odatatype: item[\"@odata.type\"],\n name: item[\"name\"],\n maxTokenLength: item[\"maxTokenLength\"],\n isSearchTokenizer: item[\"isSearchTokenizer\"],\n language: item[\"language\"],\n };\n}\n\n/** Lists the languages supported by the Microsoft language stemming tokenizer. */\nexport type MicrosoftStemmingTokenizerLanguage =\n | \"arabic\"\n | \"bangla\"\n | \"bulgarian\"\n | \"catalan\"\n | \"croatian\"\n | \"czech\"\n | \"danish\"\n | \"dutch\"\n | \"english\"\n | \"estonian\"\n | \"finnish\"\n | \"french\"\n | \"german\"\n | \"greek\"\n | \"gujarati\"\n | \"hebrew\"\n | \"hindi\"\n | \"hungarian\"\n | \"icelandic\"\n | \"indonesian\"\n | \"italian\"\n | \"kannada\"\n | \"latvian\"\n | \"lithuanian\"\n | \"malay\"\n | \"malayalam\"\n | \"marathi\"\n | \"norwegianBokmaal\"\n | \"polish\"\n | \"portuguese\"\n | \"portugueseBrazilian\"\n | \"punjabi\"\n | \"romanian\"\n | \"russian\"\n | \"serbianCyrillic\"\n | \"serbianLatin\"\n | \"slovak\"\n | \"slovenian\"\n | \"spanish\"\n | \"swedish\"\n | \"tamil\"\n | \"telugu\"\n | \"turkish\"\n | \"ukrainian\"\n | \"urdu\";\n\n/** Tokenizes the input into n-grams of the given size(s). This tokenizer is implemented using Apache Lucene. */\nexport interface NGramTokenizer extends LexicalTokenizer {\n /** The minimum n-gram length. Default is 1. Maximum is 300. Must be less than the value of maxGram. */\n minGram?: number;\n /** The maximum n-gram length. Default is 2. Maximum is 300. 
*/\n maxGram?: number;\n /** Character classes to keep in the tokens. */\n tokenChars?: TokenCharacterKind[];\n /** A URI fragment specifying the type of tokenizer. */\n odatatype: \"#Microsoft.Azure.Search.NGramTokenizer\";\n}\n\nexport function nGramTokenizerSerializer(item: NGramTokenizer): any {\n return {\n \"@odata.type\": item[\"odatatype\"],\n name: item[\"name\"],\n minGram: item[\"minGram\"],\n maxGram: item[\"maxGram\"],\n tokenChars: !item[\"tokenChars\"]\n ? item[\"tokenChars\"]\n : item[\"tokenChars\"].map((p: any) => {\n return p;\n }),\n };\n}\n\nexport function nGramTokenizerDeserializer(item: any): NGramTokenizer {\n return {\n odatatype: item[\"@odata.type\"],\n name: item[\"name\"],\n minGram: item[\"minGram\"],\n maxGram: item[\"maxGram\"],\n tokenChars: !item[\"tokenChars\"]\n ? item[\"tokenChars\"]\n : item[\"tokenChars\"].map((p: any) => {\n return p;\n }),\n };\n}\n\n/** Tokenizer for path-like hierarchies. This tokenizer is implemented using Apache Lucene. */\nexport interface PathHierarchyTokenizerV2 extends LexicalTokenizer {\n /** The delimiter character to use. Default is \"/\". */\n delimiter?: string;\n /** A value that, if set, replaces the delimiter character. Default is \"/\". */\n replacement?: string;\n /** The maximum token length. Default and maximum is 300. */\n maxTokenLength?: number;\n /** A value indicating whether to generate tokens in reverse order. Default is false. */\n reverseTokenOrder?: boolean;\n /** The number of initial tokens to skip. Default is 0. */\n numberOfTokensToSkip?: number;\n /** A URI fragment specifying the type of tokenizer. 
*/\n odatatype: \"#Microsoft.Azure.Search.PathHierarchyTokenizerV2\";\n}\n\nexport function pathHierarchyTokenizerV2Serializer(item: PathHierarchyTokenizerV2): any {\n return {\n \"@odata.type\": item[\"odatatype\"],\n name: item[\"name\"],\n delimiter: item[\"delimiter\"],\n replacement: item[\"replacement\"],\n maxTokenLength: item[\"maxTokenLength\"],\n reverse: item[\"reverseTokenOrder\"],\n skip: item[\"numberOfTokensToSkip\"],\n };\n}\n\nexport function pathHierarchyTokenizerV2Deserializer(item: any): PathHierarchyTokenizerV2 {\n return {\n odatatype: item[\"@odata.type\"],\n name: item[\"name\"],\n delimiter: item[\"delimiter\"],\n replacement: item[\"replacement\"],\n maxTokenLength: item[\"maxTokenLength\"],\n reverseTokenOrder: item[\"reverse\"],\n numberOfTokensToSkip: item[\"skip\"],\n };\n}\n\n/** Tokenizer that uses regex pattern matching to construct distinct tokens. This tokenizer is implemented using Apache Lucene. */\nexport interface PatternTokenizer extends LexicalTokenizer {\n /** A regular expression pattern to match token separators. Default is an expression that matches one or more non-word characters. */\n pattern?: string;\n /** Regular expression flags, specified as a '|' separated string of RegexFlags values. */\n flags?: RegexFlags[];\n /** The zero-based ordinal of the matching group in the regular expression pattern to extract into tokens. Use -1 if you want to use the entire pattern to split the input into tokens, irrespective of matching groups. Default is -1. */\n group?: number;\n /** A URI fragment specifying the type of tokenizer. */\n odatatype: \"#Microsoft.Azure.Search.PatternTokenizer\";\n}\n\nexport function patternTokenizerSerializer(item: PatternTokenizer): any {\n return {\n \"@odata.type\": item[\"odatatype\"],\n name: item[\"name\"],\n pattern: item[\"pattern\"],\n flags: !item[\"flags\"]\n ? 
item[\"flags\"]\n : buildPipeCollection(\n item[\"flags\"].map((p: any) => {\n return p;\n }),\n ),\n group: item[\"group\"],\n };\n}\n\nexport function patternTokenizerDeserializer(item: any): PatternTokenizer {\n return {\n odatatype: item[\"@odata.type\"],\n name: item[\"name\"],\n pattern: item[\"pattern\"],\n flags:\n item[\"flags\"] === null || item[\"flags\"] === undefined\n ? item[\"flags\"]\n : parsePipeCollection(item[\"flags\"]),\n group: item[\"group\"],\n };\n}\n\n/** Breaks text following the Unicode Text Segmentation rules. This tokenizer is implemented using Apache Lucene. */\nexport interface LuceneStandardTokenizer extends LexicalTokenizer {\n /** The maximum token length. Default is 255. Tokens longer than the maximum length are split. */\n maxTokenLength?: number;\n /** A URI fragment specifying the type of tokenizer. */\n odatatype: \"#Microsoft.Azure.Search.StandardTokenizer\";\n}\n\nexport function luceneStandardTokenizerSerializer(item: LuceneStandardTokenizer): any {\n return {\n \"@odata.type\": item[\"odatatype\"],\n name: item[\"name\"],\n maxTokenLength: item[\"maxTokenLength\"],\n };\n}\n\nexport function luceneStandardTokenizerDeserializer(item: any): LuceneStandardTokenizer {\n return {\n odatatype: item[\"@odata.type\"],\n name: item[\"name\"],\n maxTokenLength: item[\"maxTokenLength\"],\n };\n}\n\n/** Breaks text following the Unicode Text Segmentation rules. This tokenizer is implemented using Apache Lucene. */\nexport interface LuceneStandardTokenizerV2 extends LexicalTokenizer {\n /** The maximum token length. Default is 255. Tokens longer than the maximum length are split. The maximum token length that can be used is 300 characters. */\n maxTokenLength?: number;\n /** A URI fragment specifying the type of tokenizer. 
*/\n odatatype: \"#Microsoft.Azure.Search.StandardTokenizerV2\";\n}\n\nexport function luceneStandardTokenizerV2Serializer(item: LuceneStandardTokenizerV2): any {\n return {\n \"@odata.type\": item[\"odatatype\"],\n name: item[\"name\"],\n maxTokenLength: item[\"maxTokenLength\"],\n };\n}\n\nexport function luceneStandardTokenizerV2Deserializer(item: any): LuceneStandardTokenizerV2 {\n return {\n odatatype: item[\"@odata.type\"],\n name: item[\"name\"],\n maxTokenLength: item[\"maxTokenLength\"],\n };\n}\n\n/** Tokenizes urls and emails as one token. This tokenizer is implemented using Apache Lucene. */\nexport interface UaxUrlEmailTokenizer extends LexicalTokenizer {\n /** The maximum token length. Default is 255. Tokens longer than the maximum length are split. The maximum token length that can be used is 300 characters. */\n maxTokenLength?: number;\n /** A URI fragment specifying the type of tokenizer. */\n odatatype: \"#Microsoft.Azure.Search.UaxUrlEmailTokenizer\";\n}\n\nexport function uaxUrlEmailTokenizerSerializer(item: UaxUrlEmailTokenizer): any {\n return {\n \"@odata.type\": item[\"odatatype\"],\n name: item[\"name\"],\n maxTokenLength: item[\"maxTokenLength\"],\n };\n}\n\nexport function uaxUrlEmailTokenizerDeserializer(item: any): UaxUrlEmailTokenizer {\n return {\n odatatype: item[\"@odata.type\"],\n name: item[\"name\"],\n maxTokenLength: item[\"maxTokenLength\"],\n };\n}\n\nexport function tokenFilterUnionArraySerializer(result: Array<TokenFilterUnion>): any[] {\n return result.map((item) => {\n return tokenFilterUnionSerializer(item);\n });\n}\n\nexport function tokenFilterUnionArrayDeserializer(result: Array<TokenFilterUnion>): any[] {\n return result.map((item) => {\n return tokenFilterUnionDeserializer(item);\n });\n}\n\n/** Base type for token filters. */\nexport interface TokenFilter {\n /** The discriminator for derived types. 
*/\n /** The discriminator possible values: #Microsoft.Azure.Search.AsciiFoldingTokenFilter, #Microsoft.Azure.Search.CjkBigramTokenFilter, #Microsoft.Azure.Search.CommonGramTokenFilter, #Microsoft.Azure.Search.DictionaryDecompounderTokenFilter, #Microsoft.Azure.Search.EdgeNGramTokenFilter, #Microsoft.Azure.Search.EdgeNGramTokenFilterV2, #Microsoft.Azure.Search.ElisionTokenFilter, #Microsoft.Azure.Search.KeepTokenFilter, #Microsoft.Azure.Search.KeywordMarkerTokenFilter, #Microsoft.Azure.Search.LengthTokenFilter, #Microsoft.Azure.Search.LimitTokenFilter, #Microsoft.Azure.Search.NGramTokenFilter, #Microsoft.Azure.Search.NGramTokenFilterV2, #Microsoft.Azure.Search.PatternCaptureTokenFilter, #Microsoft.Azure.Search.PatternReplaceTokenFilter, #Microsoft.Azure.Search.PhoneticTokenFilter, #Microsoft.Azure.Search.ShingleTokenFilter, #Microsoft.Azure.Search.SnowballTokenFilter, #Microsoft.Azure.Search.StemmerTokenFilter, #Microsoft.Azure.Search.StemmerOverrideTokenFilter, #Microsoft.Azure.Search.StopwordsTokenFilter, #Microsoft.Azure.Search.SynonymTokenFilter, #Microsoft.Azure.Search.TruncateTokenFilter, #Microsoft.Azure.Search.UniqueTokenFilter, #Microsoft.Azure.Search.WordDelimiterTokenFilter */\n odatatype: string;\n /** The name of the token filter. It must only contain letters, digits, spaces, dashes or underscores, can only start and end with alphanumeric characters, and is limited to 128 characters. 
*/\n name: string;\n}\n\nexport function tokenFilterSerializer(item: TokenFilter): any {\n return { \"@odata.type\": item[\"odatatype\"], name: item[\"name\"] };\n}\n\nexport function tokenFilterDeserializer(item: any): TokenFilter {\n return {\n odatatype: item[\"@odata.type\"],\n name: item[\"name\"],\n };\n}\n\n/** Alias for TokenFilterUnion */\nexport type TokenFilterUnion =\n | AsciiFoldingTokenFilter\n | CjkBigramTokenFilter\n | CommonGramTokenFilter\n | DictionaryDecompounderTokenFilter\n | EdgeNGramTokenFilter\n | EdgeNGramTokenFilterV2\n | ElisionTokenFilter\n | KeepTokenFilter\n | KeywordMarkerTokenFilter\n | LengthTokenFilter\n | LimitTokenFilter\n | NGramTokenFilter\n | NGramTokenFilterV2\n | PatternCaptureTokenFilter\n | PatternReplaceTokenFilter\n | PhoneticTokenFilter\n | ShingleTokenFilter\n | SnowballTokenFilter\n | StemmerTokenFilter\n | StemmerOverrideTokenFilter\n | StopwordsTokenFilter\n | SynonymTokenFilter\n | TruncateTokenFilter\n | UniqueTokenFilter\n | WordDelimiterTokenFilter\n | TokenFilter;\n\nexport function tokenFilterUnionSerializer(item: TokenFilterUnion): any {\n switch (item.odatatype) {\n case \"#Microsoft.Azure.Search.AsciiFoldingTokenFilter\":\n return asciiFoldingTokenFilterSerializer(item as AsciiFoldingTokenFilter);\n\n case \"#Microsoft.Azure.Search.CjkBigramTokenFilter\":\n return cjkBigramTokenFilterSerializer(item as CjkBigramTokenFilter);\n\n case \"#Microsoft.Azure.Search.CommonGramTokenFilter\":\n return commonGramTokenFilterSerializer(item as CommonGramTokenFilter);\n\n case \"#Microsoft.Azure.Search.DictionaryDecompounderTokenFilter\":\n return dictionaryDecompounderTokenFilterSerializer(item as DictionaryDecompounderTokenFilter);\n\n case \"#Microsoft.Azure.Search.EdgeNGramTokenFilter\":\n return edgeNGramTokenFilterSerializer(item as EdgeNGramTokenFilter);\n\n case \"#Microsoft.Azure.Search.EdgeNGramTokenFilterV2\":\n return edgeNGramTokenFilterV2Serializer(item as EdgeNGramTokenFilterV2);\n\n case 
\"#Microsoft.Azure.Search.ElisionTokenFilter\":\n return elisionTokenFilterSerializer(item as ElisionTokenFilter);\n\n case \"#Microsoft.Azure.Search.KeepTokenFilter\":\n return keepTokenFilterSerializer(item as KeepTokenFilter);\n\n case \"#Microsoft.Azure.Search.KeywordMarkerTokenFilter\":\n return keywordMarkerTokenFilterSerializer(item as KeywordMarkerTokenFilter);\n\n case \"#Microsoft.Azure.Search.LengthTokenFilter\":\n return lengthTokenFilterSerializer(item as LengthTokenFilter);\n\n case \"#Microsoft.Azure.Search.LimitTokenFilter\":\n return limitTokenFilterSerializer(item as LimitTokenFilter);\n\n case \"#Microsoft.Azure.Search.NGramTokenFilter\":\n return nGramTokenFilterSerializer(item as NGramTokenFilter);\n\n case \"#Microsoft.Azure.Search.NGramTokenFilterV2\":\n return nGramTokenFilterV2Serializer(item as NGramTokenFilterV2);\n\n case \"#Microsoft.Azure.Search.PatternCaptureTokenFilter\":\n return patternCaptureTokenFilterSerializer(item as PatternCaptureTokenFilter);\n\n case \"#Microsoft.Azure.Search.PatternReplaceTokenFilter\":\n return patternReplaceTokenFilterSerializer(item as PatternReplaceTokenFilter);\n\n case \"#Microsoft.Azure.Search.PhoneticTokenFilter\":\n return phoneticTokenFilterSerializer(item as PhoneticTokenFilter);\n\n case \"#Microsoft.Azure.Search.ShingleTokenFilter\":\n return shingleTokenFilterSerializer(item as ShingleTokenFilter);\n\n case \"#Microsoft.Azure.Search.SnowballTokenFilter\":\n return snowballTokenFilterSerializer(item as SnowballTokenFilter);\n\n case \"#Microsoft.Azure.Search.StemmerTokenFilter\":\n return stemmerTokenFilterSerializer(item as StemmerTokenFilter);\n\n case \"#Microsoft.Azure.Search.StemmerOverrideTokenFilter\":\n return stemmerOverrideTokenFilterSerializer(item as StemmerOverrideTokenFilter);\n\n case \"#Microsoft.Azure.Search.StopwordsTokenFilter\":\n return stopwordsTokenFilterSerializer(item as StopwordsTokenFilter);\n\n case \"#Microsoft.Azure.Search.SynonymTokenFilter\":\n return 
synonymTokenFilterSerializer(item as SynonymTokenFilter);\n\n case \"#Microsoft.Azure.Search.TruncateTokenFilter\":\n return truncateTokenFilterSerializer(item as TruncateTokenFilter);\n\n case \"#Microsoft.Azure.Search.UniqueTokenFilter\":\n return uniqueTokenFilterSerializer(item as UniqueTokenFilter);\n\n case \"#Microsoft.Azure.Search.WordDelimiterTokenFilter\":\n return wordDelimiterTokenFilterSerializer(item as WordDelimiterTokenFilter);\n\n default:\n return tokenFilterSerializer(item);\n }\n}\n\nexport function tokenFilterUnionDeserializer(item: any): TokenFilterUnion {\n switch (item[\"@odata.type\"]) {\n case \"#Microsoft.Azure.Search.AsciiFoldingTokenFilter\":\n return asciiFoldingTokenFilterDeserializer(item as AsciiFoldingTokenFilter);\n\n case \"#Microsoft.Azure.Search.CjkBigramTokenFilter\":\n return cjkBigramTokenFilterDeserializer(item as CjkBigramTokenFilter);\n\n case \"#Microsoft.Azure.Search.CommonGramTokenFilter\":\n return commonGramTokenFilterDeserializer(item as CommonGramTokenFilter);\n\n case \"#Microsoft.Azure.Search.DictionaryDecompounderTokenFilter\":\n return dictionaryDecompounderTokenFilterDeserializer(\n item as DictionaryDecompounderTokenFilter,\n );\n\n case \"#Microsoft.Azure.Search.EdgeNGramTokenFilter\":\n return edgeNGramTokenFilterDeserializer(item as EdgeNGramTokenFilter);\n\n case \"#Microsoft.Azure.Search.EdgeNGramTokenFilterV2\":\n return edgeNGramTokenFilterV2Deserializer(item as EdgeNGramTokenFilterV2);\n\n case \"#Microsoft.Azure.Search.ElisionTokenFilter\":\n return elisionTokenFilterDeserializer(item as ElisionTokenFilter);\n\n case \"#Microsoft.Azure.Search.KeepTokenFilter\":\n return keepTokenFilterDeserializer(item as KeepTokenFilter);\n\n case \"#Microsoft.Azure.Search.KeywordMarkerTokenFilter\":\n return keywordMarkerTokenFilterDeserializer(item as KeywordMarkerTokenFilter);\n\n case \"#Microsoft.Azure.Search.LengthTokenFilter\":\n return lengthTokenFilterDeserializer(item as LengthTokenFilter);\n\n case 
\"#Microsoft.Azure.Search.LimitTokenFilter\":\n return limitTokenFilterDeserializer(item as LimitTokenFilter);\n\n case \"#Microsoft.Azure.Search.NGramTokenFilter\":\n return nGramTokenFilterDeserializer(item as NGramTokenFilter);\n\n case \"#Microsoft.Azure.Search.NGramTokenFilterV2\":\n return nGramTokenFilterV2Deserializer(item as NGramTokenFilterV2);\n\n case \"#Microsoft.Azure.Search.PatternCaptureTokenFilter\":\n return patternCaptureTokenFilterDeserializer(item as PatternCaptureTokenFilter);\n\n case \"#Microsoft.Azure.Search.PatternReplaceTokenFilter\":\n return patternReplaceTokenFilterDeserializer(item as PatternReplaceTokenFilter);\n\n case \"#Microsoft.Azure.Search.PhoneticTokenFilter\":\n return phoneticTokenFilterDeserializer(item as PhoneticTokenFilter);\n\n case \"#Microsoft.Azure.Search.ShingleTokenFilter\":\n return shingleTokenFilterDeserializer(item as ShingleTokenFilter);\n\n case \"#Microsoft.Azure.Search.SnowballTokenFilter\":\n return snowballTokenFilterDeserializer(item as SnowballTokenFilter);\n\n case \"#Microsoft.Azure.Search.StemmerTokenFilter\":\n return stemmerTokenFilterDeserializer(item as StemmerTokenFilter);\n\n case \"#Microsoft.Azure.Search.StemmerOverrideTokenFilter\":\n return stemmerOverrideTokenFilterDeserializer(item as StemmerOverrideTokenFilter);\n\n case \"#Microsoft.Azure.Search.StopwordsTokenFilter\":\n return stopwordsTokenFilterDeserializer(item as StopwordsTokenFilter);\n\n case \"#Microsoft.Azure.Search.SynonymTokenFilter\":\n return synonymTokenFilterDeserializer(item as SynonymTokenFilter);\n\n case \"#Microsoft.Azure.Search.TruncateTokenFilter\":\n return truncateTokenFilterDeserializer(item as TruncateTokenFilter);\n\n case \"#Microsoft.Azure.Search.UniqueTokenFilter\":\n return uniqueTokenFilterDeserializer(item as UniqueTokenFilter);\n\n case \"#Microsoft.Azure.Search.WordDelimiterTokenFilter\":\n return wordDelimiterTokenFilterDeserializer(item as WordDelimiterTokenFilter);\n\n default:\n return 
tokenFilterDeserializer(item);\n }\n}\n\n/** Converts alphabetic, numeric, and symbolic Unicode characters which are not in the first 127 ASCII characters (the \"Basic Latin\" Unicode block) into their ASCII equivalents, if such equivalents exist. This token filter is implemented using Apache Lucene. */\nexport interface AsciiFoldingTokenFilter extends TokenFilter {\n /** A value indicating whether the original token will be kept. Default is false. */\n preserveOriginal?: boolean;\n /** A URI fragment specifying the type of token filter. */\n odatatype: \"#Microsoft.Azure.Search.AsciiFoldingTokenFilter\";\n}\n\nexport function asciiFoldingTokenFilterSerializer(item: AsciiFoldingTokenFilter): any {\n return {\n \"@odata.type\": item[\"odatatype\"],\n name: item[\"name\"],\n preserveOriginal: item[\"preserveOriginal\"],\n };\n}\n\nexport function asciiFoldingTokenFilterDeserializer(item: any): AsciiFoldingTokenFilter {\n return {\n odatatype: item[\"@odata.type\"],\n name: item[\"name\"],\n preserveOriginal: item[\"preserveOriginal\"],\n };\n}\n\n/** Forms bigrams of CJK terms that are generated from the standard tokenizer. This token filter is implemented using Apache Lucene. */\nexport interface CjkBigramTokenFilter extends TokenFilter {\n /** The scripts to ignore. */\n ignoreScripts?: CjkBigramTokenFilterScripts[];\n /** A value indicating whether to output both unigrams and bigrams (if true), or just bigrams (if false). Default is false. */\n outputUnigrams?: boolean;\n /** A URI fragment specifying the type of token filter. */\n odatatype: \"#Microsoft.Azure.Search.CjkBigramTokenFilter\";\n}\n\nexport function cjkBigramTokenFilterSerializer(item: CjkBigramTokenFilter): any {\n return {\n \"@odata.type\": item[\"odatatype\"],\n name: item[\"name\"],\n ignoreScripts: !item[\"ignoreScripts\"]\n ? 
item[\"ignoreScripts\"]\n : item[\"ignoreScripts\"].map((p: any) => {\n return p;\n }),\n outputUnigrams: item[\"outputUnigrams\"],\n };\n}\n\nexport function cjkBigramTokenFilterDeserializer(item: any): CjkBigramTokenFilter {\n return {\n odatatype: item[\"@odata.type\"],\n name: item[\"name\"],\n ignoreScripts: !item[\"ignoreScripts\"]\n ? item[\"ignoreScripts\"]\n : item[\"ignoreScripts\"].map((p: any) => {\n return p;\n }),\n outputUnigrams: item[\"outputUnigrams\"],\n };\n}\n\n/** Scripts that can be ignored by CjkBigramTokenFilter. */\nexport type CjkBigramTokenFilterScripts = \"han\" | \"hiragana\" | \"katakana\" | \"hangul\";\n\n/** Construct bigrams for frequently occurring terms while indexing. Single terms are still indexed too, with bigrams overlaid. This token filter is implemented using Apache Lucene. */\nexport interface CommonGramTokenFilter extends TokenFilter {\n /** The set of common words. */\n commonWords: string[];\n /** A value indicating whether common words matching will be case insensitive. Default is false. */\n ignoreCase?: boolean;\n /** A value that indicates whether the token filter is in query mode. When in query mode, the token filter generates bigrams and then removes common words and single terms followed by a common word. Default is false. */\n useQueryMode?: boolean;\n /** A URI fragment specifying the type of token filter. 
*/\n odatatype: \"#Microsoft.Azure.Search.CommonGramTokenFilter\";\n}\n\nexport function commonGramTokenFilterSerializer(item: CommonGramTokenFilter): any {\n return {\n \"@odata.type\": item[\"odatatype\"],\n name: item[\"name\"],\n commonWords: item[\"commonWords\"].map((p: any) => {\n return p;\n }),\n ignoreCase: item[\"ignoreCase\"],\n queryMode: item[\"useQueryMode\"],\n };\n}\n\nexport function commonGramTokenFilterDeserializer(item: any): CommonGramTokenFilter {\n return {\n odatatype: item[\"@odata.type\"],\n name: item[\"name\"],\n commonWords: item[\"commonWords\"].map((p: any) => {\n return p;\n }),\n ignoreCase: item[\"ignoreCase\"],\n useQueryMode: item[\"queryMode\"],\n };\n}\n\n/** Decomposes compound words found in many Germanic languages. This token filter is implemented using Apache Lucene. */\nexport interface DictionaryDecompounderTokenFilter extends TokenFilter {\n /** The list of words to match against. */\n wordList: string[];\n /** The minimum word size. Only words longer than this get processed. Default is 5. Maximum is 300. */\n minWordSize?: number;\n /** The minimum subword size. Only subwords longer than this are outputted. Default is 2. Maximum is 300. */\n minSubwordSize?: number;\n /** The maximum subword size. Only subwords shorter than this are outputted. Default is 15. Maximum is 300. */\n maxSubwordSize?: number;\n /** A value indicating whether to add only the longest matching subword to the output. Default is false. */\n onlyLongestMatch?: boolean;\n /** A URI fragment specifying the type of token filter. 
*/\n odatatype: \"#Microsoft.Azure.Search.DictionaryDecompounderTokenFilter\";\n}\n\nexport function dictionaryDecompounderTokenFilterSerializer(\n item: DictionaryDecompounderTokenFilter,\n): any {\n return {\n \"@odata.type\": item[\"odatatype\"],\n name: item[\"name\"],\n wordList: item[\"wordList\"].map((p: any) => {\n return p;\n }),\n minWordSize: item[\"minWordSize\"],\n minSubwordSize: item[\"minSubwordSize\"],\n maxSubwordSize: item[\"maxSubwordSize\"],\n onlyLongestMatch: item[\"onlyLongestMatch\"],\n };\n}\n\nexport function dictionaryDecompounderTokenFilterDeserializer(\n item: any,\n): DictionaryDecompounderTokenFilter {\n return {\n odatatype: item[\"@odata.type\"],\n name: item[\"name\"],\n wordList: item[\"wordList\"].map((p: any) => {\n return p;\n }),\n minWordSize: item[\"minWordSize\"],\n minSubwordSize: item[\"minSubwordSize\"],\n maxSubwordSize: item[\"maxSubwordSize\"],\n onlyLongestMatch: item[\"onlyLongestMatch\"],\n };\n}\n\n/** Generates n-grams of the given size(s) starting from the front or the back of an input token. This token filter is implemented using Apache Lucene. */\nexport interface EdgeNGramTokenFilter extends TokenFilter {\n /** The minimum n-gram length. Default is 1. Must be less than the value of maxGram. */\n minGram?: number;\n /** The maximum n-gram length. Default is 2. */\n maxGram?: number;\n /** Specifies which side of the input the n-gram should be generated from. Default is \"front\". */\n side?: EdgeNGramTokenFilterSide;\n /** A URI fragment specifying the type of token filter. 
*/\n odatatype: \"#Microsoft.Azure.Search.EdgeNGramTokenFilter\";\n}\n\nexport function edgeNGramTokenFilterSerializer(item: EdgeNGramTokenFilter): any {\n return {\n \"@odata.type\": item[\"odatatype\"],\n name: item[\"name\"],\n minGram: item[\"minGram\"],\n maxGram: item[\"maxGram\"],\n side: item[\"side\"],\n };\n}\n\nexport function edgeNGramTokenFilterDeserializer(item: any): EdgeNGramTokenFilter {\n return {\n odatatype: item[\"@odata.type\"],\n name: item[\"name\"],\n minGram: item[\"minGram\"],\n maxGram: item[\"maxGram\"],\n side: item[\"side\"],\n };\n}\n\n/** Specifies which side of the input an n-gram should be generated from. */\nexport type EdgeNGramTokenFilterSide = \"front\" | \"back\";\n\n/** Generates n-grams of the given size(s) starting from the front or the back of an input token. This token filter is implemented using Apache Lucene. */\nexport interface EdgeNGramTokenFilterV2 extends TokenFilter {\n /** The minimum n-gram length. Default is 1. Maximum is 300. Must be less than the value of maxGram. */\n minGram?: number;\n /** The maximum n-gram length. Default is 2. Maximum is 300. */\n maxGram?: number;\n /** Specifies which side of the input the n-gram should be generated from. Default is \"front\". */\n side?: EdgeNGramTokenFilterSide;\n /** A URI fragment specifying the type of token filter. */\n odatatype: \"#Microsoft.Azure.Search.EdgeNGramTokenFilterV2\";\n}\n\nexport function edgeNGramTokenFilterV2Serializer(item: EdgeNGramTokenFilterV2): any {\n return {\n \"@odata.type\": item[\"odatatype\"],\n name: item[\"name\"],\n minGram: item[\"minGram\"],\n maxGram: item[\"maxGram\"],\n side: item[\"side\"],\n };\n}\n\nexport function edgeNGramTokenFilterV2Deserializer(item: any): EdgeNGramTokenFilterV2 {\n return {\n odatatype: item[\"@odata.type\"],\n name: item[\"name\"],\n minGram: item[\"minGram\"],\n maxGram: item[\"maxGram\"],\n side: item[\"side\"],\n };\n}\n\n/** Removes elisions. 
For example, \"l'avion\" (the plane) will be converted to \"avion\" (plane). This token filter is implemented using Apache Lucene. */\nexport interface ElisionTokenFilter extends TokenFilter {\n /** The set of articles to remove. */\n articles?: string[];\n /** A URI fragment specifying the type of token filter. */\n odatatype: \"#Microsoft.Azure.Search.ElisionTokenFilter\";\n}\n\nexport function elisionTokenFilterSerializer(item: ElisionTokenFilter): any {\n return {\n \"@odata.type\": item[\"odatatype\"],\n name: item[\"name\"],\n articles: !item[\"articles\"]\n ? item[\"articles\"]\n : item[\"articles\"].map((p: any) => {\n return p;\n }),\n };\n}\n\nexport function elisionTokenFilterDeserializer(item: any): ElisionTokenFilter {\n return {\n odatatype: item[\"@odata.type\"],\n name: item[\"name\"],\n articles: !item[\"articles\"]\n ? item[\"articles\"]\n : item[\"articles\"].map((p: any) => {\n return p;\n }),\n };\n}\n\n/** A token filter that only keeps tokens with text contained in a specified list of words. This token filter is implemented using Apache Lucene. */\nexport interface KeepTokenFilter extends TokenFilter {\n /** The list of words to keep. */\n keepWords: string[];\n /** A value indicating whether to lower case all words first. Default is false. */\n lowerCaseKeepWords?: boolean;\n /** A URI fragment specifying the type of token filter. 
*/\n odatatype: \"#Microsoft.Azure.Search.KeepTokenFilter\";\n}\n\nexport function keepTokenFilterSerializer(item: KeepTokenFilter): any {\n return {\n \"@odata.type\": item[\"odatatype\"],\n name: item[\"name\"],\n keepWords: item[\"keepWords\"].map((p: any) => {\n return p;\n }),\n keepWordsCase: item[\"lowerCaseKeepWords\"],\n };\n}\n\nexport function keepTokenFilterDeserializer(item: any): KeepTokenFilter {\n return {\n odatatype: item[\"@odata.type\"],\n name: item[\"name\"],\n keepWords: item[\"keepWords\"].map((p: any) => {\n return p;\n }),\n lowerCaseKeepWords: item[\"keepWordsCase\"],\n };\n}\n\n/** Marks terms as keywords. This token filter is implemented using Apache Lucene. */\nexport interface KeywordMarkerTokenFilter extends TokenFilter {\n /** A list of words to mark as keywords. */\n keywords: string[];\n /** A value indicating whether to ignore case. If true, all words are converted to lower case first. Default is false. */\n ignoreCase?: boolean;\n /** A URI fragment specifying the type of token filter. */\n odatatype: \"#Microsoft.Azure.Search.KeywordMarkerTokenFilter\";\n}\n\nexport function keywordMarkerTokenFilterSerializer(item: KeywordMarkerTokenFilter): any {\n return {\n \"@odata.type\": item[\"odatatype\"],\n name: item[\"name\"],\n keywords: item[\"keywords\"].map((p: any) => {\n return p;\n }),\n ignoreCase: item[\"ignoreCase\"],\n };\n}\n\nexport function keywordMarkerTokenFilterDeserializer(item: any): KeywordMarkerTokenFilter {\n return {\n odatatype: item[\"@odata.type\"],\n name: item[\"name\"],\n keywords: item[\"keywords\"].map((p: any) => {\n return p;\n }),\n ignoreCase: item[\"ignoreCase\"],\n };\n}\n\n/** Removes words that are too long or too short. This token filter is implemented using Apache Lucene. */\nexport interface LengthTokenFilter extends TokenFilter {\n /** The minimum length in characters. Default is 0. Maximum is 300. Must be less than the value of max. 
*/\n minLength?: number;\n /** The maximum length in characters. Default and maximum is 300. */\n maxLength?: number;\n /** A URI fragment specifying the type of token filter. */\n odatatype: \"#Microsoft.Azure.Search.LengthTokenFilter\";\n}\n\nexport function lengthTokenFilterSerializer(item: LengthTokenFilter): any {\n return {\n \"@odata.type\": item[\"odatatype\"],\n name: item[\"name\"],\n min: item[\"minLength\"],\n max: item[\"maxLength\"],\n };\n}\n\nexport function lengthTokenFilterDeserializer(item: any): LengthTokenFilter {\n return {\n odatatype: item[\"@odata.type\"],\n name: item[\"name\"],\n minLength: item[\"min\"],\n maxLength: item[\"max\"],\n };\n}\n\n/** Limits the number of tokens while indexing. This token filter is implemented using Apache Lucene. */\nexport interface LimitTokenFilter extends TokenFilter {\n /** The maximum number of tokens to produce. Default is 1. */\n maxTokenCount?: number;\n /** A value indicating whether all tokens from the input must be consumed even if maxTokenCount is reached. Default is false. */\n consumeAllTokens?: boolean;\n /** A URI fragment specifying the type of token filter. */\n odatatype: \"#Microsoft.Azure.Search.LimitTokenFilter\";\n}\n\nexport function limitTokenFilterSerializer(item: LimitTokenFilter): any {\n return {\n \"@odata.type\": item[\"odatatype\"],\n name: item[\"name\"],\n maxTokenCount: item[\"maxTokenCount\"],\n consumeAllTokens: item[\"consumeAllTokens\"],\n };\n}\n\nexport function limitTokenFilterDeserializer(item: any): LimitTokenFilter {\n return {\n odatatype: item[\"@odata.type\"],\n name: item[\"name\"],\n maxTokenCount: item[\"maxTokenCount\"],\n consumeAllTokens: item[\"consumeAllTokens\"],\n };\n}\n\n/** Generates n-grams of the given size(s). This token filter is implemented using Apache Lucene. */\nexport interface NGramTokenFilter extends TokenFilter {\n /** The minimum n-gram length. Default is 1. Must be less than the value of maxGram. 
*/\n minGram?: number;\n /** The maximum n-gram length. Default is 2. */\n maxGram?: number;\n /** A URI fragment specifying the type of token filter. */\n odatatype: \"#Microsoft.Azure.Search.NGramTokenFilter\";\n}\n\nexport function nGramTokenFilterSerializer(item: NGramTokenFilter): any {\n return {\n \"@odata.type\": item[\"odatatype\"],\n name: item[\"name\"],\n minGram: item[\"minGram\"],\n maxGram: item[\"maxGram\"],\n };\n}\n\nexport function nGramTokenFilterDeserializer(item: any): NGramTokenFilter {\n return {\n odatatype: item[\"@odata.type\"],\n name: item[\"name\"],\n minGram: item[\"minGram\"],\n maxGram: item[\"maxGram\"],\n };\n}\n\n/** Generates n-grams of the given size(s). This token filter is implemented using Apache Lucene. */\nexport interface NGramTokenFilterV2 extends TokenFilter {\n /** The minimum n-gram length. Default is 1. Maximum is 300. Must be less than the value of maxGram. */\n minGram?: number;\n /** The maximum n-gram length. Default is 2. Maximum is 300. */\n maxGram?: number;\n /** A URI fragment specifying the type of token filter. */\n odatatype: \"#Microsoft.Azure.Search.NGramTokenFilterV2\";\n}\n\nexport function nGramTokenFilterV2Serializer(item: NGramTokenFilterV2): any {\n return {\n \"@odata.type\": item[\"odatatype\"],\n name: item[\"name\"],\n minGram: item[\"minGram\"],\n maxGram: item[\"maxGram\"],\n };\n}\n\nexport function nGramTokenFilterV2Deserializer(item: any): NGramTokenFilterV2 {\n return {\n odatatype: item[\"@odata.type\"],\n name: item[\"name\"],\n minGram: item[\"minGram\"],\n maxGram: item[\"maxGram\"],\n };\n}\n\n/** Uses Java regexes to emit multiple tokens - one for each capture group in one or more patterns. This token filter is implemented using Apache Lucene. */\nexport interface PatternCaptureTokenFilter extends TokenFilter {\n /** A list of patterns to match against each token. 
*/\n patterns: string[];\n /** A value indicating whether to return the original token even if one of the patterns matches. Default is true. */\n preserveOriginal?: boolean;\n /** A URI fragment specifying the type of token filter. */\n odatatype: \"#Microsoft.Azure.Search.PatternCaptureTokenFilter\";\n}\n\nexport function patternCaptureTokenFilterSerializer(item: PatternCaptureTokenFilter): any {\n return {\n \"@odata.type\": item[\"odatatype\"],\n name: item[\"name\"],\n patterns: item[\"patterns\"].map((p: any) => {\n return p;\n }),\n preserveOriginal: item[\"preserveOriginal\"],\n };\n}\n\nexport function patternCaptureTokenFilterDeserializer(item: any): PatternCaptureTokenFilter {\n return {\n odatatype: item[\"@odata.type\"],\n name: item[\"name\"],\n patterns: item[\"patterns\"].map((p: any) => {\n return p;\n }),\n preserveOriginal: item[\"preserveOriginal\"],\n };\n}\n\n/** A character filter that replaces characters in the input string. It uses a regular expression to identify character sequences to preserve and a replacement pattern to identify characters to replace. For example, given the input text \"aa bb aa bb\", pattern \"(aa)\\\\s+(bb)\", and replacement \"$1#$2\", the result would be \"aa#bb aa#bb\". This token filter is implemented using Apache Lucene. */\nexport interface PatternReplaceTokenFilter extends TokenFilter {\n /** A regular expression pattern. */\n pattern: string;\n /** The replacement text. */\n replacement: string;\n /** A URI fragment specifying the type of token filter. 
*/\n odatatype: \"#Microsoft.Azure.Search.PatternReplaceTokenFilter\";\n}\n\nexport function patternReplaceTokenFilterSerializer(item: PatternReplaceTokenFilter): any {\n return {\n \"@odata.type\": item[\"odatatype\"],\n name: item[\"name\"],\n pattern: item[\"pattern\"],\n replacement: item[\"replacement\"],\n };\n}\n\nexport function patternReplaceTokenFilterDeserializer(item: any): PatternReplaceTokenFilter {\n return {\n odatatype: item[\"@odata.type\"],\n name: item[\"name\"],\n pattern: item[\"pattern\"],\n replacement: item[\"replacement\"],\n };\n}\n\n/** Create tokens for phonetic matches. This token filter is implemented using Apache Lucene. */\nexport interface PhoneticTokenFilter extends TokenFilter {\n /** The phonetic encoder to use. Default is \"metaphone\". */\n encoder?: PhoneticEncoder;\n /** A value indicating whether encoded tokens should replace original tokens. If false, encoded tokens are added as synonyms. Default is true. */\n replaceOriginalTokens?: boolean;\n /** A URI fragment specifying the type of token filter. */\n odatatype: \"#Microsoft.Azure.Search.PhoneticTokenFilter\";\n}\n\nexport function phoneticTokenFilterSerializer(item: PhoneticTokenFilter): any {\n return {\n \"@odata.type\": item[\"odatatype\"],\n name: item[\"name\"],\n encoder: item[\"encoder\"],\n replace: item[\"replaceOriginalTokens\"],\n };\n}\n\nexport function phoneticTokenFilterDeserializer(item: any): PhoneticTokenFilter {\n return {\n odatatype: item[\"@odata.type\"],\n name: item[\"name\"],\n encoder: item[\"encoder\"],\n replaceOriginalTokens: item[\"replace\"],\n };\n}\n\n/** Identifies the type of phonetic encoder to use with a PhoneticTokenFilter. */\nexport type PhoneticEncoder =\n | \"metaphone\"\n | \"doubleMetaphone\"\n | \"soundex\"\n | \"refinedSoundex\"\n | \"caverphone1\"\n | \"caverphone2\"\n | \"cologne\"\n | \"nysiis\"\n | \"koelnerPhonetik\"\n | \"haasePhonetik\"\n | \"beiderMorse\";\n\n/** Creates combinations of tokens as a single token. 
This token filter is implemented using Apache Lucene. */\nexport interface ShingleTokenFilter extends TokenFilter {\n /** The maximum shingle size. Default and minimum value is 2. */\n maxShingleSize?: number;\n /** The minimum shingle size. Default and minimum value is 2. Must be less than the value of maxShingleSize. */\n minShingleSize?: number;\n /** A value indicating whether the output stream will contain the input tokens (unigrams) as well as shingles. Default is true. */\n outputUnigrams?: boolean;\n /** A value indicating whether to output unigrams for those times when no shingles are available. This property takes precedence when outputUnigrams is set to false. Default is false. */\n outputUnigramsIfNoShingles?: boolean;\n /** The string to use when joining adjacent tokens to form a shingle. Default is a single space (\" \"). */\n tokenSeparator?: string;\n /** The string to insert for each position at which there is no token. Default is an underscore (\"_\"). */\n filterToken?: string;\n /** A URI fragment specifying the type of token filter. 
*/\n odatatype: \"#Microsoft.Azure.Search.ShingleTokenFilter\";\n}\n\nexport function shingleTokenFilterSerializer(item: ShingleTokenFilter): any {\n return {\n \"@odata.type\": item[\"odatatype\"],\n name: item[\"name\"],\n maxShingleSize: item[\"maxShingleSize\"],\n minShingleSize: item[\"minShingleSize\"],\n outputUnigrams: item[\"outputUnigrams\"],\n outputUnigramsIfNoShingles: item[\"outputUnigramsIfNoShingles\"],\n tokenSeparator: item[\"tokenSeparator\"],\n filterToken: item[\"filterToken\"],\n };\n}\n\nexport function shingleTokenFilterDeserializer(item: any): ShingleTokenFilter {\n return {\n odatatype: item[\"@odata.type\"],\n name: item[\"name\"],\n maxShingleSize: item[\"maxShingleSize\"],\n minShingleSize: item[\"minShingleSize\"],\n outputUnigrams: item[\"outputUnigrams\"],\n outputUnigramsIfNoShingles: item[\"outputUnigramsIfNoShingles\"],\n tokenSeparator: item[\"tokenSeparator\"],\n filterToken: item[\"filterToken\"],\n };\n}\n\n/** A filter that stems words using a Snowball-generated stemmer. This token filter is implemented using Apache Lucene. */\nexport interface SnowballTokenFilter extends TokenFilter {\n /** The language to use. */\n language: SnowballTokenFilterLanguage;\n /** A URI fragment specifying the type of token filter. */\n odatatype: \"#Microsoft.Azure.Search.SnowballTokenFilter\";\n}\n\nexport function snowballTokenFilterSerializer(item: SnowballTokenFilter): any {\n return { \"@odata.type\": item[\"odatatype\"], name: item[\"name\"], language: item[\"language\"] };\n}\n\nexport function snowballTokenFilterDeserializer(item: any): SnowballTokenFilter {\n return {\n odatatype: item[\"@odata.type\"],\n name: item[\"name\"],\n language: item[\"language\"],\n };\n}\n\n/** The language to use for a Snowball token filter. 
*/\nexport type SnowballTokenFilterLanguage =\n | \"armenian\"\n | \"basque\"\n | \"catalan\"\n | \"danish\"\n | \"dutch\"\n | \"english\"\n | \"finnish\"\n | \"french\"\n | \"german\"\n | \"german2\"\n | \"hungarian\"\n | \"italian\"\n | \"kp\"\n | \"lovins\"\n | \"norwegian\"\n | \"porter\"\n | \"portuguese\"\n | \"romanian\"\n | \"russian\"\n | \"spanish\"\n | \"swedish\"\n | \"turkish\";\n\n/** Language specific stemming filter. This token filter is implemented using Apache Lucene. See https://learn.microsoft.com/rest/api/searchservice/Custom-analyzers-in-Azure-Search#TokenFilters */\nexport interface StemmerTokenFilter extends TokenFilter {\n /** The language to use. */\n language: StemmerTokenFilterLanguage;\n /** A URI fragment specifying the type of token filter. */\n odatatype: \"#Microsoft.Azure.Search.StemmerTokenFilter\";\n}\n\nexport function stemmerTokenFilterSerializer(item: StemmerTokenFilter): any {\n return { \"@odata.type\": item[\"odatatype\"], name: item[\"name\"], language: item[\"language\"] };\n}\n\nexport function stemmerTokenFilterDeserializer(item: any): StemmerTokenFilter {\n return {\n odatatype: item[\"@odata.type\"],\n name: item[\"name\"],\n language: item[\"language\"],\n };\n}\n\n/** The language to use for a stemmer token filter. 
*/\nexport type StemmerTokenFilterLanguage =\n | \"arabic\"\n | \"armenian\"\n | \"basque\"\n | \"brazilian\"\n | \"bulgarian\"\n | \"catalan\"\n | \"czech\"\n | \"danish\"\n | \"dutch\"\n | \"dutchKp\"\n | \"english\"\n | \"lightEnglish\"\n | \"minimalEnglish\"\n | \"possessiveEnglish\"\n | \"porter2\"\n | \"lovins\"\n | \"finnish\"\n | \"lightFinnish\"\n | \"french\"\n | \"lightFrench\"\n | \"minimalFrench\"\n | \"galician\"\n | \"minimalGalician\"\n | \"german\"\n | \"german2\"\n | \"lightGerman\"\n | \"minimalGerman\"\n | \"greek\"\n | \"hindi\"\n | \"hungarian\"\n | \"lightHungarian\"\n | \"indonesian\"\n | \"irish\"\n | \"italian\"\n | \"lightItalian\"\n | \"sorani\"\n | \"latvian\"\n | \"norwegian\"\n | \"lightNorwegian\"\n | \"minimalNorwegian\"\n | \"lightNynorsk\"\n | \"minimalNynorsk\"\n | \"portuguese\"\n | \"lightPortuguese\"\n | \"minimalPortuguese\"\n | \"portugueseRslp\"\n | \"romanian\"\n | \"russian\"\n | \"lightRussian\"\n | \"spanish\"\n | \"lightSpanish\"\n | \"swedish\"\n | \"lightSwedish\"\n | \"turkish\";\n\n/** Provides the ability to override other stemming filters with custom dictionary-based stemming. Any dictionary-stemmed terms will be marked as keywords so that they will not be stemmed with stemmers down the chain. Must be placed before any stemming filters. This token filter is implemented using Apache Lucene. See http://lucene.apache.org/core/4_10_3/analyzers-common/org/apache/lucene/analysis/miscellaneous/StemmerOverrideFilter.html */\nexport interface StemmerOverrideTokenFilter extends TokenFilter {\n /** A list of stemming rules in the following format: \"word => stem\", for example: \"ran => run\". */\n rules: string[];\n /** A URI fragment specifying the type of token filter. 
*/\n odatatype: \"#Microsoft.Azure.Search.StemmerOverrideTokenFilter\";\n}\n\nexport function stemmerOverrideTokenFilterSerializer(item: StemmerOverrideTokenFilter): any {\n return {\n \"@odata.type\": item[\"odatatype\"],\n name: item[\"name\"],\n rules: item[\"rules\"].map((p: any) => {\n return p;\n }),\n };\n}\n\nexport function stemmerOverrideTokenFilterDeserializer(item: any): StemmerOverrideTokenFilter {\n return {\n odatatype: item[\"@odata.type\"],\n name: item[\"name\"],\n rules: item[\"rules\"].map((p: any) => {\n return p;\n }),\n };\n}\n\n/** Removes stop words from a token stream. This token filter is implemented using Apache Lucene. See http://lucene.apache.org/core/4_10_3/analyzers-common/org/apache/lucene/analysis/core/StopFilter.html */\nexport interface StopwordsTokenFilter extends TokenFilter {\n /** The list of stopwords. This property and the stopwords list property cannot both be set. */\n stopwords?: string[];\n /** A predefined list of stopwords to use. This property and the stopwords property cannot both be set. Default is English. */\n stopwordsList?: StopwordsList;\n /** A value indicating whether to ignore case. If true, all words are converted to lower case first. Default is false. */\n ignoreCase?: boolean;\n /** A value indicating whether to ignore the last search term if it's a stop word. Default is true. */\n removeTrailingStopWords?: boolean;\n /** A URI fragment specifying the type of token filter. */\n odatatype: \"#Microsoft.Azure.Search.StopwordsTokenFilter\";\n}\n\nexport function stopwordsTokenFilterSerializer(item: StopwordsTokenFilter): any {\n return {\n \"@odata.type\": item[\"odatatype\"],\n name: item[\"name\"],\n stopwords: !item[\"stopwords\"]\n ? 
item[\"stopwords\"]\n : item[\"stopwords\"].map((p: any) => {\n return p;\n }),\n stopwordsList: item[\"stopwordsList\"],\n ignoreCase: item[\"ignoreCase\"],\n removeTrailing: item[\"removeTrailingStopWords\"],\n };\n}\n\nexport function stopwordsTokenFilterDeserializer(item: any): StopwordsTokenFilter {\n return {\n odatatype: item[\"@odata.type\"],\n name: item[\"name\"],\n stopwords: !item[\"stopwords\"]\n ? item[\"stopwords\"]\n : item[\"stopwords\"].map((p: any) => {\n return p;\n }),\n stopwordsList: item[\"stopwordsList\"],\n ignoreCase: item[\"ignoreCase\"],\n removeTrailingStopWords: item[\"removeTrailing\"],\n };\n}\n\n/** Identifies a predefined list of language-specific stopwords. */\nexport type StopwordsList =\n | \"arabic\"\n | \"armenian\"\n | \"basque\"\n | \"brazilian\"\n | \"bulgarian\"\n | \"catalan\"\n | \"czech\"\n | \"danish\"\n | \"dutch\"\n | \"english\"\n | \"finnish\"\n | \"french\"\n | \"galician\"\n | \"german\"\n | \"greek\"\n | \"hindi\"\n | \"hungarian\"\n | \"indonesian\"\n | \"irish\"\n | \"italian\"\n | \"latvian\"\n | \"norwegian\"\n | \"persian\"\n | \"portuguese\"\n | \"romanian\"\n | \"russian\"\n | \"sorani\"\n | \"spanish\"\n | \"swedish\"\n | \"thai\"\n | \"turkish\";\n\n/** Matches single or multi-word synonyms in a token stream. This token filter is implemented using Apache Lucene. */\nexport interface SynonymTokenFilter extends TokenFilter {\n /** A list of synonyms in following one of two formats: 1. incredible, unbelievable, fabulous => amazing - all terms on the left side of => symbol will be replaced with all terms on its right side; 2. incredible, unbelievable, fabulous, amazing - comma separated list of equivalent words. Set the expand option to change how this list is interpreted. */\n synonyms: string[];\n /** A value indicating whether to case-fold input for matching. Default is false. 
*/\n ignoreCase?: boolean;\n /** A value indicating whether all words in the list of synonyms (if => notation is not used) will map to one another. If true, all words in the list of synonyms (if => notation is not used) will map to one another. The following list: incredible, unbelievable, fabulous, amazing is equivalent to: incredible, unbelievable, fabulous, amazing => incredible, unbelievable, fabulous, amazing. If false, the following list: incredible, unbelievable, fabulous, amazing will be equivalent to: incredible, unbelievable, fabulous, amazing => incredible. Default is true. */\n expand?: boolean;\n /** A URI fragment specifying the type of token filter. */\n odatatype: \"#Microsoft.Azure.Search.SynonymTokenFilter\";\n}\n\nexport function synonymTokenFilterSerializer(item: SynonymTokenFilter): any {\n return {\n \"@odata.type\": item[\"odatatype\"],\n name: item[\"name\"],\n synonyms: item[\"synonyms\"].map((p: any) => {\n return p;\n }),\n ignoreCase: item[\"ignoreCase\"],\n expand: item[\"expand\"],\n };\n}\n\nexport function synonymTokenFilterDeserializer(item: any): SynonymTokenFilter {\n return {\n odatatype: item[\"@odata.type\"],\n name: item[\"name\"],\n synonyms: item[\"synonyms\"].map((p: any) => {\n return p;\n }),\n ignoreCase: item[\"ignoreCase\"],\n expand: item[\"expand\"],\n };\n}\n\n/** Truncates the terms to a specific length. This token filter is implemented using Apache Lucene. */\nexport interface TruncateTokenFilter extends TokenFilter {\n /** The length at which terms will be truncated. Default and maximum is 300. */\n length?: number;\n /** A URI fragment specifying the type of token filter. 
*/\n odatatype: \"#Microsoft.Azure.Search.TruncateTokenFilter\";\n}\n\nexport function truncateTokenFilterSerializer(item: TruncateTokenFilter): any {\n return { \"@odata.type\": item[\"odatatype\"], name: item[\"name\"], length: item[\"length\"] };\n}\n\nexport function truncateTokenFilterDeserializer(item: any): TruncateTokenFilter {\n return {\n odatatype: item[\"@odata.type\"],\n name: item[\"name\"],\n length: item[\"length\"],\n };\n}\n\n/** Filters out tokens with same text as the previous token. This token filter is implemented using Apache Lucene. */\nexport interface UniqueTokenFilter extends TokenFilter {\n /** A value indicating whether to remove duplicates only at the same position. Default is false. */\n onlyOnSamePosition?: boolean;\n /** A URI fragment specifying the type of token filter. */\n odatatype: \"#Microsoft.Azure.Search.UniqueTokenFilter\";\n}\n\nexport function uniqueTokenFilterSerializer(item: UniqueTokenFilter): any {\n return {\n \"@odata.type\": item[\"odatatype\"],\n name: item[\"name\"],\n onlyOnSamePosition: item[\"onlyOnSamePosition\"],\n };\n}\n\nexport function uniqueTokenFilterDeserializer(item: any): UniqueTokenFilter {\n return {\n odatatype: item[\"@odata.type\"],\n name: item[\"name\"],\n onlyOnSamePosition: item[\"onlyOnSamePosition\"],\n };\n}\n\n/** Splits words into subwords and performs optional transformations on subword groups. This token filter is implemented using Apache Lucene. */\nexport interface WordDelimiterTokenFilter extends TokenFilter {\n /** A value indicating whether to generate part words. If set, causes parts of words to be generated; for example \"AzureSearch\" becomes \"Azure\" \"Search\". Default is true. */\n generateWordParts?: boolean;\n /** A value indicating whether to generate number subwords. Default is true. */\n generateNumberParts?: boolean;\n /** A value indicating whether maximum runs of word parts will be catenated. 
For example, if this is set to true, \"Azure-Search\" becomes \"AzureSearch\". Default is false. */\n catenateWords?: boolean;\n /** A value indicating whether maximum runs of number parts will be catenated. For example, if this is set to true, \"1-2\" becomes \"12\". Default is false. */\n catenateNumbers?: boolean;\n /** A value indicating whether all subword parts will be catenated. For example, if this is set to true, \"Azure-Search-1\" becomes \"AzureSearch1\". Default is false. */\n catenateAll?: boolean;\n /** A value indicating whether to split words on caseChange. For example, if this is set to true, \"AzureSearch\" becomes \"Azure\" \"Search\". Default is true. */\n splitOnCaseChange?: boolean;\n /** A value indicating whether original words will be preserved and added to the subword list. Default is false. */\n preserveOriginal?: boolean;\n /** A value indicating whether to split on numbers. For example, if this is set to true, \"Azure1Search\" becomes \"Azure\" \"1\" \"Search\". Default is true. */\n splitOnNumerics?: boolean;\n /** A value indicating whether to remove trailing \"'s\" for each subword. Default is true. */\n stemEnglishPossessive?: boolean;\n /** A list of tokens to protect from being delimited. */\n protectedWords?: string[];\n /** A URI fragment specifying the type of token filter. 
*/\n odatatype: \"#Microsoft.Azure.Search.WordDelimiterTokenFilter\";\n}\n\nexport function wordDelimiterTokenFilterSerializer(item: WordDelimiterTokenFilter): any {\n return {\n \"@odata.type\": item[\"odatatype\"],\n name: item[\"name\"],\n generateWordParts: item[\"generateWordParts\"],\n generateNumberParts: item[\"generateNumberParts\"],\n catenateWords: item[\"catenateWords\"],\n catenateNumbers: item[\"catenateNumbers\"],\n catenateAll: item[\"catenateAll\"],\n splitOnCaseChange: item[\"splitOnCaseChange\"],\n preserveOriginal: item[\"preserveOriginal\"],\n splitOnNumerics: item[\"splitOnNumerics\"],\n stemEnglishPossessive: item[\"stemEnglishPossessive\"],\n protectedWords: !item[\"protectedWords\"]\n ? item[\"protectedWords\"]\n : item[\"protectedWords\"].map((p: any) => {\n return p;\n }),\n };\n}\n\nexport function wordDelimiterTokenFilterDeserializer(item: any): WordDelimiterTokenFilter {\n return {\n odatatype: item[\"@odata.type\"],\n name: item[\"name\"],\n generateWordParts: item[\"generateWordParts\"],\n generateNumberParts: item[\"generateNumberParts\"],\n catenateWords: item[\"catenateWords\"],\n catenateNumbers: item[\"catenateNumbers\"],\n catenateAll: item[\"catenateAll\"],\n splitOnCaseChange: item[\"splitOnCaseChange\"],\n preserveOriginal: item[\"preserveOriginal\"],\n splitOnNumerics: item[\"splitOnNumerics\"],\n stemEnglishPossessive: item[\"stemEnglishPossessive\"],\n protectedWords: !item[\"protectedWords\"]\n ? item[\"protectedWords\"]\n : item[\"protectedWords\"].map((p: any) => {\n return p;\n }),\n };\n}\n\nexport function charFilterUnionArraySerializer(result: Array<CharFilterUnion>): any[] {\n return result.map((item) => {\n return charFilterUnionSerializer(item);\n });\n}\n\nexport function charFilterUnionArrayDeserializer(result: Array<CharFilterUnion>): any[] {\n return result.map((item) => {\n return charFilterUnionDeserializer(item);\n });\n}\n\n/** Base type for character filters. 
*/\nexport interface CharFilter {\n /** The discriminator for derived types. */\n /** The discriminator possible values: #Microsoft.Azure.Search.MappingCharFilter, #Microsoft.Azure.Search.PatternReplaceCharFilter */\n odatatype: string;\n /** The name of the char filter. It must only contain letters, digits, spaces, dashes or underscores, can only start and end with alphanumeric characters, and is limited to 128 characters. */\n name: string;\n}\n\nexport function charFilterSerializer(item: CharFilter): any {\n return { \"@odata.type\": item[\"odatatype\"], name: item[\"name\"] };\n}\n\nexport function charFilterDeserializer(item: any): CharFilter {\n return {\n odatatype: item[\"@odata.type\"],\n name: item[\"name\"],\n };\n}\n\n/** Alias for CharFilterUnion */\nexport type CharFilterUnion = MappingCharFilter | PatternReplaceCharFilter | CharFilter;\n\nexport function charFilterUnionSerializer(item: CharFilterUnion): any {\n switch (item.odatatype) {\n case \"#Microsoft.Azure.Search.MappingCharFilter\":\n return mappingCharFilterSerializer(item as MappingCharFilter);\n\n case \"#Microsoft.Azure.Search.PatternReplaceCharFilter\":\n return patternReplaceCharFilterSerializer(item as PatternReplaceCharFilter);\n\n default:\n return charFilterSerializer(item);\n }\n}\n\nexport function charFilterUnionDeserializer(item: any): CharFilterUnion {\n switch (item[\"@odata.type\"]) {\n case \"#Microsoft.Azure.Search.MappingCharFilter\":\n return mappingCharFilterDeserializer(item as MappingCharFilter);\n\n case \"#Microsoft.Azure.Search.PatternReplaceCharFilter\":\n return patternReplaceCharFilterDeserializer(item as PatternReplaceCharFilter);\n\n default:\n return charFilterDeserializer(item);\n }\n}\n\n/** A character filter that applies mappings defined with the mappings option. Matching is greedy (longest pattern matching at a given point wins). Replacement is allowed to be the empty string. This character filter is implemented using Apache Lucene. 
*/\nexport interface MappingCharFilter extends CharFilter {\n /** A list of mappings of the following format: \"a=>b\" (all occurrences of the character \"a\" will be replaced with character \"b\"). */\n mappings: string[];\n /** A URI fragment specifying the type of char filter. */\n odatatype: \"#Microsoft.Azure.Search.MappingCharFilter\";\n}\n\nexport function mappingCharFilterSerializer(item: MappingCharFilter): any {\n return {\n \"@odata.type\": item[\"odatatype\"],\n name: item[\"name\"],\n mappings: item[\"mappings\"].map((p: any) => {\n return p;\n }),\n };\n}\n\nexport function mappingCharFilterDeserializer(item: any): MappingCharFilter {\n return {\n odatatype: item[\"@odata.type\"],\n name: item[\"name\"],\n mappings: item[\"mappings\"].map((p: any) => {\n return p;\n }),\n };\n}\n\n/** A character filter that replaces characters in the input string. It uses a regular expression to identify character sequences to preserve and a replacement pattern to identify characters to replace. For example, given the input text \"aa bb aa bb\", pattern \"(aa)\\\\s+(bb)\", and replacement \"$1#$2\", the result would be \"aa#bb aa#bb\". This character filter is implemented using Apache Lucene. */\nexport interface PatternReplaceCharFilter extends CharFilter {\n /** A regular expression pattern. */\n pattern: string;\n /** The replacement text. */\n replacement: string;\n /** A URI fragment specifying the type of char filter. 
*/\n odatatype: \"#Microsoft.Azure.Search.PatternReplaceCharFilter\";\n}\n\nexport function patternReplaceCharFilterSerializer(item: PatternReplaceCharFilter): any {\n return {\n \"@odata.type\": item[\"odatatype\"],\n name: item[\"name\"],\n pattern: item[\"pattern\"],\n replacement: item[\"replacement\"],\n };\n}\n\nexport function patternReplaceCharFilterDeserializer(item: any): PatternReplaceCharFilter {\n return {\n odatatype: item[\"@odata.type\"],\n name: item[\"name\"],\n pattern: item[\"pattern\"],\n replacement: item[\"replacement\"],\n };\n}\n\nexport function lexicalNormalizerUnionArraySerializer(\n result: Array<LexicalNormalizerUnion>,\n): any[] {\n return result.map((item) => {\n return lexicalNormalizerUnionSerializer(item);\n });\n}\n\nexport function lexicalNormalizerUnionArrayDeserializer(\n result: Array<LexicalNormalizerUnion>,\n): any[] {\n return result.map((item) => {\n return lexicalNormalizerUnionDeserializer(item);\n });\n}\n\n/** Base type for normalizers. */\nexport interface LexicalNormalizer {\n /** The discriminator for derived types. */\n /** The discriminator possible values: #Microsoft.Azure.Search.CustomNormalizer */\n odatatype: string;\n /** The name of the normalizer. It must only contain letters, digits, spaces, dashes or underscores, can only start and end with alphanumeric characters, and is limited to 128 characters. 
*/\n name: string;\n}\n\nexport function lexicalNormalizerSerializer(item: LexicalNormalizer): any {\n return { \"@odata.type\": item[\"odatatype\"], name: item[\"name\"] };\n}\n\nexport function lexicalNormalizerDeserializer(item: any): LexicalNormalizer {\n return {\n odatatype: item[\"@odata.type\"],\n name: item[\"name\"],\n };\n}\n\n/** Alias for LexicalNormalizerUnion */\nexport type LexicalNormalizerUnion = CustomNormalizer | LexicalNormalizer;\n\nexport function lexicalNormalizerUnionSerializer(item: LexicalNormalizerUnion): any {\n switch (item.odatatype) {\n case \"#Microsoft.Azure.Search.CustomNormalizer\":\n return customNormalizerSerializer(item as CustomNormalizer);\n\n default:\n return lexicalNormalizerSerializer(item);\n }\n}\n\nexport function lexicalNormalizerUnionDeserializer(item: any): LexicalNormalizerUnion {\n switch (item[\"@odata.type\"]) {\n case \"#Microsoft.Azure.Search.CustomNormalizer\":\n return customNormalizerDeserializer(item as CustomNormalizer);\n\n default:\n return lexicalNormalizerDeserializer(item);\n }\n}\n\n/** Allows you to configure normalization for filterable, sortable, and facetable fields, which by default operate with strict matching. This is a user-defined configuration consisting of at least one or more filters, which modify the token that is stored. */\nexport interface CustomNormalizer extends LexicalNormalizer {\n /** A list of token filters used to filter out or modify the input token. For example, you can specify a lowercase filter that converts all characters to lowercase. The filters are run in the order in which they are listed. */\n tokenFilters?: TokenFilterName[];\n /** A list of character filters used to prepare input text before it is processed. For instance, they can replace certain characters or symbols. The filters are run in the order in which they are listed. */\n charFilters?: CharFilterName[];\n /** A URI fragment specifying the type of normalizer. 
*/\n odatatype: \"#Microsoft.Azure.Search.CustomNormalizer\";\n}\n\nexport function customNormalizerSerializer(item: CustomNormalizer): any {\n return {\n \"@odata.type\": item[\"odatatype\"],\n name: item[\"name\"],\n tokenFilters: !item[\"tokenFilters\"]\n ? item[\"tokenFilters\"]\n : item[\"tokenFilters\"].map((p: any) => {\n return p;\n }),\n charFilters: !item[\"charFilters\"]\n ? item[\"charFilters\"]\n : item[\"charFilters\"].map((p: any) => {\n return p;\n }),\n };\n}\n\nexport function customNormalizerDeserializer(item: any): CustomNormalizer {\n return {\n odatatype: item[\"@odata.type\"],\n name: item[\"name\"],\n tokenFilters: !item[\"tokenFilters\"]\n ? item[\"tokenFilters\"]\n : item[\"tokenFilters\"].map((p: any) => {\n return p;\n }),\n charFilters: !item[\"charFilters\"]\n ? item[\"charFilters\"]\n : item[\"charFilters\"].map((p: any) => {\n return p;\n }),\n };\n}\n\n/** Base type for similarity algorithms. Similarity algorithms are used to calculate scores that tie queries to documents. The higher the score, the more relevant the document is to that specific query. Those scores are used to rank the search results. */\nexport interface SimilarityAlgorithm {\n /** The discriminator for derived types. 
*/\n /** The discriminator possible values: #Microsoft.Azure.Search.ClassicSimilarity, #Microsoft.Azure.Search.BM25Similarity */\n odatatype: string;\n}\n\nexport function similarityAlgorithmSerializer(item: SimilarityAlgorithm): any {\n return { \"@odata.type\": item[\"odatatype\"] };\n}\n\nexport function similarityAlgorithmDeserializer(item: any): SimilarityAlgorithm {\n return {\n odatatype: item[\"@odata.type\"],\n };\n}\n\n/** Alias for SimilarityAlgorithmUnion */\nexport type SimilarityAlgorithmUnion = ClassicSimilarity | BM25Similarity | SimilarityAlgorithm;\n\nexport function similarityAlgorithmUnionSerializer(item: SimilarityAlgorithmUnion): any {\n switch (item.odatatype) {\n case \"#Microsoft.Azure.Search.ClassicSimilarity\":\n return classicSimilaritySerializer(item as ClassicSimilarity);\n\n case \"#Microsoft.Azure.Search.BM25Similarity\":\n return bm25SimilaritySerializer(item as BM25Similarity);\n\n default:\n return similarityAlgorithmSerializer(item);\n }\n}\n\nexport function similarityAlgorithmUnionDeserializer(item: any): SimilarityAlgorithmUnion {\n switch (item[\"@odata.type\"]) {\n case \"#Microsoft.Azure.Search.ClassicSimilarity\":\n return classicSimilarityDeserializer(item as ClassicSimilarity);\n\n case \"#Microsoft.Azure.Search.BM25Similarity\":\n return bm25SimilarityDeserializer(item as BM25Similarity);\n\n default:\n return similarityAlgorithmDeserializer(item);\n }\n}\n\n/** Legacy similarity algorithm which uses the Lucene TFIDFSimilarity implementation of TF-IDF. This variation of TF-IDF introduces static document length normalization as well as coordinating factors that penalize documents that only partially match the searched queries. */\nexport interface ClassicSimilarity extends SimilarityAlgorithm {\n /** The discriminator for derived types. 
*/\n odatatype: \"#Microsoft.Azure.Search.ClassicSimilarity\";\n}\n\nexport function classicSimilaritySerializer(item: ClassicSimilarity): any {\n return { \"@odata.type\": item[\"odatatype\"] };\n}\n\nexport function classicSimilarityDeserializer(item: any): ClassicSimilarity {\n return {\n odatatype: item[\"@odata.type\"],\n };\n}\n\n/** Ranking function based on the Okapi BM25 similarity algorithm. BM25 is a TF-IDF-like algorithm that includes length normalization (controlled by the 'b' parameter) as well as term frequency saturation (controlled by the 'k1' parameter). */\nexport interface BM25Similarity extends SimilarityAlgorithm {\n /** This property controls the scaling function between the term frequency of each matching terms and the final relevance score of a document-query pair. By default, a value of 1.2 is used. A value of 0.0 means the score does not scale with an increase in term frequency. */\n k1?: number;\n /** This property controls how the length of a document affects the relevance score. By default, a value of 0.75 is used. A value of 0.0 means no length normalization is applied, while a value of 1.0 means the score is fully normalized by the length of the document. */\n b?: number;\n /** The discriminator for derived types. */\n odatatype: \"#Microsoft.Azure.Search.BM25Similarity\";\n}\n\nexport function bm25SimilaritySerializer(item: BM25Similarity): any {\n return { \"@odata.type\": item[\"odatatype\"], k1: item[\"k1\"], b: item[\"b\"] };\n}\n\nexport function bm25SimilarityDeserializer(item: any): BM25Similarity {\n return {\n odatatype: item[\"@odata.type\"],\n k1: item[\"k1\"],\n b: item[\"b\"],\n };\n}\n\n/** Defines parameters for a search index that influence semantic capabilities. */\nexport interface SemanticSearch {\n /** Allows you to set the name of a default semantic configuration in your index, making it optional to pass it on as a query parameter every time. 
*/\n defaultConfigurationName?: string;\n /** The semantic configurations for the index. */\n configurations?: SemanticConfiguration[];\n}\n\nexport function semanticSearchSerializer(item: SemanticSearch): any {\n return {\n defaultConfiguration: item[\"defaultConfigurationName\"],\n configurations: !item[\"configurations\"]\n ? item[\"configurations\"]\n : semanticConfigurationArraySerializer(item[\"configurations\"]),\n };\n}\n\nexport function semanticSearchDeserializer(item: any): SemanticSearch {\n return {\n defaultConfigurationName: item[\"defaultConfiguration\"],\n configurations: !item[\"configurations\"]\n ? item[\"configurations\"]\n : semanticConfigurationArrayDeserializer(item[\"configurations\"]),\n };\n}\n\nexport function semanticConfigurationArraySerializer(result: Array<SemanticConfiguration>): any[] {\n return result.map((item) => {\n return semanticConfigurationSerializer(item);\n });\n}\n\nexport function semanticConfigurationArrayDeserializer(\n result: Array<SemanticConfiguration>,\n): any[] {\n return result.map((item) => {\n return semanticConfigurationDeserializer(item);\n });\n}\n\n/** Defines a specific configuration to be used in the context of semantic capabilities. */\nexport interface SemanticConfiguration {\n /** The name of the semantic configuration. */\n name: string;\n /** Describes the title, content, and keyword fields to be used for semantic ranking, captions, highlights, and answers. At least one of the three sub properties (titleField, prioritizedKeywordsFields and prioritizedContentFields) need to be set. */\n prioritizedFields: SemanticPrioritizedFields;\n /** Specifies the score type to be used for the sort order of the search results. 
*/\n rankingOrder?: RankingOrder;\n}\n\nexport function semanticConfigurationSerializer(item: SemanticConfiguration): any {\n return {\n name: item[\"name\"],\n prioritizedFields: semanticPrioritizedFieldsSerializer(item[\"prioritizedFields\"]),\n rankingOrder: item[\"rankingOrder\"],\n };\n}\n\nexport function semanticConfigurationDeserializer(item: any): SemanticConfiguration {\n return {\n name: item[\"name\"],\n prioritizedFields: semanticPrioritizedFieldsDeserializer(item[\"prioritizedFields\"]),\n rankingOrder: item[\"rankingOrder\"],\n };\n}\n\n/** Describes the title, content, and keywords fields to be used for semantic ranking, captions, highlights, and answers. */\nexport interface SemanticPrioritizedFields {\n /** Defines the title field to be used for semantic ranking, captions, highlights, and answers. If you don't have a title field in your index, leave this blank. */\n titleField?: SemanticField;\n /** Defines the content fields to be used for semantic ranking, captions, highlights, and answers. For the best result, the selected fields should contain text in natural language form. The order of the fields in the array represents their priority. Fields with lower priority may get truncated if the content is long. */\n contentFields?: SemanticField[];\n /** Defines the keyword fields to be used for semantic ranking, captions, highlights, and answers. For the best result, the selected fields should contain a list of keywords. The order of the fields in the array represents their priority. Fields with lower priority may get truncated if the content is long. */\n keywordsFields?: SemanticField[];\n}\n\nexport function semanticPrioritizedFieldsSerializer(item: SemanticPrioritizedFields): any {\n return {\n titleField: !item[\"titleField\"]\n ? item[\"titleField\"]\n : semanticFieldSerializer(item[\"titleField\"]),\n prioritizedContentFields: !item[\"contentFields\"]\n ? 
item[\"contentFields\"]\n : semanticFieldArraySerializer(item[\"contentFields\"]),\n prioritizedKeywordsFields: !item[\"keywordsFields\"]\n ? item[\"keywordsFields\"]\n : semanticFieldArraySerializer(item[\"keywordsFields\"]),\n };\n}\n\nexport function semanticPrioritizedFieldsDeserializer(item: any): SemanticPrioritizedFields {\n return {\n titleField: !item[\"titleField\"]\n ? item[\"titleField\"]\n : semanticFieldDeserializer(item[\"titleField\"]),\n contentFields: !item[\"prioritizedContentFields\"]\n ? item[\"prioritizedContentFields\"]\n : semanticFieldArrayDeserializer(item[\"prioritizedContentFields\"]),\n keywordsFields: !item[\"prioritizedKeywordsFields\"]\n ? item[\"prioritizedKeywordsFields\"]\n : semanticFieldArrayDeserializer(item[\"prioritizedKeywordsFields\"]),\n };\n}\n\n/** A field that is used as part of the semantic configuration. */\nexport interface SemanticField {\n /** The name of the field. */\n name: string;\n}\n\nexport function semanticFieldSerializer(item: SemanticField): any {\n return { fieldName: item[\"name\"] };\n}\n\nexport function semanticFieldDeserializer(item: any): SemanticField {\n return {\n name: item[\"fieldName\"],\n };\n}\n\nexport function semanticFieldArraySerializer(result: Array<SemanticField>): any[] {\n return result.map((item) => {\n return semanticFieldSerializer(item);\n });\n}\n\nexport function semanticFieldArrayDeserializer(result: Array<SemanticField>): any[] {\n return result.map((item) => {\n return semanticFieldDeserializer(item);\n });\n}\n\n/** Represents score to use for sort order of documents. */\nexport enum KnownRankingOrder {\n /** Sets sort order as BoostedRerankerScore */\n BoostedRerankerScore = \"BoostedRerankerScore\",\n /** Sets sort order as ReRankerScore */\n RerankerScore = \"RerankerScore\",\n}\n\n/**\n * Represents score to use for sort order of documents. 
\\\n * {@link KnownRankingOrder} can be used interchangeably with RankingOrder,\n * this enum contains the known values that the service supports.\n * ### Known values supported by the service\n * **BoostedRerankerScore**: Sets sort order as BoostedRerankerScore \\\n * **RerankerScore**: Sets sort order as ReRankerScore\n */\nexport type RankingOrder = string;\n\n/** Contains configuration options related to vector search. */\nexport interface VectorSearch {\n /** Defines combinations of configurations to use with vector search. */\n profiles?: VectorSearchProfile[];\n /** Contains configuration options specific to the algorithm used during indexing or querying. */\n algorithms?: VectorSearchAlgorithmConfigurationUnion[];\n /** Contains configuration options on how to vectorize text vector queries. */\n vectorizers?: VectorSearchVectorizerUnion[];\n /** Contains configuration options specific to the compression method used during indexing or querying. */\n compressions?: VectorSearchCompressionUnion[];\n}\n\nexport function vectorSearchSerializer(item: VectorSearch): any {\n return {\n profiles: !item[\"profiles\"]\n ? item[\"profiles\"]\n : vectorSearchProfileArraySerializer(item[\"profiles\"]),\n algorithms: !item[\"algorithms\"]\n ? item[\"algorithms\"]\n : vectorSearchAlgorithmConfigurationUnionArraySerializer(item[\"algorithms\"]),\n vectorizers: !item[\"vectorizers\"]\n ? item[\"vectorizers\"]\n : vectorSearchVectorizerUnionArraySerializer(item[\"vectorizers\"]),\n compressions: !item[\"compressions\"]\n ? item[\"compressions\"]\n : vectorSearchCompressionUnionArraySerializer(item[\"compressions\"]),\n };\n}\n\nexport function vectorSearchDeserializer(item: any): VectorSearch {\n return {\n profiles: !item[\"profiles\"]\n ? item[\"profiles\"]\n : vectorSearchProfileArrayDeserializer(item[\"profiles\"]),\n algorithms: !item[\"algorithms\"]\n ? 
item[\"algorithms\"]\n : vectorSearchAlgorithmConfigurationUnionArrayDeserializer(item[\"algorithms\"]),\n vectorizers: !item[\"vectorizers\"]\n ? item[\"vectorizers\"]\n : vectorSearchVectorizerUnionArrayDeserializer(item[\"vectorizers\"]),\n compressions: !item[\"compressions\"]\n ? item[\"compressions\"]\n : vectorSearchCompressionUnionArrayDeserializer(item[\"compressions\"]),\n };\n}\n\nexport function vectorSearchProfileArraySerializer(result: Array<VectorSearchProfile>): any[] {\n return result.map((item) => {\n return vectorSearchProfileSerializer(item);\n });\n}\n\nexport function vectorSearchProfileArrayDeserializer(result: Array<VectorSearchProfile>): any[] {\n return result.map((item) => {\n return vectorSearchProfileDeserializer(item);\n });\n}\n\n/** Defines a combination of configurations to use with vector search. */\nexport interface VectorSearchProfile {\n /** The name to associate with this particular vector search profile. */\n name: string;\n /** The name of the vector search algorithm configuration that specifies the algorithm and optional parameters. */\n algorithmConfigurationName: string;\n /** The name of the vectorization being configured for use with vector search. */\n vectorizerName?: string;\n /** The name of the compression method configuration that specifies the compression method and optional parameters. 
*/\n compressionName?: string;\n}\n\nexport function vectorSearchProfileSerializer(item: VectorSearchProfile): any {\n return {\n name: item[\"name\"],\n algorithm: item[\"algorithmConfigurationName\"],\n vectorizer: item[\"vectorizerName\"],\n compression: item[\"compressionName\"],\n };\n}\n\nexport function vectorSearchProfileDeserializer(item: any): VectorSearchProfile {\n return {\n name: item[\"name\"],\n algorithmConfigurationName: item[\"algorithm\"],\n vectorizerName: item[\"vectorizer\"],\n compressionName: item[\"compression\"],\n };\n}\n\nexport function vectorSearchAlgorithmConfigurationUnionArraySerializer(\n result: Array<VectorSearchAlgorithmConfigurationUnion>,\n): any[] {\n return result.map((item) => {\n return vectorSearchAlgorithmConfigurationUnionSerializer(item);\n });\n}\n\nexport function vectorSearchAlgorithmConfigurationUnionArrayDeserializer(\n result: Array<VectorSearchAlgorithmConfigurationUnion>,\n): any[] {\n return result.map((item) => {\n return vectorSearchAlgorithmConfigurationUnionDeserializer(item);\n });\n}\n\n/** Contains configuration options specific to the algorithm used during indexing or querying. */\nexport interface VectorSearchAlgorithmConfiguration {\n /** The name to associate with this particular configuration. */\n name: string;\n /** Type of VectorSearchAlgorithmConfiguration. 
*/\n /** The discriminator possible values: hnsw, exhaustiveKnn */\n kind: VectorSearchAlgorithmKind;\n}\n\nexport function vectorSearchAlgorithmConfigurationSerializer(\n item: VectorSearchAlgorithmConfiguration,\n): any {\n return { name: item[\"name\"], kind: item[\"kind\"] };\n}\n\nexport function vectorSearchAlgorithmConfigurationDeserializer(\n item: any,\n): VectorSearchAlgorithmConfiguration {\n return {\n name: item[\"name\"],\n kind: item[\"kind\"],\n };\n}\n\n/** Alias for VectorSearchAlgorithmConfigurationUnion */\nexport type VectorSearchAlgorithmConfigurationUnion =\n | HnswAlgorithmConfiguration\n | ExhaustiveKnnAlgorithmConfiguration\n | VectorSearchAlgorithmConfiguration;\n\nexport function vectorSearchAlgorithmConfigurationUnionSerializer(\n item: VectorSearchAlgorithmConfigurationUnion,\n): any {\n switch (item.kind) {\n case \"hnsw\":\n return hnswAlgorithmConfigurationSerializer(item as HnswAlgorithmConfiguration);\n\n case \"exhaustiveKnn\":\n return exhaustiveKnnAlgorithmConfigurationSerializer(\n item as ExhaustiveKnnAlgorithmConfiguration,\n );\n\n default:\n return vectorSearchAlgorithmConfigurationSerializer(item);\n }\n}\n\nexport function vectorSearchAlgorithmConfigurationUnionDeserializer(\n item: any,\n): VectorSearchAlgorithmConfigurationUnion {\n switch (item[\"kind\"]) {\n case \"hnsw\":\n return hnswAlgorithmConfigurationDeserializer(item as HnswAlgorithmConfiguration);\n\n case \"exhaustiveKnn\":\n return exhaustiveKnnAlgorithmConfigurationDeserializer(\n item as ExhaustiveKnnAlgorithmConfiguration,\n );\n\n default:\n return vectorSearchAlgorithmConfigurationDeserializer(item);\n }\n}\n\n/** The algorithm used for indexing and querying. */\nexport enum KnownVectorSearchAlgorithmKind {\n /** HNSW (Hierarchical Navigable Small World), a type of approximate nearest neighbors algorithm. */\n Hnsw = \"hnsw\",\n /** Exhaustive KNN algorithm which will perform brute-force search. 
*/\n ExhaustiveKnn = \"exhaustiveKnn\",\n}\n\n/**\n * The algorithm used for indexing and querying. \\\n * {@link KnownVectorSearchAlgorithmKind} can be used interchangeably with VectorSearchAlgorithmKind,\n * this enum contains the known values that the service supports.\n * ### Known values supported by the service\n * **hnsw**: HNSW (Hierarchical Navigable Small World), a type of approximate nearest neighbors algorithm. \\\n * **exhaustiveKnn**: Exhaustive KNN algorithm which will perform brute-force search.\n */\nexport type VectorSearchAlgorithmKind = string;\n\n/** Contains configuration options specific to the HNSW approximate nearest neighbors algorithm used during indexing and querying. The HNSW algorithm offers a tunable trade-off between search speed and accuracy. */\nexport interface HnswAlgorithmConfiguration extends VectorSearchAlgorithmConfiguration {\n /** Contains the parameters specific to HNSW algorithm. */\n parameters?: HnswParameters;\n /** The name of the kind of algorithm being configured for use with vector search. */\n kind: \"hnsw\";\n}\n\nexport function hnswAlgorithmConfigurationSerializer(item: HnswAlgorithmConfiguration): any {\n return {\n name: item[\"name\"],\n kind: item[\"kind\"],\n hnswParameters: !item[\"parameters\"]\n ? item[\"parameters\"]\n : hnswParametersSerializer(item[\"parameters\"]),\n };\n}\n\nexport function hnswAlgorithmConfigurationDeserializer(item: any): HnswAlgorithmConfiguration {\n return {\n name: item[\"name\"],\n kind: item[\"kind\"],\n parameters: !item[\"hnswParameters\"]\n ? item[\"hnswParameters\"]\n : hnswParametersDeserializer(item[\"hnswParameters\"]),\n };\n}\n\n/** Contains the parameters specific to the HNSW algorithm. */\nexport interface HnswParameters {\n /** The number of bi-directional links created for every new element during construction. 
Increasing this parameter value may improve recall and reduce retrieval times for datasets with high intrinsic dimensionality at the expense of increased memory consumption and longer indexing time. */\n m?: number;\n /** The size of the dynamic list containing the nearest neighbors, which is used during index time. Increasing this parameter may improve index quality, at the expense of increased indexing time. At a certain point, increasing this parameter leads to diminishing returns. */\n efConstruction?: number;\n /** The size of the dynamic list containing the nearest neighbors, which is used during search time. Increasing this parameter may improve search results, at the expense of slower search. At a certain point, increasing this parameter leads to diminishing returns. */\n efSearch?: number;\n /** The similarity metric to use for vector comparisons. */\n metric?: VectorSearchAlgorithmMetric;\n}\n\nexport function hnswParametersSerializer(item: HnswParameters): any {\n return {\n m: item[\"m\"],\n efConstruction: item[\"efConstruction\"],\n efSearch: item[\"efSearch\"],\n metric: item[\"metric\"],\n };\n}\n\nexport function hnswParametersDeserializer(item: any): HnswParameters {\n return {\n m: item[\"m\"],\n efConstruction: item[\"efConstruction\"],\n efSearch: item[\"efSearch\"],\n metric: item[\"metric\"],\n };\n}\n\n/** The similarity metric to use for vector comparisons. It is recommended to choose the same similarity metric as the embedding model was trained on. */\nexport enum KnownVectorSearchAlgorithmMetric {\n /** Measures the angle between vectors to quantify their similarity, disregarding magnitude. The smaller the angle, the closer the similarity. */\n Cosine = \"cosine\",\n /** Computes the straight-line distance between vectors in a multi-dimensional space. The smaller the distance, the closer the similarity. */\n Euclidean = \"euclidean\",\n /** Calculates the sum of element-wise products to gauge alignment and magnitude similarity. 
The larger and more positive, the closer the similarity. */\n DotProduct = \"dotProduct\",\n /** Only applicable to bit-packed binary data types. Determines dissimilarity by counting differing positions in binary vectors. The fewer differences, the closer the similarity. */\n Hamming = \"hamming\",\n}\n\n/**\n * The similarity metric to use for vector comparisons. It is recommended to choose the same similarity metric as the embedding model was trained on. \\\n * {@link KnownVectorSearchAlgorithmMetric} can be used interchangeably with VectorSearchAlgorithmMetric,\n * this enum contains the known values that the service supports.\n * ### Known values supported by the service\n * **cosine**: Measures the angle between vectors to quantify their similarity, disregarding magnitude. The smaller the angle, the closer the similarity. \\\n * **euclidean**: Computes the straight-line distance between vectors in a multi-dimensional space. The smaller the distance, the closer the similarity. \\\n * **dotProduct**: Calculates the sum of element-wise products to gauge alignment and magnitude similarity. The larger and more positive, the closer the similarity. \\\n * **hamming**: Only applicable to bit-packed binary data types. Determines dissimilarity by counting differing positions in binary vectors. The fewer differences, the closer the similarity.\n */\nexport type VectorSearchAlgorithmMetric = string;\n\n/** Contains configuration options specific to the exhaustive KNN algorithm used during querying, which will perform brute-force search across the entire vector index. */\nexport interface ExhaustiveKnnAlgorithmConfiguration extends VectorSearchAlgorithmConfiguration {\n /** Contains the parameters specific to exhaustive KNN algorithm. */\n parameters?: ExhaustiveKnnParameters;\n /** The name of the kind of algorithm being configured for use with vector search. 
*/\n kind: \"exhaustiveKnn\";\n}\n\nexport function exhaustiveKnnAlgorithmConfigurationSerializer(\n item: ExhaustiveKnnAlgorithmConfiguration,\n): any {\n return {\n name: item[\"name\"],\n kind: item[\"kind\"],\n exhaustiveKnnParameters: !item[\"parameters\"]\n ? item[\"parameters\"]\n : exhaustiveKnnParametersSerializer(item[\"parameters\"]),\n };\n}\n\nexport function exhaustiveKnnAlgorithmConfigurationDeserializer(\n item: any,\n): ExhaustiveKnnAlgorithmConfiguration {\n return {\n name: item[\"name\"],\n kind: item[\"kind\"],\n parameters: !item[\"exhaustiveKnnParameters\"]\n ? item[\"exhaustiveKnnParameters\"]\n : exhaustiveKnnParametersDeserializer(item[\"exhaustiveKnnParameters\"]),\n };\n}\n\n/** Contains the parameters specific to exhaustive KNN algorithm. */\nexport interface ExhaustiveKnnParameters {\n /** The similarity metric to use for vector comparisons. */\n metric?: VectorSearchAlgorithmMetric;\n}\n\nexport function exhaustiveKnnParametersSerializer(item: ExhaustiveKnnParameters): any {\n return { metric: item[\"metric\"] };\n}\n\nexport function exhaustiveKnnParametersDeserializer(item: any): ExhaustiveKnnParameters {\n return {\n metric: item[\"metric\"],\n };\n}\n\nexport function vectorSearchVectorizerUnionArraySerializer(\n result: Array<VectorSearchVectorizerUnion>,\n): any[] {\n return result.map((item) => {\n return vectorSearchVectorizerUnionSerializer(item);\n });\n}\n\nexport function vectorSearchVectorizerUnionArrayDeserializer(\n result: Array<VectorSearchVectorizerUnion>,\n): any[] {\n return result.map((item) => {\n return vectorSearchVectorizerUnionDeserializer(item);\n });\n}\n\n/** Specifies the vectorization method to be used during query time. */\nexport interface VectorSearchVectorizer {\n /** The name to associate with this particular vectorization method. */\n vectorizerName: string;\n /** Type of VectorSearchVectorizer. 
*/\n /** The discriminator possible values: azureOpenAI, customWebApi, aml */\n kind: VectorSearchVectorizerKind;\n}\n\nexport function vectorSearchVectorizerSerializer(item: VectorSearchVectorizer): any {\n return { name: item[\"vectorizerName\"], kind: item[\"kind\"] };\n}\n\nexport function vectorSearchVectorizerDeserializer(item: any): VectorSearchVectorizer {\n return {\n vectorizerName: item[\"name\"],\n kind: item[\"kind\"],\n };\n}\n\n/** Alias for VectorSearchVectorizerUnion */\nexport type VectorSearchVectorizerUnion =\n | AzureOpenAIVectorizer\n | WebApiVectorizer\n | AzureMachineLearningVectorizer\n | VectorSearchVectorizer;\n\nexport function vectorSearchVectorizerUnionSerializer(item: VectorSearchVectorizerUnion): any {\n switch (item.kind) {\n case \"azureOpenAI\":\n return azureOpenAIVectorizerSerializer(item as AzureOpenAIVectorizer);\n\n case \"customWebApi\":\n return webApiVectorizerSerializer(item as WebApiVectorizer);\n\n case \"aml\":\n return azureMachineLearningVectorizerSerializer(item as AzureMachineLearningVectorizer);\n\n default:\n return vectorSearchVectorizerSerializer(item);\n }\n}\n\nexport function vectorSearchVectorizerUnionDeserializer(item: any): VectorSearchVectorizerUnion {\n switch (item[\"kind\"]) {\n case \"azureOpenAI\":\n return azureOpenAIVectorizerDeserializer(item as AzureOpenAIVectorizer);\n\n case \"customWebApi\":\n return webApiVectorizerDeserializer(item as WebApiVectorizer);\n\n case \"aml\":\n return azureMachineLearningVectorizerDeserializer(item as AzureMachineLearningVectorizer);\n\n default:\n return vectorSearchVectorizerDeserializer(item);\n }\n}\n\n/** The vectorization method to be used during query time. */\nexport enum KnownVectorSearchVectorizerKind {\n /** Generate embeddings using an Azure OpenAI resource at query time. */\n AzureOpenAI = \"azureOpenAI\",\n /** Generate embeddings using a custom web endpoint at query time. 
*/\n CustomWebApi = \"customWebApi\",\n /** Generate embeddings for an image or text input at query time using the Azure AI Services Vision Vectorize API. */\n AIServicesVision = \"aiServicesVision\",\n /** Generate embeddings using an Azure Machine Learning endpoint deployed via the Azure AI Foundry Model Catalog at query time. */\n AML = \"aml\",\n}\n\n/**\n * The vectorization method to be used during query time. \\\n * {@link KnownVectorSearchVectorizerKind} can be used interchangeably with VectorSearchVectorizerKind,\n * this enum contains the known values that the service supports.\n * ### Known values supported by the service\n * **azureOpenAI**: Generate embeddings using an Azure OpenAI resource at query time. \\\n * **customWebApi**: Generate embeddings using a custom web endpoint at query time. \\\n * **aiServicesVision**: Generate embeddings for an image or text input at query time using the Azure AI Services Vision Vectorize API. \\\n * **aml**: Generate embeddings using an Azure Machine Learning endpoint deployed via the Azure AI Foundry Model Catalog at query time.\n */\nexport type VectorSearchVectorizerKind = string;\n\n/** Specifies the Azure OpenAI resource used to vectorize a query string. */\nexport interface AzureOpenAIVectorizer extends VectorSearchVectorizer {\n /** Contains the parameters specific to Azure OpenAI embedding vectorization. */\n parameters?: AzureOpenAIVectorizerParameters;\n /** The name of the kind of vectorization method being configured for use with vector search. */\n kind: \"azureOpenAI\";\n}\n\nexport function azureOpenAIVectorizerSerializer(item: AzureOpenAIVectorizer): any {\n return {\n name: item[\"vectorizerName\"],\n kind: item[\"kind\"],\n azureOpenAIParameters: !item[\"parameters\"]\n ? 
item[\"parameters\"]\n : azureOpenAIVectorizerParametersSerializer(item[\"parameters\"]),\n };\n}\n\nexport function azureOpenAIVectorizerDeserializer(item: any): AzureOpenAIVectorizer {\n return {\n vectorizerName: item[\"name\"],\n kind: item[\"kind\"],\n parameters: !item[\"azureOpenAIParameters\"]\n ? item[\"azureOpenAIParameters\"]\n : azureOpenAIVectorizerParametersDeserializer(item[\"azureOpenAIParameters\"]),\n };\n}\n\n/** Specifies the parameters for connecting to the Azure OpenAI resource. */\nexport interface AzureOpenAIVectorizerParameters {\n /** The resource URI of the Azure OpenAI resource. */\n resourceUrl?: string;\n /** ID of the Azure OpenAI model deployment on the designated resource. */\n deploymentId?: string;\n /** API key of the designated Azure OpenAI resource. */\n apiKey?: string;\n /** The user-assigned managed identity used for outbound connections. */\n authIdentity?: SearchIndexerDataIdentityUnion;\n /** The name of the embedding model that is deployed at the provided deploymentId path. */\n modelName?: AzureOpenAIModelName;\n}\n\nexport function azureOpenAIVectorizerParametersSerializer(\n item: AzureOpenAIVectorizerParameters,\n): any {\n return {\n resourceUri: item[\"resourceUrl\"],\n deploymentId: item[\"deploymentId\"],\n apiKey: item[\"apiKey\"],\n authIdentity: !item[\"authIdentity\"]\n ? item[\"authIdentity\"]\n : searchIndexerDataIdentityUnionSerializer(item[\"authIdentity\"]),\n modelName: item[\"modelName\"],\n };\n}\n\nexport function azureOpenAIVectorizerParametersDeserializer(\n item: any,\n): AzureOpenAIVectorizerParameters {\n return {\n resourceUrl: item[\"resourceUri\"],\n deploymentId: item[\"deploymentId\"],\n apiKey: item[\"apiKey\"],\n authIdentity: !item[\"authIdentity\"]\n ? item[\"authIdentity\"]\n : searchIndexerDataIdentityUnionDeserializer(item[\"authIdentity\"]),\n modelName: item[\"modelName\"],\n };\n}\n\n/** The Azure Open AI model name that will be called. 
*/\nexport enum KnownAzureOpenAIModelName {\n /** TextEmbeddingAda002 model. */\n TextEmbeddingAda002 = \"text-embedding-ada-002\",\n /** TextEmbedding3Large model. */\n TextEmbedding3Large = \"text-embedding-3-large\",\n /** TextEmbedding3Small model. */\n TextEmbedding3Small = \"text-embedding-3-small\",\n /** Gpt5Mini model. */\n Gpt5Mini = \"gpt-5-mini\",\n /** Gpt5Nano model. */\n Gpt5Nano = \"gpt-5-nano\",\n /** Gpt54Mini model. */\n Gpt54Mini = \"gpt-5.4-mini\",\n /** Gpt54Nano model. */\n Gpt54Nano = \"gpt-5.4-nano\",\n}\n\n/**\n * The Azure Open AI model name that will be called. \\\n * {@link KnownAzureOpenAIModelName} can be used interchangeably with AzureOpenAIModelName,\n * this enum contains the known values that the service supports.\n * ### Known values supported by the service\n * **text-embedding-ada-002**: TextEmbeddingAda002 model. \\\n * **text-embedding-3-large**: TextEmbedding3Large model. \\\n * **text-embedding-3-small**: TextEmbedding3Small model. \\\n * **gpt-5-mini**: Gpt5Mini model. \\\n * **gpt-5-nano**: Gpt5Nano model. \\\n * **gpt-5.4-mini**: Gpt54Mini model. \\\n * **gpt-5.4-nano**: Gpt54Nano model.\n */\nexport type AzureOpenAIModelName = string;\n\n/** Specifies a user-defined vectorizer for generating the vector embedding of a query string. Integration of an external vectorizer is achieved using the custom Web API interface of a skillset. */\nexport interface WebApiVectorizer extends VectorSearchVectorizer {\n /** Specifies the properties of the user-defined vectorizer. */\n webApiParameters?: WebApiVectorizerParameters;\n /** The name of the kind of vectorization method being configured for use with vector search. */\n kind: \"customWebApi\";\n}\n\nexport function webApiVectorizerSerializer(item: WebApiVectorizer): any {\n return {\n name: item[\"vectorizerName\"],\n kind: item[\"kind\"],\n customWebApiParameters: !item[\"webApiParameters\"]\n ? 
item[\"webApiParameters\"]\n : webApiVectorizerParametersSerializer(item[\"webApiParameters\"]),\n };\n}\n\nexport function webApiVectorizerDeserializer(item: any): WebApiVectorizer {\n return {\n vectorizerName: item[\"name\"],\n kind: item[\"kind\"],\n webApiParameters: !item[\"customWebApiParameters\"]\n ? item[\"customWebApiParameters\"]\n : webApiVectorizerParametersDeserializer(item[\"customWebApiParameters\"]),\n };\n}\n\n/** Specifies the properties for connecting to a user-defined vectorizer. */\nexport interface WebApiVectorizerParameters {\n /** The URI of the Web API providing the vectorizer. */\n uri?: string;\n /** The headers required to make the HTTP request. */\n httpHeaders?: Record<string, string>;\n /** The method for the HTTP request. */\n httpMethod?: string;\n /** The desired timeout for the request. Default is 30 seconds. */\n timeout?: string;\n /** Applies to custom endpoints that connect to external code in an Azure function or some other application that provides the transformations. This value should be the application ID created for the function or app when it was registered with Azure Active Directory. When specified, the vectorization connects to the function or app using a managed ID (either system or user-assigned) of the search service and the access token of the function or app, using this value as the resource id for creating the scope of the access token. */\n authResourceId?: string;\n /** The user-assigned managed identity used for outbound connections. If an authResourceId is provided and it's not specified, the system-assigned managed identity is used. On updates to the indexer, if the identity is unspecified, the value remains unchanged. If set to \"none\", the value of this property is cleared. 
*/\n authIdentity?: SearchIndexerDataIdentityUnion;\n}\n\nexport function webApiVectorizerParametersSerializer(item: WebApiVectorizerParameters): any {\n return {\n uri: item[\"uri\"],\n httpHeaders: item[\"httpHeaders\"],\n httpMethod: item[\"httpMethod\"],\n timeout: item[\"timeout\"],\n authResourceId: item[\"authResourceId\"],\n authIdentity: !item[\"authIdentity\"]\n ? item[\"authIdentity\"]\n : searchIndexerDataIdentityUnionSerializer(item[\"authIdentity\"]),\n };\n}\n\nexport function webApiVectorizerParametersDeserializer(item: any): WebApiVectorizerParameters {\n return {\n uri: item[\"uri\"],\n httpHeaders: !item[\"httpHeaders\"]\n ? item[\"httpHeaders\"]\n : Object.fromEntries(\n Object.entries(item[\"httpHeaders\"]).map(([k, p]: [string, any]) => [k, p]),\n ),\n httpMethod: item[\"httpMethod\"],\n timeout: item[\"timeout\"],\n authResourceId: item[\"authResourceId\"],\n authIdentity: !item[\"authIdentity\"]\n ? item[\"authIdentity\"]\n : searchIndexerDataIdentityUnionDeserializer(item[\"authIdentity\"]),\n };\n}\n\n/** Specifies an Azure Machine Learning endpoint deployed via the Azure AI Foundry Model Catalog for generating the vector embedding of a query string. */\nexport interface AzureMachineLearningVectorizer extends VectorSearchVectorizer {\n /** Specifies the properties of the AML vectorizer. */\n amlParameters?: AzureMachineLearningParameters;\n /** The name of the kind of vectorization method being configured for use with vector search. */\n kind: \"aml\";\n}\n\nexport function azureMachineLearningVectorizerSerializer(\n item: AzureMachineLearningVectorizer,\n): any {\n return {\n name: item[\"vectorizerName\"],\n kind: item[\"kind\"],\n amlParameters: !item[\"amlParameters\"]\n ? 
item[\"amlParameters\"]\n : azureMachineLearningParametersSerializer(item[\"amlParameters\"]),\n };\n}\n\nexport function azureMachineLearningVectorizerDeserializer(\n item: any,\n): AzureMachineLearningVectorizer {\n return {\n vectorizerName: item[\"name\"],\n kind: item[\"kind\"],\n amlParameters: !item[\"amlParameters\"]\n ? item[\"amlParameters\"]\n : azureMachineLearningParametersDeserializer(item[\"amlParameters\"]),\n };\n}\n\n/** Specifies the properties for connecting to an AML vectorizer. */\nexport interface AzureMachineLearningParameters {\n /** (Required for no authentication or key authentication) The scoring URI of the AML service to which the JSON payload will be sent. Only the https URI scheme is allowed. */\n scoringUri: string | null;\n /** (Required for key authentication) The key for the AML service. */\n authenticationKey?: string;\n /** (Required for token authentication). The Azure Resource Manager resource ID of the AML service. It should be in the format subscriptions/{guid}/resourceGroups/{resource-group-name}/Microsoft.MachineLearningServices/workspaces/{workspace-name}/services/{service_name}. */\n resourceId?: string;\n /** (Optional) When specified, indicates the timeout for the http client making the API call. */\n timeout?: string;\n /** (Optional for token authentication). The region the AML service is deployed in. */\n region?: string;\n /** The name of the embedding model from the Azure AI Foundry Catalog that is deployed at the provided endpoint. 
*/\n modelName?: AIFoundryModelCatalogName;\n}\n\nexport function azureMachineLearningParametersSerializer(\n item: AzureMachineLearningParameters,\n): any {\n return {\n uri: item[\"scoringUri\"],\n key: item[\"authenticationKey\"],\n resourceId: item[\"resourceId\"],\n timeout: item[\"timeout\"],\n region: item[\"region\"],\n modelName: item[\"modelName\"],\n };\n}\n\nexport function azureMachineLearningParametersDeserializer(\n item: any,\n): AzureMachineLearningParameters {\n return {\n scoringUri: item[\"uri\"],\n authenticationKey: item[\"key\"],\n resourceId: item[\"resourceId\"],\n timeout: item[\"timeout\"],\n region: item[\"region\"],\n modelName: item[\"modelName\"],\n };\n}\n\n/** The name of the embedding model from the Azure AI Foundry Catalog that will be called. */\nexport enum KnownAIFoundryModelCatalogName {\n /** OpenAI-CLIP-Image-Text-Embeddings-vit-base-patch32 */\n OpenAIClipImageTextEmbeddingsVitBasePatch32 = \"OpenAI-CLIP-Image-Text-Embeddings-vit-base-patch32\",\n /** OpenAI-CLIP-Image-Text-Embeddings-ViT-Large-Patch14-336 */\n OpenAIClipImageTextEmbeddingsViTLargePatch14336 = \"OpenAI-CLIP-Image-Text-Embeddings-ViT-Large-Patch14-336\",\n /** Facebook-DinoV2-Image-Embeddings-ViT-Base */\n FacebookDinoV2ImageEmbeddingsViTBase = \"Facebook-DinoV2-Image-Embeddings-ViT-Base\",\n /** Facebook-DinoV2-Image-Embeddings-ViT-Giant */\n FacebookDinoV2ImageEmbeddingsViTGiant = \"Facebook-DinoV2-Image-Embeddings-ViT-Giant\",\n /** Cohere-embed-v3-english */\n CohereEmbedV3English = \"Cohere-embed-v3-english\",\n /** Cohere-embed-v3-multilingual */\n CohereEmbedV3Multilingual = \"Cohere-embed-v3-multilingual\",\n /** Cohere embed v4 model for generating embeddings from both text and images. */\n CohereEmbedV4 = \"Cohere-embed-v4\",\n}\n\n/**\n * The name of the embedding model from the Azure AI Foundry Catalog that will be called. 
\\\n * {@link KnownAIFoundryModelCatalogName} can be used interchangeably with AIFoundryModelCatalogName,\n * this enum contains the known values that the service supports.\n * ### Known values supported by the service\n * **OpenAI-CLIP-Image-Text-Embeddings-vit-base-patch32**: OpenAI-CLIP-Image-Text-Embeddings-vit-base-patch32 \\\n * **OpenAI-CLIP-Image-Text-Embeddings-ViT-Large-Patch14-336**: OpenAI-CLIP-Image-Text-Embeddings-ViT-Large-Patch14-336 \\\n * **Facebook-DinoV2-Image-Embeddings-ViT-Base**: Facebook-DinoV2-Image-Embeddings-ViT-Base \\\n * **Facebook-DinoV2-Image-Embeddings-ViT-Giant**: Facebook-DinoV2-Image-Embeddings-ViT-Giant \\\n * **Cohere-embed-v3-english**: Cohere-embed-v3-english \\\n * **Cohere-embed-v3-multilingual**: Cohere-embed-v3-multilingual \\\n * **Cohere-embed-v4**: Cohere embed v4 model for generating embeddings from both text and images.\n */\nexport type AIFoundryModelCatalogName = string;\n\nexport function vectorSearchCompressionUnionArraySerializer(\n result: Array<VectorSearchCompressionUnion>,\n): any[] {\n return result.map((item) => {\n return vectorSearchCompressionUnionSerializer(item);\n });\n}\n\nexport function vectorSearchCompressionUnionArrayDeserializer(\n result: Array<VectorSearchCompressionUnion>,\n): any[] {\n return result.map((item) => {\n return vectorSearchCompressionUnionDeserializer(item);\n });\n}\n\n/** Contains configuration options specific to the compression method used during indexing or querying. */\nexport interface VectorSearchCompression {\n /** The name to associate with this particular configuration. */\n compressionName: string;\n /** Contains the options for rescoring. */\n rescoringOptions?: RescoringOptions;\n /** The number of dimensions to truncate the vectors to. Truncating the vectors reduces the size of the vectors and the amount of data that needs to be transferred during search. This can save storage cost and improve search performance at the expense of recall. 
It should be only used for embeddings trained with Matryoshka Representation Learning (MRL) such as OpenAI text-embedding-3-large (small). The default value is null, which means no truncation. */\n truncationDimension?: number;\n /** Type of VectorSearchCompression. */\n /** The discriminator possible values: scalarQuantization, binaryQuantization */\n kind: VectorSearchCompressionKind;\n}\n\nexport function vectorSearchCompressionSerializer(item: VectorSearchCompression): any {\n return {\n name: item[\"compressionName\"],\n rescoringOptions: !item[\"rescoringOptions\"]\n ? item[\"rescoringOptions\"]\n : rescoringOptionsSerializer(item[\"rescoringOptions\"]),\n truncationDimension: item[\"truncationDimension\"],\n kind: item[\"kind\"],\n };\n}\n\nexport function vectorSearchCompressionDeserializer(item: any): VectorSearchCompression {\n return {\n compressionName: item[\"name\"],\n rescoringOptions: !item[\"rescoringOptions\"]\n ? item[\"rescoringOptions\"]\n : rescoringOptionsDeserializer(item[\"rescoringOptions\"]),\n truncationDimension: item[\"truncationDimension\"],\n kind: item[\"kind\"],\n };\n}\n\n/** Alias for VectorSearchCompressionUnion */\nexport type VectorSearchCompressionUnion =\n | ScalarQuantizationCompression\n | BinaryQuantizationCompression\n | VectorSearchCompression;\n\nexport function vectorSearchCompressionUnionSerializer(item: VectorSearchCompressionUnion): any {\n switch (item.kind) {\n case \"scalarQuantization\":\n return scalarQuantizationCompressionSerializer(item as ScalarQuantizationCompression);\n\n case \"binaryQuantization\":\n return binaryQuantizationCompressionSerializer(item as BinaryQuantizationCompression);\n\n default:\n return vectorSearchCompressionSerializer(item);\n }\n}\n\nexport function vectorSearchCompressionUnionDeserializer(item: any): VectorSearchCompressionUnion {\n switch (item[\"kind\"]) {\n case \"scalarQuantization\":\n return scalarQuantizationCompressionDeserializer(item as 
ScalarQuantizationCompression);\n\n case \"binaryQuantization\":\n return binaryQuantizationCompressionDeserializer(item as BinaryQuantizationCompression);\n\n default:\n return vectorSearchCompressionDeserializer(item);\n }\n}\n\n/** Contains the options for rescoring. */\nexport interface RescoringOptions {\n /** If set to true, after the initial search on the compressed vectors, the similarity scores are recalculated using the full-precision vectors. This will improve recall at the expense of latency. */\n enableRescoring?: boolean;\n /** Default oversampling factor. Oversampling retrieves a greater set of potential documents to offset the resolution loss due to quantization. This increases the set of results that will be rescored on full-precision vectors. Minimum value is 1, meaning no oversampling (1x). This parameter can only be set when 'enableRescoring' is true. Higher values improve recall at the expense of latency. */\n defaultOversampling?: number;\n /** Controls the storage method for original vectors. This setting is immutable. */\n rescoreStorageMethod?: VectorSearchCompressionRescoreStorageMethod;\n}\n\nexport function rescoringOptionsSerializer(item: RescoringOptions): any {\n return {\n enableRescoring: item[\"enableRescoring\"],\n defaultOversampling: item[\"defaultOversampling\"],\n rescoreStorageMethod: item[\"rescoreStorageMethod\"],\n };\n}\n\nexport function rescoringOptionsDeserializer(item: any): RescoringOptions {\n return {\n enableRescoring: item[\"enableRescoring\"],\n defaultOversampling: item[\"defaultOversampling\"],\n rescoreStorageMethod: item[\"rescoreStorageMethod\"],\n };\n}\n\n/** The storage method for the original full-precision vectors used for rescoring and internal index operations. */\nexport enum KnownVectorSearchCompressionRescoreStorageMethod {\n /** This option preserves the original full-precision vectors. Choose this option for maximum flexibility and highest quality of compressed search results. 
This consumes more storage but allows for rescoring and oversampling. */\n PreserveOriginals = \"preserveOriginals\",\n /** This option discards the original full-precision vectors. Choose this option for maximum storage savings. Since this option does not allow for rescoring and oversampling, it will often cause slight to moderate reductions in quality. */\n DiscardOriginals = \"discardOriginals\",\n}\n\n/**\n * The storage method for the original full-precision vectors used for rescoring and internal index operations. \\\n * {@link KnownVectorSearchCompressionRescoreStorageMethod} can be used interchangeably with VectorSearchCompressionRescoreStorageMethod,\n * this enum contains the known values that the service supports.\n * ### Known values supported by the service\n * **preserveOriginals**: This option preserves the original full-precision vectors. Choose this option for maximum flexibility and highest quality of compressed search results. This consumes more storage but allows for rescoring and oversampling. \\\n * **discardOriginals**: This option discards the original full-precision vectors. Choose this option for maximum storage savings. Since this option does not allow for rescoring and oversampling, it will often cause slight to moderate reductions in quality.\n */\nexport type VectorSearchCompressionRescoreStorageMethod = string;\n\n/** The compression method used for indexing and querying. */\nexport enum KnownVectorSearchCompressionKind {\n /** Scalar Quantization, a type of compression method. In scalar quantization, the original vectors values are compressed to a narrower type by discretizing and representing each component of a vector using a reduced set of quantized values, thereby reducing the overall data size. */\n ScalarQuantization = \"scalarQuantization\",\n /** Binary Quantization, a type of compression method. 
In binary quantization, the original vectors values are compressed to the narrower binary type by discretizing and representing each component of a vector using binary values, thereby reducing the overall data size. */\n BinaryQuantization = \"binaryQuantization\",\n}\n\n/**\n * The compression method used for indexing and querying. \\\n * {@link KnownVectorSearchCompressionKind} can be used interchangeably with VectorSearchCompressionKind,\n * this enum contains the known values that the service supports.\n * ### Known values supported by the service\n * **scalarQuantization**: Scalar Quantization, a type of compression method. In scalar quantization, the original vectors values are compressed to a narrower type by discretizing and representing each component of a vector using a reduced set of quantized values, thereby reducing the overall data size. \\\n * **binaryQuantization**: Binary Quantization, a type of compression method. In binary quantization, the original vectors values are compressed to the narrower binary type by discretizing and representing each component of a vector using binary values, thereby reducing the overall data size.\n */\nexport type VectorSearchCompressionKind = string;\n\n/** Contains configuration options specific to the scalar quantization compression method used during indexing and querying. */\nexport interface ScalarQuantizationCompression extends VectorSearchCompression {\n /** Contains the parameters specific to Scalar Quantization. */\n parameters?: ScalarQuantizationParameters;\n /** The name of the kind of compression method being configured for use with vector search. */\n kind: \"scalarQuantization\";\n}\n\nexport function scalarQuantizationCompressionSerializer(item: ScalarQuantizationCompression): any {\n return {\n name: item[\"compressionName\"],\n rescoringOptions: !item[\"rescoringOptions\"]\n ? 
item[\"rescoringOptions\"]\n : rescoringOptionsSerializer(item[\"rescoringOptions\"]),\n truncationDimension: item[\"truncationDimension\"],\n kind: item[\"kind\"],\n scalarQuantizationParameters: !item[\"parameters\"]\n ? item[\"parameters\"]\n : scalarQuantizationParametersSerializer(item[\"parameters\"]),\n };\n}\n\nexport function scalarQuantizationCompressionDeserializer(\n item: any,\n): ScalarQuantizationCompression {\n return {\n compressionName: item[\"name\"],\n rescoringOptions: !item[\"rescoringOptions\"]\n ? item[\"rescoringOptions\"]\n : rescoringOptionsDeserializer(item[\"rescoringOptions\"]),\n truncationDimension: item[\"truncationDimension\"],\n kind: item[\"kind\"],\n parameters: !item[\"scalarQuantizationParameters\"]\n ? item[\"scalarQuantizationParameters\"]\n : scalarQuantizationParametersDeserializer(item[\"scalarQuantizationParameters\"]),\n };\n}\n\n/** Contains the parameters specific to Scalar Quantization. */\nexport interface ScalarQuantizationParameters {\n /** The quantized data type of compressed vector values. */\n quantizedDataType?: VectorSearchCompressionTarget;\n}\n\nexport function scalarQuantizationParametersSerializer(item: ScalarQuantizationParameters): any {\n return { quantizedDataType: item[\"quantizedDataType\"] };\n}\n\nexport function scalarQuantizationParametersDeserializer(item: any): ScalarQuantizationParameters {\n return {\n quantizedDataType: item[\"quantizedDataType\"],\n };\n}\n\n/** The quantized data type of compressed vector values. */\nexport enum KnownVectorSearchCompressionTarget {\n /** 8-bit signed integer. */\n Int8 = \"int8\",\n}\n\n/**\n * The quantized data type of compressed vector values. 
\\\n * {@link KnownVectorSearchCompressionTarget} can be used interchangeably with VectorSearchCompressionTarget,\n * this enum contains the known values that the service supports.\n * ### Known values supported by the service\n * **int8**: 8-bit signed integer.\n */\nexport type VectorSearchCompressionTarget = string;\n\n/** Contains configuration options specific to the binary quantization compression method used during indexing and querying. */\nexport interface BinaryQuantizationCompression extends VectorSearchCompression {\n /** The name of the kind of compression method being configured for use with vector search. */\n kind: \"binaryQuantization\";\n}\n\nexport function binaryQuantizationCompressionSerializer(item: BinaryQuantizationCompression): any {\n return {\n name: item[\"compressionName\"],\n rescoringOptions: !item[\"rescoringOptions\"]\n ? item[\"rescoringOptions\"]\n : rescoringOptionsSerializer(item[\"rescoringOptions\"]),\n truncationDimension: item[\"truncationDimension\"],\n kind: item[\"kind\"],\n };\n}\n\nexport function binaryQuantizationCompressionDeserializer(\n item: any,\n): BinaryQuantizationCompression {\n return {\n compressionName: item[\"name\"],\n rescoringOptions: !item[\"rescoringOptions\"]\n ? item[\"rescoringOptions\"]\n : rescoringOptionsDeserializer(item[\"rescoringOptions\"]),\n truncationDimension: item[\"truncationDimension\"],\n kind: item[\"kind\"],\n };\n}\n\n/** Response from a List Indexes request. If successful, it includes the full definitions of all indexes. */\nexport interface _ListIndexesResult {\n /** The indexes in the Search service. 
*/\n readonly indexes: SearchIndex[];\n}\n\nexport function _listIndexesResultDeserializer(item: any): _ListIndexesResult {\n return {\n indexes: searchIndexArrayDeserializer(item[\"value\"]),\n };\n}\n\nexport function searchIndexArraySerializer(result: Array<SearchIndex>): any[] {\n return result.map((item) => {\n return searchIndexSerializer(item);\n });\n}\n\nexport function searchIndexArrayDeserializer(result: Array<SearchIndex>): any[] {\n return result.map((item) => {\n return searchIndexDeserializer(item);\n });\n}\n\nexport function searchIndexResponseArrayDeserializer(result: Array<SearchIndexResponse>): any[] {\n return result.map((item) => {\n return searchIndexResponseDeserializer(item);\n });\n}\n\n/** Represents a search index definition, which describes the fields and search behavior of an index. */\nexport interface SearchIndexResponse {\n /** The name of the index. */\n name: string;\n /** The description of the index. */\n description?: string;\n /** The fields of the index. */\n fields?: SearchField[];\n /** The scoring profiles for the index. */\n scoringProfiles?: ScoringProfile[];\n /** The name of the scoring profile to use if none is specified in the query. If this property is not set and no scoring profile is specified in the query, then default scoring (tf-idf) will be used. */\n defaultScoringProfile?: string;\n /** Options to control Cross-Origin Resource Sharing (CORS) for the index. */\n corsOptions?: CorsOptions;\n /** The suggesters for the index. */\n suggesters?: SearchSuggester[];\n /** The analyzers for the index. */\n analyzers?: LexicalAnalyzerUnion[];\n /** The tokenizers for the index. */\n tokenizers?: LexicalTokenizerUnion[];\n /** The token filters for the index. */\n tokenFilters?: TokenFilterUnion[];\n /** The character filters for the index. */\n charFilters?: CharFilterUnion[];\n /** The normalizers for the index. 
*/\n normalizers?: LexicalNormalizerUnion[];\n /** A description of an encryption key that you create in Azure Key Vault. This key is used to provide an additional level of encryption-at-rest for your data when you want full assurance that no one, not even Microsoft, can decrypt your data. Once you have encrypted your data, it will always remain encrypted. The search service will ignore attempts to set this property to null. You can change this property as needed if you want to rotate your encryption key; Your data will be unaffected. Encryption with customer-managed keys is not available for free search services, and is only available for paid services created on or after January 1, 2019. */\n encryptionKey?: SearchResourceEncryptionKey;\n /** The type of similarity algorithm to be used when scoring and ranking the documents matching a search query. The similarity algorithm can only be defined at index creation time and cannot be modified on existing indexes. If null, the ClassicSimilarity algorithm is used. */\n similarity?: SimilarityAlgorithmUnion;\n /** Defines parameters for a search index that influence semantic capabilities. */\n semanticSearch?: SemanticSearch;\n /** Contains configuration options related to vector search. */\n vectorSearch?: VectorSearch;\n /** The ETag of the index. */\n eTag?: string;\n}\n\nexport function searchIndexResponseDeserializer(item: any): SearchIndexResponse {\n return {\n name: item[\"name\"],\n description: item[\"description\"],\n fields: !item[\"fields\"] ? item[\"fields\"] : searchFieldArrayDeserializer(item[\"fields\"]),\n scoringProfiles: !item[\"scoringProfiles\"]\n ? item[\"scoringProfiles\"]\n : scoringProfileArrayDeserializer(item[\"scoringProfiles\"]),\n defaultScoringProfile: item[\"defaultScoringProfile\"],\n corsOptions: !item[\"corsOptions\"]\n ? item[\"corsOptions\"]\n : corsOptionsDeserializer(item[\"corsOptions\"]),\n suggesters: !item[\"suggesters\"]\n ? 
item[\"suggesters\"]\n : searchSuggesterArrayDeserializer(item[\"suggesters\"]),\n analyzers: !item[\"analyzers\"]\n ? item[\"analyzers\"]\n : lexicalAnalyzerUnionArrayDeserializer(item[\"analyzers\"]),\n tokenizers: !item[\"tokenizers\"]\n ? item[\"tokenizers\"]\n : lexicalTokenizerUnionArrayDeserializer(item[\"tokenizers\"]),\n tokenFilters: !item[\"tokenFilters\"]\n ? item[\"tokenFilters\"]\n : tokenFilterUnionArrayDeserializer(item[\"tokenFilters\"]),\n charFilters: !item[\"charFilters\"]\n ? item[\"charFilters\"]\n : charFilterUnionArrayDeserializer(item[\"charFilters\"]),\n normalizers: !item[\"normalizers\"]\n ? item[\"normalizers\"]\n : lexicalNormalizerUnionArrayDeserializer(item[\"normalizers\"]),\n encryptionKey: !item[\"encryptionKey\"]\n ? item[\"encryptionKey\"]\n : searchResourceEncryptionKeyDeserializer(item[\"encryptionKey\"]),\n similarity: !item[\"similarity\"]\n ? item[\"similarity\"]\n : similarityAlgorithmUnionDeserializer(item[\"similarity\"]),\n semanticSearch: !item[\"semantic\"]\n ? item[\"semantic\"]\n : semanticSearchDeserializer(item[\"semantic\"]),\n vectorSearch: !item[\"vectorSearch\"]\n ? item[\"vectorSearch\"]\n : vectorSearchDeserializer(item[\"vectorSearch\"]),\n eTag: item[\"@odata.etag\"],\n };\n}\n\n/** Statistics for a given index. Statistics are collected periodically and are not guaranteed to always be up-to-date. */\nexport interface GetIndexStatisticsResult {\n /** The number of documents in the index. */\n readonly documentCount: number;\n /** The amount of storage in bytes consumed by the index. */\n readonly storageSize: number;\n /** The amount of memory in bytes consumed by vectors in the index. 
*/\n readonly vectorIndexSize: number;\n}\n\nexport function getIndexStatisticsResultSerializer(item: GetIndexStatisticsResult): any {\n return item;\n}\n\nexport function getIndexStatisticsResultDeserializer(item: any): GetIndexStatisticsResult {\n return {\n documentCount: item[\"documentCount\"],\n storageSize: item[\"storageSize\"],\n vectorIndexSize: item[\"vectorIndexSize\"],\n };\n}\n\n/** Specifies some text and analysis components used to break that text into tokens. */\nexport interface AnalyzeTextOptions {\n /** The text to break into tokens. */\n text: string;\n /** The name of the analyzer to use to break the given text. If this parameter is not specified, you must specify a tokenizer instead. The tokenizer and analyzer parameters are mutually exclusive. */\n analyzerName?: LexicalAnalyzerName;\n /** The name of the tokenizer to use to break the given text. If this parameter is not specified, you must specify an analyzer instead. The tokenizer and analyzer parameters are mutually exclusive. */\n tokenizerName?: LexicalTokenizerName;\n /** The name of the normalizer to use to normalize the given text. */\n normalizerName?: LexicalNormalizerName;\n /** An optional list of token filters to use when breaking the given text. This parameter can only be set when using the tokenizer parameter. */\n tokenFilters?: TokenFilterName[];\n /** An optional list of character filters to use when breaking the given text. This parameter can only be set when using the tokenizer parameter. */\n charFilters?: CharFilterName[];\n}\n\nexport function analyzeTextOptionsSerializer(item: AnalyzeTextOptions): any {\n return {\n text: item[\"text\"],\n analyzer: item[\"analyzerName\"],\n tokenizer: item[\"tokenizerName\"],\n normalizer: item[\"normalizerName\"],\n tokenFilters: !item[\"tokenFilters\"]\n ? item[\"tokenFilters\"]\n : item[\"tokenFilters\"].map((p: any) => {\n return p;\n }),\n charFilters: !item[\"charFilters\"]\n ? 
item[\"charFilters\"]\n : item[\"charFilters\"].map((p: any) => {\n return p;\n }),\n };\n}\n\n/** The result of testing an analyzer on text. */\nexport interface AnalyzeResult {\n /** The list of tokens returned by the analyzer specified in the request. */\n tokens: AnalyzedTokenInfo[];\n}\n\nexport function analyzeResultSerializer(item: AnalyzeResult): any {\n return { tokens: analyzedTokenInfoArraySerializer(item[\"tokens\"]) };\n}\n\nexport function analyzeResultDeserializer(item: any): AnalyzeResult {\n return {\n tokens: analyzedTokenInfoArrayDeserializer(item[\"tokens\"]),\n };\n}\n\nexport function analyzedTokenInfoArraySerializer(result: Array<AnalyzedTokenInfo>): any[] {\n return result.map((item) => {\n return analyzedTokenInfoSerializer(item);\n });\n}\n\nexport function analyzedTokenInfoArrayDeserializer(result: Array<AnalyzedTokenInfo>): any[] {\n return result.map((item) => {\n return analyzedTokenInfoDeserializer(item);\n });\n}\n\n/** Information about a token returned by an analyzer. */\nexport interface AnalyzedTokenInfo {\n /** The token returned by the analyzer. */\n readonly token: string;\n /** The index of the first character of the token in the input text. */\n readonly startOffset: number;\n /** The index of the last character of the token in the input text. */\n readonly endOffset: number;\n /** The position of the token in the input text relative to other tokens. The first token in the input text has position 0, the next has position 1, and so on. Depending on the analyzer used, some tokens might have the same position, for example if they are synonyms of each other. 
*/\n readonly position: number;\n}\n\nexport function analyzedTokenInfoSerializer(item: AnalyzedTokenInfo): any {\n return item;\n}\n\nexport function analyzedTokenInfoDeserializer(item: any): AnalyzedTokenInfo {\n return {\n token: item[\"token\"],\n startOffset: item[\"startOffset\"],\n endOffset: item[\"endOffset\"],\n position: item[\"position\"],\n };\n}\n\n/** Represents an index alias, which describes a mapping from the alias name to an index. The alias name can be used in place of the index name for supported operations. */\nexport interface SearchAlias {\n /** The name of the alias. */\n name: string;\n /** The name of the index this alias maps to. Only one index name may be specified. */\n indexes: string[];\n /** The ETag of the alias. */\n etag?: string;\n}\n\nexport function searchAliasSerializer(item: SearchAlias): any {\n return {\n name: item[\"name\"],\n indexes: item[\"indexes\"].map((p: any) => {\n return p;\n }),\n \"@odata.etag\": item[\"etag\"],\n };\n}\n\nexport function searchAliasDeserializer(item: any): SearchAlias {\n return {\n name: item[\"name\"],\n indexes: item[\"indexes\"].map((p: any) => {\n return p;\n }),\n etag: item[\"@odata.etag\"],\n };\n}\n\n/** Response from a List Aliases request. If successful, it includes the associated index mappings for all aliases. */\nexport interface _ListAliasesResult {\n /** The aliases in the Search service. */\n readonly aliases: SearchAlias[];\n}\n\nexport function _listAliasesResultDeserializer(item: any): _ListAliasesResult {\n return {\n aliases: searchAliasArrayDeserializer(item[\"value\"]),\n };\n}\n\nexport function searchAliasArraySerializer(result: Array<SearchAlias>): any[] {\n return result.map((item) => {\n return searchAliasSerializer(item);\n });\n}\n\nexport function searchAliasArrayDeserializer(result: Array<SearchAlias>): any[] {\n return result.map((item) => {\n return searchAliasDeserializer(item);\n });\n}\n\n/** Represents a knowledge base definition. 
*/\nexport interface KnowledgeBase {\n /** The name of the knowledge base. */\n name: string;\n /** Knowledge sources referenced by this knowledge base. */\n knowledgeSources: KnowledgeSourceReference[];\n /** Contains configuration options on how to connect to AI models. */\n models?: KnowledgeBaseModelUnion[];\n /** The ETag of the knowledge base. */\n etag?: string;\n /** A description of an encryption key that you create in Azure Key Vault. */\n encryptionKey?: SearchResourceEncryptionKey;\n /** The description of the knowledge base. */\n description?: string;\n}\n\nexport function knowledgeBaseSerializer(item: KnowledgeBase): any {\n return {\n name: item[\"name\"],\n knowledgeSources: knowledgeSourceReferenceArraySerializer(item[\"knowledgeSources\"]),\n models: !item[\"models\"]\n ? item[\"models\"]\n : knowledgeBaseModelUnionArraySerializer(item[\"models\"]),\n \"@odata.etag\": item[\"etag\"],\n encryptionKey: !item[\"encryptionKey\"]\n ? item[\"encryptionKey\"]\n : searchResourceEncryptionKeySerializer(item[\"encryptionKey\"]),\n description: item[\"description\"],\n };\n}\n\nexport function knowledgeBaseDeserializer(item: any): KnowledgeBase {\n return {\n name: item[\"name\"],\n knowledgeSources: knowledgeSourceReferenceArrayDeserializer(item[\"knowledgeSources\"]),\n models: !item[\"models\"]\n ? item[\"models\"]\n : knowledgeBaseModelUnionArrayDeserializer(item[\"models\"]),\n etag: item[\"@odata.etag\"],\n encryptionKey: !item[\"encryptionKey\"]\n ? 
item[\"encryptionKey\"]\n : searchResourceEncryptionKeyDeserializer(item[\"encryptionKey\"]),\n description: item[\"description\"],\n };\n}\n\nexport function knowledgeSourceReferenceArraySerializer(\n result: Array<KnowledgeSourceReference>,\n): any[] {\n return result.map((item) => {\n return knowledgeSourceReferenceSerializer(item);\n });\n}\n\nexport function knowledgeSourceReferenceArrayDeserializer(\n result: Array<KnowledgeSourceReference>,\n): any[] {\n return result.map((item) => {\n return knowledgeSourceReferenceDeserializer(item);\n });\n}\n\n/** Reference to a knowledge source. */\nexport interface KnowledgeSourceReference {\n /** The name of the knowledge source. */\n name: string;\n}\n\nexport function knowledgeSourceReferenceSerializer(item: KnowledgeSourceReference): any {\n return { name: item[\"name\"] };\n}\n\nexport function knowledgeSourceReferenceDeserializer(item: any): KnowledgeSourceReference {\n return {\n name: item[\"name\"],\n };\n}\n\nexport function knowledgeBaseModelUnionArraySerializer(\n result: Array<KnowledgeBaseModelUnion>,\n): any[] {\n return result.map((item) => {\n return knowledgeBaseModelUnionSerializer(item);\n });\n}\n\nexport function knowledgeBaseModelUnionArrayDeserializer(\n result: Array<KnowledgeBaseModelUnion>,\n): any[] {\n return result.map((item) => {\n return knowledgeBaseModelUnionDeserializer(item);\n });\n}\n\n/** Specifies the connection parameters for the model to use for query planning. */\nexport interface KnowledgeBaseModel {\n /** The AI model to be used for query planning. 
*/\n /** The discriminator possible values: azureOpenAI */\n kind: KnowledgeBaseModelKind;\n}\n\nexport function knowledgeBaseModelSerializer(item: KnowledgeBaseModel): any {\n return { kind: item[\"kind\"] };\n}\n\nexport function knowledgeBaseModelDeserializer(item: any): KnowledgeBaseModel {\n return {\n kind: item[\"kind\"],\n };\n}\n\n/** Alias for KnowledgeBaseModelUnion */\nexport type KnowledgeBaseModelUnion = KnowledgeBaseAzureOpenAIModel | KnowledgeBaseModel;\n\nexport function knowledgeBaseModelUnionSerializer(item: KnowledgeBaseModelUnion): any {\n switch (item.kind) {\n case \"azureOpenAI\":\n return knowledgeBaseAzureOpenAIModelSerializer(item as KnowledgeBaseAzureOpenAIModel);\n\n default:\n return knowledgeBaseModelSerializer(item);\n }\n}\n\nexport function knowledgeBaseModelUnionDeserializer(item: any): KnowledgeBaseModelUnion {\n switch (item[\"kind\"]) {\n case \"azureOpenAI\":\n return knowledgeBaseAzureOpenAIModelDeserializer(item as KnowledgeBaseAzureOpenAIModel);\n\n default:\n return knowledgeBaseModelDeserializer(item);\n }\n}\n\n/** The AI model to be used for query planning. */\nexport enum KnownKnowledgeBaseModelKind {\n /** Use Azure Open AI models for query planning. */\n AzureOpenAI = \"azureOpenAI\",\n}\n\n/**\n * The AI model to be used for query planning. \\\n * {@link KnownKnowledgeBaseModelKind} can be used interchangeably with KnowledgeBaseModelKind,\n * this enum contains the known values that the service supports.\n * ### Known values supported by the service\n * **azureOpenAI**: Use Azure Open AI models for query planning.\n */\nexport type KnowledgeBaseModelKind = string;\n\n/** Specifies the Azure OpenAI resource used to do query planning. */\nexport interface KnowledgeBaseAzureOpenAIModel extends KnowledgeBaseModel {\n kind: \"azureOpenAI\";\n /** Azure OpenAI parameters. 
*/\n azureOpenAIParameters: AzureOpenAIVectorizerParameters;\n}\n\nexport function knowledgeBaseAzureOpenAIModelSerializer(item: KnowledgeBaseAzureOpenAIModel): any {\n return {\n kind: item[\"kind\"],\n azureOpenAIParameters: azureOpenAIVectorizerParametersSerializer(item[\"azureOpenAIParameters\"]),\n };\n}\n\nexport function knowledgeBaseAzureOpenAIModelDeserializer(\n item: any,\n): KnowledgeBaseAzureOpenAIModel {\n return {\n kind: item[\"kind\"],\n azureOpenAIParameters: azureOpenAIVectorizerParametersDeserializer(\n item[\"azureOpenAIParameters\"],\n ),\n };\n}\n\n/** Result from listing knowledge bases. */\nexport interface _ListKnowledgeBasesResult {\n /** The knowledge bases in the service. */\n value: KnowledgeBase[];\n}\n\nexport function _listKnowledgeBasesResultDeserializer(item: any): _ListKnowledgeBasesResult {\n return {\n value: knowledgeBaseArrayDeserializer(item[\"value\"]),\n };\n}\n\nexport function knowledgeBaseArraySerializer(result: Array<KnowledgeBase>): any[] {\n return result.map((item) => {\n return knowledgeBaseSerializer(item);\n });\n}\n\nexport function knowledgeBaseArrayDeserializer(result: Array<KnowledgeBase>): any[] {\n return result.map((item) => {\n return knowledgeBaseDeserializer(item);\n });\n}\n\n/** Represents a knowledge source definition. */\nexport interface KnowledgeSource {\n /** The name of the knowledge source. */\n name: string;\n /** Optional user-defined description. */\n description?: string;\n /** The type of the knowledge source. */\n /** The discriminator possible values: searchIndex, azureBlob, indexedOneLake, web */\n kind: KnowledgeSourceKind;\n /** The ETag of the knowledge source. */\n eTag?: string;\n /** A description of an encryption key that you create in Azure Key Vault. This key is used to provide an additional level of encryption-at-rest for your knowledge source definition when you want full assurance that no one, not even Microsoft, can decrypt them. 
Once you have encrypted your knowledge source definition, it will always remain encrypted. The search service will ignore attempts to set this property to null. You can change this property as needed if you want to rotate your encryption key; Your knowledge source definition will be unaffected. Encryption with customer-managed keys is not available for free search services, and is only available for paid services created on or after January 1, 2019. */\n encryptionKey?: SearchResourceEncryptionKey;\n}\n\nexport function knowledgeSourceSerializer(item: KnowledgeSource): any {\n return {\n name: item[\"name\"],\n description: item[\"description\"],\n kind: item[\"kind\"],\n \"@odata.etag\": item[\"eTag\"],\n encryptionKey: !item[\"encryptionKey\"]\n ? item[\"encryptionKey\"]\n : searchResourceEncryptionKeySerializer(item[\"encryptionKey\"]),\n };\n}\n\nexport function knowledgeSourceDeserializer(item: any): KnowledgeSource {\n return {\n name: item[\"name\"],\n description: item[\"description\"],\n kind: item[\"kind\"],\n eTag: item[\"@odata.etag\"],\n encryptionKey: !item[\"encryptionKey\"]\n ? 
item[\"encryptionKey\"]\n : searchResourceEncryptionKeyDeserializer(item[\"encryptionKey\"]),\n };\n}\n\n/** Alias for KnowledgeSourceUnion */\nexport type KnowledgeSourceUnion =\n | SearchIndexKnowledgeSource\n | AzureBlobKnowledgeSource\n | IndexedOneLakeKnowledgeSource\n | WebKnowledgeSource\n | KnowledgeSource;\n\nexport function knowledgeSourceUnionSerializer(item: KnowledgeSourceUnion): any {\n switch (item.kind) {\n case \"searchIndex\":\n return searchIndexKnowledgeSourceSerializer(item as SearchIndexKnowledgeSource);\n\n case \"azureBlob\":\n return azureBlobKnowledgeSourceSerializer(item as AzureBlobKnowledgeSource);\n\n case \"indexedOneLake\":\n return indexedOneLakeKnowledgeSourceSerializer(item as IndexedOneLakeKnowledgeSource);\n\n case \"web\":\n return webKnowledgeSourceSerializer(item as WebKnowledgeSource);\n\n default:\n return knowledgeSourceSerializer(item);\n }\n}\n\nexport function knowledgeSourceUnionDeserializer(item: any): KnowledgeSourceUnion {\n switch (item[\"kind\"]) {\n case \"searchIndex\":\n return searchIndexKnowledgeSourceDeserializer(item as SearchIndexKnowledgeSource);\n\n case \"azureBlob\":\n return azureBlobKnowledgeSourceDeserializer(item as AzureBlobKnowledgeSource);\n\n case \"indexedOneLake\":\n return indexedOneLakeKnowledgeSourceDeserializer(item as IndexedOneLakeKnowledgeSource);\n\n case \"web\":\n return webKnowledgeSourceDeserializer(item as WebKnowledgeSource);\n\n default:\n return knowledgeSourceDeserializer(item);\n }\n}\n\n/** The kind of the knowledge source. */\nexport enum KnownKnowledgeSourceKind {\n /** A knowledge source that reads data from a Search Index. */\n SearchIndex = \"searchIndex\",\n /** A knowledge source that read and ingest data from Azure Blob Storage to a Search Index. */\n AzureBlob = \"azureBlob\",\n /** A knowledge source that reads data from indexed OneLake. */\n IndexedOneLake = \"indexedOneLake\",\n /** A knowledge source that reads data from the web. 
*/\n Web = \"web\",\n}\n\n/**\n * The kind of the knowledge source. \\\n * {@link KnownKnowledgeSourceKind} can be used interchangeably with KnowledgeSourceKind,\n * this enum contains the known values that the service supports.\n * ### Known values supported by the service\n * **searchIndex**: A knowledge source that reads data from a Search Index. \\\n * **azureBlob**: A knowledge source that read and ingest data from Azure Blob Storage to a Search Index. \\\n * **indexedOneLake**: A knowledge source that reads data from indexed OneLake. \\\n * **web**: A knowledge source that reads data from the web.\n */\nexport type KnowledgeSourceKind = string;\n\n/** Knowledge Source targeting a search index. */\nexport interface SearchIndexKnowledgeSource extends KnowledgeSource {\n kind: \"searchIndex\";\n /** The parameters for the knowledge source. */\n searchIndexParameters: SearchIndexKnowledgeSourceParameters;\n}\n\nexport function searchIndexKnowledgeSourceSerializer(item: SearchIndexKnowledgeSource): any {\n return {\n name: item[\"name\"],\n description: item[\"description\"],\n kind: item[\"kind\"],\n \"@odata.etag\": item[\"eTag\"],\n encryptionKey: !item[\"encryptionKey\"]\n ? item[\"encryptionKey\"]\n : searchResourceEncryptionKeySerializer(item[\"encryptionKey\"]),\n searchIndexParameters: searchIndexKnowledgeSourceParametersSerializer(\n item[\"searchIndexParameters\"],\n ),\n };\n}\n\nexport function searchIndexKnowledgeSourceDeserializer(item: any): SearchIndexKnowledgeSource {\n return {\n name: item[\"name\"],\n description: item[\"description\"],\n kind: item[\"kind\"],\n eTag: item[\"@odata.etag\"],\n encryptionKey: !item[\"encryptionKey\"]\n ? item[\"encryptionKey\"]\n : searchResourceEncryptionKeyDeserializer(item[\"encryptionKey\"]),\n searchIndexParameters: searchIndexKnowledgeSourceParametersDeserializer(\n item[\"searchIndexParameters\"],\n ),\n };\n}\n\n/** Parameters for search index knowledge source. 
*/\nexport interface SearchIndexKnowledgeSourceParameters {\n /** The name of the Search index. */\n searchIndexName: string;\n /** Used to request additional fields for referenced source data. */\n sourceDataFields?: SearchIndexFieldReference[];\n /** Used to restrict which fields to search on the search index. */\n searchFields?: SearchIndexFieldReference[];\n /** Used to specify a different semantic configuration on the target search index other than the default one. */\n semanticConfigurationName?: string;\n}\n\nexport function searchIndexKnowledgeSourceParametersSerializer(\n item: SearchIndexKnowledgeSourceParameters,\n): any {\n return {\n searchIndexName: item[\"searchIndexName\"],\n sourceDataFields: !item[\"sourceDataFields\"]\n ? item[\"sourceDataFields\"]\n : searchIndexFieldReferenceArraySerializer(item[\"sourceDataFields\"]),\n searchFields: !item[\"searchFields\"]\n ? item[\"searchFields\"]\n : searchIndexFieldReferenceArraySerializer(item[\"searchFields\"]),\n semanticConfigurationName: item[\"semanticConfigurationName\"],\n };\n}\n\nexport function searchIndexKnowledgeSourceParametersDeserializer(\n item: any,\n): SearchIndexKnowledgeSourceParameters {\n return {\n searchIndexName: item[\"searchIndexName\"],\n sourceDataFields: !item[\"sourceDataFields\"]\n ? item[\"sourceDataFields\"]\n : searchIndexFieldReferenceArrayDeserializer(item[\"sourceDataFields\"]),\n searchFields: !item[\"searchFields\"]\n ? 
item[\"searchFields\"]\n : searchIndexFieldReferenceArrayDeserializer(item[\"searchFields\"]),\n semanticConfigurationName: item[\"semanticConfigurationName\"],\n };\n}\n\nexport function searchIndexFieldReferenceArraySerializer(\n result: Array<SearchIndexFieldReference>,\n): any[] {\n return result.map((item) => {\n return searchIndexFieldReferenceSerializer(item);\n });\n}\n\nexport function searchIndexFieldReferenceArrayDeserializer(\n result: Array<SearchIndexFieldReference>,\n): any[] {\n return result.map((item) => {\n return searchIndexFieldReferenceDeserializer(item);\n });\n}\n\n/** Field reference for a search index. */\nexport interface SearchIndexFieldReference {\n /** The name of the field. */\n name: string;\n}\n\nexport function searchIndexFieldReferenceSerializer(item: SearchIndexFieldReference): any {\n return { name: item[\"name\"] };\n}\n\nexport function searchIndexFieldReferenceDeserializer(item: any): SearchIndexFieldReference {\n return {\n name: item[\"name\"],\n };\n}\n\n/** Configuration for Azure Blob Storage knowledge source. */\nexport interface AzureBlobKnowledgeSource extends KnowledgeSource {\n kind: \"azureBlob\";\n /** The type of the knowledge source. */\n azureBlobParameters: AzureBlobKnowledgeSourceParameters;\n}\n\nexport function azureBlobKnowledgeSourceSerializer(item: AzureBlobKnowledgeSource): any {\n return {\n name: item[\"name\"],\n description: item[\"description\"],\n kind: item[\"kind\"],\n \"@odata.etag\": item[\"eTag\"],\n encryptionKey: !item[\"encryptionKey\"]\n ? 
item[\"encryptionKey\"]\n : searchResourceEncryptionKeySerializer(item[\"encryptionKey\"]),\n azureBlobParameters: azureBlobKnowledgeSourceParametersSerializer(item[\"azureBlobParameters\"]),\n };\n}\n\nexport function azureBlobKnowledgeSourceDeserializer(item: any): AzureBlobKnowledgeSource {\n return {\n name: item[\"name\"],\n description: item[\"description\"],\n kind: item[\"kind\"],\n eTag: item[\"@odata.etag\"],\n encryptionKey: !item[\"encryptionKey\"]\n ? item[\"encryptionKey\"]\n : searchResourceEncryptionKeyDeserializer(item[\"encryptionKey\"]),\n azureBlobParameters: azureBlobKnowledgeSourceParametersDeserializer(\n item[\"azureBlobParameters\"],\n ),\n };\n}\n\n/** Parameters for Azure Blob Storage knowledge source. */\nexport interface AzureBlobKnowledgeSourceParameters {\n /** Key-based connection string or the ResourceId format if using a managed identity. */\n connectionString: string;\n /** The name of the blob storage container. */\n containerName: string;\n /** Optional folder path within the container. */\n folderPath?: string;\n /** Set to true if connecting to an ADLS Gen2 storage account. Default is false. */\n isAdlsGen2?: boolean;\n /** Consolidates all general ingestion settings. */\n ingestionParameters?: KnowledgeSourceIngestionParameters;\n /** Resources created by the knowledge source. */\n readonly createdResources?: CreatedResources;\n}\n\nexport function azureBlobKnowledgeSourceParametersSerializer(\n item: AzureBlobKnowledgeSourceParameters,\n): any {\n return {\n connectionString: item[\"connectionString\"],\n containerName: item[\"containerName\"],\n folderPath: item[\"folderPath\"],\n isADLSGen2: item[\"isAdlsGen2\"],\n ingestionParameters: !item[\"ingestionParameters\"]\n ? 
item[\"ingestionParameters\"]\n : knowledgeSourceIngestionParametersSerializer(item[\"ingestionParameters\"]),\n };\n}\n\nexport function azureBlobKnowledgeSourceParametersDeserializer(\n item: any,\n): AzureBlobKnowledgeSourceParameters {\n return {\n connectionString: item[\"connectionString\"],\n containerName: item[\"containerName\"],\n folderPath: item[\"folderPath\"],\n isAdlsGen2: item[\"isADLSGen2\"],\n ingestionParameters: !item[\"ingestionParameters\"]\n ? item[\"ingestionParameters\"]\n : knowledgeSourceIngestionParametersDeserializer(item[\"ingestionParameters\"]),\n createdResources: !item[\"createdResources\"]\n ? item[\"createdResources\"]\n : createdResourcesDeserializer(item[\"createdResources\"]),\n };\n}\n\n/** Represents a schedule for indexer execution. */\nexport interface IndexingSchedule {\n /** The interval of time between indexer executions. */\n interval: string;\n /** The time when an indexer should start running. */\n startTime?: Date;\n}\n\nexport function indexingScheduleSerializer(item: IndexingSchedule): any {\n return {\n interval: item[\"interval\"],\n startTime: !item[\"startTime\"] ? item[\"startTime\"] : item[\"startTime\"].toISOString(),\n };\n}\n\nexport function indexingScheduleDeserializer(item: any): IndexingSchedule {\n return {\n interval: item[\"interval\"],\n startTime: !item[\"startTime\"] ? item[\"startTime\"] : new Date(item[\"startTime\"]),\n };\n}\n\n/** Optional content extraction mode. Default is 'minimal'. */\nexport enum KnownKnowledgeSourceContentExtractionMode {\n /** Extracts only essential metadata while deferring most content processing. */\n Minimal = \"minimal\",\n /** Performs the full default content extraction pipeline. */\n Standard = \"standard\",\n}\n\n/**\n * Optional content extraction mode. Default is 'minimal'. 
\\\n * {@link KnownKnowledgeSourceContentExtractionMode} can be used interchangeably with KnowledgeSourceContentExtractionMode,\n * this enum contains the known values that the service supports.\n * ### Known values supported by the service\n * **minimal**: Extracts only essential metadata while deferring most content processing. \\\n * **standard**: Performs the full default content extraction pipeline.\n */\nexport type KnowledgeSourceContentExtractionMode = string;\n\n/** Resources created by the knowledge source. Keys represent resource types (e.g., 'datasource', 'indexer', 'skillset', 'index') and values represent resource names. */\nexport interface CreatedResources {\n /** Additional properties */\n additionalProperties?: Record<string, string>;\n}\n\nexport function createdResourcesDeserializer(item: any): CreatedResources {\n return {\n additionalProperties: serializeRecord(item, []),\n };\n}\n\n/** Configuration for OneLake knowledge source. */\nexport interface IndexedOneLakeKnowledgeSource extends KnowledgeSource {\n kind: \"indexedOneLake\";\n /** The parameters for the knowledge source. */\n indexedOneLakeParameters: IndexedOneLakeKnowledgeSourceParameters;\n}\n\nexport function indexedOneLakeKnowledgeSourceSerializer(item: IndexedOneLakeKnowledgeSource): any {\n return {\n name: item[\"name\"],\n description: item[\"description\"],\n kind: item[\"kind\"],\n \"@odata.etag\": item[\"eTag\"],\n encryptionKey: !item[\"encryptionKey\"]\n ? item[\"encryptionKey\"]\n : searchResourceEncryptionKeySerializer(item[\"encryptionKey\"]),\n indexedOneLakeParameters: indexedOneLakeKnowledgeSourceParametersSerializer(\n item[\"indexedOneLakeParameters\"],\n ),\n };\n}\n\nexport function indexedOneLakeKnowledgeSourceDeserializer(\n item: any,\n): IndexedOneLakeKnowledgeSource {\n return {\n name: item[\"name\"],\n description: item[\"description\"],\n kind: item[\"kind\"],\n eTag: item[\"@odata.etag\"],\n encryptionKey: !item[\"encryptionKey\"]\n ? 
item[\"encryptionKey\"]\n : searchResourceEncryptionKeyDeserializer(item[\"encryptionKey\"]),\n indexedOneLakeParameters: indexedOneLakeKnowledgeSourceParametersDeserializer(\n item[\"indexedOneLakeParameters\"],\n ),\n };\n}\n\n/** Parameters for OneLake knowledge source. */\nexport interface IndexedOneLakeKnowledgeSourceParameters {\n /** OneLake workspace ID. */\n fabricWorkspaceId: string;\n /** Specifies which OneLake lakehouse to access. */\n lakehouseId: string;\n /** Optional OneLakehouse folder or shortcut to filter OneLake content. */\n targetPath?: string;\n /** Consolidates all general ingestion settings. */\n ingestionParameters?: KnowledgeSourceIngestionParameters;\n /** Resources created by the knowledge source. */\n readonly createdResources?: CreatedResources;\n}\n\nexport function indexedOneLakeKnowledgeSourceParametersSerializer(\n item: IndexedOneLakeKnowledgeSourceParameters,\n): any {\n return {\n fabricWorkspaceId: item[\"fabricWorkspaceId\"],\n lakehouseId: item[\"lakehouseId\"],\n targetPath: item[\"targetPath\"],\n ingestionParameters: !item[\"ingestionParameters\"]\n ? item[\"ingestionParameters\"]\n : knowledgeSourceIngestionParametersSerializer(item[\"ingestionParameters\"]),\n };\n}\n\nexport function indexedOneLakeKnowledgeSourceParametersDeserializer(\n item: any,\n): IndexedOneLakeKnowledgeSourceParameters {\n return {\n fabricWorkspaceId: item[\"fabricWorkspaceId\"],\n lakehouseId: item[\"lakehouseId\"],\n targetPath: item[\"targetPath\"],\n ingestionParameters: !item[\"ingestionParameters\"]\n ? item[\"ingestionParameters\"]\n : knowledgeSourceIngestionParametersDeserializer(item[\"ingestionParameters\"]),\n createdResources: !item[\"createdResources\"]\n ? item[\"createdResources\"]\n : createdResourcesDeserializer(item[\"createdResources\"]),\n };\n}\n\n/** Knowledge Source targeting web results. */\nexport interface WebKnowledgeSource extends KnowledgeSource {\n kind: \"web\";\n /** The parameters for the web knowledge source. 
*/\n webParameters?: WebKnowledgeSourceParameters;\n}\n\nexport function webKnowledgeSourceSerializer(item: WebKnowledgeSource): any {\n return {\n name: item[\"name\"],\n description: item[\"description\"],\n kind: item[\"kind\"],\n \"@odata.etag\": item[\"eTag\"],\n encryptionKey: !item[\"encryptionKey\"]\n ? item[\"encryptionKey\"]\n : searchResourceEncryptionKeySerializer(item[\"encryptionKey\"]),\n webParameters: !item[\"webParameters\"]\n ? item[\"webParameters\"]\n : webKnowledgeSourceParametersSerializer(item[\"webParameters\"]),\n };\n}\n\nexport function webKnowledgeSourceDeserializer(item: any): WebKnowledgeSource {\n return {\n name: item[\"name\"],\n description: item[\"description\"],\n kind: item[\"kind\"],\n eTag: item[\"@odata.etag\"],\n encryptionKey: !item[\"encryptionKey\"]\n ? item[\"encryptionKey\"]\n : searchResourceEncryptionKeyDeserializer(item[\"encryptionKey\"]),\n webParameters: !item[\"webParameters\"]\n ? item[\"webParameters\"]\n : webKnowledgeSourceParametersDeserializer(item[\"webParameters\"]),\n };\n}\n\n/** Parameters for web knowledge source. */\nexport interface WebKnowledgeSourceParameters {\n /** Domain allow/block configuration for web results. */\n domains?: WebKnowledgeSourceDomains;\n}\n\nexport function webKnowledgeSourceParametersSerializer(item: WebKnowledgeSourceParameters): any {\n return {\n domains: !item[\"domains\"]\n ? item[\"domains\"]\n : webKnowledgeSourceDomainsSerializer(item[\"domains\"]),\n };\n}\n\nexport function webKnowledgeSourceParametersDeserializer(item: any): WebKnowledgeSourceParameters {\n return {\n domains: !item[\"domains\"]\n ? item[\"domains\"]\n : webKnowledgeSourceDomainsDeserializer(item[\"domains\"]),\n };\n}\n\n/** Domain allow/block configuration for web knowledge source. */\nexport interface WebKnowledgeSourceDomains {\n /** Domains that are allowed for web results. */\n allowedDomains?: WebKnowledgeSourceDomain[];\n /** Domains that are blocked from web results. 
*/\n blockedDomains?: WebKnowledgeSourceDomain[];\n}\n\nexport function webKnowledgeSourceDomainsSerializer(item: WebKnowledgeSourceDomains): any {\n return {\n allowedDomains: !item[\"allowedDomains\"]\n ? item[\"allowedDomains\"]\n : webKnowledgeSourceDomainArraySerializer(item[\"allowedDomains\"]),\n blockedDomains: !item[\"blockedDomains\"]\n ? item[\"blockedDomains\"]\n : webKnowledgeSourceDomainArraySerializer(item[\"blockedDomains\"]),\n };\n}\n\nexport function webKnowledgeSourceDomainsDeserializer(item: any): WebKnowledgeSourceDomains {\n return {\n allowedDomains: !item[\"allowedDomains\"]\n ? item[\"allowedDomains\"]\n : webKnowledgeSourceDomainArrayDeserializer(item[\"allowedDomains\"]),\n blockedDomains: !item[\"blockedDomains\"]\n ? item[\"blockedDomains\"]\n : webKnowledgeSourceDomainArrayDeserializer(item[\"blockedDomains\"]),\n };\n}\n\nexport function webKnowledgeSourceDomainArraySerializer(\n result: Array<WebKnowledgeSourceDomain>,\n): any[] {\n return result.map((item) => {\n return webKnowledgeSourceDomainSerializer(item);\n });\n}\n\nexport function webKnowledgeSourceDomainArrayDeserializer(\n result: Array<WebKnowledgeSourceDomain>,\n): any[] {\n return result.map((item) => {\n return webKnowledgeSourceDomainDeserializer(item);\n });\n}\n\n/** Configuration for web knowledge source domain. */\nexport interface WebKnowledgeSourceDomain {\n /** The address of the domain. */\n address: string;\n /** Whether or not to include subpages from this domain. */\n includeSubpages?: boolean;\n}\n\nexport function webKnowledgeSourceDomainSerializer(item: WebKnowledgeSourceDomain): any {\n return { address: item[\"address\"], includeSubpages: item[\"includeSubpages\"] };\n}\n\nexport function webKnowledgeSourceDomainDeserializer(item: any): WebKnowledgeSourceDomain {\n return {\n address: item[\"address\"],\n includeSubpages: item[\"includeSubpages\"],\n };\n}\n\n/** Result from listing knowledge sources. 
*/\nexport interface _ListKnowledgeSourcesResult {\n /** The knowledge sources in the service. */\n value: KnowledgeSourceUnion[];\n}\n\nexport function _listKnowledgeSourcesResultDeserializer(item: any): _ListKnowledgeSourcesResult {\n return {\n value: knowledgeSourceUnionArrayDeserializer(item[\"value\"]),\n };\n}\n\nexport function knowledgeSourceUnionArraySerializer(result: Array<KnowledgeSourceUnion>): any[] {\n return result.map((item) => {\n return knowledgeSourceUnionSerializer(item);\n });\n}\n\nexport function knowledgeSourceUnionArrayDeserializer(result: Array<KnowledgeSourceUnion>): any[] {\n return result.map((item) => {\n return knowledgeSourceUnionDeserializer(item);\n });\n}\n\n/** The current synchronization status of the knowledge source. */\nexport enum KnownKnowledgeSourceSynchronizationStatus {\n /** The knowledge source is being provisioned. */\n Creating = \"creating\",\n /** The knowledge source is active and synchronization runs are occurring. */\n Active = \"active\",\n /** The knowledge source is being deleted and synchronization is paused. */\n Deleting = \"deleting\",\n}\n\n/**\n * The current synchronization status of the knowledge source. \\\n * {@link KnownKnowledgeSourceSynchronizationStatus} can be used interchangeably with KnowledgeSourceSynchronizationStatus,\n * this enum contains the known values that the service supports.\n * ### Known values supported by the service\n * **creating**: The knowledge source is being provisioned. \\\n * **active**: The knowledge source is active and synchronization runs are occurring. \\\n * **deleting**: The knowledge source is being deleted and synchronization is paused.\n */\nexport type KnowledgeSourceSynchronizationStatus = string;\n\n/** Response from a get service statistics request. If successful, it includes service level counters and limits. */\nexport interface SearchServiceStatistics {\n /** Service level resource counters. 
*/\n counters: ServiceCounters;\n /** Service level general limits. */\n limits: ServiceLimits;\n}\n\nexport function searchServiceStatisticsDeserializer(item: any): SearchServiceStatistics {\n return {\n counters: serviceCountersDeserializer(item[\"counters\"]),\n limits: serviceLimitsDeserializer(item[\"limits\"]),\n };\n}\n\n/** Represents service-level resource counters and quotas. */\nexport interface ServiceCounters {\n /** Total number of aliases. */\n aliasCounter: ResourceCounter;\n /** Total number of documents across all indexes in the service. */\n documentCounter: ResourceCounter;\n /** Total number of indexes. */\n indexCounter: ResourceCounter;\n /** Total number of indexers. */\n indexerCounter: ResourceCounter;\n /** Total number of data sources. */\n dataSourceCounter: ResourceCounter;\n /** Total size of used storage in bytes. */\n storageSizeCounter: ResourceCounter;\n /** Total number of synonym maps. */\n synonymMapCounter: ResourceCounter;\n /** Total number of skillsets. */\n skillsetCounter: ResourceCounter;\n /** Total memory consumption of all vector indexes within the service, in bytes. */\n vectorIndexSizeCounter: ResourceCounter;\n}\n\nexport function serviceCountersDeserializer(item: any): ServiceCounters {\n return {\n aliasCounter: resourceCounterDeserializer(item[\"aliasesCount\"]),\n documentCounter: resourceCounterDeserializer(item[\"documentCount\"]),\n indexCounter: resourceCounterDeserializer(item[\"indexesCount\"]),\n indexerCounter: resourceCounterDeserializer(item[\"indexersCount\"]),\n dataSourceCounter: resourceCounterDeserializer(item[\"dataSourcesCount\"]),\n storageSizeCounter: resourceCounterDeserializer(item[\"storageSize\"]),\n synonymMapCounter: resourceCounterDeserializer(item[\"synonymMaps\"]),\n skillsetCounter: resourceCounterDeserializer(item[\"skillsetCount\"]),\n vectorIndexSizeCounter: resourceCounterDeserializer(item[\"vectorIndexSize\"]),\n };\n}\n\n/** Represents a resource's usage and quota. 
*/\nexport interface ResourceCounter {\n /** The resource usage amount. */\n usage: number;\n /** The resource amount quota. */\n quota?: number;\n}\n\nexport function resourceCounterDeserializer(item: any): ResourceCounter {\n return {\n usage: item[\"usage\"],\n quota: item[\"quota\"],\n };\n}\n\n/** Represents various service level limits. */\nexport interface ServiceLimits {\n /** The maximum allowed fields per index. */\n maxFieldsPerIndex?: number;\n /** The maximum depth which you can nest sub-fields in an index, including the top-level complex field. For example, a/b/c has a nesting depth of 3. */\n maxFieldNestingDepthPerIndex?: number;\n /** The maximum number of fields of type Collection(Edm.ComplexType) allowed in an index. */\n maxComplexCollectionFieldsPerIndex?: number;\n /** The maximum number of objects in complex collections allowed per document. */\n maxComplexObjectsInCollectionsPerDocument?: number;\n /** The maximum amount of storage in bytes allowed per index. */\n maxStoragePerIndexInBytes?: number;\n /** The maximum cumulative indexer runtime in seconds allowed for the service. */\n maxCumulativeIndexerRuntimeSeconds?: number;\n}\n\nexport function serviceLimitsDeserializer(item: any): ServiceLimits {\n return {\n maxFieldsPerIndex: item[\"maxFieldsPerIndex\"],\n maxFieldNestingDepthPerIndex: item[\"maxFieldNestingDepthPerIndex\"],\n maxComplexCollectionFieldsPerIndex: item[\"maxComplexCollectionFieldsPerIndex\"],\n maxComplexObjectsInCollectionsPerDocument: item[\"maxComplexObjectsInCollectionsPerDocument\"],\n maxStoragePerIndexInBytes: item[\"maxStoragePerIndex\"],\n maxCumulativeIndexerRuntimeSeconds: item[\"maxCumulativeIndexerRuntimeSeconds\"],\n };\n}\n\n/** Represents a datasource definition, which can be used to configure an indexer. */\nexport interface SearchIndexerDataSourceConnection {\n /** The name of the datasource. */\n name: string;\n /** The description of the datasource. 
*/\n description?: string;\n /** The type of the datasource. */\n type: SearchIndexerDataSourceType;\n /** The data container for the datasource. */\n container: SearchIndexerDataContainer;\n /** An explicit managed identity to use for this datasource. If not specified and the connection string is a managed identity, the system-assigned managed identity is used. If not specified, the value remains unchanged. If \"none\" is specified, the value of this property is cleared. */\n identity?: SearchIndexerDataIdentityUnion;\n /** The data change detection policy for the datasource. */\n dataChangeDetectionPolicy?: DataChangeDetectionPolicyUnion;\n /** The data deletion detection policy for the datasource. */\n dataDeletionDetectionPolicy?: DataDeletionDetectionPolicyUnion;\n /** The ETag of the data source. */\n eTag?: string;\n /** A description of an encryption key that you create in Azure Key Vault. This key is used to provide an additional level of encryption-at-rest for your datasource definition when you want full assurance that no one, not even Microsoft, can decrypt your data source definition. Once you have encrypted your data source definition, it will always remain encrypted. The search service will ignore attempts to set this property to null. You can change this property as needed if you want to rotate your encryption key; Your datasource definition will be unaffected. Encryption with customer-managed keys is not available for free search services, and is only available for paid services created on or after January 1, 2019. */\n encryptionKey?: SearchResourceEncryptionKey;\n /** The connection string for the datasource. Set to `<unchanged>` (with brackets) if you don't want the connection string updated. Set to `<redacted>` if you want to remove the connection string value from the datasource. 
*/\n connectionString?: string;\n}\n\nexport function searchIndexerDataSourceConnectionSerializer(\n item: SearchIndexerDataSourceConnection,\n): any {\n return {\n name: item[\"name\"],\n description: item[\"description\"],\n type: item[\"type\"],\n credentials: _searchIndexerDataSourceConnectionCredentialsSerializer(item),\n container: searchIndexerDataContainerSerializer(item[\"container\"]),\n identity: !item[\"identity\"]\n ? item[\"identity\"]\n : searchIndexerDataIdentityUnionSerializer(item[\"identity\"]),\n dataChangeDetectionPolicy: !item[\"dataChangeDetectionPolicy\"]\n ? item[\"dataChangeDetectionPolicy\"]\n : dataChangeDetectionPolicyUnionSerializer(item[\"dataChangeDetectionPolicy\"]),\n dataDeletionDetectionPolicy: !item[\"dataDeletionDetectionPolicy\"]\n ? item[\"dataDeletionDetectionPolicy\"]\n : dataDeletionDetectionPolicyUnionSerializer(item[\"dataDeletionDetectionPolicy\"]),\n \"@odata.etag\": item[\"eTag\"],\n encryptionKey: !item[\"encryptionKey\"]\n ? item[\"encryptionKey\"]\n : searchResourceEncryptionKeySerializer(item[\"encryptionKey\"]),\n };\n}\n\nexport function searchIndexerDataSourceConnectionDeserializer(\n item: any,\n): SearchIndexerDataSourceConnection {\n return {\n name: item[\"name\"],\n description: item[\"description\"],\n type: item[\"type\"],\n ..._searchIndexerDataSourceConnectionCredentialsDeserializer(item[\"credentials\"]),\n container: searchIndexerDataContainerDeserializer(item[\"container\"]),\n identity: !item[\"identity\"]\n ? item[\"identity\"]\n : searchIndexerDataIdentityUnionDeserializer(item[\"identity\"]),\n dataChangeDetectionPolicy: !item[\"dataChangeDetectionPolicy\"]\n ? item[\"dataChangeDetectionPolicy\"]\n : dataChangeDetectionPolicyUnionDeserializer(item[\"dataChangeDetectionPolicy\"]),\n dataDeletionDetectionPolicy: !item[\"dataDeletionDetectionPolicy\"]\n ? 
item[\"dataDeletionDetectionPolicy\"]\n : dataDeletionDetectionPolicyUnionDeserializer(item[\"dataDeletionDetectionPolicy\"]),\n eTag: item[\"@odata.etag\"],\n encryptionKey: !item[\"encryptionKey\"]\n ? item[\"encryptionKey\"]\n : searchResourceEncryptionKeyDeserializer(item[\"encryptionKey\"]),\n };\n}\n\n/** Defines the type of a datasource. */\nexport enum KnownSearchIndexerDataSourceType {\n /** Indicates an Azure SQL datasource. */\n AzureSql = \"azuresql\",\n /** Indicates a CosmosDB datasource. */\n CosmosDb = \"cosmosdb\",\n /** Indicates an Azure Blob datasource. */\n AzureBlob = \"azureblob\",\n /** Indicates an Azure Table datasource. */\n AzureTable = \"azuretable\",\n /** Indicates a MySql datasource. */\n MySql = \"mysql\",\n /** Indicates an ADLS Gen2 datasource. */\n AdlsGen2 = \"adlsgen2\",\n /** Indicates a Microsoft Fabric OneLake datasource. */\n OneLake = \"onelake\",\n /** Indicates a SharePoint datasource. */\n SharePoint = \"sharepoint\",\n}\n\n/**\n * Defines the type of a datasource. \\\n * {@link KnownSearchIndexerDataSourceType} can be used interchangeably with SearchIndexerDataSourceType,\n * this enum contains the known values that the service supports.\n * ### Known values supported by the service\n * **azuresql**: Indicates an Azure SQL datasource. \\\n * **cosmosdb**: Indicates a CosmosDB datasource. \\\n * **azureblob**: Indicates an Azure Blob datasource. \\\n * **azuretable**: Indicates an Azure Table datasource. \\\n * **mysql**: Indicates a MySql datasource. \\\n * **adlsgen2**: Indicates an ADLS Gen2 datasource. \\\n * **onelake**: Indicates a Microsoft Fabric OneLake datasource. \\\n * **sharepoint**: Indicates a SharePoint datasource.\n */\nexport type SearchIndexerDataSourceType = string;\n\n/** Represents credentials that can be used to connect to a datasource. */\nexport interface DataSourceCredentials {\n /** The connection string for the datasource. 
Set to `<unchanged>` (with brackets) if you don't want the connection string updated. Set to `<redacted>` if you want to remove the connection string value from the datasource. */\n connectionString?: string;\n}\n\nexport function dataSourceCredentialsSerializer(item: DataSourceCredentials): any {\n return { connectionString: item[\"connectionString\"] };\n}\n\nexport function dataSourceCredentialsDeserializer(item: any): DataSourceCredentials {\n return {\n connectionString: item[\"connectionString\"],\n };\n}\n\n/** Represents information about the entity (such as Azure SQL table or CosmosDB collection) that will be indexed. */\nexport interface SearchIndexerDataContainer {\n /** The name of the table or view (for Azure SQL data source) or collection (for CosmosDB data source) that will be indexed. */\n name: string;\n /** A query that is applied to this data container. The syntax and meaning of this parameter is datasource-specific. Not supported by Azure SQL datasources. */\n query?: string;\n}\n\nexport function searchIndexerDataContainerSerializer(item: SearchIndexerDataContainer): any {\n return { name: item[\"name\"], query: item[\"query\"] };\n}\n\nexport function searchIndexerDataContainerDeserializer(item: any): SearchIndexerDataContainer {\n return {\n name: item[\"name\"],\n query: item[\"query\"],\n };\n}\n\n/** Base type for data change detection policies. */\nexport interface DataChangeDetectionPolicy {\n /** The discriminator for derived types. 
*/\n /** The discriminator possible values: #Microsoft.Azure.Search.HighWaterMarkChangeDetectionPolicy, #Microsoft.Azure.Search.SqlIntegratedChangeTrackingPolicy */\n odatatype: string;\n}\n\nexport function dataChangeDetectionPolicySerializer(item: DataChangeDetectionPolicy): any {\n return { \"@odata.type\": item[\"odatatype\"] };\n}\n\nexport function dataChangeDetectionPolicyDeserializer(item: any): DataChangeDetectionPolicy {\n return {\n odatatype: item[\"@odata.type\"],\n };\n}\n\n/** Alias for DataChangeDetectionPolicyUnion */\nexport type DataChangeDetectionPolicyUnion =\n | HighWaterMarkChangeDetectionPolicy\n | SqlIntegratedChangeTrackingPolicy\n | DataChangeDetectionPolicy;\n\nexport function dataChangeDetectionPolicyUnionSerializer(\n item: DataChangeDetectionPolicyUnion,\n): any {\n switch (item.odatatype) {\n case \"#Microsoft.Azure.Search.HighWaterMarkChangeDetectionPolicy\":\n return highWaterMarkChangeDetectionPolicySerializer(\n item as HighWaterMarkChangeDetectionPolicy,\n );\n\n case \"#Microsoft.Azure.Search.SqlIntegratedChangeTrackingPolicy\":\n return sqlIntegratedChangeTrackingPolicySerializer(item as SqlIntegratedChangeTrackingPolicy);\n\n default:\n return dataChangeDetectionPolicySerializer(item);\n }\n}\n\nexport function dataChangeDetectionPolicyUnionDeserializer(\n item: any,\n): DataChangeDetectionPolicyUnion {\n switch (item[\"@odata.type\"]) {\n case \"#Microsoft.Azure.Search.HighWaterMarkChangeDetectionPolicy\":\n return highWaterMarkChangeDetectionPolicyDeserializer(\n item as HighWaterMarkChangeDetectionPolicy,\n );\n\n case \"#Microsoft.Azure.Search.SqlIntegratedChangeTrackingPolicy\":\n return sqlIntegratedChangeTrackingPolicyDeserializer(\n item as SqlIntegratedChangeTrackingPolicy,\n );\n\n default:\n return dataChangeDetectionPolicyDeserializer(item);\n }\n}\n\n/** Defines a data change detection policy that captures changes based on the value of a high water mark column. 
*/\nexport interface HighWaterMarkChangeDetectionPolicy extends DataChangeDetectionPolicy {\n /** The name of the high water mark column. */\n highWaterMarkColumnName: string;\n /** A URI fragment specifying the type of data change detection policy. */\n odatatype: \"#Microsoft.Azure.Search.HighWaterMarkChangeDetectionPolicy\";\n}\n\nexport function highWaterMarkChangeDetectionPolicySerializer(\n item: HighWaterMarkChangeDetectionPolicy,\n): any {\n return {\n \"@odata.type\": item[\"odatatype\"],\n highWaterMarkColumnName: item[\"highWaterMarkColumnName\"],\n };\n}\n\nexport function highWaterMarkChangeDetectionPolicyDeserializer(\n item: any,\n): HighWaterMarkChangeDetectionPolicy {\n return {\n odatatype: item[\"@odata.type\"],\n highWaterMarkColumnName: item[\"highWaterMarkColumnName\"],\n };\n}\n\n/** Defines a data change detection policy that captures changes using the Integrated Change Tracking feature of Azure SQL Database. */\nexport interface SqlIntegratedChangeTrackingPolicy extends DataChangeDetectionPolicy {\n /** A URI fragment specifying the type of data change detection policy. */\n odatatype: \"#Microsoft.Azure.Search.SqlIntegratedChangeTrackingPolicy\";\n}\n\nexport function sqlIntegratedChangeTrackingPolicySerializer(\n item: SqlIntegratedChangeTrackingPolicy,\n): any {\n return { \"@odata.type\": item[\"odatatype\"] };\n}\n\nexport function sqlIntegratedChangeTrackingPolicyDeserializer(\n item: any,\n): SqlIntegratedChangeTrackingPolicy {\n return {\n odatatype: item[\"@odata.type\"],\n };\n}\n\n/** Base type for data deletion detection policies. */\nexport interface DataDeletionDetectionPolicy {\n /** The discriminator for derived types. 
*/\n /** The discriminator possible values: #Microsoft.Azure.Search.SoftDeleteColumnDeletionDetectionPolicy, #Microsoft.Azure.Search.NativeBlobSoftDeleteDeletionDetectionPolicy */\n odatatype: string;\n}\n\nexport function dataDeletionDetectionPolicySerializer(item: DataDeletionDetectionPolicy): any {\n return { \"@odata.type\": item[\"odatatype\"] };\n}\n\nexport function dataDeletionDetectionPolicyDeserializer(item: any): DataDeletionDetectionPolicy {\n return {\n odatatype: item[\"@odata.type\"],\n };\n}\n\n/** Alias for DataDeletionDetectionPolicyUnion */\nexport type DataDeletionDetectionPolicyUnion =\n | SoftDeleteColumnDeletionDetectionPolicy\n | NativeBlobSoftDeleteDeletionDetectionPolicy\n | DataDeletionDetectionPolicy;\n\nexport function dataDeletionDetectionPolicyUnionSerializer(\n item: DataDeletionDetectionPolicyUnion,\n): any {\n switch (item.odatatype) {\n case \"#Microsoft.Azure.Search.SoftDeleteColumnDeletionDetectionPolicy\":\n return softDeleteColumnDeletionDetectionPolicySerializer(\n item as SoftDeleteColumnDeletionDetectionPolicy,\n );\n\n case \"#Microsoft.Azure.Search.NativeBlobSoftDeleteDeletionDetectionPolicy\":\n return nativeBlobSoftDeleteDeletionDetectionPolicySerializer(\n item as NativeBlobSoftDeleteDeletionDetectionPolicy,\n );\n\n default:\n return dataDeletionDetectionPolicySerializer(item);\n }\n}\n\nexport function dataDeletionDetectionPolicyUnionDeserializer(\n item: any,\n): DataDeletionDetectionPolicyUnion {\n switch (item[\"@odata.type\"]) {\n case \"#Microsoft.Azure.Search.SoftDeleteColumnDeletionDetectionPolicy\":\n return softDeleteColumnDeletionDetectionPolicyDeserializer(\n item as SoftDeleteColumnDeletionDetectionPolicy,\n );\n\n case \"#Microsoft.Azure.Search.NativeBlobSoftDeleteDeletionDetectionPolicy\":\n return nativeBlobSoftDeleteDeletionDetectionPolicyDeserializer(\n item as NativeBlobSoftDeleteDeletionDetectionPolicy,\n );\n\n default:\n return dataDeletionDetectionPolicyDeserializer(item);\n }\n}\n\n/** Defines 
a data deletion detection policy that implements a soft-deletion strategy. It determines whether an item should be deleted based on the value of a designated 'soft delete' column. */\nexport interface SoftDeleteColumnDeletionDetectionPolicy extends DataDeletionDetectionPolicy {\n /** The name of the column to use for soft-deletion detection. */\n softDeleteColumnName?: string;\n /** The marker value that identifies an item as deleted. */\n softDeleteMarkerValue?: string;\n /** A URI fragment specifying the type of data deletion detection policy. */\n odatatype: \"#Microsoft.Azure.Search.SoftDeleteColumnDeletionDetectionPolicy\";\n}\n\nexport function softDeleteColumnDeletionDetectionPolicySerializer(\n item: SoftDeleteColumnDeletionDetectionPolicy,\n): any {\n return {\n \"@odata.type\": item[\"odatatype\"],\n softDeleteColumnName: item[\"softDeleteColumnName\"],\n softDeleteMarkerValue: item[\"softDeleteMarkerValue\"],\n };\n}\n\nexport function softDeleteColumnDeletionDetectionPolicyDeserializer(\n item: any,\n): SoftDeleteColumnDeletionDetectionPolicy {\n return {\n odatatype: item[\"@odata.type\"],\n softDeleteColumnName: item[\"softDeleteColumnName\"],\n softDeleteMarkerValue: item[\"softDeleteMarkerValue\"],\n };\n}\n\n/** Defines a data deletion detection policy utilizing Azure Blob Storage's native soft delete feature for deletion detection. */\nexport interface NativeBlobSoftDeleteDeletionDetectionPolicy extends DataDeletionDetectionPolicy {\n /** A URI fragment specifying the type of data deletion detection policy. 
*/\n odatatype: \"#Microsoft.Azure.Search.NativeBlobSoftDeleteDeletionDetectionPolicy\";\n}\n\nexport function nativeBlobSoftDeleteDeletionDetectionPolicySerializer(\n item: NativeBlobSoftDeleteDeletionDetectionPolicy,\n): any {\n return { \"@odata.type\": item[\"odatatype\"] };\n}\n\nexport function nativeBlobSoftDeleteDeletionDetectionPolicyDeserializer(\n item: any,\n): NativeBlobSoftDeleteDeletionDetectionPolicy {\n return {\n odatatype: item[\"@odata.type\"],\n };\n}\n\n/** Response from a List Datasources request. If successful, it includes the full definitions of all datasources. */\nexport interface ListDataSourcesResult {\n /** The datasources in the Search service. */\n readonly dataSources: SearchIndexerDataSourceConnection[];\n}\n\nexport function listDataSourcesResultDeserializer(item: any): ListDataSourcesResult {\n return {\n dataSources: searchIndexerDataSourceConnectionArrayDeserializer(item[\"value\"]),\n };\n}\n\nexport function searchIndexerDataSourceConnectionArraySerializer(\n result: Array<SearchIndexerDataSourceConnection>,\n): any[] {\n return result.map((item) => {\n return searchIndexerDataSourceConnectionSerializer(item);\n });\n}\n\nexport function searchIndexerDataSourceConnectionArrayDeserializer(\n result: Array<SearchIndexerDataSourceConnection>,\n): any[] {\n return result.map((item) => {\n return searchIndexerDataSourceConnectionDeserializer(item);\n });\n}\n\n/** Represents an indexer. */\nexport interface SearchIndexer {\n /** The name of the indexer. */\n name: string;\n /** The description of the indexer. */\n description?: string;\n /** The name of the datasource from which this indexer reads data. */\n dataSourceName: string;\n /** The name of the skillset executing with this indexer. */\n skillsetName?: string;\n /** The name of the index to which this indexer writes data. */\n targetIndexName: string;\n /** The schedule for this indexer. */\n schedule?: IndexingSchedule;\n /** Parameters for indexer execution. 
*/
  parameters?: IndexingParameters;
  /** Defines mappings between fields in the data source and corresponding target fields in the index. */
  fieldMappings?: FieldMapping[];
  /** Output field mappings are applied after enrichment and immediately before indexing. */
  outputFieldMappings?: FieldMapping[];
  /** A value indicating whether the indexer is disabled. Default is false. */
  isDisabled?: boolean;
  /** The ETag of the indexer. */
  eTag?: string;
  /** A description of an encryption key that you create in Azure Key Vault. This key is used to provide an additional level of encryption-at-rest for your indexer definition (as well as indexer execution status) when you want full assurance that no one, not even Microsoft, can decrypt them. Once you have encrypted your indexer definition, it will always remain encrypted. The search service will ignore attempts to set this property to null. You can change this property as needed if you want to rotate your encryption key; Your indexer definition (and indexer execution status) will be unaffected. Encryption with customer-managed keys is not available for free search services, and is only available for paid services created on or after January 1, 2019. */
  encryptionKey?: SearchResourceEncryptionKey;
}

/**
 * Serializes a SearchIndexer to its REST wire form.
 * Wire-name remappings visible below: `isDisabled` is written as `disabled`, and `eTag` as `"@odata.etag"`.
 * Optional nested objects pass through unchanged when falsy (undefined/null) instead of being serialized.
 */
export function searchIndexerSerializer(item: SearchIndexer): any {
  return {
    name: item["name"],
    description: item["description"],
    dataSourceName: item["dataSourceName"],
    skillsetName: item["skillsetName"],
    targetIndexName: item["targetIndexName"],
    schedule: !item["schedule"] ? item["schedule"] : indexingScheduleSerializer(item["schedule"]),
    parameters: !item["parameters"]
      ? item["parameters"]
      : indexingParametersSerializer(item["parameters"]),
    fieldMappings: !item["fieldMappings"]
      ? item["fieldMappings"]
      : fieldMappingArraySerializer(item["fieldMappings"]),
    outputFieldMappings: !item["outputFieldMappings"]
      ? item["outputFieldMappings"]
      : fieldMappingArraySerializer(item["outputFieldMappings"]),
    disabled: item["isDisabled"],
    "@odata.etag": item["eTag"],
    encryptionKey: !item["encryptionKey"]
      ? item["encryptionKey"]
      : searchResourceEncryptionKeySerializer(item["encryptionKey"]),
  };
}

/**
 * Deserializes a wire-form indexer into a SearchIndexer.
 * Mirror of searchIndexerSerializer: reads `disabled` into `isDisabled` and `"@odata.etag"` into `eTag`.
 */
export function searchIndexerDeserializer(item: any): SearchIndexer {
  return {
    name: item["name"],
    description: item["description"],
    dataSourceName: item["dataSourceName"],
    skillsetName: item["skillsetName"],
    targetIndexName: item["targetIndexName"],
    schedule: !item["schedule"] ? item["schedule"] : indexingScheduleDeserializer(item["schedule"]),
    parameters: !item["parameters"]
      ? item["parameters"]
      : indexingParametersDeserializer(item["parameters"]),
    fieldMappings: !item["fieldMappings"]
      ? item["fieldMappings"]
      : fieldMappingArrayDeserializer(item["fieldMappings"]),
    outputFieldMappings: !item["outputFieldMappings"]
      ? item["outputFieldMappings"]
      : fieldMappingArrayDeserializer(item["outputFieldMappings"]),
    isDisabled: item["disabled"],
    eTag: item["@odata.etag"],
    encryptionKey: !item["encryptionKey"]
      ? item["encryptionKey"]
      : searchResourceEncryptionKeyDeserializer(item["encryptionKey"]),
  };
}

/** Represents parameters for indexer execution. */
export interface IndexingParameters {
  /** The number of items that are read from the data source and indexed as a single batch in order to improve performance. The default depends on the data source type. */
  batchSize?: number;
  /** The maximum number of items that can fail indexing for indexer execution to still be considered successful. -1 means no limit. Default is 0. */
  maxFailedItems?: number;
  /** The maximum number of items in a single batch that can fail indexing for the batch to still be considered successful. -1 means no limit. Default is 0. 
*/\n maxFailedItemsPerBatch?: number;\n /** A dictionary of indexer-specific configuration properties. Each name is the name of a specific property. Each value must be of a primitive type. */\n configuration?: IndexingParametersConfiguration;\n}\n\nexport function indexingParametersSerializer(item: IndexingParameters): any {\n return {\n batchSize: item[\"batchSize\"],\n maxFailedItems: item[\"maxFailedItems\"],\n maxFailedItemsPerBatch: item[\"maxFailedItemsPerBatch\"],\n configuration: !item[\"configuration\"]\n ? item[\"configuration\"]\n : indexingParametersConfigurationSerializer(item[\"configuration\"]),\n };\n}\n\nexport function indexingParametersDeserializer(item: any): IndexingParameters {\n return {\n batchSize: item[\"batchSize\"],\n maxFailedItems: item[\"maxFailedItems\"],\n maxFailedItemsPerBatch: item[\"maxFailedItemsPerBatch\"],\n configuration: !item[\"configuration\"]\n ? item[\"configuration\"]\n : indexingParametersConfigurationDeserializer(item[\"configuration\"]),\n };\n}\n\n/** A dictionary of indexer-specific configuration properties. Each name is the name of a specific property. Each value must be of a primitive type. */\nexport interface IndexingParametersConfiguration {\n /** Represents the parsing mode for indexing from an Azure blob data source. */\n parsingMode?: BlobIndexerParsingMode;\n /** Comma-delimited list of filename extensions to ignore when processing from Azure blob storage. For example, you could exclude \".png, .mp4\" to skip over those files during indexing. */\n excludedFileNameExtensions?: string;\n /** Comma-delimited list of filename extensions to select when processing from Azure blob storage. For example, you could focus indexing on specific application files \".docx, .pptx, .msg\" to specifically include those file types. 
*/
  indexedFileNameExtensions?: string;
  /** For Azure blobs, set to false if you want to continue indexing when an unsupported content type is encountered, and you don't know all the content types (file extensions) in advance. */
  failOnUnsupportedContentType?: boolean;
  /** For Azure blobs, set to false if you want to continue indexing if a document fails indexing. */
  failOnUnprocessableDocument?: boolean;
  /** For Azure blobs, set this property to true to still index storage metadata for blob content that is too large to process. Oversized blobs are treated as errors by default. For limits on blob size, see https://learn.microsoft.com/azure/search/search-limits-quotas-capacity. */
  indexStorageMetadataOnlyForOversizedDocuments?: boolean;
  /** For CSV blobs, specifies a comma-delimited list of column headers, useful for mapping source fields to destination fields in an index. */
  delimitedTextHeaders?: string;
  /** For CSV blobs, specifies the end-of-line single-character delimiter for CSV files where each line starts a new document (for example, "|"). */
  delimitedTextDelimiter?: string;
  /** For CSV blobs, indicates that the first (non-blank) line of each blob contains headers. */
  firstLineContainsHeaders?: boolean;
  /** Specifies the submode that will determine whether a markdown file will be parsed into exactly one search document or multiple search documents. Default is `oneToMany`. */
  markdownParsingSubmode?: MarkdownParsingSubmode;
  /** Specifies the max header depth that will be considered while grouping markdown content. Default is `h6`. */
  markdownHeaderDepth?: MarkdownHeaderDepth;
  /** For JSON arrays, given a structured or semi-structured document, you can specify a path to the array using this property. */
  documentRoot?: string;
  /** Specifies the data to extract from Azure blob storage and tells the indexer which data to extract from image content when "imageAction" is set to a value other than "none". This applies to embedded image content in a .PDF or other application, or image files such as .jpg and .png, in Azure blobs. */
  dataToExtract?: BlobIndexerDataToExtract;
  /** Determines how to process embedded images and image files in Azure blob storage. Setting the "imageAction" configuration to any value other than "none" requires that a skillset also be attached to that indexer. */
  imageAction?: BlobIndexerImageAction;
  /** If true, will create a path //document//file_data that is an object representing the original file data downloaded from your blob data source. This allows you to pass the original file data to a custom skill for processing within the enrichment pipeline, or to the Document Extraction skill. */
  allowSkillsetToReadFileData?: boolean;
  /** Determines algorithm for text extraction from PDF files in Azure blob storage. */
  pdfTextRotationAlgorithm?: BlobIndexerPDFTextRotationAlgorithm;
  /** Specifies the environment in which the indexer should execute. */
  executionEnvironment?: IndexerExecutionEnvironment;
  /** Increases the timeout beyond the 5-minute default for Azure SQL database data sources, specified in the format "hh:mm:ss". */
  queryTimeout?: string;
  /** Additional properties */
  additionalProperties?: Record<string, any>;
}

/**
 * Serializes the configuration bag to its wire form.
 * The additional (untyped) properties are spread first, so any name collision with a
 * known field is overwritten by the explicitly named field written after the spread.
 */
export function indexingParametersConfigurationSerializer(
  item: IndexingParametersConfiguration,
): any {
  return {
    ...serializeRecord(item.additionalProperties ?? {}),
    parsingMode: item["parsingMode"],
    excludedFileNameExtensions: item["excludedFileNameExtensions"],
    indexedFileNameExtensions: item["indexedFileNameExtensions"],
    failOnUnsupportedContentType: item["failOnUnsupportedContentType"],
    failOnUnprocessableDocument: item["failOnUnprocessableDocument"],
    indexStorageMetadataOnlyForOversizedDocuments:
      item["indexStorageMetadataOnlyForOversizedDocuments"],
    delimitedTextHeaders: item["delimitedTextHeaders"],
    delimitedTextDelimiter: item["delimitedTextDelimiter"],
    firstLineContainsHeaders: item["firstLineContainsHeaders"],
    markdownParsingSubmode: item["markdownParsingSubmode"],
    markdownHeaderDepth: item["markdownHeaderDepth"],
    documentRoot: item["documentRoot"],
    dataToExtract: item["dataToExtract"],
    imageAction: item["imageAction"],
    allowSkillsetToReadFileData: item["allowSkillsetToReadFileData"],
    pdfTextRotationAlgorithm: item["pdfTextRotationAlgorithm"],
    executionEnvironment: item["executionEnvironment"],
    queryTimeout: item["queryTimeout"],
  };
}

/**
 * Deserializes the wire form of the configuration bag.
 * NOTE(review): serializeRecord is called here with a second (list) argument, unlike the
 * single-argument use in the serializer above — presumably the two-argument form collects
 * every key NOT named in the list into `additionalProperties`; confirm against
 * serializeRecord's definition elsewhere in this package.
 */
export function indexingParametersConfigurationDeserializer(
  item: any,
): IndexingParametersConfiguration {
  return {
    additionalProperties: serializeRecord(item, [
      "parsingMode",
      "excludedFileNameExtensions",
      "indexedFileNameExtensions",
      "failOnUnsupportedContentType",
      "failOnUnprocessableDocument",
      "indexStorageMetadataOnlyForOversizedDocuments",
      "delimitedTextHeaders",
      "delimitedTextDelimiter",
      "firstLineContainsHeaders",
      "markdownParsingSubmode",
      "markdownHeaderDepth",
      "documentRoot",
      "dataToExtract",
      "imageAction",
      "allowSkillsetToReadFileData",
      "pdfTextRotationAlgorithm",
      "executionEnvironment",
      "queryTimeout",
    ]),
    parsingMode: item["parsingMode"],
    excludedFileNameExtensions: item["excludedFileNameExtensions"],
    indexedFileNameExtensions: item["indexedFileNameExtensions"],
    failOnUnsupportedContentType: item["failOnUnsupportedContentType"],
    failOnUnprocessableDocument: item["failOnUnprocessableDocument"],
    indexStorageMetadataOnlyForOversizedDocuments:
      item["indexStorageMetadataOnlyForOversizedDocuments"],
    delimitedTextHeaders: item["delimitedTextHeaders"],
    delimitedTextDelimiter: item["delimitedTextDelimiter"],
    firstLineContainsHeaders: item["firstLineContainsHeaders"],
    markdownParsingSubmode: item["markdownParsingSubmode"],
    markdownHeaderDepth: item["markdownHeaderDepth"],
    documentRoot: item["documentRoot"],
    dataToExtract: item["dataToExtract"],
    imageAction: item["imageAction"],
    allowSkillsetToReadFileData: item["allowSkillsetToReadFileData"],
    pdfTextRotationAlgorithm: item["pdfTextRotationAlgorithm"],
    executionEnvironment: item["executionEnvironment"],
    queryTimeout: item["queryTimeout"],
  };
}

/** Represents the parsing mode for indexing from an Azure blob data source. */
export enum KnownBlobIndexerParsingMode {
  /** Set to default for normal file processing. */
  Default = "default",
  /** Set to text to improve indexing performance on plain text files in blob storage. */
  Text = "text",
  /** Set to delimitedText when blobs are plain CSV files. */
  DelimitedText = "delimitedText",
  /** Set to json to extract structured content from JSON files. */
  Json = "json",
  /** Set to jsonArray to extract individual elements of a JSON array as separate documents. */
  JsonArray = "jsonArray",
  /** Set to jsonLines to extract individual JSON entities, separated by a new line, as separate documents. */
  JsonLines = "jsonLines",
  /** Set to markdown to extract content from markdown files. */
  Markdown = "markdown",
}

/**
 * Represents the parsing mode for indexing from an Azure blob data source. 
\
 * {@link KnownBlobIndexerParsingMode} can be used interchangeably with BlobIndexerParsingMode,
 * this enum contains the known values that the service supports.
 * ### Known values supported by the service
 * **default**: Set to default for normal file processing. \
 * **text**: Set to text to improve indexing performance on plain text files in blob storage. \
 * **delimitedText**: Set to delimitedText when blobs are plain CSV files. \
 * **json**: Set to json to extract structured content from JSON files. \
 * **jsonArray**: Set to jsonArray to extract individual elements of a JSON array as separate documents. \
 * **jsonLines**: Set to jsonLines to extract individual JSON entities, separated by a new line, as separate documents. \
 * **markdown**: Set to markdown to extract content from markdown files.
 */
export type BlobIndexerParsingMode = string;

/** Specifies the submode that will determine whether a markdown file will be parsed into exactly one search document or multiple search documents. Default is `oneToMany`. */
export enum KnownMarkdownParsingSubmode {
  /** Indicates that each section of the markdown file (up to a specified depth) will be parsed into individual search documents. This can result in a single markdown file producing multiple search documents. This is the default sub-mode. */
  OneToMany = "oneToMany",
  /** Indicates that each markdown file will be parsed into a single search document. */
  OneToOne = "oneToOne",
}

/**
 * Specifies the submode that will determine whether a markdown file will be parsed into exactly one search document or multiple search documents. Default is `oneToMany`. \
 * {@link KnownMarkdownParsingSubmode} can be used interchangeably with MarkdownParsingSubmode,
 * this enum contains the known values that the service supports.
 * ### Known values supported by the service
 * **oneToMany**: Indicates that each section of the markdown file (up to a specified depth) will be parsed into individual search documents. This can result in a single markdown file producing multiple search documents. This is the default sub-mode. \
 * **oneToOne**: Indicates that each markdown file will be parsed into a single search document.
 */
export type MarkdownParsingSubmode = string;

/** Specifies the max header depth that will be considered while grouping markdown content. Default is `h6`. */
export enum KnownMarkdownHeaderDepth {
  /** Indicates that headers up to a level of h1 will be considered while grouping markdown content. */
  H1 = "h1",
  /** Indicates that headers up to a level of h2 will be considered while grouping markdown content. */
  H2 = "h2",
  /** Indicates that headers up to a level of h3 will be considered while grouping markdown content. */
  H3 = "h3",
  /** Indicates that headers up to a level of h4 will be considered while grouping markdown content. */
  H4 = "h4",
  /** Indicates that headers up to a level of h5 will be considered while grouping markdown content. */
  H5 = "h5",
  /** Indicates that headers up to a level of h6 will be considered while grouping markdown content. This is the default. */
  H6 = "h6",
}

/**
 * Specifies the max header depth that will be considered while grouping markdown content. Default is `h6`. \
 * {@link KnownMarkdownHeaderDepth} can be used interchangeably with MarkdownHeaderDepth,
 * this enum contains the known values that the service supports.
 * ### Known values supported by the service
 * **h1**: Indicates that headers up to a level of h1 will be considered while grouping markdown content. \
 * **h2**: Indicates that headers up to a level of h2 will be considered while grouping markdown content. \
 * **h3**: Indicates that headers up to a level of h3 will be considered while grouping markdown content. \
 * **h4**: Indicates that headers up to a level of h4 will be considered while grouping markdown content. \
 * **h5**: Indicates that headers up to a level of h5 will be considered while grouping markdown content. \
 * **h6**: Indicates that headers up to a level of h6 will be considered while grouping markdown content. This is the default.
 */
export type MarkdownHeaderDepth = string;

/** Specifies the data to extract from Azure blob storage and tells the indexer which data to extract from image content when "imageAction" is set to a value other than "none". This applies to embedded image content in a .PDF or other application, or image files such as .jpg and .png, in Azure blobs. */
export enum KnownBlobIndexerDataToExtract {
  /** Indexes just the standard blob properties and user-specified metadata. */
  StorageMetadata = "storageMetadata",
  /** Extracts metadata provided by the Azure blob storage subsystem and the content-type specific metadata (for example, metadata unique to just .png files are indexed). */
  AllMetadata = "allMetadata",
  /** Extracts all metadata and textual content from each blob. */
  ContentAndMetadata = "contentAndMetadata",
}

/**
 * Specifies the data to extract from Azure blob storage and tells the indexer which data to extract from image content when "imageAction" is set to a value other than "none". This applies to embedded image content in a .PDF or other application, or image files such as .jpg and .png, in Azure blobs. \
 * {@link KnownBlobIndexerDataToExtract} can be used interchangeably with BlobIndexerDataToExtract,
 * this enum contains the known values that the service supports.
 * ### Known values supported by the service
 * **storageMetadata**: Indexes just the standard blob properties and user-specified metadata. \
 * **allMetadata**: Extracts metadata provided by the Azure blob storage subsystem and the content-type specific metadata (for example, metadata unique to just .png files are indexed). \
 * **contentAndMetadata**: Extracts all metadata and textual content from each blob.
 */
export type BlobIndexerDataToExtract = string;

/** Determines how to process embedded images and image files in Azure blob storage. Setting the "imageAction" configuration to any value other than "none" requires that a skillset also be attached to that indexer. */
export enum KnownBlobIndexerImageAction {
  /** Ignores embedded images or image files in the data set. This is the default. */
  None = "none",
  /** Extracts text from images (for example, the word "STOP" from a traffic stop sign), and embeds it into the content field. This action requires that "dataToExtract" is set to "contentAndMetadata". A normalized image refers to additional processing resulting in uniform image output, sized and rotated to promote consistent rendering when you include images in visual search results. This information is generated for each image when you use this option. */
  GenerateNormalizedImages = "generateNormalizedImages",
  /** Extracts text from images (for example, the word "STOP" from a traffic stop sign), and embeds it into the content field, but treats PDF files differently in that each page will be rendered as an image and normalized accordingly, instead of extracting embedded images. Non-PDF file types will be treated the same as if "generateNormalizedImages" was set. */
  GenerateNormalizedImagePerPage = "generateNormalizedImagePerPage",
}

/**
 * Determines how to process embedded images and image files in Azure blob storage. Setting the "imageAction" configuration to any value other than "none" requires that a skillset also be attached to that indexer. \
 * {@link KnownBlobIndexerImageAction} can be used interchangeably with BlobIndexerImageAction,
 * this enum contains the known values that the service supports.
 * ### Known values supported by the service
 * **none**: Ignores embedded images or image files in the data set. This is the default. \
 * **generateNormalizedImages**: Extracts text from images (for example, the word "STOP" from a traffic stop sign), and embeds it into the content field. This action requires that "dataToExtract" is set to "contentAndMetadata". A normalized image refers to additional processing resulting in uniform image output, sized and rotated to promote consistent rendering when you include images in visual search results. This information is generated for each image when you use this option. \
 * **generateNormalizedImagePerPage**: Extracts text from images (for example, the word "STOP" from a traffic stop sign), and embeds it into the content field, but treats PDF files differently in that each page will be rendered as an image and normalized accordingly, instead of extracting embedded images. Non-PDF file types will be treated the same as if "generateNormalizedImages" was set.
 */
export type BlobIndexerImageAction = string;

/** Determines algorithm for text extraction from PDF files in Azure blob storage. */
export enum KnownBlobIndexerPDFTextRotationAlgorithm {
  /** Leverages normal text extraction. This is the default. */
  None = "none",
  /** May produce better and more readable text extraction from PDF files that have rotated text within them. Note that there may be a small performance speed impact when this parameter is used. This parameter only applies to PDF files, and only to PDFs with embedded text. If the rotated text appears within an embedded image in the PDF, this parameter does not apply. */
  DetectAngles = "detectAngles",
}

/**
 * Determines algorithm for text extraction from PDF files in Azure blob storage. \
 * {@link KnownBlobIndexerPDFTextRotationAlgorithm} can be used interchangeably with BlobIndexerPDFTextRotationAlgorithm,
 * this enum contains the known values that the service supports.
 * ### Known values supported by the service
 * **none**: Leverages normal text extraction. This is the default. \
 * **detectAngles**: May produce better and more readable text extraction from PDF files that have rotated text within them. Note that there may be a small performance speed impact when this parameter is used. This parameter only applies to PDF files, and only to PDFs with embedded text. If the rotated text appears within an embedded image in the PDF, this parameter does not apply.
 */
export type BlobIndexerPDFTextRotationAlgorithm = string;

/** Specifies the environment in which the indexer should execute. */
export enum KnownIndexerExecutionEnvironment {
  /** Indicates that the search service can determine where the indexer should execute. This is the default environment when nothing is specified and is the recommended value. */
  Standard = "standard",
  /** Indicates that the indexer should run with the environment provisioned specifically for the search service. This should only be specified as the execution environment if the indexer needs to access resources securely over shared private link resources. */
  Private = "private",
}

/**
 * Specifies the environment in which the indexer should execute. 
\\\n * {@link KnownIndexerExecutionEnvironment} can be used interchangeably with IndexerExecutionEnvironment,\n * this enum contains the known values that the service supports.\n * ### Known values supported by the service\n * **standard**: Indicates that the search service can determine where the indexer should execute. This is the default environment when nothing is specified and is the recommended value. \\\n * **private**: Indicates that the indexer should run with the environment provisioned specifically for the search service. This should only be specified as the execution environment if the indexer needs to access resources securely over shared private link resources.\n */\nexport type IndexerExecutionEnvironment = string;\n\nexport function fieldMappingArraySerializer(result: Array<FieldMapping>): any[] {\n return result.map((item) => {\n return fieldMappingSerializer(item);\n });\n}\n\nexport function fieldMappingArrayDeserializer(result: Array<FieldMapping>): any[] {\n return result.map((item) => {\n return fieldMappingDeserializer(item);\n });\n}\n\n/** Defines a mapping between a field in a data source and a target field in an index. */\nexport interface FieldMapping {\n /** The name of the field in the data source. */\n sourceFieldName: string;\n /** The name of the target field in the index. Same as the source field name by default. */\n targetFieldName?: string;\n /** A function to apply to each source field value before indexing. */\n mappingFunction?: FieldMappingFunction;\n}\n\nexport function fieldMappingSerializer(item: FieldMapping): any {\n return {\n sourceFieldName: item[\"sourceFieldName\"],\n targetFieldName: item[\"targetFieldName\"],\n mappingFunction: !item[\"mappingFunction\"]\n ? 
item[\"mappingFunction\"]\n : fieldMappingFunctionSerializer(item[\"mappingFunction\"]),\n };\n}\n\nexport function fieldMappingDeserializer(item: any): FieldMapping {\n return {\n sourceFieldName: item[\"sourceFieldName\"],\n targetFieldName: item[\"targetFieldName\"],\n mappingFunction: !item[\"mappingFunction\"]\n ? item[\"mappingFunction\"]\n : fieldMappingFunctionDeserializer(item[\"mappingFunction\"]),\n };\n}\n\n/** Represents a function that transforms a value from a data source before indexing. */\nexport interface FieldMappingFunction {\n /** The name of the field mapping function. */\n name: string;\n /** A dictionary of parameter name/value pairs to pass to the function. Each value must be of a primitive type. */\n parameters?: Record<string, any>;\n}\n\nexport function fieldMappingFunctionSerializer(item: FieldMappingFunction): any {\n return { name: item[\"name\"], parameters: item[\"parameters\"] };\n}\n\nexport function fieldMappingFunctionDeserializer(item: any): FieldMappingFunction {\n return {\n name: item[\"name\"],\n parameters: !item[\"parameters\"]\n ? item[\"parameters\"]\n : Object.fromEntries(\n Object.entries(item[\"parameters\"]).map(([k1, p1]: [string, any]) => [k1, p1]),\n ),\n };\n}\n\n/** Response from a List Indexers request. If successful, it includes the full definitions of all indexers. */\nexport interface ListIndexersResult {\n /** The indexers in the Search service. 
*/
  readonly indexers: SearchIndexer[];
}

/** Deserializes a List Indexers response; note the wire payload carries the items under the "value" key. */
export function listIndexersResultDeserializer(item: any): ListIndexersResult {
  return {
    indexers: searchIndexerArrayDeserializer(item["value"]),
  };
}

/** Serializes each SearchIndexer in the array to its wire form. */
export function searchIndexerArraySerializer(result: Array<SearchIndexer>): any[] {
  return result.map((item) => {
    return searchIndexerSerializer(item);
  });
}

/** Deserializes each element of the array into a SearchIndexer. */
export function searchIndexerArrayDeserializer(result: Array<SearchIndexer>): any[] {
  return result.map((item) => {
    return searchIndexerDeserializer(item);
  });
}

/** Represents the current status and execution history of an indexer. */
export interface SearchIndexerStatus {
  /** The name of the indexer. */
  readonly name: string;
  /** Overall indexer status. */
  readonly status: IndexerStatus;
  /** The result of the most recent or an in-progress indexer execution. */
  readonly lastResult?: IndexerExecutionResult;
  /** History of the recent indexer executions, sorted in reverse chronological order. */
  readonly executionHistory: IndexerExecutionResult[];
  /** The execution limits for the indexer. */
  readonly limits: SearchIndexerLimits;
}

/** Deserializes the wire form of an indexer status document; a falsy lastResult passes through unchanged. */
export function searchIndexerStatusDeserializer(item: any): SearchIndexerStatus {
  return {
    name: item["name"],
    status: item["status"],
    lastResult: !item["lastResult"]
      ? item["lastResult"]
      : indexerExecutionResultDeserializer(item["lastResult"]),
    executionHistory: indexerExecutionResultArrayDeserializer(item["executionHistory"]),
    limits: searchIndexerLimitsDeserializer(item["limits"]),
  };
}

/** Represents the overall indexer status. */
export type IndexerStatus = "unknown" | "error" | "running";

/** Represents the result of an individual indexer execution. */
export interface IndexerExecutionResult {
  /** The outcome of this indexer execution. */
  readonly status: IndexerExecutionStatus;
  /** The error message indicating the top-level error, if any. 
*/
  readonly errorMessage?: string;
  /** The start time of this indexer execution. */
  readonly startTime?: Date;
  /** The end time of this indexer execution, if the execution has already completed. */
  readonly endTime?: Date;
  /** The item-level indexing errors. */
  readonly errors: SearchIndexerError[];
  /** The item-level indexing warnings. */
  readonly warnings: SearchIndexerWarning[];
  /** The number of items that were processed during this indexer execution. This includes both successfully processed items and items where indexing was attempted but failed. */
  readonly itemCount: number;
  /** The number of items that failed to be indexed during this indexer execution. */
  readonly failedItemCount: number;
  /** Change tracking state with which an indexer execution started. */
  readonly initialTrackingState?: string;
  /** Change tracking state with which an indexer execution finished. */
  readonly finalTrackingState?: string;
}

/**
 * Deserializes the wire form of an indexer execution result.
 * Wire-name remappings visible below: "itemsProcessed" -> itemCount and "itemsFailed" -> failedItemCount.
 * startTime/endTime are converted from their wire values to Date; falsy values pass through unchanged.
 */
export function indexerExecutionResultDeserializer(item: any): IndexerExecutionResult {
  return {
    status: item["status"],
    errorMessage: item["errorMessage"],
    startTime: !item["startTime"] ? item["startTime"] : new Date(item["startTime"]),
    endTime: !item["endTime"] ? item["endTime"] : new Date(item["endTime"]),
    errors: searchIndexerErrorArrayDeserializer(item["errors"]),
    warnings: searchIndexerWarningArrayDeserializer(item["warnings"]),
    itemCount: item["itemsProcessed"],
    failedItemCount: item["itemsFailed"],
    initialTrackingState: item["initialTrackingState"],
    finalTrackingState: item["finalTrackingState"],
  };
}

/** Represents the status of an individual indexer execution. 
*/\nexport type IndexerExecutionStatus = \"transientFailure\" | \"success\" | \"inProgress\" | \"reset\";\n\nexport function searchIndexerErrorArrayDeserializer(result: Array<SearchIndexerError>): any[] {\n return result.map((item) => {\n return searchIndexerErrorDeserializer(item);\n });\n}\n\n/** Represents an item- or document-level indexing error. */\nexport interface SearchIndexerError {\n /** The key of the item for which indexing failed. */\n readonly key?: string;\n /** The message describing the error that occurred while processing the item. */\n readonly errorMessage: string;\n /** The status code indicating why the indexing operation failed. Possible values include: 400 for a malformed input document, 404 for document not found, 409 for a version conflict, 422 when the index is temporarily unavailable, or 503 for when the service is too busy. */\n readonly statusCode: number;\n /** The name of the source at which the error originated. For example, this could refer to a particular skill in the attached skillset. This may not be always available. */\n readonly name?: string;\n /** Additional, verbose details about the error to assist in debugging the indexer. This may not be always available. */\n readonly details?: string;\n /** A link to a troubleshooting guide for these classes of errors. This may not be always available. */\n readonly documentationLink?: string;\n}\n\nexport function searchIndexerErrorDeserializer(item: any): SearchIndexerError {\n return {\n key: item[\"key\"],\n errorMessage: item[\"errorMessage\"],\n statusCode: item[\"statusCode\"],\n name: item[\"name\"],\n details: item[\"details\"],\n documentationLink: item[\"documentationLink\"],\n };\n}\n\nexport function searchIndexerWarningArrayDeserializer(result: Array<SearchIndexerWarning>): any[] {\n return result.map((item) => {\n return searchIndexerWarningDeserializer(item);\n });\n}\n\n/** Represents an item-level warning. 
*/\nexport interface SearchIndexerWarning {\n /** The key of the item which generated a warning. */\n readonly key?: string;\n /** The message describing the warning that occurred while processing the item. */\n readonly message: string;\n /** The name of the source at which the warning originated. For example, this could refer to a particular skill in the attached skillset. This may not be always available. */\n readonly name?: string;\n /** Additional, verbose details about the warning to assist in debugging the indexer. This may not be always available. */\n readonly details?: string;\n /** A link to a troubleshooting guide for these classes of warnings. This may not be always available. */\n readonly documentationLink?: string;\n}\n\nexport function searchIndexerWarningDeserializer(item: any): SearchIndexerWarning {\n return {\n key: item[\"key\"],\n message: item[\"message\"],\n name: item[\"name\"],\n details: item[\"details\"],\n documentationLink: item[\"documentationLink\"],\n };\n}\n\nexport function indexerExecutionResultArrayDeserializer(\n result: Array<IndexerExecutionResult>,\n): any[] {\n return result.map((item) => {\n return indexerExecutionResultDeserializer(item);\n });\n}\n\n/** Represents the limits that can be applied to an indexer. */\nexport interface SearchIndexerLimits {\n /** The maximum duration that the indexer is permitted to run for one execution. */\n readonly maxRunTime?: string;\n /** The maximum size of a document, in bytes, which will be considered valid for indexing. */\n readonly maxDocumentExtractionSize?: number;\n /** The maximum number of characters that will be extracted from a document picked up for indexing. 
*/\n readonly maxDocumentContentCharactersToExtract?: number;\n}\n\nexport function searchIndexerLimitsDeserializer(item: any): SearchIndexerLimits {\n return {\n maxRunTime: item[\"maxRunTime\"],\n maxDocumentExtractionSize: item[\"maxDocumentExtractionSize\"],\n maxDocumentContentCharactersToExtract: item[\"maxDocumentContentCharactersToExtract\"],\n };\n}\n\n/** A list of skills. */\nexport interface SearchIndexerSkillset {\n /** The name of the skillset. */\n name: string;\n /** The description of the skillset. */\n description?: string;\n /** A list of skills in the skillset. */\n skills: SearchIndexerSkillUnion[];\n /** Details about the Azure AI service to be used when running skills. */\n cognitiveServicesAccount?: CognitiveServicesAccountUnion;\n /** Definition of additional projections to Azure blob, table, or files, of enriched data. */\n knowledgeStore?: SearchIndexerKnowledgeStore;\n /** Definition of additional projections to secondary search index(es). */\n indexProjection?: SearchIndexerIndexProjection;\n /** The ETag of the skillset. */\n eTag?: string;\n /** A description of an encryption key that you create in Azure Key Vault. This key is used to provide an additional level of encryption-at-rest for your skillset definition when you want full assurance that no one, not even Microsoft, can decrypt your skillset definition. Once you have encrypted your skillset definition, it will always remain encrypted. The search service will ignore attempts to set this property to null. You can change this property as needed if you want to rotate your encryption key; Your skillset definition will be unaffected. Encryption with customer-managed keys is not available for free search services, and is only available for paid services created on or after January 1, 2019. 
*/\n encryptionKey?: SearchResourceEncryptionKey;\n}\n\nexport function searchIndexerSkillsetSerializer(item: SearchIndexerSkillset): any {\n return {\n name: item[\"name\"],\n description: item[\"description\"],\n skills: searchIndexerSkillUnionArraySerializer(item[\"skills\"]),\n cognitiveServices: !item[\"cognitiveServicesAccount\"]\n ? item[\"cognitiveServicesAccount\"]\n : cognitiveServicesAccountUnionSerializer(item[\"cognitiveServicesAccount\"]),\n knowledgeStore: !item[\"knowledgeStore\"]\n ? item[\"knowledgeStore\"]\n : searchIndexerKnowledgeStoreSerializer(item[\"knowledgeStore\"]),\n indexProjections: !item[\"indexProjection\"]\n ? item[\"indexProjection\"]\n : searchIndexerIndexProjectionSerializer(item[\"indexProjection\"]),\n \"@odata.etag\": item[\"eTag\"],\n encryptionKey: !item[\"encryptionKey\"]\n ? item[\"encryptionKey\"]\n : searchResourceEncryptionKeySerializer(item[\"encryptionKey\"]),\n };\n}\n\nexport function searchIndexerSkillsetDeserializer(item: any): SearchIndexerSkillset {\n return {\n name: item[\"name\"],\n description: item[\"description\"],\n skills: searchIndexerSkillUnionArrayDeserializer(item[\"skills\"]),\n cognitiveServicesAccount: !item[\"cognitiveServices\"]\n ? item[\"cognitiveServices\"]\n : cognitiveServicesAccountUnionDeserializer(item[\"cognitiveServices\"]),\n knowledgeStore: !item[\"knowledgeStore\"]\n ? item[\"knowledgeStore\"]\n : searchIndexerKnowledgeStoreDeserializer(item[\"knowledgeStore\"]),\n indexProjection: !item[\"indexProjections\"]\n ? item[\"indexProjections\"]\n : searchIndexerIndexProjectionDeserializer(item[\"indexProjections\"]),\n eTag: item[\"@odata.etag\"],\n encryptionKey: !item[\"encryptionKey\"]\n ? 
item[\"encryptionKey\"]\n : searchResourceEncryptionKeyDeserializer(item[\"encryptionKey\"]),\n };\n}\n\nexport function searchIndexerSkillUnionArraySerializer(\n result: Array<SearchIndexerSkillUnion>,\n): any[] {\n return result.map((item) => {\n return searchIndexerSkillUnionSerializer(item);\n });\n}\n\nexport function searchIndexerSkillUnionArrayDeserializer(\n result: Array<SearchIndexerSkillUnion>,\n): any[] {\n return result.map((item) => {\n return searchIndexerSkillUnionDeserializer(item);\n });\n}\n\n/** Base type for skills. */\nexport interface SearchIndexerSkill {\n /** The discriminator for derived types. */\n /** The discriminator possible values: #Microsoft.Skills.Util.ConditionalSkill, #Microsoft.Skills.Text.KeyPhraseExtractionSkill, #Microsoft.Skills.Vision.OcrSkill, #Microsoft.Skills.Vision.ImageAnalysisSkill, #Microsoft.Skills.Text.LanguageDetectionSkill, #Microsoft.Skills.Util.ShaperSkill, #Microsoft.Skills.Text.MergeSkill, #Microsoft.Skills.Text.V3.SentimentSkill, #Microsoft.Skills.Text.V3.EntityLinkingSkill, #Microsoft.Skills.Text.V3.EntityRecognitionSkill, #Microsoft.Skills.Text.PIIDetectionSkill, #Microsoft.Skills.Text.SplitSkill, #Microsoft.Skills.Text.CustomEntityLookupSkill, #Microsoft.Skills.Text.TranslationSkill, #Microsoft.Skills.Util.DocumentExtractionSkill, #Microsoft.Skills.Util.DocumentIntelligenceLayoutSkill, #Microsoft.Skills.Custom.WebApiSkill, #Microsoft.Skills.Text.AzureOpenAIEmbeddingSkill, #Microsoft.Skills.Util.ContentUnderstandingSkill, #Microsoft.Skills.Custom.ChatCompletionSkill */\n odatatype: string;\n /** The name of the skill which uniquely identifies it within the skillset. A skill with no name defined will be given a default name of its 1-based index in the skills array, prefixed with the character '#'. */\n name?: string;\n /** The description of the skill which describes the inputs, outputs, and usage of the skill. 
*/\n description?: string;\n /** Represents the level at which operations take place, such as the document root or document content (for example, /document or /document/content). The default is /document. */\n context?: string;\n /** Inputs of the skills could be a column in the source data set, or the output of an upstream skill. */\n inputs: InputFieldMappingEntry[];\n /** The output of a skill is either a field in a search index, or a value that can be consumed as an input by another skill. */\n outputs: OutputFieldMappingEntry[];\n}\n\nexport function searchIndexerSkillSerializer(item: SearchIndexerSkill): any {\n return {\n \"@odata.type\": item[\"odatatype\"],\n name: item[\"name\"],\n description: item[\"description\"],\n context: item[\"context\"],\n inputs: inputFieldMappingEntryArraySerializer(item[\"inputs\"]),\n outputs: outputFieldMappingEntryArraySerializer(item[\"outputs\"]),\n };\n}\n\nexport function searchIndexerSkillDeserializer(item: any): SearchIndexerSkill {\n return {\n odatatype: item[\"@odata.type\"],\n name: item[\"name\"],\n description: item[\"description\"],\n context: item[\"context\"],\n inputs: inputFieldMappingEntryArrayDeserializer(item[\"inputs\"]),\n outputs: outputFieldMappingEntryArrayDeserializer(item[\"outputs\"]),\n };\n}\n\n/** Alias for SearchIndexerSkillUnion */\nexport type SearchIndexerSkillUnion =\n | ConditionalSkill\n | KeyPhraseExtractionSkill\n | OcrSkill\n | ImageAnalysisSkill\n | LanguageDetectionSkill\n | ShaperSkill\n | MergeSkill\n | SentimentSkillV3\n | EntityLinkingSkill\n | EntityRecognitionSkillV3\n | PIIDetectionSkill\n | SplitSkill\n | CustomEntityLookupSkill\n | TextTranslationSkill\n | DocumentExtractionSkill\n | DocumentIntelligenceLayoutSkill\n | WebApiSkill\n | AzureOpenAIEmbeddingSkill\n | ContentUnderstandingSkill\n | ChatCompletionSkill\n | SearchIndexerSkill;\n\nexport function searchIndexerSkillUnionSerializer(item: SearchIndexerSkillUnion): any {\n switch (item.odatatype) {\n case 
\"#Microsoft.Skills.Util.ConditionalSkill\":\n return conditionalSkillSerializer(item as ConditionalSkill);\n\n case \"#Microsoft.Skills.Text.KeyPhraseExtractionSkill\":\n return keyPhraseExtractionSkillSerializer(item as KeyPhraseExtractionSkill);\n\n case \"#Microsoft.Skills.Vision.OcrSkill\":\n return ocrSkillSerializer(item as OcrSkill);\n\n case \"#Microsoft.Skills.Vision.ImageAnalysisSkill\":\n return imageAnalysisSkillSerializer(item as ImageAnalysisSkill);\n\n case \"#Microsoft.Skills.Text.LanguageDetectionSkill\":\n return languageDetectionSkillSerializer(item as LanguageDetectionSkill);\n\n case \"#Microsoft.Skills.Util.ShaperSkill\":\n return shaperSkillSerializer(item as ShaperSkill);\n\n case \"#Microsoft.Skills.Text.MergeSkill\":\n return mergeSkillSerializer(item as MergeSkill);\n\n case \"#Microsoft.Skills.Text.V3.SentimentSkill\":\n return sentimentSkillV3Serializer(item as SentimentSkillV3);\n\n case \"#Microsoft.Skills.Text.V3.EntityLinkingSkill\":\n return entityLinkingSkillSerializer(item as EntityLinkingSkill);\n\n case \"#Microsoft.Skills.Text.V3.EntityRecognitionSkill\":\n return entityRecognitionSkillV3Serializer(item as EntityRecognitionSkillV3);\n\n case \"#Microsoft.Skills.Text.PIIDetectionSkill\":\n return piiDetectionSkillSerializer(item as PIIDetectionSkill);\n\n case \"#Microsoft.Skills.Text.SplitSkill\":\n return splitSkillSerializer(item as SplitSkill);\n\n case \"#Microsoft.Skills.Text.CustomEntityLookupSkill\":\n return customEntityLookupSkillSerializer(item as CustomEntityLookupSkill);\n\n case \"#Microsoft.Skills.Text.TranslationSkill\":\n return textTranslationSkillSerializer(item as TextTranslationSkill);\n\n case \"#Microsoft.Skills.Util.DocumentExtractionSkill\":\n return documentExtractionSkillSerializer(item as DocumentExtractionSkill);\n\n case \"#Microsoft.Skills.Util.DocumentIntelligenceLayoutSkill\":\n return documentIntelligenceLayoutSkillSerializer(item as DocumentIntelligenceLayoutSkill);\n\n case 
\"#Microsoft.Skills.Custom.WebApiSkill\":\n return webApiSkillSerializer(item as WebApiSkill);\n\n case \"#Microsoft.Skills.Text.AzureOpenAIEmbeddingSkill\":\n return azureOpenAIEmbeddingSkillSerializer(item as AzureOpenAIEmbeddingSkill);\n\n case \"#Microsoft.Skills.Util.ContentUnderstandingSkill\":\n return contentUnderstandingSkillSerializer(item as ContentUnderstandingSkill);\n\n case \"#Microsoft.Skills.Custom.ChatCompletionSkill\":\n return chatCompletionSkillSerializer(item as ChatCompletionSkill);\n\n default:\n return searchIndexerSkillSerializer(item);\n }\n}\n\nexport function searchIndexerSkillUnionDeserializer(item: any): SearchIndexerSkillUnion {\n switch (item[\"@odata.type\"]) {\n case \"#Microsoft.Skills.Util.ConditionalSkill\":\n return conditionalSkillDeserializer(item as ConditionalSkill);\n\n case \"#Microsoft.Skills.Text.KeyPhraseExtractionSkill\":\n return keyPhraseExtractionSkillDeserializer(item as KeyPhraseExtractionSkill);\n\n case \"#Microsoft.Skills.Vision.OcrSkill\":\n return ocrSkillDeserializer(item as OcrSkill);\n\n case \"#Microsoft.Skills.Vision.ImageAnalysisSkill\":\n return imageAnalysisSkillDeserializer(item as ImageAnalysisSkill);\n\n case \"#Microsoft.Skills.Text.LanguageDetectionSkill\":\n return languageDetectionSkillDeserializer(item as LanguageDetectionSkill);\n\n case \"#Microsoft.Skills.Util.ShaperSkill\":\n return shaperSkillDeserializer(item as ShaperSkill);\n\n case \"#Microsoft.Skills.Text.MergeSkill\":\n return mergeSkillDeserializer(item as MergeSkill);\n\n case \"#Microsoft.Skills.Text.V3.SentimentSkill\":\n return sentimentSkillV3Deserializer(item as SentimentSkillV3);\n\n case \"#Microsoft.Skills.Text.V3.EntityLinkingSkill\":\n return entityLinkingSkillDeserializer(item as EntityLinkingSkill);\n\n case \"#Microsoft.Skills.Text.V3.EntityRecognitionSkill\":\n return entityRecognitionSkillV3Deserializer(item as EntityRecognitionSkillV3);\n\n case \"#Microsoft.Skills.Text.PIIDetectionSkill\":\n return 
piiDetectionSkillDeserializer(item as PIIDetectionSkill);\n\n case \"#Microsoft.Skills.Text.SplitSkill\":\n return splitSkillDeserializer(item as SplitSkill);\n\n case \"#Microsoft.Skills.Text.CustomEntityLookupSkill\":\n return customEntityLookupSkillDeserializer(item as CustomEntityLookupSkill);\n\n case \"#Microsoft.Skills.Text.TranslationSkill\":\n return textTranslationSkillDeserializer(item as TextTranslationSkill);\n\n case \"#Microsoft.Skills.Util.DocumentExtractionSkill\":\n return documentExtractionSkillDeserializer(item as DocumentExtractionSkill);\n\n case \"#Microsoft.Skills.Util.DocumentIntelligenceLayoutSkill\":\n return documentIntelligenceLayoutSkillDeserializer(item as DocumentIntelligenceLayoutSkill);\n\n case \"#Microsoft.Skills.Custom.WebApiSkill\":\n return webApiSkillDeserializer(item as WebApiSkill);\n\n case \"#Microsoft.Skills.Text.AzureOpenAIEmbeddingSkill\":\n return azureOpenAIEmbeddingSkillDeserializer(item as AzureOpenAIEmbeddingSkill);\n\n case \"#Microsoft.Skills.Util.ContentUnderstandingSkill\":\n return contentUnderstandingSkillDeserializer(item as ContentUnderstandingSkill);\n\n case \"#Microsoft.Skills.Custom.ChatCompletionSkill\":\n return chatCompletionSkillDeserializer(item as ChatCompletionSkill);\n\n default:\n return searchIndexerSkillDeserializer(item);\n }\n}\n\nexport function inputFieldMappingEntryArraySerializer(\n result: Array<InputFieldMappingEntry>,\n): any[] {\n return result.map((item) => {\n return inputFieldMappingEntrySerializer(item);\n });\n}\n\nexport function inputFieldMappingEntryArrayDeserializer(\n result: Array<InputFieldMappingEntry>,\n): any[] {\n return result.map((item) => {\n return inputFieldMappingEntryDeserializer(item);\n });\n}\n\n/** Input field mapping for a skill. */\nexport interface InputFieldMappingEntry {\n /** The name of the input. */\n name: string;\n /** The source of the input. */\n source?: string;\n /** The source context used for selecting recursive inputs. 
*/\n sourceContext?: string;\n /** The recursive inputs used when creating a complex type. */\n inputs?: InputFieldMappingEntry[];\n}\n\nexport function inputFieldMappingEntrySerializer(item: InputFieldMappingEntry): any {\n return {\n name: item[\"name\"],\n source: item[\"source\"],\n sourceContext: item[\"sourceContext\"],\n inputs: !item[\"inputs\"]\n ? item[\"inputs\"]\n : inputFieldMappingEntryArraySerializer(item[\"inputs\"]),\n };\n}\n\nexport function inputFieldMappingEntryDeserializer(item: any): InputFieldMappingEntry {\n return {\n name: item[\"name\"],\n source: item[\"source\"],\n sourceContext: item[\"sourceContext\"],\n inputs: !item[\"inputs\"]\n ? item[\"inputs\"]\n : inputFieldMappingEntryArrayDeserializer(item[\"inputs\"]),\n };\n}\n\nexport function outputFieldMappingEntryArraySerializer(\n result: Array<OutputFieldMappingEntry>,\n): any[] {\n return result.map((item) => {\n return outputFieldMappingEntrySerializer(item);\n });\n}\n\nexport function outputFieldMappingEntryArrayDeserializer(\n result: Array<OutputFieldMappingEntry>,\n): any[] {\n return result.map((item) => {\n return outputFieldMappingEntryDeserializer(item);\n });\n}\n\n/** Output field mapping for a skill. */\nexport interface OutputFieldMappingEntry {\n /** The name of the output defined by the skill. */\n name: string;\n /** The target name of the output. It is optional and default to name. */\n targetName?: string;\n}\n\nexport function outputFieldMappingEntrySerializer(item: OutputFieldMappingEntry): any {\n return { name: item[\"name\"], targetName: item[\"targetName\"] };\n}\n\nexport function outputFieldMappingEntryDeserializer(item: any): OutputFieldMappingEntry {\n return {\n name: item[\"name\"],\n targetName: item[\"targetName\"],\n };\n}\n\n/** A skill that enables scenarios that require a Boolean operation to determine the data to assign to an output. 
*/\nexport interface ConditionalSkill extends SearchIndexerSkill {\n /** A URI fragment specifying the type of skill. */\n odatatype: \"#Microsoft.Skills.Util.ConditionalSkill\";\n}\n\nexport function conditionalSkillSerializer(item: ConditionalSkill): any {\n return {\n \"@odata.type\": item[\"odatatype\"],\n name: item[\"name\"],\n description: item[\"description\"],\n context: item[\"context\"],\n inputs: inputFieldMappingEntryArraySerializer(item[\"inputs\"]),\n outputs: outputFieldMappingEntryArraySerializer(item[\"outputs\"]),\n };\n}\n\nexport function conditionalSkillDeserializer(item: any): ConditionalSkill {\n return {\n odatatype: item[\"@odata.type\"],\n name: item[\"name\"],\n description: item[\"description\"],\n context: item[\"context\"],\n inputs: inputFieldMappingEntryArrayDeserializer(item[\"inputs\"]),\n outputs: outputFieldMappingEntryArrayDeserializer(item[\"outputs\"]),\n };\n}\n\n/** A skill that uses text analytics for key phrase extraction. */\nexport interface KeyPhraseExtractionSkill extends SearchIndexerSkill {\n /** A value indicating which language code to use. Default is `en`. */\n defaultLanguageCode?: KeyPhraseExtractionSkillLanguage;\n /** A number indicating how many key phrases to return. If absent, all identified key phrases will be returned. */\n maxKeyPhraseCount?: number;\n /** The version of the model to use when calling the Text Analytics service. It will default to the latest available when not specified. We recommend you do not specify this value unless absolutely necessary. */\n modelVersion?: string;\n /** A URI fragment specifying the type of skill. 
*/\n odatatype: \"#Microsoft.Skills.Text.KeyPhraseExtractionSkill\";\n}\n\nexport function keyPhraseExtractionSkillSerializer(item: KeyPhraseExtractionSkill): any {\n return {\n \"@odata.type\": item[\"odatatype\"],\n name: item[\"name\"],\n description: item[\"description\"],\n context: item[\"context\"],\n inputs: inputFieldMappingEntryArraySerializer(item[\"inputs\"]),\n outputs: outputFieldMappingEntryArraySerializer(item[\"outputs\"]),\n defaultLanguageCode: item[\"defaultLanguageCode\"],\n maxKeyPhraseCount: item[\"maxKeyPhraseCount\"],\n modelVersion: item[\"modelVersion\"],\n };\n}\n\nexport function keyPhraseExtractionSkillDeserializer(item: any): KeyPhraseExtractionSkill {\n return {\n odatatype: item[\"@odata.type\"],\n name: item[\"name\"],\n description: item[\"description\"],\n context: item[\"context\"],\n inputs: inputFieldMappingEntryArrayDeserializer(item[\"inputs\"]),\n outputs: outputFieldMappingEntryArrayDeserializer(item[\"outputs\"]),\n defaultLanguageCode: item[\"defaultLanguageCode\"],\n maxKeyPhraseCount: item[\"maxKeyPhraseCount\"],\n modelVersion: item[\"modelVersion\"],\n };\n}\n\n/** The language codes supported for input text by KeyPhraseExtractionSkill. */\nexport enum KnownKeyPhraseExtractionSkillLanguage {\n /** Danish */\n Da = \"da\",\n /** Dutch */\n Nl = \"nl\",\n /** English */\n En = \"en\",\n /** Finnish */\n Fi = \"fi\",\n /** French */\n Fr = \"fr\",\n /** German */\n De = \"de\",\n /** Italian */\n It = \"it\",\n /** Japanese */\n Ja = \"ja\",\n /** Korean */\n Ko = \"ko\",\n /** Norwegian (Bokmaal) */\n No = \"no\",\n /** Polish */\n Pl = \"pl\",\n /** Portuguese (Portugal) */\n PtPT = \"pt-PT\",\n /** Portuguese (Brazil) */\n PtBR = \"pt-BR\",\n /** Russian */\n Ru = \"ru\",\n /** Spanish */\n Es = \"es\",\n /** Swedish */\n Sv = \"sv\",\n}\n\n/**\n * The language codes supported for input text by KeyPhraseExtractionSkill. 
\\\n * {@link KnownKeyPhraseExtractionSkillLanguage} can be used interchangeably with KeyPhraseExtractionSkillLanguage,\n * this enum contains the known values that the service supports.\n * ### Known values supported by the service\n * **da**: Danish \\\n * **nl**: Dutch \\\n * **en**: English \\\n * **fi**: Finnish \\\n * **fr**: French \\\n * **de**: German \\\n * **it**: Italian \\\n * **ja**: Japanese \\\n * **ko**: Korean \\\n * **no**: Norwegian (Bokmaal) \\\n * **pl**: Polish \\\n * **pt-PT**: Portuguese (Portugal) \\\n * **pt-BR**: Portuguese (Brazil) \\\n * **ru**: Russian \\\n * **es**: Spanish \\\n * **sv**: Swedish\n */\nexport type KeyPhraseExtractionSkillLanguage = string;\n\n/** A skill that extracts text from image files. */\nexport interface OcrSkill extends SearchIndexerSkill {\n /** A value indicating which language code to use. Default is `en`. */\n defaultLanguageCode?: OcrSkillLanguage;\n /** A value indicating to turn orientation detection on or not. Default is false. */\n shouldDetectOrientation?: boolean;\n /** Defines the sequence of characters to use between the lines of text recognized by the OCR skill. The default value is \"space\". */\n lineEnding?: OcrLineEnding;\n /** A URI fragment specifying the type of skill. 
*/\n odatatype: \"#Microsoft.Skills.Vision.OcrSkill\";\n}\n\nexport function ocrSkillSerializer(item: OcrSkill): any {\n return {\n \"@odata.type\": item[\"odatatype\"],\n name: item[\"name\"],\n description: item[\"description\"],\n context: item[\"context\"],\n inputs: inputFieldMappingEntryArraySerializer(item[\"inputs\"]),\n outputs: outputFieldMappingEntryArraySerializer(item[\"outputs\"]),\n defaultLanguageCode: item[\"defaultLanguageCode\"],\n detectOrientation: item[\"shouldDetectOrientation\"],\n lineEnding: item[\"lineEnding\"],\n };\n}\n\nexport function ocrSkillDeserializer(item: any): OcrSkill {\n return {\n odatatype: item[\"@odata.type\"],\n name: item[\"name\"],\n description: item[\"description\"],\n context: item[\"context\"],\n inputs: inputFieldMappingEntryArrayDeserializer(item[\"inputs\"]),\n outputs: outputFieldMappingEntryArrayDeserializer(item[\"outputs\"]),\n defaultLanguageCode: item[\"defaultLanguageCode\"],\n shouldDetectOrientation: item[\"detectOrientation\"],\n lineEnding: item[\"lineEnding\"],\n };\n}\n\n/** The language codes supported for input by OcrSkill. 
*/\nexport enum KnownOcrSkillLanguage {\n /** Afrikaans */\n Af = \"af\",\n /** Albanian */\n Sq = \"sq\",\n /** Angika (Devanagiri) */\n Anp = \"anp\",\n /** Arabic */\n Ar = \"ar\",\n /** Asturian */\n Ast = \"ast\",\n /** Awadhi-Hindi (Devanagiri) */\n Awa = \"awa\",\n /** Azerbaijani (Latin) */\n Az = \"az\",\n /** Bagheli */\n Bfy = \"bfy\",\n /** Basque */\n Eu = \"eu\",\n /** Belarusian (Cyrillic and Latin) */\n Be = \"be\",\n /** Belarusian (Cyrillic) */\n BeCyrl = \"be-cyrl\",\n /** Belarusian (Latin) */\n BeLatn = \"be-latn\",\n /** Bhojpuri-Hindi (Devanagiri) */\n Bho = \"bho\",\n /** Bislama */\n Bi = \"bi\",\n /** Bodo (Devanagiri) */\n Brx = \"brx\",\n /** Bosnian Latin */\n Bs = \"bs\",\n /** Brajbha */\n Bra = \"bra\",\n /** Breton */\n Br = \"br\",\n /** Bulgarian */\n Bg = \"bg\",\n /** Bundeli */\n Bns = \"bns\",\n /** Buryat (Cyrillic) */\n Bua = \"bua\",\n /** Catalan */\n Ca = \"ca\",\n /** Cebuano */\n Ceb = \"ceb\",\n /** Chamling */\n Rab = \"rab\",\n /** Chamorro */\n Ch = \"ch\",\n /** Chhattisgarhi (Devanagiri) */\n Hne = \"hne\",\n /** Chinese Simplified */\n ZhHans = \"zh-Hans\",\n /** Chinese Traditional */\n ZhHant = \"zh-Hant\",\n /** Cornish */\n Kw = \"kw\",\n /** Corsican */\n Co = \"co\",\n /** Crimean Tatar (Latin) */\n Crh = \"crh\",\n /** Croatian */\n Hr = \"hr\",\n /** Czech */\n Cs = \"cs\",\n /** Danish */\n Da = \"da\",\n /** Dari */\n Prs = \"prs\",\n /** Dhimal (Devanagiri) */\n Dhi = \"dhi\",\n /** Dogri (Devanagiri) */\n Doi = \"doi\",\n /** Dutch */\n Nl = \"nl\",\n /** English */\n En = \"en\",\n /** Erzya (Cyrillic) */\n Myv = \"myv\",\n /** Estonian */\n Et = \"et\",\n /** Faroese */\n Fo = \"fo\",\n /** Fijian */\n Fj = \"fj\",\n /** Filipino */\n Fil = \"fil\",\n /** Finnish */\n Fi = \"fi\",\n /** French */\n Fr = \"fr\",\n /** Frulian */\n Fur = \"fur\",\n /** Gagauz (Latin) */\n Gag = \"gag\",\n /** Galician */\n Gl = \"gl\",\n /** German */\n De = \"de\",\n /** Gilbertese */\n Gil = \"gil\",\n /** Gondi 
(Devanagiri) */\n Gon = \"gon\",\n /** Greek */\n El = \"el\",\n /** Greenlandic */\n Kl = \"kl\",\n /** Gurung (Devanagiri) */\n Gvr = \"gvr\",\n /** Haitian Creole */\n Ht = \"ht\",\n /** Halbi (Devanagiri) */\n Hlb = \"hlb\",\n /** Hani */\n Hni = \"hni\",\n /** Haryanvi */\n Bgc = \"bgc\",\n /** Hawaiian */\n Haw = \"haw\",\n /** Hindi */\n Hi = \"hi\",\n /** Hmong Daw (Latin) */\n Mww = \"mww\",\n /** Ho (Devanagiri) */\n Hoc = \"hoc\",\n /** Hungarian */\n Hu = \"hu\",\n /** Icelandic */\n Is = \"is\",\n /** Inari Sami */\n Smn = \"smn\",\n /** Indonesian */\n Id = \"id\",\n /** Interlingua */\n Ia = \"ia\",\n /** Inuktitut (Latin) */\n Iu = \"iu\",\n /** Irish */\n Ga = \"ga\",\n /** Italian */\n It = \"it\",\n /** Japanese */\n Ja = \"ja\",\n /** Jaunsari (Devanagiri) */\n Jns = \"Jns\",\n /** Javanese */\n Jv = \"jv\",\n /** Kabuverdianu */\n Kea = \"kea\",\n /** Kachin (Latin) */\n Kac = \"kac\",\n /** Kangri (Devanagiri) */\n Xnr = \"xnr\",\n /** Karachay-Balkar */\n Krc = \"krc\",\n /** Kara-Kalpak (Cyrillic) */\n KaaCyrl = \"kaa-cyrl\",\n /** Kara-Kalpak (Latin) */\n Kaa = \"kaa\",\n /** Kashubian */\n Csb = \"csb\",\n /** Kazakh (Cyrillic) */\n KkCyrl = \"kk-cyrl\",\n /** Kazakh (Latin) */\n KkLatn = \"kk-latn\",\n /** Khaling */\n Klr = \"klr\",\n /** Khasi */\n Kha = \"kha\",\n /** K'iche' */\n Quc = \"quc\",\n /** Korean */\n Ko = \"ko\",\n /** Korku */\n Kfq = \"kfq\",\n /** Koryak */\n Kpy = \"kpy\",\n /** Kosraean */\n Kos = \"kos\",\n /** Kumyk (Cyrillic) */\n Kum = \"kum\",\n /** Kurdish (Arabic) */\n KuArab = \"ku-arab\",\n /** Kurdish (Latin) */\n KuLatn = \"ku-latn\",\n /** Kurukh (Devanagiri) */\n Kru = \"kru\",\n /** Kyrgyz (Cyrillic) */\n Ky = \"ky\",\n /** Lakota */\n Lkt = \"lkt\",\n /** Latin */\n La = \"la\",\n /** Lithuanian */\n Lt = \"lt\",\n /** Lower Sorbian */\n Dsb = \"dsb\",\n /** Lule Sami */\n Smj = \"smj\",\n /** Luxembourgish */\n Lb = \"lb\",\n /** Mahasu Pahari (Devanagiri) */\n Bfz = \"bfz\",\n /** Malay (Latin) */\n 
Ms = \"ms\",\n /** Maltese */\n Mt = \"mt\",\n /** Malto (Devanagiri) */\n Kmj = \"kmj\",\n /** Manx */\n Gv = \"gv\",\n /** Maori */\n Mi = \"mi\",\n /** Marathi */\n Mr = \"mr\",\n /** Mongolian (Cyrillic) */\n Mn = \"mn\",\n /** Montenegrin (Cyrillic) */\n CnrCyrl = \"cnr-cyrl\",\n /** Montenegrin (Latin) */\n CnrLatn = \"cnr-latn\",\n /** Neapolitan */\n Nap = \"nap\",\n /** Nepali */\n Ne = \"ne\",\n /** Niuean */\n Niu = \"niu\",\n /** Nogay */\n Nog = \"nog\",\n /** Northern Sami (Latin) */\n Sme = \"sme\",\n /** Norwegian */\n Nb = \"nb\",\n /** Norwegian */\n No = \"no\",\n /** Occitan */\n Oc = \"oc\",\n /** Ossetic */\n Os = \"os\",\n /** Pashto */\n Ps = \"ps\",\n /** Persian */\n Fa = \"fa\",\n /** Polish */\n Pl = \"pl\",\n /** Portuguese */\n Pt = \"pt\",\n /** Punjabi (Arabic) */\n Pa = \"pa\",\n /** Ripuarian */\n Ksh = \"ksh\",\n /** Romanian */\n Ro = \"ro\",\n /** Romansh */\n Rm = \"rm\",\n /** Russian */\n Ru = \"ru\",\n /** Sadri (Devanagiri) */\n Sck = \"sck\",\n /** Samoan (Latin) */\n Sm = \"sm\",\n /** Sanskrit (Devanagiri) */\n Sa = \"sa\",\n /** Santali (Devanagiri) */\n Sat = \"sat\",\n /** Scots */\n Sco = \"sco\",\n /** Scottish Gaelic */\n Gd = \"gd\",\n /** Serbian (Latin) */\n Sr = \"sr\",\n /** Serbian (Cyrillic) */\n SrCyrl = \"sr-Cyrl\",\n /** Serbian (Latin) */\n SrLatn = \"sr-Latn\",\n /** Sherpa (Devanagiri) */\n Xsr = \"xsr\",\n /** Sirmauri (Devanagiri) */\n Srx = \"srx\",\n /** Skolt Sami */\n Sms = \"sms\",\n /** Slovak */\n Sk = \"sk\",\n /** Slovenian */\n Sl = \"sl\",\n /** Somali (Arabic) */\n So = \"so\",\n /** Southern Sami */\n Sma = \"sma\",\n /** Spanish */\n Es = \"es\",\n /** Swahili (Latin) */\n Sw = \"sw\",\n /** Swedish */\n Sv = \"sv\",\n /** Tajik (Cyrillic) */\n Tg = \"tg\",\n /** Tatar (Latin) */\n Tt = \"tt\",\n /** Tetum */\n Tet = \"tet\",\n /** Thangmi */\n Thf = \"thf\",\n /** Tongan */\n To = \"to\",\n /** Turkish */\n Tr = \"tr\",\n /** Turkmen (Latin) */\n Tk = \"tk\",\n /** Tuvan */\n Tyv = 
\"tyv\",\n /** Upper Sorbian */\n Hsb = \"hsb\",\n /** Urdu */\n Ur = \"ur\",\n /** Uyghur (Arabic) */\n Ug = \"ug\",\n /** Uzbek (Arabic) */\n UzArab = \"uz-arab\",\n /** Uzbek (Cyrillic) */\n UzCyrl = \"uz-cyrl\",\n /** Uzbek (Latin) */\n Uz = \"uz\",\n /** Volap\u00FCk */\n Vo = \"vo\",\n /** Walser */\n Wae = \"wae\",\n /** Welsh */\n Cy = \"cy\",\n /** Western Frisian */\n Fy = \"fy\",\n /** Yucatec Maya */\n Yua = \"yua\",\n /** Zhuang */\n Za = \"za\",\n /** Zulu */\n Zu = \"zu\",\n /** Unknown (All) */\n Unk = \"unk\",\n}\n\n/**\n * The language codes supported for input by OcrSkill. \\\n * {@link KnownOcrSkillLanguage} can be used interchangeably with OcrSkillLanguage,\n * this enum contains the known values that the service supports.\n * ### Known values supported by the service\n * **af**: Afrikaans \\\n * **sq**: Albanian \\\n * **anp**: Angika (Devanagiri) \\\n * **ar**: Arabic \\\n * **ast**: Asturian \\\n * **awa**: Awadhi-Hindi (Devanagiri) \\\n * **az**: Azerbaijani (Latin) \\\n * **bfy**: Bagheli \\\n * **eu**: Basque \\\n * **be**: Belarusian (Cyrillic and Latin) \\\n * **be-cyrl**: Belarusian (Cyrillic) \\\n * **be-latn**: Belarusian (Latin) \\\n * **bho**: Bhojpuri-Hindi (Devanagiri) \\\n * **bi**: Bislama \\\n * **brx**: Bodo (Devanagiri) \\\n * **bs**: Bosnian Latin \\\n * **bra**: Brajbha \\\n * **br**: Breton \\\n * **bg**: Bulgarian \\\n * **bns**: Bundeli \\\n * **bua**: Buryat (Cyrillic) \\\n * **ca**: Catalan \\\n * **ceb**: Cebuano \\\n * **rab**: Chamling \\\n * **ch**: Chamorro \\\n * **hne**: Chhattisgarhi (Devanagiri) \\\n * **zh-Hans**: Chinese Simplified \\\n * **zh-Hant**: Chinese Traditional \\\n * **kw**: Cornish \\\n * **co**: Corsican \\\n * **crh**: Crimean Tatar (Latin) \\\n * **hr**: Croatian \\\n * **cs**: Czech \\\n * **da**: Danish \\\n * **prs**: Dari \\\n * **dhi**: Dhimal (Devanagiri) \\\n * **doi**: Dogri (Devanagiri) \\\n * **nl**: Dutch \\\n * **en**: English \\\n * **myv**: Erzya (Cyrillic) \\\n * **et**: 
Estonian \\\n * **fo**: Faroese \\\n * **fj**: Fijian \\\n * **fil**: Filipino \\\n * **fi**: Finnish \\\n * **fr**: French \\\n * **fur**: Frulian \\\n * **gag**: Gagauz (Latin) \\\n * **gl**: Galician \\\n * **de**: German \\\n * **gil**: Gilbertese \\\n * **gon**: Gondi (Devanagiri) \\\n * **el**: Greek \\\n * **kl**: Greenlandic \\\n * **gvr**: Gurung (Devanagiri) \\\n * **ht**: Haitian Creole \\\n * **hlb**: Halbi (Devanagiri) \\\n * **hni**: Hani \\\n * **bgc**: Haryanvi \\\n * **haw**: Hawaiian \\\n * **hi**: Hindi \\\n * **mww**: Hmong Daw (Latin) \\\n * **hoc**: Ho (Devanagiri) \\\n * **hu**: Hungarian \\\n * **is**: Icelandic \\\n * **smn**: Inari Sami \\\n * **id**: Indonesian \\\n * **ia**: Interlingua \\\n * **iu**: Inuktitut (Latin) \\\n * **ga**: Irish \\\n * **it**: Italian \\\n * **ja**: Japanese \\\n * **Jns**: Jaunsari (Devanagiri) \\\n * **jv**: Javanese \\\n * **kea**: Kabuverdianu \\\n * **kac**: Kachin (Latin) \\\n * **xnr**: Kangri (Devanagiri) \\\n * **krc**: Karachay-Balkar \\\n * **kaa-cyrl**: Kara-Kalpak (Cyrillic) \\\n * **kaa**: Kara-Kalpak (Latin) \\\n * **csb**: Kashubian \\\n * **kk-cyrl**: Kazakh (Cyrillic) \\\n * **kk-latn**: Kazakh (Latin) \\\n * **klr**: Khaling \\\n * **kha**: Khasi \\\n * **quc**: K'iche' \\\n * **ko**: Korean \\\n * **kfq**: Korku \\\n * **kpy**: Koryak \\\n * **kos**: Kosraean \\\n * **kum**: Kumyk (Cyrillic) \\\n * **ku-arab**: Kurdish (Arabic) \\\n * **ku-latn**: Kurdish (Latin) \\\n * **kru**: Kurukh (Devanagiri) \\\n * **ky**: Kyrgyz (Cyrillic) \\\n * **lkt**: Lakota \\\n * **la**: Latin \\\n * **lt**: Lithuanian \\\n * **dsb**: Lower Sorbian \\\n * **smj**: Lule Sami \\\n * **lb**: Luxembourgish \\\n * **bfz**: Mahasu Pahari (Devanagiri) \\\n * **ms**: Malay (Latin) \\\n * **mt**: Maltese \\\n * **kmj**: Malto (Devanagiri) \\\n * **gv**: Manx \\\n * **mi**: Maori \\\n * **mr**: Marathi \\\n * **mn**: Mongolian (Cyrillic) \\\n * **cnr-cyrl**: Montenegrin (Cyrillic) \\\n * **cnr-latn**: Montenegrin 
(Latin) \\\n * **nap**: Neapolitan \\\n * **ne**: Nepali \\\n * **niu**: Niuean \\\n * **nog**: Nogay \\\n * **sme**: Northern Sami (Latin) \\\n * **nb**: Norwegian \\\n * **no**: Norwegian \\\n * **oc**: Occitan \\\n * **os**: Ossetic \\\n * **ps**: Pashto \\\n * **fa**: Persian \\\n * **pl**: Polish \\\n * **pt**: Portuguese \\\n * **pa**: Punjabi (Arabic) \\\n * **ksh**: Ripuarian \\\n * **ro**: Romanian \\\n * **rm**: Romansh \\\n * **ru**: Russian \\\n * **sck**: Sadri (Devanagiri) \\\n * **sm**: Samoan (Latin) \\\n * **sa**: Sanskrit (Devanagiri) \\\n * **sat**: Santali (Devanagiri) \\\n * **sco**: Scots \\\n * **gd**: Scottish Gaelic \\\n * **sr**: Serbian (Latin) \\\n * **sr-Cyrl**: Serbian (Cyrillic) \\\n * **sr-Latn**: Serbian (Latin) \\\n * **xsr**: Sherpa (Devanagiri) \\\n * **srx**: Sirmauri (Devanagiri) \\\n * **sms**: Skolt Sami \\\n * **sk**: Slovak \\\n * **sl**: Slovenian \\\n * **so**: Somali (Arabic) \\\n * **sma**: Southern Sami \\\n * **es**: Spanish \\\n * **sw**: Swahili (Latin) \\\n * **sv**: Swedish \\\n * **tg**: Tajik (Cyrillic) \\\n * **tt**: Tatar (Latin) \\\n * **tet**: Tetum \\\n * **thf**: Thangmi \\\n * **to**: Tongan \\\n * **tr**: Turkish \\\n * **tk**: Turkmen (Latin) \\\n * **tyv**: Tuvan \\\n * **hsb**: Upper Sorbian \\\n * **ur**: Urdu \\\n * **ug**: Uyghur (Arabic) \\\n * **uz-arab**: Uzbek (Arabic) \\\n * **uz-cyrl**: Uzbek (Cyrillic) \\\n * **uz**: Uzbek (Latin) \\\n * **vo**: Volap\u00FCk \\\n * **wae**: Walser \\\n * **cy**: Welsh \\\n * **fy**: Western Frisian \\\n * **yua**: Yucatec Maya \\\n * **za**: Zhuang \\\n * **zu**: Zulu \\\n * **unk**: Unknown (All)\n */\nexport type OcrSkillLanguage = string;\n\n/** Defines the sequence of characters to use between the lines of text recognized by the OCR skill. The default value is \"space\". */\nexport enum KnownOcrLineEnding {\n /** Lines are separated by a single space character. */\n Space = \"space\",\n /** Lines are separated by a carriage return ('\\r') character. 
*/\n CarriageReturn = \"carriageReturn\",\n /** Lines are separated by a single line feed ('\\n') character. */\n LineFeed = \"lineFeed\",\n /** Lines are separated by a carriage return and a line feed ('\\r\\n') character. */\n CarriageReturnLineFeed = \"carriageReturnLineFeed\",\n}\n\n/**\n * Defines the sequence of characters to use between the lines of text recognized by the OCR skill. The default value is \"space\". \\\n * {@link KnownOcrLineEnding} can be used interchangeably with OcrLineEnding,\n * this enum contains the known values that the service supports.\n * ### Known values supported by the service\n * **space**: Lines are separated by a single space character. \\\n * **carriageReturn**: Lines are separated by a carriage return ('\\r') character. \\\n * **lineFeed**: Lines are separated by a single line feed ('\\n') character. \\\n * **carriageReturnLineFeed**: Lines are separated by a carriage return and a line feed ('\\r\\n') character.\n */\nexport type OcrLineEnding = string;\n\n/** A skill that analyzes image files. It extracts a rich set of visual features based on the image content. */\nexport interface ImageAnalysisSkill extends SearchIndexerSkill {\n /** A value indicating which language code to use. Default is `en`. */\n defaultLanguageCode?: ImageAnalysisSkillLanguage;\n /** A list of visual features. */\n visualFeatures?: VisualFeature[];\n /** A string indicating which domain-specific details to return. */\n details?: ImageDetail[];\n /** A URI fragment specifying the type of skill. 
*/\n odatatype: \"#Microsoft.Skills.Vision.ImageAnalysisSkill\";\n}\n\nexport function imageAnalysisSkillSerializer(item: ImageAnalysisSkill): any {\n return {\n \"@odata.type\": item[\"odatatype\"],\n name: item[\"name\"],\n description: item[\"description\"],\n context: item[\"context\"],\n inputs: inputFieldMappingEntryArraySerializer(item[\"inputs\"]),\n outputs: outputFieldMappingEntryArraySerializer(item[\"outputs\"]),\n defaultLanguageCode: item[\"defaultLanguageCode\"],\n visualFeatures: !item[\"visualFeatures\"]\n ? item[\"visualFeatures\"]\n : item[\"visualFeatures\"].map((p: any) => {\n return p;\n }),\n details: !item[\"details\"]\n ? item[\"details\"]\n : item[\"details\"].map((p: any) => {\n return p;\n }),\n };\n}\n\nexport function imageAnalysisSkillDeserializer(item: any): ImageAnalysisSkill {\n return {\n odatatype: item[\"@odata.type\"],\n name: item[\"name\"],\n description: item[\"description\"],\n context: item[\"context\"],\n inputs: inputFieldMappingEntryArrayDeserializer(item[\"inputs\"]),\n outputs: outputFieldMappingEntryArrayDeserializer(item[\"outputs\"]),\n defaultLanguageCode: item[\"defaultLanguageCode\"],\n visualFeatures: !item[\"visualFeatures\"]\n ? item[\"visualFeatures\"]\n : item[\"visualFeatures\"].map((p: any) => {\n return p;\n }),\n details: !item[\"details\"]\n ? item[\"details\"]\n : item[\"details\"].map((p: any) => {\n return p;\n }),\n };\n}\n\n/** The language codes supported for input by ImageAnalysisSkill. 
*/\nexport enum KnownImageAnalysisSkillLanguage {\n /** Arabic */\n Ar = \"ar\",\n /** Azerbaijani */\n Az = \"az\",\n /** Bulgarian */\n Bg = \"bg\",\n /** Bosnian Latin */\n Bs = \"bs\",\n /** Catalan */\n Ca = \"ca\",\n /** Czech */\n Cs = \"cs\",\n /** Welsh */\n Cy = \"cy\",\n /** Danish */\n Da = \"da\",\n /** German */\n De = \"de\",\n /** Greek */\n El = \"el\",\n /** English */\n En = \"en\",\n /** Spanish */\n Es = \"es\",\n /** Estonian */\n Et = \"et\",\n /** Basque */\n Eu = \"eu\",\n /** Finnish */\n Fi = \"fi\",\n /** French */\n Fr = \"fr\",\n /** Irish */\n Ga = \"ga\",\n /** Galician */\n Gl = \"gl\",\n /** Hebrew */\n He = \"he\",\n /** Hindi */\n Hi = \"hi\",\n /** Croatian */\n Hr = \"hr\",\n /** Hungarian */\n Hu = \"hu\",\n /** Indonesian */\n Id = \"id\",\n /** Italian */\n It = \"it\",\n /** Japanese */\n Ja = \"ja\",\n /** Kazakh */\n Kk = \"kk\",\n /** Korean */\n Ko = \"ko\",\n /** Lithuanian */\n Lt = \"lt\",\n /** Latvian */\n Lv = \"lv\",\n /** Macedonian */\n Mk = \"mk\",\n /** Malay Malaysia */\n Ms = \"ms\",\n /** Norwegian (Bokmal) */\n Nb = \"nb\",\n /** Dutch */\n Nl = \"nl\",\n /** Polish */\n Pl = \"pl\",\n /** Dari */\n Prs = \"prs\",\n /** Portuguese-Brazil */\n PtBR = \"pt-BR\",\n /** Portuguese-Portugal */\n Pt = \"pt\",\n /** Portuguese-Portugal */\n PtPT = \"pt-PT\",\n /** Romanian */\n Ro = \"ro\",\n /** Russian */\n Ru = \"ru\",\n /** Slovak */\n Sk = \"sk\",\n /** Slovenian */\n Sl = \"sl\",\n /** Serbian - Cyrillic RS */\n SrCyrl = \"sr-Cyrl\",\n /** Serbian - Latin RS */\n SrLatn = \"sr-Latn\",\n /** Swedish */\n Sv = \"sv\",\n /** Thai */\n Th = \"th\",\n /** Turkish */\n Tr = \"tr\",\n /** Ukrainian */\n Uk = \"uk\",\n /** Vietnamese */\n Vi = \"vi\",\n /** Chinese Simplified */\n Zh = \"zh\",\n /** Chinese Simplified */\n ZhHans = \"zh-Hans\",\n /** Chinese Traditional */\n ZhHant = \"zh-Hant\",\n}\n\n/**\n * The language codes supported for input by ImageAnalysisSkill. 
\\\n * {@link KnownImageAnalysisSkillLanguage} can be used interchangeably with ImageAnalysisSkillLanguage,\n * this enum contains the known values that the service supports.\n * ### Known values supported by the service\n * **ar**: Arabic \\\n * **az**: Azerbaijani \\\n * **bg**: Bulgarian \\\n * **bs**: Bosnian Latin \\\n * **ca**: Catalan \\\n * **cs**: Czech \\\n * **cy**: Welsh \\\n * **da**: Danish \\\n * **de**: German \\\n * **el**: Greek \\\n * **en**: English \\\n * **es**: Spanish \\\n * **et**: Estonian \\\n * **eu**: Basque \\\n * **fi**: Finnish \\\n * **fr**: French \\\n * **ga**: Irish \\\n * **gl**: Galician \\\n * **he**: Hebrew \\\n * **hi**: Hindi \\\n * **hr**: Croatian \\\n * **hu**: Hungarian \\\n * **id**: Indonesian \\\n * **it**: Italian \\\n * **ja**: Japanese \\\n * **kk**: Kazakh \\\n * **ko**: Korean \\\n * **lt**: Lithuanian \\\n * **lv**: Latvian \\\n * **mk**: Macedonian \\\n * **ms**: Malay Malaysia \\\n * **nb**: Norwegian (Bokmal) \\\n * **nl**: Dutch \\\n * **pl**: Polish \\\n * **prs**: Dari \\\n * **pt-BR**: Portuguese-Brazil \\\n * **pt**: Portuguese-Portugal \\\n * **pt-PT**: Portuguese-Portugal \\\n * **ro**: Romanian \\\n * **ru**: Russian \\\n * **sk**: Slovak \\\n * **sl**: Slovenian \\\n * **sr-Cyrl**: Serbian - Cyrillic RS \\\n * **sr-Latn**: Serbian - Latin RS \\\n * **sv**: Swedish \\\n * **th**: Thai \\\n * **tr**: Turkish \\\n * **uk**: Ukrainian \\\n * **vi**: Vietnamese \\\n * **zh**: Chinese Simplified \\\n * **zh-Hans**: Chinese Simplified \\\n * **zh-Hant**: Chinese Traditional\n */\nexport type ImageAnalysisSkillLanguage = string;\n\n/** The strings indicating what visual feature types to return. */\nexport enum KnownVisualFeature {\n /** Visual features recognized as adult persons. */\n Adult = \"adult\",\n /** Visual features recognized as commercial brands. */\n Brands = \"brands\",\n /** Categories. */\n Categories = \"categories\",\n /** Description. 
*/\n Description = \"description\",\n /** Visual features recognized as people faces. */\n Faces = \"faces\",\n /** Visual features recognized as objects. */\n Objects = \"objects\",\n /** Tags. */\n Tags = \"tags\",\n}\n\n/**\n * The strings indicating what visual feature types to return. \\\n * {@link KnownVisualFeature} can be used interchangeably with VisualFeature,\n * this enum contains the known values that the service supports.\n * ### Known values supported by the service\n * **adult**: Visual features recognized as adult persons. \\\n * **brands**: Visual features recognized as commercial brands. \\\n * **categories**: Categories. \\\n * **description**: Description. \\\n * **faces**: Visual features recognized as people faces. \\\n * **objects**: Visual features recognized as objects. \\\n * **tags**: Tags.\n */\nexport type VisualFeature = string;\n\n/** A string indicating which domain-specific details to return. */\nexport enum KnownImageDetail {\n /** Details recognized as celebrities. */\n Celebrities = \"celebrities\",\n /** Details recognized as landmarks. */\n Landmarks = \"landmarks\",\n}\n\n/**\n * A string indicating which domain-specific details to return. \\\n * {@link KnownImageDetail} can be used interchangeably with ImageDetail,\n * this enum contains the known values that the service supports.\n * ### Known values supported by the service\n * **celebrities**: Details recognized as celebrities. \\\n * **landmarks**: Details recognized as landmarks.\n */\nexport type ImageDetail = string;\n\n/** A skill that detects the language of input text and reports a single language code for every document submitted on the request. The language code is paired with a score indicating the confidence of the analysis. */\nexport interface LanguageDetectionSkill extends SearchIndexerSkill {\n /** A country code to use as a hint to the language detection model if it cannot disambiguate the language. 
*/\n defaultCountryHint?: string;\n /** The version of the model to use when calling the Text Analytics service. It will default to the latest available when not specified. We recommend you do not specify this value unless absolutely necessary. */\n modelVersion?: string;\n /** A URI fragment specifying the type of skill. */\n odatatype: \"#Microsoft.Skills.Text.LanguageDetectionSkill\";\n}\n\nexport function languageDetectionSkillSerializer(item: LanguageDetectionSkill): any {\n return {\n \"@odata.type\": item[\"odatatype\"],\n name: item[\"name\"],\n description: item[\"description\"],\n context: item[\"context\"],\n inputs: inputFieldMappingEntryArraySerializer(item[\"inputs\"]),\n outputs: outputFieldMappingEntryArraySerializer(item[\"outputs\"]),\n defaultCountryHint: item[\"defaultCountryHint\"],\n modelVersion: item[\"modelVersion\"],\n };\n}\n\nexport function languageDetectionSkillDeserializer(item: any): LanguageDetectionSkill {\n return {\n odatatype: item[\"@odata.type\"],\n name: item[\"name\"],\n description: item[\"description\"],\n context: item[\"context\"],\n inputs: inputFieldMappingEntryArrayDeserializer(item[\"inputs\"]),\n outputs: outputFieldMappingEntryArrayDeserializer(item[\"outputs\"]),\n defaultCountryHint: item[\"defaultCountryHint\"],\n modelVersion: item[\"modelVersion\"],\n };\n}\n\n/** A skill for reshaping the outputs. It creates a complex type to support composite fields (also known as multipart fields). */\nexport interface ShaperSkill extends SearchIndexerSkill {\n /** A URI fragment specifying the type of skill. 
*/\n odatatype: \"#Microsoft.Skills.Util.ShaperSkill\";\n}\n\nexport function shaperSkillSerializer(item: ShaperSkill): any {\n return {\n \"@odata.type\": item[\"odatatype\"],\n name: item[\"name\"],\n description: item[\"description\"],\n context: item[\"context\"],\n inputs: inputFieldMappingEntryArraySerializer(item[\"inputs\"]),\n outputs: outputFieldMappingEntryArraySerializer(item[\"outputs\"]),\n };\n}\n\nexport function shaperSkillDeserializer(item: any): ShaperSkill {\n return {\n odatatype: item[\"@odata.type\"],\n name: item[\"name\"],\n description: item[\"description\"],\n context: item[\"context\"],\n inputs: inputFieldMappingEntryArrayDeserializer(item[\"inputs\"]),\n outputs: outputFieldMappingEntryArrayDeserializer(item[\"outputs\"]),\n };\n}\n\n/** A skill for merging two or more strings into a single unified string, with an optional user-defined delimiter separating each component part. */\nexport interface MergeSkill extends SearchIndexerSkill {\n /** The tag indicates the start of the merged text. By default, the tag is an empty space. */\n insertPreTag?: string;\n /** The tag indicates the end of the merged text. By default, the tag is an empty space. */\n insertPostTag?: string;\n /** A URI fragment specifying the type of skill. 
*/\n odatatype: \"#Microsoft.Skills.Text.MergeSkill\";\n}\n\nexport function mergeSkillSerializer(item: MergeSkill): any {\n return {\n \"@odata.type\": item[\"odatatype\"],\n name: item[\"name\"],\n description: item[\"description\"],\n context: item[\"context\"],\n inputs: inputFieldMappingEntryArraySerializer(item[\"inputs\"]),\n outputs: outputFieldMappingEntryArraySerializer(item[\"outputs\"]),\n insertPreTag: item[\"insertPreTag\"],\n insertPostTag: item[\"insertPostTag\"],\n };\n}\n\nexport function mergeSkillDeserializer(item: any): MergeSkill {\n return {\n odatatype: item[\"@odata.type\"],\n name: item[\"name\"],\n description: item[\"description\"],\n context: item[\"context\"],\n inputs: inputFieldMappingEntryArrayDeserializer(item[\"inputs\"]),\n outputs: outputFieldMappingEntryArrayDeserializer(item[\"outputs\"]),\n insertPreTag: item[\"insertPreTag\"],\n insertPostTag: item[\"insertPostTag\"],\n };\n}\n\n/** Using the Text Analytics API, evaluates unstructured text and for each record, provides sentiment labels (such as \"negative\", \"neutral\" and \"positive\") based on the highest confidence score found by the service at a sentence and document-level. */\nexport interface SentimentSkillV3 extends SearchIndexerSkill {\n /** A value indicating which language code to use. Default is `en`. */\n defaultLanguageCode?: SentimentSkillLanguage;\n /** If set to true, the skill output will include information from Text Analytics for opinion mining, namely targets (nouns or verbs) and their associated assessment (adjective) in the text. Default is false. */\n includeOpinionMining?: boolean;\n /** The version of the model to use when calling the Text Analytics service. It will default to the latest available when not specified. We recommend you do not specify this value unless absolutely necessary. */\n modelVersion?: string;\n /** A URI fragment specifying the type of skill. 
*/\n odatatype: \"#Microsoft.Skills.Text.V3.SentimentSkill\";\n}\n\nexport function sentimentSkillV3Serializer(item: SentimentSkillV3): any {\n return {\n \"@odata.type\": item[\"odatatype\"],\n name: item[\"name\"],\n description: item[\"description\"],\n context: item[\"context\"],\n inputs: inputFieldMappingEntryArraySerializer(item[\"inputs\"]),\n outputs: outputFieldMappingEntryArraySerializer(item[\"outputs\"]),\n defaultLanguageCode: item[\"defaultLanguageCode\"],\n includeOpinionMining: item[\"includeOpinionMining\"],\n modelVersion: item[\"modelVersion\"],\n };\n}\n\nexport function sentimentSkillV3Deserializer(item: any): SentimentSkillV3 {\n return {\n odatatype: item[\"@odata.type\"],\n name: item[\"name\"],\n description: item[\"description\"],\n context: item[\"context\"],\n inputs: inputFieldMappingEntryArrayDeserializer(item[\"inputs\"]),\n outputs: outputFieldMappingEntryArrayDeserializer(item[\"outputs\"]),\n defaultLanguageCode: item[\"defaultLanguageCode\"],\n includeOpinionMining: item[\"includeOpinionMining\"],\n modelVersion: item[\"modelVersion\"],\n };\n}\n\n/** The language codes supported for input text by SentimentSkill. */\nexport enum KnownSentimentSkillLanguage {\n /** Danish */\n Da = \"da\",\n /** Dutch */\n Nl = \"nl\",\n /** English */\n En = \"en\",\n /** Finnish */\n Fi = \"fi\",\n /** French */\n Fr = \"fr\",\n /** German */\n De = \"de\",\n /** Greek */\n El = \"el\",\n /** Italian */\n It = \"it\",\n /** Norwegian (Bokmaal) */\n No = \"no\",\n /** Polish */\n Pl = \"pl\",\n /** Portuguese (Portugal) */\n PtPT = \"pt-PT\",\n /** Russian */\n Ru = \"ru\",\n /** Spanish */\n Es = \"es\",\n /** Swedish */\n Sv = \"sv\",\n /** Turkish */\n Tr = \"tr\",\n}\n\n/**\n * The language codes supported for input text by SentimentSkill. 
\\\n * {@link KnownSentimentSkillLanguage} can be used interchangeably with SentimentSkillLanguage,\n * this enum contains the known values that the service supports.\n * ### Known values supported by the service\n * **da**: Danish \\\n * **nl**: Dutch \\\n * **en**: English \\\n * **fi**: Finnish \\\n * **fr**: French \\\n * **de**: German \\\n * **el**: Greek \\\n * **it**: Italian \\\n * **no**: Norwegian (Bokmaal) \\\n * **pl**: Polish \\\n * **pt-PT**: Portuguese (Portugal) \\\n * **ru**: Russian \\\n * **es**: Spanish \\\n * **sv**: Swedish \\\n * **tr**: Turkish\n */\nexport type SentimentSkillLanguage = string;\n\n/** Using the Text Analytics API, extracts linked entities from text. */\nexport interface EntityLinkingSkill extends SearchIndexerSkill {\n /** A value indicating which language code to use. Default is `en`. */\n defaultLanguageCode?: string;\n /** A value between 0 and 1 that be used to only include entities whose confidence score is greater than the value specified. If not set (default), or if explicitly set to null, all entities will be included. */\n minimumPrecision?: number;\n /** The version of the model to use when calling the Text Analytics service. It will default to the latest available when not specified. We recommend you do not specify this value unless absolutely necessary. */\n modelVersion?: string;\n /** A URI fragment specifying the type of skill. 
*/\n odatatype: \"#Microsoft.Skills.Text.V3.EntityLinkingSkill\";\n}\n\nexport function entityLinkingSkillSerializer(item: EntityLinkingSkill): any {\n return {\n \"@odata.type\": item[\"odatatype\"],\n name: item[\"name\"],\n description: item[\"description\"],\n context: item[\"context\"],\n inputs: inputFieldMappingEntryArraySerializer(item[\"inputs\"]),\n outputs: outputFieldMappingEntryArraySerializer(item[\"outputs\"]),\n defaultLanguageCode: item[\"defaultLanguageCode\"],\n minimumPrecision: item[\"minimumPrecision\"],\n modelVersion: item[\"modelVersion\"],\n };\n}\n\nexport function entityLinkingSkillDeserializer(item: any): EntityLinkingSkill {\n return {\n odatatype: item[\"@odata.type\"],\n name: item[\"name\"],\n description: item[\"description\"],\n context: item[\"context\"],\n inputs: inputFieldMappingEntryArrayDeserializer(item[\"inputs\"]),\n outputs: outputFieldMappingEntryArrayDeserializer(item[\"outputs\"]),\n defaultLanguageCode: item[\"defaultLanguageCode\"],\n minimumPrecision: item[\"minimumPrecision\"],\n modelVersion: item[\"modelVersion\"],\n };\n}\n\n/** Using the Text Analytics API, extracts entities of different types from text. */\nexport interface EntityRecognitionSkillV3 extends SearchIndexerSkill {\n /** A list of entity categories that should be extracted. */\n categories?: EntityCategory[];\n /** A value indicating which language code to use. Default is `en`. */\n defaultLanguageCode?: EntityRecognitionSkillLanguage;\n /** A value between 0 and 1 that be used to only include entities whose confidence score is greater than the value specified. If not set (default), or if explicitly set to null, all entities will be included. */\n minimumPrecision?: number;\n /** The version of the model to use when calling the Text Analytics API. It will default to the latest available when not specified. We recommend you do not specify this value unless absolutely necessary. 
*/\n modelVersion?: string;\n /** A URI fragment specifying the type of skill. */\n odatatype: \"#Microsoft.Skills.Text.V3.EntityRecognitionSkill\";\n}\n\nexport function entityRecognitionSkillV3Serializer(item: EntityRecognitionSkillV3): any {\n return {\n \"@odata.type\": item[\"odatatype\"],\n name: item[\"name\"],\n description: item[\"description\"],\n context: item[\"context\"],\n inputs: inputFieldMappingEntryArraySerializer(item[\"inputs\"]),\n outputs: outputFieldMappingEntryArraySerializer(item[\"outputs\"]),\n categories: !item[\"categories\"]\n ? item[\"categories\"]\n : item[\"categories\"].map((p: any) => {\n return p;\n }),\n defaultLanguageCode: item[\"defaultLanguageCode\"],\n minimumPrecision: item[\"minimumPrecision\"],\n modelVersion: item[\"modelVersion\"],\n };\n}\n\nexport function entityRecognitionSkillV3Deserializer(item: any): EntityRecognitionSkillV3 {\n return {\n odatatype: item[\"@odata.type\"],\n name: item[\"name\"],\n description: item[\"description\"],\n context: item[\"context\"],\n inputs: inputFieldMappingEntryArrayDeserializer(item[\"inputs\"]),\n outputs: outputFieldMappingEntryArrayDeserializer(item[\"outputs\"]),\n categories: !item[\"categories\"]\n ? item[\"categories\"]\n : item[\"categories\"].map((p: any) => {\n return p;\n }),\n defaultLanguageCode: item[\"defaultLanguageCode\"],\n minimumPrecision: item[\"minimumPrecision\"],\n modelVersion: item[\"modelVersion\"],\n };\n}\n\n/** A string indicating what entity categories to return. */\nexport enum KnownEntityCategory {\n /** Entities describing a physical location. */\n Location = \"location\",\n /** Entities describing an organization. */\n Organization = \"organization\",\n /** Entities describing a person. */\n Person = \"person\",\n /** Entities describing a quantity. */\n Quantity = \"quantity\",\n /** Entities describing a date and time. */\n Datetime = \"datetime\",\n /** Entities describing a URL. 
*/\n Url = \"url\",\n /** Entities describing an email address. */\n Email = \"email\",\n}\n\n/**\n * A string indicating what entity categories to return. \\\n * {@link KnownEntityCategory} can be used interchangeably with EntityCategory,\n * this enum contains the known values that the service supports.\n * ### Known values supported by the service\n * **location**: Entities describing a physical location. \\\n * **organization**: Entities describing an organization. \\\n * **person**: Entities describing a person. \\\n * **quantity**: Entities describing a quantity. \\\n * **datetime**: Entities describing a date and time. \\\n * **url**: Entities describing a URL. \\\n * **email**: Entities describing an email address.\n */\nexport type EntityCategory = string;\n\n/** The language codes supported for input text by EntityRecognitionSkill. */\nexport enum KnownEntityRecognitionSkillLanguage {\n /** Arabic */\n Ar = \"ar\",\n /** Czech */\n Cs = \"cs\",\n /** Chinese-Simplified */\n ZhHans = \"zh-Hans\",\n /** Chinese-Traditional */\n ZhHant = \"zh-Hant\",\n /** Danish */\n Da = \"da\",\n /** Dutch */\n Nl = \"nl\",\n /** English */\n En = \"en\",\n /** Finnish */\n Fi = \"fi\",\n /** French */\n Fr = \"fr\",\n /** German */\n De = \"de\",\n /** Greek */\n El = \"el\",\n /** Hungarian */\n Hu = \"hu\",\n /** Italian */\n It = \"it\",\n /** Japanese */\n Ja = \"ja\",\n /** Korean */\n Ko = \"ko\",\n /** Norwegian (Bokmaal) */\n No = \"no\",\n /** Polish */\n Pl = \"pl\",\n /** Portuguese (Portugal) */\n PtPT = \"pt-PT\",\n /** Portuguese (Brazil) */\n PtBR = \"pt-BR\",\n /** Russian */\n Ru = \"ru\",\n /** Spanish */\n Es = \"es\",\n /** Swedish */\n Sv = \"sv\",\n /** Turkish */\n Tr = \"tr\",\n}\n\n/**\n * The language codes supported for input text by EntityRecognitionSkill. 
\\\n * {@link KnownEntityRecognitionSkillLanguage} can be used interchangeably with EntityRecognitionSkillLanguage,\n * this enum contains the known values that the service supports.\n * ### Known values supported by the service\n * **ar**: Arabic \\\n * **cs**: Czech \\\n * **zh-Hans**: Chinese-Simplified \\\n * **zh-Hant**: Chinese-Traditional \\\n * **da**: Danish \\\n * **nl**: Dutch \\\n * **en**: English \\\n * **fi**: Finnish \\\n * **fr**: French \\\n * **de**: German \\\n * **el**: Greek \\\n * **hu**: Hungarian \\\n * **it**: Italian \\\n * **ja**: Japanese \\\n * **ko**: Korean \\\n * **no**: Norwegian (Bokmaal) \\\n * **pl**: Polish \\\n * **pt-PT**: Portuguese (Portugal) \\\n * **pt-BR**: Portuguese (Brazil) \\\n * **ru**: Russian \\\n * **es**: Spanish \\\n * **sv**: Swedish \\\n * **tr**: Turkish\n */\nexport type EntityRecognitionSkillLanguage = string;\n\n/** Using the Text Analytics API, extracts personal information from an input text and gives you the option of masking it. */\nexport interface PIIDetectionSkill extends SearchIndexerSkill {\n /** A value indicating which language code to use. Default is `en`. */\n defaultLanguageCode?: string;\n /** A value between 0 and 1 that be used to only include entities whose confidence score is greater than the value specified. If not set (default), or if explicitly set to null, all entities will be included. */\n minimumPrecision?: number;\n /** A parameter that provides various ways to mask the personal information detected in the input text. Default is 'none'. */\n maskingMode?: PIIDetectionSkillMaskingMode;\n /** The character used to mask the text if the maskingMode parameter is set to replace. Default is '*'. */\n mask?: string;\n /** The version of the model to use when calling the Text Analytics service. It will default to the latest available when not specified. We recommend you do not specify this value unless absolutely necessary. 
*/\n modelVersion?: string;\n /** A list of PII entity categories that should be extracted and masked. */\n piiCategories?: string[];\n /** If specified, will set the PII domain to include only a subset of the entity categories. Possible values include: 'phi', 'none'. Default is 'none'. */\n domain?: string;\n /** A URI fragment specifying the type of skill. */\n odatatype: \"#Microsoft.Skills.Text.PIIDetectionSkill\";\n}\n\nexport function piiDetectionSkillSerializer(item: PIIDetectionSkill): any {\n return {\n \"@odata.type\": item[\"odatatype\"],\n name: item[\"name\"],\n description: item[\"description\"],\n context: item[\"context\"],\n inputs: inputFieldMappingEntryArraySerializer(item[\"inputs\"]),\n outputs: outputFieldMappingEntryArraySerializer(item[\"outputs\"]),\n defaultLanguageCode: item[\"defaultLanguageCode\"],\n minimumPrecision: item[\"minimumPrecision\"],\n maskingMode: item[\"maskingMode\"],\n maskingCharacter: item[\"mask\"],\n modelVersion: item[\"modelVersion\"],\n piiCategories: !item[\"piiCategories\"]\n ? item[\"piiCategories\"]\n : item[\"piiCategories\"].map((p: any) => {\n return p;\n }),\n domain: item[\"domain\"],\n };\n}\n\nexport function piiDetectionSkillDeserializer(item: any): PIIDetectionSkill {\n return {\n odatatype: item[\"@odata.type\"],\n name: item[\"name\"],\n description: item[\"description\"],\n context: item[\"context\"],\n inputs: inputFieldMappingEntryArrayDeserializer(item[\"inputs\"]),\n outputs: outputFieldMappingEntryArrayDeserializer(item[\"outputs\"]),\n defaultLanguageCode: item[\"defaultLanguageCode\"],\n minimumPrecision: item[\"minimumPrecision\"],\n maskingMode: item[\"maskingMode\"],\n mask: item[\"maskingCharacter\"],\n modelVersion: item[\"modelVersion\"],\n piiCategories: !item[\"piiCategories\"]\n ? 
item[\"piiCategories\"]\n : item[\"piiCategories\"].map((p: any) => {\n return p;\n }),\n domain: item[\"domain\"],\n };\n}\n\n/** A string indicating what maskingMode to use to mask the personal information detected in the input text. */\nexport enum KnownPIIDetectionSkillMaskingMode {\n /** No masking occurs and the maskedText output will not be returned. */\n None = \"none\",\n /** Replaces the detected entities with the character given in the maskingCharacter parameter. The character will be repeated to the length of the detected entity so that the offsets will correctly correspond to both the input text as well as the output maskedText. */\n Replace = \"replace\",\n}\n\n/**\n * A string indicating what maskingMode to use to mask the personal information detected in the input text. \\\n * {@link KnownPIIDetectionSkillMaskingMode} can be used interchangeably with PIIDetectionSkillMaskingMode,\n * this enum contains the known values that the service supports.\n * ### Known values supported by the service\n * **none**: No masking occurs and the maskedText output will not be returned. \\\n * **replace**: Replaces the detected entities with the character given in the maskingCharacter parameter. The character will be repeated to the length of the detected entity so that the offsets will correctly correspond to both the input text as well as the output maskedText.\n */\nexport type PIIDetectionSkillMaskingMode = string;\n\n/** A skill to split a string into chunks of text. */\nexport interface SplitSkill extends SearchIndexerSkill {\n /** A value indicating which language code to use. Default is `en`. */\n defaultLanguageCode?: SplitSkillLanguage;\n /** A value indicating which split mode to perform. */\n textSplitMode?: TextSplitMode;\n /** The desired maximum page length. Default is 10000. */\n maximumPageLength?: number;\n /** Only applicable when textSplitMode is set to 'pages'. 
If specified, n+1th chunk will start with this number of characters/tokens from the end of the nth chunk. */\n pageOverlapLength?: number;\n /** Only applicable when textSplitMode is set to 'pages'. If specified, the SplitSkill will discontinue splitting after processing the first 'maximumPagesToTake' pages, in order to improve performance when only a few initial pages are needed from each document. */\n maximumPagesToTake?: number;\n /** A URI fragment specifying the type of skill. */\n odatatype: \"#Microsoft.Skills.Text.SplitSkill\";\n}\n\nexport function splitSkillSerializer(item: SplitSkill): any {\n return {\n \"@odata.type\": item[\"odatatype\"],\n name: item[\"name\"],\n description: item[\"description\"],\n context: item[\"context\"],\n inputs: inputFieldMappingEntryArraySerializer(item[\"inputs\"]),\n outputs: outputFieldMappingEntryArraySerializer(item[\"outputs\"]),\n defaultLanguageCode: item[\"defaultLanguageCode\"],\n textSplitMode: item[\"textSplitMode\"],\n maximumPageLength: item[\"maximumPageLength\"],\n pageOverlapLength: item[\"pageOverlapLength\"],\n maximumPagesToTake: item[\"maximumPagesToTake\"],\n };\n}\n\nexport function splitSkillDeserializer(item: any): SplitSkill {\n return {\n odatatype: item[\"@odata.type\"],\n name: item[\"name\"],\n description: item[\"description\"],\n context: item[\"context\"],\n inputs: inputFieldMappingEntryArrayDeserializer(item[\"inputs\"]),\n outputs: outputFieldMappingEntryArrayDeserializer(item[\"outputs\"]),\n defaultLanguageCode: item[\"defaultLanguageCode\"],\n textSplitMode: item[\"textSplitMode\"],\n maximumPageLength: item[\"maximumPageLength\"],\n pageOverlapLength: item[\"pageOverlapLength\"],\n maximumPagesToTake: item[\"maximumPagesToTake\"],\n };\n}\n\n/** The language codes supported for input text by SplitSkill. 
*/\nexport enum KnownSplitSkillLanguage {\n /** Amharic */\n Am = \"am\",\n /** Bosnian */\n Bs = \"bs\",\n /** Czech */\n Cs = \"cs\",\n /** Danish */\n Da = \"da\",\n /** German */\n De = \"de\",\n /** English */\n En = \"en\",\n /** Spanish */\n Es = \"es\",\n /** Estonian */\n Et = \"et\",\n /** Finnish */\n Fi = \"fi\",\n /** French */\n Fr = \"fr\",\n /** Hebrew */\n He = \"he\",\n /** Hindi */\n Hi = \"hi\",\n /** Croatian */\n Hr = \"hr\",\n /** Hungarian */\n Hu = \"hu\",\n /** Indonesian */\n Id = \"id\",\n /** Icelandic */\n Is = \"is\",\n /** Italian */\n It = \"it\",\n /** Japanese */\n Ja = \"ja\",\n /** Korean */\n Ko = \"ko\",\n /** Latvian */\n Lv = \"lv\",\n /** Norwegian */\n Nb = \"nb\",\n /** Dutch */\n Nl = \"nl\",\n /** Polish */\n Pl = \"pl\",\n /** Portuguese (Portugal) */\n Pt = \"pt\",\n /** Portuguese (Brazil) */\n PtBr = \"pt-br\",\n /** Russian */\n Ru = \"ru\",\n /** Slovak */\n Sk = \"sk\",\n /** Slovenian */\n Sl = \"sl\",\n /** Serbian */\n Sr = \"sr\",\n /** Swedish */\n Sv = \"sv\",\n /** Turkish */\n Tr = \"tr\",\n /** Urdu */\n Ur = \"ur\",\n /** Chinese (Simplified) */\n Zh = \"zh\",\n}\n\n/**\n * The language codes supported for input text by SplitSkill. 
\\\n * {@link KnownSplitSkillLanguage} can be used interchangeably with SplitSkillLanguage,\n * this enum contains the known values that the service supports.\n * ### Known values supported by the service\n * **am**: Amharic \\\n * **bs**: Bosnian \\\n * **cs**: Czech \\\n * **da**: Danish \\\n * **de**: German \\\n * **en**: English \\\n * **es**: Spanish \\\n * **et**: Estonian \\\n * **fi**: Finnish \\\n * **fr**: French \\\n * **he**: Hebrew \\\n * **hi**: Hindi \\\n * **hr**: Croatian \\\n * **hu**: Hungarian \\\n * **id**: Indonesian \\\n * **is**: Icelandic \\\n * **it**: Italian \\\n * **ja**: Japanese \\\n * **ko**: Korean \\\n * **lv**: Latvian \\\n * **nb**: Norwegian \\\n * **nl**: Dutch \\\n * **pl**: Polish \\\n * **pt**: Portuguese (Portugal) \\\n * **pt-br**: Portuguese (Brazil) \\\n * **ru**: Russian \\\n * **sk**: Slovak \\\n * **sl**: Slovenian \\\n * **sr**: Serbian \\\n * **sv**: Swedish \\\n * **tr**: Turkish \\\n * **ur**: Urdu \\\n * **zh**: Chinese (Simplified)\n */\nexport type SplitSkillLanguage = string;\n\n/** A value indicating which split mode to perform. */\nexport enum KnownTextSplitMode {\n /** Split the text into individual pages. */\n Pages = \"pages\",\n /** Split the text into individual sentences. */\n Sentences = \"sentences\",\n}\n\n/**\n * A value indicating which split mode to perform. \\\n * {@link KnownTextSplitMode} can be used interchangeably with TextSplitMode,\n * this enum contains the known values that the service supports.\n * ### Known values supported by the service\n * **pages**: Split the text into individual pages. \\\n * **sentences**: Split the text into individual sentences.\n */\nexport type TextSplitMode = string;\n\n/** A skill looks for text from a custom, user-defined list of words and phrases. */\nexport interface CustomEntityLookupSkill extends SearchIndexerSkill {\n /** A value indicating which language code to use. Default is `en`. 
*/
  defaultLanguageCode?: CustomEntityLookupSkillLanguage;
  /** Path to a JSON or CSV file containing all the target text to match against. This entity definition is read at the beginning of an indexer run. Any updates to this file during an indexer run will not take effect until subsequent runs. This config must be accessible over HTTPS. */
  entitiesDefinitionUri?: string;
  /** The inline CustomEntity definition. */
  inlineEntitiesDefinition?: CustomEntity[];
  /** A global flag for CaseSensitive. If CaseSensitive is not set in CustomEntity, this value will be the default value. */
  globalDefaultCaseSensitive?: boolean;
  /** A global flag for AccentSensitive. If AccentSensitive is not set in CustomEntity, this value will be the default value. */
  globalDefaultAccentSensitive?: boolean;
  /** A global flag for FuzzyEditDistance. If FuzzyEditDistance is not set in CustomEntity, this value will be the default value. */
  globalDefaultFuzzyEditDistance?: number;
  /** A URI fragment specifying the type of skill. */
  odatatype: "#Microsoft.Skills.Text.CustomEntityLookupSkill";
}

// Maps the client model (camelCase `odatatype`) onto the service wire shape (`@odata.type`).
export function customEntityLookupSkillSerializer(item: CustomEntityLookupSkill): any {
  return {
    "@odata.type": item["odatatype"],
    name: item["name"],
    description: item["description"],
    context: item["context"],
    inputs: inputFieldMappingEntryArraySerializer(item["inputs"]),
    outputs: outputFieldMappingEntryArraySerializer(item["outputs"]),
    defaultLanguageCode: item["defaultLanguageCode"],
    entitiesDefinitionUri: item["entitiesDefinitionUri"],
    // Null/undefined pass through unchanged; only a present array is serialized.
    inlineEntitiesDefinition: !item["inlineEntitiesDefinition"]
      ? item["inlineEntitiesDefinition"]
      : customEntityArraySerializer(item["inlineEntitiesDefinition"]),
    globalDefaultCaseSensitive: item["globalDefaultCaseSensitive"],
    globalDefaultAccentSensitive: item["globalDefaultAccentSensitive"],
    globalDefaultFuzzyEditDistance: item["globalDefaultFuzzyEditDistance"],
  };
}

// Inverse of customEntityLookupSkillSerializer: wire `@odata.type` back to `odatatype`.
export function customEntityLookupSkillDeserializer(item: any): CustomEntityLookupSkill {
  return {
    odatatype: item["@odata.type"],
    name: item["name"],
    description: item["description"],
    context: item["context"],
    inputs: inputFieldMappingEntryArrayDeserializer(item["inputs"]),
    outputs: outputFieldMappingEntryArrayDeserializer(item["outputs"]),
    defaultLanguageCode: item["defaultLanguageCode"],
    entitiesDefinitionUri: item["entitiesDefinitionUri"],
    inlineEntitiesDefinition: !item["inlineEntitiesDefinition"]
      ? item["inlineEntitiesDefinition"]
      : customEntityArrayDeserializer(item["inlineEntitiesDefinition"]),
    globalDefaultCaseSensitive: item["globalDefaultCaseSensitive"],
    globalDefaultAccentSensitive: item["globalDefaultAccentSensitive"],
    globalDefaultFuzzyEditDistance: item["globalDefaultFuzzyEditDistance"],
  };
}

/** The language codes supported for input text by CustomEntityLookupSkill. */
export enum KnownCustomEntityLookupSkillLanguage {
  /** Danish */
  Da = "da",
  /** German */
  De = "de",
  /** English */
  En = "en",
  /** Spanish */
  Es = "es",
  /** Finnish */
  Fi = "fi",
  /** French */
  Fr = "fr",
  /** Italian */
  It = "it",
  /** Korean */
  Ko = "ko",
  /** Portuguese */
  Pt = "pt",
}

/**
 * The language codes supported for input text by CustomEntityLookupSkill. 
\\\n * {@link KnownCustomEntityLookupSkillLanguage} can be used interchangeably with CustomEntityLookupSkillLanguage,\n * this enum contains the known values that the service supports.\n * ### Known values supported by the service\n * **da**: Danish \\\n * **de**: German \\\n * **en**: English \\\n * **es**: Spanish \\\n * **fi**: Finnish \\\n * **fr**: French \\\n * **it**: Italian \\\n * **ko**: Korean \\\n * **pt**: Portuguese\n */\nexport type CustomEntityLookupSkillLanguage = string;\n\nexport function customEntityArraySerializer(result: Array<CustomEntity>): any[] {\n return result.map((item) => {\n return customEntitySerializer(item);\n });\n}\n\nexport function customEntityArrayDeserializer(result: Array<CustomEntity>): any[] {\n return result.map((item) => {\n return customEntityDeserializer(item);\n });\n}\n\n/** An object that contains information about the matches that were found, and related metadata. */\nexport interface CustomEntity {\n /** The top-level entity descriptor. Matches in the skill output will be grouped by this name, and it should represent the \"normalized\" form of the text being found. */\n name: string;\n /** This field can be used as a passthrough for custom metadata about the matched text(s). The value of this field will appear with every match of its entity in the skill output. */\n description?: string;\n /** This field can be used as a passthrough for custom metadata about the matched text(s). The value of this field will appear with every match of its entity in the skill output. */\n type?: string;\n /** This field can be used as a passthrough for custom metadata about the matched text(s). The value of this field will appear with every match of its entity in the skill output. */\n subtype?: string;\n /** This field can be used as a passthrough for custom metadata about the matched text(s). The value of this field will appear with every match of its entity in the skill output. */\n id?: string;\n /** Defaults to false. 
Boolean value denoting whether comparisons with the entity name should be sensitive to character casing. Sample case insensitive matches of "Microsoft" could be: microsoft, microSoft, MICROSOFT. */
  caseSensitive?: boolean;
  /** Defaults to false. Boolean value denoting whether comparisons with the entity name should be sensitive to accent. */
  accentSensitive?: boolean;
  /** Defaults to 0. Maximum value of 5. Denotes the acceptable number of divergent characters that would still constitute a match with the entity name. The smallest possible fuzziness for any given match is returned. For instance, if the edit distance is set to 3, "Windows10" would still match "Windows", "Windows10" and "Windows 7". When case sensitivity is set to false, case differences do NOT count towards fuzziness tolerance, but otherwise do. */
  fuzzyEditDistance?: number;
  /** Changes the default case sensitivity value for this entity. It can be used to change the default value of all aliases caseSensitive values. */
  defaultCaseSensitive?: boolean;
  /** Changes the default accent sensitivity value for this entity. It can be used to change the default value of all aliases accentSensitive values. */
  defaultAccentSensitive?: boolean;
  /** Changes the default fuzzy edit distance value for this entity. It can be used to change the default value of all aliases fuzzyEditDistance values. */
  defaultFuzzyEditDistance?: number;
  /** An array of complex objects that can be used to specify alternative spellings or synonyms to the root entity name. 
*/\n aliases?: CustomEntityAlias[];\n}\n\nexport function customEntitySerializer(item: CustomEntity): any {\n return {\n name: item[\"name\"],\n description: item[\"description\"],\n type: item[\"type\"],\n subtype: item[\"subtype\"],\n id: item[\"id\"],\n caseSensitive: item[\"caseSensitive\"],\n accentSensitive: item[\"accentSensitive\"],\n fuzzyEditDistance: item[\"fuzzyEditDistance\"],\n defaultCaseSensitive: item[\"defaultCaseSensitive\"],\n defaultAccentSensitive: item[\"defaultAccentSensitive\"],\n defaultFuzzyEditDistance: item[\"defaultFuzzyEditDistance\"],\n aliases: !item[\"aliases\"] ? item[\"aliases\"] : customEntityAliasArraySerializer(item[\"aliases\"]),\n };\n}\n\nexport function customEntityDeserializer(item: any): CustomEntity {\n return {\n name: item[\"name\"],\n description: item[\"description\"],\n type: item[\"type\"],\n subtype: item[\"subtype\"],\n id: item[\"id\"],\n caseSensitive: item[\"caseSensitive\"],\n accentSensitive: item[\"accentSensitive\"],\n fuzzyEditDistance: item[\"fuzzyEditDistance\"],\n defaultCaseSensitive: item[\"defaultCaseSensitive\"],\n defaultAccentSensitive: item[\"defaultAccentSensitive\"],\n defaultFuzzyEditDistance: item[\"defaultFuzzyEditDistance\"],\n aliases: !item[\"aliases\"]\n ? item[\"aliases\"]\n : customEntityAliasArrayDeserializer(item[\"aliases\"]),\n };\n}\n\nexport function customEntityAliasArraySerializer(result: Array<CustomEntityAlias>): any[] {\n return result.map((item) => {\n return customEntityAliasSerializer(item);\n });\n}\n\nexport function customEntityAliasArrayDeserializer(result: Array<CustomEntityAlias>): any[] {\n return result.map((item) => {\n return customEntityAliasDeserializer(item);\n });\n}\n\n/** A complex object that can be used to specify alternative spellings or synonyms to the root entity name. */\nexport interface CustomEntityAlias {\n /** The text of the alias. */\n text: string;\n /** Determine if the alias is case sensitive. 
*/
  caseSensitive?: boolean;
  /** Determine if the alias is accent sensitive. */
  accentSensitive?: boolean;
  /** Determine the fuzzy edit distance of the alias. */
  fuzzyEditDistance?: number;
}

// Client model and wire shape share the same property names here, so both directions
// are plain field-by-field copies.
export function customEntityAliasSerializer(item: CustomEntityAlias): any {
  return {
    text: item["text"],
    caseSensitive: item["caseSensitive"],
    accentSensitive: item["accentSensitive"],
    fuzzyEditDistance: item["fuzzyEditDistance"],
  };
}

export function customEntityAliasDeserializer(item: any): CustomEntityAlias {
  return {
    text: item["text"],
    caseSensitive: item["caseSensitive"],
    accentSensitive: item["accentSensitive"],
    fuzzyEditDistance: item["fuzzyEditDistance"],
  };
}

/** A skill to translate text from one language to another. */
export interface TextTranslationSkill extends SearchIndexerSkill {
  /** The language code to translate documents into for documents that don't specify the to language explicitly. */
  defaultToLanguageCode: TextTranslationSkillLanguage;
  /** The language code to translate documents from for documents that don't specify the from language explicitly. */
  defaultFromLanguageCode?: TextTranslationSkillLanguage;
  /** The language code to translate documents from when neither the fromLanguageCode input nor the defaultFromLanguageCode parameter are provided, and the automatic language detection is unsuccessful. Default is `en`. */
  suggestedFrom?: TextTranslationSkillLanguage;
  /** A URI fragment specifying the type of skill. 
*/
  odatatype: "#Microsoft.Skills.Text.TranslationSkill";
}

// Maps the client model (camelCase `odatatype`) onto the service wire shape (`@odata.type`).
export function textTranslationSkillSerializer(item: TextTranslationSkill): any {
  return {
    "@odata.type": item["odatatype"],
    name: item["name"],
    description: item["description"],
    context: item["context"],
    inputs: inputFieldMappingEntryArraySerializer(item["inputs"]),
    outputs: outputFieldMappingEntryArraySerializer(item["outputs"]),
    defaultToLanguageCode: item["defaultToLanguageCode"],
    defaultFromLanguageCode: item["defaultFromLanguageCode"],
    suggestedFrom: item["suggestedFrom"],
  };
}

// Inverse of textTranslationSkillSerializer: wire `@odata.type` back to `odatatype`.
export function textTranslationSkillDeserializer(item: any): TextTranslationSkill {
  return {
    odatatype: item["@odata.type"],
    name: item["name"],
    description: item["description"],
    context: item["context"],
    inputs: inputFieldMappingEntryArrayDeserializer(item["inputs"]),
    outputs: outputFieldMappingEntryArrayDeserializer(item["outputs"]),
    defaultToLanguageCode: item["defaultToLanguageCode"],
    defaultFromLanguageCode: item["defaultFromLanguageCode"],
    suggestedFrom: item["suggestedFrom"],
  };
}

/** The language codes supported for input text by TextTranslationSkill. 
*/
export enum KnownTextTranslationSkillLanguage {
  /** Afrikaans */
  Af = "af",
  /** Arabic */
  Ar = "ar",
  /** Bangla */
  Bn = "bn",
  /** Bosnian (Latin) */
  Bs = "bs",
  /** Bulgarian */
  Bg = "bg",
  /** Cantonese (Traditional) */
  Yue = "yue",
  /** Catalan */
  Ca = "ca",
  /** Chinese Simplified */
  ZhHans = "zh-Hans",
  /** Chinese Traditional */
  ZhHant = "zh-Hant",
  /** Croatian */
  Hr = "hr",
  /** Czech */
  Cs = "cs",
  /** Danish */
  Da = "da",
  /** Dutch */
  Nl = "nl",
  /** English */
  En = "en",
  /** Estonian */
  Et = "et",
  /** Fijian */
  Fj = "fj",
  /** Filipino */
  Fil = "fil",
  /** Finnish */
  Fi = "fi",
  /** French */
  Fr = "fr",
  /** German */
  De = "de",
  /** Greek */
  El = "el",
  /** Haitian Creole */
  Ht = "ht",
  /** Hebrew */
  He = "he",
  /** Hindi */
  Hi = "hi",
  /** Hmong Daw */
  Mww = "mww",
  /** Hungarian */
  Hu = "hu",
  /** Icelandic */
  Is = "is",
  /** Indonesian */
  Id = "id",
  /** Italian */
  It = "it",
  /** Japanese */
  Ja = "ja",
  /** Kiswahili */
  Sw = "sw",
  /** Klingon */
  Tlh = "tlh",
  /** Klingon (Latin script) */
  TlhLatn = "tlh-Latn",
  /** Klingon (Klingon script) */
  TlhPiqd = "tlh-Piqd",
  /** Korean */
  Ko = "ko",
  /** Latvian */
  Lv = "lv",
  /** Lithuanian */
  Lt = "lt",
  /** Malagasy */
  Mg = "mg",
  /** Malay */
  Ms = "ms",
  /** Maltese */
  Mt = "mt",
  /** Norwegian */
  Nb = "nb",
  /** Persian */
  Fa = "fa",
  /** Polish */
  Pl = "pl",
  /** Portuguese */
  Pt = "pt",
  /** Portuguese (Brazil) */
  PtBr = "pt-br",
  /** Portuguese (Portugal) */
  PtPT = "pt-PT",
  /** Queretaro Otomi */
  Otq = "otq",
  /** Romanian */
  Ro = "ro",
  /** Russian */
  Ru = "ru",
  /** Samoan */
  Sm = "sm",
  /** Serbian (Cyrillic) */
  SrCyrl = "sr-Cyrl",
  /** Serbian (Latin) */
  SrLatn = "sr-Latn",
  /** Slovak */
  Sk = "sk",
  /** Slovenian */
  Sl = "sl",
  /** Spanish */
  Es = "es",
  /** Swedish */
  Sv = "sv",
  /** Tahitian */
  Ty = "ty",
  /** Tamil */
  Ta = "ta",
  /** Telugu */
  Te = "te",
  /** Thai */
  Th = "th",
  /** Tongan */
  To = "to",
  /** Turkish */
  Tr = "tr",
  /** Ukrainian */
  Uk = "uk",
  /** Urdu */
  Ur = "ur",
  /** Vietnamese */
  Vi = "vi",
  /** Welsh */
  Cy = "cy",
  /** Yucatec Maya */
  Yua = "yua",
  /** Irish */
  Ga = "ga",
  /** Kannada */
  Kn = "kn",
  /** Maori */
  Mi = "mi",
  /** Malayalam */
  Ml = "ml",
  /** Punjabi */
  Pa = "pa",
}

// NOTE(review): mixed value casing ("pt-br" vs "pt-PT", "zh-Hans") looks like the service's
// accepted wire codes — verify against service docs before normalizing.

/**
 * The language codes supported for input text by TextTranslationSkill. \
 * {@link KnownTextTranslationSkillLanguage} can be used interchangeably with TextTranslationSkillLanguage,
 * this enum contains the known values that the service supports.
 * ### Known values supported by the service
 * **af**: Afrikaans \
 * **ar**: Arabic \
 * **bn**: Bangla \
 * **bs**: Bosnian (Latin) \
 * **bg**: Bulgarian \
 * **yue**: Cantonese (Traditional) \
 * **ca**: Catalan \
 * **zh-Hans**: Chinese Simplified \
 * **zh-Hant**: Chinese Traditional \
 * **hr**: Croatian \
 * **cs**: Czech \
 * **da**: Danish \
 * **nl**: Dutch \
 * **en**: English \
 * **et**: Estonian \
 * **fj**: Fijian \
 * **fil**: Filipino \
 * **fi**: Finnish \
 * **fr**: French \
 * **de**: German \
 * **el**: Greek \
 * **ht**: Haitian Creole \
 * **he**: Hebrew \
 * **hi**: Hindi \
 * **mww**: Hmong Daw \
 * **hu**: Hungarian \
 * **is**: Icelandic \
 * **id**: Indonesian \
 * **it**: Italian \
 * **ja**: Japanese \
 * **sw**: Kiswahili \
 * **tlh**: Klingon \
 * **tlh-Latn**: Klingon (Latin script) \
 * **tlh-Piqd**: Klingon (Klingon script) \
 * **ko**: Korean \
 * **lv**: Latvian \
 * **lt**: Lithuanian \
 * **mg**: Malagasy \
 * **ms**: Malay \
 * **mt**: Maltese \
 * **nb**: Norwegian \
 * **fa**: Persian \
 * **pl**: Polish \
 * **pt**: Portuguese \
 * **pt-br**: Portuguese (Brazil) \
 * **pt-PT**: Portuguese (Portugal) \
 * **otq**: Queretaro Otomi \
 * **ro**: Romanian \
 * **ru**: Russian \
 * **sm**: Samoan \
 * **sr-Cyrl**: Serbian (Cyrillic) \
 * **sr-Latn**: Serbian (Latin) \
 * **sk**: Slovak \
 * **sl**: Slovenian \
 * **es**: Spanish \
 * **sv**: Swedish \
 * **ty**: Tahitian \
 * **ta**: Tamil \
 * **te**: Telugu \
 * **th**: Thai \
 * **to**: Tongan \
 * **tr**: Turkish \
 * **uk**: Ukrainian \
 * **ur**: Urdu \
 * **vi**: Vietnamese \
 * **cy**: Welsh \
 * **yua**: Yucatec Maya \
 * **ga**: Irish \
 * **kn**: Kannada \
 * **mi**: Maori \
 * **ml**: Malayalam \
 * **pa**: Punjabi
 */
export type TextTranslationSkillLanguage = string;

/** A skill that extracts content from a file within the enrichment pipeline. */
export interface DocumentExtractionSkill extends SearchIndexerSkill {
  /** The parsingMode for the skill. Will be set to 'default' if not defined. */
  parsingMode?: string;
  /** The type of data to be extracted for the skill. Will be set to 'contentAndMetadata' if not defined. */
  dataToExtract?: string;
  /** A dictionary of configurations for the skill. */
  configuration?: Record<string, any>;
  /** A URI fragment specifying the type of skill. 
*/
  odatatype: "#Microsoft.Skills.Util.DocumentExtractionSkill";
}

// Maps the client model (camelCase `odatatype`) onto the service wire shape (`@odata.type`).
export function documentExtractionSkillSerializer(item: DocumentExtractionSkill): any {
  return {
    "@odata.type": item["odatatype"],
    name: item["name"],
    description: item["description"],
    context: item["context"],
    inputs: inputFieldMappingEntryArraySerializer(item["inputs"]),
    outputs: outputFieldMappingEntryArraySerializer(item["outputs"]),
    parsingMode: item["parsingMode"],
    dataToExtract: item["dataToExtract"],
    configuration: item["configuration"],
  };
}

// Inverse of documentExtractionSkillSerializer: wire `@odata.type` back to `odatatype`.
export function documentExtractionSkillDeserializer(item: any): DocumentExtractionSkill {
  return {
    odatatype: item["@odata.type"],
    name: item["name"],
    description: item["description"],
    context: item["context"],
    inputs: inputFieldMappingEntryArrayDeserializer(item["inputs"]),
    outputs: outputFieldMappingEntryArrayDeserializer(item["outputs"]),
    parsingMode: item["parsingMode"],
    dataToExtract: item["dataToExtract"],
    // Shallow-copies the configuration dictionary (identity map over its entries);
    // null/undefined pass through unchanged.
    configuration: !item["configuration"]
      ? item["configuration"]
      : Object.fromEntries(
          Object.entries(item["configuration"]).map(([k1, p1]: [string, any]) => [k1, p1]),
        ),
  };
}

/** A skill that extracts content and layout information, via Azure AI Services, from files within the enrichment pipeline. */
export interface DocumentIntelligenceLayoutSkill extends SearchIndexerSkill {
  /** Controls the output format. Default is 'markdown'. */
  outputFormat?: DocumentIntelligenceLayoutSkillOutputFormat;
  /** Controls the cardinality of the output produced by the skill. Default is 'oneToMany'. */
  outputMode?: DocumentIntelligenceLayoutSkillOutputMode;
  /** The depth of headers in the markdown output. Default is h6. */
  markdownHeaderDepth?: DocumentIntelligenceLayoutSkillMarkdownHeaderDepth;
  /** Controls the cardinality of the content extracted from the document by the skill. 
*/
  extractionOptions?: DocumentIntelligenceLayoutSkillExtractionOptions[];
  /** Controls the cardinality for chunking the content. */
  chunkingProperties?: DocumentIntelligenceLayoutSkillChunkingProperties;
  /** A URI fragment specifying the type of skill. */
  odatatype: "#Microsoft.Skills.Util.DocumentIntelligenceLayoutSkill";
}

// Maps the client model (camelCase `odatatype`) onto the service wire shape (`@odata.type`).
export function documentIntelligenceLayoutSkillSerializer(
  item: DocumentIntelligenceLayoutSkill,
): any {
  return {
    "@odata.type": item["odatatype"],
    name: item["name"],
    description: item["description"],
    context: item["context"],
    inputs: inputFieldMappingEntryArraySerializer(item["inputs"]),
    outputs: outputFieldMappingEntryArraySerializer(item["outputs"]),
    outputFormat: item["outputFormat"],
    outputMode: item["outputMode"],
    markdownHeaderDepth: item["markdownHeaderDepth"],
    // Extensible-enum strings are copied as-is (identity map); null/undefined pass through.
    extractionOptions: !item["extractionOptions"]
      ? item["extractionOptions"]
      : item["extractionOptions"].map((p: any) => {
          return p;
        }),
    chunkingProperties: !item["chunkingProperties"]
      ? item["chunkingProperties"]
      : documentIntelligenceLayoutSkillChunkingPropertiesSerializer(item["chunkingProperties"]),
  };
}

// Inverse of documentIntelligenceLayoutSkillSerializer: wire `@odata.type` back to `odatatype`.
export function documentIntelligenceLayoutSkillDeserializer(
  item: any,
): DocumentIntelligenceLayoutSkill {
  return {
    odatatype: item["@odata.type"],
    name: item["name"],
    description: item["description"],
    context: item["context"],
    inputs: inputFieldMappingEntryArrayDeserializer(item["inputs"]),
    outputs: outputFieldMappingEntryArrayDeserializer(item["outputs"]),
    outputFormat: item["outputFormat"],
    outputMode: item["outputMode"],
    markdownHeaderDepth: item["markdownHeaderDepth"],
    extractionOptions: !item["extractionOptions"]
      ? item["extractionOptions"]
      : item["extractionOptions"].map((p1: any) => {
          return p1;
        }),
    chunkingProperties: !item["chunkingProperties"]
      ? item["chunkingProperties"]
      : documentIntelligenceLayoutSkillChunkingPropertiesDeserializer(item["chunkingProperties"]),
  };
}

/** Controls the cardinality of the output format. Default is 'markdown'. */
export enum KnownDocumentIntelligenceLayoutSkillOutputFormat {
  /** Specify the format of the output as text. */
  Text = "text",
  /** Specify the format of the output as markdown. */
  Markdown = "markdown",
}

/**
 * Controls the cardinality of the output format. Default is 'markdown'. \
 * {@link KnownDocumentIntelligenceLayoutSkillOutputFormat} can be used interchangeably with DocumentIntelligenceLayoutSkillOutputFormat,
 * this enum contains the known values that the service supports.
 * ### Known values supported by the service
 * **text**: Specify the format of the output as text. \
 * **markdown**: Specify the format of the output as markdown.
 */
export type DocumentIntelligenceLayoutSkillOutputFormat = string;

/** Controls the cardinality of the output produced by the skill. Default is 'oneToMany'. */
export enum KnownDocumentIntelligenceLayoutSkillOutputMode {
  /** Specify that the output should be parsed as 'oneToMany'. */
  OneToMany = "oneToMany",
}

/**
 * Controls the cardinality of the output produced by the skill. Default is 'oneToMany'. \
 * {@link KnownDocumentIntelligenceLayoutSkillOutputMode} can be used interchangeably with DocumentIntelligenceLayoutSkillOutputMode,
 * this enum contains the known values that the service supports.
 * ### Known values supported by the service
 * **oneToMany**: Specify that the output should be parsed as 'oneToMany'.
 */
export type DocumentIntelligenceLayoutSkillOutputMode = string;

/** The depth of headers in the markdown output. Default is h6. */
export enum KnownDocumentIntelligenceLayoutSkillMarkdownHeaderDepth {
  /** Header level 1. */
  H1 = "h1",
  /** Header level 2. */
  H2 = "h2",
  /** Header level 3. */
  H3 = "h3",
  /** Header level 4. 
*/
  H4 = "h4",
  /** Header level 5. */
  H5 = "h5",
  /** Header level 6. */
  H6 = "h6",
}

/**
 * The depth of headers in the markdown output. Default is h6. \
 * {@link KnownDocumentIntelligenceLayoutSkillMarkdownHeaderDepth} can be used interchangeably with DocumentIntelligenceLayoutSkillMarkdownHeaderDepth,
 * this enum contains the known values that the service supports.
 * ### Known values supported by the service
 * **h1**: Header level 1. \
 * **h2**: Header level 2. \
 * **h3**: Header level 3. \
 * **h4**: Header level 4. \
 * **h5**: Header level 5. \
 * **h6**: Header level 6.
 */
export type DocumentIntelligenceLayoutSkillMarkdownHeaderDepth = string;

/** Controls the cardinality of the content extracted from the document by the skill. */
export enum KnownDocumentIntelligenceLayoutSkillExtractionOptions {
  /** Specify that image content should be extracted from the document. */
  Images = "images",
  /** Specify that location metadata should be extracted from the document. */
  LocationMetadata = "locationMetadata",
}

/**
 * Controls the cardinality of the content extracted from the document by the skill. \
 * {@link KnownDocumentIntelligenceLayoutSkillExtractionOptions} can be used interchangeably with DocumentIntelligenceLayoutSkillExtractionOptions,
 * this enum contains the known values that the service supports.
 * ### Known values supported by the service
 * **images**: Specify that image content should be extracted from the document. \
 * **locationMetadata**: Specify that location metadata should be extracted from the document.
 */
export type DocumentIntelligenceLayoutSkillExtractionOptions = string;

/** Controls the cardinality for chunking the content. */
export interface DocumentIntelligenceLayoutSkillChunkingProperties {
  /** The unit of the chunk. */
  unit?: DocumentIntelligenceLayoutSkillChunkingUnit;
  /** The maximum chunk length in characters. Default is 500. 
*/
  maximumLength?: number;
  /** The length of overlap provided between two text chunks. Default is 0. */
  overlapLength?: number;
}

// Client model and wire shape share the same property names here, so both directions
// are plain field-by-field copies.
export function documentIntelligenceLayoutSkillChunkingPropertiesSerializer(
  item: DocumentIntelligenceLayoutSkillChunkingProperties,
): any {
  return {
    unit: item["unit"],
    maximumLength: item["maximumLength"],
    overlapLength: item["overlapLength"],
  };
}

export function documentIntelligenceLayoutSkillChunkingPropertiesDeserializer(
  item: any,
): DocumentIntelligenceLayoutSkillChunkingProperties {
  return {
    unit: item["unit"],
    maximumLength: item["maximumLength"],
    overlapLength: item["overlapLength"],
  };
}

/** Controls the cardinality of the chunk unit. Default is 'characters' */
export enum KnownDocumentIntelligenceLayoutSkillChunkingUnit {
  /** Specifies chunk by characters. */
  Characters = "characters",
}

/**
 * Controls the cardinality of the chunk unit. Default is 'characters' \
 * {@link KnownDocumentIntelligenceLayoutSkillChunkingUnit} can be used interchangeably with DocumentIntelligenceLayoutSkillChunkingUnit,
 * this enum contains the known values that the service supports.
 * ### Known values supported by the service
 * **characters**: Specifies chunk by characters.
 */
export type DocumentIntelligenceLayoutSkillChunkingUnit = string;

/** A skill that can call a Web API endpoint, allowing you to extend a skillset by having it call your custom code. */
export interface WebApiSkill extends SearchIndexerSkill {
  /** The url for the Web API. */
  uri: string;
  /** The headers required to make the http request. */
  httpHeaders?: WebApiHttpHeaders;
  /** The method for the http request. */
  httpMethod?: string;
  /** The desired timeout for the request. Default is 30 seconds. */
  timeout?: string;
  /** The desired batch size which indicates number of documents. 
*/
  batchSize?: number;
  /** If set, the number of parallel calls that can be made to the Web API. */
  degreeOfParallelism?: number;
  /** Applies to custom skills that connect to external code in an Azure function or some other application that provides the transformations. This value should be the application ID created for the function or app when it was registered with Azure Active Directory. When specified, the custom skill connects to the function or app using a managed ID (either system or user-assigned) of the search service and the access token of the function or app, using this value as the resource id for creating the scope of the access token. */
  authResourceId?: string;
  /** The user-assigned managed identity used for outbound connections. If an authResourceId is provided and it's not specified, the system-assigned managed identity is used. On updates to the indexer, if the identity is unspecified, the value remains unchanged. If set to "none", the value of this property is cleared. */
  authIdentity?: SearchIndexerDataIdentityUnion;
  /** A URI fragment specifying the type of skill. */
  odatatype: "#Microsoft.Skills.Custom.WebApiSkill";
}

// Maps the client model (camelCase `odatatype`) onto the service wire shape (`@odata.type`).
export function webApiSkillSerializer(item: WebApiSkill): any {
  return {
    "@odata.type": item["odatatype"],
    name: item["name"],
    description: item["description"],
    context: item["context"],
    inputs: inputFieldMappingEntryArraySerializer(item["inputs"]),
    outputs: outputFieldMappingEntryArraySerializer(item["outputs"]),
    uri: item["uri"],
    // Null/undefined pass through unchanged; only present values are serialized.
    httpHeaders: !item["httpHeaders"]
      ? item["httpHeaders"]
      : webApiHttpHeadersSerializer(item["httpHeaders"]),
    httpMethod: item["httpMethod"],
    timeout: item["timeout"],
    batchSize: item["batchSize"],
    degreeOfParallelism: item["degreeOfParallelism"],
    authResourceId: item["authResourceId"],
    authIdentity: !item["authIdentity"]
      ? item["authIdentity"]
      : searchIndexerDataIdentityUnionSerializer(item["authIdentity"]),
  };
}

// Inverse of webApiSkillSerializer: wire `@odata.type` back to `odatatype`.
export function webApiSkillDeserializer(item: any): WebApiSkill {
  return {
    odatatype: item["@odata.type"],
    name: item["name"],
    description: item["description"],
    context: item["context"],
    inputs: inputFieldMappingEntryArrayDeserializer(item["inputs"]),
    outputs: outputFieldMappingEntryArrayDeserializer(item["outputs"]),
    uri: item["uri"],
    httpHeaders: !item["httpHeaders"]
      ? item["httpHeaders"]
      : webApiHttpHeadersDeserializer(item["httpHeaders"]),
    httpMethod: item["httpMethod"],
    timeout: item["timeout"],
    batchSize: item["batchSize"],
    degreeOfParallelism: item["degreeOfParallelism"],
    authResourceId: item["authResourceId"],
    authIdentity: !item["authIdentity"]
      ? item["authIdentity"]
      : searchIndexerDataIdentityUnionDeserializer(item["authIdentity"]),
  };
}

/** A dictionary of http request headers. */
export interface WebApiHttpHeaders {
  /** Additional properties */
  additionalProperties?: Record<string, string>;
}

// On the wire the headers are a flat dictionary; the client model wraps them in
// `additionalProperties`.
export function webApiHttpHeadersSerializer(item: WebApiHttpHeaders): any {
  return { ...serializeRecord(item.additionalProperties ?? {}) };
}

export function webApiHttpHeadersDeserializer(item: any): WebApiHttpHeaders {
  return {
    // NOTE(review): reuses serializeRecord with an empty exclude list to shallow-copy all
    // wire properties into the wrapper — verify the helper's semantics match this intent.
    additionalProperties: serializeRecord(item, []),
  };
}

/** Allows you to generate a vector embedding for a given text input using the Azure OpenAI resource. */
export interface AzureOpenAIEmbeddingSkill extends SearchIndexerSkill {
  /** The resource URI of the Azure OpenAI resource. */
  resourceUrl?: string;
  /** ID of the Azure OpenAI model deployment on the designated resource. */
  deploymentId?: string;
  /** API key of the designated Azure OpenAI resource. */
  apiKey?: string;
  /** The user-assigned managed identity used for outbound connections. 
*/
  authIdentity?: SearchIndexerDataIdentityUnion;
  /** The name of the embedding model that is deployed at the provided deploymentId path. */
  modelName?: AzureOpenAIModelName;
  /** The number of dimensions the resulting output embeddings should have. Only supported in text-embedding-3 and later models. */
  dimensions?: number;
  /** A URI fragment specifying the type of skill. */
  odatatype: "#Microsoft.Skills.Text.AzureOpenAIEmbeddingSkill";
}

// Maps the client model onto the service wire shape. Note the renamed property:
// client `resourceUrl` <-> wire `resourceUri` (and `odatatype` <-> `@odata.type`).
export function azureOpenAIEmbeddingSkillSerializer(item: AzureOpenAIEmbeddingSkill): any {
  return {
    "@odata.type": item["odatatype"],
    name: item["name"],
    description: item["description"],
    context: item["context"],
    inputs: inputFieldMappingEntryArraySerializer(item["inputs"]),
    outputs: outputFieldMappingEntryArraySerializer(item["outputs"]),
    resourceUri: item["resourceUrl"],
    deploymentId: item["deploymentId"],
    apiKey: item["apiKey"],
    authIdentity: !item["authIdentity"]
      ? item["authIdentity"]
      : searchIndexerDataIdentityUnionSerializer(item["authIdentity"]),
    modelName: item["modelName"],
    dimensions: item["dimensions"],
  };
}

// Inverse of azureOpenAIEmbeddingSkillSerializer: wire `resourceUri` back to `resourceUrl`.
export function azureOpenAIEmbeddingSkillDeserializer(item: any): AzureOpenAIEmbeddingSkill {
  return {
    odatatype: item["@odata.type"],
    name: item["name"],
    description: item["description"],
    context: item["context"],
    inputs: inputFieldMappingEntryArrayDeserializer(item["inputs"]),
    outputs: outputFieldMappingEntryArrayDeserializer(item["outputs"]),
    resourceUrl: item["resourceUri"],
    deploymentId: item["deploymentId"],
    apiKey: item["apiKey"],
    authIdentity: !item["authIdentity"]
      ? item["authIdentity"]
      : searchIndexerDataIdentityUnionDeserializer(item["authIdentity"]),
    modelName: item["modelName"],
    dimensions: item["dimensions"],
  };
}

/** A skill that leverages Azure AI Content Understanding to process and extract structured insights from documents, enabling enriched, searchable content for enhanced document indexing and retrieval. */
export interface ContentUnderstandingSkill extends SearchIndexerSkill {
  /** Controls the cardinality of the content extracted from the document by the skill. */
  extractionOptions?: ContentUnderstandingSkillExtractionOptions[];
  /** Controls the cardinality for chunking the content. */
  chunkingProperties?: ContentUnderstandingSkillChunkingProperties;
  /** A URI fragment specifying the type of skill. */
  odatatype: "#Microsoft.Skills.Util.ContentUnderstandingSkill";
}

// Maps the client model (camelCase `odatatype`) onto the service wire shape (`@odata.type`).
export function contentUnderstandingSkillSerializer(item: ContentUnderstandingSkill): any {
  return {
    "@odata.type": item["odatatype"],
    name: item["name"],
    description: item["description"],
    context: item["context"],
    inputs: inputFieldMappingEntryArraySerializer(item["inputs"]),
    outputs: outputFieldMappingEntryArraySerializer(item["outputs"]),
    // Extensible-enum strings are copied as-is (identity map); null/undefined pass through.
    extractionOptions: !item["extractionOptions"]
      ? item["extractionOptions"]
      : item["extractionOptions"].map((p: any) => {
          return p;
        }),
    chunkingProperties: !item["chunkingProperties"]
      ? item["chunkingProperties"]
      : contentUnderstandingSkillChunkingPropertiesSerializer(item["chunkingProperties"]),
  };
}

// Inverse of contentUnderstandingSkillSerializer: wire `@odata.type` back to `odatatype`.
export function contentUnderstandingSkillDeserializer(item: any): ContentUnderstandingSkill {
  return {
    odatatype: item["@odata.type"],
    name: item["name"],
    description: item["description"],
    context: item["context"],
    inputs: inputFieldMappingEntryArrayDeserializer(item["inputs"]),
    outputs: outputFieldMappingEntryArrayDeserializer(item["outputs"]),
    extractionOptions: !item["extractionOptions"]
      ? item["extractionOptions"]
      : item["extractionOptions"].map((p1: any) => {
          return p1;
        }),
    chunkingProperties: !item["chunkingProperties"]
      ? item["chunkingProperties"]
      : contentUnderstandingSkillChunkingPropertiesDeserializer(item["chunkingProperties"]),
  };
}

/** Controls the cardinality of the content extracted from the document by the skill. */
export enum KnownContentUnderstandingSkillExtractionOptions {
  /** Specify that image content should be extracted from the document. */
  Images = "images",
  /** Specify that location metadata should be extracted from the document. */
  LocationMetadata = "locationMetadata",
}

/**
 * Controls the cardinality of the content extracted from the document by the skill. \
 * {@link KnownContentUnderstandingSkillExtractionOptions} can be used interchangeably with ContentUnderstandingSkillExtractionOptions,
 * this enum contains the known values that the service supports.
 * ### Known values supported by the service
 * **images**: Specify that image content should be extracted from the document. \
 * **locationMetadata**: Specify that location metadata should be extracted from the document.
 */
export type ContentUnderstandingSkillExtractionOptions = string;

/** Controls the cardinality for chunking the content. */
export interface ContentUnderstandingSkillChunkingProperties {
  /** The unit of the chunk. 
*/\n unit?: ContentUnderstandingSkillChunkingUnit;\n /** The maximum chunk length in characters. Default is 500. */\n maximumLength?: number;\n /** The length of overlap provided between two text chunks. Default is 0. */\n overlapLength?: number;\n}\n\nexport function contentUnderstandingSkillChunkingPropertiesSerializer(\n item: ContentUnderstandingSkillChunkingProperties,\n): any {\n return {\n unit: item[\"unit\"],\n maximumLength: item[\"maximumLength\"],\n overlapLength: item[\"overlapLength\"],\n };\n}\n\nexport function contentUnderstandingSkillChunkingPropertiesDeserializer(\n item: any,\n): ContentUnderstandingSkillChunkingProperties {\n return {\n unit: item[\"unit\"],\n maximumLength: item[\"maximumLength\"],\n overlapLength: item[\"overlapLength\"],\n };\n}\n\n/** Controls the cardinality of the chunk unit. Default is 'characters' */\nexport enum KnownContentUnderstandingSkillChunkingUnit {\n /** Specifies chunk by characters. */\n Characters = \"characters\",\n}\n\n/**\n * Controls the cardinality of the chunk unit. Default is 'characters' \\\n * {@link KnownContentUnderstandingSkillChunkingUnit} can be used interchangeably with ContentUnderstandingSkillChunkingUnit,\n * this enum contains the known values that the service supports.\n * ### Known values supported by the service\n * **characters**: Specifies chunk by characters.\n */\nexport type ContentUnderstandingSkillChunkingUnit = string;\n\n/** A skill that calls a language model via Azure AI Foundry's Chat Completions endpoint. */\nexport interface ChatCompletionSkill extends SearchIndexerSkill {\n /** The url for the Web API. */\n uri: string;\n /** The user-assigned managed identity used for outbound connections. If an authResourceId is provided and it's not specified, the system-assigned managed identity is used. On updates to the indexer, if the identity is unspecified, the value remains unchanged. If set to \"none\", the value of this property is cleared. 
*/\n authIdentity?: SearchIndexerDataIdentityUnion;\n /** API key for authenticating to the model. Both apiKey and authIdentity cannot be specified at the same time. */\n apiKey?: string;\n /** Common language model parameters that customers can tweak. If omitted, reasonable defaults will be applied. */\n commonModelParameters?: CommonModelParameters;\n /** Open-type dictionary for model-specific parameters that should be appended to the chat completions call. Follows Azure AI Foundry's extensibility pattern. */\n extraParameters?: Record<string, any>;\n /** How extra parameters are handled by Azure AI Foundry. Default is 'error'. */\n extraParametersBehavior?: ChatCompletionExtraParametersBehavior;\n /** Determines how the LLM should format its response. Defaults to 'text' response type. */\n responseFormat?: ChatCompletionResponseFormat;\n /** A URI fragment specifying the type of skill. */\n odatatype: \"#Microsoft.Skills.Custom.ChatCompletionSkill\";\n}\n\nexport function chatCompletionSkillSerializer(item: ChatCompletionSkill): any {\n return {\n \"@odata.type\": item[\"odatatype\"],\n name: item[\"name\"],\n description: item[\"description\"],\n context: item[\"context\"],\n inputs: inputFieldMappingEntryArraySerializer(item[\"inputs\"]),\n outputs: outputFieldMappingEntryArraySerializer(item[\"outputs\"]),\n uri: item[\"uri\"],\n authIdentity: !item[\"authIdentity\"]\n ? item[\"authIdentity\"]\n : searchIndexerDataIdentityUnionSerializer(item[\"authIdentity\"]),\n apiKey: item[\"apiKey\"],\n commonModelParameters: !item[\"commonModelParameters\"]\n ? item[\"commonModelParameters\"]\n : commonModelParametersSerializer(item[\"commonModelParameters\"]),\n extraParameters: item[\"extraParameters\"],\n extraParametersBehavior: item[\"extraParametersBehavior\"],\n responseFormat: !item[\"responseFormat\"]\n ? 
item[\"responseFormat\"]\n : chatCompletionResponseFormatSerializer(item[\"responseFormat\"]),\n };\n}\n\nexport function chatCompletionSkillDeserializer(item: any): ChatCompletionSkill {\n return {\n odatatype: item[\"@odata.type\"],\n name: item[\"name\"],\n description: item[\"description\"],\n context: item[\"context\"],\n inputs: inputFieldMappingEntryArrayDeserializer(item[\"inputs\"]),\n outputs: outputFieldMappingEntryArrayDeserializer(item[\"outputs\"]),\n uri: item[\"uri\"],\n authIdentity: !item[\"authIdentity\"]\n ? item[\"authIdentity\"]\n : searchIndexerDataIdentityUnionDeserializer(item[\"authIdentity\"]),\n apiKey: item[\"apiKey\"],\n commonModelParameters: !item[\"commonModelParameters\"]\n ? item[\"commonModelParameters\"]\n : commonModelParametersDeserializer(item[\"commonModelParameters\"]),\n extraParameters: !item[\"extraParameters\"]\n ? item[\"extraParameters\"]\n : Object.fromEntries(\n Object.entries(item[\"extraParameters\"]).map(([k1, p1]: [string, any]) => [k1, p1]),\n ),\n extraParametersBehavior: item[\"extraParametersBehavior\"],\n responseFormat: !item[\"responseFormat\"]\n ? item[\"responseFormat\"]\n : chatCompletionResponseFormatDeserializer(item[\"responseFormat\"]),\n };\n}\n\n/** Common language model parameters for Chat Completions. If omitted, default values are used. */\nexport interface CommonModelParameters {\n /** The name of the model to use (e.g., 'gpt-4o', etc.). Default is null if not specified. */\n model?: string;\n /** A float in the range [-2,2] that reduces or increases likelihood of repeated tokens. Default is 0. */\n frequencyPenalty?: number;\n /** A float in the range [-2,2] that penalizes new tokens based on their existing presence. Default is 0. */\n presencePenalty?: number;\n /** Maximum number of tokens to generate. */\n maxTokens?: number;\n /** Sampling temperature. Default is 0.7. */\n temperature?: number;\n /** Random seed for controlling deterministic outputs. If omitted, randomization is used. 
*/\n seed?: number;\n /** List of stop sequences that will cut off text generation. Default is none. */\n stop?: string[];\n}\n\nexport function commonModelParametersSerializer(item: CommonModelParameters): any {\n return {\n model: item[\"model\"],\n frequencyPenalty: item[\"frequencyPenalty\"],\n presencePenalty: item[\"presencePenalty\"],\n maxTokens: item[\"maxTokens\"],\n temperature: item[\"temperature\"],\n seed: item[\"seed\"],\n stop: !item[\"stop\"]\n ? item[\"stop\"]\n : item[\"stop\"].map((p: any) => {\n return p;\n }),\n };\n}\n\nexport function commonModelParametersDeserializer(item: any): CommonModelParameters {\n return {\n model: item[\"model\"],\n frequencyPenalty: item[\"frequencyPenalty\"],\n presencePenalty: item[\"presencePenalty\"],\n maxTokens: item[\"maxTokens\"],\n temperature: item[\"temperature\"],\n seed: item[\"seed\"],\n stop: !item[\"stop\"]\n ? item[\"stop\"]\n : item[\"stop\"].map((p1: any) => {\n return p1;\n }),\n };\n}\n\n/** Specifies how 'extraParameters' should be handled by Azure AI Foundry. Defaults to 'error'. */\nexport enum KnownChatCompletionExtraParametersBehavior {\n /** Passes any extra parameters directly to the model. */\n PassThrough = \"passThrough\",\n /** Drops all extra parameters. */\n Drop = \"drop\",\n /** Raises an error if any extra parameter is present. */\n Error = \"error\",\n}\n\n/**\n * Specifies how 'extraParameters' should be handled by Azure AI Foundry. Defaults to 'error'. \\\n * {@link KnownChatCompletionExtraParametersBehavior} can be used interchangeably with ChatCompletionExtraParametersBehavior,\n * this enum contains the known values that the service supports.\n * ### Known values supported by the service\n * **passThrough**: Passes any extra parameters directly to the model. \\\n * **drop**: Drops all extra parameters. 
\\\n * **error**: Raises an error if any extra parameter is present.\n */\nexport type ChatCompletionExtraParametersBehavior = string;\n\n/** Determines how the language model's response should be serialized. Defaults to 'text'. */\nexport interface ChatCompletionResponseFormat {\n /** Specifies how the LLM should format the response. */\n type?: ChatCompletionResponseFormatType;\n /** An open dictionary for extended properties. Required if 'type' == 'json_schema' */\n chatCompletionSchemaProperties?: ChatCompletionSchemaProperties;\n}\n\nexport function chatCompletionResponseFormatSerializer(item: ChatCompletionResponseFormat): any {\n return {\n type: item[\"type\"],\n jsonSchemaProperties: !item[\"chatCompletionSchemaProperties\"]\n ? item[\"chatCompletionSchemaProperties\"]\n : chatCompletionSchemaPropertiesSerializer(item[\"chatCompletionSchemaProperties\"]),\n };\n}\n\nexport function chatCompletionResponseFormatDeserializer(item: any): ChatCompletionResponseFormat {\n return {\n type: item[\"type\"],\n chatCompletionSchemaProperties: !item[\"jsonSchemaProperties\"]\n ? item[\"jsonSchemaProperties\"]\n : chatCompletionSchemaPropertiesDeserializer(item[\"jsonSchemaProperties\"]),\n };\n}\n\n/** Specifies how the LLM should format the response. */\nexport enum KnownChatCompletionResponseFormatType {\n /** Plain text response format. */\n Text = \"text\",\n /** Arbitrary JSON object response format. */\n JsonObject = \"jsonObject\",\n /** JSON schema-adhering response format. */\n JsonSchema = \"jsonSchema\",\n}\n\n/**\n * Specifies how the LLM should format the response. \\\n * {@link KnownChatCompletionResponseFormatType} can be used interchangeably with ChatCompletionResponseFormatType,\n * this enum contains the known values that the service supports.\n * ### Known values supported by the service\n * **text**: Plain text response format. \\\n * **jsonObject**: Arbitrary JSON object response format. 
\\\n * **jsonSchema**: JSON schema-adhering response format.\n */\nexport type ChatCompletionResponseFormatType = string;\n\n/** Properties for JSON schema response format. */\nexport interface ChatCompletionSchemaProperties {\n /** Name of the json schema the model will adhere to. */\n name?: string;\n /** Description of the json schema the model will adhere to. */\n description?: string;\n /** Whether or not the model's response should use structured outputs. Default is true. */\n strict?: boolean;\n /** The schema definition. */\n schema?: ChatCompletionSchema;\n}\n\nexport function chatCompletionSchemaPropertiesSerializer(\n item: ChatCompletionSchemaProperties,\n): any {\n return {\n name: item[\"name\"],\n description: item[\"description\"],\n strict: item[\"strict\"],\n schema: !item[\"schema\"] ? item[\"schema\"] : chatCompletionSchemaSerializer(item[\"schema\"]),\n };\n}\n\nexport function chatCompletionSchemaPropertiesDeserializer(\n item: any,\n): ChatCompletionSchemaProperties {\n return {\n name: item[\"name\"],\n description: item[\"description\"],\n strict: item[\"strict\"],\n schema: !item[\"schema\"] ? item[\"schema\"] : chatCompletionSchemaDeserializer(item[\"schema\"]),\n };\n}\n\n/** Object defining the custom schema the model will use to structure its output. */\nexport interface ChatCompletionSchema {\n /** Type of schema representation. Usually 'object'. Default is 'object'. */\n type?: string;\n /** A JSON-formatted string that defines the output schema's properties and constraints for the model. */\n properties?: string;\n /** An array of the property names that are required to be part of the model's response. All properties must be included for structured outputs. */\n required?: string[];\n /** Controls whether it is allowable for an object to contain additional keys / values that were not defined in the JSON Schema. Default is false. 
*/\n additionalProperties?: boolean;\n}\n\nexport function chatCompletionSchemaSerializer(item: ChatCompletionSchema): any {\n return {\n type: item[\"type\"],\n properties: item[\"properties\"],\n required: !item[\"required\"]\n ? item[\"required\"]\n : item[\"required\"].map((p: any) => {\n return p;\n }),\n additionalProperties: item[\"additionalProperties\"],\n };\n}\n\nexport function chatCompletionSchemaDeserializer(item: any): ChatCompletionSchema {\n return {\n type: item[\"type\"],\n properties: item[\"properties\"],\n required: !item[\"required\"]\n ? item[\"required\"]\n : item[\"required\"].map((p: any) => {\n return p;\n }),\n additionalProperties: item[\"additionalProperties\"],\n };\n}\n\n/** Base type for describing any Azure AI service resource attached to a skillset. */\nexport interface CognitiveServicesAccount {\n /** The discriminator for derived types. */\n /** The discriminator possible values: #Microsoft.Azure.Search.DefaultCognitiveServices, #Microsoft.Azure.Search.CognitiveServicesByKey, #Microsoft.Azure.Search.AIServicesByKey, #Microsoft.Azure.Search.AIServicesByIdentity */\n odatatype: string;\n /** Description of the Azure AI service resource attached to a skillset. 
*/\n description?: string;\n}\n\nexport function cognitiveServicesAccountSerializer(item: CognitiveServicesAccount): any {\n return { \"@odata.type\": item[\"odatatype\"], description: item[\"description\"] };\n}\n\nexport function cognitiveServicesAccountDeserializer(item: any): CognitiveServicesAccount {\n return {\n odatatype: item[\"@odata.type\"],\n description: item[\"description\"],\n };\n}\n\n/** Alias for CognitiveServicesAccountUnion */\nexport type CognitiveServicesAccountUnion =\n | DefaultCognitiveServicesAccount\n | CognitiveServicesAccountKey\n | AIServicesAccountKey\n | AIServicesAccountIdentity\n | CognitiveServicesAccount;\n\nexport function cognitiveServicesAccountUnionSerializer(item: CognitiveServicesAccountUnion): any {\n switch (item.odatatype) {\n case \"#Microsoft.Azure.Search.DefaultCognitiveServices\":\n return defaultCognitiveServicesAccountSerializer(item as DefaultCognitiveServicesAccount);\n\n case \"#Microsoft.Azure.Search.CognitiveServicesByKey\":\n return cognitiveServicesAccountKeySerializer(item as CognitiveServicesAccountKey);\n\n case \"#Microsoft.Azure.Search.AIServicesByKey\":\n return aiServicesAccountKeySerializer(item as AIServicesAccountKey);\n\n case \"#Microsoft.Azure.Search.AIServicesByIdentity\":\n return aiServicesAccountIdentitySerializer(item as AIServicesAccountIdentity);\n\n default:\n return cognitiveServicesAccountSerializer(item);\n }\n}\n\nexport function cognitiveServicesAccountUnionDeserializer(\n item: any,\n): CognitiveServicesAccountUnion {\n switch (item[\"@odata.type\"]) {\n case \"#Microsoft.Azure.Search.DefaultCognitiveServices\":\n return defaultCognitiveServicesAccountDeserializer(item as DefaultCognitiveServicesAccount);\n\n case \"#Microsoft.Azure.Search.CognitiveServicesByKey\":\n return cognitiveServicesAccountKeyDeserializer(item as CognitiveServicesAccountKey);\n\n case \"#Microsoft.Azure.Search.AIServicesByKey\":\n return aiServicesAccountKeyDeserializer(item as AIServicesAccountKey);\n\n 
case \"#Microsoft.Azure.Search.AIServicesByIdentity\":\n return aiServicesAccountIdentityDeserializer(item as AIServicesAccountIdentity);\n\n default:\n return cognitiveServicesAccountDeserializer(item);\n }\n}\n\n/** An empty object that represents the default Azure AI service resource for a skillset. */\nexport interface DefaultCognitiveServicesAccount extends CognitiveServicesAccount {\n /** A URI fragment specifying the type of Azure AI service resource attached to a skillset. */\n odatatype: \"#Microsoft.Azure.Search.DefaultCognitiveServices\";\n}\n\nexport function defaultCognitiveServicesAccountSerializer(\n item: DefaultCognitiveServicesAccount,\n): any {\n return { \"@odata.type\": item[\"odatatype\"], description: item[\"description\"] };\n}\n\nexport function defaultCognitiveServicesAccountDeserializer(\n item: any,\n): DefaultCognitiveServicesAccount {\n return {\n odatatype: item[\"@odata.type\"],\n description: item[\"description\"],\n };\n}\n\n/** The multi-region account key of an Azure AI service resource that's attached to a skillset. */\nexport interface CognitiveServicesAccountKey extends CognitiveServicesAccount {\n /** The key used to provision the Azure AI service resource attached to a skillset. */\n key: string;\n /** A URI fragment specifying the type of Azure AI service resource attached to a skillset. */\n odatatype: \"#Microsoft.Azure.Search.CognitiveServicesByKey\";\n}\n\nexport function cognitiveServicesAccountKeySerializer(item: CognitiveServicesAccountKey): any {\n return { \"@odata.type\": item[\"odatatype\"], description: item[\"description\"], key: item[\"key\"] };\n}\n\nexport function cognitiveServicesAccountKeyDeserializer(item: any): CognitiveServicesAccountKey {\n return {\n odatatype: item[\"@odata.type\"],\n description: item[\"description\"],\n key: item[\"key\"],\n };\n}\n\n/** The account key of an Azure AI service resource that's attached to a skillset, to be used with the resource's subdomain. 
*/\nexport interface AIServicesAccountKey extends CognitiveServicesAccount {\n /** The key used to provision the Azure AI service resource attached to a skillset. */\n key: string;\n /** The subdomain/Azure AI Services endpoint url for the corresponding AI Service. */\n subdomainUrl: string;\n /** A URI fragment specifying the type of Azure AI service resource attached to a skillset. */\n odatatype: \"#Microsoft.Azure.Search.AIServicesByKey\";\n}\n\nexport function aiServicesAccountKeySerializer(item: AIServicesAccountKey): any {\n return {\n \"@odata.type\": item[\"odatatype\"],\n description: item[\"description\"],\n key: item[\"key\"],\n subdomainUrl: item[\"subdomainUrl\"],\n };\n}\n\nexport function aiServicesAccountKeyDeserializer(item: any): AIServicesAccountKey {\n return {\n odatatype: item[\"@odata.type\"],\n description: item[\"description\"],\n key: item[\"key\"],\n subdomainUrl: item[\"subdomainUrl\"],\n };\n}\n\n/** The multi-region account of an Azure AI service resource that's attached to a skillset. */\nexport interface AIServicesAccountIdentity extends CognitiveServicesAccount {\n /** The user-assigned managed identity used for connections to AI Service. If not specified, the system-assigned managed identity is used. On updates to the skillset, if the identity is unspecified, the value remains unchanged. If set to \"none\", the value of this property is cleared. */\n identity?: SearchIndexerDataIdentityUnion;\n /** The subdomain/Azure AI Services endpoint url for the corresponding AI Service. */\n subdomainUrl: string;\n /** A URI fragment specifying the type of Azure AI service resource attached to a skillset. */\n odatatype: \"#Microsoft.Azure.Search.AIServicesByIdentity\";\n}\n\nexport function aiServicesAccountIdentitySerializer(item: AIServicesAccountIdentity): any {\n return {\n \"@odata.type\": item[\"odatatype\"],\n description: item[\"description\"],\n identity: !item[\"identity\"]\n ? 
item[\"identity\"]\n : searchIndexerDataIdentityUnionSerializer(item[\"identity\"]),\n subdomainUrl: item[\"subdomainUrl\"],\n };\n}\n\nexport function aiServicesAccountIdentityDeserializer(item: any): AIServicesAccountIdentity {\n return {\n odatatype: item[\"@odata.type\"],\n description: item[\"description\"],\n identity: !item[\"identity\"]\n ? item[\"identity\"]\n : searchIndexerDataIdentityUnionDeserializer(item[\"identity\"]),\n subdomainUrl: item[\"subdomainUrl\"],\n };\n}\n\n/** Definition of additional projections to azure blob, table, or files, of enriched data. */\nexport interface SearchIndexerKnowledgeStore {\n /** The connection string to the storage account projections will be stored in. */\n storageConnectionString: string;\n /** A list of additional projections to perform during indexing. */\n projections: SearchIndexerKnowledgeStoreProjection[];\n /** The user-assigned managed identity used for connections to Azure Storage when writing knowledge store projections. If the connection string indicates an identity (ResourceId) and it's not specified, the system-assigned managed identity is used. On updates to the indexer, if the identity is unspecified, the value remains unchanged. If set to \"none\", the value of this property is cleared. */\n identity?: SearchIndexerDataIdentityUnion;\n}\n\nexport function searchIndexerKnowledgeStoreSerializer(item: SearchIndexerKnowledgeStore): any {\n return {\n storageConnectionString: item[\"storageConnectionString\"],\n projections: searchIndexerKnowledgeStoreProjectionArraySerializer(item[\"projections\"]),\n identity: !item[\"identity\"]\n ? 
item[\"identity\"]\n : searchIndexerDataIdentityUnionSerializer(item[\"identity\"]),\n };\n}\n\nexport function searchIndexerKnowledgeStoreDeserializer(item: any): SearchIndexerKnowledgeStore {\n return {\n storageConnectionString: item[\"storageConnectionString\"],\n projections: searchIndexerKnowledgeStoreProjectionArrayDeserializer(item[\"projections\"]),\n identity: !item[\"identity\"]\n ? item[\"identity\"]\n : searchIndexerDataIdentityUnionDeserializer(item[\"identity\"]),\n };\n}\n\nexport function searchIndexerKnowledgeStoreProjectionArraySerializer(\n result: Array<SearchIndexerKnowledgeStoreProjection>,\n): any[] {\n return result.map((item) => {\n return searchIndexerKnowledgeStoreProjectionSerializer(item);\n });\n}\n\nexport function searchIndexerKnowledgeStoreProjectionArrayDeserializer(\n result: Array<SearchIndexerKnowledgeStoreProjection>,\n): any[] {\n return result.map((item) => {\n return searchIndexerKnowledgeStoreProjectionDeserializer(item);\n });\n}\n\n/** Container object for various projection selectors. */\nexport interface SearchIndexerKnowledgeStoreProjection {\n /** Projections to Azure Table storage. */\n tables?: SearchIndexerKnowledgeStoreTableProjectionSelector[];\n /** Projections to Azure Blob storage. */\n objects?: SearchIndexerKnowledgeStoreObjectProjectionSelector[];\n /** Projections to Azure File storage. */\n files?: SearchIndexerKnowledgeStoreFileProjectionSelector[];\n}\n\nexport function searchIndexerKnowledgeStoreProjectionSerializer(\n item: SearchIndexerKnowledgeStoreProjection,\n): any {\n return {\n tables: !item[\"tables\"]\n ? item[\"tables\"]\n : searchIndexerKnowledgeStoreTableProjectionSelectorArraySerializer(item[\"tables\"]),\n objects: !item[\"objects\"]\n ? item[\"objects\"]\n : searchIndexerKnowledgeStoreObjectProjectionSelectorArraySerializer(item[\"objects\"]),\n files: !item[\"files\"]\n ? 
item[\"files\"]\n : searchIndexerKnowledgeStoreFileProjectionSelectorArraySerializer(item[\"files\"]),\n };\n}\n\nexport function searchIndexerKnowledgeStoreProjectionDeserializer(\n item: any,\n): SearchIndexerKnowledgeStoreProjection {\n return {\n tables: !item[\"tables\"]\n ? item[\"tables\"]\n : searchIndexerKnowledgeStoreTableProjectionSelectorArrayDeserializer(item[\"tables\"]),\n objects: !item[\"objects\"]\n ? item[\"objects\"]\n : searchIndexerKnowledgeStoreObjectProjectionSelectorArrayDeserializer(item[\"objects\"]),\n files: !item[\"files\"]\n ? item[\"files\"]\n : searchIndexerKnowledgeStoreFileProjectionSelectorArrayDeserializer(item[\"files\"]),\n };\n}\n\nexport function searchIndexerKnowledgeStoreTableProjectionSelectorArraySerializer(\n result: Array<SearchIndexerKnowledgeStoreTableProjectionSelector>,\n): any[] {\n return result.map((item) => {\n return searchIndexerKnowledgeStoreTableProjectionSelectorSerializer(item);\n });\n}\n\nexport function searchIndexerKnowledgeStoreTableProjectionSelectorArrayDeserializer(\n result: Array<SearchIndexerKnowledgeStoreTableProjectionSelector>,\n): any[] {\n return result.map((item) => {\n return searchIndexerKnowledgeStoreTableProjectionSelectorDeserializer(item);\n });\n}\n\n/** Description for what data to store in Azure Tables. */\nexport interface SearchIndexerKnowledgeStoreTableProjectionSelector extends SearchIndexerKnowledgeStoreProjectionSelector {\n /** Name of generated key to store projection under. */\n generatedKeyName: string;\n /** Name of the Azure table to store projected data in. */\n tableName: string;\n}\n\nexport function searchIndexerKnowledgeStoreTableProjectionSelectorSerializer(\n item: SearchIndexerKnowledgeStoreTableProjectionSelector,\n): any {\n return {\n referenceKeyName: item[\"referenceKeyName\"],\n generatedKeyName: item[\"generatedKeyName\"],\n source: item[\"source\"],\n sourceContext: item[\"sourceContext\"],\n inputs: !item[\"inputs\"]\n ? 
item[\"inputs\"]\n : inputFieldMappingEntryArraySerializer(item[\"inputs\"]),\n tableName: item[\"tableName\"],\n };\n}\n\nexport function searchIndexerKnowledgeStoreTableProjectionSelectorDeserializer(\n item: any,\n): SearchIndexerKnowledgeStoreTableProjectionSelector {\n return {\n referenceKeyName: item[\"referenceKeyName\"],\n generatedKeyName: item[\"generatedKeyName\"],\n source: item[\"source\"],\n sourceContext: item[\"sourceContext\"],\n inputs: !item[\"inputs\"]\n ? item[\"inputs\"]\n : inputFieldMappingEntryArrayDeserializer(item[\"inputs\"]),\n tableName: item[\"tableName\"],\n };\n}\n\nexport function searchIndexerKnowledgeStoreObjectProjectionSelectorArraySerializer(\n result: Array<SearchIndexerKnowledgeStoreObjectProjectionSelector>,\n): any[] {\n return result.map((item) => {\n return searchIndexerKnowledgeStoreObjectProjectionSelectorSerializer(item);\n });\n}\n\nexport function searchIndexerKnowledgeStoreObjectProjectionSelectorArrayDeserializer(\n result: Array<SearchIndexerKnowledgeStoreObjectProjectionSelector>,\n): any[] {\n return result.map((item) => {\n return searchIndexerKnowledgeStoreObjectProjectionSelectorDeserializer(item);\n });\n}\n\n/** Projection definition for what data to store in Azure Blob. */\nexport interface SearchIndexerKnowledgeStoreObjectProjectionSelector extends SearchIndexerKnowledgeStoreBlobProjectionSelector {}\n\nexport function searchIndexerKnowledgeStoreObjectProjectionSelectorSerializer(\n item: SearchIndexerKnowledgeStoreObjectProjectionSelector,\n): any {\n return {\n storageContainer: item[\"storageContainer\"],\n referenceKeyName: item[\"referenceKeyName\"],\n generatedKeyName: item[\"generatedKeyName\"],\n source: item[\"source\"],\n sourceContext: item[\"sourceContext\"],\n inputs: !item[\"inputs\"]\n ? 
item[\"inputs\"]\n : inputFieldMappingEntryArraySerializer(item[\"inputs\"]),\n };\n}\n\nexport function searchIndexerKnowledgeStoreObjectProjectionSelectorDeserializer(\n item: any,\n): SearchIndexerKnowledgeStoreObjectProjectionSelector {\n return {\n storageContainer: item[\"storageContainer\"],\n referenceKeyName: item[\"referenceKeyName\"],\n generatedKeyName: item[\"generatedKeyName\"],\n source: item[\"source\"],\n sourceContext: item[\"sourceContext\"],\n inputs: !item[\"inputs\"]\n ? item[\"inputs\"]\n : inputFieldMappingEntryArrayDeserializer(item[\"inputs\"]),\n };\n}\n\nexport function searchIndexerKnowledgeStoreFileProjectionSelectorArraySerializer(\n result: Array<SearchIndexerKnowledgeStoreFileProjectionSelector>,\n): any[] {\n return result.map((item) => {\n return searchIndexerKnowledgeStoreFileProjectionSelectorSerializer(item);\n });\n}\n\nexport function searchIndexerKnowledgeStoreFileProjectionSelectorArrayDeserializer(\n result: Array<SearchIndexerKnowledgeStoreFileProjectionSelector>,\n): any[] {\n return result.map((item) => {\n return searchIndexerKnowledgeStoreFileProjectionSelectorDeserializer(item);\n });\n}\n\n/** Projection definition for what data to store in Azure Files. */\nexport interface SearchIndexerKnowledgeStoreFileProjectionSelector extends SearchIndexerKnowledgeStoreBlobProjectionSelector {}\n\nexport function searchIndexerKnowledgeStoreFileProjectionSelectorSerializer(\n item: SearchIndexerKnowledgeStoreFileProjectionSelector,\n): any {\n return {\n storageContainer: item[\"storageContainer\"],\n referenceKeyName: item[\"referenceKeyName\"],\n generatedKeyName: item[\"generatedKeyName\"],\n source: item[\"source\"],\n sourceContext: item[\"sourceContext\"],\n inputs: !item[\"inputs\"]\n ? 
item[\"inputs\"]\n : inputFieldMappingEntryArraySerializer(item[\"inputs\"]),\n };\n}\n\nexport function searchIndexerKnowledgeStoreFileProjectionSelectorDeserializer(\n item: any,\n): SearchIndexerKnowledgeStoreFileProjectionSelector {\n return {\n storageContainer: item[\"storageContainer\"],\n referenceKeyName: item[\"referenceKeyName\"],\n generatedKeyName: item[\"generatedKeyName\"],\n source: item[\"source\"],\n sourceContext: item[\"sourceContext\"],\n inputs: !item[\"inputs\"]\n ? item[\"inputs\"]\n : inputFieldMappingEntryArrayDeserializer(item[\"inputs\"]),\n };\n}\n\n/** Definition of additional projections to secondary search indexes. */\nexport interface SearchIndexerIndexProjection {\n /** A list of projections to be performed to secondary search indexes. */\n selectors: SearchIndexerIndexProjectionSelector[];\n /** A dictionary of index projection-specific configuration properties. Each name is the name of a specific property. Each value must be of a primitive type. */\n parameters?: SearchIndexerIndexProjectionsParameters;\n}\n\nexport function searchIndexerIndexProjectionSerializer(item: SearchIndexerIndexProjection): any {\n return {\n selectors: searchIndexerIndexProjectionSelectorArraySerializer(item[\"selectors\"]),\n parameters: !item[\"parameters\"]\n ? item[\"parameters\"]\n : searchIndexerIndexProjectionsParametersSerializer(item[\"parameters\"]),\n };\n}\n\nexport function searchIndexerIndexProjectionDeserializer(item: any): SearchIndexerIndexProjection {\n return {\n selectors: searchIndexerIndexProjectionSelectorArrayDeserializer(item[\"selectors\"]),\n parameters: !item[\"parameters\"]\n ? 
item[\"parameters\"]\n : searchIndexerIndexProjectionsParametersDeserializer(item[\"parameters\"]),\n };\n}\n\nexport function searchIndexerIndexProjectionSelectorArraySerializer(\n result: Array<SearchIndexerIndexProjectionSelector>,\n): any[] {\n return result.map((item) => {\n return searchIndexerIndexProjectionSelectorSerializer(item);\n });\n}\n\nexport function searchIndexerIndexProjectionSelectorArrayDeserializer(\n result: Array<SearchIndexerIndexProjectionSelector>,\n): any[] {\n return result.map((item) => {\n return searchIndexerIndexProjectionSelectorDeserializer(item);\n });\n}\n\n/** Description for what data to store in the designated search index. */\nexport interface SearchIndexerIndexProjectionSelector {\n /** Name of the search index to project to. Must have a key field with the 'keyword' analyzer set. */\n targetIndexName: string;\n /** Name of the field in the search index to map the parent document's key value to. Must be a string field that is filterable and not the key field. */\n parentKeyFieldName: string;\n /** Source context for the projections. Represents the cardinality at which the document will be split into multiple sub documents. */\n sourceContext: string;\n /** Mappings for the projection, or which source should be mapped to which field in the target index. 
*/\n mappings: InputFieldMappingEntry[];\n}\n\nexport function searchIndexerIndexProjectionSelectorSerializer(\n item: SearchIndexerIndexProjectionSelector,\n): any {\n return {\n targetIndexName: item[\"targetIndexName\"],\n parentKeyFieldName: item[\"parentKeyFieldName\"],\n sourceContext: item[\"sourceContext\"],\n mappings: inputFieldMappingEntryArraySerializer(item[\"mappings\"]),\n };\n}\n\nexport function searchIndexerIndexProjectionSelectorDeserializer(\n item: any,\n): SearchIndexerIndexProjectionSelector {\n return {\n targetIndexName: item[\"targetIndexName\"],\n parentKeyFieldName: item[\"parentKeyFieldName\"],\n sourceContext: item[\"sourceContext\"],\n mappings: inputFieldMappingEntryArrayDeserializer(item[\"mappings\"]),\n };\n}\n\n/** A dictionary of index projection-specific configuration properties. Each name is the name of a specific property. Each value must be of a primitive type. */\nexport interface SearchIndexerIndexProjectionsParameters {\n /** Defines behavior of the index projections in relation to the rest of the indexer. */\n projectionMode?: IndexProjectionMode;\n /** Additional properties */\n additionalProperties?: Record<string, any>;\n}\n\nexport function searchIndexerIndexProjectionsParametersSerializer(\n item: SearchIndexerIndexProjectionsParameters,\n): any {\n return {\n ...serializeRecord(item.additionalProperties ?? {}),\n projectionMode: item[\"projectionMode\"],\n };\n}\n\nexport function searchIndexerIndexProjectionsParametersDeserializer(\n item: any,\n): SearchIndexerIndexProjectionsParameters {\n return {\n additionalProperties: serializeRecord(item, [\"projectionMode\"]),\n projectionMode: item[\"projectionMode\"],\n };\n}\n\n/** Defines behavior of the index projections in relation to the rest of the indexer. */\nexport enum KnownIndexProjectionMode {\n /** The source document will be skipped from writing into the indexer's target index. 
*/\n SkipIndexingParentDocuments = \"skipIndexingParentDocuments\",\n /** The source document will be written into the indexer's target index. This is the default pattern. */\n IncludeIndexingParentDocuments = \"includeIndexingParentDocuments\",\n}\n\n/**\n * Defines behavior of the index projections in relation to the rest of the indexer. \\\n * {@link KnownIndexProjectionMode} can be used interchangeably with IndexProjectionMode,\n * this enum contains the known values that the service supports.\n * ### Known values supported by the service\n * **skipIndexingParentDocuments**: The source document will be skipped from writing into the indexer's target index. \\\n * **includeIndexingParentDocuments**: The source document will be written into the indexer's target index. This is the default pattern.\n */\nexport type IndexProjectionMode = string;\n\n/** Abstract class to share properties between concrete selectors. */\nexport interface SearchIndexerKnowledgeStoreProjectionSelector {\n /** Name of reference key to different projection. */\n referenceKeyName?: string;\n /** Name of generated key to store projection under. */\n generatedKeyName?: string;\n /** Source data to project. */\n source?: string;\n /** Source context for complex projections. */\n sourceContext?: string;\n /** Nested inputs for complex projections. */\n inputs?: InputFieldMappingEntry[];\n}\n\nexport function searchIndexerKnowledgeStoreProjectionSelectorSerializer(\n item: SearchIndexerKnowledgeStoreProjectionSelector,\n): any {\n return {\n referenceKeyName: item[\"referenceKeyName\"],\n generatedKeyName: item[\"generatedKeyName\"],\n source: item[\"source\"],\n sourceContext: item[\"sourceContext\"],\n inputs: !item[\"inputs\"]\n ? 
item[\"inputs\"]\n : inputFieldMappingEntryArraySerializer(item[\"inputs\"]),\n };\n}\n\nexport function searchIndexerKnowledgeStoreProjectionSelectorDeserializer(\n item: any,\n): SearchIndexerKnowledgeStoreProjectionSelector {\n return {\n referenceKeyName: item[\"referenceKeyName\"],\n generatedKeyName: item[\"generatedKeyName\"],\n source: item[\"source\"],\n sourceContext: item[\"sourceContext\"],\n inputs: !item[\"inputs\"]\n ? item[\"inputs\"]\n : inputFieldMappingEntryArrayDeserializer(item[\"inputs\"]),\n };\n}\n\n/** Abstract class to share properties between concrete selectors. */\nexport interface SearchIndexerKnowledgeStoreBlobProjectionSelector extends SearchIndexerKnowledgeStoreProjectionSelector {\n /** Blob container to store projections in. */\n storageContainer: string;\n}\n\nexport function searchIndexerKnowledgeStoreBlobProjectionSelectorSerializer(\n item: SearchIndexerKnowledgeStoreBlobProjectionSelector,\n): any {\n return {\n referenceKeyName: item[\"referenceKeyName\"],\n generatedKeyName: item[\"generatedKeyName\"],\n source: item[\"source\"],\n sourceContext: item[\"sourceContext\"],\n inputs: !item[\"inputs\"]\n ? item[\"inputs\"]\n : inputFieldMappingEntryArraySerializer(item[\"inputs\"]),\n storageContainer: item[\"storageContainer\"],\n };\n}\n\nexport function searchIndexerKnowledgeStoreBlobProjectionSelectorDeserializer(\n item: any,\n): SearchIndexerKnowledgeStoreBlobProjectionSelector {\n return {\n referenceKeyName: item[\"referenceKeyName\"],\n generatedKeyName: item[\"generatedKeyName\"],\n source: item[\"source\"],\n sourceContext: item[\"sourceContext\"],\n inputs: !item[\"inputs\"]\n ? item[\"inputs\"]\n : inputFieldMappingEntryArrayDeserializer(item[\"inputs\"]),\n storageContainer: item[\"storageContainer\"],\n };\n}\n\n/** Response from a list skillset request. If successful, it includes the full definitions of all skillsets. */\nexport interface ListSkillsetsResult {\n /** The skillsets defined in the Search service. 
*/\n readonly skillsets: SearchIndexerSkillset[];\n}\n\nexport function listSkillsetsResultDeserializer(item: any): ListSkillsetsResult {\n return {\n skillsets: searchIndexerSkillsetArrayDeserializer(item[\"value\"]),\n };\n}\n\nexport function searchIndexerSkillsetArraySerializer(result: Array<SearchIndexerSkillset>): any[] {\n return result.map((item) => {\n return searchIndexerSkillsetSerializer(item);\n });\n}\n\nexport function searchIndexerSkillsetArrayDeserializer(\n result: Array<SearchIndexerSkillset>,\n): any[] {\n return result.map((item) => {\n return searchIndexerSkillsetDeserializer(item);\n });\n}\n\n/** Request body for resync indexer operation. */\nexport interface IndexerResyncBody {\n /** Re-sync options that have been pre-defined from data source. */\n options?: IndexerResyncOption[];\n}\n\nexport function indexerResyncBodySerializer(item: IndexerResyncBody): any {\n return {\n options: !item[\"options\"]\n ? item[\"options\"]\n : item[\"options\"].map((p: any) => {\n return p;\n }),\n };\n}\n\n/** Options with various types of permission data to index. */\nexport enum KnownIndexerResyncOption {\n /** Indexer to re-ingest pre-selected permissions data from data source to index. */\n Permissions = \"permissions\",\n}\n\n/**\n * Options with various types of permission data to index. \\\n * {@link KnownIndexerResyncOption} can be used interchangeably with IndexerResyncOption,\n * this enum contains the known values that the service supports.\n * ### Known values supported by the service\n * **permissions**: Indexer to re-ingest pre-selected permissions data from data source to index.\n */\nexport type IndexerResyncOption = string;\n\n/** The type of the keysOrIds. 
*/\nexport interface DocumentKeysOrIds {\n /** document keys to be reset */\n documentKeys?: string[];\n /** datasource document identifiers to be reset */\n dataSourceDocumentIds?: string[];\n}\n\nexport function documentKeysOrIdsSerializer(item: DocumentKeysOrIds): any {\n return {\n documentKeys: !item[\"documentKeys\"]\n ? item[\"documentKeys\"]\n : item[\"documentKeys\"].map((p: any) => {\n return p;\n }),\n datasourceDocumentIds: !item[\"dataSourceDocumentIds\"]\n ? item[\"dataSourceDocumentIds\"]\n : item[\"dataSourceDocumentIds\"].map((p: any) => {\n return p;\n }),\n };\n}\n\n/** The type of the skill names. */\nexport interface SkillNames {\n /** the names of skills to be reset. */\n skillNames?: string[];\n}\n\nexport function skillNamesSerializer(item: SkillNames): any {\n return {\n skillNames: !item[\"skillNames\"]\n ? item[\"skillNames\"]\n : item[\"skillNames\"].map((p: any) => {\n return p;\n }),\n };\n}\n\nexport function _searchResourceEncryptionKeyAccessCredentialsSerializer(\n item: SearchResourceEncryptionKey,\n): any {\n return { applicationId: item[\"applicationId\"], applicationSecret: item[\"applicationSecret\"] };\n}\n\nexport function _searchResourceEncryptionKeyAccessCredentialsDeserializer(item: any) {\n return {\n applicationId: item[\"applicationId\"],\n applicationSecret: item[\"applicationSecret\"],\n };\n}\n\nexport function _searchIndexerDataSourceConnectionCredentialsSerializer(\n item: SearchIndexerDataSourceConnection,\n): any {\n return { connectionString: item[\"connectionString\"] };\n}\n\nexport function _searchIndexerDataSourceConnectionCredentialsDeserializer(item: any) {\n return {\n connectionString: item[\"connectionString\"],\n };\n}\n"],
|
|
5
|
+
"mappings": ";;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;
AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAGA,sCAAuC;AACvC,mCAAoC;AACpC,kCAAqC;AACrC,sCAAuC;AACvC,mCAAoC;AACpC,8BAAgC;AAEhC,oBAGO;AAsBA,SAAS,qBAAqB,MAAuB;AAC1D,SAAO;AAAA,IACL,MAAM,KAAK,MAAM;AAAA,IACjB,QAAQ,KAAK,QAAQ;AAAA,IACrB,cAAU;AAAA,MACR,KAAK,UAAU,EAAE,IAAI,CAAC,MAAW;AAC/B,eAAO;AAAA,MACT,CAAC;AAAA,IACH;AAAA,IACA,eAAe,CAAC,KAAK,eAAe,IAChC,KAAK,eAAe,IACpB,sCAAsC,KAAK,eAAe,CAAC;AAAA,IAC/D,eAAe,KAAK,MAAM;AAAA,EAC5B;AACF;AAEO,SAAS,uBAAuB,MAAuB;AAC5D,SAAO;AAAA,IACL,MAAM,KAAK,MAAM;AAAA,IACjB,QAAQ,KAAK,QAAQ;AAAA,IACrB,cAAU,wDAAuB,KAAK,UAAU,CAAC;AAAA,IACjD,eAAe,CAAC,KAAK,eAAe,IAChC,KAAK,eAAe,IACpB,wCAAwC,KAAK,eAAe,CAAC;AAAA,IACjE,MAAM,KAAK,aAAa;AAAA,EAC1B;AACF;AAkBO,SAAS,sCAAsC,MAAwC;AAC5F,SAAO;AAAA,IACL,iBAAiB,KAAK,SAAS;AAAA,IAC/B,oBAAoB,KAAK,YAAY;AAAA,IACrC,aAAa,KAAK,UAAU;AAAA,IAC5B,uBAAmB,kDAAqB,MAAM,CAAC,iBAAiB,mBAAmB,CAAC,IAChF,SACA,wDAAwD,IAAI;AAAA,IAChE,UAAU,CAAC,KAAK,UAAU,IACtB,KAAK,UAAU,IACf,yCAAyC,KAAK,UAAU,CAAC;AAAA,EAC/D;AACF;AAEO,SAAS,wCAAwC,MAAwC;AAC9F,SAAO;AAAA,IACL,SAAS,KAAK,iBAAiB;AAAA,IAC/B,YAAY,KAAK,oBAAoB;AAAA,IACrC,UAAU,KAAK,aAAa;AAAA,IAC5B,GAAI,CAAC,KAAK,mBAAmB,IACzB,KAAK,mBAAmB,IACxB,0DAA0D,KAAK,mBAAmB,CAAC;AAAA,IACvF,UAAU,CAAC,KAAK,UAAU,IACtB,KAAK,UAAU,IACf,2CAA2C,KAAK,UAAU,CAAC
;AAAA,EACjE;AACF;AAUO,SAAS,qDACd,MACK;AACL,SAAO,EAAE,eAAe,KAAK,eAAe,GAAG,mBAAmB,KAAK,mBAAmB,EAAE;AAC9F;AAEO,SAAS,uDACd,MAC4C;AAC5C,SAAO;AAAA,IACL,eAAe,KAAK,eAAe;AAAA,IACnC,mBAAmB,KAAK,mBAAmB;AAAA,EAC7C;AACF;AASO,SAAS,oCAAoC,MAAsC;AACxF,SAAO,EAAE,eAAe,KAAK,WAAW,EAAE;AAC5C;AAEO,SAAS,sCAAsC,MAAsC;AAC1F,SAAO;AAAA,IACL,WAAW,KAAK,aAAa;AAAA,EAC/B;AACF;AAQO,SAAS,yCACd,MACK;AACL,UAAQ,KAAK,WAAW;AAAA,IACtB,KAAK;AACH,aAAO,wCAAwC,IAAqC;AAAA,IAEtF,KAAK;AACH,aAAO;AAAA,QACL;AAAA,MACF;AAAA,IAEF;AACE,aAAO,oCAAoC,IAAI;AAAA,EACnD;AACF;AAEO,SAAS,2CACd,MACgC;AAChC,UAAQ,KAAK,aAAa,GAAG;AAAA,IAC3B,KAAK;AACH,aAAO,0CAA0C,IAAqC;AAAA,IAExF,KAAK;AACH,aAAO;AAAA,QACL;AAAA,MACF;AAAA,IAEF;AACE,aAAO,sCAAsC,IAAI;AAAA,EACrD;AACF;AAQO,SAAS,wCAAwC,MAA0C;AAChG,SAAO,EAAE,eAAe,KAAK,WAAW,EAAE;AAC5C;AAEO,SAAS,0CACd,MAC+B;AAC/B,SAAO;AAAA,IACL,WAAW,KAAK,aAAa;AAAA,EAC/B;AACF;AAUO,SAAS,gDACd,MACK;AACL,SAAO,EAAE,eAAe,KAAK,WAAW,GAAG,sBAAsB,KAAK,YAAY,EAAE;AACtF;AAEO,SAAS,kDACd,MACuC;AACvC,SAAO;AAAA,IACL,WAAW,KAAK,aAAa;AAAA,IAC7B,YAAY,KAAK,sBAAsB;AAAA,EACzC;AACF;AAQO,SAAS,kCAAkC,MAAkC;AAClF,SAAO;AAAA,IACL,aAAa,4BAA4B,KAAK,OAAO,CAAC;AAAA,EACxD;AACF;AAEO,SAAS,0BAA0B,QAAkC;AAC1E,SAAO,OAAO,IAAI,CAAC,SAAS;AAC1B,WAAO,qBAAqB,IAAI;AAAA,EAClC,CAAC;AACH;AAEO,SAAS,4BAA4B,QAAkC;AAC5E,SAAO,OAAO,IAAI,CAAC,SAAS;AAC1B,WAAO,uBAAuB,IAAI;AAAA,EACpC,CAAC;AACH;AAwCO,SAAS,sBAAsB,MAAwB;AAC5D,SAAO;AAAA,IACL,MAAM,KAAK,MAAM;AAAA,IACjB,aAAa,KAAK,aAAa;AAAA,IAC/B,QAAQ,2BAA2B,KAAK,QAAQ,CAAC;AAAA,IACjD,iBAAiB,CAAC,KAAK,iBAAiB,IACpC,KAAK,iBAAiB,IACtB,8BAA8B,KAAK,iBAAiB,CAAC;AAAA,IACzD,uBAAuB,KAAK,uBAAuB;AAAA,IACnD,aAAa,CAAC,KAAK,aAAa,IAC5B,KAAK,aAAa,IAClB,sBAAsB,KAAK,aAAa,CAAC;AAAA,IAC7C,YAAY,CAAC,KAAK,YAAY,IAC1B,KAAK,YAAY,IACjB,+BAA+B,KAAK,YAAY,CAAC;AAAA,IACrD,WAAW,CAAC,KAAK,WAAW,IACxB,KAAK,WAAW,IAChB,oCAAoC,KAAK,WAAW,CAAC;AAAA,IACzD,YAAY,CAAC,KAAK,YAAY,IAC1B,KAAK,YAAY,IACjB,qCAAqC,KAAK,YAAY,CAAC;AAAA,IAC3D,cAAc,CAAC,KAAK,cAAc,IAC9B,KAAK,cAAc,IACnB,gCAAgC,KAAK,cAAc,CAAC;AAAA,IACxD,aAAa,CAAC,KAAK,aAAa,IAC5B,KAAK,aAAa,IAClB,+BAA+B,KAAK,aAAa,CAAC;AAA
A,IACtD,aAAa,CAAC,KAAK,aAAa,IAC5B,KAAK,aAAa,IAClB,sCAAsC,KAAK,aAAa,CAAC;AAAA,IAC7D,eAAe,CAAC,KAAK,eAAe,IAChC,KAAK,eAAe,IACpB,sCAAsC,KAAK,eAAe,CAAC;AAAA,IAC/D,YAAY,CAAC,KAAK,YAAY,IAC1B,KAAK,YAAY,IACjB,mCAAmC,KAAK,YAAY,CAAC;AAAA,IACzD,UAAU,CAAC,KAAK,gBAAgB,IAC5B,KAAK,gBAAgB,IACrB,yBAAyB,KAAK,gBAAgB,CAAC;AAAA,IACnD,cAAc,CAAC,KAAK,cAAc,IAC9B,KAAK,cAAc,IACnB,uBAAuB,KAAK,cAAc,CAAC;AAAA,IAC/C,eAAe,KAAK,MAAM;AAAA,EAC5B;AACF;AAEO,SAAS,wBAAwB,MAAwB;AAC9D,SAAO;AAAA,IACL,MAAM,KAAK,MAAM;AAAA,IACjB,aAAa,KAAK,aAAa;AAAA,IAC/B,QAAQ,6BAA6B,KAAK,QAAQ,CAAC;AAAA,IACnD,iBAAiB,CAAC,KAAK,iBAAiB,IACpC,KAAK,iBAAiB,IACtB,gCAAgC,KAAK,iBAAiB,CAAC;AAAA,IAC3D,uBAAuB,KAAK,uBAAuB;AAAA,IACnD,aAAa,CAAC,KAAK,aAAa,IAC5B,KAAK,aAAa,IAClB,wBAAwB,KAAK,aAAa,CAAC;AAAA,IAC/C,YAAY,CAAC,KAAK,YAAY,IAC1B,KAAK,YAAY,IACjB,iCAAiC,KAAK,YAAY,CAAC;AAAA,IACvD,WAAW,CAAC,KAAK,WAAW,IACxB,KAAK,WAAW,IAChB,sCAAsC,KAAK,WAAW,CAAC;AAAA,IAC3D,YAAY,CAAC,KAAK,YAAY,IAC1B,KAAK,YAAY,IACjB,uCAAuC,KAAK,YAAY,CAAC;AAAA,IAC7D,cAAc,CAAC,KAAK,cAAc,IAC9B,KAAK,cAAc,IACnB,kCAAkC,KAAK,cAAc,CAAC;AAAA,IAC1D,aAAa,CAAC,KAAK,aAAa,IAC5B,KAAK,aAAa,IAClB,iCAAiC,KAAK,aAAa,CAAC;AAAA,IACxD,aAAa,CAAC,KAAK,aAAa,IAC5B,KAAK,aAAa,IAClB,wCAAwC,KAAK,aAAa,CAAC;AAAA,IAC/D,eAAe,CAAC,KAAK,eAAe,IAChC,KAAK,eAAe,IACpB,wCAAwC,KAAK,eAAe,CAAC;AAAA,IACjE,YAAY,CAAC,KAAK,YAAY,IAC1B,KAAK,YAAY,IACjB,qCAAqC,KAAK,YAAY,CAAC;AAAA,IAC3D,gBAAgB,CAAC,KAAK,UAAU,IAC5B,KAAK,UAAU,IACf,2BAA2B,KAAK,UAAU,CAAC;AAAA,IAC/C,cAAc,CAAC,KAAK,cAAc,IAC9B,KAAK,cAAc,IACnB,yBAAyB,KAAK,cAAc,CAAC;AAAA,IACjD,MAAM,KAAK,aAAa;AAAA,EAC1B;AACF;AAEO,SAAS,2BAA2B,QAAmC;AAC5E,SAAO,OAAO,IAAI,CAAC,SAAS;AAC1B,WAAO,sBAAsB,IAAI;AAAA,EACnC,CAAC;AACH;AAEO,SAAS,6BAA6B,QAAmC;AAC9E,SAAO,OAAO,IAAI,CAAC,SAAS;AAC1B,WAAO,wBAAwB,IAAI;AAAA,EACrC,CAAC;AACH;AA0CO,SAAS,sBAAsB,MAAwB;AAC5D,SAAO;AAAA,IACL,MAAM,KAAK,MAAM;AAAA,IACjB,MAAM,KAAK,MAAM;AAAA,IACjB,KAAK,KAAK,KAAK;AAAA,IACf,aAAa,KAAK,aAAa;AAAA,IAC/B,QAAQ,KAAK,QAAQ;AAAA,IACrB,YAAY,KAAK,YAAY;AAAA,IAC7B,YAAY,KAAK,YAAY;AAAA,IAC7B,UAAU,KAAK,UAAU;AAAA,IACzB,WAAW,KAAK,WAAW;AAAA,IAC3B,UA
AU,KAAK,cAAc;AAAA,IAC7B,gBAAgB,KAAK,oBAAoB;AAAA,IACzC,eAAe,KAAK,mBAAmB;AAAA,IACvC,YAAY,KAAK,gBAAgB;AAAA,IACjC,YAAY,KAAK,wBAAwB;AAAA,IACzC,qBAAqB,KAAK,yBAAyB;AAAA,IACnD,gBAAgB,KAAK,sBAAsB;AAAA,IAC3C,aAAa,CAAC,KAAK,iBAAiB,IAChC,KAAK,iBAAiB,IACtB,KAAK,iBAAiB,EAAE,IAAI,CAAC,MAAW;AACtC,aAAO;AAAA,IACT,CAAC;AAAA,IACL,QAAQ,CAAC,KAAK,QAAQ,IAAI,KAAK,QAAQ,IAAI,2BAA2B,KAAK,QAAQ,CAAC;AAAA,EACtF;AACF;AAEO,SAAS,wBAAwB,MAAwB;AAC9D,SAAO;AAAA,IACL,MAAM,KAAK,MAAM;AAAA,IACjB,MAAM,KAAK,MAAM;AAAA,IACjB,KAAK,KAAK,KAAK;AAAA,IACf,aAAa,KAAK,aAAa;AAAA,IAC/B,QAAQ,KAAK,QAAQ;AAAA,IACrB,YAAY,KAAK,YAAY;AAAA,IAC7B,YAAY,KAAK,YAAY;AAAA,IAC7B,UAAU,KAAK,UAAU;AAAA,IACzB,WAAW,KAAK,WAAW;AAAA,IAC3B,cAAc,KAAK,UAAU;AAAA,IAC7B,oBAAoB,KAAK,gBAAgB;AAAA,IACzC,mBAAmB,KAAK,eAAe;AAAA,IACvC,gBAAgB,KAAK,YAAY;AAAA,IACjC,wBAAwB,KAAK,YAAY;AAAA,IACzC,yBAAyB,KAAK,qBAAqB;AAAA,IACnD,sBAAsB,KAAK,gBAAgB;AAAA,IAC3C,iBAAiB,CAAC,KAAK,aAAa,IAChC,KAAK,aAAa,IAClB,KAAK,aAAa,EAAE,IAAI,CAAC,MAAW;AAClC,aAAO;AAAA,IACT,CAAC;AAAA,IACL,QAAQ,CAAC,KAAK,QAAQ,IAAI,KAAK,QAAQ,IAAI,6BAA6B,KAAK,QAAQ,CAAC;AAAA,EACxF;AACF;AAGO,IAAK,2BAAL,kBAAKA,8BAAL;AAEL,EAAAA,0BAAA,YAAS;AAET,EAAAA,0BAAA,WAAQ;AAER,EAAAA,0BAAA,WAAQ;AAER,EAAAA,0BAAA,YAAS;AAET,EAAAA,0BAAA,aAAU;AAEV,EAAAA,0BAAA,oBAAiB;AAEjB,EAAAA,0BAAA,oBAAiB;AAEjB,EAAAA,0BAAA,aAAU;AAEV,EAAAA,0BAAA,YAAS;AAET,EAAAA,0BAAA,UAAO;AAEP,EAAAA,0BAAA,WAAQ;AAER,EAAAA,0BAAA,WAAQ;AAER,EAAAA,0BAAA,UAAO;AA1BG,SAAAA;AAAA,GAAA;AAmDL,IAAK,2BAAL,kBAAKC,8BAAL;AAEL,EAAAA,0BAAA,iBAAc;AAEd,EAAAA,0BAAA,cAAW;AAEX,EAAAA,0BAAA,cAAW;AAEX,EAAAA,0BAAA,iBAAc;AAEd,EAAAA,0BAAA,cAAW;AAEX,EAAAA,0BAAA,iBAAc;AAEd,EAAAA,0BAAA,cAAW;AAEX,EAAAA,0BAAA,iBAAc;AAEd,EAAAA,0BAAA,cAAW;AAEX,EAAAA,0BAAA,qBAAkB;AAElB,EAAAA,0BAAA,kBAAe;AAEf,EAAAA,0BAAA,qBAAkB;AAElB,EAAAA,0BAAA,kBAAe;AAEf,EAAAA,0BAAA,iBAAc;AAEd,EAAAA,0BAAA,iBAAc;AAEd,EAAAA,0BAAA,cAAW;AAEX,EAAAA,0BAAA,iBAAc;AAEd,EAAAA,0BAAA,cAAW;AAEX,EAAAA,0BAAA,iBAAc;AAEd,EAAAA,0BAAA,cAAW;AAEX,EAAAA,0BAAA,iBAAc;AAEd,EAAAA,0BAAA,cAAW;AAEX,EAAAA,0BAAA,iBAAc;AAEd,EAAAA,0BAAA,iBAAc;AAEd,EAAAA,0BAAA,cAAW;AAE
X,EAAAA,0BAAA,iBAAc;AAEd,EAAAA,0BAAA,cAAW;AAEX,EAAAA,0BAAA,cAAW;AAEX,EAAAA,0BAAA,iBAAc;AAEd,EAAAA,0BAAA,cAAW;AAEX,EAAAA,0BAAA,iBAAc;AAEd,EAAAA,0BAAA,cAAW;AAEX,EAAAA,0BAAA,iBAAc;AAEd,EAAAA,0BAAA,iBAAc;AAEd,EAAAA,0BAAA,iBAAc;AAEd,EAAAA,0BAAA,cAAW;AAEX,EAAAA,0BAAA,iBAAc;AAEd,EAAAA,0BAAA,cAAW;AAEX,EAAAA,0BAAA,iBAAc;AAEd,EAAAA,0BAAA,iBAAc;AAEd,EAAAA,0BAAA,cAAW;AAEX,EAAAA,0BAAA,cAAW;AAEX,EAAAA,0BAAA,iBAAc;AAEd,EAAAA,0BAAA,cAAW;AAEX,EAAAA,0BAAA,iBAAc;AAEd,EAAAA,0BAAA,cAAW;AAEX,EAAAA,0BAAA,iBAAc;AAEd,EAAAA,0BAAA,iBAAc;AAEd,EAAAA,0BAAA,cAAW;AAEX,EAAAA,0BAAA,iBAAc;AAEd,EAAAA,0BAAA,cAAW;AAEX,EAAAA,0BAAA,iBAAc;AAEd,EAAAA,0BAAA,iBAAc;AAEd,EAAAA,0BAAA,iBAAc;AAEd,EAAAA,0BAAA,iBAAc;AAEd,EAAAA,0BAAA,iBAAc;AAEd,EAAAA,0BAAA,cAAW;AAEX,EAAAA,0BAAA,cAAW;AAEX,EAAAA,0BAAA,iBAAc;AAEd,EAAAA,0BAAA,cAAW;AAEX,EAAAA,0BAAA,mBAAgB;AAEhB,EAAAA,0BAAA,gBAAa;AAEb,EAAAA,0BAAA,mBAAgB;AAEhB,EAAAA,0BAAA,gBAAa;AAEb,EAAAA,0BAAA,iBAAc;AAEd,EAAAA,0BAAA,iBAAc;AAEd,EAAAA,0BAAA,cAAW;AAEX,EAAAA,0BAAA,iBAAc;AAEd,EAAAA,0BAAA,cAAW;AAEX,EAAAA,0BAAA,yBAAsB;AAEtB,EAAAA,0BAAA,sBAAmB;AAEnB,EAAAA,0BAAA,iBAAc;AAEd,EAAAA,0BAAA,iBAAc;AAEd,EAAAA,0BAAA,iBAAc;AAEd,EAAAA,0BAAA,cAAW;AAEX,EAAAA,0BAAA,iBAAc;AAEd,EAAAA,0BAAA,cAAW;AAEX,EAAAA,0BAAA,iBAAc;AAEd,EAAAA,0BAAA,iBAAc;AAEd,EAAAA,0BAAA,iBAAc;AAEd,EAAAA,0BAAA,cAAW;AAEX,EAAAA,0BAAA,iBAAc;AAEd,EAAAA,0BAAA,cAAW;AAEX,EAAAA,0BAAA,iBAAc;AAEd,EAAAA,0BAAA,iBAAc;AAEd,EAAAA,0BAAA,iBAAc;AAEd,EAAAA,0BAAA,oBAAiB;AAEjB,EAAAA,0BAAA,gCAA6B;AAE7B,EAAAA,0BAAA,aAAU;AAEV,EAAAA,0BAAA,aAAU;AAEV,EAAAA,0BAAA,YAAS;AAET,EAAAA,0BAAA,UAAO;AAEP,EAAAA,0BAAA,gBAAa;AA1LH,SAAAA;AAAA,GAAA;AAmSL,IAAK,6BAAL,kBAAKC,gCAAL;AAEL,EAAAA,4BAAA,kBAAe;AAEf,EAAAA,4BAAA,aAAU;AAEV,EAAAA,4BAAA,eAAY;AAEZ,EAAAA,4BAAA,cAAW;AAEX,EAAAA,4BAAA,eAAY;AAVF,SAAAA;AAAA,GAAA;AA2BL,IAAK,4BAAL,kBAAKC,+BAAL;AAEL,EAAAA,2BAAA,eAAY;AAFF,SAAAA;AAAA,GAAA;AAcL,SAAS,8BAA8B,QAAsC;AAClF,SAAO,OAAO,IAAI,CAAC,SAAS;AAC1B,WAAO,yBAAyB,IAAI;AAAA,EACtC,CAAC;AACH;AAEO,SAAS,gCAAgC,QAAsC;AACpF,SAAO,OAAO,IAAI,CAAC,SAAS;AAC1B,WAAO,2BAA2B,IAAI;AAAA,EACxC,CAAC;AACH
;AAcO,SAAS,yBAAyB,MAA2B;AAClE,SAAO;AAAA,IACL,MAAM,KAAK,MAAM;AAAA,IACjB,MAAM,CAAC,KAAK,aAAa,IAAI,KAAK,aAAa,IAAI,sBAAsB,KAAK,aAAa,CAAC;AAAA,IAC5F,WAAW,CAAC,KAAK,WAAW,IACxB,KAAK,WAAW,IAChB,oCAAoC,KAAK,WAAW,CAAC;AAAA,IACzD,qBAAqB,KAAK,qBAAqB;AAAA,EACjD;AACF;AAEO,SAAS,2BAA2B,MAA2B;AACpE,SAAO;AAAA,IACL,MAAM,KAAK,MAAM;AAAA,IACjB,aAAa,CAAC,KAAK,MAAM,IAAI,KAAK,MAAM,IAAI,wBAAwB,KAAK,MAAM,CAAC;AAAA,IAChF,WAAW,CAAC,KAAK,WAAW,IACxB,KAAK,WAAW,IAChB,sCAAsC,KAAK,WAAW,CAAC;AAAA,IAC3D,qBAAqB,KAAK,qBAAqB;AAAA,EACjD;AACF;AAQO,SAAS,sBAAsB,MAAwB;AAC5D,SAAO,EAAE,SAAS,KAAK,SAAS,EAAE;AACpC;AAEO,SAAS,wBAAwB,MAAwB;AAC9D,SAAO;AAAA,IACL,SAAS,OAAO;AAAA,MACd,OAAO,QAAQ,KAAK,SAAS,CAAC,EAAE,IAAI,CAAC,CAAC,GAAG,CAAC,MAAqB,CAAC,GAAG,CAAC,CAAC;AAAA,IACvE;AAAA,EACF;AACF;AAEO,SAAS,oCAAoC,QAA4C;AAC9F,SAAO,OAAO,IAAI,CAAC,SAAS;AAC1B,WAAO,+BAA+B,IAAI;AAAA,EAC5C,CAAC;AACH;AAEO,SAAS,sCAAsC,QAA4C;AAChG,SAAO,OAAO,IAAI,CAAC,SAAS;AAC1B,WAAO,iCAAiC,IAAI;AAAA,EAC9C,CAAC;AACH;AAeO,SAAS,0BAA0B,MAA4B;AACpE,SAAO;AAAA,IACL,WAAW,KAAK,WAAW;AAAA,IAC3B,OAAO,KAAK,OAAO;AAAA,IACnB,eAAe,KAAK,eAAe;AAAA,IACnC,MAAM,KAAK,MAAM;AAAA,EACnB;AACF;AAEO,SAAS,4BAA4B,MAA4B;AACtE,SAAO;AAAA,IACL,WAAW,KAAK,WAAW;AAAA,IAC3B,OAAO,KAAK,OAAO;AAAA,IACnB,eAAe,KAAK,eAAe;AAAA,IACnC,MAAM,KAAK,MAAM;AAAA,EACnB;AACF;AAUO,SAAS,+BAA+B,MAAiC;AAC9E,UAAQ,KAAK,MAAM;AAAA,IACjB,KAAK;AACH,aAAO,kCAAkC,IAA+B;AAAA,IAE1E,KAAK;AACH,aAAO,mCAAmC,IAAgC;AAAA,IAE5E,KAAK;AACH,aAAO,mCAAmC,IAAgC;AAAA,IAE5E,KAAK;AACH,aAAO,6BAA6B,IAA0B;AAAA,IAEhE;AACE,aAAO,0BAA0B,IAAI;AAAA,EACzC;AACF;AAEO,SAAS,iCAAiC,MAAiC;AAChF,UAAQ,KAAK,MAAM,GAAG;AAAA,IACpB,KAAK;AACH,aAAO,oCAAoC,IAA+B;AAAA,IAE5E,KAAK;AACH,aAAO,qCAAqC,IAAgC;AAAA,IAE9E,KAAK;AACH,aAAO,qCAAqC,IAAgC;AAAA,IAE9E,KAAK;AACH,aAAO,+BAA+B,IAA0B;AAAA,IAElE;AACE,aAAO,4BAA4B,IAAI;AAAA,EAC3C;AACF;AAaO,SAAS,kCAAkC,MAAoC;AACpF,SAAO;AAAA,IACL,WAAW,KAAK,WAAW;AAAA,IAC3B,OAAO,KAAK,OAAO;AAAA,IACnB,eAAe,KAAK,eAAe;AAAA,IACnC,MAAM,KAAK,MAAM;AAAA,IACjB,UAAU,oCAAoC,KAAK,YAAY,CAAC;AAAA,EAClE;AACF;AAEO,SAAS,oCAAoC,MAAoC;AACtF,SAAO;AAAA,IACL,WAAW,KAAK,WAAW;AA
AA,IAC3B,OAAO,KAAK,OAAO;AAAA,IACnB,eAAe,KAAK,eAAe;AAAA,IACnC,MAAM,KAAK,MAAM;AAAA,IACjB,YAAY,sCAAsC,KAAK,UAAU,CAAC;AAAA,EACpE;AACF;AAUO,SAAS,oCAAoC,MAAsC;AACxF,SAAO;AAAA,IACL,yBAAyB,KAAK,yBAAyB;AAAA,IACvD,kBAAkB,KAAK,kBAAkB;AAAA,EAC3C;AACF;AAEO,SAAS,sCAAsC,MAAsC;AAC1F,SAAO;AAAA,IACL,yBAAyB,KAAK,yBAAyB;AAAA,IACvD,kBAAkB,KAAK,kBAAkB;AAAA,EAC3C;AACF;AAUO,SAAS,mCAAmC,MAAqC;AACtF,SAAO;AAAA,IACL,WAAW,KAAK,WAAW;AAAA,IAC3B,OAAO,KAAK,OAAO;AAAA,IACnB,eAAe,KAAK,eAAe;AAAA,IACnC,MAAM,KAAK,MAAM;AAAA,IACjB,WAAW,qCAAqC,KAAK,YAAY,CAAC;AAAA,EACpE;AACF;AAEO,SAAS,qCAAqC,MAAqC;AACxF,SAAO;AAAA,IACL,WAAW,KAAK,WAAW;AAAA,IAC3B,OAAO,KAAK,OAAO;AAAA,IACnB,eAAe,KAAK,eAAe;AAAA,IACnC,MAAM,KAAK,MAAM;AAAA,IACjB,YAAY,uCAAuC,KAAK,WAAW,CAAC;AAAA,EACtE;AACF;AAQO,SAAS,qCAAqC,MAAuC;AAC1F,SAAO,EAAE,kBAAkB,KAAK,kBAAkB,EAAE;AACtD;AAEO,SAAS,uCAAuC,MAAuC;AAC5F,SAAO;AAAA,IACL,kBAAkB,KAAK,kBAAkB;AAAA,EAC3C;AACF;AAUO,SAAS,mCAAmC,MAAqC;AACtF,SAAO;AAAA,IACL,WAAW,KAAK,WAAW;AAAA,IAC3B,OAAO,KAAK,OAAO;AAAA,IACnB,eAAe,KAAK,eAAe;AAAA,IACnC,MAAM,KAAK,MAAM;AAAA,IACjB,WAAW,qCAAqC,KAAK,YAAY,CAAC;AAAA,EACpE;AACF;AAEO,SAAS,qCAAqC,MAAqC;AACxF,SAAO;AAAA,IACL,WAAW,KAAK,WAAW;AAAA,IAC3B,OAAO,KAAK,OAAO;AAAA,IACnB,eAAe,KAAK,eAAe;AAAA,IACnC,MAAM,KAAK,MAAM;AAAA,IACjB,YAAY,uCAAuC,KAAK,WAAW,CAAC;AAAA,EACtE;AACF;AAYO,SAAS,qCAAqC,MAAuC;AAC1F,SAAO;AAAA,IACL,oBAAoB,KAAK,oBAAoB;AAAA,IAC7C,kBAAkB,KAAK,kBAAkB;AAAA,IACzC,0BAA0B,KAAK,kCAAkC;AAAA,EACnE;AACF;AAEO,SAAS,uCAAuC,MAAuC;AAC5F,SAAO;AAAA,IACL,oBAAoB,KAAK,oBAAoB;AAAA,IAC7C,kBAAkB,KAAK,kBAAkB;AAAA,IACzC,kCAAkC,KAAK,0BAA0B;AAAA,EACnE;AACF;AAUO,SAAS,6BAA6B,MAA+B;AAC1E,SAAO;AAAA,IACL,WAAW,KAAK,WAAW;AAAA,IAC3B,OAAO,KAAK,OAAO;AAAA,IACnB,eAAe,KAAK,eAAe;AAAA,IACnC,MAAM,KAAK,MAAM;AAAA,IACjB,KAAK,+BAA+B,KAAK,YAAY,CAAC;AAAA,EACxD;AACF;AAEO,SAAS,+BAA+B,MAA+B;AAC5E,SAAO;AAAA,IACL,WAAW,KAAK,WAAW;AAAA,IAC3B,OAAO,KAAK,OAAO;AAAA,IACnB,eAAe,KAAK,eAAe;AAAA,IACnC,MAAM,KAAK,MAAM;AAAA,IACjB,YAAY,iCAAiC,KAAK,KAAK,CAAC;AAAA,EAC1D;AACF;AAQO,SAAS,+BAA+B,MAAiC;AAC9E,SAAO,EAAE,eAAe,KAAK,eAAe,EAAE;AAChD;AAEO,SAAS,iCAAi
C,MAAiC;AAChF,SAAO;AAAA,IACL,eAAe,KAAK,eAAe;AAAA,EACrC;AACF;AAmBO,SAAS,sBAAsB,MAAwB;AAC5D,SAAO;AAAA,IACL,gBAAgB,KAAK,gBAAgB,EAAE,IAAI,CAAC,MAAW;AACrD,aAAO;AAAA,IACT,CAAC;AAAA,IACD,iBAAiB,KAAK,iBAAiB;AAAA,EACzC;AACF;AAEO,SAAS,wBAAwB,MAAwB;AAC9D,SAAO;AAAA,IACL,gBAAgB,KAAK,gBAAgB,EAAE,IAAI,CAAC,MAAW;AACrD,aAAO;AAAA,IACT,CAAC;AAAA,IACD,iBAAiB,KAAK,iBAAiB;AAAA,EACzC;AACF;AAEO,SAAS,+BAA+B,QAAuC;AACpF,SAAO,OAAO,IAAI,CAAC,SAAS;AAC1B,WAAO,0BAA0B,IAAI;AAAA,EACvC,CAAC;AACH;AAEO,SAAS,iCAAiC,QAAuC;AACtF,SAAO,OAAO,IAAI,CAAC,SAAS;AAC1B,WAAO,4BAA4B,IAAI;AAAA,EACzC,CAAC;AACH;AAYO,SAAS,0BAA0B,MAA4B;AACpE,SAAO;AAAA,IACL,MAAM,KAAK,MAAM;AAAA,IACjB,YAAY,KAAK,YAAY;AAAA,IAC7B,cAAc,KAAK,cAAc,EAAE,IAAI,CAAC,MAAW;AACjD,aAAO;AAAA,IACT,CAAC;AAAA,EACH;AACF;AAEO,SAAS,4BAA4B,MAA4B;AACtE,SAAO;AAAA,IACL,MAAM,KAAK,MAAM;AAAA,IACjB,YAAY,KAAK,YAAY;AAAA,IAC7B,cAAc,KAAK,cAAc,EAAE,IAAI,CAAC,MAAW;AACjD,aAAO;AAAA,IACT,CAAC;AAAA,EACH;AACF;AAEO,SAAS,oCAAoC,QAA4C;AAC9F,SAAO,OAAO,IAAI,CAAC,SAAS;AAC1B,WAAO,+BAA+B,IAAI;AAAA,EAC5C,CAAC;AACH;AAEO,SAAS,sCAAsC,QAA4C;AAChG,SAAO,OAAO,IAAI,CAAC,SAAS;AAC1B,WAAO,iCAAiC,IAAI;AAAA,EAC9C,CAAC;AACH;AAWO,SAAS,0BAA0B,MAA4B;AACpE,SAAO,EAAE,eAAe,KAAK,WAAW,GAAG,MAAM,KAAK,MAAM,EAAE;AAChE;AAEO,SAAS,4BAA4B,MAA4B;AACtE,SAAO;AAAA,IACL,WAAW,KAAK,aAAa;AAAA,IAC7B,MAAM,KAAK,MAAM;AAAA,EACnB;AACF;AAUO,SAAS,+BAA+B,MAAiC;AAC9E,UAAQ,KAAK,WAAW;AAAA,IACtB,KAAK;AACH,aAAO,yBAAyB,IAAsB;AAAA,IAExD,KAAK;AACH,aAAO,0BAA0B,IAAuB;AAAA,IAE1D,KAAK;AACH,aAAO,iCAAiC,IAA8B;AAAA,IAExE,KAAK;AACH,aAAO,uBAAuB,IAAoB;AAAA,IAEpD;AACE,aAAO,0BAA0B,IAAI;AAAA,EACzC;AACF;AAEO,SAAS,iCAAiC,MAAiC;AAChF,UAAQ,KAAK,aAAa,GAAG;AAAA,IAC3B,KAAK;AACH,aAAO,2BAA2B,IAAsB;AAAA,IAE1D,KAAK;AACH,aAAO,4BAA4B,IAAuB;AAAA,IAE5D,KAAK;AACH,aAAO,mCAAmC,IAA8B;AAAA,IAE1E,KAAK;AACH,aAAO,yBAAyB,IAAoB;AAAA,IAEtD;AACE,aAAO,4BAA4B,IAAI;AAAA,EAC3C;AACF;AAcO,SAAS,yBAAyB,MAA2B;AAClE,SAAO;AAAA,IACL,eAAe,KAAK,WAAW;AAAA,IAC/B,MAAM,KAAK,MAAM;AAAA,IACjB,WAAW,KAAK,WAAW;AAAA,IAC3B,cAAc,CAAC,KAAK,cAAc,IAC9B,KAAK,cAAc,IACnB,KAAK,cAAc,EAAE,IAAI,CAAC,MAAW;AACnC,aAAO;A
AAA,IACT,CAAC;AAAA,IACL,aAAa,CAAC,KAAK,aAAa,IAC5B,KAAK,aAAa,IAClB,KAAK,aAAa,EAAE,IAAI,CAAC,MAAW;AAClC,aAAO;AAAA,IACT,CAAC;AAAA,EACP;AACF;AAEO,SAAS,2BAA2B,MAA2B;AACpE,SAAO;AAAA,IACL,WAAW,KAAK,aAAa;AAAA,IAC7B,MAAM,KAAK,MAAM;AAAA,IACjB,WAAW,KAAK,WAAW;AAAA,IAC3B,cAAc,CAAC,KAAK,cAAc,IAC9B,KAAK,cAAc,IACnB,KAAK,cAAc,EAAE,IAAI,CAAC,MAAW;AACnC,aAAO;AAAA,IACT,CAAC;AAAA,IACL,aAAa,CAAC,KAAK,aAAa,IAC5B,KAAK,aAAa,IAClB,KAAK,aAAa,EAAE,IAAI,CAAC,MAAW;AAClC,aAAO;AAAA,IACT,CAAC;AAAA,EACP;AACF;AAGO,IAAK,4BAAL,kBAAKC,+BAAL;AAEL,EAAAA,2BAAA,aAAU;AAEV,EAAAA,2BAAA,eAAY;AAEZ,EAAAA,2BAAA,aAAU;AAEV,EAAAA,2BAAA,YAAS;AAET,EAAAA,2BAAA,eAAY;AAEZ,EAAAA,2BAAA,gCAA6B;AAE7B,EAAAA,2BAAA,wCAAqC;AAErC,EAAAA,2BAAA,WAAQ;AAER,EAAAA,2BAAA,mBAAgB;AAEhB,EAAAA,2BAAA,aAAU;AAEV,EAAAA,2BAAA,cAAW;AAEX,EAAAA,2BAAA,iBAAc;AAEd,EAAAA,2BAAA,gBAAa;AA1BH,SAAAA;AAAA,GAAA;AAmDL,IAAK,uBAAL,kBAAKC,0BAAL;AAEL,EAAAA,sBAAA,yBAAsB;AAEtB,EAAAA,sBAAA,gBAAa;AAEb,EAAAA,sBAAA,kBAAe;AAEf,EAAAA,sBAAA,eAAY;AAEZ,EAAAA,sBAAA,cAAW;AAEX,EAAAA,sBAAA,aAAU;AAEV,EAAAA,sBAAA,gBAAa;AAEb,EAAAA,sBAAA,eAAY;AAEZ,EAAAA,sBAAA,aAAU;AAEV,EAAAA,sBAAA,yBAAsB;AAEtB,EAAAA,sBAAA,wBAAqB;AAErB,EAAAA,sBAAA,wBAAqB;AAErB,EAAAA,sBAAA,mBAAgB;AAEhB,EAAAA,sBAAA,WAAQ;AAER,EAAAA,sBAAA,YAAS;AAET,EAAAA,sBAAA,WAAQ;AAER,EAAAA,sBAAA,eAAY;AAEZ,EAAAA,sBAAA,WAAQ;AAER,EAAAA,sBAAA,0BAAuB;AAEvB,EAAAA,sBAAA,cAAW;AAEX,EAAAA,sBAAA,gBAAa;AAEb,EAAAA,sBAAA,aAAU;AAEV,EAAAA,sBAAA,+BAA4B;AAE5B,EAAAA,sBAAA,sCAAmC;AAEnC,EAAAA,sBAAA,aAAU;AAEV,EAAAA,sBAAA,cAAW;AAEX,EAAAA,sBAAA,yBAAsB;AAEtB,EAAAA,sBAAA,aAAU;AAEV,EAAAA,sBAAA,eAAY;AAEZ,EAAAA,sBAAA,UAAO;AAEP,EAAAA,sBAAA,cAAW;AAEX,EAAAA,sBAAA,YAAS;AAET,EAAAA,sBAAA,eAAY;AAEZ,EAAAA,sBAAA,mBAAgB;AApEN,SAAAA;AAAA,GAAA;AAkHL,IAAK,sBAAL,kBAAKC,yBAAL;AAEL,EAAAA,qBAAA,eAAY;AAFF,SAAAA;AAAA,GAAA;AA4BL,SAAS,0BAA0B,MAA4B;AACpE,SAAO;AAAA,IACL,eAAe,KAAK,WAAW;AAAA,IAC/B,MAAM,KAAK,MAAM;AAAA,IACjB,WAAW,KAAK,gBAAgB;AAAA,IAChC,SAAS,KAAK,SAAS;AAAA,IACvB,OAAO,CAAC,KAAK,OAAO,IAChB,KAAK,OAAO,QACZ;AAAA,MACE,KAAK,OAAO,EAAE,IAAI,CAAC,MAAW;AAC5B,eAAO;AAAA,MACT,CAAC;AAAA,IAC
H;AAAA,IACJ,WAAW,CAAC,KAAK,WAAW,IACxB,KAAK,WAAW,IAChB,KAAK,WAAW,EAAE,IAAI,CAAC,MAAW;AAChC,aAAO;AAAA,IACT,CAAC;AAAA,EACP;AACF;AAEO,SAAS,4BAA4B,MAA4B;AACtE,SAAO;AAAA,IACL,WAAW,KAAK,aAAa;AAAA,IAC7B,MAAM,KAAK,MAAM;AAAA,IACjB,gBAAgB,KAAK,WAAW;AAAA,IAChC,SAAS,KAAK,SAAS;AAAA,IACvB,OACE,KAAK,OAAO,MAAM,QAAQ,KAAK,OAAO,MAAM,SACxC,KAAK,OAAO,QACZ,kDAAoB,KAAK,OAAO,CAAC;AAAA,IACvC,WAAW,CAAC,KAAK,WAAW,IACxB,KAAK,WAAW,IAChB,KAAK,WAAW,EAAE,IAAI,CAAC,MAAW;AAChC,aAAO;AAAA,IACT,CAAC;AAAA,EACP;AACF;AAGO,IAAK,kBAAL,kBAAKC,qBAAL;AAEL,EAAAA,iBAAA,aAAU;AAEV,EAAAA,iBAAA,qBAAkB;AAElB,EAAAA,iBAAA,cAAW;AAEX,EAAAA,iBAAA,YAAS;AAET,EAAAA,iBAAA,aAAU;AAEV,EAAAA,iBAAA,eAAY;AAEZ,EAAAA,iBAAA,iBAAc;AAEd,EAAAA,iBAAA,eAAY;AAhBF,SAAAA;AAAA,GAAA;AA6CL,SAAS,iCAAiC,MAAmC;AAClF,SAAO;AAAA,IACL,eAAe,KAAK,WAAW;AAAA,IAC/B,MAAM,KAAK,MAAM;AAAA,IACjB,gBAAgB,KAAK,gBAAgB;AAAA,IACrC,WAAW,CAAC,KAAK,WAAW,IACxB,KAAK,WAAW,IAChB,KAAK,WAAW,EAAE,IAAI,CAAC,MAAW;AAChC,aAAO;AAAA,IACT,CAAC;AAAA,EACP;AACF;AAEO,SAAS,mCAAmC,MAAmC;AACpF,SAAO;AAAA,IACL,WAAW,KAAK,aAAa;AAAA,IAC7B,MAAM,KAAK,MAAM;AAAA,IACjB,gBAAgB,KAAK,gBAAgB;AAAA,IACrC,WAAW,CAAC,KAAK,WAAW,IACxB,KAAK,WAAW,IAChB,KAAK,WAAW,EAAE,IAAI,CAAC,MAAW;AAChC,aAAO;AAAA,IACT,CAAC;AAAA,EACP;AACF;AAUO,SAAS,uBAAuB,MAAyB;AAC9D,SAAO;AAAA,IACL,eAAe,KAAK,WAAW;AAAA,IAC/B,MAAM,KAAK,MAAM;AAAA,IACjB,WAAW,CAAC,KAAK,WAAW,IACxB,KAAK,WAAW,IAChB,KAAK,WAAW,EAAE,IAAI,CAAC,MAAW;AAChC,aAAO;AAAA,IACT,CAAC;AAAA,EACP;AACF;AAEO,SAAS,yBAAyB,MAAyB;AAChE,SAAO;AAAA,IACL,WAAW,KAAK,aAAa;AAAA,IAC7B,MAAM,KAAK,MAAM;AAAA,IACjB,WAAW,CAAC,KAAK,WAAW,IACxB,KAAK,WAAW,IAChB,KAAK,WAAW,EAAE,IAAI,CAAC,MAAW;AAChC,aAAO;AAAA,IACT,CAAC;AAAA,EACP;AACF;AAEO,SAAS,qCAAqC,QAA6C;AAChG,SAAO,OAAO,IAAI,CAAC,SAAS;AAC1B,WAAO,gCAAgC,IAAI;AAAA,EAC7C,CAAC;AACH;AAEO,SAAS,uCACd,QACO;AACP,SAAO,OAAO,IAAI,CAAC,SAAS;AAC1B,WAAO,kCAAkC,IAAI;AAAA,EAC/C,CAAC;AACH;AAWO,SAAS,2BAA2B,MAA6B;AACtE,SAAO,EAAE,eAAe,KAAK,WAAW,GAAG,MAAM,KAAK,MAAM,EAAE;AAChE;AAEO,SAAS,6BAA6B,MAA6B;AACxE,SAAO;AAAA,IACL,WAAW,KAAK,aAAa;AAAA,IAC7B,MAAM,KAAK,MAAM;AAAA,EACnB;AACF;AAkBO,SAAS,
gCAAgC,MAAkC;AAChF,UAAQ,KAAK,WAAW;AAAA,IACtB,KAAK;AACH,aAAO,2BAA2B,IAAwB;AAAA,IAE5D,KAAK;AACH,aAAO,6BAA6B,IAA0B;AAAA,IAEhE,KAAK;AACH,aAAO,2BAA2B,IAAwB;AAAA,IAE5D,KAAK;AACH,aAAO,6BAA6B,IAA0B;AAAA,IAEhE,KAAK;AACH,aAAO,qCAAqC,IAAkC;AAAA,IAEhF,KAAK;AACH,aAAO;AAAA,QACL;AAAA,MACF;AAAA,IAEF,KAAK;AACH,aAAO,yBAAyB,IAAsB;AAAA,IAExD,KAAK;AACH,aAAO,mCAAmC,IAAgC;AAAA,IAE5E,KAAK;AACH,aAAO,2BAA2B,IAAwB;AAAA,IAE5D,KAAK;AACH,aAAO,kCAAkC,IAA+B;AAAA,IAE1E,KAAK;AACH,aAAO,oCAAoC,IAAiC;AAAA,IAE9E,KAAK;AACH,aAAO,+BAA+B,IAA4B;AAAA,IAEpE;AACE,aAAO,2BAA2B,IAAI;AAAA,EAC1C;AACF;AAEO,SAAS,kCAAkC,MAAkC;AAClF,UAAQ,KAAK,aAAa,GAAG;AAAA,IAC3B,KAAK;AACH,aAAO,6BAA6B,IAAwB;AAAA,IAE9D,KAAK;AACH,aAAO,+BAA+B,IAA0B;AAAA,IAElE,KAAK;AACH,aAAO,6BAA6B,IAAwB;AAAA,IAE9D,KAAK;AACH,aAAO,+BAA+B,IAA0B;AAAA,IAElE,KAAK;AACH,aAAO,uCAAuC,IAAkC;AAAA,IAElF,KAAK;AACH,aAAO;AAAA,QACL;AAAA,MACF;AAAA,IAEF,KAAK;AACH,aAAO,2BAA2B,IAAsB;AAAA,IAE1D,KAAK;AACH,aAAO,qCAAqC,IAAgC;AAAA,IAE9E,KAAK;AACH,aAAO,6BAA6B,IAAwB;AAAA,IAE9D,KAAK;AACH,aAAO,oCAAoC,IAA+B;AAAA,IAE5E,KAAK;AACH,aAAO,sCAAsC,IAAiC;AAAA,IAEhF,KAAK;AACH,aAAO,iCAAiC,IAA4B;AAAA,IAEtE;AACE,aAAO,6BAA6B,IAAI;AAAA,EAC5C;AACF;AAUO,SAAS,2BAA2B,MAA6B;AACtE,SAAO;AAAA,IACL,eAAe,KAAK,WAAW;AAAA,IAC/B,MAAM,KAAK,MAAM;AAAA,IACjB,gBAAgB,KAAK,gBAAgB;AAAA,EACvC;AACF;AAEO,SAAS,6BAA6B,MAA6B;AACxE,SAAO;AAAA,IACL,WAAW,KAAK,aAAa;AAAA,IAC7B,MAAM,KAAK,MAAM;AAAA,IACjB,gBAAgB,KAAK,gBAAgB;AAAA,EACvC;AACF;AAcO,SAAS,6BAA6B,MAA+B;AAC1E,SAAO;AAAA,IACL,eAAe,KAAK,WAAW;AAAA,IAC/B,MAAM,KAAK,MAAM;AAAA,IACjB,SAAS,KAAK,SAAS;AAAA,IACvB,SAAS,KAAK,SAAS;AAAA,IACvB,YAAY,CAAC,KAAK,YAAY,IAC1B,KAAK,YAAY,IACjB,KAAK,YAAY,EAAE,IAAI,CAAC,MAAW;AACjC,aAAO;AAAA,IACT,CAAC;AAAA,EACP;AACF;AAEO,SAAS,+BAA+B,MAA+B;AAC5E,SAAO;AAAA,IACL,WAAW,KAAK,aAAa;AAAA,IAC7B,MAAM,KAAK,MAAM;AAAA,IACjB,SAAS,KAAK,SAAS;AAAA,IACvB,SAAS,KAAK,SAAS;AAAA,IACvB,YAAY,CAAC,KAAK,YAAY,IAC1B,KAAK,YAAY,IACjB,KAAK,YAAY,EAAE,IAAI,CAAC,MAAW;AACjC,aAAO;AAAA,IACT,CAAC;AAAA,EACP;AACF;AAaO,SAAS,2BAA2B,MAA6B;AACtE,SAAO,EAAE,eAAe,KAAK,WAAW,GAAG,MAAM,KAAK,MAAM,GAAG,YAAY,KAA
K,YAAY,EAAE;AAChG;AAEO,SAAS,6BAA6B,MAA6B;AACxE,SAAO;AAAA,IACL,WAAW,KAAK,aAAa;AAAA,IAC7B,MAAM,KAAK,MAAM;AAAA,IACjB,YAAY,KAAK,YAAY;AAAA,EAC/B;AACF;AAUO,SAAS,6BAA6B,MAA+B;AAC1E,SAAO;AAAA,IACL,eAAe,KAAK,WAAW;AAAA,IAC/B,MAAM,KAAK,MAAM;AAAA,IACjB,gBAAgB,KAAK,gBAAgB;AAAA,EACvC;AACF;AAEO,SAAS,+BAA+B,MAA+B;AAC5E,SAAO;AAAA,IACL,WAAW,KAAK,aAAa;AAAA,IAC7B,MAAM,KAAK,MAAM;AAAA,IACjB,gBAAgB,KAAK,gBAAgB;AAAA,EACvC;AACF;AAcO,SAAS,qCAAqC,MAAuC;AAC1F,SAAO;AAAA,IACL,eAAe,KAAK,WAAW;AAAA,IAC/B,MAAM,KAAK,MAAM;AAAA,IACjB,gBAAgB,KAAK,gBAAgB;AAAA,IACrC,mBAAmB,KAAK,mBAAmB;AAAA,IAC3C,UAAU,KAAK,UAAU;AAAA,EAC3B;AACF;AAEO,SAAS,uCAAuC,MAAuC;AAC5F,SAAO;AAAA,IACL,WAAW,KAAK,aAAa;AAAA,IAC7B,MAAM,KAAK,MAAM;AAAA,IACjB,gBAAgB,KAAK,gBAAgB;AAAA,IACrC,mBAAmB,KAAK,mBAAmB;AAAA,IAC3C,UAAU,KAAK,UAAU;AAAA,EAC3B;AACF;AA2DO,SAAS,6CACd,MACK;AACL,SAAO;AAAA,IACL,eAAe,KAAK,WAAW;AAAA,IAC/B,MAAM,KAAK,MAAM;AAAA,IACjB,gBAAgB,KAAK,gBAAgB;AAAA,IACrC,mBAAmB,KAAK,mBAAmB;AAAA,IAC3C,UAAU,KAAK,UAAU;AAAA,EAC3B;AACF;AAEO,SAAS,+CACd,MACoC;AACpC,SAAO;AAAA,IACL,WAAW,KAAK,aAAa;AAAA,IAC7B,MAAM,KAAK,MAAM;AAAA,IACjB,gBAAgB,KAAK,gBAAgB;AAAA,IACrC,mBAAmB,KAAK,mBAAmB;AAAA,IAC3C,UAAU,KAAK,UAAU;AAAA,EAC3B;AACF;AA8DO,SAAS,yBAAyB,MAA2B;AAClE,SAAO;AAAA,IACL,eAAe,KAAK,WAAW;AAAA,IAC/B,MAAM,KAAK,MAAM;AAAA,IACjB,SAAS,KAAK,SAAS;AAAA,IACvB,SAAS,KAAK,SAAS;AAAA,IACvB,YAAY,CAAC,KAAK,YAAY,IAC1B,KAAK,YAAY,IACjB,KAAK,YAAY,EAAE,IAAI,CAAC,MAAW;AACjC,aAAO;AAAA,IACT,CAAC;AAAA,EACP;AACF;AAEO,SAAS,2BAA2B,MAA2B;AACpE,SAAO;AAAA,IACL,WAAW,KAAK,aAAa;AAAA,IAC7B,MAAM,KAAK,MAAM;AAAA,IACjB,SAAS,KAAK,SAAS;AAAA,IACvB,SAAS,KAAK,SAAS;AAAA,IACvB,YAAY,CAAC,KAAK,YAAY,IAC1B,KAAK,YAAY,IACjB,KAAK,YAAY,EAAE,IAAI,CAAC,MAAW;AACjC,aAAO;AAAA,IACT,CAAC;AAAA,EACP;AACF;AAkBO,SAAS,mCAAmC,MAAqC;AACtF,SAAO;AAAA,IACL,eAAe,KAAK,WAAW;AAAA,IAC/B,MAAM,KAAK,MAAM;AAAA,IACjB,WAAW,KAAK,WAAW;AAAA,IAC3B,aAAa,KAAK,aAAa;AAAA,IAC/B,gBAAgB,KAAK,gBAAgB;AAAA,IACrC,SAAS,KAAK,mBAAmB;AAAA,IACjC,MAAM,KAAK,sBAAsB;AAAA,EACnC;AACF;AAEO,SAAS,qCAAqC,MAAqC;AACxF,SAAO;AAAA,IACL,WAAW,KAAK,aAAa;AAAA,IAC7B,MAAM,KAAK,MAAM;
AAAA,IACjB,WAAW,KAAK,WAAW;AAAA,IAC3B,aAAa,KAAK,aAAa;AAAA,IAC/B,gBAAgB,KAAK,gBAAgB;AAAA,IACrC,mBAAmB,KAAK,SAAS;AAAA,IACjC,sBAAsB,KAAK,MAAM;AAAA,EACnC;AACF;AAcO,SAAS,2BAA2B,MAA6B;AACtE,SAAO;AAAA,IACL,eAAe,KAAK,WAAW;AAAA,IAC/B,MAAM,KAAK,MAAM;AAAA,IACjB,SAAS,KAAK,SAAS;AAAA,IACvB,OAAO,CAAC,KAAK,OAAO,IAChB,KAAK,OAAO,QACZ;AAAA,MACE,KAAK,OAAO,EAAE,IAAI,CAAC,MAAW;AAC5B,eAAO;AAAA,MACT,CAAC;AAAA,IACH;AAAA,IACJ,OAAO,KAAK,OAAO;AAAA,EACrB;AACF;AAEO,SAAS,6BAA6B,MAA6B;AACxE,SAAO;AAAA,IACL,WAAW,KAAK,aAAa;AAAA,IAC7B,MAAM,KAAK,MAAM;AAAA,IACjB,SAAS,KAAK,SAAS;AAAA,IACvB,OACE,KAAK,OAAO,MAAM,QAAQ,KAAK,OAAO,MAAM,SACxC,KAAK,OAAO,QACZ,kDAAoB,KAAK,OAAO,CAAC;AAAA,IACvC,OAAO,KAAK,OAAO;AAAA,EACrB;AACF;AAUO,SAAS,kCAAkC,MAAoC;AACpF,SAAO;AAAA,IACL,eAAe,KAAK,WAAW;AAAA,IAC/B,MAAM,KAAK,MAAM;AAAA,IACjB,gBAAgB,KAAK,gBAAgB;AAAA,EACvC;AACF;AAEO,SAAS,oCAAoC,MAAoC;AACtF,SAAO;AAAA,IACL,WAAW,KAAK,aAAa;AAAA,IAC7B,MAAM,KAAK,MAAM;AAAA,IACjB,gBAAgB,KAAK,gBAAgB;AAAA,EACvC;AACF;AAUO,SAAS,oCAAoC,MAAsC;AACxF,SAAO;AAAA,IACL,eAAe,KAAK,WAAW;AAAA,IAC/B,MAAM,KAAK,MAAM;AAAA,IACjB,gBAAgB,KAAK,gBAAgB;AAAA,EACvC;AACF;AAEO,SAAS,sCAAsC,MAAsC;AAC1F,SAAO;AAAA,IACL,WAAW,KAAK,aAAa;AAAA,IAC7B,MAAM,KAAK,MAAM;AAAA,IACjB,gBAAgB,KAAK,gBAAgB;AAAA,EACvC;AACF;AAUO,SAAS,+BAA+B,MAAiC;AAC9E,SAAO;AAAA,IACL,eAAe,KAAK,WAAW;AAAA,IAC/B,MAAM,KAAK,MAAM;AAAA,IACjB,gBAAgB,KAAK,gBAAgB;AAAA,EACvC;AACF;AAEO,SAAS,iCAAiC,MAAiC;AAChF,SAAO;AAAA,IACL,WAAW,KAAK,aAAa;AAAA,IAC7B,MAAM,KAAK,MAAM;AAAA,IACjB,gBAAgB,KAAK,gBAAgB;AAAA,EACvC;AACF;AAEO,SAAS,gCAAgC,QAAwC;AACtF,SAAO,OAAO,IAAI,CAAC,SAAS;AAC1B,WAAO,2BAA2B,IAAI;AAAA,EACxC,CAAC;AACH;AAEO,SAAS,kCAAkC,QAAwC;AACxF,SAAO,OAAO,IAAI,CAAC,SAAS;AAC1B,WAAO,6BAA6B,IAAI;AAAA,EAC1C,CAAC;AACH;AAWO,SAAS,sBAAsB,MAAwB;AAC5D,SAAO,EAAE,eAAe,KAAK,WAAW,GAAG,MAAM,KAAK,MAAM,EAAE;AAChE;AAEO,SAAS,wBAAwB,MAAwB;AAC9D,SAAO;AAAA,IACL,WAAW,KAAK,aAAa;AAAA,IAC7B,MAAM,KAAK,MAAM;AAAA,EACnB;AACF;AA+BO,SAAS,2BAA2B,MAA6B;AACtE,UAAQ,KAAK,WAAW;AAAA,IACtB,KAAK;AACH,aAAO,kCAAkC,IAA+B;AAAA,IAE1E,KAAK;AACH,aAAO,+BAA+B,IAA4B;AAAA,IAEpE,KAAK;AACH,aAAO,gC
AAgC,IAA6B;AAAA,IAEtE,KAAK;AACH,aAAO,4CAA4C,IAAyC;AAAA,IAE9F,KAAK;AACH,aAAO,+BAA+B,IAA4B;AAAA,IAEpE,KAAK;AACH,aAAO,iCAAiC,IAA8B;AAAA,IAExE,KAAK;AACH,aAAO,6BAA6B,IAA0B;AAAA,IAEhE,KAAK;AACH,aAAO,0BAA0B,IAAuB;AAAA,IAE1D,KAAK;AACH,aAAO,mCAAmC,IAAgC;AAAA,IAE5E,KAAK;AACH,aAAO,4BAA4B,IAAyB;AAAA,IAE9D,KAAK;AACH,aAAO,2BAA2B,IAAwB;AAAA,IAE5D,KAAK;AACH,aAAO,2BAA2B,IAAwB;AAAA,IAE5D,KAAK;AACH,aAAO,6BAA6B,IAA0B;AAAA,IAEhE,KAAK;AACH,aAAO,oCAAoC,IAAiC;AAAA,IAE9E,KAAK;AACH,aAAO,oCAAoC,IAAiC;AAAA,IAE9E,KAAK;AACH,aAAO,8BAA8B,IAA2B;AAAA,IAElE,KAAK;AACH,aAAO,6BAA6B,IAA0B;AAAA,IAEhE,KAAK;AACH,aAAO,8BAA8B,IAA2B;AAAA,IAElE,KAAK;AACH,aAAO,6BAA6B,IAA0B;AAAA,IAEhE,KAAK;AACH,aAAO,qCAAqC,IAAkC;AAAA,IAEhF,KAAK;AACH,aAAO,+BAA+B,IAA4B;AAAA,IAEpE,KAAK;AACH,aAAO,6BAA6B,IAA0B;AAAA,IAEhE,KAAK;AACH,aAAO,8BAA8B,IAA2B;AAAA,IAElE,KAAK;AACH,aAAO,4BAA4B,IAAyB;AAAA,IAE9D,KAAK;AACH,aAAO,mCAAmC,IAAgC;AAAA,IAE5E;AACE,aAAO,sBAAsB,IAAI;AAAA,EACrC;AACF;AAEO,SAAS,6BAA6B,MAA6B;AACxE,UAAQ,KAAK,aAAa,GAAG;AAAA,IAC3B,KAAK;AACH,aAAO,oCAAoC,IAA+B;AAAA,IAE5E,KAAK;AACH,aAAO,iCAAiC,IAA4B;AAAA,IAEtE,KAAK;AACH,aAAO,kCAAkC,IAA6B;AAAA,IAExE,KAAK;AACH,aAAO;AAAA,QACL;AAAA,MACF;AAAA,IAEF,KAAK;AACH,aAAO,iCAAiC,IAA4B;AAAA,IAEtE,KAAK;AACH,aAAO,mCAAmC,IAA8B;AAAA,IAE1E,KAAK;AACH,aAAO,+BAA+B,IAA0B;AAAA,IAElE,KAAK;AACH,aAAO,4BAA4B,IAAuB;AAAA,IAE5D,KAAK;AACH,aAAO,qCAAqC,IAAgC;AAAA,IAE9E,KAAK;AACH,aAAO,8BAA8B,IAAyB;AAAA,IAEhE,KAAK;AACH,aAAO,6BAA6B,IAAwB;AAAA,IAE9D,KAAK;AACH,aAAO,6BAA6B,IAAwB;AAAA,IAE9D,KAAK;AACH,aAAO,+BAA+B,IAA0B;AAAA,IAElE,KAAK;AACH,aAAO,sCAAsC,IAAiC;AAAA,IAEhF,KAAK;AACH,aAAO,sCAAsC,IAAiC;AAAA,IAEhF,KAAK;AACH,aAAO,gCAAgC,IAA2B;AAAA,IAEpE,KAAK;AACH,aAAO,+BAA+B,IAA0B;AAAA,IAElE,KAAK;AACH,aAAO,gCAAgC,IAA2B;AAAA,IAEpE,KAAK;AACH,aAAO,+BAA+B,IAA0B;AAAA,IAElE,KAAK;AACH,aAAO,uCAAuC,IAAkC;AAAA,IAElF,KAAK;AACH,aAAO,iCAAiC,IAA4B;AAAA,IAEtE,KAAK;AACH,aAAO,+BAA+B,IAA0B;AAAA,IAElE,KAAK;AACH,aAAO,gCAAgC,IAA2B;AAAA,IAEpE,KAAK;AACH,aAAO,8BAA8B,IAAyB;AAAA,IAEhE,KAAK;AACH,aAAO,qCAAqC,IAAgC;AAAA,IAE9E;AACE,aAAO,wBAAwB,IAAI;AAAA,EACvC;AACF;AAU
O,SAAS,kCAAkC,MAAoC;AACpF,SAAO;AAAA,IACL,eAAe,KAAK,WAAW;AAAA,IAC/B,MAAM,KAAK,MAAM;AAAA,IACjB,kBAAkB,KAAK,kBAAkB;AAAA,EAC3C;AACF;AAEO,SAAS,oCAAoC,MAAoC;AACtF,SAAO;AAAA,IACL,WAAW,KAAK,aAAa;AAAA,IAC7B,MAAM,KAAK,MAAM;AAAA,IACjB,kBAAkB,KAAK,kBAAkB;AAAA,EAC3C;AACF;AAYO,SAAS,+BAA+B,MAAiC;AAC9E,SAAO;AAAA,IACL,eAAe,KAAK,WAAW;AAAA,IAC/B,MAAM,KAAK,MAAM;AAAA,IACjB,eAAe,CAAC,KAAK,eAAe,IAChC,KAAK,eAAe,IACpB,KAAK,eAAe,EAAE,IAAI,CAAC,MAAW;AACpC,aAAO;AAAA,IACT,CAAC;AAAA,IACL,gBAAgB,KAAK,gBAAgB;AAAA,EACvC;AACF;AAEO,SAAS,iCAAiC,MAAiC;AAChF,SAAO;AAAA,IACL,WAAW,KAAK,aAAa;AAAA,IAC7B,MAAM,KAAK,MAAM;AAAA,IACjB,eAAe,CAAC,KAAK,eAAe,IAChC,KAAK,eAAe,IACpB,KAAK,eAAe,EAAE,IAAI,CAAC,MAAW;AACpC,aAAO;AAAA,IACT,CAAC;AAAA,IACL,gBAAgB,KAAK,gBAAgB;AAAA,EACvC;AACF;AAiBO,SAAS,gCAAgC,MAAkC;AAChF,SAAO;AAAA,IACL,eAAe,KAAK,WAAW;AAAA,IAC/B,MAAM,KAAK,MAAM;AAAA,IACjB,aAAa,KAAK,aAAa,EAAE,IAAI,CAAC,MAAW;AAC/C,aAAO;AAAA,IACT,CAAC;AAAA,IACD,YAAY,KAAK,YAAY;AAAA,IAC7B,WAAW,KAAK,cAAc;AAAA,EAChC;AACF;AAEO,SAAS,kCAAkC,MAAkC;AAClF,SAAO;AAAA,IACL,WAAW,KAAK,aAAa;AAAA,IAC7B,MAAM,KAAK,MAAM;AAAA,IACjB,aAAa,KAAK,aAAa,EAAE,IAAI,CAAC,MAAW;AAC/C,aAAO;AAAA,IACT,CAAC;AAAA,IACD,YAAY,KAAK,YAAY;AAAA,IAC7B,cAAc,KAAK,WAAW;AAAA,EAChC;AACF;AAkBO,SAAS,4CACd,MACK;AACL,SAAO;AAAA,IACL,eAAe,KAAK,WAAW;AAAA,IAC/B,MAAM,KAAK,MAAM;AAAA,IACjB,UAAU,KAAK,UAAU,EAAE,IAAI,CAAC,MAAW;AACzC,aAAO;AAAA,IACT,CAAC;AAAA,IACD,aAAa,KAAK,aAAa;AAAA,IAC/B,gBAAgB,KAAK,gBAAgB;AAAA,IACrC,gBAAgB,KAAK,gBAAgB;AAAA,IACrC,kBAAkB,KAAK,kBAAkB;AAAA,EAC3C;AACF;AAEO,SAAS,8CACd,MACmC;AACnC,SAAO;AAAA,IACL,WAAW,KAAK,aAAa;AAAA,IAC7B,MAAM,KAAK,MAAM;AAAA,IACjB,UAAU,KAAK,UAAU,EAAE,IAAI,CAAC,MAAW;AACzC,aAAO;AAAA,IACT,CAAC;AAAA,IACD,aAAa,KAAK,aAAa;AAAA,IAC/B,gBAAgB,KAAK,gBAAgB;AAAA,IACrC,gBAAgB,KAAK,gBAAgB;AAAA,IACrC,kBAAkB,KAAK,kBAAkB;AAAA,EAC3C;AACF;AAcO,SAAS,+BAA+B,MAAiC;AAC9E,SAAO;AAAA,IACL,eAAe,KAAK,WAAW;AAAA,IAC/B,MAAM,KAAK,MAAM;AAAA,IACjB,SAAS,KAAK,SAAS;AAAA,IACvB,SAAS,KAAK,SAAS;AAAA,IACvB,MAAM,KAAK,MAAM;AAAA,EACnB;AACF;AAEO,SAAS,iCAAiC,MAAiC;AAChF,SAAO;AAAA,IACL,WAAW,KAAK,aAAa;AAA
A,IAC7B,MAAM,KAAK,MAAM;AAAA,IACjB,SAAS,KAAK,SAAS;AAAA,IACvB,SAAS,KAAK,SAAS;AAAA,IACvB,MAAM,KAAK,MAAM;AAAA,EACnB;AACF;AAiBO,SAAS,iCAAiC,MAAmC;AAClF,SAAO;AAAA,IACL,eAAe,KAAK,WAAW;AAAA,IAC/B,MAAM,KAAK,MAAM;AAAA,IACjB,SAAS,KAAK,SAAS;AAAA,IACvB,SAAS,KAAK,SAAS;AAAA,IACvB,MAAM,KAAK,MAAM;AAAA,EACnB;AACF;AAEO,SAAS,mCAAmC,MAAmC;AACpF,SAAO;AAAA,IACL,WAAW,KAAK,aAAa;AAAA,IAC7B,MAAM,KAAK,MAAM;AAAA,IACjB,SAAS,KAAK,SAAS;AAAA,IACvB,SAAS,KAAK,SAAS;AAAA,IACvB,MAAM,KAAK,MAAM;AAAA,EACnB;AACF;AAUO,SAAS,6BAA6B,MAA+B;AAC1E,SAAO;AAAA,IACL,eAAe,KAAK,WAAW;AAAA,IAC/B,MAAM,KAAK,MAAM;AAAA,IACjB,UAAU,CAAC,KAAK,UAAU,IACtB,KAAK,UAAU,IACf,KAAK,UAAU,EAAE,IAAI,CAAC,MAAW;AAC/B,aAAO;AAAA,IACT,CAAC;AAAA,EACP;AACF;AAEO,SAAS,+BAA+B,MAA+B;AAC5E,SAAO;AAAA,IACL,WAAW,KAAK,aAAa;AAAA,IAC7B,MAAM,KAAK,MAAM;AAAA,IACjB,UAAU,CAAC,KAAK,UAAU,IACtB,KAAK,UAAU,IACf,KAAK,UAAU,EAAE,IAAI,CAAC,MAAW;AAC/B,aAAO;AAAA,IACT,CAAC;AAAA,EACP;AACF;AAYO,SAAS,0BAA0B,MAA4B;AACpE,SAAO;AAAA,IACL,eAAe,KAAK,WAAW;AAAA,IAC/B,MAAM,KAAK,MAAM;AAAA,IACjB,WAAW,KAAK,WAAW,EAAE,IAAI,CAAC,MAAW;AAC3C,aAAO;AAAA,IACT,CAAC;AAAA,IACD,eAAe,KAAK,oBAAoB;AAAA,EAC1C;AACF;AAEO,SAAS,4BAA4B,MAA4B;AACtE,SAAO;AAAA,IACL,WAAW,KAAK,aAAa;AAAA,IAC7B,MAAM,KAAK,MAAM;AAAA,IACjB,WAAW,KAAK,WAAW,EAAE,IAAI,CAAC,MAAW;AAC3C,aAAO;AAAA,IACT,CAAC;AAAA,IACD,oBAAoB,KAAK,eAAe;AAAA,EAC1C;AACF;AAYO,SAAS,mCAAmC,MAAqC;AACtF,SAAO;AAAA,IACL,eAAe,KAAK,WAAW;AAAA,IAC/B,MAAM,KAAK,MAAM;AAAA,IACjB,UAAU,KAAK,UAAU,EAAE,IAAI,CAAC,MAAW;AACzC,aAAO;AAAA,IACT,CAAC;AAAA,IACD,YAAY,KAAK,YAAY;AAAA,EAC/B;AACF;AAEO,SAAS,qCAAqC,MAAqC;AACxF,SAAO;AAAA,IACL,WAAW,KAAK,aAAa;AAAA,IAC7B,MAAM,KAAK,MAAM;AAAA,IACjB,UAAU,KAAK,UAAU,EAAE,IAAI,CAAC,MAAW;AACzC,aAAO;AAAA,IACT,CAAC;AAAA,IACD,YAAY,KAAK,YAAY;AAAA,EAC/B;AACF;AAYO,SAAS,4BAA4B,MAA8B;AACxE,SAAO;AAAA,IACL,eAAe,KAAK,WAAW;AAAA,IAC/B,MAAM,KAAK,MAAM;AAAA,IACjB,KAAK,KAAK,WAAW;AAAA,IACrB,KAAK,KAAK,WAAW;AAAA,EACvB;AACF;AAEO,SAAS,8BAA8B,MAA8B;AAC1E,SAAO;AAAA,IACL,WAAW,KAAK,aAAa;AAAA,IAC7B,MAAM,KAAK,MAAM;AAAA,IACjB,WAAW,KAAK,KAAK;AAAA,IACrB,WAAW,KAAK,KAAK;AAAA,EACvB;AACF;AAYO,S
AAS,2BAA2B,MAA6B;AACtE,SAAO;AAAA,IACL,eAAe,KAAK,WAAW;AAAA,IAC/B,MAAM,KAAK,MAAM;AAAA,IACjB,eAAe,KAAK,eAAe;AAAA,IACnC,kBAAkB,KAAK,kBAAkB;AAAA,EAC3C;AACF;AAEO,SAAS,6BAA6B,MAA6B;AACxE,SAAO;AAAA,IACL,WAAW,KAAK,aAAa;AAAA,IAC7B,MAAM,KAAK,MAAM;AAAA,IACjB,eAAe,KAAK,eAAe;AAAA,IACnC,kBAAkB,KAAK,kBAAkB;AAAA,EAC3C;AACF;AAYO,SAAS,2BAA2B,MAA6B;AACtE,SAAO;AAAA,IACL,eAAe,KAAK,WAAW;AAAA,IAC/B,MAAM,KAAK,MAAM;AAAA,IACjB,SAAS,KAAK,SAAS;AAAA,IACvB,SAAS,KAAK,SAAS;AAAA,EACzB;AACF;AAEO,SAAS,6BAA6B,MAA6B;AACxE,SAAO;AAAA,IACL,WAAW,KAAK,aAAa;AAAA,IAC7B,MAAM,KAAK,MAAM;AAAA,IACjB,SAAS,KAAK,SAAS;AAAA,IACvB,SAAS,KAAK,SAAS;AAAA,EACzB;AACF;AAYO,SAAS,6BAA6B,MAA+B;AAC1E,SAAO;AAAA,IACL,eAAe,KAAK,WAAW;AAAA,IAC/B,MAAM,KAAK,MAAM;AAAA,IACjB,SAAS,KAAK,SAAS;AAAA,IACvB,SAAS,KAAK,SAAS;AAAA,EACzB;AACF;AAEO,SAAS,+BAA+B,MAA+B;AAC5E,SAAO;AAAA,IACL,WAAW,KAAK,aAAa;AAAA,IAC7B,MAAM,KAAK,MAAM;AAAA,IACjB,SAAS,KAAK,SAAS;AAAA,IACvB,SAAS,KAAK,SAAS;AAAA,EACzB;AACF;AAYO,SAAS,oCAAoC,MAAsC;AACxF,SAAO;AAAA,IACL,eAAe,KAAK,WAAW;AAAA,IAC/B,MAAM,KAAK,MAAM;AAAA,IACjB,UAAU,KAAK,UAAU,EAAE,IAAI,CAAC,MAAW;AACzC,aAAO;AAAA,IACT,CAAC;AAAA,IACD,kBAAkB,KAAK,kBAAkB;AAAA,EAC3C;AACF;AAEO,SAAS,sCAAsC,MAAsC;AAC1F,SAAO;AAAA,IACL,WAAW,KAAK,aAAa;AAAA,IAC7B,MAAM,KAAK,MAAM;AAAA,IACjB,UAAU,KAAK,UAAU,EAAE,IAAI,CAAC,MAAW;AACzC,aAAO;AAAA,IACT,CAAC;AAAA,IACD,kBAAkB,KAAK,kBAAkB;AAAA,EAC3C;AACF;AAYO,SAAS,oCAAoC,MAAsC;AACxF,SAAO;AAAA,IACL,eAAe,KAAK,WAAW;AAAA,IAC/B,MAAM,KAAK,MAAM;AAAA,IACjB,SAAS,KAAK,SAAS;AAAA,IACvB,aAAa,KAAK,aAAa;AAAA,EACjC;AACF;AAEO,SAAS,sCAAsC,MAAsC;AAC1F,SAAO;AAAA,IACL,WAAW,KAAK,aAAa;AAAA,IAC7B,MAAM,KAAK,MAAM;AAAA,IACjB,SAAS,KAAK,SAAS;AAAA,IACvB,aAAa,KAAK,aAAa;AAAA,EACjC;AACF;AAYO,SAAS,8BAA8B,MAAgC;AAC5E,SAAO;AAAA,IACL,eAAe,KAAK,WAAW;AAAA,IAC/B,MAAM,KAAK,MAAM;AAAA,IACjB,SAAS,KAAK,SAAS;AAAA,IACvB,SAAS,KAAK,uBAAuB;AAAA,EACvC;AACF;AAEO,SAAS,gCAAgC,MAAgC;AAC9E,SAAO;AAAA,IACL,WAAW,KAAK,aAAa;AAAA,IAC7B,MAAM,KAAK,MAAM;AAAA,IACjB,SAAS,KAAK,SAAS;AAAA,IACvB,uBAAuB,KAAK,SAAS;AAAA,EACvC;AACF;AAkCO,SAAS,6BAA6B,MAA+B;AAC1E,SAAO;AAAA,IACL,eAAe,KAAK,WAAW;
AAAA,IAC/B,MAAM,KAAK,MAAM;AAAA,IACjB,gBAAgB,KAAK,gBAAgB;AAAA,IACrC,gBAAgB,KAAK,gBAAgB;AAAA,IACrC,gBAAgB,KAAK,gBAAgB;AAAA,IACrC,4BAA4B,KAAK,4BAA4B;AAAA,IAC7D,gBAAgB,KAAK,gBAAgB;AAAA,IACrC,aAAa,KAAK,aAAa;AAAA,EACjC;AACF;AAEO,SAAS,+BAA+B,MAA+B;AAC5E,SAAO;AAAA,IACL,WAAW,KAAK,aAAa;AAAA,IAC7B,MAAM,KAAK,MAAM;AAAA,IACjB,gBAAgB,KAAK,gBAAgB;AAAA,IACrC,gBAAgB,KAAK,gBAAgB;AAAA,IACrC,gBAAgB,KAAK,gBAAgB;AAAA,IACrC,4BAA4B,KAAK,4BAA4B;AAAA,IAC7D,gBAAgB,KAAK,gBAAgB;AAAA,IACrC,aAAa,KAAK,aAAa;AAAA,EACjC;AACF;AAUO,SAAS,8BAA8B,MAAgC;AAC5E,SAAO,EAAE,eAAe,KAAK,WAAW,GAAG,MAAM,KAAK,MAAM,GAAG,UAAU,KAAK,UAAU,EAAE;AAC5F;AAEO,SAAS,gCAAgC,MAAgC;AAC9E,SAAO;AAAA,IACL,WAAW,KAAK,aAAa;AAAA,IAC7B,MAAM,KAAK,MAAM;AAAA,IACjB,UAAU,KAAK,UAAU;AAAA,EAC3B;AACF;AAmCO,SAAS,6BAA6B,MAA+B;AAC1E,SAAO,EAAE,eAAe,KAAK,WAAW,GAAG,MAAM,KAAK,MAAM,GAAG,UAAU,KAAK,UAAU,EAAE;AAC5F;AAEO,SAAS,+BAA+B,MAA+B;AAC5E,SAAO;AAAA,IACL,WAAW,KAAK,aAAa;AAAA,IAC7B,MAAM,KAAK,MAAM;AAAA,IACjB,UAAU,KAAK,UAAU;AAAA,EAC3B;AACF;AAmEO,SAAS,qCAAqC,MAAuC;AAC1F,SAAO;AAAA,IACL,eAAe,KAAK,WAAW;AAAA,IAC/B,MAAM,KAAK,MAAM;AAAA,IACjB,OAAO,KAAK,OAAO,EAAE,IAAI,CAAC,MAAW;AACnC,aAAO;AAAA,IACT,CAAC;AAAA,EACH;AACF;AAEO,SAAS,uCAAuC,MAAuC;AAC5F,SAAO;AAAA,IACL,WAAW,KAAK,aAAa;AAAA,IAC7B,MAAM,KAAK,MAAM;AAAA,IACjB,OAAO,KAAK,OAAO,EAAE,IAAI,CAAC,MAAW;AACnC,aAAO;AAAA,IACT,CAAC;AAAA,EACH;AACF;AAgBO,SAAS,+BAA+B,MAAiC;AAC9E,SAAO;AAAA,IACL,eAAe,KAAK,WAAW;AAAA,IAC/B,MAAM,KAAK,MAAM;AAAA,IACjB,WAAW,CAAC,KAAK,WAAW,IACxB,KAAK,WAAW,IAChB,KAAK,WAAW,EAAE,IAAI,CAAC,MAAW;AAChC,aAAO;AAAA,IACT,CAAC;AAAA,IACL,eAAe,KAAK,eAAe;AAAA,IACnC,YAAY,KAAK,YAAY;AAAA,IAC7B,gBAAgB,KAAK,yBAAyB;AAAA,EAChD;AACF;AAEO,SAAS,iCAAiC,MAAiC;AAChF,SAAO;AAAA,IACL,WAAW,KAAK,aAAa;AAAA,IAC7B,MAAM,KAAK,MAAM;AAAA,IACjB,WAAW,CAAC,KAAK,WAAW,IACxB,KAAK,WAAW,IAChB,KAAK,WAAW,EAAE,IAAI,CAAC,MAAW;AAChC,aAAO;AAAA,IACT,CAAC;AAAA,IACL,eAAe,KAAK,eAAe;AAAA,IACnC,YAAY,KAAK,YAAY;AAAA,IAC7B,yBAAyB,KAAK,gBAAgB;AAAA,EAChD;AACF;AAgDO,SAAS,6BAA6B,MAA+B;AAC1E,SAAO;AAAA,IACL,eAAe,KAAK,WAAW;AAAA,IAC/B,MAAM,KAAK,MAAM;AAAA,IACjB,UAAU,KAAK,UAAU,
EAAE,IAAI,CAAC,MAAW;AACzC,aAAO;AAAA,IACT,CAAC;AAAA,IACD,YAAY,KAAK,YAAY;AAAA,IAC7B,QAAQ,KAAK,QAAQ;AAAA,EACvB;AACF;AAEO,SAAS,+BAA+B,MAA+B;AAC5E,SAAO;AAAA,IACL,WAAW,KAAK,aAAa;AAAA,IAC7B,MAAM,KAAK,MAAM;AAAA,IACjB,UAAU,KAAK,UAAU,EAAE,IAAI,CAAC,MAAW;AACzC,aAAO;AAAA,IACT,CAAC;AAAA,IACD,YAAY,KAAK,YAAY;AAAA,IAC7B,QAAQ,KAAK,QAAQ;AAAA,EACvB;AACF;AAUO,SAAS,8BAA8B,MAAgC;AAC5E,SAAO,EAAE,eAAe,KAAK,WAAW,GAAG,MAAM,KAAK,MAAM,GAAG,QAAQ,KAAK,QAAQ,EAAE;AACxF;AAEO,SAAS,gCAAgC,MAAgC;AAC9E,SAAO;AAAA,IACL,WAAW,KAAK,aAAa;AAAA,IAC7B,MAAM,KAAK,MAAM;AAAA,IACjB,QAAQ,KAAK,QAAQ;AAAA,EACvB;AACF;AAUO,SAAS,4BAA4B,MAA8B;AACxE,SAAO;AAAA,IACL,eAAe,KAAK,WAAW;AAAA,IAC/B,MAAM,KAAK,MAAM;AAAA,IACjB,oBAAoB,KAAK,oBAAoB;AAAA,EAC/C;AACF;AAEO,SAAS,8BAA8B,MAA8B;AAC1E,SAAO;AAAA,IACL,WAAW,KAAK,aAAa;AAAA,IAC7B,MAAM,KAAK,MAAM;AAAA,IACjB,oBAAoB,KAAK,oBAAoB;AAAA,EAC/C;AACF;AA4BO,SAAS,mCAAmC,MAAqC;AACtF,SAAO;AAAA,IACL,eAAe,KAAK,WAAW;AAAA,IAC/B,MAAM,KAAK,MAAM;AAAA,IACjB,mBAAmB,KAAK,mBAAmB;AAAA,IAC3C,qBAAqB,KAAK,qBAAqB;AAAA,IAC/C,eAAe,KAAK,eAAe;AAAA,IACnC,iBAAiB,KAAK,iBAAiB;AAAA,IACvC,aAAa,KAAK,aAAa;AAAA,IAC/B,mBAAmB,KAAK,mBAAmB;AAAA,IAC3C,kBAAkB,KAAK,kBAAkB;AAAA,IACzC,iBAAiB,KAAK,iBAAiB;AAAA,IACvC,uBAAuB,KAAK,uBAAuB;AAAA,IACnD,gBAAgB,CAAC,KAAK,gBAAgB,IAClC,KAAK,gBAAgB,IACrB,KAAK,gBAAgB,EAAE,IAAI,CAAC,MAAW;AACrC,aAAO;AAAA,IACT,CAAC;AAAA,EACP;AACF;AAEO,SAAS,qCAAqC,MAAqC;AACxF,SAAO;AAAA,IACL,WAAW,KAAK,aAAa;AAAA,IAC7B,MAAM,KAAK,MAAM;AAAA,IACjB,mBAAmB,KAAK,mBAAmB;AAAA,IAC3C,qBAAqB,KAAK,qBAAqB;AAAA,IAC/C,eAAe,KAAK,eAAe;AAAA,IACnC,iBAAiB,KAAK,iBAAiB;AAAA,IACvC,aAAa,KAAK,aAAa;AAAA,IAC/B,mBAAmB,KAAK,mBAAmB;AAAA,IAC3C,kBAAkB,KAAK,kBAAkB;AAAA,IACzC,iBAAiB,KAAK,iBAAiB;AAAA,IACvC,uBAAuB,KAAK,uBAAuB;AAAA,IACnD,gBAAgB,CAAC,KAAK,gBAAgB,IAClC,KAAK,gBAAgB,IACrB,KAAK,gBAAgB,EAAE,IAAI,CAAC,MAAW;AACrC,aAAO;AAAA,IACT,CAAC;AAAA,EACP;AACF;AAEO,SAAS,+BAA+B,QAAuC;AACpF,SAAO,OAAO,IAAI,CAAC,SAAS;AAC1B,WAAO,0BAA0B,IAAI;AAAA,EACvC,CAAC;AACH;AAEO,SAAS,iCAAiC,QAAuC;AACtF,SAAO,OAAO,IAAI,CAAC,SAAS;AAC1B,WAAO,4BAA4B,IAAI;AAAA,EACzC,CAAC;AACH;AAWO,SAAS,qBAA
qB,MAAuB;AAC1D,SAAO,EAAE,eAAe,KAAK,WAAW,GAAG,MAAM,KAAK,MAAM,EAAE;AAChE;AAEO,SAAS,uBAAuB,MAAuB;AAC5D,SAAO;AAAA,IACL,WAAW,KAAK,aAAa;AAAA,IAC7B,MAAM,KAAK,MAAM;AAAA,EACnB;AACF;AAKO,SAAS,0BAA0B,MAA4B;AACpE,UAAQ,KAAK,WAAW;AAAA,IACtB,KAAK;AACH,aAAO,4BAA4B,IAAyB;AAAA,IAE9D,KAAK;AACH,aAAO,mCAAmC,IAAgC;AAAA,IAE5E;AACE,aAAO,qBAAqB,IAAI;AAAA,EACpC;AACF;AAEO,SAAS,4BAA4B,MAA4B;AACtE,UAAQ,KAAK,aAAa,GAAG;AAAA,IAC3B,KAAK;AACH,aAAO,8BAA8B,IAAyB;AAAA,IAEhE,KAAK;AACH,aAAO,qCAAqC,IAAgC;AAAA,IAE9E;AACE,aAAO,uBAAuB,IAAI;AAAA,EACtC;AACF;AAUO,SAAS,4BAA4B,MAA8B;AACxE,SAAO;AAAA,IACL,eAAe,KAAK,WAAW;AAAA,IAC/B,MAAM,KAAK,MAAM;AAAA,IACjB,UAAU,KAAK,UAAU,EAAE,IAAI,CAAC,MAAW;AACzC,aAAO;AAAA,IACT,CAAC;AAAA,EACH;AACF;AAEO,SAAS,8BAA8B,MAA8B;AAC1E,SAAO;AAAA,IACL,WAAW,KAAK,aAAa;AAAA,IAC7B,MAAM,KAAK,MAAM;AAAA,IACjB,UAAU,KAAK,UAAU,EAAE,IAAI,CAAC,MAAW;AACzC,aAAO;AAAA,IACT,CAAC;AAAA,EACH;AACF;AAYO,SAAS,mCAAmC,MAAqC;AACtF,SAAO;AAAA,IACL,eAAe,KAAK,WAAW;AAAA,IAC/B,MAAM,KAAK,MAAM;AAAA,IACjB,SAAS,KAAK,SAAS;AAAA,IACvB,aAAa,KAAK,aAAa;AAAA,EACjC;AACF;AAEO,SAAS,qCAAqC,MAAqC;AACxF,SAAO;AAAA,IACL,WAAW,KAAK,aAAa;AAAA,IAC7B,MAAM,KAAK,MAAM;AAAA,IACjB,SAAS,KAAK,SAAS;AAAA,IACvB,aAAa,KAAK,aAAa;AAAA,EACjC;AACF;AAEO,SAAS,sCACd,QACO;AACP,SAAO,OAAO,IAAI,CAAC,SAAS;AAC1B,WAAO,iCAAiC,IAAI;AAAA,EAC9C,CAAC;AACH;AAEO,SAAS,wCACd,QACO;AACP,SAAO,OAAO,IAAI,CAAC,SAAS;AAC1B,WAAO,mCAAmC,IAAI;AAAA,EAChD,CAAC;AACH;AAWO,SAAS,4BAA4B,MAA8B;AACxE,SAAO,EAAE,eAAe,KAAK,WAAW,GAAG,MAAM,KAAK,MAAM,EAAE;AAChE;AAEO,SAAS,8BAA8B,MAA8B;AAC1E,SAAO;AAAA,IACL,WAAW,KAAK,aAAa;AAAA,IAC7B,MAAM,KAAK,MAAM;AAAA,EACnB;AACF;AAKO,SAAS,iCAAiC,MAAmC;AAClF,UAAQ,KAAK,WAAW;AAAA,IACtB,KAAK;AACH,aAAO,2BAA2B,IAAwB;AAAA,IAE5D;AACE,aAAO,4BAA4B,IAAI;AAAA,EAC3C;AACF;AAEO,SAAS,mCAAmC,MAAmC;AACpF,UAAQ,KAAK,aAAa,GAAG;AAAA,IAC3B,KAAK;AACH,aAAO,6BAA6B,IAAwB;AAAA,IAE9D;AACE,aAAO,8BAA8B,IAAI;AAAA,EAC7C;AACF;AAYO,SAAS,2BAA2B,MAA6B;AACtE,SAAO;AAAA,IACL,eAAe,KAAK,WAAW;AAAA,IAC/B,MAAM,KAAK,MAAM;AAAA,IACjB,cAAc,CAAC,KAAK,cAAc,IAC9B,KAAK,cAAc,IACnB,KAAK,cAAc,EAAE,IAAI,CAAC,MAAW;AACnC,aAAO;AAAA,IA
CT,CAAC;AAAA,IACL,aAAa,CAAC,KAAK,aAAa,IAC5B,KAAK,aAAa,IAClB,KAAK,aAAa,EAAE,IAAI,CAAC,MAAW;AAClC,aAAO;AAAA,IACT,CAAC;AAAA,EACP;AACF;AAEO,SAAS,6BAA6B,MAA6B;AACxE,SAAO;AAAA,IACL,WAAW,KAAK,aAAa;AAAA,IAC7B,MAAM,KAAK,MAAM;AAAA,IACjB,cAAc,CAAC,KAAK,cAAc,IAC9B,KAAK,cAAc,IACnB,KAAK,cAAc,EAAE,IAAI,CAAC,MAAW;AACnC,aAAO;AAAA,IACT,CAAC;AAAA,IACL,aAAa,CAAC,KAAK,aAAa,IAC5B,KAAK,aAAa,IAClB,KAAK,aAAa,EAAE,IAAI,CAAC,MAAW;AAClC,aAAO;AAAA,IACT,CAAC;AAAA,EACP;AACF;AASO,SAAS,8BAA8B,MAAgC;AAC5E,SAAO,EAAE,eAAe,KAAK,WAAW,EAAE;AAC5C;AAEO,SAAS,gCAAgC,MAAgC;AAC9E,SAAO;AAAA,IACL,WAAW,KAAK,aAAa;AAAA,EAC/B;AACF;AAKO,SAAS,mCAAmC,MAAqC;AACtF,UAAQ,KAAK,WAAW;AAAA,IACtB,KAAK;AACH,aAAO,4BAA4B,IAAyB;AAAA,IAE9D,KAAK;AACH,aAAO,yBAAyB,IAAsB;AAAA,IAExD;AACE,aAAO,8BAA8B,IAAI;AAAA,EAC7C;AACF;AAEO,SAAS,qCAAqC,MAAqC;AACxF,UAAQ,KAAK,aAAa,GAAG;AAAA,IAC3B,KAAK;AACH,aAAO,8BAA8B,IAAyB;AAAA,IAEhE,KAAK;AACH,aAAO,2BAA2B,IAAsB;AAAA,IAE1D;AACE,aAAO,gCAAgC,IAAI;AAAA,EAC/C;AACF;AAQO,SAAS,4BAA4B,MAA8B;AACxE,SAAO,EAAE,eAAe,KAAK,WAAW,EAAE;AAC5C;AAEO,SAAS,8BAA8B,MAA8B;AAC1E,SAAO;AAAA,IACL,WAAW,KAAK,aAAa;AAAA,EAC/B;AACF;AAYO,SAAS,yBAAyB,MAA2B;AAClE,SAAO,EAAE,eAAe,KAAK,WAAW,GAAG,IAAI,KAAK,IAAI,GAAG,GAAG,KAAK,GAAG,EAAE;AAC1E;AAEO,SAAS,2BAA2B,MAA2B;AACpE,SAAO;AAAA,IACL,WAAW,KAAK,aAAa;AAAA,IAC7B,IAAI,KAAK,IAAI;AAAA,IACb,GAAG,KAAK,GAAG;AAAA,EACb;AACF;AAUO,SAAS,yBAAyB,MAA2B;AAClE,SAAO;AAAA,IACL,sBAAsB,KAAK,0BAA0B;AAAA,IACrD,gBAAgB,CAAC,KAAK,gBAAgB,IAClC,KAAK,gBAAgB,IACrB,qCAAqC,KAAK,gBAAgB,CAAC;AAAA,EACjE;AACF;AAEO,SAAS,2BAA2B,MAA2B;AACpE,SAAO;AAAA,IACL,0BAA0B,KAAK,sBAAsB;AAAA,IACrD,gBAAgB,CAAC,KAAK,gBAAgB,IAClC,KAAK,gBAAgB,IACrB,uCAAuC,KAAK,gBAAgB,CAAC;AAAA,EACnE;AACF;AAEO,SAAS,qCAAqC,QAA6C;AAChG,SAAO,OAAO,IAAI,CAAC,SAAS;AAC1B,WAAO,gCAAgC,IAAI;AAAA,EAC7C,CAAC;AACH;AAEO,SAAS,uCACd,QACO;AACP,SAAO,OAAO,IAAI,CAAC,SAAS;AAC1B,WAAO,kCAAkC,IAAI;AAAA,EAC/C,CAAC;AACH;AAYO,SAAS,gCAAgC,MAAkC;AAChF,SAAO;AAAA,IACL,MAAM,KAAK,MAAM;AAAA,IACjB,mBAAmB,oCAAoC,KAAK,mBAAmB,CAAC;AAAA,IAChF,cAAc,KAAK,cAAc;AAAA,EACnC;AACF;AAEO,SAAS,kCAAkC,MAAkC;AAClF,SAAO;A
AAA,IACL,MAAM,KAAK,MAAM;AAAA,IACjB,mBAAmB,sCAAsC,KAAK,mBAAmB,CAAC;AAAA,IAClF,cAAc,KAAK,cAAc;AAAA,EACnC;AACF;AAYO,SAAS,oCAAoC,MAAsC;AACxF,SAAO;AAAA,IACL,YAAY,CAAC,KAAK,YAAY,IAC1B,KAAK,YAAY,IACjB,wBAAwB,KAAK,YAAY,CAAC;AAAA,IAC9C,0BAA0B,CAAC,KAAK,eAAe,IAC3C,KAAK,eAAe,IACpB,6BAA6B,KAAK,eAAe,CAAC;AAAA,IACtD,2BAA2B,CAAC,KAAK,gBAAgB,IAC7C,KAAK,gBAAgB,IACrB,6BAA6B,KAAK,gBAAgB,CAAC;AAAA,EACzD;AACF;AAEO,SAAS,sCAAsC,MAAsC;AAC1F,SAAO;AAAA,IACL,YAAY,CAAC,KAAK,YAAY,IAC1B,KAAK,YAAY,IACjB,0BAA0B,KAAK,YAAY,CAAC;AAAA,IAChD,eAAe,CAAC,KAAK,0BAA0B,IAC3C,KAAK,0BAA0B,IAC/B,+BAA+B,KAAK,0BAA0B,CAAC;AAAA,IACnE,gBAAgB,CAAC,KAAK,2BAA2B,IAC7C,KAAK,2BAA2B,IAChC,+BAA+B,KAAK,2BAA2B,CAAC;AAAA,EACtE;AACF;AAQO,SAAS,wBAAwB,MAA0B;AAChE,SAAO,EAAE,WAAW,KAAK,MAAM,EAAE;AACnC;AAEO,SAAS,0BAA0B,MAA0B;AAClE,SAAO;AAAA,IACL,MAAM,KAAK,WAAW;AAAA,EACxB;AACF;AAEO,SAAS,6BAA6B,QAAqC;AAChF,SAAO,OAAO,IAAI,CAAC,SAAS;AAC1B,WAAO,wBAAwB,IAAI;AAAA,EACrC,CAAC;AACH;AAEO,SAAS,+BAA+B,QAAqC;AAClF,SAAO,OAAO,IAAI,CAAC,SAAS;AAC1B,WAAO,0BAA0B,IAAI;AAAA,EACvC,CAAC;AACH;AAGO,IAAK,oBAAL,kBAAKC,uBAAL;AAEL,EAAAA,mBAAA,0BAAuB;AAEvB,EAAAA,mBAAA,mBAAgB;AAJN,SAAAA;AAAA,GAAA;AA6BL,SAAS,uBAAuB,MAAyB;AAC9D,SAAO;AAAA,IACL,UAAU,CAAC,KAAK,UAAU,IACtB,KAAK,UAAU,IACf,mCAAmC,KAAK,UAAU,CAAC;AAAA,IACvD,YAAY,CAAC,KAAK,YAAY,IAC1B,KAAK,YAAY,IACjB,uDAAuD,KAAK,YAAY,CAAC;AAAA,IAC7E,aAAa,CAAC,KAAK,aAAa,IAC5B,KAAK,aAAa,IAClB,2CAA2C,KAAK,aAAa,CAAC;AAAA,IAClE,cAAc,CAAC,KAAK,cAAc,IAC9B,KAAK,cAAc,IACnB,4CAA4C,KAAK,cAAc,CAAC;AAAA,EACtE;AACF;AAEO,SAAS,yBAAyB,MAAyB;AAChE,SAAO;AAAA,IACL,UAAU,CAAC,KAAK,UAAU,IACtB,KAAK,UAAU,IACf,qCAAqC,KAAK,UAAU,CAAC;AAAA,IACzD,YAAY,CAAC,KAAK,YAAY,IAC1B,KAAK,YAAY,IACjB,yDAAyD,KAAK,YAAY,CAAC;AAAA,IAC/E,aAAa,CAAC,KAAK,aAAa,IAC5B,KAAK,aAAa,IAClB,6CAA6C,KAAK,aAAa,CAAC;AAAA,IACpE,cAAc,CAAC,KAAK,cAAc,IAC9B,KAAK,cAAc,IACnB,8CAA8C,KAAK,cAAc,CAAC;AAAA,EACxE;AACF;AAEO,SAAS,mCAAmC,QAA2C;AAC5F,SAAO,OAAO,IAAI,CAAC,SAAS;AAC1B,WAAO,8BAA8B,IAAI;AAAA,EAC3C,CAAC;AACH;AAEO,SAAS,qCAAqC,QAA2C;AAC9F,SAAO,OAAO,IAAI,CAAC,SAAS;AAC1B,WAAO,gCAAgC,IAAI;AAAA,EAC7C,CA
AC;AACH;AAcO,SAAS,8BAA8B,MAAgC;AAC5E,SAAO;AAAA,IACL,MAAM,KAAK,MAAM;AAAA,IACjB,WAAW,KAAK,4BAA4B;AAAA,IAC5C,YAAY,KAAK,gBAAgB;AAAA,IACjC,aAAa,KAAK,iBAAiB;AAAA,EACrC;AACF;AAEO,SAAS,gCAAgC,MAAgC;AAC9E,SAAO;AAAA,IACL,MAAM,KAAK,MAAM;AAAA,IACjB,4BAA4B,KAAK,WAAW;AAAA,IAC5C,gBAAgB,KAAK,YAAY;AAAA,IACjC,iBAAiB,KAAK,aAAa;AAAA,EACrC;AACF;AAEO,SAAS,uDACd,QACO;AACP,SAAO,OAAO,IAAI,CAAC,SAAS;AAC1B,WAAO,kDAAkD,IAAI;AAAA,EAC/D,CAAC;AACH;AAEO,SAAS,yDACd,QACO;AACP,SAAO,OAAO,IAAI,CAAC,SAAS;AAC1B,WAAO,oDAAoD,IAAI;AAAA,EACjE,CAAC;AACH;AAWO,SAAS,6CACd,MACK;AACL,SAAO,EAAE,MAAM,KAAK,MAAM,GAAG,MAAM,KAAK,MAAM,EAAE;AAClD;AAEO,SAAS,+CACd,MACoC;AACpC,SAAO;AAAA,IACL,MAAM,KAAK,MAAM;AAAA,IACjB,MAAM,KAAK,MAAM;AAAA,EACnB;AACF;AAQO,SAAS,kDACd,MACK;AACL,UAAQ,KAAK,MAAM;AAAA,IACjB,KAAK;AACH,aAAO,qCAAqC,IAAkC;AAAA,IAEhF,KAAK;AACH,aAAO;AAAA,QACL;AAAA,MACF;AAAA,IAEF;AACE,aAAO,6CAA6C,IAAI;AAAA,EAC5D;AACF;AAEO,SAAS,oDACd,MACyC;AACzC,UAAQ,KAAK,MAAM,GAAG;AAAA,IACpB,KAAK;AACH,aAAO,uCAAuC,IAAkC;AAAA,IAElF,KAAK;AACH,aAAO;AAAA,QACL;AAAA,MACF;AAAA,IAEF;AACE,aAAO,+CAA+C,IAAI;AAAA,EAC9D;AACF;AAGO,IAAK,iCAAL,kBAAKC,oCAAL;AAEL,EAAAA,gCAAA,UAAO;AAEP,EAAAA,gCAAA,mBAAgB;AAJN,SAAAA;AAAA,GAAA;AAyBL,SAAS,qCAAqC,MAAuC;AAC1F,SAAO;AAAA,IACL,MAAM,KAAK,MAAM;AAAA,IACjB,MAAM,KAAK,MAAM;AAAA,IACjB,gBAAgB,CAAC,KAAK,YAAY,IAC9B,KAAK,YAAY,IACjB,yBAAyB,KAAK,YAAY,CAAC;AAAA,EACjD;AACF;AAEO,SAAS,uCAAuC,MAAuC;AAC5F,SAAO;AAAA,IACL,MAAM,KAAK,MAAM;AAAA,IACjB,MAAM,KAAK,MAAM;AAAA,IACjB,YAAY,CAAC,KAAK,gBAAgB,IAC9B,KAAK,gBAAgB,IACrB,2BAA2B,KAAK,gBAAgB,CAAC;AAAA,EACvD;AACF;AAcO,SAAS,yBAAyB,MAA2B;AAClE,SAAO;AAAA,IACL,GAAG,KAAK,GAAG;AAAA,IACX,gBAAgB,KAAK,gBAAgB;AAAA,IACrC,UAAU,KAAK,UAAU;AAAA,IACzB,QAAQ,KAAK,QAAQ;AAAA,EACvB;AACF;AAEO,SAAS,2BAA2B,MAA2B;AACpE,SAAO;AAAA,IACL,GAAG,KAAK,GAAG;AAAA,IACX,gBAAgB,KAAK,gBAAgB;AAAA,IACrC,UAAU,KAAK,UAAU;AAAA,IACzB,QAAQ,KAAK,QAAQ;AAAA,EACvB;AACF;AAGO,IAAK,mCAAL,kBAAKC,sCAAL;AAEL,EAAAA,kCAAA,YAAS;AAET,EAAAA,kCAAA,eAAY;AAEZ,EAAAA,kCAAA,gBAAa;AAEb,EAAAA,kCAAA,aAAU;AARA,SAAAA;AAAA,GAAA;AA+BL,SAAS,8CACd,MACK;AACL,SAAO;AAAA,IA
CL,MAAM,KAAK,MAAM;AAAA,IACjB,MAAM,KAAK,MAAM;AAAA,IACjB,yBAAyB,CAAC,KAAK,YAAY,IACvC,KAAK,YAAY,IACjB,kCAAkC,KAAK,YAAY,CAAC;AAAA,EAC1D;AACF;AAEO,SAAS,gDACd,MACqC;AACrC,SAAO;AAAA,IACL,MAAM,KAAK,MAAM;AAAA,IACjB,MAAM,KAAK,MAAM;AAAA,IACjB,YAAY,CAAC,KAAK,yBAAyB,IACvC,KAAK,yBAAyB,IAC9B,oCAAoC,KAAK,yBAAyB,CAAC;AAAA,EACzE;AACF;AAQO,SAAS,kCAAkC,MAAoC;AACpF,SAAO,EAAE,QAAQ,KAAK,QAAQ,EAAE;AAClC;AAEO,SAAS,oCAAoC,MAAoC;AACtF,SAAO;AAAA,IACL,QAAQ,KAAK,QAAQ;AAAA,EACvB;AACF;AAEO,SAAS,2CACd,QACO;AACP,SAAO,OAAO,IAAI,CAAC,SAAS;AAC1B,WAAO,sCAAsC,IAAI;AAAA,EACnD,CAAC;AACH;AAEO,SAAS,6CACd,QACO;AACP,SAAO,OAAO,IAAI,CAAC,SAAS;AAC1B,WAAO,wCAAwC,IAAI;AAAA,EACrD,CAAC;AACH;AAWO,SAAS,iCAAiC,MAAmC;AAClF,SAAO,EAAE,MAAM,KAAK,gBAAgB,GAAG,MAAM,KAAK,MAAM,EAAE;AAC5D;AAEO,SAAS,mCAAmC,MAAmC;AACpF,SAAO;AAAA,IACL,gBAAgB,KAAK,MAAM;AAAA,IAC3B,MAAM,KAAK,MAAM;AAAA,EACnB;AACF;AASO,SAAS,sCAAsC,MAAwC;AAC5F,UAAQ,KAAK,MAAM;AAAA,IACjB,KAAK;AACH,aAAO,gCAAgC,IAA6B;AAAA,IAEtE,KAAK;AACH,aAAO,2BAA2B,IAAwB;AAAA,IAE5D,KAAK;AACH,aAAO,yCAAyC,IAAsC;AAAA,IAExF;AACE,aAAO,iCAAiC,IAAI;AAAA,EAChD;AACF;AAEO,SAAS,wCAAwC,MAAwC;AAC9F,UAAQ,KAAK,MAAM,GAAG;AAAA,IACpB,KAAK;AACH,aAAO,kCAAkC,IAA6B;AAAA,IAExE,KAAK;AACH,aAAO,6BAA6B,IAAwB;AAAA,IAE9D,KAAK;AACH,aAAO,2CAA2C,IAAsC;AAAA,IAE1F;AACE,aAAO,mCAAmC,IAAI;AAAA,EAClD;AACF;AAGO,IAAK,kCAAL,kBAAKC,qCAAL;AAEL,EAAAA,iCAAA,iBAAc;AAEd,EAAAA,iCAAA,kBAAe;AAEf,EAAAA,iCAAA,sBAAmB;AAEnB,EAAAA,iCAAA,SAAM;AARI,SAAAA;AAAA,GAAA;AA+BL,SAAS,gCAAgC,MAAkC;AAChF,SAAO;AAAA,IACL,MAAM,KAAK,gBAAgB;AAAA,IAC3B,MAAM,KAAK,MAAM;AAAA,IACjB,uBAAuB,CAAC,KAAK,YAAY,IACrC,KAAK,YAAY,IACjB,0CAA0C,KAAK,YAAY,CAAC;AAAA,EAClE;AACF;AAEO,SAAS,kCAAkC,MAAkC;AAClF,SAAO;AAAA,IACL,gBAAgB,KAAK,MAAM;AAAA,IAC3B,MAAM,KAAK,MAAM;AAAA,IACjB,YAAY,CAAC,KAAK,uBAAuB,IACrC,KAAK,uBAAuB,IAC5B,4CAA4C,KAAK,uBAAuB,CAAC;AAAA,EAC/E;AACF;AAgBO,SAAS,0CACd,MACK;AACL,SAAO;AAAA,IACL,aAAa,KAAK,aAAa;AAAA,IAC/B,cAAc,KAAK,cAAc;AAAA,IACjC,QAAQ,KAAK,QAAQ;AAAA,IACrB,cAAc,CAAC,KAAK,cAAc,IAC9B,KAAK,cAAc,IACnB,yCAAyC,KAAK,cAAc,CAAC;AAAA,IACjE,WAAW,KAAK,WAAW;AAAA,EAC7B;AACF;AAE
O,SAAS,4CACd,MACiC;AACjC,SAAO;AAAA,IACL,aAAa,KAAK,aAAa;AAAA,IAC/B,cAAc,KAAK,cAAc;AAAA,IACjC,QAAQ,KAAK,QAAQ;AAAA,IACrB,cAAc,CAAC,KAAK,cAAc,IAC9B,KAAK,cAAc,IACnB,2CAA2C,KAAK,cAAc,CAAC;AAAA,IACnE,WAAW,KAAK,WAAW;AAAA,EAC7B;AACF;AAGO,IAAK,4BAAL,kBAAKC,+BAAL;AAEL,EAAAA,2BAAA,yBAAsB;AAEtB,EAAAA,2BAAA,yBAAsB;AAEtB,EAAAA,2BAAA,yBAAsB;AAEtB,EAAAA,2BAAA,cAAW;AAEX,EAAAA,2BAAA,cAAW;AAEX,EAAAA,2BAAA,eAAY;AAEZ,EAAAA,2BAAA,eAAY;AAdF,SAAAA;AAAA,GAAA;AAwCL,SAAS,2BAA2B,MAA6B;AACtE,SAAO;AAAA,IACL,MAAM,KAAK,gBAAgB;AAAA,IAC3B,MAAM,KAAK,MAAM;AAAA,IACjB,wBAAwB,CAAC,KAAK,kBAAkB,IAC5C,KAAK,kBAAkB,IACvB,qCAAqC,KAAK,kBAAkB,CAAC;AAAA,EACnE;AACF;AAEO,SAAS,6BAA6B,MAA6B;AACxE,SAAO;AAAA,IACL,gBAAgB,KAAK,MAAM;AAAA,IAC3B,MAAM,KAAK,MAAM;AAAA,IACjB,kBAAkB,CAAC,KAAK,wBAAwB,IAC5C,KAAK,wBAAwB,IAC7B,uCAAuC,KAAK,wBAAwB,CAAC;AAAA,EAC3E;AACF;AAkBO,SAAS,qCAAqC,MAAuC;AAC1F,SAAO;AAAA,IACL,KAAK,KAAK,KAAK;AAAA,IACf,aAAa,KAAK,aAAa;AAAA,IAC/B,YAAY,KAAK,YAAY;AAAA,IAC7B,SAAS,KAAK,SAAS;AAAA,IACvB,gBAAgB,KAAK,gBAAgB;AAAA,IACrC,cAAc,CAAC,KAAK,cAAc,IAC9B,KAAK,cAAc,IACnB,yCAAyC,KAAK,cAAc,CAAC;AAAA,EACnE;AACF;AAEO,SAAS,uCAAuC,MAAuC;AAC5F,SAAO;AAAA,IACL,KAAK,KAAK,KAAK;AAAA,IACf,aAAa,CAAC,KAAK,aAAa,IAC5B,KAAK,aAAa,IAClB,OAAO;AAAA,MACL,OAAO,QAAQ,KAAK,aAAa,CAAC,EAAE,IAAI,CAAC,CAAC,GAAG,CAAC,MAAqB,CAAC,GAAG,CAAC,CAAC;AAAA,IAC3E;AAAA,IACJ,YAAY,KAAK,YAAY;AAAA,IAC7B,SAAS,KAAK,SAAS;AAAA,IACvB,gBAAgB,KAAK,gBAAgB;AAAA,IACrC,cAAc,CAAC,KAAK,cAAc,IAC9B,KAAK,cAAc,IACnB,2CAA2C,KAAK,cAAc,CAAC;AAAA,EACrE;AACF;AAUO,SAAS,yCACd,MACK;AACL,SAAO;AAAA,IACL,MAAM,KAAK,gBAAgB;AAAA,IAC3B,MAAM,KAAK,MAAM;AAAA,IACjB,eAAe,CAAC,KAAK,eAAe,IAChC,KAAK,eAAe,IACpB,yCAAyC,KAAK,eAAe,CAAC;AAAA,EACpE;AACF;AAEO,SAAS,2CACd,MACgC;AAChC,SAAO;AAAA,IACL,gBAAgB,KAAK,MAAM;AAAA,IAC3B,MAAM,KAAK,MAAM;AAAA,IACjB,eAAe,CAAC,KAAK,eAAe,IAChC,KAAK,eAAe,IACpB,2CAA2C,KAAK,eAAe,CAAC;AAAA,EACtE;AACF;AAkBO,SAAS,yCACd,MACK;AACL,SAAO;AAAA,IACL,KAAK,KAAK,YAAY;AAAA,IACtB,KAAK,KAAK,mBAAmB;AAAA,IAC7B,YAAY,KAAK,YAAY;AAAA,IAC7B,SAAS,KAAK,SAAS;AAAA,IACvB,QAAQ,KAAK,QAAQ;AAAA,IACrB,WAAW,KAAK,WAAW;A
AAA,EAC7B;AACF;AAEO,SAAS,2CACd,MACgC;AAChC,SAAO;AAAA,IACL,YAAY,KAAK,KAAK;AAAA,IACtB,mBAAmB,KAAK,KAAK;AAAA,IAC7B,YAAY,KAAK,YAAY;AAAA,IAC7B,SAAS,KAAK,SAAS;AAAA,IACvB,QAAQ,KAAK,QAAQ;AAAA,IACrB,WAAW,KAAK,WAAW;AAAA,EAC7B;AACF;AAGO,IAAK,iCAAL,kBAAKC,oCAAL;AAEL,EAAAA,gCAAA,iDAA8C;AAE9C,EAAAA,gCAAA,qDAAkD;AAElD,EAAAA,gCAAA,0CAAuC;AAEvC,EAAAA,gCAAA,2CAAwC;AAExC,EAAAA,gCAAA,0BAAuB;AAEvB,EAAAA,gCAAA,+BAA4B;AAE5B,EAAAA,gCAAA,mBAAgB;AAdN,SAAAA;AAAA,GAAA;AAgCL,SAAS,4CACd,QACO;AACP,SAAO,OAAO,IAAI,CAAC,SAAS;AAC1B,WAAO,uCAAuC,IAAI;AAAA,EACpD,CAAC;AACH;AAEO,SAAS,8CACd,QACO;AACP,SAAO,OAAO,IAAI,CAAC,SAAS;AAC1B,WAAO,yCAAyC,IAAI;AAAA,EACtD,CAAC;AACH;AAeO,SAAS,kCAAkC,MAAoC;AACpF,SAAO;AAAA,IACL,MAAM,KAAK,iBAAiB;AAAA,IAC5B,kBAAkB,CAAC,KAAK,kBAAkB,IACtC,KAAK,kBAAkB,IACvB,2BAA2B,KAAK,kBAAkB,CAAC;AAAA,IACvD,qBAAqB,KAAK,qBAAqB;AAAA,IAC/C,MAAM,KAAK,MAAM;AAAA,EACnB;AACF;AAEO,SAAS,oCAAoC,MAAoC;AACtF,SAAO;AAAA,IACL,iBAAiB,KAAK,MAAM;AAAA,IAC5B,kBAAkB,CAAC,KAAK,kBAAkB,IACtC,KAAK,kBAAkB,IACvB,6BAA6B,KAAK,kBAAkB,CAAC;AAAA,IACzD,qBAAqB,KAAK,qBAAqB;AAAA,IAC/C,MAAM,KAAK,MAAM;AAAA,EACnB;AACF;AAQO,SAAS,uCAAuC,MAAyC;AAC9F,UAAQ,KAAK,MAAM;AAAA,IACjB,KAAK;AACH,aAAO,wCAAwC,IAAqC;AAAA,IAEtF,KAAK;AACH,aAAO,wCAAwC,IAAqC;AAAA,IAEtF;AACE,aAAO,kCAAkC,IAAI;AAAA,EACjD;AACF;AAEO,SAAS,yCAAyC,MAAyC;AAChG,UAAQ,KAAK,MAAM,GAAG;AAAA,IACpB,KAAK;AACH,aAAO,0CAA0C,IAAqC;AAAA,IAExF,KAAK;AACH,aAAO,0CAA0C,IAAqC;AAAA,IAExF;AACE,aAAO,oCAAoC,IAAI;AAAA,EACnD;AACF;AAYO,SAAS,2BAA2B,MAA6B;AACtE,SAAO;AAAA,IACL,iBAAiB,KAAK,iBAAiB;AAAA,IACvC,qBAAqB,KAAK,qBAAqB;AAAA,IAC/C,sBAAsB,KAAK,sBAAsB;AAAA,EACnD;AACF;AAEO,SAAS,6BAA6B,MAA6B;AACxE,SAAO;AAAA,IACL,iBAAiB,KAAK,iBAAiB;AAAA,IACvC,qBAAqB,KAAK,qBAAqB;AAAA,IAC/C,sBAAsB,KAAK,sBAAsB;AAAA,EACnD;AACF;AAGO,IAAK,mDAAL,kBAAKC,sDAAL;AAEL,EAAAA,kDAAA,uBAAoB;AAEpB,EAAAA,kDAAA,sBAAmB;AAJT,SAAAA;AAAA,GAAA;AAkBL,IAAK,mCAAL,kBAAKC,sCAAL;AAEL,EAAAA,kCAAA,wBAAqB;AAErB,EAAAA,kCAAA,wBAAqB;AAJX,SAAAA;AAAA,GAAA;AAyBL,SAAS,wCAAwC,MAA0C;AAChG,SAAO;AAAA,IACL,MAAM,KAAK,iBAAiB;AAAA,IAC5B,kBAAkB,CAAC,KAAK,kBAAkB,IACtC,KAAK,
kBAAkB,IACvB,2BAA2B,KAAK,kBAAkB,CAAC;AAAA,IACvD,qBAAqB,KAAK,qBAAqB;AAAA,IAC/C,MAAM,KAAK,MAAM;AAAA,IACjB,8BAA8B,CAAC,KAAK,YAAY,IAC5C,KAAK,YAAY,IACjB,uCAAuC,KAAK,YAAY,CAAC;AAAA,EAC/D;AACF;AAEO,SAAS,0CACd,MAC+B;AAC/B,SAAO;AAAA,IACL,iBAAiB,KAAK,MAAM;AAAA,IAC5B,kBAAkB,CAAC,KAAK,kBAAkB,IACtC,KAAK,kBAAkB,IACvB,6BAA6B,KAAK,kBAAkB,CAAC;AAAA,IACzD,qBAAqB,KAAK,qBAAqB;AAAA,IAC/C,MAAM,KAAK,MAAM;AAAA,IACjB,YAAY,CAAC,KAAK,8BAA8B,IAC5C,KAAK,8BAA8B,IACnC,yCAAyC,KAAK,8BAA8B,CAAC;AAAA,EACnF;AACF;AAQO,SAAS,uCAAuC,MAAyC;AAC9F,SAAO,EAAE,mBAAmB,KAAK,mBAAmB,EAAE;AACxD;AAEO,SAAS,yCAAyC,MAAyC;AAChG,SAAO;AAAA,IACL,mBAAmB,KAAK,mBAAmB;AAAA,EAC7C;AACF;AAGO,IAAK,qCAAL,kBAAKC,wCAAL;AAEL,EAAAA,oCAAA,UAAO;AAFG,SAAAA;AAAA,GAAA;AAoBL,SAAS,wCAAwC,MAA0C;AAChG,SAAO;AAAA,IACL,MAAM,KAAK,iBAAiB;AAAA,IAC5B,kBAAkB,CAAC,KAAK,kBAAkB,IACtC,KAAK,kBAAkB,IACvB,2BAA2B,KAAK,kBAAkB,CAAC;AAAA,IACvD,qBAAqB,KAAK,qBAAqB;AAAA,IAC/C,MAAM,KAAK,MAAM;AAAA,EACnB;AACF;AAEO,SAAS,0CACd,MAC+B;AAC/B,SAAO;AAAA,IACL,iBAAiB,KAAK,MAAM;AAAA,IAC5B,kBAAkB,CAAC,KAAK,kBAAkB,IACtC,KAAK,kBAAkB,IACvB,6BAA6B,KAAK,kBAAkB,CAAC;AAAA,IACzD,qBAAqB,KAAK,qBAAqB;AAAA,IAC/C,MAAM,KAAK,MAAM;AAAA,EACnB;AACF;AAQO,SAAS,+BAA+B,MAA+B;AAC5E,SAAO;AAAA,IACL,SAAS,6BAA6B,KAAK,OAAO,CAAC;AAAA,EACrD;AACF;AAEO,SAAS,2BAA2B,QAAmC;AAC5E,SAAO,OAAO,IAAI,CAAC,SAAS;AAC1B,WAAO,sBAAsB,IAAI;AAAA,EACnC,CAAC;AACH;AAEO,SAAS,6BAA6B,QAAmC;AAC9E,SAAO,OAAO,IAAI,CAAC,SAAS;AAC1B,WAAO,wBAAwB,IAAI;AAAA,EACrC,CAAC;AACH;AAEO,SAAS,qCAAqC,QAA2C;AAC9F,SAAO,OAAO,IAAI,CAAC,SAAS;AAC1B,WAAO,gCAAgC,IAAI;AAAA,EAC7C,CAAC;AACH;AAwCO,SAAS,gCAAgC,MAAgC;AAC9E,SAAO;AAAA,IACL,MAAM,KAAK,MAAM;AAAA,IACjB,aAAa,KAAK,aAAa;AAAA,IAC/B,QAAQ,CAAC,KAAK,QAAQ,IAAI,KAAK,QAAQ,IAAI,6BAA6B,KAAK,QAAQ,CAAC;AAAA,IACtF,iBAAiB,CAAC,KAAK,iBAAiB,IACpC,KAAK,iBAAiB,IACtB,gCAAgC,KAAK,iBAAiB,CAAC;AAAA,IAC3D,uBAAuB,KAAK,uBAAuB;AAAA,IACnD,aAAa,CAAC,KAAK,aAAa,IAC5B,KAAK,aAAa,IAClB,wBAAwB,KAAK,aAAa,CAAC;AAAA,IAC/C,YAAY,CAAC,KAAK,YAAY,IAC1B,KAAK,YAAY,IACjB,iCAAiC,KAAK,YAAY,CAAC;AAAA,IACvD,WAAW,CAAC,KAAK,WAAW,IACxB,KAAK,WAAW,IAChB,sCAAsC,KAA
K,WAAW,CAAC;AAAA,IAC3D,YAAY,CAAC,KAAK,YAAY,IAC1B,KAAK,YAAY,IACjB,uCAAuC,KAAK,YAAY,CAAC;AAAA,IAC7D,cAAc,CAAC,KAAK,cAAc,IAC9B,KAAK,cAAc,IACnB,kCAAkC,KAAK,cAAc,CAAC;AAAA,IAC1D,aAAa,CAAC,KAAK,aAAa,IAC5B,KAAK,aAAa,IAClB,iCAAiC,KAAK,aAAa,CAAC;AAAA,IACxD,aAAa,CAAC,KAAK,aAAa,IAC5B,KAAK,aAAa,IAClB,wCAAwC,KAAK,aAAa,CAAC;AAAA,IAC/D,eAAe,CAAC,KAAK,eAAe,IAChC,KAAK,eAAe,IACpB,wCAAwC,KAAK,eAAe,CAAC;AAAA,IACjE,YAAY,CAAC,KAAK,YAAY,IAC1B,KAAK,YAAY,IACjB,qCAAqC,KAAK,YAAY,CAAC;AAAA,IAC3D,gBAAgB,CAAC,KAAK,UAAU,IAC5B,KAAK,UAAU,IACf,2BAA2B,KAAK,UAAU,CAAC;AAAA,IAC/C,cAAc,CAAC,KAAK,cAAc,IAC9B,KAAK,cAAc,IACnB,yBAAyB,KAAK,cAAc,CAAC;AAAA,IACjD,MAAM,KAAK,aAAa;AAAA,EAC1B;AACF;AAYO,SAAS,mCAAmC,MAAqC;AACtF,SAAO;AACT;AAEO,SAAS,qCAAqC,MAAqC;AACxF,SAAO;AAAA,IACL,eAAe,KAAK,eAAe;AAAA,IACnC,aAAa,KAAK,aAAa;AAAA,IAC/B,iBAAiB,KAAK,iBAAiB;AAAA,EACzC;AACF;AAkBO,SAAS,6BAA6B,MAA+B;AAC1E,SAAO;AAAA,IACL,MAAM,KAAK,MAAM;AAAA,IACjB,UAAU,KAAK,cAAc;AAAA,IAC7B,WAAW,KAAK,eAAe;AAAA,IAC/B,YAAY,KAAK,gBAAgB;AAAA,IACjC,cAAc,CAAC,KAAK,cAAc,IAC9B,KAAK,cAAc,IACnB,KAAK,cAAc,EAAE,IAAI,CAAC,MAAW;AACnC,aAAO;AAAA,IACT,CAAC;AAAA,IACL,aAAa,CAAC,KAAK,aAAa,IAC5B,KAAK,aAAa,IAClB,KAAK,aAAa,EAAE,IAAI,CAAC,MAAW;AAClC,aAAO;AAAA,IACT,CAAC;AAAA,EACP;AACF;AAQO,SAAS,wBAAwB,MAA0B;AAChE,SAAO,EAAE,QAAQ,iCAAiC,KAAK,QAAQ,CAAC,EAAE;AACpE;AAEO,SAAS,0BAA0B,MAA0B;AAClE,SAAO;AAAA,IACL,QAAQ,mCAAmC,KAAK,QAAQ,CAAC;AAAA,EAC3D;AACF;AAEO,SAAS,iCAAiC,QAAyC;AACxF,SAAO,OAAO,IAAI,CAAC,SAAS;AAC1B,WAAO,4BAA4B,IAAI;AAAA,EACzC,CAAC;AACH;AAEO,SAAS,mCAAmC,QAAyC;AAC1F,SAAO,OAAO,IAAI,CAAC,SAAS;AAC1B,WAAO,8BAA8B,IAAI;AAAA,EAC3C,CAAC;AACH;AAcO,SAAS,4BAA4B,MAA8B;AACxE,SAAO;AACT;AAEO,SAAS,8BAA8B,MAA8B;AAC1E,SAAO;AAAA,IACL,OAAO,KAAK,OAAO;AAAA,IACnB,aAAa,KAAK,aAAa;AAAA,IAC/B,WAAW,KAAK,WAAW;AAAA,IAC3B,UAAU,KAAK,UAAU;AAAA,EAC3B;AACF;AAYO,SAAS,sBAAsB,MAAwB;AAC5D,SAAO;AAAA,IACL,MAAM,KAAK,MAAM;AAAA,IACjB,SAAS,KAAK,SAAS,EAAE,IAAI,CAAC,MAAW;AACvC,aAAO;AAAA,IACT,CAAC;AAAA,IACD,eAAe,KAAK,MAAM;AAAA,EAC5B;AACF;AAEO,SAAS,wBAAwB,MAAwB;AAC9D,SAAO;AAAA,IACL,MAAM,KAAK,MAAM;AAAA,IACjB,SAAS,KAAK
,SAAS,EAAE,IAAI,CAAC,MAAW;AACvC,aAAO;AAAA,IACT,CAAC;AAAA,IACD,MAAM,KAAK,aAAa;AAAA,EAC1B;AACF;AAQO,SAAS,+BAA+B,MAA+B;AAC5E,SAAO;AAAA,IACL,SAAS,6BAA6B,KAAK,OAAO,CAAC;AAAA,EACrD;AACF;AAEO,SAAS,2BAA2B,QAAmC;AAC5E,SAAO,OAAO,IAAI,CAAC,SAAS;AAC1B,WAAO,sBAAsB,IAAI;AAAA,EACnC,CAAC;AACH;AAEO,SAAS,6BAA6B,QAAmC;AAC9E,SAAO,OAAO,IAAI,CAAC,SAAS;AAC1B,WAAO,wBAAwB,IAAI;AAAA,EACrC,CAAC;AACH;AAkBO,SAAS,wBAAwB,MAA0B;AAChE,SAAO;AAAA,IACL,MAAM,KAAK,MAAM;AAAA,IACjB,kBAAkB,wCAAwC,KAAK,kBAAkB,CAAC;AAAA,IAClF,QAAQ,CAAC,KAAK,QAAQ,IAClB,KAAK,QAAQ,IACb,uCAAuC,KAAK,QAAQ,CAAC;AAAA,IACzD,eAAe,KAAK,MAAM;AAAA,IAC1B,eAAe,CAAC,KAAK,eAAe,IAChC,KAAK,eAAe,IACpB,sCAAsC,KAAK,eAAe,CAAC;AAAA,IAC/D,aAAa,KAAK,aAAa;AAAA,EACjC;AACF;AAEO,SAAS,0BAA0B,MAA0B;AAClE,SAAO;AAAA,IACL,MAAM,KAAK,MAAM;AAAA,IACjB,kBAAkB,0CAA0C,KAAK,kBAAkB,CAAC;AAAA,IACpF,QAAQ,CAAC,KAAK,QAAQ,IAClB,KAAK,QAAQ,IACb,yCAAyC,KAAK,QAAQ,CAAC;AAAA,IAC3D,MAAM,KAAK,aAAa;AAAA,IACxB,eAAe,CAAC,KAAK,eAAe,IAChC,KAAK,eAAe,IACpB,wCAAwC,KAAK,eAAe,CAAC;AAAA,IACjE,aAAa,KAAK,aAAa;AAAA,EACjC;AACF;AAEO,SAAS,wCACd,QACO;AACP,SAAO,OAAO,IAAI,CAAC,SAAS;AAC1B,WAAO,mCAAmC,IAAI;AAAA,EAChD,CAAC;AACH;AAEO,SAAS,0CACd,QACO;AACP,SAAO,OAAO,IAAI,CAAC,SAAS;AAC1B,WAAO,qCAAqC,IAAI;AAAA,EAClD,CAAC;AACH;AAQO,SAAS,mCAAmC,MAAqC;AACtF,SAAO,EAAE,MAAM,KAAK,MAAM,EAAE;AAC9B;AAEO,SAAS,qCAAqC,MAAqC;AACxF,SAAO;AAAA,IACL,MAAM,KAAK,MAAM;AAAA,EACnB;AACF;AAEO,SAAS,uCACd,QACO;AACP,SAAO,OAAO,IAAI,CAAC,SAAS;AAC1B,WAAO,kCAAkC,IAAI;AAAA,EAC/C,CAAC;AACH;AAEO,SAAS,yCACd,QACO;AACP,SAAO,OAAO,IAAI,CAAC,SAAS;AAC1B,WAAO,oCAAoC,IAAI;AAAA,EACjD,CAAC;AACH;AASO,SAAS,6BAA6B,MAA+B;AAC1E,SAAO,EAAE,MAAM,KAAK,MAAM,EAAE;AAC9B;AAEO,SAAS,+BAA+B,MAA+B;AAC5E,SAAO;AAAA,IACL,MAAM,KAAK,MAAM;AAAA,EACnB;AACF;AAKO,SAAS,kCAAkC,MAAoC;AACpF,UAAQ,KAAK,MAAM;AAAA,IACjB,KAAK;AACH,aAAO,wCAAwC,IAAqC;AAAA,IAEtF;AACE,aAAO,6BAA6B,IAAI;AAAA,EAC5C;AACF;AAEO,SAAS,oCAAoC,MAAoC;AACtF,UAAQ,KAAK,MAAM,GAAG;AAAA,IACpB,KAAK;AACH,aAAO,0CAA0C,IAAqC;AAAA,IAExF;AACE,aAAO,+BAA+B,IAAI;AAAA,EAC9C;AACF;AAGO,IAAK,8BAAL,kBAAKC,iCAAL;AAEL,EAAAA,6BAAA,iBAAc;AAFJ,S
AAAA;AAAA,GAAA;AAqBL,SAAS,wCAAwC,MAA0C;AAChG,SAAO;AAAA,IACL,MAAM,KAAK,MAAM;AAAA,IACjB,uBAAuB,0CAA0C,KAAK,uBAAuB,CAAC;AAAA,EAChG;AACF;AAEO,SAAS,0CACd,MAC+B;AAC/B,SAAO;AAAA,IACL,MAAM,KAAK,MAAM;AAAA,IACjB,uBAAuB;AAAA,MACrB,KAAK,uBAAuB;AAAA,IAC9B;AAAA,EACF;AACF;AAQO,SAAS,sCAAsC,MAAsC;AAC1F,SAAO;AAAA,IACL,OAAO,+BAA+B,KAAK,OAAO,CAAC;AAAA,EACrD;AACF;AAEO,SAAS,6BAA6B,QAAqC;AAChF,SAAO,OAAO,IAAI,CAAC,SAAS;AAC1B,WAAO,wBAAwB,IAAI;AAAA,EACrC,CAAC;AACH;AAEO,SAAS,+BAA+B,QAAqC;AAClF,SAAO,OAAO,IAAI,CAAC,SAAS;AAC1B,WAAO,0BAA0B,IAAI;AAAA,EACvC,CAAC;AACH;AAiBO,SAAS,0BAA0B,MAA4B;AACpE,SAAO;AAAA,IACL,MAAM,KAAK,MAAM;AAAA,IACjB,aAAa,KAAK,aAAa;AAAA,IAC/B,MAAM,KAAK,MAAM;AAAA,IACjB,eAAe,KAAK,MAAM;AAAA,IAC1B,eAAe,CAAC,KAAK,eAAe,IAChC,KAAK,eAAe,IACpB,sCAAsC,KAAK,eAAe,CAAC;AAAA,EACjE;AACF;AAEO,SAAS,4BAA4B,MAA4B;AACtE,SAAO;AAAA,IACL,MAAM,KAAK,MAAM;AAAA,IACjB,aAAa,KAAK,aAAa;AAAA,IAC/B,MAAM,KAAK,MAAM;AAAA,IACjB,MAAM,KAAK,aAAa;AAAA,IACxB,eAAe,CAAC,KAAK,eAAe,IAChC,KAAK,eAAe,IACpB,wCAAwC,KAAK,eAAe,CAAC;AAAA,EACnE;AACF;AAUO,SAAS,+BAA+B,MAAiC;AAC9E,UAAQ,KAAK,MAAM;AAAA,IACjB,KAAK;AACH,aAAO,qCAAqC,IAAkC;AAAA,IAEhF,KAAK;AACH,aAAO,mCAAmC,IAAgC;AAAA,IAE5E,KAAK;AACH,aAAO,wCAAwC,IAAqC;AAAA,IAEtF,KAAK;AACH,aAAO,6BAA6B,IAA0B;AAAA,IAEhE;AACE,aAAO,0BAA0B,IAAI;AAAA,EACzC;AACF;AAEO,SAAS,iCAAiC,MAAiC;AAChF,UAAQ,KAAK,MAAM,GAAG;AAAA,IACpB,KAAK;AACH,aAAO,uCAAuC,IAAkC;AAAA,IAElF,KAAK;AACH,aAAO,qCAAqC,IAAgC;AAAA,IAE9E,KAAK;AACH,aAAO,0CAA0C,IAAqC;AAAA,IAExF,KAAK;AACH,aAAO,+BAA+B,IAA0B;AAAA,IAElE;AACE,aAAO,4BAA4B,IAAI;AAAA,EAC3C;AACF;AAGO,IAAK,2BAAL,kBAAKC,8BAAL;AAEL,EAAAA,0BAAA,iBAAc;AAEd,EAAAA,0BAAA,eAAY;AAEZ,EAAAA,0BAAA,oBAAiB;AAEjB,EAAAA,0BAAA,SAAM;AARI,SAAAA;AAAA,GAAA;AA8BL,SAAS,qCAAqC,MAAuC;AAC1F,SAAO;AAAA,IACL,MAAM,KAAK,MAAM;AAAA,IACjB,aAAa,KAAK,aAAa;AAAA,IAC/B,MAAM,KAAK,MAAM;AAAA,IACjB,eAAe,KAAK,MAAM;AAAA,IAC1B,eAAe,CAAC,KAAK,eAAe,IAChC,KAAK,eAAe,IACpB,sCAAsC,KAAK,eAAe,CAAC;AAAA,IAC/D,uBAAuB;AAAA,MACrB,KAAK,uBAAuB;AAAA,IAC9B;AAAA,EACF;AACF;AAEO,SAAS,uCAAuC,MAAuC;AAC5F,SAAO;AAAA,IACL,MAAM,KAAK,MAAM;AAAA,IACjB,aAAa,KA
AK,aAAa;AAAA,IAC/B,MAAM,KAAK,MAAM;AAAA,IACjB,MAAM,KAAK,aAAa;AAAA,IACxB,eAAe,CAAC,KAAK,eAAe,IAChC,KAAK,eAAe,IACpB,wCAAwC,KAAK,eAAe,CAAC;AAAA,IACjE,uBAAuB;AAAA,MACrB,KAAK,uBAAuB;AAAA,IAC9B;AAAA,EACF;AACF;AAcO,SAAS,+CACd,MACK;AACL,SAAO;AAAA,IACL,iBAAiB,KAAK,iBAAiB;AAAA,IACvC,kBAAkB,CAAC,KAAK,kBAAkB,IACtC,KAAK,kBAAkB,IACvB,yCAAyC,KAAK,kBAAkB,CAAC;AAAA,IACrE,cAAc,CAAC,KAAK,cAAc,IAC9B,KAAK,cAAc,IACnB,yCAAyC,KAAK,cAAc,CAAC;AAAA,IACjE,2BAA2B,KAAK,2BAA2B;AAAA,EAC7D;AACF;AAEO,SAAS,iDACd,MACsC;AACtC,SAAO;AAAA,IACL,iBAAiB,KAAK,iBAAiB;AAAA,IACvC,kBAAkB,CAAC,KAAK,kBAAkB,IACtC,KAAK,kBAAkB,IACvB,2CAA2C,KAAK,kBAAkB,CAAC;AAAA,IACvE,cAAc,CAAC,KAAK,cAAc,IAC9B,KAAK,cAAc,IACnB,2CAA2C,KAAK,cAAc,CAAC;AAAA,IACnE,2BAA2B,KAAK,2BAA2B;AAAA,EAC7D;AACF;AAEO,SAAS,yCACd,QACO;AACP,SAAO,OAAO,IAAI,CAAC,SAAS;AAC1B,WAAO,oCAAoC,IAAI;AAAA,EACjD,CAAC;AACH;AAEO,SAAS,2CACd,QACO;AACP,SAAO,OAAO,IAAI,CAAC,SAAS;AAC1B,WAAO,sCAAsC,IAAI;AAAA,EACnD,CAAC;AACH;AAQO,SAAS,oCAAoC,MAAsC;AACxF,SAAO,EAAE,MAAM,KAAK,MAAM,EAAE;AAC9B;AAEO,SAAS,sCAAsC,MAAsC;AAC1F,SAAO;AAAA,IACL,MAAM,KAAK,MAAM;AAAA,EACnB;AACF;AASO,SAAS,mCAAmC,MAAqC;AACtF,SAAO;AAAA,IACL,MAAM,KAAK,MAAM;AAAA,IACjB,aAAa,KAAK,aAAa;AAAA,IAC/B,MAAM,KAAK,MAAM;AAAA,IACjB,eAAe,KAAK,MAAM;AAAA,IAC1B,eAAe,CAAC,KAAK,eAAe,IAChC,KAAK,eAAe,IACpB,sCAAsC,KAAK,eAAe,CAAC;AAAA,IAC/D,qBAAqB,6CAA6C,KAAK,qBAAqB,CAAC;AAAA,EAC/F;AACF;AAEO,SAAS,qCAAqC,MAAqC;AACxF,SAAO;AAAA,IACL,MAAM,KAAK,MAAM;AAAA,IACjB,aAAa,KAAK,aAAa;AAAA,IAC/B,MAAM,KAAK,MAAM;AAAA,IACjB,MAAM,KAAK,aAAa;AAAA,IACxB,eAAe,CAAC,KAAK,eAAe,IAChC,KAAK,eAAe,IACpB,wCAAwC,KAAK,eAAe,CAAC;AAAA,IACjE,qBAAqB;AAAA,MACnB,KAAK,qBAAqB;AAAA,IAC5B;AAAA,EACF;AACF;AAkBO,SAAS,6CACd,MACK;AACL,SAAO;AAAA,IACL,kBAAkB,KAAK,kBAAkB;AAAA,IACzC,eAAe,KAAK,eAAe;AAAA,IACnC,YAAY,KAAK,YAAY;AAAA,IAC7B,YAAY,KAAK,YAAY;AAAA,IAC7B,qBAAqB,CAAC,KAAK,qBAAqB,IAC5C,KAAK,qBAAqB,QAC1B,4DAA6C,KAAK,qBAAqB,CAAC;AAAA,EAC9E;AACF;AAEO,SAAS,+CACd,MACoC;AACpC,SAAO;AAAA,IACL,kBAAkB,KAAK,kBAAkB;AAAA,IACzC,eAAe,KAAK,eAAe;AAAA,IACnC,YAAY,KAAK,YAAY;AAAA,IAC7B,YAAY,KAAK,YAAY;AAAA,IAC7B,
qBAAqB,CAAC,KAAK,qBAAqB,IAC5C,KAAK,qBAAqB,QAC1B,8DAA+C,KAAK,qBAAqB,CAAC;AAAA,IAC9E,kBAAkB,CAAC,KAAK,kBAAkB,IACtC,KAAK,kBAAkB,IACvB,6BAA6B,KAAK,kBAAkB,CAAC;AAAA,EAC3D;AACF;AAUO,SAAS,2BAA2B,MAA6B;AACtE,SAAO;AAAA,IACL,UAAU,KAAK,UAAU;AAAA,IACzB,WAAW,CAAC,KAAK,WAAW,IAAI,KAAK,WAAW,IAAI,KAAK,WAAW,EAAE,YAAY;AAAA,EACpF;AACF;AAEO,SAAS,6BAA6B,MAA6B;AACxE,SAAO;AAAA,IACL,UAAU,KAAK,UAAU;AAAA,IACzB,WAAW,CAAC,KAAK,WAAW,IAAI,KAAK,WAAW,IAAI,IAAI,KAAK,KAAK,WAAW,CAAC;AAAA,EAChF;AACF;AAGO,IAAK,4CAAL,kBAAKC,+CAAL;AAEL,EAAAA,2CAAA,aAAU;AAEV,EAAAA,2CAAA,cAAW;AAJD,SAAAA;AAAA,GAAA;AAuBL,SAAS,6BAA6B,MAA6B;AACxE,SAAO;AAAA,IACL,0BAAsB,yCAAgB,MAAM,CAAC,CAAC;AAAA,EAChD;AACF;AASO,SAAS,wCAAwC,MAA0C;AAChG,SAAO;AAAA,IACL,MAAM,KAAK,MAAM;AAAA,IACjB,aAAa,KAAK,aAAa;AAAA,IAC/B,MAAM,KAAK,MAAM;AAAA,IACjB,eAAe,KAAK,MAAM;AAAA,IAC1B,eAAe,CAAC,KAAK,eAAe,IAChC,KAAK,eAAe,IACpB,sCAAsC,KAAK,eAAe,CAAC;AAAA,IAC/D,0BAA0B;AAAA,MACxB,KAAK,0BAA0B;AAAA,IACjC;AAAA,EACF;AACF;AAEO,SAAS,0CACd,MAC+B;AAC/B,SAAO;AAAA,IACL,MAAM,KAAK,MAAM;AAAA,IACjB,aAAa,KAAK,aAAa;AAAA,IAC/B,MAAM,KAAK,MAAM;AAAA,IACjB,MAAM,KAAK,aAAa;AAAA,IACxB,eAAe,CAAC,KAAK,eAAe,IAChC,KAAK,eAAe,IACpB,wCAAwC,KAAK,eAAe,CAAC;AAAA,IACjE,0BAA0B;AAAA,MACxB,KAAK,0BAA0B;AAAA,IACjC;AAAA,EACF;AACF;AAgBO,SAAS,kDACd,MACK;AACL,SAAO;AAAA,IACL,mBAAmB,KAAK,mBAAmB;AAAA,IAC3C,aAAa,KAAK,aAAa;AAAA,IAC/B,YAAY,KAAK,YAAY;AAAA,IAC7B,qBAAqB,CAAC,KAAK,qBAAqB,IAC5C,KAAK,qBAAqB,QAC1B,4DAA6C,KAAK,qBAAqB,CAAC;AAAA,EAC9E;AACF;AAEO,SAAS,oDACd,MACyC;AACzC,SAAO;AAAA,IACL,mBAAmB,KAAK,mBAAmB;AAAA,IAC3C,aAAa,KAAK,aAAa;AAAA,IAC/B,YAAY,KAAK,YAAY;AAAA,IAC7B,qBAAqB,CAAC,KAAK,qBAAqB,IAC5C,KAAK,qBAAqB,QAC1B,8DAA+C,KAAK,qBAAqB,CAAC;AAAA,IAC9E,kBAAkB,CAAC,KAAK,kBAAkB,IACtC,KAAK,kBAAkB,IACvB,6BAA6B,KAAK,kBAAkB,CAAC;AAAA,EAC3D;AACF;AASO,SAAS,6BAA6B,MAA+B;AAC1E,SAAO;AAAA,IACL,MAAM,KAAK,MAAM;AAAA,IACjB,aAAa,KAAK,aAAa;AAAA,IAC/B,MAAM,KAAK,MAAM;AAAA,IACjB,eAAe,KAAK,MAAM;AAAA,IAC1B,eAAe,CAAC,KAAK,eAAe,IAChC,KAAK,eAAe,IACpB,sCAAsC,KAAK,eAAe,CAAC;AAAA,IAC/D,eAAe,CAAC,KAAK,eAAe,IAChC,KAAK,eAAe,IACpB,uCAAuC,KAAK,eAAe,C
AAC;AAAA,EAClE;AACF;AAEO,SAAS,+BAA+B,MAA+B;AAC5E,SAAO;AAAA,IACL,MAAM,KAAK,MAAM;AAAA,IACjB,aAAa,KAAK,aAAa;AAAA,IAC/B,MAAM,KAAK,MAAM;AAAA,IACjB,MAAM,KAAK,aAAa;AAAA,IACxB,eAAe,CAAC,KAAK,eAAe,IAChC,KAAK,eAAe,IACpB,wCAAwC,KAAK,eAAe,CAAC;AAAA,IACjE,eAAe,CAAC,KAAK,eAAe,IAChC,KAAK,eAAe,IACpB,yCAAyC,KAAK,eAAe,CAAC;AAAA,EACpE;AACF;AAQO,SAAS,uCAAuC,MAAyC;AAC9F,SAAO;AAAA,IACL,SAAS,CAAC,KAAK,SAAS,IACpB,KAAK,SAAS,IACd,oCAAoC,KAAK,SAAS,CAAC;AAAA,EACzD;AACF;AAEO,SAAS,yCAAyC,MAAyC;AAChG,SAAO;AAAA,IACL,SAAS,CAAC,KAAK,SAAS,IACpB,KAAK,SAAS,IACd,sCAAsC,KAAK,SAAS,CAAC;AAAA,EAC3D;AACF;AAUO,SAAS,oCAAoC,MAAsC;AACxF,SAAO;AAAA,IACL,gBAAgB,CAAC,KAAK,gBAAgB,IAClC,KAAK,gBAAgB,IACrB,wCAAwC,KAAK,gBAAgB,CAAC;AAAA,IAClE,gBAAgB,CAAC,KAAK,gBAAgB,IAClC,KAAK,gBAAgB,IACrB,wCAAwC,KAAK,gBAAgB,CAAC;AAAA,EACpE;AACF;AAEO,SAAS,sCAAsC,MAAsC;AAC1F,SAAO;AAAA,IACL,gBAAgB,CAAC,KAAK,gBAAgB,IAClC,KAAK,gBAAgB,IACrB,0CAA0C,KAAK,gBAAgB,CAAC;AAAA,IACpE,gBAAgB,CAAC,KAAK,gBAAgB,IAClC,KAAK,gBAAgB,IACrB,0CAA0C,KAAK,gBAAgB,CAAC;AAAA,EACtE;AACF;AAEO,SAAS,wCACd,QACO;AACP,SAAO,OAAO,IAAI,CAAC,SAAS;AAC1B,WAAO,mCAAmC,IAAI;AAAA,EAChD,CAAC;AACH;AAEO,SAAS,0CACd,QACO;AACP,SAAO,OAAO,IAAI,CAAC,SAAS;AAC1B,WAAO,qCAAqC,IAAI;AAAA,EAClD,CAAC;AACH;AAUO,SAAS,mCAAmC,MAAqC;AACtF,SAAO,EAAE,SAAS,KAAK,SAAS,GAAG,iBAAiB,KAAK,iBAAiB,EAAE;AAC9E;AAEO,SAAS,qCAAqC,MAAqC;AACxF,SAAO;AAAA,IACL,SAAS,KAAK,SAAS;AAAA,IACvB,iBAAiB,KAAK,iBAAiB;AAAA,EACzC;AACF;AAQO,SAAS,wCAAwC,MAAwC;AAC9F,SAAO;AAAA,IACL,OAAO,sCAAsC,KAAK,OAAO,CAAC;AAAA,EAC5D;AACF;AAEO,SAAS,oCAAoC,QAA4C;AAC9F,SAAO,OAAO,IAAI,CAAC,SAAS;AAC1B,WAAO,+BAA+B,IAAI;AAAA,EAC5C,CAAC;AACH;AAEO,SAAS,sCAAsC,QAA4C;AAChG,SAAO,OAAO,IAAI,CAAC,SAAS;AAC1B,WAAO,iCAAiC,IAAI;AAAA,EAC9C,CAAC;AACH;AAGO,IAAK,4CAAL,kBAAKC,+CAAL;AAEL,EAAAA,2CAAA,cAAW;AAEX,EAAAA,2CAAA,YAAS;AAET,EAAAA,2CAAA,cAAW;AAND,SAAAA;AAAA,GAAA;AA4BL,SAAS,oCAAoC,MAAoC;AACtF,SAAO;AAAA,IACL,UAAU,4BAA4B,KAAK,UAAU,CAAC;AAAA,IACtD,QAAQ,0BAA0B,KAAK,QAAQ,CAAC;AAAA,EAClD;AACF;AAwBO,SAAS,4BAA4B,MAA4B;AACtE,SAAO;AAAA,IACL,cAAc,4BAA4B,KAAK,cAAc,CAAC;AAAA,IAC9D,iBAAiB,4B
AA4B,KAAK,eAAe,CAAC;AAAA,IAClE,cAAc,4BAA4B,KAAK,cAAc,CAAC;AAAA,IAC9D,gBAAgB,4BAA4B,KAAK,eAAe,CAAC;AAAA,IACjE,mBAAmB,4BAA4B,KAAK,kBAAkB,CAAC;AAAA,IACvE,oBAAoB,4BAA4B,KAAK,aAAa,CAAC;AAAA,IACnE,mBAAmB,4BAA4B,KAAK,aAAa,CAAC;AAAA,IAClE,iBAAiB,4BAA4B,KAAK,eAAe,CAAC;AAAA,IAClE,wBAAwB,4BAA4B,KAAK,iBAAiB,CAAC;AAAA,EAC7E;AACF;AAUO,SAAS,4BAA4B,MAA4B;AACtE,SAAO;AAAA,IACL,OAAO,KAAK,OAAO;AAAA,IACnB,OAAO,KAAK,OAAO;AAAA,EACrB;AACF;AAkBO,SAAS,0BAA0B,MAA0B;AAClE,SAAO;AAAA,IACL,mBAAmB,KAAK,mBAAmB;AAAA,IAC3C,8BAA8B,KAAK,8BAA8B;AAAA,IACjE,oCAAoC,KAAK,oCAAoC;AAAA,IAC7E,2CAA2C,KAAK,2CAA2C;AAAA,IAC3F,2BAA2B,KAAK,oBAAoB;AAAA,IACpD,oCAAoC,KAAK,oCAAoC;AAAA,EAC/E;AACF;AA0BO,SAAS,4CACd,MACK;AACL,SAAO;AAAA,IACL,MAAM,KAAK,MAAM;AAAA,IACjB,aAAa,KAAK,aAAa;AAAA,IAC/B,MAAM,KAAK,MAAM;AAAA,IACjB,aAAa,wDAAwD,IAAI;AAAA,IACzE,WAAW,qCAAqC,KAAK,WAAW,CAAC;AAAA,IACjE,UAAU,CAAC,KAAK,UAAU,IACtB,KAAK,UAAU,IACf,yCAAyC,KAAK,UAAU,CAAC;AAAA,IAC7D,2BAA2B,CAAC,KAAK,2BAA2B,IACxD,KAAK,2BAA2B,IAChC,yCAAyC,KAAK,2BAA2B,CAAC;AAAA,IAC9E,6BAA6B,CAAC,KAAK,6BAA6B,IAC5D,KAAK,6BAA6B,IAClC,2CAA2C,KAAK,6BAA6B,CAAC;AAAA,IAClF,eAAe,KAAK,MAAM;AAAA,IAC1B,eAAe,CAAC,KAAK,eAAe,IAChC,KAAK,eAAe,IACpB,sCAAsC,KAAK,eAAe,CAAC;AAAA,EACjE;AACF;AAEO,SAAS,8CACd,MACmC;AACnC,SAAO;AAAA,IACL,MAAM,KAAK,MAAM;AAAA,IACjB,aAAa,KAAK,aAAa;AAAA,IAC/B,MAAM,KAAK,MAAM;AAAA,IACjB,GAAG,0DAA0D,KAAK,aAAa,CAAC;AAAA,IAChF,WAAW,uCAAuC,KAAK,WAAW,CAAC;AAAA,IACnE,UAAU,CAAC,KAAK,UAAU,IACtB,KAAK,UAAU,IACf,2CAA2C,KAAK,UAAU,CAAC;AAAA,IAC/D,2BAA2B,CAAC,KAAK,2BAA2B,IACxD,KAAK,2BAA2B,IAChC,2CAA2C,KAAK,2BAA2B,CAAC;AAAA,IAChF,6BAA6B,CAAC,KAAK,6BAA6B,IAC5D,KAAK,6BAA6B,IAClC,6CAA6C,KAAK,6BAA6B,CAAC;AAAA,IACpF,MAAM,KAAK,aAAa;AAAA,IACxB,eAAe,CAAC,KAAK,eAAe,IAChC,KAAK,eAAe,IACpB,wCAAwC,KAAK,eAAe,CAAC;AAAA,EACnE;AACF;AAGO,IAAK,mCAAL,kBAAKC,sCAAL;AAEL,EAAAA,kCAAA,cAAW;AAEX,EAAAA,kCAAA,cAAW;AAEX,EAAAA,kCAAA,eAAY;AAEZ,EAAAA,kCAAA,gBAAa;AAEb,EAAAA,kCAAA,WAAQ;AAER,EAAAA,kCAAA,cAAW;AAEX,EAAAA,kCAAA,aAAU;AAEV,EAAAA,kCAAA,gBAAa;AAhBH,SAAAA;AAAA,GAAA;AAyCL,SAAS,gCAAgC,MAAkC;AAChF,SAAO,EAAE,kBAAkB,
KAAK,kBAAkB,EAAE;AACtD;AAEO,SAAS,kCAAkC,MAAkC;AAClF,SAAO;AAAA,IACL,kBAAkB,KAAK,kBAAkB;AAAA,EAC3C;AACF;AAUO,SAAS,qCAAqC,MAAuC;AAC1F,SAAO,EAAE,MAAM,KAAK,MAAM,GAAG,OAAO,KAAK,OAAO,EAAE;AACpD;AAEO,SAAS,uCAAuC,MAAuC;AAC5F,SAAO;AAAA,IACL,MAAM,KAAK,MAAM;AAAA,IACjB,OAAO,KAAK,OAAO;AAAA,EACrB;AACF;AASO,SAAS,oCAAoC,MAAsC;AACxF,SAAO,EAAE,eAAe,KAAK,WAAW,EAAE;AAC5C;AAEO,SAAS,sCAAsC,MAAsC;AAC1F,SAAO;AAAA,IACL,WAAW,KAAK,aAAa;AAAA,EAC/B;AACF;AAQO,SAAS,yCACd,MACK;AACL,UAAQ,KAAK,WAAW;AAAA,IACtB,KAAK;AACH,aAAO;AAAA,QACL;AAAA,MACF;AAAA,IAEF,KAAK;AACH,aAAO,4CAA4C,IAAyC;AAAA,IAE9F;AACE,aAAO,oCAAoC,IAAI;AAAA,EACnD;AACF;AAEO,SAAS,2CACd,MACgC;AAChC,UAAQ,KAAK,aAAa,GAAG;AAAA,IAC3B,KAAK;AACH,aAAO;AAAA,QACL;AAAA,MACF;AAAA,IAEF,KAAK;AACH,aAAO;AAAA,QACL;AAAA,MACF;AAAA,IAEF;AACE,aAAO,sCAAsC,IAAI;AAAA,EACrD;AACF;AAUO,SAAS,6CACd,MACK;AACL,SAAO;AAAA,IACL,eAAe,KAAK,WAAW;AAAA,IAC/B,yBAAyB,KAAK,yBAAyB;AAAA,EACzD;AACF;AAEO,SAAS,+CACd,MACoC;AACpC,SAAO;AAAA,IACL,WAAW,KAAK,aAAa;AAAA,IAC7B,yBAAyB,KAAK,yBAAyB;AAAA,EACzD;AACF;AAQO,SAAS,4CACd,MACK;AACL,SAAO,EAAE,eAAe,KAAK,WAAW,EAAE;AAC5C;AAEO,SAAS,8CACd,MACmC;AACnC,SAAO;AAAA,IACL,WAAW,KAAK,aAAa;AAAA,EAC/B;AACF;AASO,SAAS,sCAAsC,MAAwC;AAC5F,SAAO,EAAE,eAAe,KAAK,WAAW,EAAE;AAC5C;AAEO,SAAS,wCAAwC,MAAwC;AAC9F,SAAO;AAAA,IACL,WAAW,KAAK,aAAa;AAAA,EAC/B;AACF;AAQO,SAAS,2CACd,MACK;AACL,UAAQ,KAAK,WAAW;AAAA,IACtB,KAAK;AACH,aAAO;AAAA,QACL;AAAA,MACF;AAAA,IAEF,KAAK;AACH,aAAO;AAAA,QACL;AAAA,MACF;AAAA,IAEF;AACE,aAAO,sCAAsC,IAAI;AAAA,EACrD;AACF;AAEO,SAAS,6CACd,MACkC;AAClC,UAAQ,KAAK,aAAa,GAAG;AAAA,IAC3B,KAAK;AACH,aAAO;AAAA,QACL;AAAA,MACF;AAAA,IAEF,KAAK;AACH,aAAO;AAAA,QACL;AAAA,MACF;AAAA,IAEF;AACE,aAAO,wCAAwC,IAAI;AAAA,EACvD;AACF;AAYO,SAAS,kDACd,MACK;AACL,SAAO;AAAA,IACL,eAAe,KAAK,WAAW;AAAA,IAC/B,sBAAsB,KAAK,sBAAsB;AAAA,IACjD,uBAAuB,KAAK,uBAAuB;AAAA,EACrD;AACF;AAEO,SAAS,oDACd,MACyC;AACzC,SAAO;AAAA,IACL,WAAW,KAAK,aAAa;AAAA,IAC7B,sBAAsB,KAAK,sBAAsB;AAAA,IACjD,uBAAuB,KAAK,uBAAuB;AAAA,EACrD;AACF;AAQO,SAAS,sDACd,MACK;AACL,SAAO,EAAE,eAAe,KAAK,WAAW,EAAE;AAC5C;AAEO,SAAS,wDACd,MAC6C;AAC7C,SAAO;A
AAA,IACL,WAAW,KAAK,aAAa;AAAA,EAC/B;AACF;AAQO,SAAS,kCAAkC,MAAkC;AAClF,SAAO;AAAA,IACL,aAAa,mDAAmD,KAAK,OAAO,CAAC;AAAA,EAC/E;AACF;AAEO,SAAS,iDACd,QACO;AACP,SAAO,OAAO,IAAI,CAAC,SAAS;AAC1B,WAAO,4CAA4C,IAAI;AAAA,EACzD,CAAC;AACH;AAEO,SAAS,mDACd,QACO;AACP,SAAO,OAAO,IAAI,CAAC,SAAS;AAC1B,WAAO,8CAA8C,IAAI;AAAA,EAC3D,CAAC;AACH;AA8BO,SAAS,wBAAwB,MAA0B;AAChE,SAAO;AAAA,IACL,MAAM,KAAK,MAAM;AAAA,IACjB,aAAa,KAAK,aAAa;AAAA,IAC/B,gBAAgB,KAAK,gBAAgB;AAAA,IACrC,cAAc,KAAK,cAAc;AAAA,IACjC,iBAAiB,KAAK,iBAAiB;AAAA,IACvC,UAAU,CAAC,KAAK,UAAU,IAAI,KAAK,UAAU,IAAI,2BAA2B,KAAK,UAAU,CAAC;AAAA,IAC5F,YAAY,CAAC,KAAK,YAAY,IAC1B,KAAK,YAAY,IACjB,6BAA6B,KAAK,YAAY,CAAC;AAAA,IACnD,eAAe,CAAC,KAAK,eAAe,IAChC,KAAK,eAAe,IACpB,4BAA4B,KAAK,eAAe,CAAC;AAAA,IACrD,qBAAqB,CAAC,KAAK,qBAAqB,IAC5C,KAAK,qBAAqB,IAC1B,4BAA4B,KAAK,qBAAqB,CAAC;AAAA,IAC3D,UAAU,KAAK,YAAY;AAAA,IAC3B,eAAe,KAAK,MAAM;AAAA,IAC1B,eAAe,CAAC,KAAK,eAAe,IAChC,KAAK,eAAe,IACpB,sCAAsC,KAAK,eAAe,CAAC;AAAA,EACjE;AACF;AAEO,SAAS,0BAA0B,MAA0B;AAClE,SAAO;AAAA,IACL,MAAM,KAAK,MAAM;AAAA,IACjB,aAAa,KAAK,aAAa;AAAA,IAC/B,gBAAgB,KAAK,gBAAgB;AAAA,IACrC,cAAc,KAAK,cAAc;AAAA,IACjC,iBAAiB,KAAK,iBAAiB;AAAA,IACvC,UAAU,CAAC,KAAK,UAAU,IAAI,KAAK,UAAU,IAAI,6BAA6B,KAAK,UAAU,CAAC;AAAA,IAC9F,YAAY,CAAC,KAAK,YAAY,IAC1B,KAAK,YAAY,IACjB,+BAA+B,KAAK,YAAY,CAAC;AAAA,IACrD,eAAe,CAAC,KAAK,eAAe,IAChC,KAAK,eAAe,IACpB,8BAA8B,KAAK,eAAe,CAAC;AAAA,IACvD,qBAAqB,CAAC,KAAK,qBAAqB,IAC5C,KAAK,qBAAqB,IAC1B,8BAA8B,KAAK,qBAAqB,CAAC;AAAA,IAC7D,YAAY,KAAK,UAAU;AAAA,IAC3B,MAAM,KAAK,aAAa;AAAA,IACxB,eAAe,CAAC,KAAK,eAAe,IAChC,KAAK,eAAe,IACpB,wCAAwC,KAAK,eAAe,CAAC;AAAA,EACnE;AACF;AAcO,SAAS,6BAA6B,MAA+B;AAC1E,SAAO;AAAA,IACL,WAAW,KAAK,WAAW;AAAA,IAC3B,gBAAgB,KAAK,gBAAgB;AAAA,IACrC,wBAAwB,KAAK,wBAAwB;AAAA,IACrD,eAAe,CAAC,KAAK,eAAe,IAChC,KAAK,eAAe,IACpB,0CAA0C,KAAK,eAAe,CAAC;AAAA,EACrE;AACF;AAEO,SAAS,+BAA+B,MAA+B;AAC5E,SAAO;AAAA,IACL,WAAW,KAAK,WAAW;AAAA,IAC3B,gBAAgB,KAAK,gBAAgB;AAAA,IACrC,wBAAwB,KAAK,wBAAwB;AAAA,IACrD,eAAe,CAAC,KAAK,eAAe,IAChC,KAAK,eAAe,IACpB,4CAA4C,KAAK,eAAe,CAAC;AAAA,EACvE;AACF;AA4CO,SAAS,0CACd,MACK;
AACL,SAAO;AAAA,IACL,OAAG,yCAAgB,KAAK,wBAAwB,CAAC,CAAC;AAAA,IAClD,aAAa,KAAK,aAAa;AAAA,IAC/B,4BAA4B,KAAK,4BAA4B;AAAA,IAC7D,2BAA2B,KAAK,2BAA2B;AAAA,IAC3D,8BAA8B,KAAK,8BAA8B;AAAA,IACjE,6BAA6B,KAAK,6BAA6B;AAAA,IAC/D,+CACE,KAAK,+CAA+C;AAAA,IACtD,sBAAsB,KAAK,sBAAsB;AAAA,IACjD,wBAAwB,KAAK,wBAAwB;AAAA,IACrD,0BAA0B,KAAK,0BAA0B;AAAA,IACzD,wBAAwB,KAAK,wBAAwB;AAAA,IACrD,qBAAqB,KAAK,qBAAqB;AAAA,IAC/C,cAAc,KAAK,cAAc;AAAA,IACjC,eAAe,KAAK,eAAe;AAAA,IACnC,aAAa,KAAK,aAAa;AAAA,IAC/B,6BAA6B,KAAK,6BAA6B;AAAA,IAC/D,0BAA0B,KAAK,0BAA0B;AAAA,IACzD,sBAAsB,KAAK,sBAAsB;AAAA,IACjD,cAAc,KAAK,cAAc;AAAA,EACnC;AACF;AAEO,SAAS,4CACd,MACiC;AACjC,SAAO;AAAA,IACL,0BAAsB,yCAAgB,MAAM;AAAA,MAC1C;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,IACF,CAAC;AAAA,IACD,aAAa,KAAK,aAAa;AAAA,IAC/B,4BAA4B,KAAK,4BAA4B;AAAA,IAC7D,2BAA2B,KAAK,2BAA2B;AAAA,IAC3D,8BAA8B,KAAK,8BAA8B;AAAA,IACjE,6BAA6B,KAAK,6BAA6B;AAAA,IAC/D,+CACE,KAAK,+CAA+C;AAAA,IACtD,sBAAsB,KAAK,sBAAsB;AAAA,IACjD,wBAAwB,KAAK,wBAAwB;AAAA,IACrD,0BAA0B,KAAK,0BAA0B;AAAA,IACzD,wBAAwB,KAAK,wBAAwB;AAAA,IACrD,qBAAqB,KAAK,qBAAqB;AAAA,IAC/C,cAAc,KAAK,cAAc;AAAA,IACjC,eAAe,KAAK,eAAe;AAAA,IACnC,aAAa,KAAK,aAAa;AAAA,IAC/B,6BAA6B,KAAK,6BAA6B;AAAA,IAC/D,0BAA0B,KAAK,0BAA0B;AAAA,IACzD,sBAAsB,KAAK,sBAAsB;AAAA,IACjD,cAAc,KAAK,cAAc;AAAA,EACnC;AACF;AAGO,IAAK,8BAAL,kBAAKC,iCAAL;AAEL,EAAAA,6BAAA,aAAU;AAEV,EAAAA,6BAAA,UAAO;AAEP,EAAAA,6BAAA,mBAAgB;AAEhB,EAAAA,6BAAA,UAAO;AAEP,EAAAA,6BAAA,eAAY;AAEZ,EAAAA,6BAAA,eAAY;AAEZ,EAAAA,6BAAA,cAAW;AAdD,SAAAA;AAAA,GAAA;AAiCL,IAAK,8BAAL,kBAAKC,iCAAL;AAEL,EAAAA,6BAAA,eAAY;AAEZ,EAAAA,6BAAA,cAAW;AAJD,SAAAA;AAAA,GAAA;AAkBL,IAAK,2BAAL,kBAAKC,8BAAL;AAEL,EAAAA,0BAAA,QAAK;AAEL,EAAAA,0BAAA,QAAK;AAEL,EAAAA,0BAAA,QAAK;AAEL,EAAAA,0BAAA,QAAK;AAEL,EAAAA,0BAAA,QAAK;AAEL,EAAAA,0BAAA,QAAK;AAZK,SAAAA;AAAA,GAAA;AA8BL,IAAK,gCAAL,kBAAKC,mCAAL;AAEL,EAAAA,+BAAA,qBAAkB;AAElB,EAAAA,+BAAA,iBAAc;AAEd,EAAAA,+BAAA,wBAAqB;AANX,SAAAA;AAAA,GAAA;AAqBL,IAAK,
8BAAL,kBAAKC,iCAAL;AAEL,EAAAA,6BAAA,UAAO;AAEP,EAAAA,6BAAA,8BAA2B;AAE3B,EAAAA,6BAAA,oCAAiC;AANvB,SAAAA;AAAA,GAAA;AAqBL,IAAK,2CAAL,kBAAKC,8CAAL;AAEL,EAAAA,0CAAA,UAAO;AAEP,EAAAA,0CAAA,kBAAe;AAJL,SAAAA;AAAA,GAAA;AAkBL,IAAK,mCAAL,kBAAKC,sCAAL;AAEL,EAAAA,kCAAA,cAAW;AAEX,EAAAA,kCAAA,aAAU;AAJA,SAAAA;AAAA,GAAA;AAiBL,SAAS,4BAA4B,QAAoC;AAC9E,SAAO,OAAO,IAAI,CAAC,SAAS;AAC1B,WAAO,uBAAuB,IAAI;AAAA,EACpC,CAAC;AACH;AAEO,SAAS,8BAA8B,QAAoC;AAChF,SAAO,OAAO,IAAI,CAAC,SAAS;AAC1B,WAAO,yBAAyB,IAAI;AAAA,EACtC,CAAC;AACH;AAYO,SAAS,uBAAuB,MAAyB;AAC9D,SAAO;AAAA,IACL,iBAAiB,KAAK,iBAAiB;AAAA,IACvC,iBAAiB,KAAK,iBAAiB;AAAA,IACvC,iBAAiB,CAAC,KAAK,iBAAiB,IACpC,KAAK,iBAAiB,IACtB,+BAA+B,KAAK,iBAAiB,CAAC;AAAA,EAC5D;AACF;AAEO,SAAS,yBAAyB,MAAyB;AAChE,SAAO;AAAA,IACL,iBAAiB,KAAK,iBAAiB;AAAA,IACvC,iBAAiB,KAAK,iBAAiB;AAAA,IACvC,iBAAiB,CAAC,KAAK,iBAAiB,IACpC,KAAK,iBAAiB,IACtB,iCAAiC,KAAK,iBAAiB,CAAC;AAAA,EAC9D;AACF;AAUO,SAAS,+BAA+B,MAAiC;AAC9E,SAAO,EAAE,MAAM,KAAK,MAAM,GAAG,YAAY,KAAK,YAAY,EAAE;AAC9D;AAEO,SAAS,iCAAiC,MAAiC;AAChF,SAAO;AAAA,IACL,MAAM,KAAK,MAAM;AAAA,IACjB,YAAY,CAAC,KAAK,YAAY,IAC1B,KAAK,YAAY,IACjB,OAAO;AAAA,MACL,OAAO,QAAQ,KAAK,YAAY,CAAC,EAAE,IAAI,CAAC,CAAC,IAAI,EAAE,MAAqB,CAAC,IAAI,EAAE,CAAC;AAAA,IAC9E;AAAA,EACN;AACF;AAQO,SAAS,+BAA+B,MAA+B;AAC5E,SAAO;AAAA,IACL,UAAU,+BAA+B,KAAK,OAAO,CAAC;AAAA,EACxD;AACF;AAEO,SAAS,6BAA6B,QAAqC;AAChF,SAAO,OAAO,IAAI,CAAC,SAAS;AAC1B,WAAO,wBAAwB,IAAI;AAAA,EACrC,CAAC;AACH;AAEO,SAAS,+BAA+B,QAAqC;AAClF,SAAO,OAAO,IAAI,CAAC,SAAS;AAC1B,WAAO,0BAA0B,IAAI;AAAA,EACvC,CAAC;AACH;AAgBO,SAAS,gCAAgC,MAAgC;AAC9E,SAAO;AAAA,IACL,MAAM,KAAK,MAAM;AAAA,IACjB,QAAQ,KAAK,QAAQ;AAAA,IACrB,YAAY,CAAC,KAAK,YAAY,IAC1B,KAAK,YAAY,IACjB,mCAAmC,KAAK,YAAY,CAAC;AAAA,IACzD,kBAAkB,wCAAwC,KAAK,kBAAkB,CAAC;AAAA,IAClF,QAAQ,gCAAgC,KAAK,QAAQ,CAAC;AAAA,EACxD;AACF;AA6BO,SAAS,mCAAmC,MAAmC;AACpF,SAAO;AAAA,IACL,QAAQ,KAAK,QAAQ;AAAA,IACrB,cAAc,KAAK,cAAc;AAAA,IACjC,WAAW,CAAC,KAAK,WAAW,IAAI,KAAK,WAAW,IAAI,IAAI,KAAK,KAAK,WAAW,CAAC;AAAA,IAC9E,SAAS,CAAC,KAAK,SAAS,IAAI,KAAK,SAAS,IAAI,IAAI,KAAK,KAAK,SAAS,CAAC;AAAA,IACtE,QAAQ,oCA
AoC,KAAK,QAAQ,CAAC;AAAA,IAC1D,UAAU,sCAAsC,KAAK,UAAU,CAAC;AAAA,IAChE,WAAW,KAAK,gBAAgB;AAAA,IAChC,iBAAiB,KAAK,aAAa;AAAA,IACnC,sBAAsB,KAAK,sBAAsB;AAAA,IACjD,oBAAoB,KAAK,oBAAoB;AAAA,EAC/C;AACF;AAKO,SAAS,oCAAoC,QAA0C;AAC5F,SAAO,OAAO,IAAI,CAAC,SAAS;AAC1B,WAAO,+BAA+B,IAAI;AAAA,EAC5C,CAAC;AACH;AAkBO,SAAS,+BAA+B,MAA+B;AAC5E,SAAO;AAAA,IACL,KAAK,KAAK,KAAK;AAAA,IACf,cAAc,KAAK,cAAc;AAAA,IACjC,YAAY,KAAK,YAAY;AAAA,IAC7B,MAAM,KAAK,MAAM;AAAA,IACjB,SAAS,KAAK,SAAS;AAAA,IACvB,mBAAmB,KAAK,mBAAmB;AAAA,EAC7C;AACF;AAEO,SAAS,sCAAsC,QAA4C;AAChG,SAAO,OAAO,IAAI,CAAC,SAAS;AAC1B,WAAO,iCAAiC,IAAI;AAAA,EAC9C,CAAC;AACH;AAgBO,SAAS,iCAAiC,MAAiC;AAChF,SAAO;AAAA,IACL,KAAK,KAAK,KAAK;AAAA,IACf,SAAS,KAAK,SAAS;AAAA,IACvB,MAAM,KAAK,MAAM;AAAA,IACjB,SAAS,KAAK,SAAS;AAAA,IACvB,mBAAmB,KAAK,mBAAmB;AAAA,EAC7C;AACF;AAEO,SAAS,wCACd,QACO;AACP,SAAO,OAAO,IAAI,CAAC,SAAS;AAC1B,WAAO,mCAAmC,IAAI;AAAA,EAChD,CAAC;AACH;AAYO,SAAS,gCAAgC,MAAgC;AAC9E,SAAO;AAAA,IACL,YAAY,KAAK,YAAY;AAAA,IAC7B,2BAA2B,KAAK,2BAA2B;AAAA,IAC3D,uCAAuC,KAAK,uCAAuC;AAAA,EACrF;AACF;AAsBO,SAAS,gCAAgC,MAAkC;AAChF,SAAO;AAAA,IACL,MAAM,KAAK,MAAM;AAAA,IACjB,aAAa,KAAK,aAAa;AAAA,IAC/B,QAAQ,uCAAuC,KAAK,QAAQ,CAAC;AAAA,IAC7D,mBAAmB,CAAC,KAAK,0BAA0B,IAC/C,KAAK,0BAA0B,IAC/B,wCAAwC,KAAK,0BAA0B,CAAC;AAAA,IAC5E,gBAAgB,CAAC,KAAK,gBAAgB,IAClC,KAAK,gBAAgB,IACrB,sCAAsC,KAAK,gBAAgB,CAAC;AAAA,IAChE,kBAAkB,CAAC,KAAK,iBAAiB,IACrC,KAAK,iBAAiB,IACtB,uCAAuC,KAAK,iBAAiB,CAAC;AAAA,IAClE,eAAe,KAAK,MAAM;AAAA,IAC1B,eAAe,CAAC,KAAK,eAAe,IAChC,KAAK,eAAe,IACpB,sCAAsC,KAAK,eAAe,CAAC;AAAA,EACjE;AACF;AAEO,SAAS,kCAAkC,MAAkC;AAClF,SAAO;AAAA,IACL,MAAM,KAAK,MAAM;AAAA,IACjB,aAAa,KAAK,aAAa;AAAA,IAC/B,QAAQ,yCAAyC,KAAK,QAAQ,CAAC;AAAA,IAC/D,0BAA0B,CAAC,KAAK,mBAAmB,IAC/C,KAAK,mBAAmB,IACxB,0CAA0C,KAAK,mBAAmB,CAAC;AAAA,IACvE,gBAAgB,CAAC,KAAK,gBAAgB,IAClC,KAAK,gBAAgB,IACrB,wCAAwC,KAAK,gBAAgB,CAAC;AAAA,IAClE,iBAAiB,CAAC,KAAK,kBAAkB,IACrC,KAAK,kBAAkB,IACvB,yCAAyC,KAAK,kBAAkB,CAAC;AAAA,IACrE,MAAM,KAAK,aAAa;AAAA,IACxB,eAAe,CAAC,KAAK,eAAe,IAChC,KAAK,eAAe,IACpB,wCAAwC,KAAK,eAAe,CAAC;AAAA,EACnE;AACF;AAEO,SAAS,uCAC
d,QACO;AACP,SAAO,OAAO,IAAI,CAAC,SAAS;AAC1B,WAAO,kCAAkC,IAAI;AAAA,EAC/C,CAAC;AACH;AAEO,SAAS,yCACd,QACO;AACP,SAAO,OAAO,IAAI,CAAC,SAAS;AAC1B,WAAO,oCAAoC,IAAI;AAAA,EACjD,CAAC;AACH;AAmBO,SAAS,6BAA6B,MAA+B;AAC1E,SAAO;AAAA,IACL,eAAe,KAAK,WAAW;AAAA,IAC/B,MAAM,KAAK,MAAM;AAAA,IACjB,aAAa,KAAK,aAAa;AAAA,IAC/B,SAAS,KAAK,SAAS;AAAA,IACvB,QAAQ,sCAAsC,KAAK,QAAQ,CAAC;AAAA,IAC5D,SAAS,uCAAuC,KAAK,SAAS,CAAC;AAAA,EACjE;AACF;AAEO,SAAS,+BAA+B,MAA+B;AAC5E,SAAO;AAAA,IACL,WAAW,KAAK,aAAa;AAAA,IAC7B,MAAM,KAAK,MAAM;AAAA,IACjB,aAAa,KAAK,aAAa;AAAA,IAC/B,SAAS,KAAK,SAAS;AAAA,IACvB,QAAQ,wCAAwC,KAAK,QAAQ,CAAC;AAAA,IAC9D,SAAS,yCAAyC,KAAK,SAAS,CAAC;AAAA,EACnE;AACF;AA0BO,SAAS,kCAAkC,MAAoC;AACpF,UAAQ,KAAK,WAAW;AAAA,IACtB,KAAK;AACH,aAAO,2BAA2B,IAAwB;AAAA,IAE5D,KAAK;AACH,aAAO,mCAAmC,IAAgC;AAAA,IAE5E,KAAK;AACH,aAAO,mBAAmB,IAAgB;AAAA,IAE5C,KAAK;AACH,aAAO,6BAA6B,IAA0B;AAAA,IAEhE,KAAK;AACH,aAAO,iCAAiC,IAA8B;AAAA,IAExE,KAAK;AACH,aAAO,sBAAsB,IAAmB;AAAA,IAElD,KAAK;AACH,aAAO,qBAAqB,IAAkB;AAAA,IAEhD,KAAK;AACH,aAAO,2BAA2B,IAAwB;AAAA,IAE5D,KAAK;AACH,aAAO,6BAA6B,IAA0B;AAAA,IAEhE,KAAK;AACH,aAAO,mCAAmC,IAAgC;AAAA,IAE5E,KAAK;AACH,aAAO,4BAA4B,IAAyB;AAAA,IAE9D,KAAK;AACH,aAAO,qBAAqB,IAAkB;AAAA,IAEhD,KAAK;AACH,aAAO,kCAAkC,IAA+B;AAAA,IAE1E,KAAK;AACH,aAAO,+BAA+B,IAA4B;AAAA,IAEpE,KAAK;AACH,aAAO,kCAAkC,IAA+B;AAAA,IAE1E,KAAK;AACH,aAAO,0CAA0C,IAAuC;AAAA,IAE1F,KAAK;AACH,aAAO,sBAAsB,IAAmB;AAAA,IAElD,KAAK;AACH,aAAO,oCAAoC,IAAiC;AAAA,IAE9E,KAAK;AACH,aAAO,oCAAoC,IAAiC;AAAA,IAE9E,KAAK;AACH,aAAO,8BAA8B,IAA2B;AAAA,IAElE;AACE,aAAO,6BAA6B,IAAI;AAAA,EAC5C;AACF;AAEO,SAAS,oCAAoC,MAAoC;AACtF,UAAQ,KAAK,aAAa,GAAG;AAAA,IAC3B,KAAK;AACH,aAAO,6BAA6B,IAAwB;AAAA,IAE9D,KAAK;AACH,aAAO,qCAAqC,IAAgC;AAAA,IAE9E,KAAK;AACH,aAAO,qBAAqB,IAAgB;AAAA,IAE9C,KAAK;AACH,aAAO,+BAA+B,IAA0B;AAAA,IAElE,KAAK;AACH,aAAO,mCAAmC,IAA8B;AAAA,IAE1E,KAAK;AACH,aAAO,wBAAwB,IAAmB;AAAA,IAEpD,KAAK;AACH,aAAO,uBAAuB,IAAkB;AAAA,IAElD,KAAK;AACH,aAAO,6BAA6B,IAAwB;AAAA,IAE9D,KAAK;AACH,aAAO,+BAA+B,IAA0B;AAAA,IAElE,KAAK;AACH,aAAO,qCAAqC,IAAgC;AAAA,IAE9E,KAAK;AACH,aAAO,8BAA8B,IAAyB;AAAA,IAEhE,KA
AK;AACH,aAAO,uBAAuB,IAAkB;AAAA,IAElD,KAAK;AACH,aAAO,oCAAoC,IAA+B;AAAA,IAE5E,KAAK;AACH,aAAO,iCAAiC,IAA4B;AAAA,IAEtE,KAAK;AACH,aAAO,oCAAoC,IAA+B;AAAA,IAE5E,KAAK;AACH,aAAO,4CAA4C,IAAuC;AAAA,IAE5F,KAAK;AACH,aAAO,wBAAwB,IAAmB;AAAA,IAEpD,KAAK;AACH,aAAO,sCAAsC,IAAiC;AAAA,IAEhF,KAAK;AACH,aAAO,sCAAsC,IAAiC;AAAA,IAEhF,KAAK;AACH,aAAO,gCAAgC,IAA2B;AAAA,IAEpE;AACE,aAAO,+BAA+B,IAAI;AAAA,EAC9C;AACF;AAEO,SAAS,sCACd,QACO;AACP,SAAO,OAAO,IAAI,CAAC,SAAS;AAC1B,WAAO,iCAAiC,IAAI;AAAA,EAC9C,CAAC;AACH;AAEO,SAAS,wCACd,QACO;AACP,SAAO,OAAO,IAAI,CAAC,SAAS;AAC1B,WAAO,mCAAmC,IAAI;AAAA,EAChD,CAAC;AACH;AAcO,SAAS,iCAAiC,MAAmC;AAClF,SAAO;AAAA,IACL,MAAM,KAAK,MAAM;AAAA,IACjB,QAAQ,KAAK,QAAQ;AAAA,IACrB,eAAe,KAAK,eAAe;AAAA,IACnC,QAAQ,CAAC,KAAK,QAAQ,IAClB,KAAK,QAAQ,IACb,sCAAsC,KAAK,QAAQ,CAAC;AAAA,EAC1D;AACF;AAEO,SAAS,mCAAmC,MAAmC;AACpF,SAAO;AAAA,IACL,MAAM,KAAK,MAAM;AAAA,IACjB,QAAQ,KAAK,QAAQ;AAAA,IACrB,eAAe,KAAK,eAAe;AAAA,IACnC,QAAQ,CAAC,KAAK,QAAQ,IAClB,KAAK,QAAQ,IACb,wCAAwC,KAAK,QAAQ,CAAC;AAAA,EAC5D;AACF;AAEO,SAAS,uCACd,QACO;AACP,SAAO,OAAO,IAAI,CAAC,SAAS;AAC1B,WAAO,kCAAkC,IAAI;AAAA,EAC/C,CAAC;AACH;AAEO,SAAS,yCACd,QACO;AACP,SAAO,OAAO,IAAI,CAAC,SAAS;AAC1B,WAAO,oCAAoC,IAAI;AAAA,EACjD,CAAC;AACH;AAUO,SAAS,kCAAkC,MAAoC;AACpF,SAAO,EAAE,MAAM,KAAK,MAAM,GAAG,YAAY,KAAK,YAAY,EAAE;AAC9D;AAEO,SAAS,oCAAoC,MAAoC;AACtF,SAAO;AAAA,IACL,MAAM,KAAK,MAAM;AAAA,IACjB,YAAY,KAAK,YAAY;AAAA,EAC/B;AACF;AAQO,SAAS,2BAA2B,MAA6B;AACtE,SAAO;AAAA,IACL,eAAe,KAAK,WAAW;AAAA,IAC/B,MAAM,KAAK,MAAM;AAAA,IACjB,aAAa,KAAK,aAAa;AAAA,IAC/B,SAAS,KAAK,SAAS;AAAA,IACvB,QAAQ,sCAAsC,KAAK,QAAQ,CAAC;AAAA,IAC5D,SAAS,uCAAuC,KAAK,SAAS,CAAC;AAAA,EACjE;AACF;AAEO,SAAS,6BAA6B,MAA6B;AACxE,SAAO;AAAA,IACL,WAAW,KAAK,aAAa;AAAA,IAC7B,MAAM,KAAK,MAAM;AAAA,IACjB,aAAa,KAAK,aAAa;AAAA,IAC/B,SAAS,KAAK,SAAS;AAAA,IACvB,QAAQ,wCAAwC,KAAK,QAAQ,CAAC;AAAA,IAC9D,SAAS,yCAAyC,KAAK,SAAS,CAAC;AAAA,EACnE;AACF;AAcO,SAAS,mCAAmC,MAAqC;AACtF,SAAO;AAAA,IACL,eAAe,KAAK,WAAW;AAAA,IAC/B,MAAM,KAAK,MAAM;AAAA,IACjB,aAAa,KAAK,aAAa;AAAA,IAC/B,SAAS,KAAK,SAAS;AAAA,IACvB,QAAQ,sCAAsC,KAAK,QAAQ,CAAC;AAAA,IAC5D,S
AAS,uCAAuC,KAAK,SAAS,CAAC;AAAA,IAC/D,qBAAqB,KAAK,qBAAqB;AAAA,IAC/C,mBAAmB,KAAK,mBAAmB;AAAA,IAC3C,cAAc,KAAK,cAAc;AAAA,EACnC;AACF;AAEO,SAAS,qCAAqC,MAAqC;AACxF,SAAO;AAAA,IACL,WAAW,KAAK,aAAa;AAAA,IAC7B,MAAM,KAAK,MAAM;AAAA,IACjB,aAAa,KAAK,aAAa;AAAA,IAC/B,SAAS,KAAK,SAAS;AAAA,IACvB,QAAQ,wCAAwC,KAAK,QAAQ,CAAC;AAAA,IAC9D,SAAS,yCAAyC,KAAK,SAAS,CAAC;AAAA,IACjE,qBAAqB,KAAK,qBAAqB;AAAA,IAC/C,mBAAmB,KAAK,mBAAmB;AAAA,IAC3C,cAAc,KAAK,cAAc;AAAA,EACnC;AACF;AAGO,IAAK,wCAAL,kBAAKC,2CAAL;AAEL,EAAAA,uCAAA,QAAK;AAEL,EAAAA,uCAAA,QAAK;AAEL,EAAAA,uCAAA,QAAK;AAEL,EAAAA,uCAAA,QAAK;AAEL,EAAAA,uCAAA,QAAK;AAEL,EAAAA,uCAAA,QAAK;AAEL,EAAAA,uCAAA,QAAK;AAEL,EAAAA,uCAAA,QAAK;AAEL,EAAAA,uCAAA,QAAK;AAEL,EAAAA,uCAAA,QAAK;AAEL,EAAAA,uCAAA,QAAK;AAEL,EAAAA,uCAAA,UAAO;AAEP,EAAAA,uCAAA,UAAO;AAEP,EAAAA,uCAAA,QAAK;AAEL,EAAAA,uCAAA,QAAK;AAEL,EAAAA,uCAAA,QAAK;AAhCK,SAAAA;AAAA,GAAA;AAuEL,SAAS,mBAAmB,MAAqB;AACtD,SAAO;AAAA,IACL,eAAe,KAAK,WAAW;AAAA,IAC/B,MAAM,KAAK,MAAM;AAAA,IACjB,aAAa,KAAK,aAAa;AAAA,IAC/B,SAAS,KAAK,SAAS;AAAA,IACvB,QAAQ,sCAAsC,KAAK,QAAQ,CAAC;AAAA,IAC5D,SAAS,uCAAuC,KAAK,SAAS,CAAC;AAAA,IAC/D,qBAAqB,KAAK,qBAAqB;AAAA,IAC/C,mBAAmB,KAAK,yBAAyB;AAAA,IACjD,YAAY,KAAK,YAAY;AAAA,EAC/B;AACF;AAEO,SAAS,qBAAqB,MAAqB;AACxD,SAAO;AAAA,IACL,WAAW,KAAK,aAAa;AAAA,IAC7B,MAAM,KAAK,MAAM;AAAA,IACjB,aAAa,KAAK,aAAa;AAAA,IAC/B,SAAS,KAAK,SAAS;AAAA,IACvB,QAAQ,wCAAwC,KAAK,QAAQ,CAAC;AAAA,IAC9D,SAAS,yCAAyC,KAAK,SAAS,CAAC;AAAA,IACjE,qBAAqB,KAAK,qBAAqB;AAAA,IAC/C,yBAAyB,KAAK,mBAAmB;AAAA,IACjD,YAAY,KAAK,YAAY;AAAA,EAC/B;AACF;AAGO,IAAK,wBAAL,kBAAKC,2BAAL;AAEL,EAAAA,uBAAA,QAAK;AAEL,EAAAA,uBAAA,QAAK;AAEL,EAAAA,uBAAA,SAAM;AAEN,EAAAA,uBAAA,QAAK;AAEL,EAAAA,uBAAA,SAAM;AAEN,EAAAA,uBAAA,SAAM;AAEN,EAAAA,uBAAA,QAAK;AAEL,EAAAA,uBAAA,SAAM;AAEN,EAAAA,uBAAA,QAAK;AAEL,EAAAA,uBAAA,QAAK;AAEL,EAAAA,uBAAA,YAAS;AAET,EAAAA,uBAAA,YAAS;AAET,EAAAA,uBAAA,SAAM;AAEN,EAAAA,uBAAA,QAAK;AAEL,EAAAA,uBAAA,SAAM;AAEN,EAAAA,uBAAA,QAAK;AAEL,EAAAA,uBAAA,SAAM;AAEN,EAAAA,uBAAA,QAAK;AAEL,EAAAA,uBAAA,QAAK;AAEL,EAAAA,uBAAA,SAAM;AAEN,EAAAA,uBAAA,SAAM;AAEN,EAAAA,uBAAA,QAAK;AAEL,EAAAA
,uBAAA,SAAM;AAEN,EAAAA,uBAAA,SAAM;AAEN,EAAAA,uBAAA,QAAK;AAEL,EAAAA,uBAAA,SAAM;AAEN,EAAAA,uBAAA,YAAS;AAET,EAAAA,uBAAA,YAAS;AAET,EAAAA,uBAAA,QAAK;AAEL,EAAAA,uBAAA,QAAK;AAEL,EAAAA,uBAAA,SAAM;AAEN,EAAAA,uBAAA,QAAK;AAEL,EAAAA,uBAAA,QAAK;AAEL,EAAAA,uBAAA,QAAK;AAEL,EAAAA,uBAAA,SAAM;AAEN,EAAAA,uBAAA,SAAM;AAEN,EAAAA,uBAAA,SAAM;AAEN,EAAAA,uBAAA,QAAK;AAEL,EAAAA,uBAAA,QAAK;AAEL,EAAAA,uBAAA,SAAM;AAEN,EAAAA,uBAAA,QAAK;AAEL,EAAAA,uBAAA,QAAK;AAEL,EAAAA,uBAAA,QAAK;AAEL,EAAAA,uBAAA,SAAM;AAEN,EAAAA,uBAAA,QAAK;AAEL,EAAAA,uBAAA,QAAK;AAEL,EAAAA,uBAAA,SAAM;AAEN,EAAAA,uBAAA,SAAM;AAEN,EAAAA,uBAAA,QAAK;AAEL,EAAAA,uBAAA,QAAK;AAEL,EAAAA,uBAAA,SAAM;AAEN,EAAAA,uBAAA,SAAM;AAEN,EAAAA,uBAAA,QAAK;AAEL,EAAAA,uBAAA,QAAK;AAEL,EAAAA,uBAAA,SAAM;AAEN,EAAAA,uBAAA,QAAK;AAEL,EAAAA,uBAAA,SAAM;AAEN,EAAAA,uBAAA,SAAM;AAEN,EAAAA,uBAAA,SAAM;AAEN,EAAAA,uBAAA,SAAM;AAEN,EAAAA,uBAAA,QAAK;AAEL,EAAAA,uBAAA,SAAM;AAEN,EAAAA,uBAAA,SAAM;AAEN,EAAAA,uBAAA,QAAK;AAEL,EAAAA,uBAAA,QAAK;AAEL,EAAAA,uBAAA,SAAM;AAEN,EAAAA,uBAAA,QAAK;AAEL,EAAAA,uBAAA,QAAK;AAEL,EAAAA,uBAAA,QAAK;AAEL,EAAAA,uBAAA,QAAK;AAEL,EAAAA,uBAAA,QAAK;AAEL,EAAAA,uBAAA,QAAK;AAEL,EAAAA,uBAAA,SAAM;AAEN,EAAAA,uBAAA,QAAK;AAEL,EAAAA,uBAAA,SAAM;AAEN,EAAAA,uBAAA,SAAM;AAEN,EAAAA,uBAAA,SAAM;AAEN,EAAAA,uBAAA,SAAM;AAEN,EAAAA,uBAAA,aAAU;AAEV,EAAAA,uBAAA,SAAM;AAEN,EAAAA,uBAAA,SAAM;AAEN,EAAAA,uBAAA,YAAS;AAET,EAAAA,uBAAA,YAAS;AAET,EAAAA,uBAAA,SAAM;AAEN,EAAAA,uBAAA,SAAM;AAEN,EAAAA,uBAAA,SAAM;AAEN,EAAAA,uBAAA,QAAK;AAEL,EAAAA,uBAAA,SAAM;AAEN,EAAAA,uBAAA,SAAM;AAEN,EAAAA,uBAAA,SAAM;AAEN,EAAAA,uBAAA,SAAM;AAEN,EAAAA,uBAAA,YAAS;AAET,EAAAA,uBAAA,YAAS;AAET,EAAAA,uBAAA,SAAM;AAEN,EAAAA,uBAAA,QAAK;AAEL,EAAAA,uBAAA,SAAM;AAEN,EAAAA,uBAAA,QAAK;AAEL,EAAAA,uBAAA,QAAK;AAEL,EAAAA,uBAAA,SAAM;AAEN,EAAAA,uBAAA,SAAM;AAEN,EAAAA,uBAAA,QAAK;AAEL,EAAAA,uBAAA,SAAM;AAEN,EAAAA,uBAAA,QAAK;AAEL,EAAAA,uBAAA,QAAK;AAEL,EAAAA,uBAAA,SAAM;AAEN,EAAAA,uBAAA,QAAK;AAEL,EAAAA,uBAAA,QAAK;AAEL,EAAAA,uBAAA,QAAK;AAEL,EAAAA,uBAAA,QAAK;AAEL,EAAAA,uBAAA,aAAU;AAEV,EAAAA,uBAAA,aAAU;AAEV,EAAAA,uBAAA,SAAM;AAEN,EAAAA,uBAAA,QAAK;AAEL,EAA
AA,uBAAA,SAAM;AAEN,EAAAA,uBAAA,SAAM;AAEN,EAAAA,uBAAA,SAAM;AAEN,EAAAA,uBAAA,QAAK;AAEL,EAAAA,uBAAA,QAAK;AAEL,EAAAA,uBAAA,QAAK;AAEL,EAAAA,uBAAA,QAAK;AAEL,EAAAA,uBAAA,QAAK;AAEL,EAAAA,uBAAA,QAAK;AAEL,EAAAA,uBAAA,QAAK;AAEL,EAAAA,uBAAA,QAAK;AAEL,EAAAA,uBAAA,QAAK;AAEL,EAAAA,uBAAA,SAAM;AAEN,EAAAA,uBAAA,QAAK;AAEL,EAAAA,uBAAA,QAAK;AAEL,EAAAA,uBAAA,QAAK;AAEL,EAAAA,uBAAA,SAAM;AAEN,EAAAA,uBAAA,QAAK;AAEL,EAAAA,uBAAA,QAAK;AAEL,EAAAA,uBAAA,SAAM;AAEN,EAAAA,uBAAA,SAAM;AAEN,EAAAA,uBAAA,QAAK;AAEL,EAAAA,uBAAA,QAAK;AAEL,EAAAA,uBAAA,YAAS;AAET,EAAAA,uBAAA,YAAS;AAET,EAAAA,uBAAA,SAAM;AAEN,EAAAA,uBAAA,SAAM;AAEN,EAAAA,uBAAA,SAAM;AAEN,EAAAA,uBAAA,QAAK;AAEL,EAAAA,uBAAA,QAAK;AAEL,EAAAA,uBAAA,QAAK;AAEL,EAAAA,uBAAA,SAAM;AAEN,EAAAA,uBAAA,QAAK;AAEL,EAAAA,uBAAA,QAAK;AAEL,EAAAA,uBAAA,QAAK;AAEL,EAAAA,uBAAA,QAAK;AAEL,EAAAA,uBAAA,QAAK;AAEL,EAAAA,uBAAA,SAAM;AAEN,EAAAA,uBAAA,SAAM;AAEN,EAAAA,uBAAA,QAAK;AAEL,EAAAA,uBAAA,QAAK;AAEL,EAAAA,uBAAA,QAAK;AAEL,EAAAA,uBAAA,SAAM;AAEN,EAAAA,uBAAA,SAAM;AAEN,EAAAA,uBAAA,QAAK;AAEL,EAAAA,uBAAA,QAAK;AAEL,EAAAA,uBAAA,YAAS;AAET,EAAAA,uBAAA,YAAS;AAET,EAAAA,uBAAA,QAAK;AAEL,EAAAA,uBAAA,QAAK;AAEL,EAAAA,uBAAA,SAAM;AAEN,EAAAA,uBAAA,QAAK;AAEL,EAAAA,uBAAA,QAAK;AAEL,EAAAA,uBAAA,SAAM;AAEN,EAAAA,uBAAA,QAAK;AAEL,EAAAA,uBAAA,QAAK;AAEL,EAAAA,uBAAA,SAAM;AApVI,SAAAA;AAAA,GAAA;AA0gBL,IAAK,qBAAL,kBAAKC,wBAAL;AAEL,EAAAA,oBAAA,WAAQ;AAER,EAAAA,oBAAA,oBAAiB;AAEjB,EAAAA,oBAAA,cAAW;AAEX,EAAAA,oBAAA,4BAAyB;AARf,SAAAA;AAAA,GAAA;AAmCL,SAAS,6BAA6B,MAA+B;AAC1E,SAAO;AAAA,IACL,eAAe,KAAK,WAAW;AAAA,IAC/B,MAAM,KAAK,MAAM;AAAA,IACjB,aAAa,KAAK,aAAa;AAAA,IAC/B,SAAS,KAAK,SAAS;AAAA,IACvB,QAAQ,sCAAsC,KAAK,QAAQ,CAAC;AAAA,IAC5D,SAAS,uCAAuC,KAAK,SAAS,CAAC;AAAA,IAC/D,qBAAqB,KAAK,qBAAqB;AAAA,IAC/C,gBAAgB,CAAC,KAAK,gBAAgB,IAClC,KAAK,gBAAgB,IACrB,KAAK,gBAAgB,EAAE,IAAI,CAAC,MAAW;AACrC,aAAO;AAAA,IACT,CAAC;AAAA,IACL,SAAS,CAAC,KAAK,SAAS,IACpB,KAAK,SAAS,IACd,KAAK,SAAS,EAAE,IAAI,CAAC,MAAW;AAC9B,aAAO;AAAA,IACT,CAAC;AAAA,EACP;AACF;AAEO,SAAS,+BAA+B,MAA+B;AAC5E,SAAO;AAAA,IACL,WAAW,KAAK,aAAa;AAAA,IAC7B,MAAM,KAAK,MAAM;AAAA,IACjB,aAAa,KAA
K,aAAa;AAAA,IAC/B,SAAS,KAAK,SAAS;AAAA,IACvB,QAAQ,wCAAwC,KAAK,QAAQ,CAAC;AAAA,IAC9D,SAAS,yCAAyC,KAAK,SAAS,CAAC;AAAA,IACjE,qBAAqB,KAAK,qBAAqB;AAAA,IAC/C,gBAAgB,CAAC,KAAK,gBAAgB,IAClC,KAAK,gBAAgB,IACrB,KAAK,gBAAgB,EAAE,IAAI,CAAC,MAAW;AACrC,aAAO;AAAA,IACT,CAAC;AAAA,IACL,SAAS,CAAC,KAAK,SAAS,IACpB,KAAK,SAAS,IACd,KAAK,SAAS,EAAE,IAAI,CAAC,MAAW;AAC9B,aAAO;AAAA,IACT,CAAC;AAAA,EACP;AACF;AAGO,IAAK,kCAAL,kBAAKC,qCAAL;AAEL,EAAAA,iCAAA,QAAK;AAEL,EAAAA,iCAAA,QAAK;AAEL,EAAAA,iCAAA,QAAK;AAEL,EAAAA,iCAAA,QAAK;AAEL,EAAAA,iCAAA,QAAK;AAEL,EAAAA,iCAAA,QAAK;AAEL,EAAAA,iCAAA,QAAK;AAEL,EAAAA,iCAAA,QAAK;AAEL,EAAAA,iCAAA,QAAK;AAEL,EAAAA,iCAAA,QAAK;AAEL,EAAAA,iCAAA,QAAK;AAEL,EAAAA,iCAAA,QAAK;AAEL,EAAAA,iCAAA,QAAK;AAEL,EAAAA,iCAAA,QAAK;AAEL,EAAAA,iCAAA,QAAK;AAEL,EAAAA,iCAAA,QAAK;AAEL,EAAAA,iCAAA,QAAK;AAEL,EAAAA,iCAAA,QAAK;AAEL,EAAAA,iCAAA,QAAK;AAEL,EAAAA,iCAAA,QAAK;AAEL,EAAAA,iCAAA,QAAK;AAEL,EAAAA,iCAAA,QAAK;AAEL,EAAAA,iCAAA,QAAK;AAEL,EAAAA,iCAAA,QAAK;AAEL,EAAAA,iCAAA,QAAK;AAEL,EAAAA,iCAAA,QAAK;AAEL,EAAAA,iCAAA,QAAK;AAEL,EAAAA,iCAAA,QAAK;AAEL,EAAAA,iCAAA,QAAK;AAEL,EAAAA,iCAAA,QAAK;AAEL,EAAAA,iCAAA,QAAK;AAEL,EAAAA,iCAAA,QAAK;AAEL,EAAAA,iCAAA,QAAK;AAEL,EAAAA,iCAAA,QAAK;AAEL,EAAAA,iCAAA,SAAM;AAEN,EAAAA,iCAAA,UAAO;AAEP,EAAAA,iCAAA,QAAK;AAEL,EAAAA,iCAAA,UAAO;AAEP,EAAAA,iCAAA,QAAK;AAEL,EAAAA,iCAAA,QAAK;AAEL,EAAAA,iCAAA,QAAK;AAEL,EAAAA,iCAAA,QAAK;AAEL,EAAAA,iCAAA,YAAS;AAET,EAAAA,iCAAA,YAAS;AAET,EAAAA,iCAAA,QAAK;AAEL,EAAAA,iCAAA,QAAK;AAEL,EAAAA,iCAAA,QAAK;AAEL,EAAAA,iCAAA,QAAK;AAEL,EAAAA,iCAAA,QAAK;AAEL,EAAAA,iCAAA,QAAK;AAEL,EAAAA,iCAAA,YAAS;AAET,EAAAA,iCAAA,YAAS;AAxGC,SAAAA;AAAA,GAAA;AAwKL,IAAK,qBAAL,kBAAKC,wBAAL;AAEL,EAAAA,oBAAA,WAAQ;AAER,EAAAA,oBAAA,YAAS;AAET,EAAAA,oBAAA,gBAAa;AAEb,EAAAA,oBAAA,iBAAc;AAEd,EAAAA,oBAAA,WAAQ;AAER,EAAAA,oBAAA,aAAU;AAEV,EAAAA,oBAAA,UAAO;AAdG,SAAAA;AAAA,GAAA;AAiCL,IAAK,mBAAL,kBAAKC,sBAAL;AAEL,EAAAA,kBAAA,iBAAc;AAEd,EAAAA,kBAAA,eAAY;AAJF,SAAAA;AAAA,GAAA;AA2BL,SAAS,iCAAiC,MAAmC;AAClF,SAAO;AAAA,IACL,eAAe,KAAK,WAAW;AAAA,IAC/B,MAAM,KAAK,MAAM;AAAA,IACjB,aAAa,KAAK,aAAa;AAAA,IAC/B,S
AAS,KAAK,SAAS;AAAA,IACvB,QAAQ,sCAAsC,KAAK,QAAQ,CAAC;AAAA,IAC5D,SAAS,uCAAuC,KAAK,SAAS,CAAC;AAAA,IAC/D,oBAAoB,KAAK,oBAAoB;AAAA,IAC7C,cAAc,KAAK,cAAc;AAAA,EACnC;AACF;AAEO,SAAS,mCAAmC,MAAmC;AACpF,SAAO;AAAA,IACL,WAAW,KAAK,aAAa;AAAA,IAC7B,MAAM,KAAK,MAAM;AAAA,IACjB,aAAa,KAAK,aAAa;AAAA,IAC/B,SAAS,KAAK,SAAS;AAAA,IACvB,QAAQ,wCAAwC,KAAK,QAAQ,CAAC;AAAA,IAC9D,SAAS,yCAAyC,KAAK,SAAS,CAAC;AAAA,IACjE,oBAAoB,KAAK,oBAAoB;AAAA,IAC7C,cAAc,KAAK,cAAc;AAAA,EACnC;AACF;AAQO,SAAS,sBAAsB,MAAwB;AAC5D,SAAO;AAAA,IACL,eAAe,KAAK,WAAW;AAAA,IAC/B,MAAM,KAAK,MAAM;AAAA,IACjB,aAAa,KAAK,aAAa;AAAA,IAC/B,SAAS,KAAK,SAAS;AAAA,IACvB,QAAQ,sCAAsC,KAAK,QAAQ,CAAC;AAAA,IAC5D,SAAS,uCAAuC,KAAK,SAAS,CAAC;AAAA,EACjE;AACF;AAEO,SAAS,wBAAwB,MAAwB;AAC9D,SAAO;AAAA,IACL,WAAW,KAAK,aAAa;AAAA,IAC7B,MAAM,KAAK,MAAM;AAAA,IACjB,aAAa,KAAK,aAAa;AAAA,IAC/B,SAAS,KAAK,SAAS;AAAA,IACvB,QAAQ,wCAAwC,KAAK,QAAQ,CAAC;AAAA,IAC9D,SAAS,yCAAyC,KAAK,SAAS,CAAC;AAAA,EACnE;AACF;AAYO,SAAS,qBAAqB,MAAuB;AAC1D,SAAO;AAAA,IACL,eAAe,KAAK,WAAW;AAAA,IAC/B,MAAM,KAAK,MAAM;AAAA,IACjB,aAAa,KAAK,aAAa;AAAA,IAC/B,SAAS,KAAK,SAAS;AAAA,IACvB,QAAQ,sCAAsC,KAAK,QAAQ,CAAC;AAAA,IAC5D,SAAS,uCAAuC,KAAK,SAAS,CAAC;AAAA,IAC/D,cAAc,KAAK,cAAc;AAAA,IACjC,eAAe,KAAK,eAAe;AAAA,EACrC;AACF;AAEO,SAAS,uBAAuB,MAAuB;AAC5D,SAAO;AAAA,IACL,WAAW,KAAK,aAAa;AAAA,IAC7B,MAAM,KAAK,MAAM;AAAA,IACjB,aAAa,KAAK,aAAa;AAAA,IAC/B,SAAS,KAAK,SAAS;AAAA,IACvB,QAAQ,wCAAwC,KAAK,QAAQ,CAAC;AAAA,IAC9D,SAAS,yCAAyC,KAAK,SAAS,CAAC;AAAA,IACjE,cAAc,KAAK,cAAc;AAAA,IACjC,eAAe,KAAK,eAAe;AAAA,EACrC;AACF;AAcO,SAAS,2BAA2B,MAA6B;AACtE,SAAO;AAAA,IACL,eAAe,KAAK,WAAW;AAAA,IAC/B,MAAM,KAAK,MAAM;AAAA,IACjB,aAAa,KAAK,aAAa;AAAA,IAC/B,SAAS,KAAK,SAAS;AAAA,IACvB,QAAQ,sCAAsC,KAAK,QAAQ,CAAC;AAAA,IAC5D,SAAS,uCAAuC,KAAK,SAAS,CAAC;AAAA,IAC/D,qBAAqB,KAAK,qBAAqB;AAAA,IAC/C,sBAAsB,KAAK,sBAAsB;AAAA,IACjD,cAAc,KAAK,cAAc;AAAA,EACnC;AACF;AAEO,SAAS,6BAA6B,MAA6B;AACxE,SAAO;AAAA,IACL,WAAW,KAAK,aAAa;AAAA,IAC7B,MAAM,KAAK,MAAM;AAAA,IACjB,aAAa,KAAK,aAAa;AAAA,IAC/B,SAAS,KAAK,SAAS;AAAA,IACvB,QAAQ,wCAAwC,KAAK,QAAQ,CAAC;AAAA,IAC9D,SAAS,yCAAyC,KAAK,SAAS,CAAC;AAAA,
IACjE,qBAAqB,KAAK,qBAAqB;AAAA,IAC/C,sBAAsB,KAAK,sBAAsB;AAAA,IACjD,cAAc,KAAK,cAAc;AAAA,EACnC;AACF;AAGO,IAAK,8BAAL,kBAAKC,iCAAL;AAEL,EAAAA,6BAAA,QAAK;AAEL,EAAAA,6BAAA,QAAK;AAEL,EAAAA,6BAAA,QAAK;AAEL,EAAAA,6BAAA,QAAK;AAEL,EAAAA,6BAAA,QAAK;AAEL,EAAAA,6BAAA,QAAK;AAEL,EAAAA,6BAAA,QAAK;AAEL,EAAAA,6BAAA,QAAK;AAEL,EAAAA,6BAAA,QAAK;AAEL,EAAAA,6BAAA,QAAK;AAEL,EAAAA,6BAAA,UAAO;AAEP,EAAAA,6BAAA,QAAK;AAEL,EAAAA,6BAAA,QAAK;AAEL,EAAAA,6BAAA,QAAK;AAEL,EAAAA,6BAAA,QAAK;AA9BK,SAAAA;AAAA,GAAA;AAoEL,SAAS,6BAA6B,MAA+B;AAC1E,SAAO;AAAA,IACL,eAAe,KAAK,WAAW;AAAA,IAC/B,MAAM,KAAK,MAAM;AAAA,IACjB,aAAa,KAAK,aAAa;AAAA,IAC/B,SAAS,KAAK,SAAS;AAAA,IACvB,QAAQ,sCAAsC,KAAK,QAAQ,CAAC;AAAA,IAC5D,SAAS,uCAAuC,KAAK,SAAS,CAAC;AAAA,IAC/D,qBAAqB,KAAK,qBAAqB;AAAA,IAC/C,kBAAkB,KAAK,kBAAkB;AAAA,IACzC,cAAc,KAAK,cAAc;AAAA,EACnC;AACF;AAEO,SAAS,+BAA+B,MAA+B;AAC5E,SAAO;AAAA,IACL,WAAW,KAAK,aAAa;AAAA,IAC7B,MAAM,KAAK,MAAM;AAAA,IACjB,aAAa,KAAK,aAAa;AAAA,IAC/B,SAAS,KAAK,SAAS;AAAA,IACvB,QAAQ,wCAAwC,KAAK,QAAQ,CAAC;AAAA,IAC9D,SAAS,yCAAyC,KAAK,SAAS,CAAC;AAAA,IACjE,qBAAqB,KAAK,qBAAqB;AAAA,IAC/C,kBAAkB,KAAK,kBAAkB;AAAA,IACzC,cAAc,KAAK,cAAc;AAAA,EACnC;AACF;AAgBO,SAAS,mCAAmC,MAAqC;AACtF,SAAO;AAAA,IACL,eAAe,KAAK,WAAW;AAAA,IAC/B,MAAM,KAAK,MAAM;AAAA,IACjB,aAAa,KAAK,aAAa;AAAA,IAC/B,SAAS,KAAK,SAAS;AAAA,IACvB,QAAQ,sCAAsC,KAAK,QAAQ,CAAC;AAAA,IAC5D,SAAS,uCAAuC,KAAK,SAAS,CAAC;AAAA,IAC/D,YAAY,CAAC,KAAK,YAAY,IAC1B,KAAK,YAAY,IACjB,KAAK,YAAY,EAAE,IAAI,CAAC,MAAW;AACjC,aAAO;AAAA,IACT,CAAC;AAAA,IACL,qBAAqB,KAAK,qBAAqB;AAAA,IAC/C,kBAAkB,KAAK,kBAAkB;AAAA,IACzC,cAAc,KAAK,cAAc;AAAA,EACnC;AACF;AAEO,SAAS,qCAAqC,MAAqC;AACxF,SAAO;AAAA,IACL,WAAW,KAAK,aAAa;AAAA,IAC7B,MAAM,KAAK,MAAM;AAAA,IACjB,aAAa,KAAK,aAAa;AAAA,IAC/B,SAAS,KAAK,SAAS;AAAA,IACvB,QAAQ,wCAAwC,KAAK,QAAQ,CAAC;AAAA,IAC9D,SAAS,yCAAyC,KAAK,SAAS,CAAC;AAAA,IACjE,YAAY,CAAC,KAAK,YAAY,IAC1B,KAAK,YAAY,IACjB,KAAK,YAAY,EAAE,IAAI,CAAC,MAAW;AACjC,aAAO;AAAA,IACT,CAAC;AAAA,IACL,qBAAqB,KAAK,qBAAqB;AAAA,IAC/C,kBAAkB,KAAK,kBAAkB;AAAA,IACzC,cAAc,KAAK,cAAc;AAAA,EACnC;AACF;AAGO,IAAK,sBAAL,kBAAKC,yBAAL;AAEL,EAAAA,qBAA
A,cAAW;AAEX,EAAAA,qBAAA,kBAAe;AAEf,EAAAA,qBAAA,YAAS;AAET,EAAAA,qBAAA,cAAW;AAEX,EAAAA,qBAAA,cAAW;AAEX,EAAAA,qBAAA,SAAM;AAEN,EAAAA,qBAAA,WAAQ;AAdE,SAAAA;AAAA,GAAA;AAiCL,IAAK,sCAAL,kBAAKC,yCAAL;AAEL,EAAAA,qCAAA,QAAK;AAEL,EAAAA,qCAAA,QAAK;AAEL,EAAAA,qCAAA,YAAS;AAET,EAAAA,qCAAA,YAAS;AAET,EAAAA,qCAAA,QAAK;AAEL,EAAAA,qCAAA,QAAK;AAEL,EAAAA,qCAAA,QAAK;AAEL,EAAAA,qCAAA,QAAK;AAEL,EAAAA,qCAAA,QAAK;AAEL,EAAAA,qCAAA,QAAK;AAEL,EAAAA,qCAAA,QAAK;AAEL,EAAAA,qCAAA,QAAK;AAEL,EAAAA,qCAAA,QAAK;AAEL,EAAAA,qCAAA,QAAK;AAEL,EAAAA,qCAAA,QAAK;AAEL,EAAAA,qCAAA,QAAK;AAEL,EAAAA,qCAAA,QAAK;AAEL,EAAAA,qCAAA,UAAO;AAEP,EAAAA,qCAAA,UAAO;AAEP,EAAAA,qCAAA,QAAK;AAEL,EAAAA,qCAAA,QAAK;AAEL,EAAAA,qCAAA,QAAK;AAEL,EAAAA,qCAAA,QAAK;AA9CK,SAAAA;AAAA,GAAA;AAoGL,SAAS,4BAA4B,MAA8B;AACxE,SAAO;AAAA,IACL,eAAe,KAAK,WAAW;AAAA,IAC/B,MAAM,KAAK,MAAM;AAAA,IACjB,aAAa,KAAK,aAAa;AAAA,IAC/B,SAAS,KAAK,SAAS;AAAA,IACvB,QAAQ,sCAAsC,KAAK,QAAQ,CAAC;AAAA,IAC5D,SAAS,uCAAuC,KAAK,SAAS,CAAC;AAAA,IAC/D,qBAAqB,KAAK,qBAAqB;AAAA,IAC/C,kBAAkB,KAAK,kBAAkB;AAAA,IACzC,aAAa,KAAK,aAAa;AAAA,IAC/B,kBAAkB,KAAK,MAAM;AAAA,IAC7B,cAAc,KAAK,cAAc;AAAA,IACjC,eAAe,CAAC,KAAK,eAAe,IAChC,KAAK,eAAe,IACpB,KAAK,eAAe,EAAE,IAAI,CAAC,MAAW;AACpC,aAAO;AAAA,IACT,CAAC;AAAA,IACL,QAAQ,KAAK,QAAQ;AAAA,EACvB;AACF;AAEO,SAAS,8BAA8B,MAA8B;AAC1E,SAAO;AAAA,IACL,WAAW,KAAK,aAAa;AAAA,IAC7B,MAAM,KAAK,MAAM;AAAA,IACjB,aAAa,KAAK,aAAa;AAAA,IAC/B,SAAS,KAAK,SAAS;AAAA,IACvB,QAAQ,wCAAwC,KAAK,QAAQ,CAAC;AAAA,IAC9D,SAAS,yCAAyC,KAAK,SAAS,CAAC;AAAA,IACjE,qBAAqB,KAAK,qBAAqB;AAAA,IAC/C,kBAAkB,KAAK,kBAAkB;AAAA,IACzC,aAAa,KAAK,aAAa;AAAA,IAC/B,MAAM,KAAK,kBAAkB;AAAA,IAC7B,cAAc,KAAK,cAAc;AAAA,IACjC,eAAe,CAAC,KAAK,eAAe,IAChC,KAAK,eAAe,IACpB,KAAK,eAAe,EAAE,IAAI,CAAC,MAAW;AACpC,aAAO;AAAA,IACT,CAAC;AAAA,IACL,QAAQ,KAAK,QAAQ;AAAA,EACvB;AACF;AAGO,IAAK,oCAAL,kBAAKC,uCAAL;AAEL,EAAAA,mCAAA,UAAO;AAEP,EAAAA,mCAAA,aAAU;AAJA,SAAAA;AAAA,GAAA;AAiCL,SAAS,qBAAqB,MAAuB;AAC1D,SAAO;AAAA,IACL,eAAe,KAAK,WAAW;AAAA,IAC/B,MAAM,KAAK,MAAM;AAAA,IACjB,aAAa,KAAK,aAAa;AAAA,IAC/B,SAAS,KAAK,SAAS;AAAA,IACvB,QAAQ,sCAAsC,KAAK,QAAQ,CAAC;AAAA
,IAC5D,SAAS,uCAAuC,KAAK,SAAS,CAAC;AAAA,IAC/D,qBAAqB,KAAK,qBAAqB;AAAA,IAC/C,eAAe,KAAK,eAAe;AAAA,IACnC,mBAAmB,KAAK,mBAAmB;AAAA,IAC3C,mBAAmB,KAAK,mBAAmB;AAAA,IAC3C,oBAAoB,KAAK,oBAAoB;AAAA,EAC/C;AACF;AAEO,SAAS,uBAAuB,MAAuB;AAC5D,SAAO;AAAA,IACL,WAAW,KAAK,aAAa;AAAA,IAC7B,MAAM,KAAK,MAAM;AAAA,IACjB,aAAa,KAAK,aAAa;AAAA,IAC/B,SAAS,KAAK,SAAS;AAAA,IACvB,QAAQ,wCAAwC,KAAK,QAAQ,CAAC;AAAA,IAC9D,SAAS,yCAAyC,KAAK,SAAS,CAAC;AAAA,IACjE,qBAAqB,KAAK,qBAAqB;AAAA,IAC/C,eAAe,KAAK,eAAe;AAAA,IACnC,mBAAmB,KAAK,mBAAmB;AAAA,IAC3C,mBAAmB,KAAK,mBAAmB;AAAA,IAC3C,oBAAoB,KAAK,oBAAoB;AAAA,EAC/C;AACF;AAGO,IAAK,0BAAL,kBAAKC,6BAAL;AAEL,EAAAA,yBAAA,QAAK;AAEL,EAAAA,yBAAA,QAAK;AAEL,EAAAA,yBAAA,QAAK;AAEL,EAAAA,yBAAA,QAAK;AAEL,EAAAA,yBAAA,QAAK;AAEL,EAAAA,yBAAA,QAAK;AAEL,EAAAA,yBAAA,QAAK;AAEL,EAAAA,yBAAA,QAAK;AAEL,EAAAA,yBAAA,QAAK;AAEL,EAAAA,yBAAA,QAAK;AAEL,EAAAA,yBAAA,QAAK;AAEL,EAAAA,yBAAA,QAAK;AAEL,EAAAA,yBAAA,QAAK;AAEL,EAAAA,yBAAA,QAAK;AAEL,EAAAA,yBAAA,QAAK;AAEL,EAAAA,yBAAA,QAAK;AAEL,EAAAA,yBAAA,QAAK;AAEL,EAAAA,yBAAA,QAAK;AAEL,EAAAA,yBAAA,QAAK;AAEL,EAAAA,yBAAA,QAAK;AAEL,EAAAA,yBAAA,QAAK;AAEL,EAAAA,yBAAA,QAAK;AAEL,EAAAA,yBAAA,QAAK;AAEL,EAAAA,yBAAA,QAAK;AAEL,EAAAA,yBAAA,UAAO;AAEP,EAAAA,yBAAA,QAAK;AAEL,EAAAA,yBAAA,QAAK;AAEL,EAAAA,yBAAA,QAAK;AAEL,EAAAA,yBAAA,QAAK;AAEL,EAAAA,yBAAA,QAAK;AAEL,EAAAA,yBAAA,QAAK;AAEL,EAAAA,yBAAA,QAAK;AAEL,EAAAA,yBAAA,QAAK;AAlEK,SAAAA;AAAA,GAAA;AA+GL,IAAK,qBAAL,kBAAKC,wBAAL;AAEL,EAAAA,oBAAA,WAAQ;AAER,EAAAA,oBAAA,eAAY;AAJF,SAAAA;AAAA,GAAA;AAmCL,SAAS,kCAAkC,MAAoC;AACpF,SAAO;AAAA,IACL,eAAe,KAAK,WAAW;AAAA,IAC/B,MAAM,KAAK,MAAM;AAAA,IACjB,aAAa,KAAK,aAAa;AAAA,IAC/B,SAAS,KAAK,SAAS;AAAA,IACvB,QAAQ,sCAAsC,KAAK,QAAQ,CAAC;AAAA,IAC5D,SAAS,uCAAuC,KAAK,SAAS,CAAC;AAAA,IAC/D,qBAAqB,KAAK,qBAAqB;AAAA,IAC/C,uBAAuB,KAAK,uBAAuB;AAAA,IACnD,0BAA0B,CAAC,KAAK,0BAA0B,IACtD,KAAK,0BAA0B,IAC/B,4BAA4B,KAAK,0BAA0B,CAAC;AAAA,IAChE,4BAA4B,KAAK,4BAA4B;AAAA,IAC7D,8BAA8B,KAAK,8BAA8B;AAAA,IACjE,gCAAgC,KAAK,gCAAgC;AAAA,EACvE;AACF;AAEO,SAAS,oCAAoC,MAAoC;AACtF,SAAO;AAAA,IACL,WAAW,KAAK,aAAa;AAAA,IAC7B,MAAM,KAAK,MAAM;AAAA,IACj
B,aAAa,KAAK,aAAa;AAAA,IAC/B,SAAS,KAAK,SAAS;AAAA,IACvB,QAAQ,wCAAwC,KAAK,QAAQ,CAAC;AAAA,IAC9D,SAAS,yCAAyC,KAAK,SAAS,CAAC;AAAA,IACjE,qBAAqB,KAAK,qBAAqB;AAAA,IAC/C,uBAAuB,KAAK,uBAAuB;AAAA,IACnD,0BAA0B,CAAC,KAAK,0BAA0B,IACtD,KAAK,0BAA0B,IAC/B,8BAA8B,KAAK,0BAA0B,CAAC;AAAA,IAClE,4BAA4B,KAAK,4BAA4B;AAAA,IAC7D,8BAA8B,KAAK,8BAA8B;AAAA,IACjE,gCAAgC,KAAK,gCAAgC;AAAA,EACvE;AACF;AAGO,IAAK,uCAAL,kBAAKC,0CAAL;AAEL,EAAAA,sCAAA,QAAK;AAEL,EAAAA,sCAAA,QAAK;AAEL,EAAAA,sCAAA,QAAK;AAEL,EAAAA,sCAAA,QAAK;AAEL,EAAAA,sCAAA,QAAK;AAEL,EAAAA,sCAAA,QAAK;AAEL,EAAAA,sCAAA,QAAK;AAEL,EAAAA,sCAAA,QAAK;AAEL,EAAAA,sCAAA,QAAK;AAlBK,SAAAA;AAAA,GAAA;AAsCL,SAAS,4BAA4B,QAAoC;AAC9E,SAAO,OAAO,IAAI,CAAC,SAAS;AAC1B,WAAO,uBAAuB,IAAI;AAAA,EACpC,CAAC;AACH;AAEO,SAAS,8BAA8B,QAAoC;AAChF,SAAO,OAAO,IAAI,CAAC,SAAS;AAC1B,WAAO,yBAAyB,IAAI;AAAA,EACtC,CAAC;AACH;AA8BO,SAAS,uBAAuB,MAAyB;AAC9D,SAAO;AAAA,IACL,MAAM,KAAK,MAAM;AAAA,IACjB,aAAa,KAAK,aAAa;AAAA,IAC/B,MAAM,KAAK,MAAM;AAAA,IACjB,SAAS,KAAK,SAAS;AAAA,IACvB,IAAI,KAAK,IAAI;AAAA,IACb,eAAe,KAAK,eAAe;AAAA,IACnC,iBAAiB,KAAK,iBAAiB;AAAA,IACvC,mBAAmB,KAAK,mBAAmB;AAAA,IAC3C,sBAAsB,KAAK,sBAAsB;AAAA,IACjD,wBAAwB,KAAK,wBAAwB;AAAA,IACrD,0BAA0B,KAAK,0BAA0B;AAAA,IACzD,SAAS,CAAC,KAAK,SAAS,IAAI,KAAK,SAAS,IAAI,iCAAiC,KAAK,SAAS,CAAC;AAAA,EAChG;AACF;AAEO,SAAS,yBAAyB,MAAyB;AAChE,SAAO;AAAA,IACL,MAAM,KAAK,MAAM;AAAA,IACjB,aAAa,KAAK,aAAa;AAAA,IAC/B,MAAM,KAAK,MAAM;AAAA,IACjB,SAAS,KAAK,SAAS;AAAA,IACvB,IAAI,KAAK,IAAI;AAAA,IACb,eAAe,KAAK,eAAe;AAAA,IACnC,iBAAiB,KAAK,iBAAiB;AAAA,IACvC,mBAAmB,KAAK,mBAAmB;AAAA,IAC3C,sBAAsB,KAAK,sBAAsB;AAAA,IACjD,wBAAwB,KAAK,wBAAwB;AAAA,IACrD,0BAA0B,KAAK,0BAA0B;AAAA,IACzD,SAAS,CAAC,KAAK,SAAS,IACpB,KAAK,SAAS,IACd,mCAAmC,KAAK,SAAS,CAAC;AAAA,EACxD;AACF;AAEO,SAAS,iCAAiC,QAAyC;AACxF,SAAO,OAAO,IAAI,CAAC,SAAS;AAC1B,WAAO,4BAA4B,IAAI;AAAA,EACzC,CAAC;AACH;AAEO,SAAS,mCAAmC,QAAyC;AAC1F,SAAO,OAAO,IAAI,CAAC,SAAS;AAC1B,WAAO,8BAA8B,IAAI;AAAA,EAC3C,CAAC;AACH;AAcO,SAAS,4BAA4B,MAA8B;AACxE,SAAO;AAAA,IACL,MAAM,KAAK,MAAM;AAAA,IACjB,eAAe,KAAK,eAAe;AAAA,IACnC,iBAAiB,KAAK,iBAAiB;AAAA,IACvC,mBAAmB,K
AAK,mBAAmB;AAAA,EAC7C;AACF;AAEO,SAAS,8BAA8B,MAA8B;AAC1E,SAAO;AAAA,IACL,MAAM,KAAK,MAAM;AAAA,IACjB,eAAe,KAAK,eAAe;AAAA,IACnC,iBAAiB,KAAK,iBAAiB;AAAA,IACvC,mBAAmB,KAAK,mBAAmB;AAAA,EAC7C;AACF;AAcO,SAAS,+BAA+B,MAAiC;AAC9E,SAAO;AAAA,IACL,eAAe,KAAK,WAAW;AAAA,IAC/B,MAAM,KAAK,MAAM;AAAA,IACjB,aAAa,KAAK,aAAa;AAAA,IAC/B,SAAS,KAAK,SAAS;AAAA,IACvB,QAAQ,sCAAsC,KAAK,QAAQ,CAAC;AAAA,IAC5D,SAAS,uCAAuC,KAAK,SAAS,CAAC;AAAA,IAC/D,uBAAuB,KAAK,uBAAuB;AAAA,IACnD,yBAAyB,KAAK,yBAAyB;AAAA,IACvD,eAAe,KAAK,eAAe;AAAA,EACrC;AACF;AAEO,SAAS,iCAAiC,MAAiC;AAChF,SAAO;AAAA,IACL,WAAW,KAAK,aAAa;AAAA,IAC7B,MAAM,KAAK,MAAM;AAAA,IACjB,aAAa,KAAK,aAAa;AAAA,IAC/B,SAAS,KAAK,SAAS;AAAA,IACvB,QAAQ,wCAAwC,KAAK,QAAQ,CAAC;AAAA,IAC9D,SAAS,yCAAyC,KAAK,SAAS,CAAC;AAAA,IACjE,uBAAuB,KAAK,uBAAuB;AAAA,IACnD,yBAAyB,KAAK,yBAAyB;AAAA,IACvD,eAAe,KAAK,eAAe;AAAA,EACrC;AACF;AAGO,IAAK,oCAAL,kBAAKC,uCAAL;AAEL,EAAAA,mCAAA,QAAK;AAEL,EAAAA,mCAAA,QAAK;AAEL,EAAAA,mCAAA,QAAK;AAEL,EAAAA,mCAAA,QAAK;AAEL,EAAAA,mCAAA,QAAK;AAEL,EAAAA,mCAAA,SAAM;AAEN,EAAAA,mCAAA,QAAK;AAEL,EAAAA,mCAAA,YAAS;AAET,EAAAA,mCAAA,YAAS;AAET,EAAAA,mCAAA,QAAK;AAEL,EAAAA,mCAAA,QAAK;AAEL,EAAAA,mCAAA,QAAK;AAEL,EAAAA,mCAAA,QAAK;AAEL,EAAAA,mCAAA,QAAK;AAEL,EAAAA,mCAAA,QAAK;AAEL,EAAAA,mCAAA,QAAK;AAEL,EAAAA,mCAAA,SAAM;AAEN,EAAAA,mCAAA,QAAK;AAEL,EAAAA,mCAAA,QAAK;AAEL,EAAAA,mCAAA,QAAK;AAEL,EAAAA,mCAAA,QAAK;AAEL,EAAAA,mCAAA,QAAK;AAEL,EAAAA,mCAAA,QAAK;AAEL,EAAAA,mCAAA,QAAK;AAEL,EAAAA,mCAAA,SAAM;AAEN,EAAAA,mCAAA,QAAK;AAEL,EAAAA,mCAAA,QAAK;AAEL,EAAAA,mCAAA,QAAK;AAEL,EAAAA,mCAAA,QAAK;AAEL,EAAAA,mCAAA,QAAK;AAEL,EAAAA,mCAAA,QAAK;AAEL,EAAAA,mCAAA,SAAM;AAEN,EAAAA,mCAAA,aAAU;AAEV,EAAAA,mCAAA,aAAU;AAEV,EAAAA,mCAAA,QAAK;AAEL,EAAAA,mCAAA,QAAK;AAEL,EAAAA,mCAAA,QAAK;AAEL,EAAAA,mCAAA,QAAK;AAEL,EAAAA,mCAAA,QAAK;AAEL,EAAAA,mCAAA,QAAK;AAEL,EAAAA,mCAAA,QAAK;AAEL,EAAAA,mCAAA,QAAK;AAEL,EAAAA,mCAAA,QAAK;AAEL,EAAAA,mCAAA,QAAK;AAEL,EAAAA,mCAAA,UAAO;AAEP,EAAAA,mCAAA,UAAO;AAEP,EAAAA,mCAAA,SAAM;AAEN,EAAAA,mCAAA,QAAK;AAEL,EAAAA,mCAAA,QAAK;AAEL,EAAAA,mCAAA,QAAK;AAEL,EAAAA,mCAAA,YAAS;AAET,EAAAA,mCAAA,YAAS;AAET,EAAA
A,mCAAA,QAAK;AAEL,EAAAA,mCAAA,QAAK;AAEL,EAAAA,mCAAA,QAAK;AAEL,EAAAA,mCAAA,QAAK;AAEL,EAAAA,mCAAA,QAAK;AAEL,EAAAA,mCAAA,QAAK;AAEL,EAAAA,mCAAA,QAAK;AAEL,EAAAA,mCAAA,QAAK;AAEL,EAAAA,mCAAA,QAAK;AAEL,EAAAA,mCAAA,QAAK;AAEL,EAAAA,mCAAA,QAAK;AAEL,EAAAA,mCAAA,QAAK;AAEL,EAAAA,mCAAA,QAAK;AAEL,EAAAA,mCAAA,QAAK;AAEL,EAAAA,mCAAA,SAAM;AAEN,EAAAA,mCAAA,QAAK;AAEL,EAAAA,mCAAA,QAAK;AAEL,EAAAA,mCAAA,QAAK;AAEL,EAAAA,mCAAA,QAAK;AAEL,EAAAA,mCAAA,QAAK;AAhJK,SAAAA;AAAA,GAAA;AA+OL,SAAS,kCAAkC,MAAoC;AACpF,SAAO;AAAA,IACL,eAAe,KAAK,WAAW;AAAA,IAC/B,MAAM,KAAK,MAAM;AAAA,IACjB,aAAa,KAAK,aAAa;AAAA,IAC/B,SAAS,KAAK,SAAS;AAAA,IACvB,QAAQ,sCAAsC,KAAK,QAAQ,CAAC;AAAA,IAC5D,SAAS,uCAAuC,KAAK,SAAS,CAAC;AAAA,IAC/D,aAAa,KAAK,aAAa;AAAA,IAC/B,eAAe,KAAK,eAAe;AAAA,IACnC,eAAe,KAAK,eAAe;AAAA,EACrC;AACF;AAEO,SAAS,oCAAoC,MAAoC;AACtF,SAAO;AAAA,IACL,WAAW,KAAK,aAAa;AAAA,IAC7B,MAAM,KAAK,MAAM;AAAA,IACjB,aAAa,KAAK,aAAa;AAAA,IAC/B,SAAS,KAAK,SAAS;AAAA,IACvB,QAAQ,wCAAwC,KAAK,QAAQ,CAAC;AAAA,IAC9D,SAAS,yCAAyC,KAAK,SAAS,CAAC;AAAA,IACjE,aAAa,KAAK,aAAa;AAAA,IAC/B,eAAe,KAAK,eAAe;AAAA,IACnC,eAAe,CAAC,KAAK,eAAe,IAChC,KAAK,eAAe,IACpB,OAAO;AAAA,MACL,OAAO,QAAQ,KAAK,eAAe,CAAC,EAAE,IAAI,CAAC,CAAC,IAAI,EAAE,MAAqB,CAAC,IAAI,EAAE,CAAC;AAAA,IACjF;AAAA,EACN;AACF;AAkBO,SAAS,0CACd,MACK;AACL,SAAO;AAAA,IACL,eAAe,KAAK,WAAW;AAAA,IAC/B,MAAM,KAAK,MAAM;AAAA,IACjB,aAAa,KAAK,aAAa;AAAA,IAC/B,SAAS,KAAK,SAAS;AAAA,IACvB,QAAQ,sCAAsC,KAAK,QAAQ,CAAC;AAAA,IAC5D,SAAS,uCAAuC,KAAK,SAAS,CAAC;AAAA,IAC/D,cAAc,KAAK,cAAc;AAAA,IACjC,YAAY,KAAK,YAAY;AAAA,IAC7B,qBAAqB,KAAK,qBAAqB;AAAA,IAC/C,mBAAmB,CAAC,KAAK,mBAAmB,IACxC,KAAK,mBAAmB,IACxB,KAAK,mBAAmB,EAAE,IAAI,CAAC,MAAW;AACxC,aAAO;AAAA,IACT,CAAC;AAAA,IACL,oBAAoB,CAAC,KAAK,oBAAoB,IAC1C,KAAK,oBAAoB,IACzB,4DAA4D,KAAK,oBAAoB,CAAC;AAAA,EAC5F;AACF;AAEO,SAAS,4CACd,MACiC;AACjC,SAAO;AAAA,IACL,WAAW,KAAK,aAAa;AAAA,IAC7B,MAAM,KAAK,MAAM;AAAA,IACjB,aAAa,KAAK,aAAa;AAAA,IAC/B,SAAS,KAAK,SAAS;AAAA,IACvB,QAAQ,wCAAwC,KAAK,QAAQ,CAAC;AAAA,IAC9D,SAAS,yCAAyC,KAAK,SAAS,CAAC;AAAA,IACjE,cAAc,KAAK,cAAc;AAAA,IACjC,YAAY,KAAK,YAAY;AAAA,IAC7B,qBAAqB,KAAK,qBAA
qB;AAAA,IAC/C,mBAAmB,CAAC,KAAK,mBAAmB,IACxC,KAAK,mBAAmB,IACxB,KAAK,mBAAmB,EAAE,IAAI,CAAC,OAAY;AACzC,aAAO;AAAA,IACT,CAAC;AAAA,IACL,oBAAoB,CAAC,KAAK,oBAAoB,IAC1C,KAAK,oBAAoB,IACzB,8DAA8D,KAAK,oBAAoB,CAAC;AAAA,EAC9F;AACF;AAGO,IAAK,mDAAL,kBAAKC,sDAAL;AAEL,EAAAA,kDAAA,UAAO;AAEP,EAAAA,kDAAA,cAAW;AAJD,SAAAA;AAAA,GAAA;AAkBL,IAAK,iDAAL,kBAAKC,oDAAL;AAEL,EAAAA,gDAAA,eAAY;AAFF,SAAAA;AAAA,GAAA;AAeL,IAAK,0DAAL,kBAAKC,6DAAL;AAEL,EAAAA,yDAAA,QAAK;AAEL,EAAAA,yDAAA,QAAK;AAEL,EAAAA,yDAAA,QAAK;AAEL,EAAAA,yDAAA,QAAK;AAEL,EAAAA,yDAAA,QAAK;AAEL,EAAAA,yDAAA,QAAK;AAZK,SAAAA;AAAA,GAAA;AA8BL,IAAK,wDAAL,kBAAKC,2DAAL;AAEL,EAAAA,uDAAA,YAAS;AAET,EAAAA,uDAAA,sBAAmB;AAJT,SAAAA;AAAA,GAAA;AA2BL,SAAS,4DACd,MACK;AACL,SAAO;AAAA,IACL,MAAM,KAAK,MAAM;AAAA,IACjB,eAAe,KAAK,eAAe;AAAA,IACnC,eAAe,KAAK,eAAe;AAAA,EACrC;AACF;AAEO,SAAS,8DACd,MACmD;AACnD,SAAO;AAAA,IACL,MAAM,KAAK,MAAM;AAAA,IACjB,eAAe,KAAK,eAAe;AAAA,IACnC,eAAe,KAAK,eAAe;AAAA,EACrC;AACF;AAGO,IAAK,mDAAL,kBAAKC,sDAAL;AAEL,EAAAA,kDAAA,gBAAa;AAFH,SAAAA;AAAA,GAAA;AAoCL,SAAS,sBAAsB,MAAwB;AAC5D,SAAO;AAAA,IACL,eAAe,KAAK,WAAW;AAAA,IAC/B,MAAM,KAAK,MAAM;AAAA,IACjB,aAAa,KAAK,aAAa;AAAA,IAC/B,SAAS,KAAK,SAAS;AAAA,IACvB,QAAQ,sCAAsC,KAAK,QAAQ,CAAC;AAAA,IAC5D,SAAS,uCAAuC,KAAK,SAAS,CAAC;AAAA,IAC/D,KAAK,KAAK,KAAK;AAAA,IACf,aAAa,CAAC,KAAK,aAAa,IAC5B,KAAK,aAAa,IAClB,4BAA4B,KAAK,aAAa,CAAC;AAAA,IACnD,YAAY,KAAK,YAAY;AAAA,IAC7B,SAAS,KAAK,SAAS;AAAA,IACvB,WAAW,KAAK,WAAW;AAAA,IAC3B,qBAAqB,KAAK,qBAAqB;AAAA,IAC/C,gBAAgB,KAAK,gBAAgB;AAAA,IACrC,cAAc,CAAC,KAAK,cAAc,IAC9B,KAAK,cAAc,IACnB,yCAAyC,KAAK,cAAc,CAAC;AAAA,EACnE;AACF;AAEO,SAAS,wBAAwB,MAAwB;AAC9D,SAAO;AAAA,IACL,WAAW,KAAK,aAAa;AAAA,IAC7B,MAAM,KAAK,MAAM;AAAA,IACjB,aAAa,KAAK,aAAa;AAAA,IAC/B,SAAS,KAAK,SAAS;AAAA,IACvB,QAAQ,wCAAwC,KAAK,QAAQ,CAAC;AAAA,IAC9D,SAAS,yCAAyC,KAAK,SAAS,CAAC;AAAA,IACjE,KAAK,KAAK,KAAK;AAAA,IACf,aAAa,CAAC,KAAK,aAAa,IAC5B,KAAK,aAAa,IAClB,8BAA8B,KAAK,aAAa,CAAC;AAAA,IACrD,YAAY,KAAK,YAAY;AAAA,IAC7B,SAAS,KAAK,SAAS;AAAA,IACvB,WAAW,KAAK,WAAW;AAAA,IAC3B,qBAAqB,KAAK,qBAAqB;AAAA,IAC/C,gBAAgB,KAAK,gBAAgB;AAAA,IACrC,cAAc,CA
AC,KAAK,cAAc,IAC9B,KAAK,cAAc,IACnB,2CAA2C,KAAK,cAAc,CAAC;AAAA,EACrE;AACF;AAQO,SAAS,4BAA4B,MAA8B;AACxE,SAAO,EAAE,OAAG,yCAAgB,KAAK,wBAAwB,CAAC,CAAC,EAAE;AAC/D;AAEO,SAAS,8BAA8B,MAA8B;AAC1E,SAAO;AAAA,IACL,0BAAsB,yCAAgB,MAAM,CAAC,CAAC;AAAA,EAChD;AACF;AAoBO,SAAS,oCAAoC,MAAsC;AACxF,SAAO;AAAA,IACL,eAAe,KAAK,WAAW;AAAA,IAC/B,MAAM,KAAK,MAAM;AAAA,IACjB,aAAa,KAAK,aAAa;AAAA,IAC/B,SAAS,KAAK,SAAS;AAAA,IACvB,QAAQ,sCAAsC,KAAK,QAAQ,CAAC;AAAA,IAC5D,SAAS,uCAAuC,KAAK,SAAS,CAAC;AAAA,IAC/D,aAAa,KAAK,aAAa;AAAA,IAC/B,cAAc,KAAK,cAAc;AAAA,IACjC,QAAQ,KAAK,QAAQ;AAAA,IACrB,cAAc,CAAC,KAAK,cAAc,IAC9B,KAAK,cAAc,IACnB,yCAAyC,KAAK,cAAc,CAAC;AAAA,IACjE,WAAW,KAAK,WAAW;AAAA,IAC3B,YAAY,KAAK,YAAY;AAAA,EAC/B;AACF;AAEO,SAAS,sCAAsC,MAAsC;AAC1F,SAAO;AAAA,IACL,WAAW,KAAK,aAAa;AAAA,IAC7B,MAAM,KAAK,MAAM;AAAA,IACjB,aAAa,KAAK,aAAa;AAAA,IAC/B,SAAS,KAAK,SAAS;AAAA,IACvB,QAAQ,wCAAwC,KAAK,QAAQ,CAAC;AAAA,IAC9D,SAAS,yCAAyC,KAAK,SAAS,CAAC;AAAA,IACjE,aAAa,KAAK,aAAa;AAAA,IAC/B,cAAc,KAAK,cAAc;AAAA,IACjC,QAAQ,KAAK,QAAQ;AAAA,IACrB,cAAc,CAAC,KAAK,cAAc,IAC9B,KAAK,cAAc,IACnB,2CAA2C,KAAK,cAAc,CAAC;AAAA,IACnE,WAAW,KAAK,WAAW;AAAA,IAC3B,YAAY,KAAK,YAAY;AAAA,EAC/B;AACF;AAYO,SAAS,oCAAoC,MAAsC;AACxF,SAAO;AAAA,IACL,eAAe,KAAK,WAAW;AAAA,IAC/B,MAAM,KAAK,MAAM;AAAA,IACjB,aAAa,KAAK,aAAa;AAAA,IAC/B,SAAS,KAAK,SAAS;AAAA,IACvB,QAAQ,sCAAsC,KAAK,QAAQ,CAAC;AAAA,IAC5D,SAAS,uCAAuC,KAAK,SAAS,CAAC;AAAA,IAC/D,mBAAmB,CAAC,KAAK,mBAAmB,IACxC,KAAK,mBAAmB,IACxB,KAAK,mBAAmB,EAAE,IAAI,CAAC,MAAW;AACxC,aAAO;AAAA,IACT,CAAC;AAAA,IACL,oBAAoB,CAAC,KAAK,oBAAoB,IAC1C,KAAK,oBAAoB,IACzB,sDAAsD,KAAK,oBAAoB,CAAC;AAAA,EACtF;AACF;AAEO,SAAS,sCAAsC,MAAsC;AAC1F,SAAO;AAAA,IACL,WAAW,KAAK,aAAa;AAAA,IAC7B,MAAM,KAAK,MAAM;AAAA,IACjB,aAAa,KAAK,aAAa;AAAA,IAC/B,SAAS,KAAK,SAAS;AAAA,IACvB,QAAQ,wCAAwC,KAAK,QAAQ,CAAC;AAAA,IAC9D,SAAS,yCAAyC,KAAK,SAAS,CAAC;AAAA,IACjE,mBAAmB,CAAC,KAAK,mBAAmB,IACxC,KAAK,mBAAmB,IACxB,KAAK,mBAAmB,EAAE,IAAI,CAAC,OAAY;AACzC,aAAO;AAAA,IACT,CAAC;AAAA,IACL,oBAAoB,CAAC,KAAK,oBAAoB,IAC1C,KAAK,oBAAoB,IACzB,wDAAwD,KAAK,oBAAoB,CAAC;AAAA,EACxF;AACF;AAGO,IAAK,kDAAL,kBAAKC,qDAAL;
AAEL,EAAAA,iDAAA,YAAS;AAET,EAAAA,iDAAA,sBAAmB;AAJT,SAAAA;AAAA,GAAA;AA2BL,SAAS,sDACd,MACK;AACL,SAAO;AAAA,IACL,MAAM,KAAK,MAAM;AAAA,IACjB,eAAe,KAAK,eAAe;AAAA,IACnC,eAAe,KAAK,eAAe;AAAA,EACrC;AACF;AAEO,SAAS,wDACd,MAC6C;AAC7C,SAAO;AAAA,IACL,MAAM,KAAK,MAAM;AAAA,IACjB,eAAe,KAAK,eAAe;AAAA,IACnC,eAAe,KAAK,eAAe;AAAA,EACrC;AACF;AAGO,IAAK,6CAAL,kBAAKC,gDAAL;AAEL,EAAAA,4CAAA,gBAAa;AAFH,SAAAA;AAAA,GAAA;AAkCL,SAAS,8BAA8B,MAAgC;AAC5E,SAAO;AAAA,IACL,eAAe,KAAK,WAAW;AAAA,IAC/B,MAAM,KAAK,MAAM;AAAA,IACjB,aAAa,KAAK,aAAa;AAAA,IAC/B,SAAS,KAAK,SAAS;AAAA,IACvB,QAAQ,sCAAsC,KAAK,QAAQ,CAAC;AAAA,IAC5D,SAAS,uCAAuC,KAAK,SAAS,CAAC;AAAA,IAC/D,KAAK,KAAK,KAAK;AAAA,IACf,cAAc,CAAC,KAAK,cAAc,IAC9B,KAAK,cAAc,IACnB,yCAAyC,KAAK,cAAc,CAAC;AAAA,IACjE,QAAQ,KAAK,QAAQ;AAAA,IACrB,uBAAuB,CAAC,KAAK,uBAAuB,IAChD,KAAK,uBAAuB,IAC5B,gCAAgC,KAAK,uBAAuB,CAAC;AAAA,IACjE,iBAAiB,KAAK,iBAAiB;AAAA,IACvC,yBAAyB,KAAK,yBAAyB;AAAA,IACvD,gBAAgB,CAAC,KAAK,gBAAgB,IAClC,KAAK,gBAAgB,IACrB,uCAAuC,KAAK,gBAAgB,CAAC;AAAA,EACnE;AACF;AAEO,SAAS,gCAAgC,MAAgC;AAC9E,SAAO;AAAA,IACL,WAAW,KAAK,aAAa;AAAA,IAC7B,MAAM,KAAK,MAAM;AAAA,IACjB,aAAa,KAAK,aAAa;AAAA,IAC/B,SAAS,KAAK,SAAS;AAAA,IACvB,QAAQ,wCAAwC,KAAK,QAAQ,CAAC;AAAA,IAC9D,SAAS,yCAAyC,KAAK,SAAS,CAAC;AAAA,IACjE,KAAK,KAAK,KAAK;AAAA,IACf,cAAc,CAAC,KAAK,cAAc,IAC9B,KAAK,cAAc,IACnB,2CAA2C,KAAK,cAAc,CAAC;AAAA,IACnE,QAAQ,KAAK,QAAQ;AAAA,IACrB,uBAAuB,CAAC,KAAK,uBAAuB,IAChD,KAAK,uBAAuB,IAC5B,kCAAkC,KAAK,uBAAuB,CAAC;AAAA,IACnE,iBAAiB,CAAC,KAAK,iBAAiB,IACpC,KAAK,iBAAiB,IACtB,OAAO;AAAA,MACL,OAAO,QAAQ,KAAK,iBAAiB,CAAC,EAAE,IAAI,CAAC,CAAC,IAAI,EAAE,MAAqB,CAAC,IAAI,EAAE,CAAC;AAAA,IACnF;AAAA,IACJ,yBAAyB,KAAK,yBAAyB;AAAA,IACvD,gBAAgB,CAAC,KAAK,gBAAgB,IAClC,KAAK,gBAAgB,IACrB,yCAAyC,KAAK,gBAAgB,CAAC;AAAA,EACrE;AACF;AAoBO,SAAS,gCAAgC,MAAkC;AAChF,SAAO;AAAA,IACL,OAAO,KAAK,OAAO;AAAA,IACnB,kBAAkB,KAAK,kBAAkB;AAAA,IACzC,iBAAiB,KAAK,iBAAiB;AAAA,IACvC,WAAW,KAAK,WAAW;AAAA,IAC3B,aAAa,KAAK,aAAa;AAAA,IAC/B,MAAM,KAAK,MAAM;AAAA,IACjB,MAAM,CAAC,KAAK,MAAM,IACd,KAAK,MAAM,IACX,KAAK,MAAM,EAAE,IAAI,CAAC,MAAW;AAC3B,aAAO;AAAA,IACT,CAAC;AAAA
,EACP;AACF;AAEO,SAAS,kCAAkC,MAAkC;AAClF,SAAO;AAAA,IACL,OAAO,KAAK,OAAO;AAAA,IACnB,kBAAkB,KAAK,kBAAkB;AAAA,IACzC,iBAAiB,KAAK,iBAAiB;AAAA,IACvC,WAAW,KAAK,WAAW;AAAA,IAC3B,aAAa,KAAK,aAAa;AAAA,IAC/B,MAAM,KAAK,MAAM;AAAA,IACjB,MAAM,CAAC,KAAK,MAAM,IACd,KAAK,MAAM,IACX,KAAK,MAAM,EAAE,IAAI,CAAC,OAAY;AAC5B,aAAO;AAAA,IACT,CAAC;AAAA,EACP;AACF;AAGO,IAAK,6CAAL,kBAAKC,gDAAL;AAEL,EAAAA,4CAAA,iBAAc;AAEd,EAAAA,4CAAA,UAAO;AAEP,EAAAA,4CAAA,WAAQ;AANE,SAAAA;AAAA,GAAA;AA4BL,SAAS,uCAAuC,MAAyC;AAC9F,SAAO;AAAA,IACL,MAAM,KAAK,MAAM;AAAA,IACjB,sBAAsB,CAAC,KAAK,gCAAgC,IACxD,KAAK,gCAAgC,IACrC,yCAAyC,KAAK,gCAAgC,CAAC;AAAA,EACrF;AACF;AAEO,SAAS,yCAAyC,MAAyC;AAChG,SAAO;AAAA,IACL,MAAM,KAAK,MAAM;AAAA,IACjB,gCAAgC,CAAC,KAAK,sBAAsB,IACxD,KAAK,sBAAsB,IAC3B,2CAA2C,KAAK,sBAAsB,CAAC;AAAA,EAC7E;AACF;AAGO,IAAK,wCAAL,kBAAKC,2CAAL;AAEL,EAAAA,uCAAA,UAAO;AAEP,EAAAA,uCAAA,gBAAa;AAEb,EAAAA,uCAAA,gBAAa;AANH,SAAAA;AAAA,GAAA;AAgCL,SAAS,yCACd,MACK;AACL,SAAO;AAAA,IACL,MAAM,KAAK,MAAM;AAAA,IACjB,aAAa,KAAK,aAAa;AAAA,IAC/B,QAAQ,KAAK,QAAQ;AAAA,IACrB,QAAQ,CAAC,KAAK,QAAQ,IAAI,KAAK,QAAQ,IAAI,+BAA+B,KAAK,QAAQ,CAAC;AAAA,EAC1F;AACF;AAEO,SAAS,2CACd,MACgC;AAChC,SAAO;AAAA,IACL,MAAM,KAAK,MAAM;AAAA,IACjB,aAAa,KAAK,aAAa;AAAA,IAC/B,QAAQ,KAAK,QAAQ;AAAA,IACrB,QAAQ,CAAC,KAAK,QAAQ,IAAI,KAAK,QAAQ,IAAI,iCAAiC,KAAK,QAAQ,CAAC;AAAA,EAC5F;AACF;AAcO,SAAS,+BAA+B,MAAiC;AAC9E,SAAO;AAAA,IACL,MAAM,KAAK,MAAM;AAAA,IACjB,YAAY,KAAK,YAAY;AAAA,IAC7B,UAAU,CAAC,KAAK,UAAU,IACtB,KAAK,UAAU,IACf,KAAK,UAAU,EAAE,IAAI,CAAC,MAAW;AAC/B,aAAO;AAAA,IACT,CAAC;AAAA,IACL,sBAAsB,KAAK,sBAAsB;AAAA,EACnD;AACF;AAEO,SAAS,iCAAiC,MAAiC;AAChF,SAAO;AAAA,IACL,MAAM,KAAK,MAAM;AAAA,IACjB,YAAY,KAAK,YAAY;AAAA,IAC7B,UAAU,CAAC,KAAK,UAAU,IACtB,KAAK,UAAU,IACf,KAAK,UAAU,EAAE,IAAI,CAAC,MAAW;AAC/B,aAAO;AAAA,IACT,CAAC;AAAA,IACL,sBAAsB,KAAK,sBAAsB;AAAA,EACnD;AACF;AAWO,SAAS,mCAAmC,MAAqC;AACtF,SAAO,EAAE,eAAe,KAAK,WAAW,GAAG,aAAa,KAAK,aAAa,EAAE;AAC9E;AAEO,SAAS,qCAAqC,MAAqC;AACxF,SAAO;AAAA,IACL,WAAW,KAAK,aAAa;AAAA,IAC7B,aAAa,KAAK,aAAa;AAAA,EACjC;AACF;AAUO,SAAS,wCAAwC,MAA0C;AAChG,UAAQ,KAAK,WAAW;AAAA,IACtB,KAAK
;AACH,aAAO,0CAA0C,IAAuC;AAAA,IAE1F,KAAK;AACH,aAAO,sCAAsC,IAAmC;AAAA,IAElF,KAAK;AACH,aAAO,+BAA+B,IAA4B;AAAA,IAEpE,KAAK;AACH,aAAO,oCAAoC,IAAiC;AAAA,IAE9E;AACE,aAAO,mCAAmC,IAAI;AAAA,EAClD;AACF;AAEO,SAAS,0CACd,MAC+B;AAC/B,UAAQ,KAAK,aAAa,GAAG;AAAA,IAC3B,KAAK;AACH,aAAO,4CAA4C,IAAuC;AAAA,IAE5F,KAAK;AACH,aAAO,wCAAwC,IAAmC;AAAA,IAEpF,KAAK;AACH,aAAO,iCAAiC,IAA4B;AAAA,IAEtE,KAAK;AACH,aAAO,sCAAsC,IAAiC;AAAA,IAEhF;AACE,aAAO,qCAAqC,IAAI;AAAA,EACpD;AACF;AAQO,SAAS,0CACd,MACK;AACL,SAAO,EAAE,eAAe,KAAK,WAAW,GAAG,aAAa,KAAK,aAAa,EAAE;AAC9E;AAEO,SAAS,4CACd,MACiC;AACjC,SAAO;AAAA,IACL,WAAW,KAAK,aAAa;AAAA,IAC7B,aAAa,KAAK,aAAa;AAAA,EACjC;AACF;AAUO,SAAS,sCAAsC,MAAwC;AAC5F,SAAO,EAAE,eAAe,KAAK,WAAW,GAAG,aAAa,KAAK,aAAa,GAAG,KAAK,KAAK,KAAK,EAAE;AAChG;AAEO,SAAS,wCAAwC,MAAwC;AAC9F,SAAO;AAAA,IACL,WAAW,KAAK,aAAa;AAAA,IAC7B,aAAa,KAAK,aAAa;AAAA,IAC/B,KAAK,KAAK,KAAK;AAAA,EACjB;AACF;AAYO,SAAS,+BAA+B,MAAiC;AAC9E,SAAO;AAAA,IACL,eAAe,KAAK,WAAW;AAAA,IAC/B,aAAa,KAAK,aAAa;AAAA,IAC/B,KAAK,KAAK,KAAK;AAAA,IACf,cAAc,KAAK,cAAc;AAAA,EACnC;AACF;AAEO,SAAS,iCAAiC,MAAiC;AAChF,SAAO;AAAA,IACL,WAAW,KAAK,aAAa;AAAA,IAC7B,aAAa,KAAK,aAAa;AAAA,IAC/B,KAAK,KAAK,KAAK;AAAA,IACf,cAAc,KAAK,cAAc;AAAA,EACnC;AACF;AAYO,SAAS,oCAAoC,MAAsC;AACxF,SAAO;AAAA,IACL,eAAe,KAAK,WAAW;AAAA,IAC/B,aAAa,KAAK,aAAa;AAAA,IAC/B,UAAU,CAAC,KAAK,UAAU,IACtB,KAAK,UAAU,IACf,yCAAyC,KAAK,UAAU,CAAC;AAAA,IAC7D,cAAc,KAAK,cAAc;AAAA,EACnC;AACF;AAEO,SAAS,sCAAsC,MAAsC;AAC1F,SAAO;AAAA,IACL,WAAW,KAAK,aAAa;AAAA,IAC7B,aAAa,KAAK,aAAa;AAAA,IAC/B,UAAU,CAAC,KAAK,UAAU,IACtB,KAAK,UAAU,IACf,2CAA2C,KAAK,UAAU,CAAC;AAAA,IAC/D,cAAc,KAAK,cAAc;AAAA,EACnC;AACF;AAYO,SAAS,sCAAsC,MAAwC;AAC5F,SAAO;AAAA,IACL,yBAAyB,KAAK,yBAAyB;AAAA,IACvD,aAAa,qDAAqD,KAAK,aAAa,CAAC;AAAA,IACrF,UAAU,CAAC,KAAK,UAAU,IACtB,KAAK,UAAU,IACf,yCAAyC,KAAK,UAAU,CAAC;AAAA,EAC/D;AACF;AAEO,SAAS,wCAAwC,MAAwC;AAC9F,SAAO;AAAA,IACL,yBAAyB,KAAK,yBAAyB;AAAA,IACvD,aAAa,uDAAuD,KAAK,aAAa,CAAC;AAAA,IACvF,UAAU,CAAC,KAAK,UAAU,IACtB,KAAK,UAAU,IACf,2CAA2C,KAAK,UAAU,CAAC;AAAA,EACjE;AACF;AAEO,SAAS,qDACd,QACO;AACP,SAAO,OAAO,IAAI,CAAC,SAAS;AAC1
B,WAAO,gDAAgD,IAAI;AAAA,EAC7D,CAAC;AACH;AAEO,SAAS,uDACd,QACO;AACP,SAAO,OAAO,IAAI,CAAC,SAAS;AAC1B,WAAO,kDAAkD,IAAI;AAAA,EAC/D,CAAC;AACH;AAYO,SAAS,gDACd,MACK;AACL,SAAO;AAAA,IACL,QAAQ,CAAC,KAAK,QAAQ,IAClB,KAAK,QAAQ,IACb,kEAAkE,KAAK,QAAQ,CAAC;AAAA,IACpF,SAAS,CAAC,KAAK,SAAS,IACpB,KAAK,SAAS,IACd,mEAAmE,KAAK,SAAS,CAAC;AAAA,IACtF,OAAO,CAAC,KAAK,OAAO,IAChB,KAAK,OAAO,IACZ,iEAAiE,KAAK,OAAO,CAAC;AAAA,EACpF;AACF;AAEO,SAAS,kDACd,MACuC;AACvC,SAAO;AAAA,IACL,QAAQ,CAAC,KAAK,QAAQ,IAClB,KAAK,QAAQ,IACb,oEAAoE,KAAK,QAAQ,CAAC;AAAA,IACtF,SAAS,CAAC,KAAK,SAAS,IACpB,KAAK,SAAS,IACd,qEAAqE,KAAK,SAAS,CAAC;AAAA,IACxF,OAAO,CAAC,KAAK,OAAO,IAChB,KAAK,OAAO,IACZ,mEAAmE,KAAK,OAAO,CAAC;AAAA,EACtF;AACF;AAEO,SAAS,kEACd,QACO;AACP,SAAO,OAAO,IAAI,CAAC,SAAS;AAC1B,WAAO,6DAA6D,IAAI;AAAA,EAC1E,CAAC;AACH;AAEO,SAAS,oEACd,QACO;AACP,SAAO,OAAO,IAAI,CAAC,SAAS;AAC1B,WAAO,+DAA+D,IAAI;AAAA,EAC5E,CAAC;AACH;AAUO,SAAS,6DACd,MACK;AACL,SAAO;AAAA,IACL,kBAAkB,KAAK,kBAAkB;AAAA,IACzC,kBAAkB,KAAK,kBAAkB;AAAA,IACzC,QAAQ,KAAK,QAAQ;AAAA,IACrB,eAAe,KAAK,eAAe;AAAA,IACnC,QAAQ,CAAC,KAAK,QAAQ,IAClB,KAAK,QAAQ,IACb,sCAAsC,KAAK,QAAQ,CAAC;AAAA,IACxD,WAAW,KAAK,WAAW;AAAA,EAC7B;AACF;AAEO,SAAS,+DACd,MACoD;AACpD,SAAO;AAAA,IACL,kBAAkB,KAAK,kBAAkB;AAAA,IACzC,kBAAkB,KAAK,kBAAkB;AAAA,IACzC,QAAQ,KAAK,QAAQ;AAAA,IACrB,eAAe,KAAK,eAAe;AAAA,IACnC,QAAQ,CAAC,KAAK,QAAQ,IAClB,KAAK,QAAQ,IACb,wCAAwC,KAAK,QAAQ,CAAC;AAAA,IAC1D,WAAW,KAAK,WAAW;AAAA,EAC7B;AACF;AAEO,SAAS,mEACd,QACO;AACP,SAAO,OAAO,IAAI,CAAC,SAAS;AAC1B,WAAO,8DAA8D,IAAI;AAAA,EAC3E,CAAC;AACH;AAEO,SAAS,qEACd,QACO;AACP,SAAO,OAAO,IAAI,CAAC,SAAS;AAC1B,WAAO,gEAAgE,IAAI;AAAA,EAC7E,CAAC;AACH;AAKO,SAAS,8DACd,MACK;AACL,SAAO;AAAA,IACL,kBAAkB,KAAK,kBAAkB;AAAA,IACzC,kBAAkB,KAAK,kBAAkB;AAAA,IACzC,kBAAkB,KAAK,kBAAkB;AAAA,IACzC,QAAQ,KAAK,QAAQ;AAAA,IACrB,eAAe,KAAK,eAAe;AAAA,IACnC,QAAQ,CAAC,KAAK,QAAQ,IAClB,KAAK,QAAQ,IACb,sCAAsC,KAAK,QAAQ,CAAC;AAAA,EAC1D;AACF;AAEO,SAAS,gEACd,MACqD;AACrD,SAAO;AAAA,IACL,kBAAkB,KAAK,kBAAkB;AAAA,IACzC,kBAAkB,KAAK,kBAAkB;AAAA,IACzC,kBAAkB,KAAK,kBAAkB;AAAA,IACzC,QAAQ,KAAK,QAAQ;AAAA,IACrB,eAAe,KAA
K,eAAe;AAAA,IACnC,QAAQ,CAAC,KAAK,QAAQ,IAClB,KAAK,QAAQ,IACb,wCAAwC,KAAK,QAAQ,CAAC;AAAA,EAC5D;AACF;AAEO,SAAS,iEACd,QACO;AACP,SAAO,OAAO,IAAI,CAAC,SAAS;AAC1B,WAAO,4DAA4D,IAAI;AAAA,EACzE,CAAC;AACH;AAEO,SAAS,mEACd,QACO;AACP,SAAO,OAAO,IAAI,CAAC,SAAS;AAC1B,WAAO,8DAA8D,IAAI;AAAA,EAC3E,CAAC;AACH;AAKO,SAAS,4DACd,MACK;AACL,SAAO;AAAA,IACL,kBAAkB,KAAK,kBAAkB;AAAA,IACzC,kBAAkB,KAAK,kBAAkB;AAAA,IACzC,kBAAkB,KAAK,kBAAkB;AAAA,IACzC,QAAQ,KAAK,QAAQ;AAAA,IACrB,eAAe,KAAK,eAAe;AAAA,IACnC,QAAQ,CAAC,KAAK,QAAQ,IAClB,KAAK,QAAQ,IACb,sCAAsC,KAAK,QAAQ,CAAC;AAAA,EAC1D;AACF;AAEO,SAAS,8DACd,MACmD;AACnD,SAAO;AAAA,IACL,kBAAkB,KAAK,kBAAkB;AAAA,IACzC,kBAAkB,KAAK,kBAAkB;AAAA,IACzC,kBAAkB,KAAK,kBAAkB;AAAA,IACzC,QAAQ,KAAK,QAAQ;AAAA,IACrB,eAAe,KAAK,eAAe;AAAA,IACnC,QAAQ,CAAC,KAAK,QAAQ,IAClB,KAAK,QAAQ,IACb,wCAAwC,KAAK,QAAQ,CAAC;AAAA,EAC5D;AACF;AAUO,SAAS,uCAAuC,MAAyC;AAC9F,SAAO;AAAA,IACL,WAAW,oDAAoD,KAAK,WAAW,CAAC;AAAA,IAChF,YAAY,CAAC,KAAK,YAAY,IAC1B,KAAK,YAAY,IACjB,kDAAkD,KAAK,YAAY,CAAC;AAAA,EAC1E;AACF;AAEO,SAAS,yCAAyC,MAAyC;AAChG,SAAO;AAAA,IACL,WAAW,sDAAsD,KAAK,WAAW,CAAC;AAAA,IAClF,YAAY,CAAC,KAAK,YAAY,IAC1B,KAAK,YAAY,IACjB,oDAAoD,KAAK,YAAY,CAAC;AAAA,EAC5E;AACF;AAEO,SAAS,oDACd,QACO;AACP,SAAO,OAAO,IAAI,CAAC,SAAS;AAC1B,WAAO,+CAA+C,IAAI;AAAA,EAC5D,CAAC;AACH;AAEO,SAAS,sDACd,QACO;AACP,SAAO,OAAO,IAAI,CAAC,SAAS;AAC1B,WAAO,iDAAiD,IAAI;AAAA,EAC9D,CAAC;AACH;AAcO,SAAS,+CACd,MACK;AACL,SAAO;AAAA,IACL,iBAAiB,KAAK,iBAAiB;AAAA,IACvC,oBAAoB,KAAK,oBAAoB;AAAA,IAC7C,eAAe,KAAK,eAAe;AAAA,IACnC,UAAU,sCAAsC,KAAK,UAAU,CAAC;AAAA,EAClE;AACF;AAEO,SAAS,iDACd,MACsC;AACtC,SAAO;AAAA,IACL,iBAAiB,KAAK,iBAAiB;AAAA,IACvC,oBAAoB,KAAK,oBAAoB;AAAA,IAC7C,eAAe,KAAK,eAAe;AAAA,IACnC,UAAU,wCAAwC,KAAK,UAAU,CAAC;AAAA,EACpE;AACF;AAUO,SAAS,kDACd,MACK;AACL,SAAO;AAAA,IACL,OAAG,yCAAgB,KAAK,wBAAwB,CAAC,CAAC;AAAA,IAClD,gBAAgB,KAAK,gBAAgB;AAAA,EACvC;AACF;AAEO,SAAS,oDACd,MACyC;AACzC,SAAO;AAAA,IACL,0BAAsB,yCAAgB,MAAM,CAAC,gBAAgB,CAAC;AAAA,IAC9D,gBAAgB,KAAK,gBAAgB;AAAA,EACvC;AACF;AAGO,IAAK,2BAAL,kBAAKC,8BAAL;AAEL,EAAAA,0BAAA,iCAA8B;AAE9B,EAAAA,0BAAA,oCAAiC;AAJvB,S
AAAA;AAAA,GAAA;AA+BL,SAAS,wDACd,MACK;AACL,SAAO;AAAA,IACL,kBAAkB,KAAK,kBAAkB;AAAA,IACzC,kBAAkB,KAAK,kBAAkB;AAAA,IACzC,QAAQ,KAAK,QAAQ;AAAA,IACrB,eAAe,KAAK,eAAe;AAAA,IACnC,QAAQ,CAAC,KAAK,QAAQ,IAClB,KAAK,QAAQ,IACb,sCAAsC,KAAK,QAAQ,CAAC;AAAA,EAC1D;AACF;AAEO,SAAS,0DACd,MAC+C;AAC/C,SAAO;AAAA,IACL,kBAAkB,KAAK,kBAAkB;AAAA,IACzC,kBAAkB,KAAK,kBAAkB;AAAA,IACzC,QAAQ,KAAK,QAAQ;AAAA,IACrB,eAAe,KAAK,eAAe;AAAA,IACnC,QAAQ,CAAC,KAAK,QAAQ,IAClB,KAAK,QAAQ,IACb,wCAAwC,KAAK,QAAQ,CAAC;AAAA,EAC5D;AACF;AAQO,SAAS,4DACd,MACK;AACL,SAAO;AAAA,IACL,kBAAkB,KAAK,kBAAkB;AAAA,IACzC,kBAAkB,KAAK,kBAAkB;AAAA,IACzC,QAAQ,KAAK,QAAQ;AAAA,IACrB,eAAe,KAAK,eAAe;AAAA,IACnC,QAAQ,CAAC,KAAK,QAAQ,IAClB,KAAK,QAAQ,IACb,sCAAsC,KAAK,QAAQ,CAAC;AAAA,IACxD,kBAAkB,KAAK,kBAAkB;AAAA,EAC3C;AACF;AAEO,SAAS,8DACd,MACmD;AACnD,SAAO;AAAA,IACL,kBAAkB,KAAK,kBAAkB;AAAA,IACzC,kBAAkB,KAAK,kBAAkB;AAAA,IACzC,QAAQ,KAAK,QAAQ;AAAA,IACrB,eAAe,KAAK,eAAe;AAAA,IACnC,QAAQ,CAAC,KAAK,QAAQ,IAClB,KAAK,QAAQ,IACb,wCAAwC,KAAK,QAAQ,CAAC;AAAA,IAC1D,kBAAkB,KAAK,kBAAkB;AAAA,EAC3C;AACF;AAQO,SAAS,gCAAgC,MAAgC;AAC9E,SAAO;AAAA,IACL,WAAW,uCAAuC,KAAK,OAAO,CAAC;AAAA,EACjE;AACF;AAEO,SAAS,qCAAqC,QAA6C;AAChG,SAAO,OAAO,IAAI,CAAC,SAAS;AAC1B,WAAO,gCAAgC,IAAI;AAAA,EAC7C,CAAC;AACH;AAEO,SAAS,uCACd,QACO;AACP,SAAO,OAAO,IAAI,CAAC,SAAS;AAC1B,WAAO,kCAAkC,IAAI;AAAA,EAC/C,CAAC;AACH;AAQO,SAAS,4BAA4B,MAA8B;AACxE,SAAO;AAAA,IACL,SAAS,CAAC,KAAK,SAAS,IACpB,KAAK,SAAS,IACd,KAAK,SAAS,EAAE,IAAI,CAAC,MAAW;AAC9B,aAAO;AAAA,IACT,CAAC;AAAA,EACP;AACF;AAGO,IAAK,2BAAL,kBAAKC,8BAAL;AAEL,EAAAA,0BAAA,iBAAc;AAFJ,SAAAA;AAAA,GAAA;AAsBL,SAAS,4BAA4B,MAA8B;AACxE,SAAO;AAAA,IACL,cAAc,CAAC,KAAK,cAAc,IAC9B,KAAK,cAAc,IACnB,KAAK,cAAc,EAAE,IAAI,CAAC,MAAW;AACnC,aAAO;AAAA,IACT,CAAC;AAAA,IACL,uBAAuB,CAAC,KAAK,uBAAuB,IAChD,KAAK,uBAAuB,IAC5B,KAAK,uBAAuB,EAAE,IAAI,CAAC,MAAW;AAC5C,aAAO;AAAA,IACT,CAAC;AAAA,EACP;AACF;AAQO,SAAS,qBAAqB,MAAuB;AAC1D,SAAO;AAAA,IACL,YAAY,CAAC,KAAK,YAAY,IAC1B,KAAK,YAAY,IACjB,KAAK,YAAY,EAAE,IAAI,CAAC,MAAW;AACjC,aAAO;AAAA,IACT,CAAC;AAAA,EACP;AACF;AAEO,SAAS,wDACd,MACK;AACL,SAAO,EAAE,eAAe,KAAK,eAAe,G
AAG,mBAAmB,KAAK,mBAAmB,EAAE;AAC9F;AAEO,SAAS,0DAA0D,MAAW;AACnF,SAAO;AAAA,IACL,eAAe,KAAK,eAAe;AAAA,IACnC,mBAAmB,KAAK,mBAAmB;AAAA,EAC7C;AACF;AAEO,SAAS,wDACd,MACK;AACL,SAAO,EAAE,kBAAkB,KAAK,kBAAkB,EAAE;AACtD;AAEO,SAAS,0DAA0D,MAAW;AACnF,SAAO;AAAA,IACL,kBAAkB,KAAK,kBAAkB;AAAA,EAC3C;AACF;",
|
|
6
|
+
"names": ["KnownSearchFieldDataType", "KnownLexicalAnalyzerName", "KnownLexicalNormalizerName", "KnownVectorEncodingFormat", "KnownLexicalTokenizerName", "KnownTokenFilterName", "KnownCharFilterName", "KnownRegexFlags", "KnownRankingOrder", "KnownVectorSearchAlgorithmKind", "KnownVectorSearchAlgorithmMetric", "KnownVectorSearchVectorizerKind", "KnownAzureOpenAIModelName", "KnownAIFoundryModelCatalogName", "KnownVectorSearchCompressionRescoreStorageMethod", "KnownVectorSearchCompressionKind", "KnownVectorSearchCompressionTarget", "KnownKnowledgeBaseModelKind", "KnownKnowledgeSourceKind", "KnownKnowledgeSourceContentExtractionMode", "KnownKnowledgeSourceSynchronizationStatus", "KnownSearchIndexerDataSourceType", "KnownBlobIndexerParsingMode", "KnownMarkdownParsingSubmode", "KnownMarkdownHeaderDepth", "KnownBlobIndexerDataToExtract", "KnownBlobIndexerImageAction", "KnownBlobIndexerPDFTextRotationAlgorithm", "KnownIndexerExecutionEnvironment", "KnownKeyPhraseExtractionSkillLanguage", "KnownOcrSkillLanguage", "KnownOcrLineEnding", "KnownImageAnalysisSkillLanguage", "KnownVisualFeature", "KnownImageDetail", "KnownSentimentSkillLanguage", "KnownEntityCategory", "KnownEntityRecognitionSkillLanguage", "KnownPIIDetectionSkillMaskingMode", "KnownSplitSkillLanguage", "KnownTextSplitMode", "KnownCustomEntityLookupSkillLanguage", "KnownTextTranslationSkillLanguage", "KnownDocumentIntelligenceLayoutSkillOutputFormat", "KnownDocumentIntelligenceLayoutSkillOutputMode", "KnownDocumentIntelligenceLayoutSkillMarkdownHeaderDepth", "KnownDocumentIntelligenceLayoutSkillExtractionOptions", "KnownDocumentIntelligenceLayoutSkillChunkingUnit", "KnownContentUnderstandingSkillExtractionOptions", "KnownContentUnderstandingSkillChunkingUnit", "KnownChatCompletionExtraParametersBehavior", "KnownChatCompletionResponseFormatType", "KnownIndexProjectionMode", "KnownIndexerResyncOption"]
|
|
7
|
+
}
|