@azure/ai-language-text 1.0.0-beta.1 → 1.0.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. It is provided for informational purposes only and reflects the changes between the package versions as they appear in their respective public registries.
@@ -1 +1 @@
- {"version":3,"file":"textAnalysisClient.js","sourceRoot":"","sources":["../../src/textAnalysisClient.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;;AAoBlC,OAAO,EAAE,uBAAuB,EAAE,WAAW,EAAE,MAAM,aAAa,CAAC;AACnE,OAAO,EAEL,+BAA+B,GAChC,MAAM,2BAA2B,CAAC;AACnC,OAAO,EAAkC,iBAAiB,EAAE,MAAM,kBAAkB,CAAC;AACrF,OAAO,EAAwB,SAAS,EAAE,MAAM,iBAAiB,CAAC;AAClE,OAAO,EAAiB,mBAAmB,EAAE,MAAM,qBAAqB,CAAC;AACzE,OAAO,EACL,+BAA+B,EAC/B,0BAA0B,EAC1B,mBAAmB,EACnB,aAAa,GACd,MAAM,QAAQ,CAAC;AAChB,OAAO,EACL,qBAAqB,EACrB,qBAAqB,EACrB,iCAAiC,EACjC,wBAAwB,EACxB,gBAAgB,EAChB,oBAAoB,GACrB,MAAM,OAAO,CAAC;AACf,OAAO,EAAE,UAAU,EAAE,qBAAqB,EAAE,MAAM,cAAc,CAAC;AACjE,OAAO,EAAE,eAAe,EAAE,MAAM,6BAA6B,CAAC;AAC9D,OAAO,EAAE,MAAM,EAAE,MAAM,UAAU,CAAC;AAClC,OAAO,EAAE,qCAAqC,EAAE,MAAM,4BAA4B,CAAC;AAEnF;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;GAoCG;AACH,MAAM,OAAO,kBAAkB;IA8D7B,YACE,WAAmB,EACnB,UAA2C,EAC3C,UAAqC,EAAE;QAEvC,MAAM,EAAE,kBAAkB,GAAG,IAAI,EAAE,eAAe,GAAG,IAAI,KAAyB,OAAO,EAA3B,eAAe,UAAK,OAAO,EAAnF,yCAAyE,CAAU,CAAC;QAC1F,IAAI,CAAC,kBAAkB,GAAG,kBAAkB,CAAC;QAC7C,IAAI,CAAC,eAAe,GAAG,eAAe,CAAC;QAEvC,MAAM,uBAAuB,mCACxB,eAAe,GACf;YACD,cAAc,EAAE;gBACd,MAAM,EAAE,MAAM,CAAC,IAAI;gBACnB,4BAA4B,EAAE,CAAC,6BAA6B,EAAE,iBAAiB,CAAC;aACjF;SACF,CACF,CAAC;QAEF,IAAI,CAAC,OAAO,GAAG,IAAI,eAAe,CAAC,WAAW,EAAE,uBAAuB,CAAC,CAAC;QAEzE,MAAM,UAAU,GAAG,iBAAiB,CAAC,UAAU,CAAC;YAC9C,CAAC,CAAC,+BAA+B,CAAC,EAAE,UAAU,EAAE,MAAM,EAAE,uBAAuB,EAAE,CAAC;YAClF,CAAC,CAAC,qCAAqC,CAAC,UAAU,CAAC,CAAC;QAEtD,IAAI,CAAC,OAAO,CAAC,QAAQ,CAAC,SAAS,CAAC,UAAU,CAAC,CAAC;QAC5C,IAAI,CAAC,QAAQ,GAAG,mBAAmB,CAAC;YAClC,WAAW,EAAE,yBAAyB;YACtC,cAAc,EAAE,WAAW;YAC3B,SAAS,EAAE,6BAA6B;SACzC,CAAC,CAAC;IACL,CAAC;IAmRD,iBAAiB;IACV,KAAK,CAAC,OAAO,CAClB,UAAsB,EACtB,SAAoE,EACpE,8BAEwE,EACxE,OAA4E;QAE5E,IAAI,WAA+E,CAAC;QAEpF,IAAI,SAAS,CAAC,MAAM,KAAK,CAAC,EAAE;YAC1B,MAAM,IAAI,KAAK,CAAC,uCAAuC,CAAC,CAAC;SAC1D;QAED,IAAI,UAA0D,CAAC;QAC/D,IAAI,aAAa,CAAC,SAAS,CAAC,EAAE;YAC5B,IAAI,UAAU,KAAK,mBAAmB,EAAE;gBACtC,UAAU,GAAG,+BAA+B,CAC1C,SAAS,EACT,OAAO,8BAA8B,KAAK,QAAQ;oBAChD,CAAC,CAAC,8BAA8B;oBAChC,CAAC,CAAC,IAAI,CAAC,kBAAkB,CAC5B,CAAC;aACH;iBAAM;gBACL,UAAU,GAAG,0BAA0B,CACrC,SAAS,EACT,OAAO,8BAA8B,KAAK,QAAQ;oBAChD,CAAC,CAAC,8BAA8B;oBAChC,CAAC,CAAC,IAAI,CAAC,eAAe,CACzB,CAAC;aACH;YACD,WAAW,GAAG,OAAO,IAAK,EAAU,CAAC;SACtC;aAAM;YACL,UAAU,GAAG,SAAS,CAAC;YACvB,WAAW;gBACR,8BAC8B,IAAI,EAAE,CAAC;SACzC;QACD,MAAM,EAAE,OAAO,EAAE,gBAAgB,EAAE,IAAI,EAAE,MAAM,EAAE,GAAG,mBAAmB,CAAC,WAAW,CAAC,CAAC;QACrF,OAAO,IAAI,CAAC,QAAQ,CAAC,QAAQ,CAC3B,4BAA4B,EAC5B,gBAAgB,EAChB,KAAK,EAAE,cAA4C,EAAE,EAAE,CACrD,UAAU,CACR,IAAI,CAAC,OAAO;aACT,OAAO,CACN;YACE,IAAI,EAAE,UAAU;YAChB,aAAa,EAAE;gBACb,SAAS,EAAE,UAAU;aACtB;YACD,UAAU,EAAE,MAAM;SACZ,EACR,cAAc,CACf;aACA,IAAI,CACH,CAAC,MAAM,EAAE,EAAE,CACT,qBAAqB,CAAC,UAAU,EAAE,UAAU,EAAE,MAAM,CAA8B,CACrF,CACJ,CACJ,CAAC;IACJ,CAAC;IAwHD,iBAAiB;IACjB,KAAK,CAAC,iBAAiB,CACrB,OAA6B,EAC7B,SAAyC,EACzC,iBAAqD,EACrD,UAAoC,EAAE;QAEtC,IAAI,WAAqC,CAAC;QAC1C,IAAI,UAA+B,CAAC;QAEpC,IAAI,CAAC,KAAK,CAAC,OAAO,CAAC,SAAS,CAAC,IAAI,SAAS,CAAC,MAAM,KAAK,CAAC,EAAE;YACvD,MAAM,IAAI,KAAK,CAAC,uCAAuC,CAAC,CAAC;SAC1D;QAED,IAAI,aAAa,CAAC,SAAS,CAAC,EAAE;YAC5B,MAAM,QAAQ,GAAI,iBAA4B,IAAI,IAAI,CAAC,eAAe,CAAC;YACvE,UAAU,GAAG,0BAA0B,CAAC,SAAS,EAAE,QAAQ,CAAC,CAAC;YAC7D,WAAW,GAAG,OAAO,CAAC;SACvB;aAAM;YACL,UAAU,GAAG,SAAS,CAAC;YACvB,WAAW,GAAG,iBAA6C,CAAC;SAC7D;QACD,MAAM,WAAW,GAAG,OAAO,CAAC,GAAG,CAC7B,CAAC,EAA6B,EAA2B,EAAE;gBAA1D,EAAE,IAAI,EAAE,UAAU,OAAW,EAAN,IAAI,cAA3B,sBAA6B,CAAF;YAAgC,OAAA,CAAC;gBAC3D,IAAI;gBACJ,UAAU;gBACV,UAAU,EAAE,IAAI;aACjB,CAAC,CAAA;SAAA,CACH,CAAC;QACF,MAAM,EAAE,iBAAiB,EAAE,kBAAkB,EAAE,WAAW,KAAc,WAAW,EAApB,IAAI,UAAK,WAAW,EAA7E,0DAA+D,CAA
c,CAAC;QACpF,MAAM,GAAG,GAAG,qBAAqB,CAAC;YAChC,MAAM,EAAE,IAAI,CAAC,OAAO;YACpB,aAAa,EAAE,IAAI;YACnB,SAAS,EAAE,UAAU;YACrB,qBAAqB,EAAE,EAAE,WAAW,EAAE;YACtC,kBAAkB,EAAE,EAAE,iBAAiB,EAAE;YACzC,KAAK,EAAE,WAAW;YAClB,OAAO,EAAE,IAAI,CAAC,QAAQ;SACvB,CAAC,CAAC;QAEH,MAAM,MAAM,GAAG,IAAI,SAAS,CAC1B,GAAoD,EACpD;YACE,YAAY,EAAE,kBAAkB;YAChC,aAAa,EAAE,oBAAoB,CAAC;gBAClC,MAAM,EAAE,IAAI,CAAC,OAAO;gBACpB,OAAO,EAAE,IAAI,CAAC,QAAQ;gBACtB,SAAS,EAAE,UAAU;gBACrB,SAAS,kCAAO,IAAI,KAAE,iBAAiB,GAAE;aAC1C,CAAC;YACF,WAAW,EAAE,wBAAwB,CAAC,UAAU,CAAC;YACjD,MAAM,EAAE,qBAAqB,CAAC;gBAC5B,MAAM,EAAE,IAAI,CAAC,OAAO;gBACpB,OAAO,EAAE,IAAI,CAAC,QAAQ;gBACtB,OAAO,EAAE,IAAI;aACd,CAAC;SACH,CACF,CAAC;QAEF,MAAM,MAAM,CAAC,IAAI,EAAE,CAAC;QACpB,OAAO,MAAM,CAAC;IAChB,CAAC;IA0BD,iBAAiB;IACjB,KAAK,CAAC,yBAAyB,CAC7B,eAAuB,EACvB,UAA4C,EAAE;QAE9C,MAAM,EAAE,iBAAiB,EAAE,kBAAkB,KAAc,OAAO,EAAhB,IAAI,UAAK,OAAO,EAA5D,2CAAkD,CAAU,CAAC;QACnE,MAAM,SAAS,GAAG,gBAAgB,CAAC,eAAe,CAAC,CAAC;QACpD,MAAM,GAAG,GAAG,iCAAiC,CAAC;YAC5C,MAAM,EAAE,IAAI,CAAC,OAAO;YACpB,OAAO,kCAAO,IAAI,KAAE,iBAAiB,GAAE;YACvC,OAAO,EAAE,IAAI,CAAC,QAAQ;SACvB,CAAC,CAAC;QAEH,MAAM,MAAM,GAAG,IAAI,SAAS,CAC1B,GAAoD,EACpD;YACE,YAAY,EAAE,kBAAkB;YAChC,UAAU,EAAE,eAAe;YAC3B,aAAa,EAAE,oBAAoB,CAAC;gBAClC,MAAM,EAAE,IAAI,CAAC,OAAO;gBACpB,OAAO,EAAE,IAAI,CAAC,QAAQ;gBACtB,SAAS;gBACT,SAAS,kCAAO,IAAI,KAAE,iBAAiB,GAAE;aAC1C,CAAC;YACF,WAAW,EAAE,wBAAwB,EAAE;YACvC,MAAM,EAAE,qBAAqB,CAAC;gBAC5B,MAAM,EAAE,IAAI,CAAC,OAAO;gBACpB,OAAO,EAAE,IAAI,CAAC,QAAQ;gBACtB,OAAO,EAAE,IAAI;aACd,CAAC;SACH,CACF,CAAC;QAEF,MAAM,MAAM,CAAC,IAAI,EAAE,CAAC;QACpB,OAAO,MAAM,CAAC;IAChB,CAAC;CACF","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport {\n AnalyzeActionName,\n AnalyzeActionParameters,\n AnalyzeBatchAction,\n AnalyzeBatchOperationState,\n AnalyzeBatchPoller,\n AnalyzeResult,\n BeginAnalyzeBatchOptions,\n PagedAnalyzeBatchResult,\n RestoreAnalyzeBatchPollerOptions,\n TextAnalysisClientOptions,\n TextAnalysisOperationOptions,\n} from \"./models\";\nimport {\n AnalyzeBatchActionUnion,\n LanguageDetectionInput,\n TextDocumentInput,\n} from \"./generated/models\";\nimport { DEFAULT_COGNITIVE_SCOPE, SDK_VERSION } from \"./constants\";\nimport {\n InternalPipelineOptions,\n bearerTokenAuthenticationPolicy,\n} from \"@azure/core-rest-pipeline\";\nimport { KeyCredential, TokenCredential, isTokenCredential } from \"@azure/core-auth\";\nimport { LongRunningOperation, LroEngine } from \"@azure/core-lro\";\nimport { TracingClient, createTracingClient } from \"@azure/core-tracing\";\nimport {\n convertToLanguageDetectionInput,\n convertToTextDocumentInput,\n getOperationOptions,\n isStringArray,\n} from \"./util\";\nimport {\n createAnalyzeBatchLro,\n createCancelOperation,\n createCreateAnalyzeBatchPollerLro,\n createUpdateAnalyzeState,\n getDocsFromState,\n processAnalyzeResult,\n} from \"./lro\";\nimport { throwError, transformActionResult } from \"./transforms\";\nimport { GeneratedClient } from \"./generated/generatedClient\";\nimport { logger } from \"./logger\";\nimport { textAnalyticsAzureKeyCredentialPolicy } from \"./azureKeyCredentialPolicy\";\n\n/**\n * A client for interacting with the text analysis features in Azure Cognitive\n * Language Service.\n *\n * The client needs the endpoint of a Language resource and an authentication\n * method such as an API key or AAD. The API key and endpoint can be found in\n * the Language resource page in the Azure portal. 
They will be located in the\n * resource's Keys and Endpoint page, under Resource Management.\n *\n * ### Examples for authentication:\n *\n * #### API Key\n *\n * ```js\n * import { TextAnalysisClient, AzureKeyCredential } from \"@azure/ai-language-text\";\n *\n * const endpoint = \"https://<resource name>.cognitiveservices.azure.com\";\n * const credential = new AzureKeyCredential(\"<api key>\");\n *\n * const client = new TextAnalysisClient(endpoint, credential);\n * ```\n *\n * #### Azure Active Directory\n *\n * See the [`@azure/identity`](https://npmjs.com/package/\\@azure/identity)\n * package for more information about authenticating with Azure Active Directory.\n *\n * ```js\n * import { TextAnalysisClient } from \"@azure/ai-language-text\";\n * import { DefaultAzureCredential } from \"@azure/identity\";\n *\n * const endpoint = \"https://<resource name>.cognitiveservices.azure.com\";\n * const credential = new DefaultAzureCredential();\n *\n * const client = new TextAnalysisClient(endpoint, credential);\n * ```\n */\nexport class TextAnalysisClient {\n private readonly _client: GeneratedClient;\n private readonly _tracing: TracingClient;\n private readonly defaultCountryHint: string;\n private readonly defaultLanguage: string;\n\n /**\n * Creates an instance of TextAnalysisClient with the endpoint of a Language\n * resource and an authentication method such as an API key or AAD.\n *\n * The API key and endpoint can be found in the Language resource page in the\n * Azure portal. They will be located in the resource's Keys and Endpoint page,\n * under Resource Management.\n *\n * ### Example\n *\n * ```js\n * import { TextAnalysisClient, AzureKeyCredential } from \"@azure/ai-language-text\";\n *\n * const endpoint = \"https://<resource name>.cognitiveservices.azure.com\";\n * const credential = new AzureKeyCredential(\"<api key>\");\n *\n * const client = new TextAnalysisClient(endpoint, credential);\n * ```\n *\n * @param endpointUrl - The URL to the endpoint of a Cognitive Language Service resource\n * @param credential - Key credential to be used to authenticate requests to the service.\n * @param options - Used to configure the TextAnalytics client.\n */\n constructor(endpointUrl: string, credential: KeyCredential, options?: TextAnalysisClientOptions);\n /**\n * Creates an instance of TextAnalysisClient with the endpoint of a Language\n * resource and an authentication method such as an API key or AAD.\n *\n * The API key and endpoint can be found in the Language resource page in the\n * Azure portal. 
They will be located in the resource's Keys and Endpoint page,\n * under Resource Management.\n *\n * ### Example\n *\n * See the [`@azure/identity`](https://npmjs.com/package/\\@azure/identity)\n * package for more information about authenticating with Azure Active Directory.\n *\n * ```js\n * import { TextAnalysisClient } from \"@azure/ai-language-text\";\n * import { DefaultAzureCredential } from \"@azure/identity\";\n *\n * const endpoint = \"https://<resource name>.cognitiveservices.azure.com\";\n * const credential = new DefaultAzureCredential();\n *\n * const client = new TextAnalysisClient(endpoint, credential);\n * ```\n *\n * @param endpointUrl - The URL to the endpoint of a Cognitive Language Service resource\n * @param credential - Token credential to be used to authenticate requests to the service.\n * @param options - Used to configure the TextAnalytics client.\n */\n constructor(\n endpointUrl: string,\n credential: TokenCredential,\n options?: TextAnalysisClientOptions\n );\n constructor(\n endpointUrl: string,\n credential: TokenCredential | KeyCredential,\n options: TextAnalysisClientOptions = {}\n ) {\n const { defaultCountryHint = \"us\", defaultLanguage = \"en\", ...pipelineOptions } = options;\n this.defaultCountryHint = defaultCountryHint;\n this.defaultLanguage = defaultLanguage;\n\n const internalPipelineOptions: InternalPipelineOptions = {\n ...pipelineOptions,\n ...{\n loggingOptions: {\n logger: logger.info,\n additionalAllowedHeaderNames: [\"x-ms-correlation-request-id\", \"x-ms-request-id\"],\n },\n },\n };\n\n this._client = new GeneratedClient(endpointUrl, internalPipelineOptions);\n\n const authPolicy = isTokenCredential(credential)\n ? bearerTokenAuthenticationPolicy({ credential, scopes: DEFAULT_COGNITIVE_SCOPE })\n : textAnalyticsAzureKeyCredentialPolicy(credential);\n\n this._client.pipeline.addPolicy(authPolicy);\n this._tracing = createTracingClient({\n packageName: \"@azure/ai-language-text\",\n packageVersion: SDK_VERSION,\n namespace: \"Microsoft.CognitiveServices\",\n });\n }\n\n /**\n * Runs a predictive model to determine the language that the passed-in\n * input strings are written in, and returns, for each one, the detected\n * language as well as a score indicating the model's confidence that the\n * inferred language is correct. Scores close to 1 indicate high certainty in\n * the result. 
120 languages are supported.\n *\n * See {@link https://docs.microsoft.com//azure/cognitive-services/language-service/concepts/data-limits}\n * for data limits.\n *\n * ### Examples\n *\n * #### Language detection\n *\n * ```js\n * const documents = [<input strings>];\n * const countryHint = \"us\";\n * const results = await client.analyze(\"LanguageDetection\", documents, countryHint);\n *\n * for (let i = 0; i < results.length; i++) {\n * const result = results[i];\n * if (result.error) {\n * // a document has an error instead of results\n * } else {\n * const { name, confidenceScore, iso6391Name } = result.primaryLanguage;\n * }\n * }\n * ```\n *\n * See {@link https://docs.microsoft.com//azure/cognitive-services/language-service/language-detection/overview}\n * for more information on language detection.\n *\n * @param actionName - the name of the action to be performed on the input\n * documents, see ${@link AnalyzeActionName}\n * @param documents - the input documents to be analyzed\n * @param options - optional action parameters and settings for the operation\n *\n * @returns an array of results where each element contains the primary language\n * for the corresponding input document.\n */\n public async analyze<ActionName extends \"LanguageDetection\">(\n actionName: ActionName,\n documents: LanguageDetectionInput[],\n options?: AnalyzeActionParameters<ActionName> & TextAnalysisOperationOptions\n ): Promise<AnalyzeResult<ActionName>>;\n /**\n * Runs a predictive model to determine the language that the passed-in\n * input strings are written in, and returns, for each one, the detected\n * language as well as a score indicating the model's confidence that the\n * inferred language is correct. Scores close to 1 indicate high certainty in\n * the result. 120 languages are supported.\n *\n * See {@link https://docs.microsoft.com//azure/cognitive-services/language-service/concepts/data-limits}\n * for data limits.\n *\n * ### Examples\n *\n * #### Language detection\n *\n * ```js\n * const documents = [<input strings>];\n * const countryHint = \"us\";\n * const results = await client.analyze(\"LanguageDetection\", documents, countryHint);\n *\n * for (const result of results) {\n * if (result.error) {\n * // a document has an error instead of results\n * } else {\n * const { name, confidenceScore, iso6391Name } = result.primaryLanguage;\n * }\n * }\n * ```\n *\n * See {@link https://docs.microsoft.com//azure/cognitive-services/language-service/language-detection/overview}\n * for more information on language detection.\n *\n * @param actionName - the name of the action to be performed on the input\n * documents, see ${@link AnalyzeActionName}\n * @param documents - the input documents to be analyzed\n * @param countryHint - Indicates the country of origin for all of\n * the input strings to assist the model in predicting the language they are\n * written in. If unspecified, this value will be set to the default\n * country hint in `TextAnalysisClientOptions`. If set to an empty string,\n * or the string \"none\", the service will apply a model where the country is\n * explicitly unset. 
The same country hint is applied to all strings in the\n * input collection.\n * @param options - optional action parameters and settings for the operation\n *\n * @returns an array of results where each element contains the primary language\n * for the corresponding input document.\n */\n public async analyze<ActionName extends \"LanguageDetection\">(\n actionName: ActionName,\n documents: string[],\n countryHint?: string,\n options?: AnalyzeActionParameters<ActionName> & TextAnalysisOperationOptions\n ): Promise<AnalyzeResult<ActionName>>;\n /**\n * Runs a predictive model to perform the action of choice on the input\n * documents. See ${@link AnalyzeActionName} for a list of supported\n * actions.\n *\n * The layout of each item in the results array depends on the action chosen.\n * For example, each PIIEntityRecognition document result consists of both\n * `entities` and `redactedText` where the former is a list of all Pii entities\n * in the text and the latter is the original text after all such Pii entities\n * have been redacted from it.\n *\n * See {@link https://docs.microsoft.com//azure/cognitive-services/language-service/concepts/data-limits}\n * for data limits.\n *\n * ### Examples\n *\n * #### Opinion mining\n *\n * ```js\n * const documents = [{\n * id: \"1\",\n * text: \"The food and service aren't the best\",\n * language: \"en\"\n * }];\n * const results = await client.analyze(\"SentimentAnalysis\", documents, {\n * includeOpinionMining: true,\n * });\n *\n * for (const result of results) {\n * if (result.error) {\n * // a document has an error instead of results\n * } else {\n * const { sentiment, confidenceScores, sentences } = result;\n * for (const { sentiment, confidenceScores, opinions } of sentences) {\n * for (const { target, assessments } of opinions) {\n * const { text, sentiment, confidenceScores } = target;\n * for (const { text, sentiment } of assessments) {\n * // Do something\n * }\n * }\n * }\n * }\n * }\n * ```\n *\n * See {@link https://docs.microsoft.com//azure/cognitive-services/language-service/sentiment-opinion-mining/overview}\n * for more information on opinion mining.\n *\n * #### Personally identifiable information\n *\n * ```js\n * const documents = [<input documents>];\n * const categoriesFilter = [KnownPiiCategory.USSocialSecurityNumber];\n * const domainFilter = KnownPiiDomain.Phi;\n * const results = await client.analyze(\"PiiEntityRecognition\", documents, {\n * domainFilter, categoriesFilter\n * });\n *\n * for (const result of results) {\n * if (result.error) {\n * // a document has an error instead of results\n * } else {\n * const { entities, redactedText } = result;\n * for (const { text, category, confidenceScore, length, offset } of entities) {\n * // Do something\n * }\n * }\n * }\n * ```\n *\n * See {@link https://docs.microsoft.com//azure/cognitive-services/language-service/personally-identifiable-information/overview}\n * for more information on personally identifiable information.\n *\n * @param actionName - the name of the action to be performed on the input\n * documents, see ${@link AnalyzeActionName}\n * @param documents - the input documents to be analyzed\n * @param options - optional action parameters and settings for the operation\n *\n * @returns an array of results corresponding to the input documents\n */\n public async analyze<ActionName extends AnalyzeActionName = AnalyzeActionName>(\n actionName: ActionName,\n documents: TextDocumentInput[],\n options?: AnalyzeActionParameters<ActionName> & 
TextAnalysisOperationOptions\n ): Promise<AnalyzeResult<ActionName>>;\n\n /**\n * Runs a predictive model to perform the action of choice on the input\n * strings. See ${@link AnalyzeActionName} for a list of supported\n * actions.\n *\n * The layout of each item in the results array depends on the action chosen.\n * For example, each PIIEntityRecognition document result consists of both\n * `entities` and `redactedText` where the former is a list of all Pii entities\n * in the text and the latter is the original text after all such Pii entities\n * have been redacted from it.\n *\n * See {@link https://docs.microsoft.com//azure/cognitive-services/language-service/concepts/data-limits}\n * for data limits.\n *\n * ### Examples\n *\n * #### Opinion mining\n *\n * ```js\n * const documents = [\"The food and service aren't the best\"];\n * const results = await client.analyze(\"SentimentAnalysis\", documents, {\n * includeOpinionMining: true,\n * });\n *\n * for (const result of results) {\n * if (result.error) {\n * // a document has an error instead of results\n * } else {\n * const { sentiment, confidenceScores, sentences } = result;\n * for (const { sentiment, confidenceScores, opinions } of sentences) {\n * for (const { target, assessments } of opinions) {\n * const { text, sentiment, confidenceScores } = target;\n * for (const { text, sentiment } of assessments) {\n * // Do something\n * }\n * }\n * }\n * }\n * }\n * ```\n *\n * See {@link https://docs.microsoft.com//azure/cognitive-services/language-service/sentiment-opinion-mining/overview}\n * for more information on opinion mining.\n *\n * #### Personally identifiable information\n *\n * ```js\n * const documents = [<input strings>];\n * const languageHint = \"en\";\n * const categoriesFilter = [KnownPiiCategory.USSocialSecurityNumber];\n * const domainFilter = KnownPiiDomain.Phi;\n * const results = await client.analyze(\"PiiEntityRecognition\", documents, languageHint, {\n * domainFilter, categoriesFilter\n * });\n *\n * for (const result of results) {\n * if (result.error) {\n * // a document has an error instead of results\n * } else {\n * const { entities, redactedText } = result;\n * for (const { text, category, confidenceScore, length, offset } of entities) {\n * // Do something\n * }\n * }\n * }\n * ```\n *\n * See {@link https://docs.microsoft.com//azure/cognitive-services/language-service/personally-identifiable-information/overview}\n * for more information on personally identifiable information.\n *\n * @param actionName - the name of the action to be performed on the input\n * documents, see ${@link AnalyzeActionName}\n * @param documents - the input documents to be analyzed\n * @param languageCode - the code of the language that all the input strings are\n * written in. If unspecified, this value will be set to the default\n * language in `TextAnalysisClientOptions`. If set to an empty string,\n * the service will apply a model where the language is explicitly set to\n * \"None\". 
Language support varies per action, for example, more information\n * about the languages supported for Entity Recognition actions can be\n * found in {@link https://docs.microsoft.com//azure/cognitive-services/language-service/named-entity-recognition/language-support}\n * @param options - optional action parameters and settings for the operation\n *\n * @returns an array of results corresponding to the input documents\n */\n public async analyze<ActionName extends AnalyzeActionName = AnalyzeActionName>(\n actionName: ActionName,\n documents: string[],\n languageCode?: string,\n options?: AnalyzeActionParameters<ActionName> & TextAnalysisOperationOptions\n ): Promise<AnalyzeResult<ActionName>>;\n // implementation\n public async analyze<ActionName extends AnalyzeActionName = AnalyzeActionName>(\n actionName: ActionName,\n documents: string[] | LanguageDetectionInput[] | TextDocumentInput[],\n languageOrCountryHintOrOptions?:\n | string\n | (AnalyzeActionParameters<ActionName> & TextAnalysisOperationOptions),\n options?: AnalyzeActionParameters<ActionName> & TextAnalysisOperationOptions\n ): Promise<AnalyzeResult<ActionName>> {\n let realOptions: AnalyzeActionParameters<ActionName> & TextAnalysisOperationOptions;\n\n if (documents.length === 0) {\n throw new Error(\"'documents' must be a non-empty array\");\n }\n\n let realInputs: LanguageDetectionInput[] | TextDocumentInput[];\n if (isStringArray(documents)) {\n if (actionName === \"LanguageDetection\") {\n realInputs = convertToLanguageDetectionInput(\n documents,\n typeof languageOrCountryHintOrOptions === \"string\"\n ? languageOrCountryHintOrOptions\n : this.defaultCountryHint\n );\n } else {\n realInputs = convertToTextDocumentInput(\n documents,\n typeof languageOrCountryHintOrOptions === \"string\"\n ? languageOrCountryHintOrOptions\n : this.defaultLanguage\n );\n }\n realOptions = options || ({} as any);\n } else {\n realInputs = documents;\n realOptions =\n (languageOrCountryHintOrOptions as AnalyzeActionParameters<ActionName> &\n TextAnalysisOperationOptions) || {};\n }\n const { options: operationOptions, rest: action } = getOperationOptions(realOptions);\n return this._tracing.withSpan(\n \"TextAnalysisClient.analyze\",\n operationOptions,\n async (updatedOptions: TextAnalysisOperationOptions) =>\n throwError(\n this._client\n .analyze(\n {\n kind: actionName,\n analysisInput: {\n documents: realInputs,\n },\n parameters: action,\n } as any,\n updatedOptions\n )\n .then(\n (result) =>\n transformActionResult(actionName, realInputs, result) as AnalyzeResult<ActionName>\n )\n )\n );\n }\n\n /**\n * Performs an array (batch) of actions on the input documents. Each action has\n * a `kind` field that specifies the nature of the action. See ${@link AnalyzeBatchActionNames}\n * for a list of supported actions. 
In addition to `kind`, actions could also\n * have other parameters such as `disableServiceLogs` and `modelVersion`.\n *\n * The results array contains the results for those input actions where each\n * item also has a `kind` field that specifies the type of the results.\n *\n * See {@link https://docs.microsoft.com//azure/cognitive-services/language-service/concepts/data-limits}\n * for data limits.\n *\n * ### Examples\n *\n * #### Key phrase extraction and Pii entity recognition\n *\n * ```js\n * const poller = await client.beginAnalyzeBatch(\n * [{ kind: \"KeyPhraseExtraction\" }, { kind: \"PiiEntityRecognition\" }],\n * documents\n * );\n * const actionResults = await poller.pollUntilDone();\n *\n * for await (const actionResult of actionResults) {\n * if (actionResult.error) {\n * throw new Error(`Unexpected error`);\n * }\n * switch (actionResult.kind) {\n * case \"KeyPhraseExtraction\": {\n * for (const doc of actionResult.results) {\n * // do something\n * }\n * break;\n * }\n * case \"PiiEntityRecognition\": {\n * for (const doc of actionResult.results) {\n * // do something\n * }\n * break;\n * }\n * }\n * }\n * ```\n *\n * @param actions - an array of actions that will be run on the input documents\n * @param documents - the input documents to be analyzed\n * @param languageCode - the code of the language that all the input strings are\n * written in. If unspecified, this value will be set to the default\n * language in `TextAnalysisClientOptions`. If set to an empty string,\n * the service will apply a model where the language is explicitly set to\n * \"None\". Language support varies per action, for example, more information\n * about the languages supported for Entity Recognition actions can be\n * found in {@link https://docs.microsoft.com//azure/cognitive-services/language-service/named-entity-recognition/language-support}\n * @param options - optional settings for the operation\n *\n * @returns an array of results corresponding to the input actions\n */\n async beginAnalyzeBatch(\n actions: AnalyzeBatchAction[],\n documents: string[],\n languageCode?: string,\n options?: BeginAnalyzeBatchOptions\n ): Promise<AnalyzeBatchPoller>;\n /**\n * Performs an array (batch) of actions on the input documents. Each action has\n * a `kind` field that specifies the nature of the action. See ${@link AnalyzeBatchActionNames}\n * for a list of supported actions. 
In addition to `kind`, actions could also\n * have other parameters such as `disableServiceLogs` and `modelVersion`.\n *\n * The results array contains the results for those input actions where each\n * item also has a `kind` field that specifies the type of the results.\n *\n * See {@link https://docs.microsoft.com//azure/cognitive-services/language-service/concepts/data-limits}\n * for data limits.\n *\n * ### Examples\n *\n * #### Keyphrase extraction and Pii entity recognition\n *\n * ```js\n * const poller = await client.beginAnalyzeBatch(\n * [{ kind: \"KeyPhraseExtraction\" }, { kind: \"PiiEntityRecognition\" }],\n * documents\n * );\n * const actionResults = await poller.pollUntilDone();\n *\n * for await (const actionResult of actionResults) {\n * if (actionResult.error) {\n * throw new Error(`Unexpected error`);\n * }\n * switch (actionResult.kind) {\n * case \"KeyPhraseExtraction\": {\n * for (const doc of actionResult.results) {\n * // do something\n * }\n * break;\n * }\n * case \"PiiEntityRecognition\": {\n * for (const doc of actionResult.results) {\n * // do something\n * }\n * break;\n * }\n * }\n * }\n * ```\n *\n * @param actions - an array of actions that will be run on the input documents\n * @param documents - the input documents to be analyzed\n * @param options - optional settings for the operation\n *\n * @returns an array of results corresponding to the input actions\n */\n async beginAnalyzeBatch(\n actions: AnalyzeBatchAction[],\n documents: TextDocumentInput[],\n options?: BeginAnalyzeBatchOptions\n ): Promise<AnalyzeBatchPoller>;\n // implementation\n async beginAnalyzeBatch(\n actions: AnalyzeBatchAction[],\n documents: TextDocumentInput[] | string[],\n languageOrOptions?: BeginAnalyzeBatchOptions | string,\n options: BeginAnalyzeBatchOptions = {}\n ): Promise<AnalyzeBatchPoller> {\n let realOptions: BeginAnalyzeBatchOptions;\n let realInputs: TextDocumentInput[];\n\n if (!Array.isArray(documents) || documents.length === 0) {\n throw new Error(\"'documents' must be a non-empty array\");\n }\n\n if (isStringArray(documents)) {\n const language = (languageOrOptions as string) || this.defaultLanguage;\n realInputs = convertToTextDocumentInput(documents, language);\n realOptions = options;\n } else {\n realInputs = documents;\n realOptions = languageOrOptions as BeginAnalyzeBatchOptions;\n }\n const realActions = actions.map(\n ({ kind, actionName, ...rest }): AnalyzeBatchActionUnion => ({\n kind,\n actionName,\n parameters: rest,\n })\n );\n const { includeStatistics, updateIntervalInMs, displayName, ...rest } = realOptions;\n const lro = createAnalyzeBatchLro({\n client: this._client,\n commonOptions: rest,\n documents: realInputs,\n initialRequestOptions: { displayName },\n pollRequestOptions: { includeStatistics },\n tasks: realActions,\n tracing: this._tracing,\n });\n\n const poller = new LroEngine<PagedAnalyzeBatchResult, AnalyzeBatchOperationState>(\n lro as LongRunningOperation<PagedAnalyzeBatchResult>,\n {\n intervalInMs: updateIntervalInMs,\n processResult: processAnalyzeResult({\n client: this._client,\n tracing: this._tracing,\n documents: realInputs,\n opOptions: { ...rest, includeStatistics },\n }),\n updateState: createUpdateAnalyzeState(realInputs),\n cancel: createCancelOperation({\n client: this._client,\n tracing: this._tracing,\n options: rest,\n }),\n }\n );\n\n await poller.poll();\n return poller;\n }\n\n /**\n * Creates a poller from the serialized state of another poller. 
This can be\n * useful when you want to create pollers on a different host or a poller\n * needs to be constructed after the original one is not in scope.\n *\n * @param serializedState - the serialized state of another poller. It is the\n * result of `poller.toString()`\n * @param options - optional settings for the operation\n *\n * # Example\n *\n * `client.beginAnalyzeBatch` returns a promise that will resolve to a poller.\n * The state of the poller can be serialized and used to create another as follows:\n *\n * ```js\n * const serializedState = poller.toString();\n * const rehydratedPoller = await client.createAnalyzeBatchPoller(serializedState);\n * const actionResults = await rehydratedPoller.pollUntilDone();\n * ```\n */\n async restoreAnalyzeBatchPoller(\n serializedState: string,\n options?: RestoreAnalyzeBatchPollerOptions\n ): Promise<AnalyzeBatchPoller>;\n // implementation\n async restoreAnalyzeBatchPoller(\n serializedState: string,\n options: RestoreAnalyzeBatchPollerOptions = {}\n ): Promise<AnalyzeBatchPoller> {\n const { includeStatistics, updateIntervalInMs, ...rest } = options;\n const documents = getDocsFromState(serializedState);\n const lro = createCreateAnalyzeBatchPollerLro({\n client: this._client,\n options: { ...rest, includeStatistics },\n tracing: this._tracing,\n });\n\n const poller = new LroEngine<PagedAnalyzeBatchResult, AnalyzeBatchOperationState>(\n lro as LongRunningOperation<PagedAnalyzeBatchResult>,\n {\n intervalInMs: updateIntervalInMs,\n resumeFrom: serializedState,\n processResult: processAnalyzeResult({\n client: this._client,\n tracing: this._tracing,\n documents,\n opOptions: { ...rest, includeStatistics },\n }),\n updateState: createUpdateAnalyzeState(),\n cancel: createCancelOperation({\n client: this._client,\n tracing: this._tracing,\n options: rest,\n }),\n }\n );\n\n await poller.poll();\n return poller;\n }\n}\n"]}
+ {"version":3,"file":"textAnalysisClient.js","sourceRoot":"","sources":["../../src/textAnalysisClient.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;;AAmBlC,OAAO,EAAE,uBAAuB,EAAE,WAAW,EAAE,MAAM,aAAa,CAAC;AACnE,OAAO,EAAkC,iBAAiB,EAAE,MAAM,kBAAkB,CAAC;AACrF,OAAO,EAAiB,mBAAmB,EAAE,MAAM,qBAAqB,CAAC;AACzE,OAAO,EACL,+BAA+B,EAC/B,0BAA0B,EAC1B,mBAAmB,EACnB,aAAa,GACd,MAAM,QAAQ,CAAC;AAChB,OAAO,EACL,qBAAqB,EACrB,iCAAiC,EACjC,4BAA4B,EAC5B,wBAAwB,EACxB,kBAAkB,EAClB,oBAAoB,GACrB,MAAM,OAAO,CAAC;AACf,OAAO,EAAE,UAAU,EAAE,qBAAqB,EAAE,MAAM,cAAc,CAAC;AACjE,OAAO,EAAE,eAAe,EAAE,MAAM,6BAA6B,CAAC;AAC9D,OAAO,EAAE,+BAA+B,EAAE,MAAM,2BAA2B,CAAC;AAC5E,OAAO,EAAE,gBAAgB,EAAE,MAAM,iBAAiB,CAAC;AACnD,OAAO,EAAE,MAAM,EAAE,MAAM,UAAU,CAAC;AAClC,OAAO,EAAE,qCAAqC,EAAE,MAAM,4BAA4B,CAAC;AAEnF;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;GAoCG;AACH,MAAM,OAAO,kBAAkB;IA8D7B,YACE,WAAmB,EACnB,UAA2C,EAC3C,UAAqC,EAAE;QAEvC,MAAM,EACJ,kBAAkB,GAAG,IAAI,EACzB,eAAe,GAAG,IAAI,EACtB,cAAc,KAEZ,OAAO,EADN,eAAe,UAChB,OAAO,EALL,2DAKL,CAAU,CAAC;QACZ,IAAI,CAAC,kBAAkB,GAAG,kBAAkB,CAAC;QAC7C,IAAI,CAAC,eAAe,GAAG,eAAe,CAAC;QAEvC,MAAM,uBAAuB,iDACxB,eAAe,GACf;YACD,cAAc,EAAE;gBACd,MAAM,EAAE,MAAM,CAAC,IAAI;gBACnB,4BAA4B,EAAE,CAAC,6BAA6B,EAAE,iBAAiB,CAAC;aACjF;SACF,KACD,UAAU,EAAE,cAAc,GAC3B,CAAC;QAEF,IAAI,CAAC,OAAO,GAAG,IAAI,eAAe,CAAC,WAAW,EAAE,uBAAuB,CAAC,CAAC;QAEzE,MAAM,UAAU,GAAG,iBAAiB,CAAC,UAAU,CAAC;YAC9C,CAAC,CAAC,+BAA+B,CAAC,EAAE,UAAU,EAAE,MAAM,EAAE,uBAAuB,EAAE,CAAC;YAClF,CAAC,CAAC,qCAAqC,CAAC,UAAU,CAAC,CAAC;QAEtD,IAAI,CAAC,OAAO,CAAC,QAAQ,CAAC,SAAS,CAAC,UAAU,CAAC,CAAC;QAC5C,IAAI,CAAC,QAAQ,GAAG,mBAAmB,CAAC;YAClC,WAAW,EAAE,yBAAyB;YACtC,cAAc,EAAE,WAAW;YAC3B,SAAS,EAAE,6BAA6B;SACzC,CAAC,CAAC;IACL,CAAC;IAmRD,iBAAiB;IACV,KAAK,CAAC,OAAO,CAClB,UAAsB,EACtB,SAAoE,EACpE,8BAEwE,EACxE,OAA4E;QAE5E,IAAI,WAA+E,CAAC;QAEpF,IAAI,SAAS,CAAC,MAAM,KAAK,CAAC,EAAE;YAC1B,MAAM,IAAI,KAAK,CAAC,uCAAuC,CAAC,CAAC;SAC1D;QAED,IAAI,UAA0D,CAAC;QAC/D,IAAI,aAAa,CAAC,SAAS,CAAC,EAAE;YAC5B,IAAI,UAAU,KAAK,mBAAmB,EAAE;gBACtC,UAAU,GAAG,+BAA+B,CAC1C,SAAS,EACT,OAAO,8BAA8B,KAAK,QAAQ;oBAChD,CAAC,CAAC,8BAA8B;oBAChC,CAAC,CAAC,IAAI,CAAC,kBAAkB,CAC5B,CAAC;aACH;iBAAM;gBACL,UAAU,GAAG,0BAA0B,CACrC,SAAS,EACT,OAAO,8BAA8B,KAAK,QAAQ;oBAChD,CAAC,CAAC,8BAA8B;oBAChC,CAAC,CAAC,IAAI,CAAC,eAAe,CACzB,CAAC;aACH;YACD,WAAW,GAAG,OAAO,IAAK,EAAU,CAAC;SACtC;aAAM;YACL,UAAU,GAAG,SAAS,CAAC;YACvB,WAAW;gBACR,8BAC8B,IAAI,EAAE,CAAC;SACzC;QACD,MAAM,EAAE,OAAO,EAAE,gBAAgB,EAAE,IAAI,EAAE,MAAM,EAAE,GAAG,mBAAmB,CAAC,WAAW,CAAC,CAAC;QACrF,OAAO,IAAI,CAAC,QAAQ,CAAC,QAAQ,CAC3B,4BAA4B,EAC5B,gBAAgB,EAChB,KAAK,EAAE,cAA4C,EAAE,EAAE,CACrD,UAAU,CACR,IAAI,CAAC,OAAO;aACT,OAAO,CACN;YACE,IAAI,EAAE,UAAU;YAChB,aAAa,EAAE;gBACb,SAAS,EAAE,UAAU;aACtB;YACD,UAAU,EAAE,MAAM;SACZ,EACR,cAAc,CACf;aACA,IAAI,CACH,CAAC,MAAM,EAAE,EAAE,CACT,qBAAqB,CACnB,UAAU,EACV,UAAU,CAAC,GAAG,CAAC,CAAC,EAAE,EAAE,EAAE,EAAE,EAAE,CAAC,EAAE,CAAC,EAC9B,MAAM,CACsB,CACjC,CACJ,CACJ,CAAC;IACJ,CAAC;IAwHD,iBAAiB;IACjB,KAAK,CAAC,iBAAiB,CACrB,OAA6B,EAC7B,SAAyC,EACzC,iBAAqD,EACrD,UAAoC,EAAE;QAEtC,IAAI,WAAqC,CAAC;QAC1C,IAAI,UAA+B,CAAC;QAEpC,IAAI,CAAC,KAAK,CAAC,OAAO,CAAC,SAAS,CAAC,IAAI,SAAS,CAAC,MAAM,KAAK,CAAC,EAAE;YACvD,MAAM,IAAI,KAAK,CAAC,uCAAuC,CAAC,CAAC;SAC1D;QAED,IAAI,aAAa,CAAC,SAAS,CAAC,EAAE;YAC5B,MAAM,QAAQ,GAAI,iBAA4B,IAAI,IAAI,CAAC,eAAe,CAAC;YACvE,UAAU,GAAG,0BAA0B,CAAC,SAAS,EAAE,QAAQ,CAAC,CAAC;YAC7D,WAAW,GAAG,OAAO,CAAC;SACvB;aAAM;YACL,UAAU,GAAG,SAAS,CAAC;YACvB,WAAW,GAAG,iBAA6C,CAAC;SAC7D;QACD,MAAM,WAAW,GAAG,OAAO,CAAC,GAAG,CAC7B,CAAC,EAA6B,EAA2B,EAAE;gBAA1D,EAAE,IAAI,EAAE,UAAU,OAAW,EAAN,IAAI,cAA3B,sBAA6B,CAAF;YAAgC,OAAA,CAAC;gBAC3D,IAAI;gBACJ,UAAU;gBACV,UAAU,EAAE,IAAI;aACjB,CAAC,CAAA;SAAA,CACH,CAAC;
QACF,MAAM,EAAE,iBAAiB,EAAE,kBAAkB,EAAE,WAAW,KAAc,WAAW,EAApB,IAAI,UAAK,WAAW,EAA7E,0DAA+D,CAAc,CAAC;QACpF,MAAM,GAAG,GAAG,qBAAqB,CAAC;YAChC,MAAM,EAAE,IAAI,CAAC,OAAO;YACpB,aAAa,EAAE,IAAI;YACnB,SAAS,EAAE,UAAU;YACrB,qBAAqB,EAAE,EAAE,WAAW,EAAE;YACtC,kBAAkB,EAAE,EAAE,iBAAiB,EAAE;YACzC,KAAK,EAAE,WAAW;YAClB,OAAO,EAAE,IAAI,CAAC,QAAQ;SACvB,CAAC,CAAC;QAEH,MAAM,MAAM,GAAG,UAAU,CAAC,GAAG,CAAC,CAAC,EAAE,EAAE,EAAE,EAAE,EAAE,CAAC,EAAE,CAAC,CAAC;QAE9C,MAAM,KAAK,GAAG,EAAE,iBAAiB,EAAE,EAAE,EAAE,CAAC;QAExC,MAAM,MAAM,GAAG,MAAM,gBAAgB,CAAC,GAAG,EAAE;YACzC,YAAY,EAAE,kBAAkB;YAChC,aAAa,EAAE,oBAAoB,CAAC;gBAClC,MAAM,EAAE,IAAI,CAAC,OAAO;gBACpB,OAAO,EAAE,IAAI,CAAC,QAAQ;gBACtB,MAAM;gBACN,SAAS,kCAAO,IAAI,KAAE,iBAAiB,GAAE;gBACzC,KAAK;aACN,CAAC;YACF,WAAW,EAAE,wBAAwB,CAAC,MAAM,CAAC;YAC7C,qBAAqB,CAAC,iBAAyB;gBAC7C,KAAK,CAAC,iBAAiB,GAAG,iBAAiB,CAAC;YAC9C,CAAC;SACF,CAAC,CAAC;QAEH,MAAM,MAAM,CAAC,IAAI,EAAE,CAAC;QACpB,MAAM,EAAE,GAAG,MAAM,CAAC,iBAAiB,EAAE,CAAC,EAAE,CAAC;QACzC,OAAO,4BAA4B,CAAC;YAClC,EAAE;YACF,MAAM,EAAE,IAAI,CAAC,OAAO;YACpB,OAAO;YACP,MAAM;YACN,OAAO,EAAE,IAAI,CAAC,QAAQ;SACvB,CAAC,CAAC;IACL,CAAC;IA0BD,iBAAiB;IACjB,KAAK,CAAC,yBAAyB,CAC7B,eAAuB,EACvB,UAA4C,EAAE;QAE9C,MAAM,EAAE,iBAAiB,EAAE,kBAAkB,KAAc,OAAO,EAAhB,IAAI,UAAK,OAAO,EAA5D,2CAAkD,CAAU,CAAC;QACnE,MAAM,MAAM,GAAG,kBAAkB,CAAC,eAAe,CAAC,CAAC;QACnD,MAAM,GAAG,GAAG,iCAAiC,CAAC;YAC5C,MAAM,EAAE,IAAI,CAAC,OAAO;YACpB,OAAO,kCAAO,IAAI,KAAE,iBAAiB,GAAE;YACvC,OAAO,EAAE,IAAI,CAAC,QAAQ;SACvB,CAAC,CAAC;QAEH,MAAM,KAAK,GAAG,EAAE,iBAAiB,EAAE,EAAE,EAAE,CAAC;QAExC,MAAM,MAAM,GAAG,MAAM,gBAAgB,CAAC,GAAG,EAAE;YACzC,YAAY,EAAE,kBAAkB;YAChC,WAAW,EAAE,eAAe;YAC5B,aAAa,EAAE,oBAAoB,CAAC;gBAClC,MAAM,EAAE,IAAI,CAAC,OAAO;gBACpB,OAAO,EAAE,IAAI,CAAC,QAAQ;gBACtB,MAAM;gBACN,SAAS,kCAAO,IAAI,KAAE,iBAAiB,GAAE;gBACzC,KAAK;aACN,CAAC;YACF,WAAW,EAAE,wBAAwB,EAAE;YACvC,qBAAqB,CAAC,iBAAyB;gBAC7C,KAAK,CAAC,iBAAiB,GAAG,iBAAiB,CAAC;YAC9C,CAAC;SACF,CAAC,CAAC;QAEH,MAAM,MAAM,CAAC,IAAI,EAAE,CAAC;QACpB,MAAM,EAAE,GAAG,MAAM,CAAC,iBAAiB,EAAE,CAAC,EAAE,CAAC;QACzC,OAAO,4BAA4B,CAAC;YAClC,EAAE;YACF,MAAM,EAAE,IAAI,CAAC,OAAO;YACpB,OAAO;YACP,MAAM;YACN,OAAO,EAAE,IAAI,CAAC,QAAQ;SACvB,CAAC,CAAC;IACL,CAAC;CACF","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport {\n AnalyzeActionName,\n AnalyzeActionParameters,\n AnalyzeBatchAction,\n AnalyzeBatchPoller,\n AnalyzeResult,\n BeginAnalyzeBatchOptions,\n RestoreAnalyzeBatchPollerOptions,\n TextAnalysisClientOptions,\n TextAnalysisOperationOptions,\n} from \"./models\";\nimport {\n AnalyzeBatchActionUnion,\n GeneratedClientOptionalParams,\n LanguageDetectionInput,\n TextDocumentInput,\n} from \"./generated/models\";\nimport { DEFAULT_COGNITIVE_SCOPE, SDK_VERSION } from \"./constants\";\nimport { KeyCredential, TokenCredential, isTokenCredential } from \"@azure/core-auth\";\nimport { TracingClient, createTracingClient } from \"@azure/core-tracing\";\nimport {\n convertToLanguageDetectionInput,\n convertToTextDocumentInput,\n getOperationOptions,\n isStringArray,\n} from \"./util\";\nimport {\n createAnalyzeBatchLro,\n createCreateAnalyzeBatchPollerLro,\n createPollerWithCancellation,\n createUpdateAnalyzeState,\n getDocIDsFromState,\n processAnalyzeResult,\n} from \"./lro\";\nimport { throwError, transformActionResult } from \"./transforms\";\nimport { GeneratedClient } from \"./generated/generatedClient\";\nimport { bearerTokenAuthenticationPolicy } from \"@azure/core-rest-pipeline\";\nimport { createHttpPoller } from \"@azure/core-lro\";\nimport { logger } from \"./logger\";\nimport { textAnalyticsAzureKeyCredentialPolicy } from 
\"./azureKeyCredentialPolicy\";\n\n/**\n * A client for interacting with the text analysis features in Azure Cognitive\n * Language Service.\n *\n * The client needs the endpoint of a Language resource and an authentication\n * method such as an API key or AAD. The API key and endpoint can be found in\n * the Language resource page in the Azure portal. They will be located in the\n * resource's Keys and Endpoint page, under Resource Management.\n *\n * ### Examples for authentication:\n *\n * #### API Key\n *\n * ```js\n * import { TextAnalysisClient, AzureKeyCredential } from \"@azure/ai-language-text\";\n *\n * const endpoint = \"https://<resource name>.cognitiveservices.azure.com\";\n * const credential = new AzureKeyCredential(\"<api key>\");\n *\n * const client = new TextAnalysisClient(endpoint, credential);\n * ```\n *\n * #### Azure Active Directory\n *\n * See the [`@azure/identity`](https://npmjs.com/package/\\@azure/identity)\n * package for more information about authenticating with Azure Active Directory.\n *\n * ```js\n * import { TextAnalysisClient } from \"@azure/ai-language-text\";\n * import { DefaultAzureCredential } from \"@azure/identity\";\n *\n * const endpoint = \"https://<resource name>.cognitiveservices.azure.com\";\n * const credential = new DefaultAzureCredential();\n *\n * const client = new TextAnalysisClient(endpoint, credential);\n * ```\n */\nexport class TextAnalysisClient {\n private readonly _client: GeneratedClient;\n private readonly _tracing: TracingClient;\n private readonly defaultCountryHint: string;\n private readonly defaultLanguage: string;\n\n /**\n * Creates an instance of TextAnalysisClient with the endpoint of a Language\n * resource and an authentication method such as an API key or AAD.\n *\n * The API key and endpoint can be found in the Language resource page in the\n * Azure portal. They will be located in the resource's Keys and Endpoint page,\n * under Resource Management.\n *\n * ### Example\n *\n * ```js\n * import { TextAnalysisClient, AzureKeyCredential } from \"@azure/ai-language-text\";\n *\n * const endpoint = \"https://<resource name>.cognitiveservices.azure.com\";\n * const credential = new AzureKeyCredential(\"<api key>\");\n *\n * const client = new TextAnalysisClient(endpoint, credential);\n * ```\n *\n * @param endpointUrl - The URL to the endpoint of a Cognitive Language Service resource\n * @param credential - Key credential to be used to authenticate requests to the service.\n * @param options - Used to configure the TextAnalytics client.\n */\n constructor(endpointUrl: string, credential: KeyCredential, options?: TextAnalysisClientOptions);\n /**\n * Creates an instance of TextAnalysisClient with the endpoint of a Language\n * resource and an authentication method such as an API key or AAD.\n *\n * The API key and endpoint can be found in the Language resource page in the\n * Azure portal. 
They will be located in the resource's Keys and Endpoint page,\n * under Resource Management.\n *\n * ### Example\n *\n * See the [`@azure/identity`](https://npmjs.com/package/\\@azure/identity)\n * package for more information about authenticating with Azure Active Directory.\n *\n * ```js\n * import { TextAnalysisClient } from \"@azure/ai-language-text\";\n * import { DefaultAzureCredential } from \"@azure/identity\";\n *\n * const endpoint = \"https://<resource name>.cognitiveservices.azure.com\";\n * const credential = new DefaultAzureCredential();\n *\n * const client = new TextAnalysisClient(endpoint, credential);\n * ```\n *\n * @param endpointUrl - The URL to the endpoint of a Cognitive Language Service resource\n * @param credential - Token credential to be used to authenticate requests to the service.\n * @param options - Used to configure the TextAnalytics client.\n */\n constructor(\n endpointUrl: string,\n credential: TokenCredential,\n options?: TextAnalysisClientOptions\n );\n constructor(\n endpointUrl: string,\n credential: TokenCredential | KeyCredential,\n options: TextAnalysisClientOptions = {}\n ) {\n const {\n defaultCountryHint = \"us\",\n defaultLanguage = \"en\",\n serviceVersion,\n ...pipelineOptions\n } = options;\n this.defaultCountryHint = defaultCountryHint;\n this.defaultLanguage = defaultLanguage;\n\n const internalPipelineOptions: GeneratedClientOptionalParams = {\n ...pipelineOptions,\n ...{\n loggingOptions: {\n logger: logger.info,\n additionalAllowedHeaderNames: [\"x-ms-correlation-request-id\", \"x-ms-request-id\"],\n },\n },\n apiVersion: serviceVersion,\n };\n\n this._client = new GeneratedClient(endpointUrl, internalPipelineOptions);\n\n const authPolicy = isTokenCredential(credential)\n ? bearerTokenAuthenticationPolicy({ credential, scopes: DEFAULT_COGNITIVE_SCOPE })\n : textAnalyticsAzureKeyCredentialPolicy(credential);\n\n this._client.pipeline.addPolicy(authPolicy);\n this._tracing = createTracingClient({\n packageName: \"@azure/ai-language-text\",\n packageVersion: SDK_VERSION,\n namespace: \"Microsoft.CognitiveServices\",\n });\n }\n\n /**\n * Runs a predictive model to determine the language that the passed-in\n * input strings are written in, and returns, for each one, the detected\n * language as well as a score indicating the model's confidence that the\n * inferred language is correct. Scores close to 1 indicate high certainty in\n * the result. 
120 languages are supported.\n *\n * See {@link https://docs.microsoft.com//azure/cognitive-services/language-service/concepts/data-limits}\n * for data limits.\n *\n * ### Examples\n *\n * #### Language detection\n *\n * ```js\n * const documents = [<input strings>];\n * const countryHint = \"us\";\n * const results = await client.analyze(\"LanguageDetection\", documents, countryHint);\n *\n * for (let i = 0; i < results.length; i++) {\n * const result = results[i];\n * if (result.error) {\n * // a document has an error instead of results\n * } else {\n * const { name, confidenceScore, iso6391Name } = result.primaryLanguage;\n * }\n * }\n * ```\n *\n * See {@link https://docs.microsoft.com//azure/cognitive-services/language-service/language-detection/overview}\n * for more information on language detection.\n *\n * @param actionName - the name of the action to be performed on the input\n * documents, see ${@link AnalyzeActionName}\n * @param documents - the input documents to be analyzed\n * @param options - optional action parameters and settings for the operation\n *\n * @returns an array of results where each element contains the primary language\n * for the corresponding input document.\n */\n public async analyze<ActionName extends \"LanguageDetection\">(\n actionName: ActionName,\n documents: LanguageDetectionInput[],\n options?: AnalyzeActionParameters<ActionName> & TextAnalysisOperationOptions\n ): Promise<AnalyzeResult<ActionName>>;\n /**\n * Runs a predictive model to determine the language that the passed-in\n * input strings are written in, and returns, for each one, the detected\n * language as well as a score indicating the model's confidence that the\n * inferred language is correct. Scores close to 1 indicate high certainty in\n * the result. 120 languages are supported.\n *\n * See {@link https://docs.microsoft.com//azure/cognitive-services/language-service/concepts/data-limits}\n * for data limits.\n *\n * ### Examples\n *\n * #### Language detection\n *\n * ```js\n * const documents = [<input strings>];\n * const countryHint = \"us\";\n * const results = await client.analyze(\"LanguageDetection\", documents, countryHint);\n *\n * for (const result of results) {\n * if (result.error) {\n * // a document has an error instead of results\n * } else {\n * const { name, confidenceScore, iso6391Name } = result.primaryLanguage;\n * }\n * }\n * ```\n *\n * See {@link https://docs.microsoft.com//azure/cognitive-services/language-service/language-detection/overview}\n * for more information on language detection.\n *\n * @param actionName - the name of the action to be performed on the input\n * documents, see ${@link AnalyzeActionName}\n * @param documents - the input documents to be analyzed\n * @param countryHint - Indicates the country of origin for all of\n * the input strings to assist the model in predicting the language they are\n * written in. If unspecified, this value will be set to the default\n * country hint in `TextAnalysisClientOptions`. If set to an empty string,\n * or the string \"none\", the service will apply a model where the country is\n * explicitly unset. 
The same country hint is applied to all strings in the\n * input collection.\n * @param options - optional action parameters and settings for the operation\n *\n * @returns an array of results where each element contains the primary language\n * for the corresponding input document.\n */\n public async analyze<ActionName extends \"LanguageDetection\">(\n actionName: ActionName,\n documents: string[],\n countryHint?: string,\n options?: AnalyzeActionParameters<ActionName> & TextAnalysisOperationOptions\n ): Promise<AnalyzeResult<ActionName>>;\n /**\n * Runs a predictive model to perform the action of choice on the input\n * documents. See ${@link AnalyzeActionName} for a list of supported\n * actions.\n *\n * The layout of each item in the results array depends on the action chosen.\n * For example, each PIIEntityRecognition document result consists of both\n * `entities` and `redactedText` where the former is a list of all Pii entities\n * in the text and the latter is the original text after all such Pii entities\n * have been redacted from it.\n *\n * See {@link https://docs.microsoft.com//azure/cognitive-services/language-service/concepts/data-limits}\n * for data limits.\n *\n * ### Examples\n *\n * #### Opinion mining\n *\n * ```js\n * const documents = [{\n * id: \"1\",\n * text: \"The food and service aren't the best\",\n * language: \"en\"\n * }];\n * const results = await client.analyze(\"SentimentAnalysis\", documents, {\n * includeOpinionMining: true,\n * });\n *\n * for (const result of results) {\n * if (result.error) {\n * // a document has an error instead of results\n * } else {\n * const { sentiment, confidenceScores, sentences } = result;\n * for (const { sentiment, confidenceScores, opinions } of sentences) {\n * for (const { target, assessments } of opinions) {\n * const { text, sentiment, confidenceScores } = target;\n * for (const { text, sentiment } of assessments) {\n * // Do something\n * }\n * }\n * }\n * }\n * }\n * ```\n *\n * See {@link https://docs.microsoft.com//azure/cognitive-services/language-service/sentiment-opinion-mining/overview}\n * for more information on opinion mining.\n *\n * #### Personally identifiable information\n *\n * ```js\n * const documents = [<input documents>];\n * const categoriesFilter = [KnownPiiCategory.USSocialSecurityNumber];\n * const domainFilter = KnownPiiDomain.Phi;\n * const results = await client.analyze(\"PiiEntityRecognition\", documents, {\n * domainFilter, categoriesFilter\n * });\n *\n * for (const result of results) {\n * if (result.error) {\n * // a document has an error instead of results\n * } else {\n * const { entities, redactedText } = result;\n * for (const { text, category, confidenceScore, length, offset } of entities) {\n * // Do something\n * }\n * }\n * }\n * ```\n *\n * See {@link https://docs.microsoft.com//azure/cognitive-services/language-service/personally-identifiable-information/overview}\n * for more information on personally identifiable information.\n *\n * @param actionName - the name of the action to be performed on the input\n * documents, see ${@link AnalyzeActionName}\n * @param documents - the input documents to be analyzed\n * @param options - optional action parameters and settings for the operation\n *\n * @returns an array of results corresponding to the input documents\n */\n public async analyze<ActionName extends AnalyzeActionName = AnalyzeActionName>(\n actionName: ActionName,\n documents: TextDocumentInput[],\n options?: AnalyzeActionParameters<ActionName> & 
TextAnalysisOperationOptions\n ): Promise<AnalyzeResult<ActionName>>;\n\n /**\n * Runs a predictive model to perform the action of choice on the input\n * strings. See ${@link AnalyzeActionName} for a list of supported\n * actions.\n *\n * The layout of each item in the results array depends on the action chosen.\n * For example, each PIIEntityRecognition document result consists of both\n * `entities` and `redactedText` where the former is a list of all Pii entities\n * in the text and the latter is the original text after all such Pii entities\n * have been redacted from it.\n *\n * See {@link https://docs.microsoft.com//azure/cognitive-services/language-service/concepts/data-limits}\n * for data limits.\n *\n * ### Examples\n *\n * #### Opinion mining\n *\n * ```js\n * const documents = [\"The food and service aren't the best\"];\n * const results = await client.analyze(\"SentimentAnalysis\", documents, {\n * includeOpinionMining: true,\n * });\n *\n * for (const result of results) {\n * if (result.error) {\n * // a document has an error instead of results\n * } else {\n * const { sentiment, confidenceScores, sentences } = result;\n * for (const { sentiment, confidenceScores, opinions } of sentences) {\n * for (const { target, assessments } of opinions) {\n * const { text, sentiment, confidenceScores } = target;\n * for (const { text, sentiment } of assessments) {\n * // Do something\n * }\n * }\n * }\n * }\n * }\n * ```\n *\n * See {@link https://docs.microsoft.com//azure/cognitive-services/language-service/sentiment-opinion-mining/overview}\n * for more information on opinion mining.\n *\n * #### Personally identifiable information\n *\n * ```js\n * const documents = [<input strings>];\n * const languageHint = \"en\";\n * const categoriesFilter = [KnownPiiCategory.USSocialSecurityNumber];\n * const domainFilter = KnownPiiDomain.Phi;\n * const results = await client.analyze(\"PiiEntityRecognition\", documents, languageHint, {\n * domainFilter, categoriesFilter\n * });\n *\n * for (const result of results) {\n * if (result.error) {\n * // a document has an error instead of results\n * } else {\n * const { entities, redactedText } = result;\n * for (const { text, category, confidenceScore, length, offset } of entities) {\n * // Do something\n * }\n * }\n * }\n * ```\n *\n * See {@link https://docs.microsoft.com//azure/cognitive-services/language-service/personally-identifiable-information/overview}\n * for more information on personally identifiable information.\n *\n * @param actionName - the name of the action to be performed on the input\n * documents, see ${@link AnalyzeActionName}\n * @param documents - the input documents to be analyzed\n * @param languageCode - the code of the language that all the input strings are\n * written in. If unspecified, this value will be set to the default\n * language in `TextAnalysisClientOptions`. If set to an empty string,\n * the service will apply a model where the language is explicitly set to\n * \"None\". 
Language support varies per action, for example, more information\n * about the languages supported for Entity Recognition actions can be\n * found in {@link https://docs.microsoft.com//azure/cognitive-services/language-service/named-entity-recognition/language-support}\n * @param options - optional action parameters and settings for the operation\n *\n * @returns an array of results corresponding to the input documents\n */\n public async analyze<ActionName extends AnalyzeActionName = AnalyzeActionName>(\n actionName: ActionName,\n documents: string[],\n languageCode?: string,\n options?: AnalyzeActionParameters<ActionName> & TextAnalysisOperationOptions\n ): Promise<AnalyzeResult<ActionName>>;\n // implementation\n public async analyze<ActionName extends AnalyzeActionName = AnalyzeActionName>(\n actionName: ActionName,\n documents: string[] | LanguageDetectionInput[] | TextDocumentInput[],\n languageOrCountryHintOrOptions?:\n | string\n | (AnalyzeActionParameters<ActionName> & TextAnalysisOperationOptions),\n options?: AnalyzeActionParameters<ActionName> & TextAnalysisOperationOptions\n ): Promise<AnalyzeResult<ActionName>> {\n let realOptions: AnalyzeActionParameters<ActionName> & TextAnalysisOperationOptions;\n\n if (documents.length === 0) {\n throw new Error(\"'documents' must be a non-empty array\");\n }\n\n let realInputs: LanguageDetectionInput[] | TextDocumentInput[];\n if (isStringArray(documents)) {\n if (actionName === \"LanguageDetection\") {\n realInputs = convertToLanguageDetectionInput(\n documents,\n typeof languageOrCountryHintOrOptions === \"string\"\n ? languageOrCountryHintOrOptions\n : this.defaultCountryHint\n );\n } else {\n realInputs = convertToTextDocumentInput(\n documents,\n typeof languageOrCountryHintOrOptions === \"string\"\n ? languageOrCountryHintOrOptions\n : this.defaultLanguage\n );\n }\n realOptions = options || ({} as any);\n } else {\n realInputs = documents;\n realOptions =\n (languageOrCountryHintOrOptions as AnalyzeActionParameters<ActionName> &\n TextAnalysisOperationOptions) || {};\n }\n const { options: operationOptions, rest: action } = getOperationOptions(realOptions);\n return this._tracing.withSpan(\n \"TextAnalysisClient.analyze\",\n operationOptions,\n async (updatedOptions: TextAnalysisOperationOptions) =>\n throwError(\n this._client\n .analyze(\n {\n kind: actionName,\n analysisInput: {\n documents: realInputs,\n },\n parameters: action,\n } as any,\n updatedOptions\n )\n .then(\n (result) =>\n transformActionResult(\n actionName,\n realInputs.map(({ id }) => id),\n result\n ) as AnalyzeResult<ActionName>\n )\n )\n );\n }\n\n /**\n * Performs an array (batch) of actions on the input documents. Each action has\n * a `kind` field that specifies the nature of the action. See ${@link AnalyzeBatchActionNames}\n * for a list of supported actions. 
In addition to `kind`, actions could also\n * have other parameters such as `disableServiceLogs` and `modelVersion`.\n *\n * The results array contains the results for those input actions where each\n * item also has a `kind` field that specifies the type of the results.\n *\n * See {@link https://docs.microsoft.com//azure/cognitive-services/language-service/concepts/data-limits}\n * for data limits.\n *\n * ### Examples\n *\n * #### Key phrase extraction and Pii entity recognition\n *\n * ```js\n * const poller = await client.beginAnalyzeBatch(\n * [{ kind: \"KeyPhraseExtraction\" }, { kind: \"PiiEntityRecognition\" }],\n * documents\n * );\n * const actionResults = await poller.pollUntilDone();\n *\n * for await (const actionResult of actionResults) {\n * if (actionResult.error) {\n * throw new Error(`Unexpected error`);\n * }\n * switch (actionResult.kind) {\n * case \"KeyPhraseExtraction\": {\n * for (const doc of actionResult.results) {\n * // do something\n * }\n * break;\n * }\n * case \"PiiEntityRecognition\": {\n * for (const doc of actionResult.results) {\n * // do something\n * }\n * break;\n * }\n * }\n * }\n * ```\n *\n * @param actions - an array of actions that will be run on the input documents\n * @param documents - the input documents to be analyzed\n * @param languageCode - the code of the language that all the input strings are\n * written in. If unspecified, this value will be set to the default\n * language in `TextAnalysisClientOptions`. If set to an empty string,\n * the service will apply a model where the language is explicitly set to\n * \"None\". Language support varies per action, for example, more information\n * about the languages supported for Entity Recognition actions can be\n * found in {@link https://docs.microsoft.com//azure/cognitive-services/language-service/named-entity-recognition/language-support}\n * @param options - optional settings for the operation\n *\n * @returns an array of results corresponding to the input actions\n */\n async beginAnalyzeBatch(\n actions: AnalyzeBatchAction[],\n documents: string[],\n languageCode?: string,\n options?: BeginAnalyzeBatchOptions\n ): Promise<AnalyzeBatchPoller>;\n /**\n * Performs an array (batch) of actions on the input documents. Each action has\n * a `kind` field that specifies the nature of the action. See ${@link AnalyzeBatchActionNames}\n * for a list of supported actions. 
In addition to `kind`, actions could also\n * have other parameters such as `disableServiceLogs` and `modelVersion`.\n *\n * The results array contains the results for those input actions where each\n * item also has a `kind` field that specifies the type of the results.\n *\n * See {@link https://docs.microsoft.com//azure/cognitive-services/language-service/concepts/data-limits}\n * for data limits.\n *\n * ### Examples\n *\n * #### Keyphrase extraction and Pii entity recognition\n *\n * ```js\n * const poller = await client.beginAnalyzeBatch(\n * [{ kind: \"KeyPhraseExtraction\" }, { kind: \"PiiEntityRecognition\" }],\n * documents\n * );\n * const actionResults = await poller.pollUntilDone();\n *\n * for await (const actionResult of actionResults) {\n * if (actionResult.error) {\n * throw new Error(`Unexpected error`);\n * }\n * switch (actionResult.kind) {\n * case \"KeyPhraseExtraction\": {\n * for (const doc of actionResult.results) {\n * // do something\n * }\n * break;\n * }\n * case \"PiiEntityRecognition\": {\n * for (const doc of actionResult.results) {\n * // do something\n * }\n * break;\n * }\n * }\n * }\n * ```\n *\n * @param actions - an array of actions that will be run on the input documents\n * @param documents - the input documents to be analyzed\n * @param options - optional settings for the operation\n *\n * @returns an array of results corresponding to the input actions\n */\n async beginAnalyzeBatch(\n actions: AnalyzeBatchAction[],\n documents: TextDocumentInput[],\n options?: BeginAnalyzeBatchOptions\n ): Promise<AnalyzeBatchPoller>;\n // implementation\n async beginAnalyzeBatch(\n actions: AnalyzeBatchAction[],\n documents: TextDocumentInput[] | string[],\n languageOrOptions?: BeginAnalyzeBatchOptions | string,\n options: BeginAnalyzeBatchOptions = {}\n ): Promise<AnalyzeBatchPoller> {\n let realOptions: BeginAnalyzeBatchOptions;\n let realInputs: TextDocumentInput[];\n\n if (!Array.isArray(documents) || documents.length === 0) {\n throw new Error(\"'documents' must be a non-empty array\");\n }\n\n if (isStringArray(documents)) {\n const language = (languageOrOptions as string) || this.defaultLanguage;\n realInputs = convertToTextDocumentInput(documents, language);\n realOptions = options;\n } else {\n realInputs = documents;\n realOptions = languageOrOptions as BeginAnalyzeBatchOptions;\n }\n const realActions = actions.map(\n ({ kind, actionName, ...rest }): AnalyzeBatchActionUnion => ({\n kind,\n actionName,\n parameters: rest,\n })\n );\n const { includeStatistics, updateIntervalInMs, displayName, ...rest } = realOptions;\n const lro = createAnalyzeBatchLro({\n client: this._client,\n commonOptions: rest,\n documents: realInputs,\n initialRequestOptions: { displayName },\n pollRequestOptions: { includeStatistics },\n tasks: realActions,\n tracing: this._tracing,\n });\n\n const docIds = realInputs.map(({ id }) => id);\n\n const state = { continuationToken: \"\" };\n\n const poller = await createHttpPoller(lro, {\n intervalInMs: updateIntervalInMs,\n processResult: processAnalyzeResult({\n client: this._client,\n tracing: this._tracing,\n docIds,\n opOptions: { ...rest, includeStatistics },\n state,\n }),\n updateState: createUpdateAnalyzeState(docIds),\n withOperationLocation(operationLocation: string) {\n state.continuationToken = operationLocation;\n },\n });\n\n await poller.poll();\n const id = poller.getOperationState().id;\n return createPollerWithCancellation({\n id,\n client: this._client,\n options,\n poller,\n tracing: this._tracing,\n });\n }\n\n 
/**\n * Creates a poller from the serialized state of another poller. This can be\n * useful when you want to create pollers on a different host or a poller\n * needs to be constructed after the original one is not in scope.\n *\n * @param serializedState - the serialized state of another poller. It is the\n * result of `poller.toString()`\n * @param options - optional settings for the operation\n *\n * # Example\n *\n * `client.beginAnalyzeBatch` returns a promise that will resolve to a poller.\n * The state of the poller can be serialized and used to create another as follows:\n *\n * ```js\n * const serializedState = poller.toString();\n * const rehydratedPoller = await client.createAnalyzeBatchPoller(serializedState);\n * const actionResults = await rehydratedPoller.pollUntilDone();\n * ```\n */\n async restoreAnalyzeBatchPoller(\n serializedState: string,\n options?: RestoreAnalyzeBatchPollerOptions\n ): Promise<AnalyzeBatchPoller>;\n // implementation\n async restoreAnalyzeBatchPoller(\n serializedState: string,\n options: RestoreAnalyzeBatchPollerOptions = {}\n ): Promise<AnalyzeBatchPoller> {\n const { includeStatistics, updateIntervalInMs, ...rest } = options;\n const docIds = getDocIDsFromState(serializedState);\n const lro = createCreateAnalyzeBatchPollerLro({\n client: this._client,\n options: { ...rest, includeStatistics },\n tracing: this._tracing,\n });\n\n const state = { continuationToken: \"\" };\n\n const poller = await createHttpPoller(lro, {\n intervalInMs: updateIntervalInMs,\n restoreFrom: serializedState,\n processResult: processAnalyzeResult({\n client: this._client,\n tracing: this._tracing,\n docIds,\n opOptions: { ...rest, includeStatistics },\n state,\n }),\n updateState: createUpdateAnalyzeState(),\n withOperationLocation(operationLocation: string) {\n state.continuationToken = operationLocation;\n },\n });\n\n await poller.poll();\n const id = poller.getOperationState().id;\n return createPollerWithCancellation({\n id,\n client: this._client,\n options,\n poller,\n tracing: this._tracing,\n });\n }\n}\n"]}
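The serialize/rehydrate flow documented above can be exercised end to end roughly as follows. This is a minimal sketch rather than code taken from the package: the endpoint and key are placeholders, and importing `AzureKeyCredential` from `@azure/core-auth` is an assumption about the consuming project.

```ts
import { AzureKeyCredential } from "@azure/core-auth";
import { TextAnalysisClient } from "@azure/ai-language-text";

// Placeholder endpoint and key; substitute values from your own Language resource.
const client = new TextAnalysisClient("<endpoint>", new AzureKeyCredential("<api-key>"));

async function main(): Promise<void> {
  const documents = ["I had the best day of my life."];
  const poller = await client.beginAnalyzeBatch([{ kind: "KeyPhraseExtraction" }], documents, "en");

  // Persist the poller state somewhere durable ...
  const serializedState = poller.toString();

  // ... and later rebuild a poller from it, possibly in another process.
  const rehydrated = await client.restoreAnalyzeBatchPoller(serializedState);
  const actionResults = await rehydrated.pollUntilDone();
  for await (const actionResult of actionResults) {
    if (actionResult.error) {
      throw new Error("Unexpected error");
    }
    if (actionResult.kind === "KeyPhraseExtraction") {
      for (const doc of actionResult.results) {
        // inspect doc.keyPhrases, or doc.error for per-document failures
      }
    }
  }
}

main().catch(console.error);
```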
@@ -23,31 +23,31 @@ function makeTextAnalysisErrorResult(id, error) {
  /**
  * combines successful and erroneous results into a single array of results and
  * sort them so that the IDs order match that of the input documents array.
- * @param input - the array of documents sent to the service for processing.
+ * @param ids - the array of input document IDs.
  * @param response - the response received from the service.
  * @param options - an options bag that includes functions to process the results.
  */
- function transformDocumentResults(input, response, options) {
+ function transformDocumentResults(ids, response, options) {
  const { processError = makeTextAnalysisErrorResult, processSuccess } = options || {};
  const successResults = processSuccess
  ? response.documents.map(processSuccess)
  : response.documents;
  const unsortedResults = successResults.concat(response.errors.map((error) => processError(error.id, error.error)));
- return sortResponseIdObjects(input, unsortedResults);
+ return sortResponseIdObjects(ids, unsortedResults);
  }
- function toLanguageDetectionResult(documents, results) {
- return transformDocumentResults(documents, results, {
+ function toLanguageDetectionResult(docIds, results) {
+ return transformDocumentResults(docIds, results, {
  processSuccess: (_a) => {
  var { detectedLanguage } = _a, rest = __rest(_a, ["detectedLanguage"]);
  return (Object.assign({ primaryLanguage: detectedLanguage }, rest));
  },
  });
  }
- function toPiiEntityRecognitionResult(documents, results) {
- return transformDocumentResults(documents, results);
+ function toPiiEntityRecognitionResult(docIds, results) {
+ return transformDocumentResults(docIds, results);
  }
- function toSentimentAnalysisResult(documents, results) {
- return transformDocumentResults(documents, results, {
+ function toSentimentAnalysisResult(docIds, results) {
+ return transformDocumentResults(docIds, results, {
  processSuccess: (_a) => {
  var { sentences } = _a, rest = __rest(_a, ["sentences"]);
  return (Object.assign(Object.assign({}, rest), { sentences: sentences.map((sentence) => convertGeneratedSentenceSentiment(sentence, sentences)) }));
@@ -100,37 +100,37 @@ function convertTargetRelationToAssessmentSentiment(targetRelation, sentences) {
  throw new Error(`Pointer "${assessmentPtr}" is not a valid Assessment pointer`);
  }
  }
- function toEntityLinkingResult(documents, results) {
- return transformDocumentResults(documents, results);
+ function toEntityLinkingResult(docIds, results) {
+ return transformDocumentResults(docIds, results);
  }
- function toKeyPhraseExtractionResult(documents, results) {
- return transformDocumentResults(documents, results);
+ function toKeyPhraseExtractionResult(docIds, results) {
+ return transformDocumentResults(docIds, results);
  }
- function toEntityRecognitionResult(documents, results) {
- return transformDocumentResults(documents, results);
+ function toEntityRecognitionResult(docIds, results) {
+ return transformDocumentResults(docIds, results);
  }
  /**
  * @internal
  */
- export function transformActionResult(actionName, input, response) {
+ export function transformActionResult(actionName, docIds, response) {
  switch (response.kind) {
  case "EntityLinkingResults": {
- return toEntityLinkingResult(input, response.results);
+ return toEntityLinkingResult(docIds, response.results);
  }
  case "EntityRecognitionResults": {
- return toEntityRecognitionResult(input, response.results);
+ return toEntityRecognitionResult(docIds, response.results);
  }
  case "KeyPhraseExtractionResults": {
- return toKeyPhraseExtractionResult(input, response.results);
+ return toKeyPhraseExtractionResult(docIds, response.results);
  }
  case "PiiEntityRecognitionResults": {
- return toPiiEntityRecognitionResult(input, response.results);
+ return toPiiEntityRecognitionResult(docIds, response.results);
  }
  case "SentimentAnalysisResults": {
- return toSentimentAnalysisResult(input, response.results);
+ return toSentimentAnalysisResult(docIds, response.results);
  }
  case "LanguageDetectionResults": {
- return toLanguageDetectionResult(input, response.results);
+ return toLanguageDetectionResult(docIds, response.results);
  }
  default: {
  const __exhaust = response;
@@ -188,7 +188,7 @@ export async function throwError(p) {
  throw transformError(e);
  }
  }
- function toHealthcareResult(documents, results) {
+ function toHealthcareResult(docIds, results) {
  function makeHealthcareEntity(entity) {
  const { dataSources } = entity, rest = __rest(entity, ["dataSources"]);
  return Object.assign({ dataSources: dataSources !== null && dataSources !== void 0 ? dataSources : [] }, rest);
@@ -202,7 +202,7 @@ function toHealthcareResult(documents, results) {
  })),
  });
  }
- return transformDocumentResults(documents, results, {
+ return transformDocumentResults(docIds, results, {
  processSuccess: (_a) => {
  var { entities, relations } = _a, rest = __rest(_a, ["entities", "relations"]);
  const newEntities = entities.map(makeHealthcareEntity);
@@ -210,74 +210,69 @@ function toHealthcareResult(documents, results) {
  },
  });
  }
- function toCustomSingleLabelClassificationResult(documents, results) {
- return transformDocumentResults(documents, results, {
+ function toCustomSingleLabelClassificationResult(docIds, results) {
+ return transformDocumentResults(docIds, results, {
  processSuccess: (_a) => {
- var { classification } = _a, rest = __rest(_a, ["classification"]);
- return Object.assign({ classifications: [classification] }, rest);
+ var { class: classification } = _a, rest = __rest(_a, ["class"]);
+ return Object.assign({ classifications: classification }, rest);
  },
  });
  }
  /**
  * @internal
  */
- export function transformAnalyzeBatchResults(documents, response = []) {
+ export function transformAnalyzeBatchResults(docIds, response = []) {
  return response.map((actionData) => {
  const { lastUpdateDateTime: completedOn, actionName, kind } = actionData;
  switch (kind) {
  case "SentimentAnalysisLROResults": {
  const { results } = actionData;
  const { modelVersion, statistics } = results;
- return Object.assign(Object.assign(Object.assign({ kind: "SentimentAnalysis", results: toSentimentAnalysisResult(documents, results), completedOn }, (actionName ? { actionName } : {})), (statistics ? { statistics } : {})), { modelVersion });
+ return Object.assign(Object.assign(Object.assign({ kind: "SentimentAnalysis", results: toSentimentAnalysisResult(docIds, results), completedOn }, (actionName ? { actionName } : {})), (statistics ? { statistics } : {})), { modelVersion });
  }
  case "EntityRecognitionLROResults": {
  const { results } = actionData;
  const { modelVersion, statistics } = results;
- return Object.assign(Object.assign(Object.assign({ kind: "EntityRecognition", results: toEntityRecognitionResult(documents, results), completedOn }, (actionName ? { actionName } : {})), (statistics ? { statistics } : {})), { modelVersion });
+ return Object.assign(Object.assign(Object.assign({ kind: "EntityRecognition", results: toEntityRecognitionResult(docIds, results), completedOn }, (actionName ? { actionName } : {})), (statistics ? { statistics } : {})), { modelVersion });
  }
  case "PiiEntityRecognitionLROResults": {
  const { results } = actionData;
  const { modelVersion, statistics } = results;
- return Object.assign(Object.assign(Object.assign({ kind: "PiiEntityRecognition", results: toPiiEntityRecognitionResult(documents, results), completedOn }, (actionName ? { actionName } : {})), (statistics ? { statistics } : {})), { modelVersion });
+ return Object.assign(Object.assign(Object.assign({ kind: "PiiEntityRecognition", results: toPiiEntityRecognitionResult(docIds, results), completedOn }, (actionName ? { actionName } : {})), (statistics ? { statistics } : {})), { modelVersion });
  }
  case "KeyPhraseExtractionLROResults": {
  const { results } = actionData;
  const { modelVersion, statistics } = results;
- return Object.assign(Object.assign(Object.assign({ kind: "KeyPhraseExtraction", results: toKeyPhraseExtractionResult(documents, results), completedOn }, (actionName ? { actionName } : {})), (statistics ? { statistics } : {})), { modelVersion });
+ return Object.assign(Object.assign(Object.assign({ kind: "KeyPhraseExtraction", results: toKeyPhraseExtractionResult(docIds, results), completedOn }, (actionName ? { actionName } : {})), (statistics ? { statistics } : {})), { modelVersion });
  }
  case "EntityLinkingLROResults": {
  const { results } = actionData;
  const { modelVersion, statistics } = results;
- return Object.assign(Object.assign(Object.assign({ kind: "EntityLinking", results: toEntityLinkingResult(documents, results), completedOn }, (actionName ? { actionName } : {})), (statistics ? { statistics } : {})), { modelVersion });
+ return Object.assign(Object.assign(Object.assign({ kind: "EntityLinking", results: toEntityLinkingResult(docIds, results), completedOn }, (actionName ? { actionName } : {})), (statistics ? { statistics } : {})), { modelVersion });
  }
  case "HealthcareLROResults": {
  const { results } = actionData;
  const { modelVersion, statistics } = results;
- return Object.assign(Object.assign(Object.assign({ kind: "Healthcare", results: toHealthcareResult(documents, results), completedOn }, (actionName ? { actionName } : {})), (statistics ? { statistics } : {})), { modelVersion });
+ return Object.assign(Object.assign(Object.assign({ kind: "Healthcare", results: toHealthcareResult(docIds, results), completedOn }, (actionName ? { actionName } : {})), (statistics ? { statistics } : {})), { modelVersion });
  }
  case "CustomEntityRecognitionLROResults": {
  const { results } = actionData;
  const { deploymentName, projectName, statistics } = results;
- return Object.assign(Object.assign(Object.assign({ kind: "CustomEntityRecognition", results: transformDocumentResults(documents, results), completedOn }, (actionName ? { actionName } : {})), (statistics ? { statistics } : {})), { deploymentName,
+ return Object.assign(Object.assign(Object.assign({ kind: "CustomEntityRecognition", results: transformDocumentResults(docIds, results), completedOn }, (actionName ? { actionName } : {})), (statistics ? { statistics } : {})), { deploymentName,
  projectName });
  }
  case "CustomSingleLabelClassificationLROResults": {
  const { results } = actionData;
  const { deploymentName, projectName, statistics } = results;
- return Object.assign(Object.assign(Object.assign({ kind: "CustomSingleLabelClassification", results: toCustomSingleLabelClassificationResult(documents, results), completedOn }, (actionName ? { actionName } : {})), (statistics ? { statistics } : {})), { deploymentName,
+ return Object.assign(Object.assign(Object.assign({ kind: "CustomSingleLabelClassification", results: toCustomSingleLabelClassificationResult(docIds, results), completedOn }, (actionName ? { actionName } : {})), (statistics ? { statistics } : {})), { deploymentName,
  projectName });
  }
  case "CustomMultiLabelClassificationLROResults": {
  const { results } = actionData;
  const { deploymentName, projectName, statistics } = results;
- return Object.assign(Object.assign(Object.assign({ kind: "CustomMultiLabelClassification", results: transformDocumentResults(documents, results), completedOn }, (actionName ? { actionName } : {})), (statistics ? { statistics } : {})), { deploymentName,
+ return Object.assign(Object.assign(Object.assign({ kind: "CustomMultiLabelClassification", results: toCustomSingleLabelClassificationResult(docIds, results), completedOn }, (actionName ? { actionName } : {})), (statistics ? { statistics } : {})), { deploymentName,
  projectName });
  }
- case "ExtractiveSummarizationLROResults": {
- const { results } = actionData;
- const { modelVersion, statistics } = results;
- return Object.assign(Object.assign(Object.assign({ kind: "ExtractiveSummarization", results: transformDocumentResults(documents, results), completedOn }, (actionName ? { actionName } : {})), (statistics ? { statistics } : {})), { modelVersion });
- }
  default: {
  throw new Error(`Unsupported results kind: ${kind}`);
  }
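The refactor above changes the transform helpers to take the input document IDs (`docIds`, an array of strings) instead of the original input documents; callers derive that list with `realInputs.map(({ id }) => id)`, and `sortResponseIdObjects` then orders results by those IDs. As an illustration of that contract only (the names `mergeAndSortById` and `DocResult` below are hypothetical and not part of the package), the merge-and-reorder step looks roughly like this:

```ts
// Hypothetical sketch of the ordering contract behind transformDocumentResults:
// successes and per-document errors are merged, then returned in input-ID order.
interface DocResult {
  id: string;
}

function mergeAndSortById<T extends DocResult, E extends DocResult>(
  docIds: string[],
  response: { documents: T[]; errors: E[] }
): (T | E)[] {
  const byId = new Map<string, T | E>();
  for (const result of [...response.documents, ...response.errors]) {
    byId.set(result.id, result);
  }
  // Every returned item lines up with the position of its document in the request.
  return docIds.flatMap((id) => {
    const match = byId.get(id);
    return match ? [match] : [];
  });
}

// Example: the service reports document "2" as an error and returns the rest out of order.
const ordered = mergeAndSortById(["1", "2", "3"], {
  documents: [{ id: "3" }, { id: "1" }],
  errors: [{ id: "2" }],
});
// ordered ids: "1", "2", "3"
```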
@@ -1 +1 @@
- {"version":3,"file":"transforms.js","sourceRoot":"","sources":["../../src/transforms.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;;AAqElC,OAAO,EAEL,oBAAoB,EACpB,0BAA0B,EAC1B,qBAAqB,GACtB,MAAM,QAAQ,CAAC;AAChB,OAAO,EAAE,SAAS,EAAE,MAAM,2BAA2B,CAAC;AAEtD;;;GAGG;AACH,SAAS,mBAAmB,CAAC,UAAwC;IACnE,4BAA4B;IAC5B,IAAI,UAAU,CAAC,UAAU,KAAK,SAAS,EAAE;QACvC,OAAO,mBAAmB,CAAC,UAAU,CAAC,UAAU,CAAC,CAAC;KACnD;IAED,yBACK,UAAU,EACb;AACJ,CAAC;AAED,SAAS,2BAA2B,CAAC,EAAU,EAAE,KAAiB;IAChE,OAAO;QACL,EAAE;QACF,KAAK,EAAE,mBAAmB,CAAC,KAAK,CAAC;KAClC,CAAC;AACJ,CAAC;AAED;;;;;;GAMG;AACH,SAAS,wBAAwB,CAK/B,KAA0B,EAC1B,QAGC,EACD,OAGC;IAED,MAAM,EAAE,YAAY,GAAG,2BAA2B,EAAE,cAAc,EAAE,GAAG,OAAO,IAAI,EAAE,CAAC;IACrF,MAAM,cAAc,GAAG,cAAc;QACnC,CAAC,CAAC,QAAQ,CAAC,SAAS,CAAC,GAAG,CAAC,cAAc,CAAC;QACxC,CAAC,CAAC,QAAQ,CAAC,SAAS,CAAC;IACvB,MAAM,eAAe,GACnB,cACD,CAAC,MAAM,CAAC,QAAQ,CAAC,MAAM,CAAC,GAAG,CAAC,CAAC,KAAK,EAAE,EAAE,CAAC,YAAY,CAAC,KAAK,CAAC,EAAE,EAAE,KAAK,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC;IAE9E,OAAO,qBAAqB,CAAC,KAAK,EAAE,eAAe,CAAC,CAAC;AACvD,CAAC;AAED,SAAS,yBAAyB,CAChC,SAAmC,EACnC,OAAyC;IAEzC,OAAO,wBAAwB,CAAC,SAAS,EAAE,OAAO,EAAE;QAClD,cAAc,EAAE,CAAC,EAA6B,EAAE,EAAE;gBAAjC,EAAE,gBAAgB,OAAW,EAAN,IAAI,cAA3B,oBAA6B,CAAF;YAAO,OAAA,iBACjD,eAAe,EAAE,gBAAgB,IAC9B,IAAI,EACP,CAAA;SAAA;KACH,CAAC,CAAC;AACL,CAAC;AAED,SAAS,4BAA4B,CACnC,SAA8B,EAC9B,OAA4C;IAE5C,OAAO,wBAAwB,CAAC,SAAS,EAAE,OAAO,CAAC,CAAC;AACtD,CAAC;AAED,SAAS,yBAAyB,CAChC,SAA8B,EAC9B,OAAyC;IAEzC,OAAO,wBAAwB,CAAC,SAAS,EAAE,OAAO,EAAE;QAClD,cAAc,EAAE,CAAC,EAAsB,EAAE,EAAE;gBAA1B,EAAE,SAAS,OAAW,EAAN,IAAI,cAApB,aAAsB,CAAF;YAAO,OAAA,iCACvC,IAAI,KACP,SAAS,EAAE,SAAS,CAAC,GAAG,CAAC,CAAC,QAAQ,EAAE,EAAE,CACpC,iCAAiC,CAAC,QAAQ,EAAE,SAAS,CAAC,CACvD,IACD,CAAA;SAAA;KACH,CAAC,CAAC;AACL,CAAC;AAED;;;;;;;;GAQG;AACH,SAAS,iCAAiC,CACxC,EAAgE,EAChE,SAAuC;;QADvC,EAAE,OAAO,EAAE,WAAW,EAAE,CAAC,OAAuC,EAAlC,IAAI,cAAlC,0BAAoC,CAAF;IAGlC,uCACK,IAAI,KACP,QAAQ,EACN,MAAA,OAAO,aAAP,OAAO,uBAAP,OAAO,CAAE,GAAG;QACV,wDAAwD;QACxD,CAAC,EAAsC,EAAW,EAAE;gBAAnD,EAAE,SAAS,OAA2B,EAAtB,IAAI,cAApB,aAAsB,CAAF;YAAgC,OAAA,CAAC;gBACpD,MAAM,EAAE,IAAI;gBACZ,WAAW,EAAE,SAAS;qBACnB,MAAM,CAAC,CAAC,QAAQ,EAAE,EAAE,CAAC,QAAQ,CAAC,YAAY,KAAK,YAAY,CAAC;qBAC5D,GAAG,CAAC,CAAC,QAAQ,EAAE,EAAE,CAAC,0CAA0C,CAAC,QAAQ,EAAE,SAAS,CAAC,CAAC;aACtF,CAAC,CAAA;SAAA,CACH,mCAAI,EAAE,IACT;AACJ,CAAC;AAED;;;;;;;;;GASG;AACH,SAAS,0CAA0C,CACjD,cAA8B,EAC9B,SAAuC;;IAEvC,MAAM,aAAa,GAAG,cAAc,CAAC,GAAG,CAAC;IACzC,MAAM,eAAe,GAAoB,oBAAoB,CAAC,aAAa,CAAC,CAAC;IAC7E,MAAM,UAAU,GACd,MAAA,SAAS,aAAT,SAAS,uBAAT,SAAS,CAAG,eAAe,CAAC,QAAQ,EAAE,WAAW,0CAAG,eAAe,CAAC,UAAU,CAAC,CAAC;IAClF,IAAI,UAAU,KAAK,SAAS,EAAE;QAC5B,OAAO,UAAU,CAAC;KACnB;SAAM;QACL,MAAM,IAAI,KAAK,CAAC,YAAY,aAAa,qCAAqC,CAAC,CAAC;KACjF;AACH,CAAC;AAED,SAAS,qBAAqB,CAC5B,SAA8B,EAC9B,OAAqC;IAErC,OAAO,wBAAwB,CAAC,SAAS,EAAE,OAAO,CAAC,CAAC;AACtD,CAAC;AAED,SAAS,2BAA2B,CAClC,SAA8B,EAC9B,OAA2C;IAE3C,OAAO,wBAAwB,CAAC,SAAS,EAAE,OAAO,CAAC,CAAC;AACtD,CAAC;AAED,SAAS,yBAAyB,CAChC,SAA8B,EAC9B,OAAyC;IAEzC,OAAO,wBAAwB,CAAC,SAAS,EAAE,OAAO,CAAC,CAAC;AACtD,CAAC;AAED;;GAEG;AACH,MAAM,UAAU,qBAAqB,CACnC,UAAsB,EACtB,KAAqD,EACrD,QAAyB;IAEzB,QAAQ,QAAQ,CAAC,IAAI,EAAE;QACrB,KAAK,sBAAsB,CAAC,CAAC;YAC3B,OAAO,qBAAqB,CAAC,KAAK,EAAG,QAAoC,CAAC,OAAO,CAAC,CAAC;SACpF;QACD,KAAK,0BAA0B,CAAC,CAAC;YAC/B,OAAO,yBAAyB,CAAC,KAAK,EAAG,QAA+B,CAAC,OAAO,CAAC,CAAC;SACnF;QACD,KAAK,4BAA4B,CAAC,CAAC;YACjC,OAAO,2BAA2B,CAAC,KAAK,EAAG,QAAgC,CAAC,OAAO,CAAC,CAAC;SACtF;QACD,KAAK,6BAA6B,CAAC,CAAC;YAClC,OAAO,4BAA4B,CAAC,KAAK,EAAG,QAA0B,CAAC,OAAO,CAAC,CAAC;SACjF;QACD,KAAK,0BAA0B,CAAC,CAAC;YAC/B,OAAO,yBAAyB,CAAC,KAAK,EAAG,QAAgC,CAAC,OAAO,CAAC,CAAC;SACpF;QACD,KAAK,0BAA0B,CAAC,CAAC;Y
AC/B,OAAO,yBAAyB,CAAC,KAAK,EAAG,QAAwC,CAAC,OAAO,CAAC,CAAC;SAC5F;QACD,OAAO,CAAC,CAAC;YACP,MAAM,SAAS,GAAU,QAAQ,CAAC;YAClC,MAAM,IAAI,KAAK,CAAC,6BAA6B,SAAS,0BAA0B,UAAU,EAAE,CAAC,CAAC;SAC/F;KACF;AACH,CAAC;AAED,SAAS,0BAA0B,CAAC,cAAsB,EAAE,YAAoB;IAC9E,IAAI,OAAO,GAAG,cAAc,CAAC;IAC7B,IAAI,OAAO,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC,KAAK,GAAG,EAAE;QAC7B,OAAO,GAAG,OAAO,GAAG,GAAG,CAAC;KACzB;IACD,OAAO,OAAO,GAAG,GAAG,GAAG,YAAY,CAAC;AACtC,CAAC;AAED;;;;GAIG;AACH,SAAS,cAAc,CAAC,aAAsB;;IAC5C,MAAM,mBAAmB,GAAG,aAK3B,CAAC;IACF,IAAI,CAAC,mBAAmB,CAAC,QAAQ,EAAE;QACjC,MAAM,aAAa,CAAC;KACrB;IACD,MAAM,aAAa,GAAG,MAAA,mBAAmB,CAAC,QAAQ,CAAC,UAAU,0CAAE,KAAK,CAAC;IACrE,IAAI,CAAC,aAAa;QAAE,OAAO,aAAa,CAAC;IACzC,IAAI,YAAY,GAAG,aAAa,CAAC,OAAO,CAAC;IACzC,IAAI,IAAI,GAAG,aAAa,CAAC,IAAI,CAAC;IAC9B,SAAS,MAAM,CAAC,KAAmC;QACjD,MAAM,UAAU,GAAG,KAAK,CAAC,UAAU,CAAC;QACpC,IAAI,UAAU,EAAE;YACd,IAAI,UAAU,CAAC,OAAO,EAAE;gBACtB,YAAY,GAAG,0BAA0B,CAAC,YAAY,EAAE,UAAU,CAAC,OAAO,CAAC,CAAC;aAC7E;YACD,IAAI,UAAU,CAAC,IAAI,EAAE;gBACnB,IAAI,GAAG,UAAU,CAAC,IAAI,CAAC;aACxB;YACD,OAAO,MAAM,CAAC,UAAU,CAAC,CAAC;SAC3B;QACD,OAAO,KAAmB,CAAC;IAC7B,CAAC;IACD,MAAM,CAAC,aAAa,CAAC,CAAC;IACtB,OAAO,IAAI,SAAS,CAAC,YAAY,EAAE;QACjC,IAAI;QACJ,UAAU,EAAE,mBAAmB,CAAC,UAAU;KAC3C,CAAC,CAAC;AACL,CAAC;AAED,MAAM,CAAC,KAAK,UAAU,UAAU,CAAI,CAAa;IAC/C,IAAI;QACF,OAAO,MAAM,CAAC,CAAC;KAChB;IAAC,OAAO,CAAU,EAAE;QACnB,MAAM,cAAc,CAAC,CAAC,CAAC,CAAC;KACzB;AACH,CAAC;AAED,SAAS,kBAAkB,CACzB,SAA8B,EAC9B,OAAkC;IAElC,SAAS,oBAAoB,CAAC,MAAiC;QAC7D,MAAM,EAAE,WAAW,KAAc,MAAM,EAAf,IAAI,UAAK,MAAM,EAAjC,eAAwB,CAAS,CAAC;QACxC,uBACE,WAAW,EAAE,WAAW,aAAX,WAAW,cAAX,WAAW,GAAI,EAAE,IAC3B,IAAI,EACP;IACJ,CAAC;IACD,SAAS,sBAAsB,CAC7B,QAA4B;QAE5B,OAAO,CAAC,QAA4B,EAA4B,EAAE,CAAC,CAAC;YAClE,YAAY,EAAE,QAAQ,CAAC,YAAY;YACnC,KAAK,EAAE,QAAQ,CAAC,QAAQ,CAAC,GAAG,CAC1B,CAAC,IAA8B,EAAgC,EAAE,CAAC,CAAC;gBACjE,MAAM,EAAE,QAAQ,CAAC,0BAA0B,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC;gBACtD,IAAI,EAAE,IAAI,CAAC,IAAI;aAChB,CAAC,CACH;SACF,CAAC,CAAC;IACL,CAAC;IACD,OAAO,wBAAwB,CAC7B,SAAS,EACT,OAAO,EACP;QACE,cAAc,EAAE,CAAC,EAAgC,EAAE,EAAE;gBAApC,EAAE,QAAQ,EAAE,SAAS,OAAW,EAAN,IAAI,cAA9B,yBAAgC,CAAF;YAC7C,MAAM,WAAW,GAAG,QAAQ,CAAC,GAAG,CAAC,oBAAoB,CAAC,CAAC;YACvD,uBACE,QAAQ,EAAE,WAAW,EACrB,eAAe,EAAE,SAAS,CAAC,GAAG,CAAC,sBAAsB,CAAC,WAAW,CAAC,CAAC,IAChE,IAAI,EACP;QACJ,CAAC;KACF,CACF,CAAC;AACJ,CAAC;AAED,SAAS,uCAAuC,CAC9C,SAA8B,EAC9B,OAAuD;IAEvD,OAAO,wBAAwB,CAG7B,SAAS,EAAE,OAAO,EAAE;QACpB,cAAc,EAAE,CAAC,EAA2B,EAAE,EAAE;gBAA/B,EAAE,cAAc,OAAW,EAAN,IAAI,cAAzB,kBAA2B,CAAF;YACxC,uBACE,eAAe,EAAE,CAAC,cAAc,CAAC,IAC9B,IAAI,EACP;QACJ,CAAC;KACF,CAAC,CAAC;AACL,CAAC;AAED;;GAEG;AACH,MAAM,UAAU,4BAA4B,CAC1C,SAA8B,EAC9B,WAAwC,EAAE;IAE1C,OAAO,QAAQ,CAAC,GAAG,CAAC,CAAC,UAAU,EAAE,EAAE;QACjC,MAAM,EAAE,kBAAkB,EAAE,WAAW,EAAE,UAAU,EAAE,IAAI,EAAE,GAAG,UAAU,CAAC;QACzE,QAAQ,IAAsC,EAAE;YAC9C,KAAK,6BAA6B,CAAC,CAAC;gBAClC,MAAM,EAAE,OAAO,EAAE,GAAG,UAAgC,CAAC;gBACrD,MAAM,EAAE,YAAY,EAAE,UAAU,EAAE,GAAG,OAAO,CAAC;gBAC7C,mDACE,IAAI,EAAE,mBAAmB,EACzB,OAAO,EAAE,yBAAyB,CAAC,SAAS,EAAE,OAAO,CAAC,EACtD,WAAW,IACR,CAAC,UAAU,CAAC,CAAC,CAAC,EAAE,UAAU,EAAE,CAAC,CAAC,CAAC,EAAE,CAAC,GAClC,CAAC,UAAU,CAAC,CAAC,CAAC,EAAE,UAAU,EAAE,CAAC,CAAC,CAAC,EAAE,CAAC,KACrC,YAAY,IACZ;aACH;YACD,KAAK,6BAA6B,CAAC,CAAC;gBAClC,MAAM,EAAE,OAAO,EAAE,GAAG,UAAwC,CAAC;gBAC7D,MAAM,EAAE,YAAY,EAAE,UAAU,EAAE,GAAG,OAAO,CAAC;gBAC7C,mDACE,IAAI,EAAE,mBAAmB,EACzB,OAAO,EAAE,yBAAyB,CAAC,SAAS,EAAE,OAAO,CAAC,EACtD,WAAW,IACR,CAAC,UAAU,CAAC,CAAC,CAAC,EAAE,UAAU,EAAE,CAAC,CAAC,CAAC,EAAE,CAAC,GAClC,CAAC,UAAU,CAAC,CAAC,CAAC,EAAE,UAAU,EAAE,CAAC,CAAC,CAAC,EAAE,CAAC,KACrC,YAAY,IACZ;aACH;YACD,KAAK,gCAAgC,CAAC,CAAC;gBACrC,MAAM,EAAE,OAAO,EAAE,GAAG,UAA2C,CAAC;gBAChE,MAA
M,EAAE,YAAY,EAAE,UAAU,EAAE,GAAG,OAAO,CAAC;gBAC7C,mDACE,IAAI,EAAE,sBAAsB,EAC5B,OAAO,EAAE,4BAA4B,CAAC,SAAS,EAAE,OAAO,CAAC,EACzD,WAAW,IACR,CAAC,UAAU,CAAC,CAAC,CAAC,EAAE,UAAU,EAAE,CAAC,CAAC,CAAC,EAAE,CAAC,GAClC,CAAC,UAAU,CAAC,CAAC,CAAC,EAAE,UAAU,EAAE,CAAC,CAAC,CAAC,EAAE,CAAC,KACrC,YAAY,IACZ;aACH;YACD,KAAK,+BAA+B,CAAC,CAAC;gBACpC,MAAM,EAAE,OAAO,EAAE,GAAG,UAA0C,CAAC;gBAC/D,MAAM,EAAE,YAAY,EAAE,UAAU,EAAE,GAAG,OAAO,CAAC;gBAC7C,mDACE,IAAI,EAAE,qBAAqB,EAC3B,OAAO,EAAE,2BAA2B,CAAC,SAAS,EAAE,OAAO,CAAC,EACxD,WAAW,IACR,CAAC,UAAU,CAAC,CAAC,CAAC,EAAE,UAAU,EAAE,CAAC,CAAC,CAAC,EAAE,CAAC,GAClC,CAAC,UAAU,CAAC,CAAC,CAAC,EAAE,UAAU,EAAE,CAAC,CAAC,CAAC,EAAE,CAAC,KACrC,YAAY,IACZ;aACH;YACD,KAAK,yBAAyB,CAAC,CAAC;gBAC9B,MAAM,EAAE,OAAO,EAAE,GAAG,UAAoC,CAAC;gBACzD,MAAM,EAAE,YAAY,EAAE,UAAU,EAAE,GAAG,OAAO,CAAC;gBAC7C,mDACE,IAAI,EAAE,eAAe,EACrB,OAAO,EAAE,qBAAqB,CAAC,SAAS,EAAE,OAAO,CAAC,EAClD,WAAW,IACR,CAAC,UAAU,CAAC,CAAC,CAAC,EAAE,UAAU,EAAE,CAAC,CAAC,CAAC,EAAE,CAAC,GAClC,CAAC,UAAU,CAAC,CAAC,CAAC,EAAE,UAAU,EAAE,CAAC,CAAC,CAAC,EAAE,CAAC,KACrC,YAAY,IACZ;aACH;YACD,KAAK,sBAAsB,CAAC,CAAC;gBAC3B,MAAM,EAAE,OAAO,EAAE,GAAG,UAAiC,CAAC;gBACtD,MAAM,EAAE,YAAY,EAAE,UAAU,EAAE,GAAG,OAAO,CAAC;gBAC7C,mDACE,IAAI,EAAE,YAAY,EAClB,OAAO,EAAE,kBAAkB,CAAC,SAAS,EAAE,OAAO,CAAC,EAC/C,WAAW,IACR,CAAC,UAAU,CAAC,CAAC,CAAC,EAAE,UAAU,EAAE,CAAC,CAAC,CAAC,EAAE,CAAC,GAClC,CAAC,UAAU,CAAC,CAAC,CAAC,EAAE,UAAU,EAAE,CAAC,CAAC,CAAC,EAAE,CAAC,KACrC,YAAY,IACZ;aACH;YACD,KAAK,mCAAmC,CAAC,CAAC;gBACxC,MAAM,EAAE,OAAO,EAAE,GAAG,UAA8C,CAAC;gBACnE,MAAM,EAAE,cAAc,EAAE,WAAW,EAAE,UAAU,EAAE,GAAG,OAAO,CAAC;gBAC5D,mDACE,IAAI,EAAE,yBAAyB,EAC/B,OAAO,EAAE,wBAAwB,CAAC,SAAS,EAAE,OAAO,CAAC,EACrD,WAAW,IACR,CAAC,UAAU,CAAC,CAAC,CAAC,EAAE,UAAU,EAAE,CAAC,CAAC,CAAC,EAAE,CAAC,GAClC,CAAC,UAAU,CAAC,CAAC,CAAC,EAAE,UAAU,EAAE,CAAC,CAAC,CAAC,EAAE,CAAC,KACrC,cAAc;oBACd,WAAW,IACX;aACH;YACD,KAAK,2CAA2C,CAAC,CAAC;gBAChD,MAAM,EAAE,OAAO,EAAE,GAAG,UAAsD,CAAC;gBAC3E,MAAM,EAAE,cAAc,EAAE,WAAW,EAAE,UAAU,EAAE,GAAG,OAAO,CAAC;gBAC5D,mDACE,IAAI,EAAE,iCAAiC,EACvC,OAAO,EAAE,uCAAuC,CAAC,SAAS,EAAE,OAAO,CAAC,EACpE,WAAW,IACR,CAAC,UAAU,CAAC,CAAC,CAAC,EAAE,UAAU,EAAE,CAAC,CAAC,CAAC,EAAE,CAAC,GAClC,CAAC,UAAU,CAAC,CAAC,CAAC,EAAE,UAAU,EAAE,CAAC,CAAC,CAAC,EAAE,CAAC,KACrC,cAAc;oBACd,WAAW,IACX;aACH;YACD,KAAK,0CAA0C,CAAC,CAAC;gBAC/C,MAAM,EAAE,OAAO,EAAE,GAAG,UAAqD,CAAC;gBAC1E,MAAM,EAAE,cAAc,EAAE,WAAW,EAAE,UAAU,EAAE,GAAG,OAAO,CAAC;gBAC5D,mDACE,IAAI,EAAE,gCAAgC,EACtC,OAAO,EAAE,wBAAwB,CAAC,SAAS,EAAE,OAAO,CAAC,EACrD,WAAW,IACR,CAAC,UAAU,CAAC,CAAC,CAAC,EAAE,UAAU,EAAE,CAAC,CAAC,CAAC,EAAE,CAAC,GAClC,CAAC,UAAU,CAAC,CAAC,CAAC,EAAE,UAAU,EAAE,CAAC,CAAC,CAAC,EAAE,CAAC,KACrC,cAAc;oBACd,WAAW,IACX;aACH;YACD,KAAK,mCAAmC,CAAC,CAAC;gBACxC,MAAM,EAAE,OAAO,EAAE,GAAG,UAA8C,CAAC;gBACnE,MAAM,EAAE,YAAY,EAAE,UAAU,EAAE,GAAG,OAAO,CAAC;gBAC7C,mDACE,IAAI,EAAE,yBAAyB,EAC/B,OAAO,EAAE,wBAAwB,CAAC,SAAS,EAAE,OAAO,CAAC,EACrD,WAAW,IACR,CAAC,UAAU,CAAC,CAAC,CAAC,EAAE,UAAU,EAAE,CAAC,CAAC,CAAC,EAAE,CAAC,GAClC,CAAC,UAAU,CAAC,CAAC,CAAC,EAAE,UAAU,EAAE,CAAC,CAAC,CAAC,EAAE,CAAC,KACrC,YAAY,IACZ;aACH;YACD,OAAO,CAAC,CAAC;gBACP,MAAM,IAAI,KAAK,CAAC,6BAA6B,IAAI,EAAE,CAAC,CAAC;aACtD;SACF;IACH,CAAC,CAAC,CAAC;AACL,CAAC","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport {\n AnalyzeActionName,\n AnalyzeBatchResult,\n AnalyzeResult,\n CustomSingleLabelClassificationResult,\n CustomSingleLabelClassificationSuccessResult,\n EntityLinkingResult,\n EntityRecognitionResult,\n HealthcareEntity,\n HealthcareEntityRelation,\n HealthcareEntityRelationRole,\n HealthcareResult,\n HealthcareSuccessResult,\n KeyPhraseExtractionResult,\n 
LanguageDetectionResult,\n Opinion,\n PiiEntityRecognitionResult,\n SentenceSentiment,\n SentimentAnalysisResult,\n TextAnalysisError,\n TextAnalysisErrorResult,\n TextAnalysisSuccessResult,\n} from \"./models\";\nimport {\n AnalyzeResponse,\n AnalyzeTextLROResultUnion,\n AssessmentSentiment,\n CustomEntityRecognitionLROResult,\n CustomMultiLabelClassificationLROResult,\n CustomSingleLabelClassificationLROResult,\n DocumentError,\n EntitiesTaskResult,\n EntityLinkingLROResult,\n EntityLinkingTaskResult,\n EntityRecognitionLROResult,\n ErrorModel,\n ErrorResponse,\n ExtractiveSummarizationLROResult,\n CustomSingleLabelClassificationResult as GeneratedCustomSingleLabelClassificationResult,\n EntityLinkingResult as GeneratedEntityLinkingResult,\n EntitiesResult as GeneratedEntityRecognitionResult,\n HealthcareEntity as GeneratedHealthcareEntity,\n HealthcareResult as GeneratedHealthcareResult,\n KeyPhraseResult as GeneratedKeyPhraseExtractionResult,\n LanguageDetectionResult as GeneratedLanguageDetectionResult,\n PiiResult as GeneratedPiiEntityRecognitionResult,\n SentenceSentiment as GeneratedSentenceSentiment,\n SentimentResponse as GeneratedSentimentAnalysisResult,\n HealthcareEntitiesDocumentResult,\n HealthcareLROResult,\n HealthcareRelation,\n HealthcareRelationEntity,\n InnerErrorModel,\n KeyPhraseExtractionLROResult,\n KeyPhraseTaskResult,\n KnownAnalyzeTextLROResultsKind,\n LanguageDetectionInput,\n LanguageDetectionTaskResult,\n PiiEntityRecognitionLROResult,\n PiiTaskResult,\n SentenceTarget,\n SentimentLROResult,\n SentimentTaskResult,\n SingleClassificationDocumentResult,\n TargetRelation,\n TextDocumentInput,\n} from \"./generated\";\nimport {\n AssessmentIndex,\n parseAssessmentIndex,\n parseHealthcareEntityIndex,\n sortResponseIdObjects,\n} from \"./util\";\nimport { RestError } from \"@azure/core-rest-pipeline\";\n\n/**\n * Helper function for converting nested service error to the unified\n * TextAnalysisError\n */\nfunction toTextAnalysisError(errorModel: ErrorModel | InnerErrorModel): TextAnalysisError {\n // Return the deepest error.\n if (errorModel.innererror !== undefined) {\n return toTextAnalysisError(errorModel.innererror);\n }\n\n return {\n ...errorModel,\n };\n}\n\nfunction makeTextAnalysisErrorResult(id: string, error: ErrorModel): TextAnalysisErrorResult {\n return {\n id,\n error: toTextAnalysisError(error),\n };\n}\n\n/**\n * combines successful and erroneous results into a single array of results and\n * sort them so that the IDs order match that of the input documents array.\n * @param input - the array of documents sent to the service for processing.\n * @param response - the response received from the service.\n * @param options - an options bag that includes functions to process the results.\n */\nfunction transformDocumentResults<\n DocumentSuccess extends TextAnalysisSuccessResult,\n PublicDocumentSuccess extends TextAnalysisSuccessResult = DocumentSuccess,\n TError extends TextAnalysisErrorResult = TextAnalysisErrorResult\n>(\n input: TextDocumentInput[],\n response: {\n documents: DocumentSuccess[];\n errors: DocumentError[];\n },\n options?: {\n processSuccess?: (successResult: DocumentSuccess) => PublicDocumentSuccess;\n processError?: (id: string, error: ErrorModel) => TError;\n }\n): (PublicDocumentSuccess | TextAnalysisErrorResult)[] {\n const { processError = makeTextAnalysisErrorResult, processSuccess } = options || {};\n const successResults = processSuccess\n ? 
response.documents.map(processSuccess)\n : response.documents;\n const unsortedResults = (\n successResults as (PublicDocumentSuccess | TextAnalysisErrorResult)[]\n ).concat(response.errors.map((error) => processError(error.id, error.error)));\n\n return sortResponseIdObjects(input, unsortedResults);\n}\n\nfunction toLanguageDetectionResult(\n documents: LanguageDetectionInput[],\n results: GeneratedLanguageDetectionResult\n): LanguageDetectionResult[] {\n return transformDocumentResults(documents, results, {\n processSuccess: ({ detectedLanguage, ...rest }) => ({\n primaryLanguage: detectedLanguage,\n ...rest,\n }),\n });\n}\n\nfunction toPiiEntityRecognitionResult(\n documents: TextDocumentInput[],\n results: GeneratedPiiEntityRecognitionResult\n): PiiEntityRecognitionResult[] {\n return transformDocumentResults(documents, results);\n}\n\nfunction toSentimentAnalysisResult(\n documents: TextDocumentInput[],\n results: GeneratedSentimentAnalysisResult\n): SentimentAnalysisResult[] {\n return transformDocumentResults(documents, results, {\n processSuccess: ({ sentences, ...rest }) => ({\n ...rest,\n sentences: sentences.map((sentence) =>\n convertGeneratedSentenceSentiment(sentence, sentences)\n ),\n }),\n });\n}\n\n/**\n * Converts a sentence sentiment object returned by the service to another that\n * is user-friendly.\n *\n * @param sentence - The sentence sentiment object to be converted.\n * @param response - The entire response returned by the service.\n * @returns The user-friendly sentence sentiment object.\n * @internal\n */\nfunction convertGeneratedSentenceSentiment(\n { targets, assessments: _, ...rest }: GeneratedSentenceSentiment,\n sentences: GeneratedSentenceSentiment[]\n): SentenceSentiment {\n return {\n ...rest,\n opinions:\n targets?.map(\n // eslint-disable-next-line @typescript-eslint/no-shadow\n ({ relations, ...rest }: SentenceTarget): Opinion => ({\n target: rest,\n assessments: relations\n .filter((relation) => relation.relationType === \"assessment\")\n .map((relation) => convertTargetRelationToAssessmentSentiment(relation, sentences)),\n })\n ) ?? 
[],\n };\n}\n\n/**\n * Converts a target relation object returned by the service to an assessment\n * sentiment object where JSON pointers in the former are realized in the\n * latter.\n *\n * @param targetRelation - The target relation object to be converted.\n * @param response - The entire response returned by the service.\n * @returns The user-friendly assessment sentiment object.\n * @internal\n */\nfunction convertTargetRelationToAssessmentSentiment(\n targetRelation: TargetRelation,\n sentences: GeneratedSentenceSentiment[]\n): AssessmentSentiment {\n const assessmentPtr = targetRelation.ref;\n const assessmentIndex: AssessmentIndex = parseAssessmentIndex(assessmentPtr);\n const assessment =\n sentences?.[assessmentIndex.sentence].assessments?.[assessmentIndex.assessment];\n if (assessment !== undefined) {\n return assessment;\n } else {\n throw new Error(`Pointer \"${assessmentPtr}\" is not a valid Assessment pointer`);\n }\n}\n\nfunction toEntityLinkingResult(\n documents: TextDocumentInput[],\n results: GeneratedEntityLinkingResult\n): EntityLinkingResult[] {\n return transformDocumentResults(documents, results);\n}\n\nfunction toKeyPhraseExtractionResult(\n documents: TextDocumentInput[],\n results: GeneratedKeyPhraseExtractionResult\n): KeyPhraseExtractionResult[] {\n return transformDocumentResults(documents, results);\n}\n\nfunction toEntityRecognitionResult(\n documents: TextDocumentInput[],\n results: GeneratedEntityRecognitionResult\n): EntityRecognitionResult[] {\n return transformDocumentResults(documents, results);\n}\n\n/**\n * @internal\n */\nexport function transformActionResult<ActionName extends AnalyzeActionName>(\n actionName: ActionName,\n input: TextDocumentInput[] | LanguageDetectionInput[],\n response: AnalyzeResponse\n): AnalyzeResult<AnalyzeActionName> {\n switch (response.kind) {\n case \"EntityLinkingResults\": {\n return toEntityLinkingResult(input, (response as EntityLinkingTaskResult).results);\n }\n case \"EntityRecognitionResults\": {\n return toEntityRecognitionResult(input, (response as EntitiesTaskResult).results);\n }\n case \"KeyPhraseExtractionResults\": {\n return toKeyPhraseExtractionResult(input, (response as KeyPhraseTaskResult).results);\n }\n case \"PiiEntityRecognitionResults\": {\n return toPiiEntityRecognitionResult(input, (response as PiiTaskResult).results);\n }\n case \"SentimentAnalysisResults\": {\n return toSentimentAnalysisResult(input, (response as SentimentTaskResult).results);\n }\n case \"LanguageDetectionResults\": {\n return toLanguageDetectionResult(input, (response as LanguageDetectionTaskResult).results);\n }\n default: {\n const __exhaust: never = response;\n throw new Error(`Unsupported results kind: ${__exhaust} for an action of type ${actionName}`);\n }\n }\n}\n\nfunction appendReadableErrorMessage(currentMessage: string, innerMessage: string): string {\n let message = currentMessage;\n if (message.slice(-1) !== \".\") {\n message = message + \".\";\n }\n return message + \" \" + innerMessage;\n}\n\n/**\n * @internal\n * parses incoming errors from the service/\n * @param error - the incoming error\n */\nfunction transformError(errorResponse: unknown): any {\n const strongErrorResponse = errorResponse as {\n response: {\n parsedBody?: ErrorResponse;\n };\n statusCode: number;\n };\n if (!strongErrorResponse.response) {\n throw errorResponse;\n }\n const topLevelError = strongErrorResponse.response.parsedBody?.error;\n if (!topLevelError) return errorResponse;\n let errorMessage = topLevelError.message;\n let code = 
topLevelError.code;\n function unwrap(error: ErrorModel | InnerErrorModel): ErrorModel {\n const innerError = error.innererror;\n if (innerError) {\n if (innerError.message) {\n errorMessage = appendReadableErrorMessage(errorMessage, innerError.message);\n }\n if (innerError.code) {\n code = innerError.code;\n }\n return unwrap(innerError);\n }\n return error as ErrorModel;\n }\n unwrap(topLevelError);\n return new RestError(errorMessage, {\n code,\n statusCode: strongErrorResponse.statusCode,\n });\n}\n\nexport async function throwError<T>(p: Promise<T>): Promise<T> {\n try {\n return await p;\n } catch (e: unknown) {\n throw transformError(e);\n }\n}\n\nfunction toHealthcareResult(\n documents: TextDocumentInput[],\n results: GeneratedHealthcareResult\n): HealthcareResult[] {\n function makeHealthcareEntity(entity: GeneratedHealthcareEntity): HealthcareEntity {\n const { dataSources, ...rest } = entity;\n return {\n dataSources: dataSources ?? [],\n ...rest,\n };\n }\n function makeHealthcareRelation(\n entities: HealthcareEntity[]\n ): (relation: HealthcareRelation) => HealthcareEntityRelation {\n return (relation: HealthcareRelation): HealthcareEntityRelation => ({\n relationType: relation.relationType,\n roles: relation.entities.map(\n (role: HealthcareRelationEntity): HealthcareEntityRelationRole => ({\n entity: entities[parseHealthcareEntityIndex(role.ref)],\n name: role.role,\n })\n ),\n });\n }\n return transformDocumentResults<HealthcareEntitiesDocumentResult, HealthcareSuccessResult>(\n documents,\n results,\n {\n processSuccess: ({ entities, relations, ...rest }) => {\n const newEntities = entities.map(makeHealthcareEntity);\n return {\n entities: newEntities,\n entityRelations: relations.map(makeHealthcareRelation(newEntities)),\n ...rest,\n };\n },\n }\n );\n}\n\nfunction toCustomSingleLabelClassificationResult(\n documents: TextDocumentInput[],\n results: GeneratedCustomSingleLabelClassificationResult\n): CustomSingleLabelClassificationResult[] {\n return transformDocumentResults<\n SingleClassificationDocumentResult,\n CustomSingleLabelClassificationSuccessResult\n >(documents, results, {\n processSuccess: ({ classification, ...rest }) => {\n return {\n classifications: [classification],\n ...rest,\n };\n },\n });\n}\n\n/**\n * @internal\n */\nexport function transformAnalyzeBatchResults(\n documents: TextDocumentInput[],\n response: AnalyzeTextLROResultUnion[] = []\n): AnalyzeBatchResult[] {\n return response.map((actionData) => {\n const { lastUpdateDateTime: completedOn, actionName, kind } = actionData;\n switch (kind as KnownAnalyzeTextLROResultsKind) {\n case \"SentimentAnalysisLROResults\": {\n const { results } = actionData as SentimentLROResult;\n const { modelVersion, statistics } = results;\n return {\n kind: \"SentimentAnalysis\",\n results: toSentimentAnalysisResult(documents, results),\n completedOn,\n ...(actionName ? { actionName } : {}),\n ...(statistics ? { statistics } : {}),\n modelVersion,\n };\n }\n case \"EntityRecognitionLROResults\": {\n const { results } = actionData as EntityRecognitionLROResult;\n const { modelVersion, statistics } = results;\n return {\n kind: \"EntityRecognition\",\n results: toEntityRecognitionResult(documents, results),\n completedOn,\n ...(actionName ? { actionName } : {}),\n ...(statistics ? 
{ statistics } : {}),\n modelVersion,\n };\n }\n case \"PiiEntityRecognitionLROResults\": {\n const { results } = actionData as PiiEntityRecognitionLROResult;\n const { modelVersion, statistics } = results;\n return {\n kind: \"PiiEntityRecognition\",\n results: toPiiEntityRecognitionResult(documents, results),\n completedOn,\n ...(actionName ? { actionName } : {}),\n ...(statistics ? { statistics } : {}),\n modelVersion,\n };\n }\n case \"KeyPhraseExtractionLROResults\": {\n const { results } = actionData as KeyPhraseExtractionLROResult;\n const { modelVersion, statistics } = results;\n return {\n kind: \"KeyPhraseExtraction\",\n results: toKeyPhraseExtractionResult(documents, results),\n completedOn,\n ...(actionName ? { actionName } : {}),\n ...(statistics ? { statistics } : {}),\n modelVersion,\n };\n }\n case \"EntityLinkingLROResults\": {\n const { results } = actionData as EntityLinkingLROResult;\n const { modelVersion, statistics } = results;\n return {\n kind: \"EntityLinking\",\n results: toEntityLinkingResult(documents, results),\n completedOn,\n ...(actionName ? { actionName } : {}),\n ...(statistics ? { statistics } : {}),\n modelVersion,\n };\n }\n case \"HealthcareLROResults\": {\n const { results } = actionData as HealthcareLROResult;\n const { modelVersion, statistics } = results;\n return {\n kind: \"Healthcare\",\n results: toHealthcareResult(documents, results),\n completedOn,\n ...(actionName ? { actionName } : {}),\n ...(statistics ? { statistics } : {}),\n modelVersion,\n };\n }\n case \"CustomEntityRecognitionLROResults\": {\n const { results } = actionData as CustomEntityRecognitionLROResult;\n const { deploymentName, projectName, statistics } = results;\n return {\n kind: \"CustomEntityRecognition\",\n results: transformDocumentResults(documents, results),\n completedOn,\n ...(actionName ? { actionName } : {}),\n ...(statistics ? { statistics } : {}),\n deploymentName,\n projectName,\n };\n }\n case \"CustomSingleLabelClassificationLROResults\": {\n const { results } = actionData as CustomSingleLabelClassificationLROResult;\n const { deploymentName, projectName, statistics } = results;\n return {\n kind: \"CustomSingleLabelClassification\",\n results: toCustomSingleLabelClassificationResult(documents, results),\n completedOn,\n ...(actionName ? { actionName } : {}),\n ...(statistics ? { statistics } : {}),\n deploymentName,\n projectName,\n };\n }\n case \"CustomMultiLabelClassificationLROResults\": {\n const { results } = actionData as CustomMultiLabelClassificationLROResult;\n const { deploymentName, projectName, statistics } = results;\n return {\n kind: \"CustomMultiLabelClassification\",\n results: transformDocumentResults(documents, results),\n completedOn,\n ...(actionName ? { actionName } : {}),\n ...(statistics ? { statistics } : {}),\n deploymentName,\n projectName,\n };\n }\n case \"ExtractiveSummarizationLROResults\": {\n const { results } = actionData as ExtractiveSummarizationLROResult;\n const { modelVersion, statistics } = results;\n return {\n kind: \"ExtractiveSummarization\",\n results: transformDocumentResults(documents, results),\n completedOn,\n ...(actionName ? { actionName } : {}),\n ...(statistics ? { statistics } : {}),\n modelVersion,\n };\n }\n default: {\n throw new Error(`Unsupported results kind: ${kind}`);\n }\n }\n });\n}\n"]}
+ {"version":3,"file":"transforms.js","sourceRoot":"","sources":["../../src/transforms.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;;AAkElC,OAAO,EAEL,oBAAoB,EACpB,0BAA0B,EAC1B,qBAAqB,GACtB,MAAM,QAAQ,CAAC;AAChB,OAAO,EAAE,SAAS,EAAE,MAAM,2BAA2B,CAAC;AAEtD;;;GAGG;AACH,SAAS,mBAAmB,CAAC,UAAwC;IACnE,4BAA4B;IAC5B,IAAI,UAAU,CAAC,UAAU,KAAK,SAAS,EAAE;QACvC,OAAO,mBAAmB,CAAC,UAAU,CAAC,UAAU,CAAC,CAAC;KACnD;IAED,yBACK,UAAU,EACb;AACJ,CAAC;AAED,SAAS,2BAA2B,CAAC,EAAU,EAAE,KAAiB;IAChE,OAAO;QACL,EAAE;QACF,KAAK,EAAE,mBAAmB,CAAC,KAAK,CAAC;KAClC,CAAC;AACJ,CAAC;AAED;;;;;;GAMG;AACH,SAAS,wBAAwB,CAK/B,GAAa,EACb,QAGC,EACD,OAGC;IAED,MAAM,EAAE,YAAY,GAAG,2BAA2B,EAAE,cAAc,EAAE,GAAG,OAAO,IAAI,EAAE,CAAC;IACrF,MAAM,cAAc,GAAG,cAAc;QACnC,CAAC,CAAC,QAAQ,CAAC,SAAS,CAAC,GAAG,CAAC,cAAc,CAAC;QACxC,CAAC,CAAC,QAAQ,CAAC,SAAS,CAAC;IACvB,MAAM,eAAe,GACnB,cACD,CAAC,MAAM,CAAC,QAAQ,CAAC,MAAM,CAAC,GAAG,CAAC,CAAC,KAAK,EAAE,EAAE,CAAC,YAAY,CAAC,KAAK,CAAC,EAAE,EAAE,KAAK,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC;IAE9E,OAAO,qBAAqB,CAAC,GAAG,EAAE,eAAe,CAAC,CAAC;AACrD,CAAC;AAED,SAAS,yBAAyB,CAChC,MAAgB,EAChB,OAAyC;IAEzC,OAAO,wBAAwB,CAAC,MAAM,EAAE,OAAO,EAAE;QAC/C,cAAc,EAAE,CAAC,EAA6B,EAAE,EAAE;gBAAjC,EAAE,gBAAgB,OAAW,EAAN,IAAI,cAA3B,oBAA6B,CAAF;YAAO,OAAA,iBACjD,eAAe,EAAE,gBAAgB,IAC9B,IAAI,EACP,CAAA;SAAA;KACH,CAAC,CAAC;AACL,CAAC;AAED,SAAS,4BAA4B,CACnC,MAAgB,EAChB,OAA4C;IAE5C,OAAO,wBAAwB,CAAC,MAAM,EAAE,OAAO,CAAC,CAAC;AACnD,CAAC;AAED,SAAS,yBAAyB,CAChC,MAAgB,EAChB,OAAyC;IAEzC,OAAO,wBAAwB,CAAC,MAAM,EAAE,OAAO,EAAE;QAC/C,cAAc,EAAE,CAAC,EAAsB,EAAE,EAAE;gBAA1B,EAAE,SAAS,OAAW,EAAN,IAAI,cAApB,aAAsB,CAAF;YAAO,OAAA,iCACvC,IAAI,KACP,SAAS,EAAE,SAAS,CAAC,GAAG,CAAC,CAAC,QAAQ,EAAE,EAAE,CACpC,iCAAiC,CAAC,QAAQ,EAAE,SAAS,CAAC,CACvD,IACD,CAAA;SAAA;KACH,CAAC,CAAC;AACL,CAAC;AAED;;;;;;;;GAQG;AACH,SAAS,iCAAiC,CACxC,EAAgE,EAChE,SAAuC;;QADvC,EAAE,OAAO,EAAE,WAAW,EAAE,CAAC,OAAuC,EAAlC,IAAI,cAAlC,0BAAoC,CAAF;IAGlC,uCACK,IAAI,KACP,QAAQ,EACN,MAAA,OAAO,aAAP,OAAO,uBAAP,OAAO,CAAE,GAAG;QACV,wDAAwD;QACxD,CAAC,EAAsC,EAAW,EAAE;gBAAnD,EAAE,SAAS,OAA2B,EAAtB,IAAI,cAApB,aAAsB,CAAF;YAAgC,OAAA,CAAC;gBACpD,MAAM,EAAE,IAAI;gBACZ,WAAW,EAAE,SAAS;qBACnB,MAAM,CAAC,CAAC,QAAQ,EAAE,EAAE,CAAC,QAAQ,CAAC,YAAY,KAAK,YAAY,CAAC;qBAC5D,GAAG,CAAC,CAAC,QAAQ,EAAE,EAAE,CAAC,0CAA0C,CAAC,QAAQ,EAAE,SAAS,CAAC,CAAC;aACtF,CAAC,CAAA;SAAA,CACH,mCAAI,EAAE,IACT;AACJ,CAAC;AAED;;;;;;;;;GASG;AACH,SAAS,0CAA0C,CACjD,cAA8B,EAC9B,SAAuC;;IAEvC,MAAM,aAAa,GAAG,cAAc,CAAC,GAAG,CAAC;IACzC,MAAM,eAAe,GAAoB,oBAAoB,CAAC,aAAa,CAAC,CAAC;IAC7E,MAAM,UAAU,GACd,MAAA,SAAS,aAAT,SAAS,uBAAT,SAAS,CAAG,eAAe,CAAC,QAAQ,EAAE,WAAW,0CAAG,eAAe,CAAC,UAAU,CAAC,CAAC;IAClF,IAAI,UAAU,KAAK,SAAS,EAAE;QAC5B,OAAO,UAAU,CAAC;KACnB;SAAM;QACL,MAAM,IAAI,KAAK,CAAC,YAAY,aAAa,qCAAqC,CAAC,CAAC;KACjF;AACH,CAAC;AAED,SAAS,qBAAqB,CAC5B,MAAgB,EAChB,OAAqC;IAErC,OAAO,wBAAwB,CAAC,MAAM,EAAE,OAAO,CAAC,CAAC;AACnD,CAAC;AAED,SAAS,2BAA2B,CAClC,MAAgB,EAChB,OAA2C;IAE3C,OAAO,wBAAwB,CAAC,MAAM,EAAE,OAAO,CAAC,CAAC;AACnD,CAAC;AAED,SAAS,yBAAyB,CAChC,MAAgB,EAChB,OAAyC;IAEzC,OAAO,wBAAwB,CAAC,MAAM,EAAE,OAAO,CAAC,CAAC;AACnD,CAAC;AAED;;GAEG;AACH,MAAM,UAAU,qBAAqB,CACnC,UAAsB,EACtB,MAAgB,EAChB,QAAyB;IAEzB,QAAQ,QAAQ,CAAC,IAAI,EAAE;QACrB,KAAK,sBAAsB,CAAC,CAAC;YAC3B,OAAO,qBAAqB,CAAC,MAAM,EAAG,QAAoC,CAAC,OAAO,CAAC,CAAC;SACrF;QACD,KAAK,0BAA0B,CAAC,CAAC;YAC/B,OAAO,yBAAyB,CAAC,MAAM,EAAG,QAA+B,CAAC,OAAO,CAAC,CAAC;SACpF;QACD,KAAK,4BAA4B,CAAC,CAAC;YACjC,OAAO,2BAA2B,CAAC,MAAM,EAAG,QAAgC,CAAC,OAAO,CAAC,CAAC;SACvF;QACD,KAAK,6BAA6B,CAAC,CAAC;YAClC,OAAO,4BAA4B,CAAC,MAAM,EAAG,QAA0B,CAAC,OAAO,CAAC,CAAC;SAClF;QACD,KAAK,0BAA0B,CAAC,CAAC;YAC/B,OAAO,yBAAyB,CAAC,MAAM,EAAG,QAAgC,CAAC,OAAO,CAAC,CAAC;SACrF;QACD,KAAK,0BAA0B,CAAC,CAAC;YAC
/B,OAAO,yBAAyB,CAAC,MAAM,EAAG,QAAwC,CAAC,OAAO,CAAC,CAAC;SAC7F;QACD,OAAO,CAAC,CAAC;YACP,MAAM,SAAS,GAAU,QAAQ,CAAC;YAClC,MAAM,IAAI,KAAK,CAAC,6BAA6B,SAAS,0BAA0B,UAAU,EAAE,CAAC,CAAC;SAC/F;KACF;AACH,CAAC;AAED,SAAS,0BAA0B,CAAC,cAAsB,EAAE,YAAoB;IAC9E,IAAI,OAAO,GAAG,cAAc,CAAC;IAC7B,IAAI,OAAO,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC,KAAK,GAAG,EAAE;QAC7B,OAAO,GAAG,OAAO,GAAG,GAAG,CAAC;KACzB;IACD,OAAO,OAAO,GAAG,GAAG,GAAG,YAAY,CAAC;AACtC,CAAC;AAED;;;;GAIG;AACH,SAAS,cAAc,CAAC,aAAsB;;IAC5C,MAAM,mBAAmB,GAAG,aAK3B,CAAC;IACF,IAAI,CAAC,mBAAmB,CAAC,QAAQ,EAAE;QACjC,MAAM,aAAa,CAAC;KACrB;IACD,MAAM,aAAa,GAAG,MAAA,mBAAmB,CAAC,QAAQ,CAAC,UAAU,0CAAE,KAAK,CAAC;IACrE,IAAI,CAAC,aAAa;QAAE,OAAO,aAAa,CAAC;IACzC,IAAI,YAAY,GAAG,aAAa,CAAC,OAAO,CAAC;IACzC,IAAI,IAAI,GAAG,aAAa,CAAC,IAAI,CAAC;IAC9B,SAAS,MAAM,CAAC,KAAmC;QACjD,MAAM,UAAU,GAAG,KAAK,CAAC,UAAU,CAAC;QACpC,IAAI,UAAU,EAAE;YACd,IAAI,UAAU,CAAC,OAAO,EAAE;gBACtB,YAAY,GAAG,0BAA0B,CAAC,YAAY,EAAE,UAAU,CAAC,OAAO,CAAC,CAAC;aAC7E;YACD,IAAI,UAAU,CAAC,IAAI,EAAE;gBACnB,IAAI,GAAG,UAAU,CAAC,IAAI,CAAC;aACxB;YACD,OAAO,MAAM,CAAC,UAAU,CAAC,CAAC;SAC3B;QACD,OAAO,KAAmB,CAAC;IAC7B,CAAC;IACD,MAAM,CAAC,aAAa,CAAC,CAAC;IACtB,OAAO,IAAI,SAAS,CAAC,YAAY,EAAE;QACjC,IAAI;QACJ,UAAU,EAAE,mBAAmB,CAAC,UAAU;KAC3C,CAAC,CAAC;AACL,CAAC;AAED,MAAM,CAAC,KAAK,UAAU,UAAU,CAAI,CAAa;IAC/C,IAAI;QACF,OAAO,MAAM,CAAC,CAAC;KAChB;IAAC,OAAO,CAAU,EAAE;QACnB,MAAM,cAAc,CAAC,CAAC,CAAC,CAAC;KACzB;AACH,CAAC;AAED,SAAS,kBAAkB,CACzB,MAAgB,EAChB,OAAkC;IAElC,SAAS,oBAAoB,CAAC,MAAiC;QAC7D,MAAM,EAAE,WAAW,KAAc,MAAM,EAAf,IAAI,UAAK,MAAM,EAAjC,eAAwB,CAAS,CAAC;QACxC,uBACE,WAAW,EAAE,WAAW,aAAX,WAAW,cAAX,WAAW,GAAI,EAAE,IAC3B,IAAI,EACP;IACJ,CAAC;IACD,SAAS,sBAAsB,CAC7B,QAA4B;QAE5B,OAAO,CAAC,QAA4B,EAA4B,EAAE,CAAC,CAAC;YAClE,YAAY,EAAE,QAAQ,CAAC,YAAY;YACnC,KAAK,EAAE,QAAQ,CAAC,QAAQ,CAAC,GAAG,CAC1B,CAAC,IAA8B,EAAgC,EAAE,CAAC,CAAC;gBACjE,MAAM,EAAE,QAAQ,CAAC,0BAA0B,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC;gBACtD,IAAI,EAAE,IAAI,CAAC,IAAI;aAChB,CAAC,CACH;SACF,CAAC,CAAC;IACL,CAAC;IACD,OAAO,wBAAwB,CAC7B,MAAM,EACN,OAAO,EACP;QACE,cAAc,EAAE,CAAC,EAAgC,EAAE,EAAE;gBAApC,EAAE,QAAQ,EAAE,SAAS,OAAW,EAAN,IAAI,cAA9B,yBAAgC,CAAF;YAC7C,MAAM,WAAW,GAAG,QAAQ,CAAC,GAAG,CAAC,oBAAoB,CAAC,CAAC;YACvD,uBACE,QAAQ,EAAE,WAAW,EACrB,eAAe,EAAE,SAAS,CAAC,GAAG,CAAC,sBAAsB,CAAC,WAAW,CAAC,CAAC,IAChE,IAAI,EACP;QACJ,CAAC;KACF,CACF,CAAC;AACJ,CAAC;AAED,SAAS,uCAAuC,CAC9C,MAAgB,EAChB,OAA4C;IAE5C,OAAO,wBAAwB,CAG7B,MAAM,EAAE,OAAO,EAAE;QACjB,cAAc,EAAE,CAAC,EAAkC,EAAE,EAAE;gBAAtC,EAAE,KAAK,EAAE,cAAc,OAAW,EAAN,IAAI,cAAhC,SAAkC,CAAF;YAC/C,uBACE,eAAe,EAAE,cAAc,IAC5B,IAAI,EACP;QACJ,CAAC;KACF,CAAC,CAAC;AACL,CAAC;AAED;;GAEG;AACH,MAAM,UAAU,4BAA4B,CAC1C,MAAgB,EAChB,WAAwC,EAAE;IAE1C,OAAO,QAAQ,CAAC,GAAG,CAAC,CAAC,UAAU,EAAE,EAAE;QACjC,MAAM,EAAE,kBAAkB,EAAE,WAAW,EAAE,UAAU,EAAE,IAAI,EAAE,GAAG,UAAU,CAAC;QACzE,QAAQ,IAAsC,EAAE;YAC9C,KAAK,6BAA6B,CAAC,CAAC;gBAClC,MAAM,EAAE,OAAO,EAAE,GAAG,UAAgC,CAAC;gBACrD,MAAM,EAAE,YAAY,EAAE,UAAU,EAAE,GAAG,OAAO,CAAC;gBAC7C,mDACE,IAAI,EAAE,mBAAmB,EACzB,OAAO,EAAE,yBAAyB,CAAC,MAAM,EAAE,OAAO,CAAC,EACnD,WAAW,IACR,CAAC,UAAU,CAAC,CAAC,CAAC,EAAE,UAAU,EAAE,CAAC,CAAC,CAAC,EAAE,CAAC,GAClC,CAAC,UAAU,CAAC,CAAC,CAAC,EAAE,UAAU,EAAE,CAAC,CAAC,CAAC,EAAE,CAAC,KACrC,YAAY,IACZ;aACH;YACD,KAAK,6BAA6B,CAAC,CAAC;gBAClC,MAAM,EAAE,OAAO,EAAE,GAAG,UAAwC,CAAC;gBAC7D,MAAM,EAAE,YAAY,EAAE,UAAU,EAAE,GAAG,OAAO,CAAC;gBAC7C,mDACE,IAAI,EAAE,mBAAmB,EACzB,OAAO,EAAE,yBAAyB,CAAC,MAAM,EAAE,OAAO,CAAC,EACnD,WAAW,IACR,CAAC,UAAU,CAAC,CAAC,CAAC,EAAE,UAAU,EAAE,CAAC,CAAC,CAAC,EAAE,CAAC,GAClC,CAAC,UAAU,CAAC,CAAC,CAAC,EAAE,UAAU,EAAE,CAAC,CAAC,CAAC,EAAE,CAAC,KACrC,YAAY,IACZ;aACH;YACD,KAAK,gCAAgC,CAAC,CAAC;gBACrC,MAAM,EAAE,OAAO,EAAE,GAAG,UAA2C,CAAC;gBAChE,MAAM,E
AAE,YAAY,EAAE,UAAU,EAAE,GAAG,OAAO,CAAC;gBAC7C,mDACE,IAAI,EAAE,sBAAsB,EAC5B,OAAO,EAAE,4BAA4B,CAAC,MAAM,EAAE,OAAO,CAAC,EACtD,WAAW,IACR,CAAC,UAAU,CAAC,CAAC,CAAC,EAAE,UAAU,EAAE,CAAC,CAAC,CAAC,EAAE,CAAC,GAClC,CAAC,UAAU,CAAC,CAAC,CAAC,EAAE,UAAU,EAAE,CAAC,CAAC,CAAC,EAAE,CAAC,KACrC,YAAY,IACZ;aACH;YACD,KAAK,+BAA+B,CAAC,CAAC;gBACpC,MAAM,EAAE,OAAO,EAAE,GAAG,UAA0C,CAAC;gBAC/D,MAAM,EAAE,YAAY,EAAE,UAAU,EAAE,GAAG,OAAO,CAAC;gBAC7C,mDACE,IAAI,EAAE,qBAAqB,EAC3B,OAAO,EAAE,2BAA2B,CAAC,MAAM,EAAE,OAAO,CAAC,EACrD,WAAW,IACR,CAAC,UAAU,CAAC,CAAC,CAAC,EAAE,UAAU,EAAE,CAAC,CAAC,CAAC,EAAE,CAAC,GAClC,CAAC,UAAU,CAAC,CAAC,CAAC,EAAE,UAAU,EAAE,CAAC,CAAC,CAAC,EAAE,CAAC,KACrC,YAAY,IACZ;aACH;YACD,KAAK,yBAAyB,CAAC,CAAC;gBAC9B,MAAM,EAAE,OAAO,EAAE,GAAG,UAAoC,CAAC;gBACzD,MAAM,EAAE,YAAY,EAAE,UAAU,EAAE,GAAG,OAAO,CAAC;gBAC7C,mDACE,IAAI,EAAE,eAAe,EACrB,OAAO,EAAE,qBAAqB,CAAC,MAAM,EAAE,OAAO,CAAC,EAC/C,WAAW,IACR,CAAC,UAAU,CAAC,CAAC,CAAC,EAAE,UAAU,EAAE,CAAC,CAAC,CAAC,EAAE,CAAC,GAClC,CAAC,UAAU,CAAC,CAAC,CAAC,EAAE,UAAU,EAAE,CAAC,CAAC,CAAC,EAAE,CAAC,KACrC,YAAY,IACZ;aACH;YACD,KAAK,sBAAsB,CAAC,CAAC;gBAC3B,MAAM,EAAE,OAAO,EAAE,GAAG,UAAiC,CAAC;gBACtD,MAAM,EAAE,YAAY,EAAE,UAAU,EAAE,GAAG,OAAO,CAAC;gBAC7C,mDACE,IAAI,EAAE,YAAY,EAClB,OAAO,EAAE,kBAAkB,CAAC,MAAM,EAAE,OAAO,CAAC,EAC5C,WAAW,IACR,CAAC,UAAU,CAAC,CAAC,CAAC,EAAE,UAAU,EAAE,CAAC,CAAC,CAAC,EAAE,CAAC,GAClC,CAAC,UAAU,CAAC,CAAC,CAAC,EAAE,UAAU,EAAE,CAAC,CAAC,CAAC,EAAE,CAAC,KACrC,YAAY,IACZ;aACH;YACD,KAAK,mCAAmC,CAAC,CAAC;gBACxC,MAAM,EAAE,OAAO,EAAE,GAAG,UAA8C,CAAC;gBACnE,MAAM,EAAE,cAAc,EAAE,WAAW,EAAE,UAAU,EAAE,GAAG,OAAO,CAAC;gBAC5D,mDACE,IAAI,EAAE,yBAAyB,EAC/B,OAAO,EAAE,wBAAwB,CAAC,MAAM,EAAE,OAAO,CAAC,EAClD,WAAW,IACR,CAAC,UAAU,CAAC,CAAC,CAAC,EAAE,UAAU,EAAE,CAAC,CAAC,CAAC,EAAE,CAAC,GAClC,CAAC,UAAU,CAAC,CAAC,CAAC,EAAE,UAAU,EAAE,CAAC,CAAC,CAAC,EAAE,CAAC,KACrC,cAAc;oBACd,WAAW,IACX;aACH;YACD,KAAK,2CAA2C,CAAC,CAAC;gBAChD,MAAM,EAAE,OAAO,EAAE,GAAG,UAAsD,CAAC;gBAC3E,MAAM,EAAE,cAAc,EAAE,WAAW,EAAE,UAAU,EAAE,GAAG,OAAO,CAAC;gBAC5D,mDACE,IAAI,EAAE,iCAAiC,EACvC,OAAO,EAAE,uCAAuC,CAAC,MAAM,EAAE,OAAO,CAAC,EACjE,WAAW,IACR,CAAC,UAAU,CAAC,CAAC,CAAC,EAAE,UAAU,EAAE,CAAC,CAAC,CAAC,EAAE,CAAC,GAClC,CAAC,UAAU,CAAC,CAAC,CAAC,EAAE,UAAU,EAAE,CAAC,CAAC,CAAC,EAAE,CAAC,KACrC,cAAc;oBACd,WAAW,IACX;aACH;YACD,KAAK,0CAA0C,CAAC,CAAC;gBAC/C,MAAM,EAAE,OAAO,EAAE,GAAG,UAAqD,CAAC;gBAC1E,MAAM,EAAE,cAAc,EAAE,WAAW,EAAE,UAAU,EAAE,GAAG,OAAO,CAAC;gBAC5D,mDACE,IAAI,EAAE,gCAAgC,EACtC,OAAO,EAAE,uCAAuC,CAAC,MAAM,EAAE,OAAO,CAAC,EACjE,WAAW,IACR,CAAC,UAAU,CAAC,CAAC,CAAC,EAAE,UAAU,EAAE,CAAC,CAAC,CAAC,EAAE,CAAC,GAClC,CAAC,UAAU,CAAC,CAAC,CAAC,EAAE,UAAU,EAAE,CAAC,CAAC,CAAC,EAAE,CAAC,KACrC,cAAc;oBACd,WAAW,IACX;aACH;YACD,OAAO,CAAC,CAAC;gBACP,MAAM,IAAI,KAAK,CAAC,6BAA6B,IAAI,EAAE,CAAC,CAAC;aACtD;SACF;IACH,CAAC,CAAC,CAAC;AACL,CAAC","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport {\n AnalyzeActionName,\n AnalyzeBatchResult,\n AnalyzeResult,\n CustomSingleLabelClassificationResult,\n CustomSingleLabelClassificationSuccessResult,\n EntityLinkingResult,\n EntityRecognitionResult,\n HealthcareEntity,\n HealthcareEntityRelation,\n HealthcareEntityRelationRole,\n HealthcareResult,\n HealthcareSuccessResult,\n KeyPhraseExtractionResult,\n LanguageDetectionResult,\n Opinion,\n PiiEntityRecognitionResult,\n SentenceSentiment,\n SentimentAnalysisResult,\n TextAnalysisError,\n TextAnalysisErrorResult,\n TextAnalysisSuccessResult,\n} from \"./models\";\nimport {\n AnalyzeResponse,\n AnalyzeTextLROResultUnion,\n AssessmentSentiment,\n ClassificationDocumentResult,\n CustomEntityRecognitionLROResult,\n 
CustomMultiLabelClassificationLROResult,\n CustomSingleLabelClassificationLROResult,\n DocumentError,\n EntitiesTaskResult,\n EntityLinkingLROResult,\n EntityLinkingTaskResult,\n EntityRecognitionLROResult,\n ErrorModel,\n ErrorResponse,\n CustomLabelClassificationResult as GeneratedCustomClassificationResult,\n EntityLinkingResult as GeneratedEntityLinkingResult,\n EntitiesResult as GeneratedEntityRecognitionResult,\n HealthcareEntity as GeneratedHealthcareEntity,\n HealthcareResult as GeneratedHealthcareResult,\n KeyPhraseResult as GeneratedKeyPhraseExtractionResult,\n LanguageDetectionResult as GeneratedLanguageDetectionResult,\n PiiResult as GeneratedPiiEntityRecognitionResult,\n SentenceSentiment as GeneratedSentenceSentiment,\n SentimentResponse as GeneratedSentimentAnalysisResult,\n HealthcareEntitiesDocumentResult,\n HealthcareLROResult,\n HealthcareRelation,\n HealthcareRelationEntity,\n InnerErrorModel,\n KeyPhraseExtractionLROResult,\n KeyPhraseTaskResult,\n KnownAnalyzeTextLROResultsKind,\n LanguageDetectionTaskResult,\n PiiEntityRecognitionLROResult,\n PiiTaskResult,\n SentenceTarget,\n SentimentLROResult,\n SentimentTaskResult,\n TargetRelation,\n} from \"./generated\";\nimport {\n AssessmentIndex,\n parseAssessmentIndex,\n parseHealthcareEntityIndex,\n sortResponseIdObjects,\n} from \"./util\";\nimport { RestError } from \"@azure/core-rest-pipeline\";\n\n/**\n * Helper function for converting nested service error to the unified\n * TextAnalysisError\n */\nfunction toTextAnalysisError(errorModel: ErrorModel | InnerErrorModel): TextAnalysisError {\n // Return the deepest error.\n if (errorModel.innererror !== undefined) {\n return toTextAnalysisError(errorModel.innererror);\n }\n\n return {\n ...errorModel,\n };\n}\n\nfunction makeTextAnalysisErrorResult(id: string, error: ErrorModel): TextAnalysisErrorResult {\n return {\n id,\n error: toTextAnalysisError(error),\n };\n}\n\n/**\n * combines successful and erroneous results into a single array of results and\n * sort them so that the IDs order match that of the input documents array.\n * @param ids - the array of input document IDs.\n * @param response - the response received from the service.\n * @param options - an options bag that includes functions to process the results.\n */\nfunction transformDocumentResults<\n DocumentSuccess extends TextAnalysisSuccessResult,\n PublicDocumentSuccess extends TextAnalysisSuccessResult = DocumentSuccess,\n TError extends TextAnalysisErrorResult = TextAnalysisErrorResult\n>(\n ids: string[],\n response: {\n documents: DocumentSuccess[];\n errors: DocumentError[];\n },\n options?: {\n processSuccess?: (successResult: DocumentSuccess) => PublicDocumentSuccess;\n processError?: (id: string, error: ErrorModel) => TError;\n }\n): (PublicDocumentSuccess | TextAnalysisErrorResult)[] {\n const { processError = makeTextAnalysisErrorResult, processSuccess } = options || {};\n const successResults = processSuccess\n ? 
response.documents.map(processSuccess)\n : response.documents;\n const unsortedResults = (\n successResults as (PublicDocumentSuccess | TextAnalysisErrorResult)[]\n ).concat(response.errors.map((error) => processError(error.id, error.error)));\n\n return sortResponseIdObjects(ids, unsortedResults);\n}\n\nfunction toLanguageDetectionResult(\n docIds: string[],\n results: GeneratedLanguageDetectionResult\n): LanguageDetectionResult[] {\n return transformDocumentResults(docIds, results, {\n processSuccess: ({ detectedLanguage, ...rest }) => ({\n primaryLanguage: detectedLanguage,\n ...rest,\n }),\n });\n}\n\nfunction toPiiEntityRecognitionResult(\n docIds: string[],\n results: GeneratedPiiEntityRecognitionResult\n): PiiEntityRecognitionResult[] {\n return transformDocumentResults(docIds, results);\n}\n\nfunction toSentimentAnalysisResult(\n docIds: string[],\n results: GeneratedSentimentAnalysisResult\n): SentimentAnalysisResult[] {\n return transformDocumentResults(docIds, results, {\n processSuccess: ({ sentences, ...rest }) => ({\n ...rest,\n sentences: sentences.map((sentence) =>\n convertGeneratedSentenceSentiment(sentence, sentences)\n ),\n }),\n });\n}\n\n/**\n * Converts a sentence sentiment object returned by the service to another that\n * is user-friendly.\n *\n * @param sentence - The sentence sentiment object to be converted.\n * @param response - The entire response returned by the service.\n * @returns The user-friendly sentence sentiment object.\n * @internal\n */\nfunction convertGeneratedSentenceSentiment(\n { targets, assessments: _, ...rest }: GeneratedSentenceSentiment,\n sentences: GeneratedSentenceSentiment[]\n): SentenceSentiment {\n return {\n ...rest,\n opinions:\n targets?.map(\n // eslint-disable-next-line @typescript-eslint/no-shadow\n ({ relations, ...rest }: SentenceTarget): Opinion => ({\n target: rest,\n assessments: relations\n .filter((relation) => relation.relationType === \"assessment\")\n .map((relation) => convertTargetRelationToAssessmentSentiment(relation, sentences)),\n })\n ) ?? 
[],\n };\n}\n\n/**\n * Converts a target relation object returned by the service to an assessment\n * sentiment object where JSON pointers in the former are realized in the\n * latter.\n *\n * @param targetRelation - The target relation object to be converted.\n * @param response - The entire response returned by the service.\n * @returns The user-friendly assessment sentiment object.\n * @internal\n */\nfunction convertTargetRelationToAssessmentSentiment(\n targetRelation: TargetRelation,\n sentences: GeneratedSentenceSentiment[]\n): AssessmentSentiment {\n const assessmentPtr = targetRelation.ref;\n const assessmentIndex: AssessmentIndex = parseAssessmentIndex(assessmentPtr);\n const assessment =\n sentences?.[assessmentIndex.sentence].assessments?.[assessmentIndex.assessment];\n if (assessment !== undefined) {\n return assessment;\n } else {\n throw new Error(`Pointer \"${assessmentPtr}\" is not a valid Assessment pointer`);\n }\n}\n\nfunction toEntityLinkingResult(\n docIds: string[],\n results: GeneratedEntityLinkingResult\n): EntityLinkingResult[] {\n return transformDocumentResults(docIds, results);\n}\n\nfunction toKeyPhraseExtractionResult(\n docIds: string[],\n results: GeneratedKeyPhraseExtractionResult\n): KeyPhraseExtractionResult[] {\n return transformDocumentResults(docIds, results);\n}\n\nfunction toEntityRecognitionResult(\n docIds: string[],\n results: GeneratedEntityRecognitionResult\n): EntityRecognitionResult[] {\n return transformDocumentResults(docIds, results);\n}\n\n/**\n * @internal\n */\nexport function transformActionResult<ActionName extends AnalyzeActionName>(\n actionName: ActionName,\n docIds: string[],\n response: AnalyzeResponse\n): AnalyzeResult<AnalyzeActionName> {\n switch (response.kind) {\n case \"EntityLinkingResults\": {\n return toEntityLinkingResult(docIds, (response as EntityLinkingTaskResult).results);\n }\n case \"EntityRecognitionResults\": {\n return toEntityRecognitionResult(docIds, (response as EntitiesTaskResult).results);\n }\n case \"KeyPhraseExtractionResults\": {\n return toKeyPhraseExtractionResult(docIds, (response as KeyPhraseTaskResult).results);\n }\n case \"PiiEntityRecognitionResults\": {\n return toPiiEntityRecognitionResult(docIds, (response as PiiTaskResult).results);\n }\n case \"SentimentAnalysisResults\": {\n return toSentimentAnalysisResult(docIds, (response as SentimentTaskResult).results);\n }\n case \"LanguageDetectionResults\": {\n return toLanguageDetectionResult(docIds, (response as LanguageDetectionTaskResult).results);\n }\n default: {\n const __exhaust: never = response;\n throw new Error(`Unsupported results kind: ${__exhaust} for an action of type ${actionName}`);\n }\n }\n}\n\nfunction appendReadableErrorMessage(currentMessage: string, innerMessage: string): string {\n let message = currentMessage;\n if (message.slice(-1) !== \".\") {\n message = message + \".\";\n }\n return message + \" \" + innerMessage;\n}\n\n/**\n * @internal\n * parses incoming errors from the service/\n * @param error - the incoming error\n */\nfunction transformError(errorResponse: unknown): any {\n const strongErrorResponse = errorResponse as {\n response: {\n parsedBody?: ErrorResponse;\n };\n statusCode: number;\n };\n if (!strongErrorResponse.response) {\n throw errorResponse;\n }\n const topLevelError = strongErrorResponse.response.parsedBody?.error;\n if (!topLevelError) return errorResponse;\n let errorMessage = topLevelError.message;\n let code = topLevelError.code;\n function unwrap(error: ErrorModel | InnerErrorModel): 
ErrorModel {\n const innerError = error.innererror;\n if (innerError) {\n if (innerError.message) {\n errorMessage = appendReadableErrorMessage(errorMessage, innerError.message);\n }\n if (innerError.code) {\n code = innerError.code;\n }\n return unwrap(innerError);\n }\n return error as ErrorModel;\n }\n unwrap(topLevelError);\n return new RestError(errorMessage, {\n code,\n statusCode: strongErrorResponse.statusCode,\n });\n}\n\nexport async function throwError<T>(p: Promise<T>): Promise<T> {\n try {\n return await p;\n } catch (e: unknown) {\n throw transformError(e);\n }\n}\n\nfunction toHealthcareResult(\n docIds: string[],\n results: GeneratedHealthcareResult\n): HealthcareResult[] {\n function makeHealthcareEntity(entity: GeneratedHealthcareEntity): HealthcareEntity {\n const { dataSources, ...rest } = entity;\n return {\n dataSources: dataSources ?? [],\n ...rest,\n };\n }\n function makeHealthcareRelation(\n entities: HealthcareEntity[]\n ): (relation: HealthcareRelation) => HealthcareEntityRelation {\n return (relation: HealthcareRelation): HealthcareEntityRelation => ({\n relationType: relation.relationType,\n roles: relation.entities.map(\n (role: HealthcareRelationEntity): HealthcareEntityRelationRole => ({\n entity: entities[parseHealthcareEntityIndex(role.ref)],\n name: role.role,\n })\n ),\n });\n }\n return transformDocumentResults<HealthcareEntitiesDocumentResult, HealthcareSuccessResult>(\n docIds,\n results,\n {\n processSuccess: ({ entities, relations, ...rest }) => {\n const newEntities = entities.map(makeHealthcareEntity);\n return {\n entities: newEntities,\n entityRelations: relations.map(makeHealthcareRelation(newEntities)),\n ...rest,\n };\n },\n }\n );\n}\n\nfunction toCustomSingleLabelClassificationResult(\n docIds: string[],\n results: GeneratedCustomClassificationResult\n): CustomSingleLabelClassificationResult[] {\n return transformDocumentResults<\n ClassificationDocumentResult,\n CustomSingleLabelClassificationSuccessResult\n >(docIds, results, {\n processSuccess: ({ class: classification, ...rest }) => {\n return {\n classifications: classification,\n ...rest,\n };\n },\n });\n}\n\n/**\n * @internal\n */\nexport function transformAnalyzeBatchResults(\n docIds: string[],\n response: AnalyzeTextLROResultUnion[] = []\n): AnalyzeBatchResult[] {\n return response.map((actionData) => {\n const { lastUpdateDateTime: completedOn, actionName, kind } = actionData;\n switch (kind as KnownAnalyzeTextLROResultsKind) {\n case \"SentimentAnalysisLROResults\": {\n const { results } = actionData as SentimentLROResult;\n const { modelVersion, statistics } = results;\n return {\n kind: \"SentimentAnalysis\",\n results: toSentimentAnalysisResult(docIds, results),\n completedOn,\n ...(actionName ? { actionName } : {}),\n ...(statistics ? { statistics } : {}),\n modelVersion,\n };\n }\n case \"EntityRecognitionLROResults\": {\n const { results } = actionData as EntityRecognitionLROResult;\n const { modelVersion, statistics } = results;\n return {\n kind: \"EntityRecognition\",\n results: toEntityRecognitionResult(docIds, results),\n completedOn,\n ...(actionName ? { actionName } : {}),\n ...(statistics ? { statistics } : {}),\n modelVersion,\n };\n }\n case \"PiiEntityRecognitionLROResults\": {\n const { results } = actionData as PiiEntityRecognitionLROResult;\n const { modelVersion, statistics } = results;\n return {\n kind: \"PiiEntityRecognition\",\n results: toPiiEntityRecognitionResult(docIds, results),\n completedOn,\n ...(actionName ? 
{ actionName } : {}),\n ...(statistics ? { statistics } : {}),\n modelVersion,\n };\n }\n case \"KeyPhraseExtractionLROResults\": {\n const { results } = actionData as KeyPhraseExtractionLROResult;\n const { modelVersion, statistics } = results;\n return {\n kind: \"KeyPhraseExtraction\",\n results: toKeyPhraseExtractionResult(docIds, results),\n completedOn,\n ...(actionName ? { actionName } : {}),\n ...(statistics ? { statistics } : {}),\n modelVersion,\n };\n }\n case \"EntityLinkingLROResults\": {\n const { results } = actionData as EntityLinkingLROResult;\n const { modelVersion, statistics } = results;\n return {\n kind: \"EntityLinking\",\n results: toEntityLinkingResult(docIds, results),\n completedOn,\n ...(actionName ? { actionName } : {}),\n ...(statistics ? { statistics } : {}),\n modelVersion,\n };\n }\n case \"HealthcareLROResults\": {\n const { results } = actionData as HealthcareLROResult;\n const { modelVersion, statistics } = results;\n return {\n kind: \"Healthcare\",\n results: toHealthcareResult(docIds, results),\n completedOn,\n ...(actionName ? { actionName } : {}),\n ...(statistics ? { statistics } : {}),\n modelVersion,\n };\n }\n case \"CustomEntityRecognitionLROResults\": {\n const { results } = actionData as CustomEntityRecognitionLROResult;\n const { deploymentName, projectName, statistics } = results;\n return {\n kind: \"CustomEntityRecognition\",\n results: transformDocumentResults(docIds, results),\n completedOn,\n ...(actionName ? { actionName } : {}),\n ...(statistics ? { statistics } : {}),\n deploymentName,\n projectName,\n };\n }\n case \"CustomSingleLabelClassificationLROResults\": {\n const { results } = actionData as CustomSingleLabelClassificationLROResult;\n const { deploymentName, projectName, statistics } = results;\n return {\n kind: \"CustomSingleLabelClassification\",\n results: toCustomSingleLabelClassificationResult(docIds, results),\n completedOn,\n ...(actionName ? { actionName } : {}),\n ...(statistics ? { statistics } : {}),\n deploymentName,\n projectName,\n };\n }\n case \"CustomMultiLabelClassificationLROResults\": {\n const { results } = actionData as CustomMultiLabelClassificationLROResult;\n const { deploymentName, projectName, statistics } = results;\n return {\n kind: \"CustomMultiLabelClassification\",\n results: toCustomSingleLabelClassificationResult(docIds, results),\n completedOn,\n ...(actionName ? { actionName } : {}),\n ...(statistics ? { statistics } : {}),\n deploymentName,\n projectName,\n };\n }\n default: {\n throw new Error(`Unsupported results kind: ${kind}`);\n }\n }\n });\n}\n"]}
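
Note on the transformer changes above: the rewritten helpers unwrap nested `innererror` chains from the service into a single readable `RestError`, appending each inner message and keeping the deepest error code. The following is a minimal standalone sketch of that unwrapping idea only; `InnerError` and `flattenError` are illustrative names, not the package's internal API.

// Sketch only: mirrors the innererror-unwrapping pattern used by the
// transformers above. InnerError/flattenError are hypothetical names.
interface InnerError {
  code?: string;
  message?: string;
  innererror?: InnerError;
}

function flattenError(top: InnerError): { code?: string; message: string } {
  let message = top.message ?? "";
  let code = top.code;
  let current = top.innererror;
  while (current) {
    if (current.message) {
      // Append inner messages so the final error reads as one message chain.
      if (!message.endsWith(".")) message += ".";
      message += " " + current.message;
    }
    if (current.code) code = current.code; // keep the deepest error code
    current = current.innererror;
  }
  return { code, message };
}
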
@@ -7,16 +7,16 @@ import { logger } from "./logger";
  * return a sorted array of results.
  *
  * @internal
- * @param sortedArray - An array of entries sorted by `id`
+ * @param sortedIds - An array of sorted IDs
  * @param unsortedArray - An array of entries that contain `id` but are not sorted
  */
- export function sortResponseIdObjects(sortedArray, unsortedArray) {
+ export function sortResponseIdObjects(sortedIds, unsortedArray) {
  const unsortedMap = new Map();
  for (const item of unsortedArray) {
  unsortedMap.set(item.id, item);
  }
- if (unsortedArray.length !== sortedArray.length) {
- const ordinal = unsortedArray.length > sortedArray.length ? "more" : "fewer";
+ if (unsortedArray.length !== sortedIds.length) {
+ const ordinal = unsortedArray.length > sortedIds.length ? "more" : "fewer";
  logger.warning(`The service returned ${ordinal} responses than inputs. Some errors may be treated as fatal.`);
  }
  const result = [];
@@ -25,8 +25,8 @@ export function sortResponseIdObjects(sortedArray, unsortedArray) {
  * items than unsortedArray so it is ok to ignore the case when a sorted item
  * ID is not found in `unsortedMap`.
  */
- for (const sortedItem of sortedArray) {
- const item = unsortedMap.get(sortedItem.id);
+ for (const id of sortedIds) {
+ const item = unsortedMap.get(id);
  if (item) {
  result.push(item);
  }
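
The two hunks above switch `sortResponseIdObjects` from taking objects carrying an `id` to taking the input document IDs directly as strings. A minimal sketch of the resulting ID-keyed ordering, under an illustrative `sortById` name rather than the package's internal helper:

// Sketch of the reworked ordering logic; sortById is an illustrative
// stand-in for the internal sortResponseIdObjects helper.
interface IdObject {
  id: string;
}

function sortById<U extends IdObject>(sortedIds: string[], unsorted: U[]): U[] {
  const byId = new Map<string, U>();
  for (const item of unsorted) {
    byId.set(item.id, item);
  }
  const result: U[] = [];
  for (const id of sortedIds) {
    const item = byId.get(id);
    // IDs with no matching response (e.g. returned on a later page) are skipped.
    if (item) {
      result.push(item);
    }
  }
  return result;
}

// sortById(["0", "1", "2"], [{ id: "2" }, { id: "0" }]) => [{ id: "0" }, { id: "2" }]
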
@@ -100,11 +100,10 @@ export function convertToLanguageDetectionInput(inputs, countryHint) {
  * @internal
  */
  export function getOperationOptions(options) {
- const { abortSignal, apiVersion, includeStatistics, onResponse, requestOptions, serializerOptions, tracingOptions } = options, rest = __rest(options, ["abortSignal", "apiVersion", "includeStatistics", "onResponse", "requestOptions", "serializerOptions", "tracingOptions"]);
+ const { abortSignal, includeStatistics, onResponse, requestOptions, serializerOptions, tracingOptions } = options, rest = __rest(options, ["abortSignal", "includeStatistics", "onResponse", "requestOptions", "serializerOptions", "tracingOptions"]);
  return {
  options: {
  abortSignal,
- apiVersion,
  includeStatistics,
  onResponse,
  requestOptions,
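
This hunk removes `apiVersion` from the options that `getOperationOptions` extracts for each call, so it is no longer part of the per-operation options bag. A sketch of the option-splitting shape after the change; `OperationOptions` and `splitOptions` are stand-in names for illustration, not the package's `TextAnalysisOperationOptions`:

// Sketch of the splitting pattern after this change: the per-call options bag
// no longer includes apiVersion. OperationOptions is a hypothetical stand-in.
interface OperationOptions {
  abortSignal?: AbortSignal;
  includeStatistics?: boolean;
  onResponse?: (response: unknown) => void;
  requestOptions?: Record<string, unknown>;
  serializerOptions?: Record<string, unknown>;
  tracingOptions?: Record<string, unknown>;
}

function splitOptions<T extends OperationOptions>(options: T) {
  const {
    abortSignal,
    includeStatistics,
    onResponse,
    requestOptions,
    serializerOptions,
    tracingOptions,
    ...rest
  } = options;
  return {
    // Options the underlying service call understands...
    options: {
      abortSignal,
      includeStatistics,
      onResponse,
      requestOptions,
      serializerOptions,
      tracingOptions,
    },
    // ...and everything else (e.g. action-specific parameters) passed through.
    rest,
  };
}
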
@@ -1 +1 @@
- {"version":3,"file":"util.js","sourceRoot":"","sources":["../../src/util.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;;AAIlC,OAAO,EAAE,MAAM,EAAE,MAAM,UAAU,CAAC;AAMlC;;;;;;;GAOG;AACH,MAAM,UAAU,qBAAqB,CACnC,WAAuB,EACvB,aAAkB;IAElB,MAAM,WAAW,GAAG,IAAI,GAAG,EAAa,CAAC;IACzC,KAAK,MAAM,IAAI,IAAI,aAAa,EAAE;QAChC,WAAW,CAAC,GAAG,CAAC,IAAI,CAAC,EAAE,EAAE,IAAI,CAAC,CAAC;KAChC;IAED,IAAI,aAAa,CAAC,MAAM,KAAK,WAAW,CAAC,MAAM,EAAE;QAC/C,MAAM,OAAO,GAAG,aAAa,CAAC,MAAM,GAAG,WAAW,CAAC,MAAM,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,OAAO,CAAC;QAC7E,MAAM,CAAC,OAAO,CACZ,wBAAwB,OAAO,8DAA8D,CAC9F,CAAC;KACH;IAED,MAAM,MAAM,GAAQ,EAAE,CAAC;IACvB;;;;OAIG;IACH,KAAK,MAAM,UAAU,IAAI,WAAW,EAAE;QACpC,MAAM,IAAI,GAAG,WAAW,CAAC,GAAG,CAAC,UAAU,CAAC,EAAE,CAAC,CAAC;QAC5C,IAAI,IAAI,EAAE;YACR,MAAM,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC;SACnB;KACF;IACD,OAAO,MAAM,CAAC;AAChB,CAAC;AAWD;;GAEG;AACH,MAAM,UAAU,oBAAoB,CAAC,OAAe;IAClD,MAAM,KAAK,GAAG,IAAI,MAAM,CAAC,2DAA2D,CAAC,CAAC;IACtF,MAAM,GAAG,GAAG,KAAK,CAAC,IAAI,CAAC,OAAO,CAAC,CAAC;IAChC,IAAI,GAAG,KAAK,IAAI,EAAE;QAChB,MAAM,eAAe,GAAoB;YACvC,QAAQ,EAAE,QAAQ,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC;YAC1B,QAAQ,EAAE,QAAQ,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC;YAC1B,UAAU,EAAE,QAAQ,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC;SAC7B,CAAC;QACF,OAAO,eAAe,CAAC;KACxB;SAAM;QACL,MAAM,IAAI,KAAK,CAAC,YAAY,OAAO,qCAAqC,CAAC,CAAC;KAC3E;AACH,CAAC;AAED;;;;GAIG;AACH,MAAM,UAAU,0BAA0B,CAAC,OAAe;IACxD,MAAM,KAAK,GAAG,IAAI,MAAM,CAAC,+CAA+C,CAAC,CAAC;IAC1E,MAAM,GAAG,GAAG,KAAK,CAAC,IAAI,CAAC,OAAO,CAAC,CAAC;IAChC,IAAI,GAAG,KAAK,IAAI,EAAE;QAChB,OAAO,QAAQ,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC;KACzB;SAAM;QACL,MAAM,IAAI,KAAK,CAAC,YAAY,OAAO,4CAA4C,CAAC,CAAC;KAClF;AACH,CAAC;AAED;;GAEG;AACH,MAAM,UAAU,aAAa,CAAC,SAAoB;IAChD,OAAO,OAAO,SAAS,CAAC,CAAC,CAAC,KAAK,QAAQ,CAAC;AAC1C,CAAC;AAED;;GAEG;AACH,MAAM,UAAU,0BAA0B,CACxC,MAAgB,EAChB,QAAiB;IAEjB,OAAO,MAAM,CAAC,GAAG,CAAC,CAAC,IAAY,EAAE,KAAK,EAAqB,EAAE;QAC3D,OAAO;YACL,EAAE,EAAE,MAAM,CAAC,KAAK,CAAC;YACjB,QAAQ;YACR,IAAI;SACL,CAAC;IACJ,CAAC,CAAC,CAAC;AACL,CAAC;AAED;;GAEG;AACH,MAAM,UAAU,+BAA+B,CAC7C,MAAgB,EAChB,WAAoB;IAEpB,OAAO,MAAM,CAAC,GAAG,CAAC,CAAC,IAAY,EAAE,KAAK,EAA0B,EAAE;QAChE,OAAO;YACL,EAAE,EAAE,MAAM,CAAC,KAAK,CAAC;YACjB,WAAW;YACX,IAAI;SACL,CAAC;IACJ,CAAC,CAAC,CAAC;AACL,CAAC;AAED;;GAEG;AACH,MAAM,UAAU,mBAAmB,CACjC,OAAiB;IAcjB,MAAM,EACJ,WAAW,EACX,UAAU,EACV,iBAAiB,EACjB,UAAU,EACV,cAAc,EACd,iBAAiB,EACjB,cAAc,KAEZ,OAAO,EADN,IAAI,UACL,OAAO,EATL,yHASL,CAAU,CAAC;IACZ,OAAO;QACL,OAAO,EAAE;YACP,WAAW;YACX,UAAU;YACV,iBAAiB;YACjB,UAAU;YACV,cAAc;YACd,iBAAiB;YACjB,cAAc;SACf;QACD,IAAI;KACL,CAAC;AACJ,CAAC","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport { LanguageDetectionInput, TextDocumentInput } from \"./generated\";\nimport { TextAnalysisOperationOptions } from \"./models\";\nimport { logger } from \"./logger\";\n\ninterface IdObject {\n id: string;\n}\n\n/**\n * Given a sorted array of input objects (with a unique ID) and an unsorted array of results,\n * return a sorted array of results.\n *\n * @internal\n * @param sortedArray - An array of entries sorted by `id`\n * @param unsortedArray - An array of entries that contain `id` but are not sorted\n */\nexport function sortResponseIdObjects<U extends IdObject>(\n sortedArray: IdObject[],\n unsortedArray: U[]\n): U[] {\n const unsortedMap = new Map<string, U>();\n for (const item of unsortedArray) {\n unsortedMap.set(item.id, item);\n }\n\n if (unsortedArray.length !== sortedArray.length) {\n const ordinal = unsortedArray.length > sortedArray.length ? 
\"more\" : \"fewer\";\n logger.warning(\n `The service returned ${ordinal} responses than inputs. Some errors may be treated as fatal.`\n );\n }\n\n const result: U[] = [];\n /**\n * When the results are returned in pages, sortedArray will probably have more\n * items than unsortedArray so it is ok to ignore the case when a sorted item\n * ID is not found in `unsortedMap`.\n */\n for (const sortedItem of sortedArray) {\n const item = unsortedMap.get(sortedItem.id);\n if (item) {\n result.push(item);\n }\n }\n return result;\n}\n\n/**\n * @internal\n */\nexport interface AssessmentIndex {\n document: number;\n sentence: number;\n assessment: number;\n}\n\n/**\n * @internal\n */\nexport function parseAssessmentIndex(pointer: string): AssessmentIndex {\n const regex = new RegExp(/#\\/documents\\/(\\d+)\\/sentences\\/(\\d+)\\/assessments\\/(\\d+)/);\n const res = regex.exec(pointer);\n if (res !== null) {\n const assessmentIndex: AssessmentIndex = {\n document: parseInt(res[1]),\n sentence: parseInt(res[2]),\n assessment: parseInt(res[3]),\n };\n return assessmentIndex;\n } else {\n throw new Error(`Pointer \"${pointer}\" is not a valid Assessment pointer`);\n }\n}\n\n/**\n * Parses the index of the healthcare entity from a JSON pointer.\n * @param pointer - a JSON pointer representing an entity\n * @internal\n */\nexport function parseHealthcareEntityIndex(pointer: string): number {\n const regex = new RegExp(/#\\/results\\/documents\\/(\\d+)\\/entities\\/(\\d+)/);\n const res = regex.exec(pointer);\n if (res !== null) {\n return parseInt(res[2]);\n } else {\n throw new Error(`Pointer \"${pointer}\" is not a valid healthcare entity pointer`);\n }\n}\n\n/**\n * @internal\n */\nexport function isStringArray(documents: unknown[]): documents is string[] {\n return typeof documents[0] === \"string\";\n}\n\n/**\n * @internal\n */\nexport function convertToTextDocumentInput(\n inputs: string[],\n language?: string\n): TextDocumentInput[] {\n return inputs.map((text: string, index): TextDocumentInput => {\n return {\n id: String(index),\n language,\n text,\n };\n });\n}\n\n/**\n * @internal\n */\nexport function convertToLanguageDetectionInput(\n inputs: string[],\n countryHint?: string\n): LanguageDetectionInput[] {\n return inputs.map((text: string, index): LanguageDetectionInput => {\n return {\n id: String(index),\n countryHint,\n text,\n };\n });\n}\n\n/**\n * @internal\n */\nexport function getOperationOptions<OptionsT extends TextAnalysisOperationOptions>(\n options: OptionsT\n): {\n options: TextAnalysisOperationOptions;\n rest: Omit<\n OptionsT,\n | \"onResponse\"\n | \"abortSignal\"\n | \"apiVersion\"\n | \"includeStatistics\"\n | \"requestOptions\"\n | \"serializerOptions\"\n | \"tracingOptions\"\n >;\n} {\n const {\n abortSignal,\n apiVersion,\n includeStatistics,\n onResponse,\n requestOptions,\n serializerOptions,\n tracingOptions,\n ...rest\n } = options;\n return {\n options: {\n abortSignal,\n apiVersion,\n includeStatistics,\n onResponse,\n requestOptions,\n serializerOptions,\n tracingOptions,\n },\n rest,\n };\n}\n"]}
+ {"version":3,"file":"util.js","sourceRoot":"","sources":["../../src/util.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;;AAIlC,OAAO,EAAE,MAAM,EAAE,MAAM,UAAU,CAAC;AAElC;;;;;;;GAOG;AACH,MAAM,UAAU,qBAAqB,CACnC,SAAmB,EACnB,aAAkB;IAElB,MAAM,WAAW,GAAG,IAAI,GAAG,EAAa,CAAC;IACzC,KAAK,MAAM,IAAI,IAAI,aAAa,EAAE;QAChC,WAAW,CAAC,GAAG,CAAC,IAAI,CAAC,EAAE,EAAE,IAAI,CAAC,CAAC;KAChC;IAED,IAAI,aAAa,CAAC,MAAM,KAAK,SAAS,CAAC,MAAM,EAAE;QAC7C,MAAM,OAAO,GAAG,aAAa,CAAC,MAAM,GAAG,SAAS,CAAC,MAAM,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,OAAO,CAAC;QAC3E,MAAM,CAAC,OAAO,CACZ,wBAAwB,OAAO,8DAA8D,CAC9F,CAAC;KACH;IAED,MAAM,MAAM,GAAQ,EAAE,CAAC;IACvB;;;;OAIG;IACH,KAAK,MAAM,EAAE,IAAI,SAAS,EAAE;QAC1B,MAAM,IAAI,GAAG,WAAW,CAAC,GAAG,CAAC,EAAE,CAAC,CAAC;QACjC,IAAI,IAAI,EAAE;YACR,MAAM,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC;SACnB;KACF;IACD,OAAO,MAAM,CAAC;AAChB,CAAC;AAWD;;GAEG;AACH,MAAM,UAAU,oBAAoB,CAAC,OAAe;IAClD,MAAM,KAAK,GAAG,IAAI,MAAM,CAAC,2DAA2D,CAAC,CAAC;IACtF,MAAM,GAAG,GAAG,KAAK,CAAC,IAAI,CAAC,OAAO,CAAC,CAAC;IAChC,IAAI,GAAG,KAAK,IAAI,EAAE;QAChB,MAAM,eAAe,GAAoB;YACvC,QAAQ,EAAE,QAAQ,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC;YAC1B,QAAQ,EAAE,QAAQ,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC;YAC1B,UAAU,EAAE,QAAQ,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC;SAC7B,CAAC;QACF,OAAO,eAAe,CAAC;KACxB;SAAM;QACL,MAAM,IAAI,KAAK,CAAC,YAAY,OAAO,qCAAqC,CAAC,CAAC;KAC3E;AACH,CAAC;AAED;;;;GAIG;AACH,MAAM,UAAU,0BAA0B,CAAC,OAAe;IACxD,MAAM,KAAK,GAAG,IAAI,MAAM,CAAC,+CAA+C,CAAC,CAAC;IAC1E,MAAM,GAAG,GAAG,KAAK,CAAC,IAAI,CAAC,OAAO,CAAC,CAAC;IAChC,IAAI,GAAG,KAAK,IAAI,EAAE;QAChB,OAAO,QAAQ,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC;KACzB;SAAM;QACL,MAAM,IAAI,KAAK,CAAC,YAAY,OAAO,4CAA4C,CAAC,CAAC;KAClF;AACH,CAAC;AAED;;GAEG;AACH,MAAM,UAAU,aAAa,CAAC,SAAoB;IAChD,OAAO,OAAO,SAAS,CAAC,CAAC,CAAC,KAAK,QAAQ,CAAC;AAC1C,CAAC;AAED;;GAEG;AACH,MAAM,UAAU,0BAA0B,CACxC,MAAgB,EAChB,QAAiB;IAEjB,OAAO,MAAM,CAAC,GAAG,CAAC,CAAC,IAAY,EAAE,KAAK,EAAqB,EAAE;QAC3D,OAAO;YACL,EAAE,EAAE,MAAM,CAAC,KAAK,CAAC;YACjB,QAAQ;YACR,IAAI;SACL,CAAC;IACJ,CAAC,CAAC,CAAC;AACL,CAAC;AAED;;GAEG;AACH,MAAM,UAAU,+BAA+B,CAC7C,MAAgB,EAChB,WAAoB;IAEpB,OAAO,MAAM,CAAC,GAAG,CAAC,CAAC,IAAY,EAAE,KAAK,EAA0B,EAAE;QAChE,OAAO;YACL,EAAE,EAAE,MAAM,CAAC,KAAK,CAAC;YACjB,WAAW;YACX,IAAI;SACL,CAAC;IACJ,CAAC,CAAC,CAAC;AACL,CAAC;AAED;;GAEG;AACH,MAAM,UAAU,mBAAmB,CACjC,OAAiB;IAajB,MAAM,EACJ,WAAW,EACX,iBAAiB,EACjB,UAAU,EACV,cAAc,EACd,iBAAiB,EACjB,cAAc,KAEZ,OAAO,EADN,IAAI,UACL,OAAO,EARL,2GAQL,CAAU,CAAC;IACZ,OAAO;QACL,OAAO,EAAE;YACP,WAAW;YACX,iBAAiB;YACjB,UAAU;YACV,cAAc;YACd,iBAAiB;YACjB,cAAc;SACf;QACD,IAAI;KACL,CAAC;AACJ,CAAC","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport { LanguageDetectionInput, TextDocumentInput } from \"./generated\";\nimport { TextAnalysisOperationOptions } from \"./models\";\nimport { logger } from \"./logger\";\n\n/**\n * Given a sorted array of input objects (with a unique ID) and an unsorted array of results,\n * return a sorted array of results.\n *\n * @internal\n * @param sortedIds - An array of sorted IDs\n * @param unsortedArray - An array of entries that contain `id` but are not sorted\n */\nexport function sortResponseIdObjects<U extends { id: string }>(\n sortedIds: string[],\n unsortedArray: U[]\n): U[] {\n const unsortedMap = new Map<string, U>();\n for (const item of unsortedArray) {\n unsortedMap.set(item.id, item);\n }\n\n if (unsortedArray.length !== sortedIds.length) {\n const ordinal = unsortedArray.length > sortedIds.length ? \"more\" : \"fewer\";\n logger.warning(\n `The service returned ${ordinal} responses than inputs. 
Some errors may be treated as fatal.`\n );\n }\n\n const result: U[] = [];\n /**\n * When the results are returned in pages, sortedArray will probably have more\n * items than unsortedArray so it is ok to ignore the case when a sorted item\n * ID is not found in `unsortedMap`.\n */\n for (const id of sortedIds) {\n const item = unsortedMap.get(id);\n if (item) {\n result.push(item);\n }\n }\n return result;\n}\n\n/**\n * @internal\n */\nexport interface AssessmentIndex {\n document: number;\n sentence: number;\n assessment: number;\n}\n\n/**\n * @internal\n */\nexport function parseAssessmentIndex(pointer: string): AssessmentIndex {\n const regex = new RegExp(/#\\/documents\\/(\\d+)\\/sentences\\/(\\d+)\\/assessments\\/(\\d+)/);\n const res = regex.exec(pointer);\n if (res !== null) {\n const assessmentIndex: AssessmentIndex = {\n document: parseInt(res[1]),\n sentence: parseInt(res[2]),\n assessment: parseInt(res[3]),\n };\n return assessmentIndex;\n } else {\n throw new Error(`Pointer \"${pointer}\" is not a valid Assessment pointer`);\n }\n}\n\n/**\n * Parses the index of the healthcare entity from a JSON pointer.\n * @param pointer - a JSON pointer representing an entity\n * @internal\n */\nexport function parseHealthcareEntityIndex(pointer: string): number {\n const regex = new RegExp(/#\\/results\\/documents\\/(\\d+)\\/entities\\/(\\d+)/);\n const res = regex.exec(pointer);\n if (res !== null) {\n return parseInt(res[2]);\n } else {\n throw new Error(`Pointer \"${pointer}\" is not a valid healthcare entity pointer`);\n }\n}\n\n/**\n * @internal\n */\nexport function isStringArray(documents: unknown[]): documents is string[] {\n return typeof documents[0] === \"string\";\n}\n\n/**\n * @internal\n */\nexport function convertToTextDocumentInput(\n inputs: string[],\n language?: string\n): TextDocumentInput[] {\n return inputs.map((text: string, index): TextDocumentInput => {\n return {\n id: String(index),\n language,\n text,\n };\n });\n}\n\n/**\n * @internal\n */\nexport function convertToLanguageDetectionInput(\n inputs: string[],\n countryHint?: string\n): LanguageDetectionInput[] {\n return inputs.map((text: string, index): LanguageDetectionInput => {\n return {\n id: String(index),\n countryHint,\n text,\n };\n });\n}\n\n/**\n * @internal\n */\nexport function getOperationOptions<OptionsT extends TextAnalysisOperationOptions>(\n options: OptionsT\n): {\n options: TextAnalysisOperationOptions;\n rest: Omit<\n OptionsT,\n | \"onResponse\"\n | \"abortSignal\"\n | \"includeStatistics\"\n | \"requestOptions\"\n | \"serializerOptions\"\n | \"tracingOptions\"\n >;\n} {\n const {\n abortSignal,\n includeStatistics,\n onResponse,\n requestOptions,\n serializerOptions,\n tracingOptions,\n ...rest\n } = options;\n return {\n options: {\n abortSignal,\n includeStatistics,\n onResponse,\n requestOptions,\n serializerOptions,\n tracingOptions,\n },\n rest,\n };\n}\n"]}
package/package.json CHANGED
@@ -3,7 +3,7 @@
  "sdk-type": "client",
  "author": "Microsoft Corporation",
  "description": "An isomorphic client library for the text analysis features in the Azure Cognitive Language Service.",
- "version": "1.0.0-beta.1",
+ "version": "1.0.0",
  "keywords": [
  "node",
  "azure",
@@ -86,9 +86,9 @@
  "@azure/core-auth": "^1.3.0",
  "@azure/core-client": "^1.0.0",
  "@azure/core-rest-pipeline": "^1.8.1",
- "@azure/core-lro": "2.3.0-beta.1",
+ "@azure/core-lro": "^2.3.0",
  "@azure/core-paging": "^1.3.0",
- "@azure/core-tracing": "1.0.0",
+ "@azure/core-tracing": "^1.0.0",
  "@azure/logger": "^1.0.0",
  "tslib": "^2.2.0"
  },
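
With the version moved to 1.0.0 and the beta-pinned core dependencies (`@azure/core-lro`, `@azure/core-tracing`) widened to stable caret ranges, a dependent project can take the GA release directly. A short usage sketch, assuming the package's documented public surface; the endpoint and key are placeholders:

// Sketch only: placeholder endpoint/key; assumes the documented 1.0.0 surface
// (TextAnalysisClient with an analyze() method and per-document results).
import { AzureKeyCredential } from "@azure/core-auth";
import { TextAnalysisClient } from "@azure/ai-language-text";

async function main(): Promise<void> {
  const client = new TextAnalysisClient(
    "https://<resource-name>.cognitiveservices.azure.com",
    new AzureKeyCredential("<api-key>")
  );
  const [result] = await client.analyze("LanguageDetection", ["Hello, world!"]);
  if ("error" in result) {
    console.error(result.error);
  } else {
    console.log(result.primaryLanguage);
  }
}

main().catch(console.error);
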