@azure/ai-text-analytics 6.0.0-alpha.20220526.1 → 6.0.0-alpha.20220627.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -8,7 +8,7 @@ import { LroEngine } from "@azure/core-lro";
  import { createTracingClient } from "@azure/core-tracing";
  import { convertToLanguageDetectionInput, convertToTextDocumentInput, getOperationOptions, isStringArray, } from "./util";
  import { createAnalyzeBatchLro, createCancelOperation, createCreateAnalyzeBatchPollerLro, createUpdateAnalyzeState, getDocsFromState, processAnalyzeResult, } from "./lro";
- import { transformActionResult, transformError } from "./transforms";
+ import { throwError, transformActionResult } from "./transforms";
  import { GeneratedClient } from "./generated/generatedClient";
  import { logger } from "./logger";
  import { textAnalyticsAzureKeyCredentialPolicy } from "./azureKeyCredentialPolicy";
@@ -97,21 +97,15 @@ export class TextAnalysisClient {
  languageOrCountryHintOrOptions || {};
  }
  const { options: operationOptions, rest: action } = getOperationOptions(realOptions);
- return this._tracing.withSpan("TextAnalysisClient.analyze", operationOptions, async (updatedOptions) => {
- try {
- const result = await this._client.analyze({
- kind: actionName,
- analysisInput: {
- documents: realInputs,
- },
- parameters: action,
- }, updatedOptions);
- return transformActionResult(actionName, realInputs, result);
- }
- catch (e) {
- throw transformError(e);
- }
- });
+ return this._tracing.withSpan("TextAnalysisClient.analyze", operationOptions, async (updatedOptions) => throwError(this._client
+ .analyze({
+ kind: actionName,
+ analysisInput: {
+ documents: realInputs,
+ },
+ parameters: action,
+ }, updatedOptions)
+ .then((result) => transformActionResult(actionName, realInputs, result))));
  }
  // implementation
  async beginAnalyzeBatch(actions, documents, languageOrOptions, options = {}) {
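The change above replaces the inline try/catch (which rethrew via transformError) with a throwError helper imported from "./transforms", applied to the whole client.analyze promise chain, with transformActionResult moved into a .then. A minimal sketch of what such a helper could look like, purely as an assumption inferred from how it is called in this diff and not the package's actual implementation:

// Hypothetical sketch (TypeScript): await the wrapped service call and rethrow
// any failure after mapping it to an Error, which is the role the removed
// inline `catch (e) { throw transformError(e); }` used to play.
async function throwError<T>(promise: Promise<T>): Promise<T> {
  try {
    return await promise;
  } catch (e: unknown) {
    // The real helper presumably delegates to something like transformError;
    // a plain Error wrapper stands in for it here.
    throw e instanceof Error ? e : new Error(String(e));
  }
}

// Usage mirroring the new code path: wrap the request plus result transform.
// return throwError(client.analyze(request, updatedOptions).then(transform));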
@@ -1 +1 @@
- {"version":3,"file":"textAnalysisClient.js","sourceRoot":"","sources":["../../src/textAnalysisClient.ts"], ...}
+ {"version":3,"file":"textAnalysisClient.js","sourceRoot":"","sources":["../../src/textAnalysisClient.ts"], ...}
  (Regenerated source map for textAnalysisClient.js: the minified "mappings" and embedded "sourcesContent" are updated to match the source change above, with "./transforms" now providing throwError instead of transformError and the analyze implementation wrapping the client call's promise chain in throwError.)
This can be\n * useful when you want to create pollers on a different host or a poller\n * needs to be constructed after the original one is not in scope.\n *\n * @param serializedState - the serialized state of another poller. It is the\n * result of `poller.toString()`\n * @param options - optional settings for the operation\n *\n * # Example\n *\n * `client.beginAnalyzeBatch` returns a promise that will resolve to a poller.\n * The state of the poller can be serialized and used to create another as follows:\n *\n * ```js\n * const serializedState = poller.toString();\n * const rehydratedPoller = await client.createAnalyzeBatchPoller(serializedState);\n * const actionResults = await rehydratedPoller.pollUntilDone();\n * ```\n */\n async restoreAnalyzeBatchPoller(\n serializedState: string,\n options?: RestoreAnalyzeBatchPollerOptions\n ): Promise<AnalyzeBatchPoller>;\n // implementation\n async restoreAnalyzeBatchPoller(\n serializedState: string,\n options: RestoreAnalyzeBatchPollerOptions = {}\n ): Promise<AnalyzeBatchPoller> {\n const { includeStatistics, updateIntervalInMs, ...rest } = options;\n const documents = getDocsFromState(serializedState);\n const lro = createCreateAnalyzeBatchPollerLro({\n client: this._client,\n options: { ...rest, includeStatistics },\n tracing: this._tracing,\n });\n\n const poller = new LroEngine<PagedAnalyzeBatchResult, AnalyzeBatchOperationState>(\n lro as LongRunningOperation<PagedAnalyzeBatchResult>,\n {\n intervalInMs: updateIntervalInMs,\n resumeFrom: serializedState,\n processResult: processAnalyzeResult({\n client: this._client,\n tracing: this._tracing,\n documents,\n opOptions: { ...rest, includeStatistics },\n }),\n updateState: createUpdateAnalyzeState(),\n cancel: createCancelOperation({\n client: this._client,\n tracing: this._tracing,\n options: rest,\n }),\n }\n );\n\n await poller.poll();\n return poller;\n }\n}\n"]}
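Note on the documented `analyze` call shape: the JSDoc embedded in the source map above shows `client.analyze("SentimentAnalysis", documents, { includeOpinionMining: true })` returning per-document results that are either an error entry or sentence-level sentiment with opinions. The sketch below restates that call shape in isolation; it is illustrative only, not code from this package, and it assumes the package exports the `TextAnalysisClient` class and the result shapes exactly as documented above.

```ts
import type { TextAnalysisClient } from "@azure/ai-text-analytics";

// Illustrative sketch of the documented call shape; `client` and `documents`
// are assumed to be supplied by the caller.
export async function logOpinions(
  client: TextAnalysisClient,
  documents: string[]
): Promise<void> {
  const results = await client.analyze("SentimentAnalysis", documents, {
    includeOpinionMining: true,
  });

  for (const result of results) {
    if ("error" in result) {
      // A document-level error replaces the result for that document.
      console.error(`Document ${result.id} failed:`, result.error);
      continue;
    }
    for (const sentence of result.sentences) {
      for (const { target, assessments } of sentence.opinions) {
        console.log(
          target.text,
          target.sentiment,
          assessments.map((a) => a.sentiment)
        );
      }
    }
  }
}
```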
@@ -150,7 +150,7 @@ function appendReadableErrorMessage(currentMessage, innerMessage) {
150
150
  * parses incoming errors from the service/
151
151
  * @param error - the incoming error
152
152
  */
153
- export function transformError(errorResponse) {
153
+ function transformError(errorResponse) {
154
154
  var _a;
155
155
  const strongErrorResponse = errorResponse;
156
156
  if (!strongErrorResponse.response) {
@@ -180,6 +180,14 @@ export function transformError(errorResponse) {
180
180
  statusCode: strongErrorResponse.statusCode,
181
181
  });
182
182
  }
183
+ export async function throwError(p) {
184
+ try {
185
+ return await p;
186
+ }
187
+ catch (e) {
188
+ throw transformError(e);
189
+ }
190
+ }
183
191
  function toHealthcareResult(documents, results) {
184
192
  function makeHealthcareEntity(entity) {
185
193
  const { dataSources } = entity, rest = __rest(entity, ["dataSources"]);
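Net effect of the two hunks above: `transformError` is no longer exported from `transforms.js`, and a new `throwError` helper awaits a promise and rethrows any rejection after passing it through `transformError`. A generic sketch of that promise-wrapping pattern is shown below; `mapServiceError` is a placeholder for the now module-private `transformError`, not an export of this package.

```ts
// Generic sketch of the pattern added above: await the operation and funnel
// any rejection through a single error-mapping function before rethrowing.
// `mapServiceError` stands in for the module-private transformError.
export async function rethrowMapped<T>(
  operation: Promise<T>,
  mapServiceError: (e: unknown) => unknown
): Promise<T> {
  try {
    return await operation;
  } catch (e: unknown) {
    throw mapServiceError(e);
  }
}

// Hypothetical usage: wrap a request so callers only ever see mapped errors.
// const body = await rethrowMapped(fetch(url).then((r) => r.json()), toRestError);
```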
@@ -1 +1 @@
1
- {"version":3,"file":"transforms.js","sourceRoot":"","sources":["../../src/transforms.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;;AAqElC,OAAO,EAEL,oBAAoB,EACpB,0BAA0B,EAC1B,qBAAqB,GACtB,MAAM,QAAQ,CAAC;AAChB,OAAO,EAAE,SAAS,EAAE,MAAM,2BAA2B,CAAC;AAEtD;;;GAGG;AACH,SAAS,mBAAmB,CAAC,UAAwC;IACnE,4BAA4B;IAC5B,IAAI,UAAU,CAAC,UAAU,KAAK,SAAS,EAAE;QACvC,OAAO,mBAAmB,CAAC,UAAU,CAAC,UAAU,CAAC,CAAC;KACnD;IAED,yBACK,UAAU,EACb;AACJ,CAAC;AAED,SAAS,2BAA2B,CAAC,EAAU,EAAE,KAAiB;IAChE,OAAO;QACL,EAAE;QACF,KAAK,EAAE,mBAAmB,CAAC,KAAK,CAAC;KAClC,CAAC;AACJ,CAAC;AAED;;;;;;GAMG;AACH,SAAS,wBAAwB,CAK/B,KAA0B,EAC1B,QAGC,EACD,OAGC;IAED,MAAM,EAAE,YAAY,GAAG,2BAA2B,EAAE,cAAc,EAAE,GAAG,OAAO,IAAI,EAAE,CAAC;IACrF,MAAM,cAAc,GAAG,cAAc;QACnC,CAAC,CAAC,QAAQ,CAAC,SAAS,CAAC,GAAG,CAAC,cAAc,CAAC;QACxC,CAAC,CAAC,QAAQ,CAAC,SAAS,CAAC;IACvB,MAAM,eAAe,GACnB,cACD,CAAC,MAAM,CAAC,QAAQ,CAAC,MAAM,CAAC,GAAG,CAAC,CAAC,KAAK,EAAE,EAAE,CAAC,YAAY,CAAC,KAAK,CAAC,EAAE,EAAE,KAAK,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC;IAE9E,OAAO,qBAAqB,CAAC,KAAK,EAAE,eAAe,CAAC,CAAC;AACvD,CAAC;AAED,SAAS,yBAAyB,CAChC,SAAmC,EACnC,OAAyC;IAEzC,OAAO,wBAAwB,CAAC,SAAS,EAAE,OAAO,EAAE;QAClD,cAAc,EAAE,CAAC,EAA6B,EAAE,EAAE;gBAAjC,EAAE,gBAAgB,OAAW,EAAN,IAAI,cAA3B,oBAA6B,CAAF;YAAO,OAAA,iBACjD,eAAe,EAAE,gBAAgB,IAC9B,IAAI,EACP,CAAA;SAAA;KACH,CAAC,CAAC;AACL,CAAC;AAED,SAAS,4BAA4B,CACnC,SAA8B,EAC9B,OAA4C;IAE5C,OAAO,wBAAwB,CAAC,SAAS,EAAE,OAAO,CAAC,CAAC;AACtD,CAAC;AAED,SAAS,yBAAyB,CAChC,SAA8B,EAC9B,OAAyC;IAEzC,OAAO,wBAAwB,CAAC,SAAS,EAAE,OAAO,EAAE;QAClD,cAAc,EAAE,CAAC,EAAsB,EAAE,EAAE;gBAA1B,EAAE,SAAS,OAAW,EAAN,IAAI,cAApB,aAAsB,CAAF;YAAO,OAAA,iCACvC,IAAI,KACP,SAAS,EAAE,SAAS,CAAC,GAAG,CAAC,CAAC,QAAQ,EAAE,EAAE,CACpC,iCAAiC,CAAC,QAAQ,EAAE,SAAS,CAAC,CACvD,IACD,CAAA;SAAA;KACH,CAAC,CAAC;AACL,CAAC;AAED;;;;;;;;GAQG;AACH,SAAS,iCAAiC,CACxC,EAAgE,EAChE,SAAuC;;QADvC,EAAE,OAAO,EAAE,WAAW,EAAE,CAAC,OAAuC,EAAlC,IAAI,cAAlC,0BAAoC,CAAF;IAGlC,uCACK,IAAI,KACP,QAAQ,EACN,MAAA,OAAO,aAAP,OAAO,uBAAP,OAAO,CAAE,GAAG;QACV,wDAAwD;QACxD,CAAC,EAAsC,EAAW,EAAE;gBAAnD,EAAE,SAAS,OAA2B,EAAtB,IAAI,cAApB,aAAsB,CAAF;YAAgC,OAAA,CAAC;gBACpD,MAAM,EAAE,IAAI;gBACZ,WAAW,EAAE,SAAS;qBACnB,MAAM,CAAC,CAAC,QAAQ,EAAE,EAAE,CAAC,QAAQ,CAAC,YAAY,KAAK,YAAY,CAAC;qBAC5D,GAAG,CAAC,CAAC,QAAQ,EAAE,EAAE,CAAC,0CAA0C,CAAC,QAAQ,EAAE,SAAS,CAAC,CAAC;aACtF,CAAC,CAAA;SAAA,CACH,mCAAI,EAAE,IACT;AACJ,CAAC;AAED;;;;;;;;;GASG;AACH,SAAS,0CAA0C,CACjD,cAA8B,EAC9B,SAAuC;;IAEvC,MAAM,aAAa,GAAG,cAAc,CAAC,GAAG,CAAC;IACzC,MAAM,eAAe,GAAoB,oBAAoB,CAAC,aAAa,CAAC,CAAC;IAC7E,MAAM,UAAU,GACd,MAAA,SAAS,aAAT,SAAS,uBAAT,SAAS,CAAG,eAAe,CAAC,QAAQ,EAAE,WAAW,0CAAG,eAAe,CAAC,UAAU,CAAC,CAAC;IAClF,IAAI,UAAU,KAAK,SAAS,EAAE;QAC5B,OAAO,UAAU,CAAC;KACnB;SAAM;QACL,MAAM,IAAI,KAAK,CAAC,YAAY,aAAa,qCAAqC,CAAC,CAAC;KACjF;AACH,CAAC;AAED,SAAS,qBAAqB,CAC5B,SAA8B,EAC9B,OAAqC;IAErC,OAAO,wBAAwB,CAAC,SAAS,EAAE,OAAO,CAAC,CAAC;AACtD,CAAC;AAED,SAAS,2BAA2B,CAClC,SAA8B,EAC9B,OAA2C;IAE3C,OAAO,wBAAwB,CAAC,SAAS,EAAE,OAAO,CAAC,CAAC;AACtD,CAAC;AAED,SAAS,yBAAyB,CAChC,SAA8B,EAC9B,OAAyC;IAEzC,OAAO,wBAAwB,CAAC,SAAS,EAAE,OAAO,CAAC,CAAC;AACtD,CAAC;AAED;;GAEG;AACH,MAAM,UAAU,qBAAqB,CACnC,UAAsB,EACtB,KAAqD,EACrD,QAAyB;IAEzB,QAAQ,QAAQ,CAAC,IAAI,EAAE;QACrB,KAAK,sBAAsB,CAAC,CAAC;YAC3B,OAAO,qBAAqB,CAAC,KAAK,EAAG,QAAoC,CAAC,OAAO,CAAC,CAAC;SACpF;QACD,KAAK,0BAA0B,CAAC,CAAC;YAC/B,OAAO,yBAAyB,CAAC,KAAK,EAAG,QAA+B,CAAC,OAAO,CAAC,CAAC;SACnF;QACD,KAAK,4BAA4B,CAAC,CAAC;YACjC,OAAO,2BAA2B,CAAC,KAAK,EAAG,QAAgC,CAAC,OAAO,CAAC,CAAC;SACtF;QACD,KAAK,6BAA6B,CAAC,CAAC;YAClC,OAAO,4BAA4B,CAAC,KAAK,EAAG,QAA0B,CAAC,OAAO,CAAC,CAAC;SACjF;QACD,KAAK,0BAA0B,CAAC,CAAC;YAC/B,OAAO,yBAAyB,CAAC,KAAK,EAAG,QAAgC,CAAC,OAAO,CAAC,CAAC;SACpF;QACD,KAAK,0BAA0B,CAAC,CAAC;Y
AC/B,OAAO,yBAAyB,CAAC,KAAK,EAAG,QAAwC,CAAC,OAAO,CAAC,CAAC;SAC5F;QACD,OAAO,CAAC,CAAC;YACP,MAAM,SAAS,GAAU,QAAQ,CAAC;YAClC,MAAM,IAAI,KAAK,CAAC,6BAA6B,SAAS,0BAA0B,UAAU,EAAE,CAAC,CAAC;SAC/F;KACF;AACH,CAAC;AAED,SAAS,0BAA0B,CAAC,cAAsB,EAAE,YAAoB;IAC9E,IAAI,OAAO,GAAG,cAAc,CAAC;IAC7B,IAAI,OAAO,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC,KAAK,GAAG,EAAE;QAC7B,OAAO,GAAG,OAAO,GAAG,GAAG,CAAC;KACzB;IACD,OAAO,OAAO,GAAG,GAAG,GAAG,YAAY,CAAC;AACtC,CAAC;AAED;;;;GAIG;AACH,MAAM,UAAU,cAAc,CAAC,aAAsB;;IACnD,MAAM,mBAAmB,GAAG,aAK3B,CAAC;IACF,IAAI,CAAC,mBAAmB,CAAC,QAAQ,EAAE;QACjC,MAAM,aAAa,CAAC;KACrB;IACD,MAAM,aAAa,GAAG,MAAA,mBAAmB,CAAC,QAAQ,CAAC,UAAU,0CAAE,KAAK,CAAC;IACrE,IAAI,CAAC,aAAa;QAAE,OAAO,aAAa,CAAC;IACzC,IAAI,YAAY,GAAG,aAAa,CAAC,OAAO,CAAC;IACzC,IAAI,IAAI,GAAG,aAAa,CAAC,IAAI,CAAC;IAC9B,SAAS,MAAM,CAAC,KAAmC;QACjD,MAAM,UAAU,GAAG,KAAK,CAAC,UAAU,CAAC;QACpC,IAAI,UAAU,EAAE;YACd,IAAI,UAAU,CAAC,OAAO,EAAE;gBACtB,YAAY,GAAG,0BAA0B,CAAC,YAAY,EAAE,UAAU,CAAC,OAAO,CAAC,CAAC;aAC7E;YACD,IAAI,UAAU,CAAC,IAAI,EAAE;gBACnB,IAAI,GAAG,UAAU,CAAC,IAAI,CAAC;aACxB;YACD,OAAO,MAAM,CAAC,UAAU,CAAC,CAAC;SAC3B;QACD,OAAO,KAAmB,CAAC;IAC7B,CAAC;IACD,MAAM,CAAC,aAAa,CAAC,CAAC;IACtB,OAAO,IAAI,SAAS,CAAC,YAAY,EAAE;QACjC,IAAI;QACJ,UAAU,EAAE,mBAAmB,CAAC,UAAU;KAC3C,CAAC,CAAC;AACL,CAAC;AAED,SAAS,kBAAkB,CACzB,SAA8B,EAC9B,OAAkC;IAElC,SAAS,oBAAoB,CAAC,MAAiC;QAC7D,MAAM,EAAE,WAAW,KAAc,MAAM,EAAf,IAAI,UAAK,MAAM,EAAjC,eAAwB,CAAS,CAAC;QACxC,uBACE,WAAW,EAAE,WAAW,aAAX,WAAW,cAAX,WAAW,GAAI,EAAE,IAC3B,IAAI,EACP;IACJ,CAAC;IACD,SAAS,sBAAsB,CAC7B,QAA4B;QAE5B,OAAO,CAAC,QAA4B,EAA4B,EAAE,CAAC,CAAC;YAClE,YAAY,EAAE,QAAQ,CAAC,YAAY;YACnC,KAAK,EAAE,QAAQ,CAAC,QAAQ,CAAC,GAAG,CAC1B,CAAC,IAA8B,EAAgC,EAAE,CAAC,CAAC;gBACjE,MAAM,EAAE,QAAQ,CAAC,0BAA0B,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC;gBACtD,IAAI,EAAE,IAAI,CAAC,IAAI;aAChB,CAAC,CACH;SACF,CAAC,CAAC;IACL,CAAC;IACD,OAAO,wBAAwB,CAC7B,SAAS,EACT,OAAO,EACP;QACE,cAAc,EAAE,CAAC,EAAgC,EAAE,EAAE;gBAApC,EAAE,QAAQ,EAAE,SAAS,OAAW,EAAN,IAAI,cAA9B,yBAAgC,CAAF;YAC7C,MAAM,WAAW,GAAG,QAAQ,CAAC,GAAG,CAAC,oBAAoB,CAAC,CAAC;YACvD,uBACE,QAAQ,EAAE,WAAW,EACrB,eAAe,EAAE,SAAS,CAAC,GAAG,CAAC,sBAAsB,CAAC,WAAW,CAAC,CAAC,IAChE,IAAI,EACP;QACJ,CAAC;KACF,CACF,CAAC;AACJ,CAAC;AAED,SAAS,uCAAuC,CAC9C,SAA8B,EAC9B,OAAuD;IAEvD,OAAO,wBAAwB,CAG7B,SAAS,EAAE,OAAO,EAAE;QACpB,cAAc,EAAE,CAAC,EAA2B,EAAE,EAAE;gBAA/B,EAAE,cAAc,OAAW,EAAN,IAAI,cAAzB,kBAA2B,CAAF;YACxC,uBACE,eAAe,EAAE,CAAC,cAAc,CAAC,IAC9B,IAAI,EACP;QACJ,CAAC;KACF,CAAC,CAAC;AACL,CAAC;AAED;;GAEG;AACH,MAAM,UAAU,4BAA4B,CAC1C,SAA8B,EAC9B,WAAwC,EAAE;IAE1C,OAAO,QAAQ,CAAC,GAAG,CAAC,CAAC,UAAU,EAAE,EAAE;QACjC,MAAM,EAAE,kBAAkB,EAAE,WAAW,EAAE,UAAU,EAAE,IAAI,EAAE,GAAG,UAAU,CAAC;QACzE,QAAQ,IAAsC,EAAE;YAC9C,KAAK,6BAA6B,CAAC,CAAC;gBAClC,MAAM,EAAE,OAAO,EAAE,GAAG,UAAgC,CAAC;gBACrD,MAAM,EAAE,YAAY,EAAE,UAAU,EAAE,GAAG,OAAO,CAAC;gBAC7C,mDACE,IAAI,EAAE,mBAAmB,EACzB,OAAO,EAAE,yBAAyB,CAAC,SAAS,EAAE,OAAO,CAAC,EACtD,WAAW,IACR,CAAC,UAAU,CAAC,CAAC,CAAC,EAAE,UAAU,EAAE,CAAC,CAAC,CAAC,EAAE,CAAC,GAClC,CAAC,UAAU,CAAC,CAAC,CAAC,EAAE,UAAU,EAAE,CAAC,CAAC,CAAC,EAAE,CAAC,KACrC,YAAY,IACZ;aACH;YACD,KAAK,6BAA6B,CAAC,CAAC;gBAClC,MAAM,EAAE,OAAO,EAAE,GAAG,UAAwC,CAAC;gBAC7D,MAAM,EAAE,YAAY,EAAE,UAAU,EAAE,GAAG,OAAO,CAAC;gBAC7C,mDACE,IAAI,EAAE,mBAAmB,EACzB,OAAO,EAAE,yBAAyB,CAAC,SAAS,EAAE,OAAO,CAAC,EACtD,WAAW,IACR,CAAC,UAAU,CAAC,CAAC,CAAC,EAAE,UAAU,EAAE,CAAC,CAAC,CAAC,EAAE,CAAC,GAClC,CAAC,UAAU,CAAC,CAAC,CAAC,EAAE,UAAU,EAAE,CAAC,CAAC,CAAC,EAAE,CAAC,KACrC,YAAY,IACZ;aACH;YACD,KAAK,gCAAgC,CAAC,CAAC;gBACrC,MAAM,EAAE,OAAO,EAAE,GAAG,UAA2C,CAAC;gBAChE,MAAM,EAAE,YAAY,EAAE,UAAU,EAAE,GAAG,OAAO,CAAC;gBAC7C,mDACE,IAAI,EAAE,sBAAsB,EAC5B,OAAO,EAAE,4BAA4B,CAAC,SAAS,EAAE,OAAO,CAAC,EACzD,WAAW,IACR,CAAC,UAAU,CAA
C,CAAC,CAAC,EAAE,UAAU,EAAE,CAAC,CAAC,CAAC,EAAE,CAAC,GAClC,CAAC,UAAU,CAAC,CAAC,CAAC,EAAE,UAAU,EAAE,CAAC,CAAC,CAAC,EAAE,CAAC,KACrC,YAAY,IACZ;aACH;YACD,KAAK,+BAA+B,CAAC,CAAC;gBACpC,MAAM,EAAE,OAAO,EAAE,GAAG,UAA0C,CAAC;gBAC/D,MAAM,EAAE,YAAY,EAAE,UAAU,EAAE,GAAG,OAAO,CAAC;gBAC7C,mDACE,IAAI,EAAE,qBAAqB,EAC3B,OAAO,EAAE,2BAA2B,CAAC,SAAS,EAAE,OAAO,CAAC,EACxD,WAAW,IACR,CAAC,UAAU,CAAC,CAAC,CAAC,EAAE,UAAU,EAAE,CAAC,CAAC,CAAC,EAAE,CAAC,GAClC,CAAC,UAAU,CAAC,CAAC,CAAC,EAAE,UAAU,EAAE,CAAC,CAAC,CAAC,EAAE,CAAC,KACrC,YAAY,IACZ;aACH;YACD,KAAK,yBAAyB,CAAC,CAAC;gBAC9B,MAAM,EAAE,OAAO,EAAE,GAAG,UAAoC,CAAC;gBACzD,MAAM,EAAE,YAAY,EAAE,UAAU,EAAE,GAAG,OAAO,CAAC;gBAC7C,mDACE,IAAI,EAAE,eAAe,EACrB,OAAO,EAAE,qBAAqB,CAAC,SAAS,EAAE,OAAO,CAAC,EAClD,WAAW,IACR,CAAC,UAAU,CAAC,CAAC,CAAC,EAAE,UAAU,EAAE,CAAC,CAAC,CAAC,EAAE,CAAC,GAClC,CAAC,UAAU,CAAC,CAAC,CAAC,EAAE,UAAU,EAAE,CAAC,CAAC,CAAC,EAAE,CAAC,KACrC,YAAY,IACZ;aACH;YACD,KAAK,sBAAsB,CAAC,CAAC;gBAC3B,MAAM,EAAE,OAAO,EAAE,GAAG,UAAiC,CAAC;gBACtD,MAAM,EAAE,YAAY,EAAE,UAAU,EAAE,GAAG,OAAO,CAAC;gBAC7C,mDACE,IAAI,EAAE,YAAY,EAClB,OAAO,EAAE,kBAAkB,CAAC,SAAS,EAAE,OAAO,CAAC,EAC/C,WAAW,IACR,CAAC,UAAU,CAAC,CAAC,CAAC,EAAE,UAAU,EAAE,CAAC,CAAC,CAAC,EAAE,CAAC,GAClC,CAAC,UAAU,CAAC,CAAC,CAAC,EAAE,UAAU,EAAE,CAAC,CAAC,CAAC,EAAE,CAAC,KACrC,YAAY,IACZ;aACH;YACD,KAAK,mCAAmC,CAAC,CAAC;gBACxC,MAAM,EAAE,OAAO,EAAE,GAAG,UAA8C,CAAC;gBACnE,MAAM,EAAE,cAAc,EAAE,WAAW,EAAE,UAAU,EAAE,GAAG,OAAO,CAAC;gBAC5D,mDACE,IAAI,EAAE,yBAAyB,EAC/B,OAAO,EAAE,wBAAwB,CAAC,SAAS,EAAE,OAAO,CAAC,EACrD,WAAW,IACR,CAAC,UAAU,CAAC,CAAC,CAAC,EAAE,UAAU,EAAE,CAAC,CAAC,CAAC,EAAE,CAAC,GAClC,CAAC,UAAU,CAAC,CAAC,CAAC,EAAE,UAAU,EAAE,CAAC,CAAC,CAAC,EAAE,CAAC,KACrC,cAAc;oBACd,WAAW,IACX;aACH;YACD,KAAK,2CAA2C,CAAC,CAAC;gBAChD,MAAM,EAAE,OAAO,EAAE,GAAG,UAAsD,CAAC;gBAC3E,MAAM,EAAE,cAAc,EAAE,WAAW,EAAE,UAAU,EAAE,GAAG,OAAO,CAAC;gBAC5D,mDACE,IAAI,EAAE,iCAAiC,EACvC,OAAO,EAAE,uCAAuC,CAAC,SAAS,EAAE,OAAO,CAAC,EACpE,WAAW,IACR,CAAC,UAAU,CAAC,CAAC,CAAC,EAAE,UAAU,EAAE,CAAC,CAAC,CAAC,EAAE,CAAC,GAClC,CAAC,UAAU,CAAC,CAAC,CAAC,EAAE,UAAU,EAAE,CAAC,CAAC,CAAC,EAAE,CAAC,KACrC,cAAc;oBACd,WAAW,IACX;aACH;YACD,KAAK,0CAA0C,CAAC,CAAC;gBAC/C,MAAM,EAAE,OAAO,EAAE,GAAG,UAAqD,CAAC;gBAC1E,MAAM,EAAE,cAAc,EAAE,WAAW,EAAE,UAAU,EAAE,GAAG,OAAO,CAAC;gBAC5D,mDACE,IAAI,EAAE,gCAAgC,EACtC,OAAO,EAAE,wBAAwB,CAAC,SAAS,EAAE,OAAO,CAAC,EACrD,WAAW,IACR,CAAC,UAAU,CAAC,CAAC,CAAC,EAAE,UAAU,EAAE,CAAC,CAAC,CAAC,EAAE,CAAC,GAClC,CAAC,UAAU,CAAC,CAAC,CAAC,EAAE,UAAU,EAAE,CAAC,CAAC,CAAC,EAAE,CAAC,KACrC,cAAc;oBACd,WAAW,IACX;aACH;YACD,KAAK,mCAAmC,CAAC,CAAC;gBACxC,MAAM,EAAE,OAAO,EAAE,GAAG,UAA8C,CAAC;gBACnE,MAAM,EAAE,YAAY,EAAE,UAAU,EAAE,GAAG,OAAO,CAAC;gBAC7C,mDACE,IAAI,EAAE,yBAAyB,EAC/B,OAAO,EAAE,wBAAwB,CAAC,SAAS,EAAE,OAAO,CAAC,EACrD,WAAW,IACR,CAAC,UAAU,CAAC,CAAC,CAAC,EAAE,UAAU,EAAE,CAAC,CAAC,CAAC,EAAE,CAAC,GAClC,CAAC,UAAU,CAAC,CAAC,CAAC,EAAE,UAAU,EAAE,CAAC,CAAC,CAAC,EAAE,CAAC,KACrC,YAAY,IACZ;aACH;YACD,OAAO,CAAC,CAAC;gBACP,MAAM,IAAI,KAAK,CAAC,6BAA6B,IAAI,EAAE,CAAC,CAAC;aACtD;SACF;IACH,CAAC,CAAC,CAAC;AACL,CAAC","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport {\n AnalyzeActionName,\n AnalyzeBatchResult,\n AnalyzeResult,\n CustomSingleLabelClassificationResult,\n CustomSingleLabelClassificationSuccessResult,\n EntityLinkingResult,\n EntityRecognitionResult,\n HealthcareEntity,\n HealthcareEntityRelation,\n HealthcareEntityRelationRole,\n HealthcareResult,\n HealthcareSuccessResult,\n KeyPhraseExtractionResult,\n LanguageDetectionResult,\n Opinion,\n PiiEntityRecognitionResult,\n SentenceSentiment,\n SentimentAnalysisResult,\n TextAnalysisError,\n 
TextAnalysisErrorResult,\n TextAnalysisSuccessResult,\n} from \"./models\";\nimport {\n AnalyzeResponse,\n AnalyzeTextLROResultUnion,\n AssessmentSentiment,\n CustomEntityRecognitionLROResult,\n CustomMultiLabelClassificationLROResult,\n CustomSingleLabelClassificationLROResult,\n DocumentError,\n EntitiesTaskResult,\n EntityLinkingLROResult,\n EntityLinkingTaskResult,\n EntityRecognitionLROResult,\n ErrorModel,\n ErrorResponse,\n ExtractiveSummarizationLROResult,\n CustomSingleLabelClassificationResult as GeneratedCustomSingleLabelClassificationResult,\n EntityLinkingResult as GeneratedEntityLinkingResult,\n EntitiesResult as GeneratedEntityRecognitionResult,\n HealthcareEntity as GeneratedHealthcareEntity,\n HealthcareResult as GeneratedHealthcareResult,\n KeyPhraseResult as GeneratedKeyPhraseExtractionResult,\n LanguageDetectionResult as GeneratedLanguageDetectionResult,\n PiiResult as GeneratedPiiEntityRecognitionResult,\n SentenceSentiment as GeneratedSentenceSentiment,\n SentimentResponse as GeneratedSentimentAnalysisResult,\n HealthcareEntitiesDocumentResult,\n HealthcareLROResult,\n HealthcareRelation,\n HealthcareRelationEntity,\n InnerErrorModel,\n KeyPhraseExtractionLROResult,\n KeyPhraseTaskResult,\n KnownAnalyzeTextLROResultsKind,\n LanguageDetectionInput,\n LanguageDetectionTaskResult,\n PiiEntityRecognitionLROResult,\n PiiTaskResult,\n SentenceTarget,\n SentimentLROResult,\n SentimentTaskResult,\n SingleClassificationDocumentResult,\n TargetRelation,\n TextDocumentInput,\n} from \"./generated\";\nimport {\n AssessmentIndex,\n parseAssessmentIndex,\n parseHealthcareEntityIndex,\n sortResponseIdObjects,\n} from \"./util\";\nimport { RestError } from \"@azure/core-rest-pipeline\";\n\n/**\n * Helper function for converting nested service error to the unified\n * TextAnalysisError\n */\nfunction toTextAnalysisError(errorModel: ErrorModel | InnerErrorModel): TextAnalysisError {\n // Return the deepest error.\n if (errorModel.innererror !== undefined) {\n return toTextAnalysisError(errorModel.innererror);\n }\n\n return {\n ...errorModel,\n };\n}\n\nfunction makeTextAnalysisErrorResult(id: string, error: ErrorModel): TextAnalysisErrorResult {\n return {\n id,\n error: toTextAnalysisError(error),\n };\n}\n\n/**\n * combines successful and erroneous results into a single array of results and\n * sort them so that the IDs order match that of the input documents array.\n * @param input - the array of documents sent to the service for processing.\n * @param response - the response received from the service.\n * @param options - an options bag that includes functions to process the results.\n */\nfunction transformDocumentResults<\n DocumentSuccess extends TextAnalysisSuccessResult,\n PublicDocumentSuccess extends TextAnalysisSuccessResult = DocumentSuccess,\n TError extends TextAnalysisErrorResult = TextAnalysisErrorResult\n>(\n input: TextDocumentInput[],\n response: {\n documents: DocumentSuccess[];\n errors: DocumentError[];\n },\n options?: {\n processSuccess?: (successResult: DocumentSuccess) => PublicDocumentSuccess;\n processError?: (id: string, error: ErrorModel) => TError;\n }\n): (PublicDocumentSuccess | TextAnalysisErrorResult)[] {\n const { processError = makeTextAnalysisErrorResult, processSuccess } = options || {};\n const successResults = processSuccess\n ? 
response.documents.map(processSuccess)\n : response.documents;\n const unsortedResults = (\n successResults as (PublicDocumentSuccess | TextAnalysisErrorResult)[]\n ).concat(response.errors.map((error) => processError(error.id, error.error)));\n\n return sortResponseIdObjects(input, unsortedResults);\n}\n\nfunction toLanguageDetectionResult(\n documents: LanguageDetectionInput[],\n results: GeneratedLanguageDetectionResult\n): LanguageDetectionResult[] {\n return transformDocumentResults(documents, results, {\n processSuccess: ({ detectedLanguage, ...rest }) => ({\n primaryLanguage: detectedLanguage,\n ...rest,\n }),\n });\n}\n\nfunction toPiiEntityRecognitionResult(\n documents: TextDocumentInput[],\n results: GeneratedPiiEntityRecognitionResult\n): PiiEntityRecognitionResult[] {\n return transformDocumentResults(documents, results);\n}\n\nfunction toSentimentAnalysisResult(\n documents: TextDocumentInput[],\n results: GeneratedSentimentAnalysisResult\n): SentimentAnalysisResult[] {\n return transformDocumentResults(documents, results, {\n processSuccess: ({ sentences, ...rest }) => ({\n ...rest,\n sentences: sentences.map((sentence) =>\n convertGeneratedSentenceSentiment(sentence, sentences)\n ),\n }),\n });\n}\n\n/**\n * Converts a sentence sentiment object returned by the service to another that\n * is user-friendly.\n *\n * @param sentence - The sentence sentiment object to be converted.\n * @param response - The entire response returned by the service.\n * @returns The user-friendly sentence sentiment object.\n * @internal\n */\nfunction convertGeneratedSentenceSentiment(\n { targets, assessments: _, ...rest }: GeneratedSentenceSentiment,\n sentences: GeneratedSentenceSentiment[]\n): SentenceSentiment {\n return {\n ...rest,\n opinions:\n targets?.map(\n // eslint-disable-next-line @typescript-eslint/no-shadow\n ({ relations, ...rest }: SentenceTarget): Opinion => ({\n target: rest,\n assessments: relations\n .filter((relation) => relation.relationType === \"assessment\")\n .map((relation) => convertTargetRelationToAssessmentSentiment(relation, sentences)),\n })\n ) ?? 
[],\n };\n}\n\n/**\n * Converts a target relation object returned by the service to an assessment\n * sentiment object where JSON pointers in the former are realized in the\n * latter.\n *\n * @param targetRelation - The target relation object to be converted.\n * @param response - The entire response returned by the service.\n * @returns The user-friendly assessment sentiment object.\n * @internal\n */\nfunction convertTargetRelationToAssessmentSentiment(\n targetRelation: TargetRelation,\n sentences: GeneratedSentenceSentiment[]\n): AssessmentSentiment {\n const assessmentPtr = targetRelation.ref;\n const assessmentIndex: AssessmentIndex = parseAssessmentIndex(assessmentPtr);\n const assessment =\n sentences?.[assessmentIndex.sentence].assessments?.[assessmentIndex.assessment];\n if (assessment !== undefined) {\n return assessment;\n } else {\n throw new Error(`Pointer \"${assessmentPtr}\" is not a valid Assessment pointer`);\n }\n}\n\nfunction toEntityLinkingResult(\n documents: TextDocumentInput[],\n results: GeneratedEntityLinkingResult\n): EntityLinkingResult[] {\n return transformDocumentResults(documents, results);\n}\n\nfunction toKeyPhraseExtractionResult(\n documents: TextDocumentInput[],\n results: GeneratedKeyPhraseExtractionResult\n): KeyPhraseExtractionResult[] {\n return transformDocumentResults(documents, results);\n}\n\nfunction toEntityRecognitionResult(\n documents: TextDocumentInput[],\n results: GeneratedEntityRecognitionResult\n): EntityRecognitionResult[] {\n return transformDocumentResults(documents, results);\n}\n\n/**\n * @internal\n */\nexport function transformActionResult<ActionName extends AnalyzeActionName>(\n actionName: ActionName,\n input: TextDocumentInput[] | LanguageDetectionInput[],\n response: AnalyzeResponse\n): AnalyzeResult<AnalyzeActionName> {\n switch (response.kind) {\n case \"EntityLinkingResults\": {\n return toEntityLinkingResult(input, (response as EntityLinkingTaskResult).results);\n }\n case \"EntityRecognitionResults\": {\n return toEntityRecognitionResult(input, (response as EntitiesTaskResult).results);\n }\n case \"KeyPhraseExtractionResults\": {\n return toKeyPhraseExtractionResult(input, (response as KeyPhraseTaskResult).results);\n }\n case \"PiiEntityRecognitionResults\": {\n return toPiiEntityRecognitionResult(input, (response as PiiTaskResult).results);\n }\n case \"SentimentAnalysisResults\": {\n return toSentimentAnalysisResult(input, (response as SentimentTaskResult).results);\n }\n case \"LanguageDetectionResults\": {\n return toLanguageDetectionResult(input, (response as LanguageDetectionTaskResult).results);\n }\n default: {\n const __exhaust: never = response;\n throw new Error(`Unsupported results kind: ${__exhaust} for an action of type ${actionName}`);\n }\n }\n}\n\nfunction appendReadableErrorMessage(currentMessage: string, innerMessage: string): string {\n let message = currentMessage;\n if (message.slice(-1) !== \".\") {\n message = message + \".\";\n }\n return message + \" \" + innerMessage;\n}\n\n/**\n * @internal\n * parses incoming errors from the service/\n * @param error - the incoming error\n */\nexport function transformError(errorResponse: unknown): any {\n const strongErrorResponse = errorResponse as {\n response: {\n parsedBody?: ErrorResponse;\n };\n statusCode: number;\n };\n if (!strongErrorResponse.response) {\n throw errorResponse;\n }\n const topLevelError = strongErrorResponse.response.parsedBody?.error;\n if (!topLevelError) return errorResponse;\n let errorMessage = topLevelError.message;\n let 
code = topLevelError.code;\n function unwrap(error: ErrorModel | InnerErrorModel): ErrorModel {\n const innerError = error.innererror;\n if (innerError) {\n if (innerError.message) {\n errorMessage = appendReadableErrorMessage(errorMessage, innerError.message);\n }\n if (innerError.code) {\n code = innerError.code;\n }\n return unwrap(innerError);\n }\n return error as ErrorModel;\n }\n unwrap(topLevelError);\n return new RestError(errorMessage, {\n code,\n statusCode: strongErrorResponse.statusCode,\n });\n}\n\nfunction toHealthcareResult(\n documents: TextDocumentInput[],\n results: GeneratedHealthcareResult\n): HealthcareResult[] {\n function makeHealthcareEntity(entity: GeneratedHealthcareEntity): HealthcareEntity {\n const { dataSources, ...rest } = entity;\n return {\n dataSources: dataSources ?? [],\n ...rest,\n };\n }\n function makeHealthcareRelation(\n entities: HealthcareEntity[]\n ): (relation: HealthcareRelation) => HealthcareEntityRelation {\n return (relation: HealthcareRelation): HealthcareEntityRelation => ({\n relationType: relation.relationType,\n roles: relation.entities.map(\n (role: HealthcareRelationEntity): HealthcareEntityRelationRole => ({\n entity: entities[parseHealthcareEntityIndex(role.ref)],\n name: role.role,\n })\n ),\n });\n }\n return transformDocumentResults<HealthcareEntitiesDocumentResult, HealthcareSuccessResult>(\n documents,\n results,\n {\n processSuccess: ({ entities, relations, ...rest }) => {\n const newEntities = entities.map(makeHealthcareEntity);\n return {\n entities: newEntities,\n entityRelations: relations.map(makeHealthcareRelation(newEntities)),\n ...rest,\n };\n },\n }\n );\n}\n\nfunction toCustomSingleLabelClassificationResult(\n documents: TextDocumentInput[],\n results: GeneratedCustomSingleLabelClassificationResult\n): CustomSingleLabelClassificationResult[] {\n return transformDocumentResults<\n SingleClassificationDocumentResult,\n CustomSingleLabelClassificationSuccessResult\n >(documents, results, {\n processSuccess: ({ classification, ...rest }) => {\n return {\n classifications: [classification],\n ...rest,\n };\n },\n });\n}\n\n/**\n * @internal\n */\nexport function transformAnalyzeBatchResults(\n documents: TextDocumentInput[],\n response: AnalyzeTextLROResultUnion[] = []\n): AnalyzeBatchResult[] {\n return response.map((actionData) => {\n const { lastUpdateDateTime: completedOn, actionName, kind } = actionData;\n switch (kind as KnownAnalyzeTextLROResultsKind) {\n case \"SentimentAnalysisLROResults\": {\n const { results } = actionData as SentimentLROResult;\n const { modelVersion, statistics } = results;\n return {\n kind: \"SentimentAnalysis\",\n results: toSentimentAnalysisResult(documents, results),\n completedOn,\n ...(actionName ? { actionName } : {}),\n ...(statistics ? { statistics } : {}),\n modelVersion,\n };\n }\n case \"EntityRecognitionLROResults\": {\n const { results } = actionData as EntityRecognitionLROResult;\n const { modelVersion, statistics } = results;\n return {\n kind: \"EntityRecognition\",\n results: toEntityRecognitionResult(documents, results),\n completedOn,\n ...(actionName ? { actionName } : {}),\n ...(statistics ? { statistics } : {}),\n modelVersion,\n };\n }\n case \"PiiEntityRecognitionLROResults\": {\n const { results } = actionData as PiiEntityRecognitionLROResult;\n const { modelVersion, statistics } = results;\n return {\n kind: \"PiiEntityRecognition\",\n results: toPiiEntityRecognitionResult(documents, results),\n completedOn,\n ...(actionName ? 
{ actionName } : {}),\n ...(statistics ? { statistics } : {}),\n modelVersion,\n };\n }\n case \"KeyPhraseExtractionLROResults\": {\n const { results } = actionData as KeyPhraseExtractionLROResult;\n const { modelVersion, statistics } = results;\n return {\n kind: \"KeyPhraseExtraction\",\n results: toKeyPhraseExtractionResult(documents, results),\n completedOn,\n ...(actionName ? { actionName } : {}),\n ...(statistics ? { statistics } : {}),\n modelVersion,\n };\n }\n case \"EntityLinkingLROResults\": {\n const { results } = actionData as EntityLinkingLROResult;\n const { modelVersion, statistics } = results;\n return {\n kind: \"EntityLinking\",\n results: toEntityLinkingResult(documents, results),\n completedOn,\n ...(actionName ? { actionName } : {}),\n ...(statistics ? { statistics } : {}),\n modelVersion,\n };\n }\n case \"HealthcareLROResults\": {\n const { results } = actionData as HealthcareLROResult;\n const { modelVersion, statistics } = results;\n return {\n kind: \"Healthcare\",\n results: toHealthcareResult(documents, results),\n completedOn,\n ...(actionName ? { actionName } : {}),\n ...(statistics ? { statistics } : {}),\n modelVersion,\n };\n }\n case \"CustomEntityRecognitionLROResults\": {\n const { results } = actionData as CustomEntityRecognitionLROResult;\n const { deploymentName, projectName, statistics } = results;\n return {\n kind: \"CustomEntityRecognition\",\n results: transformDocumentResults(documents, results),\n completedOn,\n ...(actionName ? { actionName } : {}),\n ...(statistics ? { statistics } : {}),\n deploymentName,\n projectName,\n };\n }\n case \"CustomSingleLabelClassificationLROResults\": {\n const { results } = actionData as CustomSingleLabelClassificationLROResult;\n const { deploymentName, projectName, statistics } = results;\n return {\n kind: \"CustomSingleLabelClassification\",\n results: toCustomSingleLabelClassificationResult(documents, results),\n completedOn,\n ...(actionName ? { actionName } : {}),\n ...(statistics ? { statistics } : {}),\n deploymentName,\n projectName,\n };\n }\n case \"CustomMultiLabelClassificationLROResults\": {\n const { results } = actionData as CustomMultiLabelClassificationLROResult;\n const { deploymentName, projectName, statistics } = results;\n return {\n kind: \"CustomMultiLabelClassification\",\n results: transformDocumentResults(documents, results),\n completedOn,\n ...(actionName ? { actionName } : {}),\n ...(statistics ? { statistics } : {}),\n deploymentName,\n projectName,\n };\n }\n case \"ExtractiveSummarizationLROResults\": {\n const { results } = actionData as ExtractiveSummarizationLROResult;\n const { modelVersion, statistics } = results;\n return {\n kind: \"ExtractiveSummarization\",\n results: transformDocumentResults(documents, results),\n completedOn,\n ...(actionName ? { actionName } : {}),\n ...(statistics ? { statistics } : {}),\n modelVersion,\n };\n }\n default: {\n throw new Error(`Unsupported results kind: ${kind}`);\n }\n }\n });\n}\n"]}
1
+ {"version":3,"file":"transforms.js","sourceRoot":"","sources":["../../src/transforms.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;;AAqElC,OAAO,EAEL,oBAAoB,EACpB,0BAA0B,EAC1B,qBAAqB,GACtB,MAAM,QAAQ,CAAC;AAChB,OAAO,EAAE,SAAS,EAAE,MAAM,2BAA2B,CAAC;AAEtD;;;GAGG;AACH,SAAS,mBAAmB,CAAC,UAAwC;IACnE,4BAA4B;IAC5B,IAAI,UAAU,CAAC,UAAU,KAAK,SAAS,EAAE;QACvC,OAAO,mBAAmB,CAAC,UAAU,CAAC,UAAU,CAAC,CAAC;KACnD;IAED,yBACK,UAAU,EACb;AACJ,CAAC;AAED,SAAS,2BAA2B,CAAC,EAAU,EAAE,KAAiB;IAChE,OAAO;QACL,EAAE;QACF,KAAK,EAAE,mBAAmB,CAAC,KAAK,CAAC;KAClC,CAAC;AACJ,CAAC;AAED;;;;;;GAMG;AACH,SAAS,wBAAwB,CAK/B,KAA0B,EAC1B,QAGC,EACD,OAGC;IAED,MAAM,EAAE,YAAY,GAAG,2BAA2B,EAAE,cAAc,EAAE,GAAG,OAAO,IAAI,EAAE,CAAC;IACrF,MAAM,cAAc,GAAG,cAAc;QACnC,CAAC,CAAC,QAAQ,CAAC,SAAS,CAAC,GAAG,CAAC,cAAc,CAAC;QACxC,CAAC,CAAC,QAAQ,CAAC,SAAS,CAAC;IACvB,MAAM,eAAe,GACnB,cACD,CAAC,MAAM,CAAC,QAAQ,CAAC,MAAM,CAAC,GAAG,CAAC,CAAC,KAAK,EAAE,EAAE,CAAC,YAAY,CAAC,KAAK,CAAC,EAAE,EAAE,KAAK,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC;IAE9E,OAAO,qBAAqB,CAAC,KAAK,EAAE,eAAe,CAAC,CAAC;AACvD,CAAC;AAED,SAAS,yBAAyB,CAChC,SAAmC,EACnC,OAAyC;IAEzC,OAAO,wBAAwB,CAAC,SAAS,EAAE,OAAO,EAAE;QAClD,cAAc,EAAE,CAAC,EAA6B,EAAE,EAAE;gBAAjC,EAAE,gBAAgB,OAAW,EAAN,IAAI,cAA3B,oBAA6B,CAAF;YAAO,OAAA,iBACjD,eAAe,EAAE,gBAAgB,IAC9B,IAAI,EACP,CAAA;SAAA;KACH,CAAC,CAAC;AACL,CAAC;AAED,SAAS,4BAA4B,CACnC,SAA8B,EAC9B,OAA4C;IAE5C,OAAO,wBAAwB,CAAC,SAAS,EAAE,OAAO,CAAC,CAAC;AACtD,CAAC;AAED,SAAS,yBAAyB,CAChC,SAA8B,EAC9B,OAAyC;IAEzC,OAAO,wBAAwB,CAAC,SAAS,EAAE,OAAO,EAAE;QAClD,cAAc,EAAE,CAAC,EAAsB,EAAE,EAAE;gBAA1B,EAAE,SAAS,OAAW,EAAN,IAAI,cAApB,aAAsB,CAAF;YAAO,OAAA,iCACvC,IAAI,KACP,SAAS,EAAE,SAAS,CAAC,GAAG,CAAC,CAAC,QAAQ,EAAE,EAAE,CACpC,iCAAiC,CAAC,QAAQ,EAAE,SAAS,CAAC,CACvD,IACD,CAAA;SAAA;KACH,CAAC,CAAC;AACL,CAAC;AAED;;;;;;;;GAQG;AACH,SAAS,iCAAiC,CACxC,EAAgE,EAChE,SAAuC;;QADvC,EAAE,OAAO,EAAE,WAAW,EAAE,CAAC,OAAuC,EAAlC,IAAI,cAAlC,0BAAoC,CAAF;IAGlC,uCACK,IAAI,KACP,QAAQ,EACN,MAAA,OAAO,aAAP,OAAO,uBAAP,OAAO,CAAE,GAAG;QACV,wDAAwD;QACxD,CAAC,EAAsC,EAAW,EAAE;gBAAnD,EAAE,SAAS,OAA2B,EAAtB,IAAI,cAApB,aAAsB,CAAF;YAAgC,OAAA,CAAC;gBACpD,MAAM,EAAE,IAAI;gBACZ,WAAW,EAAE,SAAS;qBACnB,MAAM,CAAC,CAAC,QAAQ,EAAE,EAAE,CAAC,QAAQ,CAAC,YAAY,KAAK,YAAY,CAAC;qBAC5D,GAAG,CAAC,CAAC,QAAQ,EAAE,EAAE,CAAC,0CAA0C,CAAC,QAAQ,EAAE,SAAS,CAAC,CAAC;aACtF,CAAC,CAAA;SAAA,CACH,mCAAI,EAAE,IACT;AACJ,CAAC;AAED;;;;;;;;;GASG;AACH,SAAS,0CAA0C,CACjD,cAA8B,EAC9B,SAAuC;;IAEvC,MAAM,aAAa,GAAG,cAAc,CAAC,GAAG,CAAC;IACzC,MAAM,eAAe,GAAoB,oBAAoB,CAAC,aAAa,CAAC,CAAC;IAC7E,MAAM,UAAU,GACd,MAAA,SAAS,aAAT,SAAS,uBAAT,SAAS,CAAG,eAAe,CAAC,QAAQ,EAAE,WAAW,0CAAG,eAAe,CAAC,UAAU,CAAC,CAAC;IAClF,IAAI,UAAU,KAAK,SAAS,EAAE;QAC5B,OAAO,UAAU,CAAC;KACnB;SAAM;QACL,MAAM,IAAI,KAAK,CAAC,YAAY,aAAa,qCAAqC,CAAC,CAAC;KACjF;AACH,CAAC;AAED,SAAS,qBAAqB,CAC5B,SAA8B,EAC9B,OAAqC;IAErC,OAAO,wBAAwB,CAAC,SAAS,EAAE,OAAO,CAAC,CAAC;AACtD,CAAC;AAED,SAAS,2BAA2B,CAClC,SAA8B,EAC9B,OAA2C;IAE3C,OAAO,wBAAwB,CAAC,SAAS,EAAE,OAAO,CAAC,CAAC;AACtD,CAAC;AAED,SAAS,yBAAyB,CAChC,SAA8B,EAC9B,OAAyC;IAEzC,OAAO,wBAAwB,CAAC,SAAS,EAAE,OAAO,CAAC,CAAC;AACtD,CAAC;AAED;;GAEG;AACH,MAAM,UAAU,qBAAqB,CACnC,UAAsB,EACtB,KAAqD,EACrD,QAAyB;IAEzB,QAAQ,QAAQ,CAAC,IAAI,EAAE;QACrB,KAAK,sBAAsB,CAAC,CAAC;YAC3B,OAAO,qBAAqB,CAAC,KAAK,EAAG,QAAoC,CAAC,OAAO,CAAC,CAAC;SACpF;QACD,KAAK,0BAA0B,CAAC,CAAC;YAC/B,OAAO,yBAAyB,CAAC,KAAK,EAAG,QAA+B,CAAC,OAAO,CAAC,CAAC;SACnF;QACD,KAAK,4BAA4B,CAAC,CAAC;YACjC,OAAO,2BAA2B,CAAC,KAAK,EAAG,QAAgC,CAAC,OAAO,CAAC,CAAC;SACtF;QACD,KAAK,6BAA6B,CAAC,CAAC;YAClC,OAAO,4BAA4B,CAAC,KAAK,EAAG,QAA0B,CAAC,OAAO,CAAC,CAAC;SACjF;QACD,KAAK,0BAA0B,CAAC,CAAC;YAC/B,OAAO,yBAAyB,CAAC,KAAK,EAAG,QAAgC,CAAC,OAAO,CAAC,CAAC;SACpF;QACD,KAAK,0BAA0B,CAAC,CAAC;Y
AC/B,OAAO,yBAAyB,CAAC,KAAK,EAAG,QAAwC,CAAC,OAAO,CAAC,CAAC;SAC5F;QACD,OAAO,CAAC,CAAC;YACP,MAAM,SAAS,GAAU,QAAQ,CAAC;YAClC,MAAM,IAAI,KAAK,CAAC,6BAA6B,SAAS,0BAA0B,UAAU,EAAE,CAAC,CAAC;SAC/F;KACF;AACH,CAAC;AAED,SAAS,0BAA0B,CAAC,cAAsB,EAAE,YAAoB;IAC9E,IAAI,OAAO,GAAG,cAAc,CAAC;IAC7B,IAAI,OAAO,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC,KAAK,GAAG,EAAE;QAC7B,OAAO,GAAG,OAAO,GAAG,GAAG,CAAC;KACzB;IACD,OAAO,OAAO,GAAG,GAAG,GAAG,YAAY,CAAC;AACtC,CAAC;AAED;;;;GAIG;AACH,SAAS,cAAc,CAAC,aAAsB;;IAC5C,MAAM,mBAAmB,GAAG,aAK3B,CAAC;IACF,IAAI,CAAC,mBAAmB,CAAC,QAAQ,EAAE;QACjC,MAAM,aAAa,CAAC;KACrB;IACD,MAAM,aAAa,GAAG,MAAA,mBAAmB,CAAC,QAAQ,CAAC,UAAU,0CAAE,KAAK,CAAC;IACrE,IAAI,CAAC,aAAa;QAAE,OAAO,aAAa,CAAC;IACzC,IAAI,YAAY,GAAG,aAAa,CAAC,OAAO,CAAC;IACzC,IAAI,IAAI,GAAG,aAAa,CAAC,IAAI,CAAC;IAC9B,SAAS,MAAM,CAAC,KAAmC;QACjD,MAAM,UAAU,GAAG,KAAK,CAAC,UAAU,CAAC;QACpC,IAAI,UAAU,EAAE;YACd,IAAI,UAAU,CAAC,OAAO,EAAE;gBACtB,YAAY,GAAG,0BAA0B,CAAC,YAAY,EAAE,UAAU,CAAC,OAAO,CAAC,CAAC;aAC7E;YACD,IAAI,UAAU,CAAC,IAAI,EAAE;gBACnB,IAAI,GAAG,UAAU,CAAC,IAAI,CAAC;aACxB;YACD,OAAO,MAAM,CAAC,UAAU,CAAC,CAAC;SAC3B;QACD,OAAO,KAAmB,CAAC;IAC7B,CAAC;IACD,MAAM,CAAC,aAAa,CAAC,CAAC;IACtB,OAAO,IAAI,SAAS,CAAC,YAAY,EAAE;QACjC,IAAI;QACJ,UAAU,EAAE,mBAAmB,CAAC,UAAU;KAC3C,CAAC,CAAC;AACL,CAAC;AAED,MAAM,CAAC,KAAK,UAAU,UAAU,CAAI,CAAa;IAC/C,IAAI;QACF,OAAO,MAAM,CAAC,CAAC;KAChB;IAAC,OAAO,CAAU,EAAE;QACnB,MAAM,cAAc,CAAC,CAAC,CAAC,CAAC;KACzB;AACH,CAAC;AAED,SAAS,kBAAkB,CACzB,SAA8B,EAC9B,OAAkC;IAElC,SAAS,oBAAoB,CAAC,MAAiC;QAC7D,MAAM,EAAE,WAAW,KAAc,MAAM,EAAf,IAAI,UAAK,MAAM,EAAjC,eAAwB,CAAS,CAAC;QACxC,uBACE,WAAW,EAAE,WAAW,aAAX,WAAW,cAAX,WAAW,GAAI,EAAE,IAC3B,IAAI,EACP;IACJ,CAAC;IACD,SAAS,sBAAsB,CAC7B,QAA4B;QAE5B,OAAO,CAAC,QAA4B,EAA4B,EAAE,CAAC,CAAC;YAClE,YAAY,EAAE,QAAQ,CAAC,YAAY;YACnC,KAAK,EAAE,QAAQ,CAAC,QAAQ,CAAC,GAAG,CAC1B,CAAC,IAA8B,EAAgC,EAAE,CAAC,CAAC;gBACjE,MAAM,EAAE,QAAQ,CAAC,0BAA0B,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC;gBACtD,IAAI,EAAE,IAAI,CAAC,IAAI;aAChB,CAAC,CACH;SACF,CAAC,CAAC;IACL,CAAC;IACD,OAAO,wBAAwB,CAC7B,SAAS,EACT,OAAO,EACP;QACE,cAAc,EAAE,CAAC,EAAgC,EAAE,EAAE;gBAApC,EAAE,QAAQ,EAAE,SAAS,OAAW,EAAN,IAAI,cAA9B,yBAAgC,CAAF;YAC7C,MAAM,WAAW,GAAG,QAAQ,CAAC,GAAG,CAAC,oBAAoB,CAAC,CAAC;YACvD,uBACE,QAAQ,EAAE,WAAW,EACrB,eAAe,EAAE,SAAS,CAAC,GAAG,CAAC,sBAAsB,CAAC,WAAW,CAAC,CAAC,IAChE,IAAI,EACP;QACJ,CAAC;KACF,CACF,CAAC;AACJ,CAAC;AAED,SAAS,uCAAuC,CAC9C,SAA8B,EAC9B,OAAuD;IAEvD,OAAO,wBAAwB,CAG7B,SAAS,EAAE,OAAO,EAAE;QACpB,cAAc,EAAE,CAAC,EAA2B,EAAE,EAAE;gBAA/B,EAAE,cAAc,OAAW,EAAN,IAAI,cAAzB,kBAA2B,CAAF;YACxC,uBACE,eAAe,EAAE,CAAC,cAAc,CAAC,IAC9B,IAAI,EACP;QACJ,CAAC;KACF,CAAC,CAAC;AACL,CAAC;AAED;;GAEG;AACH,MAAM,UAAU,4BAA4B,CAC1C,SAA8B,EAC9B,WAAwC,EAAE;IAE1C,OAAO,QAAQ,CAAC,GAAG,CAAC,CAAC,UAAU,EAAE,EAAE;QACjC,MAAM,EAAE,kBAAkB,EAAE,WAAW,EAAE,UAAU,EAAE,IAAI,EAAE,GAAG,UAAU,CAAC;QACzE,QAAQ,IAAsC,EAAE;YAC9C,KAAK,6BAA6B,CAAC,CAAC;gBAClC,MAAM,EAAE,OAAO,EAAE,GAAG,UAAgC,CAAC;gBACrD,MAAM,EAAE,YAAY,EAAE,UAAU,EAAE,GAAG,OAAO,CAAC;gBAC7C,mDACE,IAAI,EAAE,mBAAmB,EACzB,OAAO,EAAE,yBAAyB,CAAC,SAAS,EAAE,OAAO,CAAC,EACtD,WAAW,IACR,CAAC,UAAU,CAAC,CAAC,CAAC,EAAE,UAAU,EAAE,CAAC,CAAC,CAAC,EAAE,CAAC,GAClC,CAAC,UAAU,CAAC,CAAC,CAAC,EAAE,UAAU,EAAE,CAAC,CAAC,CAAC,EAAE,CAAC,KACrC,YAAY,IACZ;aACH;YACD,KAAK,6BAA6B,CAAC,CAAC;gBAClC,MAAM,EAAE,OAAO,EAAE,GAAG,UAAwC,CAAC;gBAC7D,MAAM,EAAE,YAAY,EAAE,UAAU,EAAE,GAAG,OAAO,CAAC;gBAC7C,mDACE,IAAI,EAAE,mBAAmB,EACzB,OAAO,EAAE,yBAAyB,CAAC,SAAS,EAAE,OAAO,CAAC,EACtD,WAAW,IACR,CAAC,UAAU,CAAC,CAAC,CAAC,EAAE,UAAU,EAAE,CAAC,CAAC,CAAC,EAAE,CAAC,GAClC,CAAC,UAAU,CAAC,CAAC,CAAC,EAAE,UAAU,EAAE,CAAC,CAAC,CAAC,EAAE,CAAC,KACrC,YAAY,IACZ;aACH;YACD,KAAK,gCAAgC,CAAC,CAAC;gBACrC,MAAM,EAAE,OAAO,EAAE,GAAG,UAA2C,CAAC;gBAChE,MAA
M,EAAE,YAAY,EAAE,UAAU,EAAE,GAAG,OAAO,CAAC;gBAC7C,mDACE,IAAI,EAAE,sBAAsB,EAC5B,OAAO,EAAE,4BAA4B,CAAC,SAAS,EAAE,OAAO,CAAC,EACzD,WAAW,IACR,CAAC,UAAU,CAAC,CAAC,CAAC,EAAE,UAAU,EAAE,CAAC,CAAC,CAAC,EAAE,CAAC,GAClC,CAAC,UAAU,CAAC,CAAC,CAAC,EAAE,UAAU,EAAE,CAAC,CAAC,CAAC,EAAE,CAAC,KACrC,YAAY,IACZ;aACH;YACD,KAAK,+BAA+B,CAAC,CAAC;gBACpC,MAAM,EAAE,OAAO,EAAE,GAAG,UAA0C,CAAC;gBAC/D,MAAM,EAAE,YAAY,EAAE,UAAU,EAAE,GAAG,OAAO,CAAC;gBAC7C,mDACE,IAAI,EAAE,qBAAqB,EAC3B,OAAO,EAAE,2BAA2B,CAAC,SAAS,EAAE,OAAO,CAAC,EACxD,WAAW,IACR,CAAC,UAAU,CAAC,CAAC,CAAC,EAAE,UAAU,EAAE,CAAC,CAAC,CAAC,EAAE,CAAC,GAClC,CAAC,UAAU,CAAC,CAAC,CAAC,EAAE,UAAU,EAAE,CAAC,CAAC,CAAC,EAAE,CAAC,KACrC,YAAY,IACZ;aACH;YACD,KAAK,yBAAyB,CAAC,CAAC;gBAC9B,MAAM,EAAE,OAAO,EAAE,GAAG,UAAoC,CAAC;gBACzD,MAAM,EAAE,YAAY,EAAE,UAAU,EAAE,GAAG,OAAO,CAAC;gBAC7C,mDACE,IAAI,EAAE,eAAe,EACrB,OAAO,EAAE,qBAAqB,CAAC,SAAS,EAAE,OAAO,CAAC,EAClD,WAAW,IACR,CAAC,UAAU,CAAC,CAAC,CAAC,EAAE,UAAU,EAAE,CAAC,CAAC,CAAC,EAAE,CAAC,GAClC,CAAC,UAAU,CAAC,CAAC,CAAC,EAAE,UAAU,EAAE,CAAC,CAAC,CAAC,EAAE,CAAC,KACrC,YAAY,IACZ;aACH;YACD,KAAK,sBAAsB,CAAC,CAAC;gBAC3B,MAAM,EAAE,OAAO,EAAE,GAAG,UAAiC,CAAC;gBACtD,MAAM,EAAE,YAAY,EAAE,UAAU,EAAE,GAAG,OAAO,CAAC;gBAC7C,mDACE,IAAI,EAAE,YAAY,EAClB,OAAO,EAAE,kBAAkB,CAAC,SAAS,EAAE,OAAO,CAAC,EAC/C,WAAW,IACR,CAAC,UAAU,CAAC,CAAC,CAAC,EAAE,UAAU,EAAE,CAAC,CAAC,CAAC,EAAE,CAAC,GAClC,CAAC,UAAU,CAAC,CAAC,CAAC,EAAE,UAAU,EAAE,CAAC,CAAC,CAAC,EAAE,CAAC,KACrC,YAAY,IACZ;aACH;YACD,KAAK,mCAAmC,CAAC,CAAC;gBACxC,MAAM,EAAE,OAAO,EAAE,GAAG,UAA8C,CAAC;gBACnE,MAAM,EAAE,cAAc,EAAE,WAAW,EAAE,UAAU,EAAE,GAAG,OAAO,CAAC;gBAC5D,mDACE,IAAI,EAAE,yBAAyB,EAC/B,OAAO,EAAE,wBAAwB,CAAC,SAAS,EAAE,OAAO,CAAC,EACrD,WAAW,IACR,CAAC,UAAU,CAAC,CAAC,CAAC,EAAE,UAAU,EAAE,CAAC,CAAC,CAAC,EAAE,CAAC,GAClC,CAAC,UAAU,CAAC,CAAC,CAAC,EAAE,UAAU,EAAE,CAAC,CAAC,CAAC,EAAE,CAAC,KACrC,cAAc;oBACd,WAAW,IACX;aACH;YACD,KAAK,2CAA2C,CAAC,CAAC;gBAChD,MAAM,EAAE,OAAO,EAAE,GAAG,UAAsD,CAAC;gBAC3E,MAAM,EAAE,cAAc,EAAE,WAAW,EAAE,UAAU,EAAE,GAAG,OAAO,CAAC;gBAC5D,mDACE,IAAI,EAAE,iCAAiC,EACvC,OAAO,EAAE,uCAAuC,CAAC,SAAS,EAAE,OAAO,CAAC,EACpE,WAAW,IACR,CAAC,UAAU,CAAC,CAAC,CAAC,EAAE,UAAU,EAAE,CAAC,CAAC,CAAC,EAAE,CAAC,GAClC,CAAC,UAAU,CAAC,CAAC,CAAC,EAAE,UAAU,EAAE,CAAC,CAAC,CAAC,EAAE,CAAC,KACrC,cAAc;oBACd,WAAW,IACX;aACH;YACD,KAAK,0CAA0C,CAAC,CAAC;gBAC/C,MAAM,EAAE,OAAO,EAAE,GAAG,UAAqD,CAAC;gBAC1E,MAAM,EAAE,cAAc,EAAE,WAAW,EAAE,UAAU,EAAE,GAAG,OAAO,CAAC;gBAC5D,mDACE,IAAI,EAAE,gCAAgC,EACtC,OAAO,EAAE,wBAAwB,CAAC,SAAS,EAAE,OAAO,CAAC,EACrD,WAAW,IACR,CAAC,UAAU,CAAC,CAAC,CAAC,EAAE,UAAU,EAAE,CAAC,CAAC,CAAC,EAAE,CAAC,GAClC,CAAC,UAAU,CAAC,CAAC,CAAC,EAAE,UAAU,EAAE,CAAC,CAAC,CAAC,EAAE,CAAC,KACrC,cAAc;oBACd,WAAW,IACX;aACH;YACD,KAAK,mCAAmC,CAAC,CAAC;gBACxC,MAAM,EAAE,OAAO,EAAE,GAAG,UAA8C,CAAC;gBACnE,MAAM,EAAE,YAAY,EAAE,UAAU,EAAE,GAAG,OAAO,CAAC;gBAC7C,mDACE,IAAI,EAAE,yBAAyB,EAC/B,OAAO,EAAE,wBAAwB,CAAC,SAAS,EAAE,OAAO,CAAC,EACrD,WAAW,IACR,CAAC,UAAU,CAAC,CAAC,CAAC,EAAE,UAAU,EAAE,CAAC,CAAC,CAAC,EAAE,CAAC,GAClC,CAAC,UAAU,CAAC,CAAC,CAAC,EAAE,UAAU,EAAE,CAAC,CAAC,CAAC,EAAE,CAAC,KACrC,YAAY,IACZ;aACH;YACD,OAAO,CAAC,CAAC;gBACP,MAAM,IAAI,KAAK,CAAC,6BAA6B,IAAI,EAAE,CAAC,CAAC;aACtD;SACF;IACH,CAAC,CAAC,CAAC;AACL,CAAC","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport {\n AnalyzeActionName,\n AnalyzeBatchResult,\n AnalyzeResult,\n CustomSingleLabelClassificationResult,\n CustomSingleLabelClassificationSuccessResult,\n EntityLinkingResult,\n EntityRecognitionResult,\n HealthcareEntity,\n HealthcareEntityRelation,\n HealthcareEntityRelationRole,\n HealthcareResult,\n HealthcareSuccessResult,\n KeyPhraseExtractionResult,\n 
LanguageDetectionResult,\n Opinion,\n PiiEntityRecognitionResult,\n SentenceSentiment,\n SentimentAnalysisResult,\n TextAnalysisError,\n TextAnalysisErrorResult,\n TextAnalysisSuccessResult,\n} from \"./models\";\nimport {\n AnalyzeResponse,\n AnalyzeTextLROResultUnion,\n AssessmentSentiment,\n CustomEntityRecognitionLROResult,\n CustomMultiLabelClassificationLROResult,\n CustomSingleLabelClassificationLROResult,\n DocumentError,\n EntitiesTaskResult,\n EntityLinkingLROResult,\n EntityLinkingTaskResult,\n EntityRecognitionLROResult,\n ErrorModel,\n ErrorResponse,\n ExtractiveSummarizationLROResult,\n CustomSingleLabelClassificationResult as GeneratedCustomSingleLabelClassificationResult,\n EntityLinkingResult as GeneratedEntityLinkingResult,\n EntitiesResult as GeneratedEntityRecognitionResult,\n HealthcareEntity as GeneratedHealthcareEntity,\n HealthcareResult as GeneratedHealthcareResult,\n KeyPhraseResult as GeneratedKeyPhraseExtractionResult,\n LanguageDetectionResult as GeneratedLanguageDetectionResult,\n PiiResult as GeneratedPiiEntityRecognitionResult,\n SentenceSentiment as GeneratedSentenceSentiment,\n SentimentResponse as GeneratedSentimentAnalysisResult,\n HealthcareEntitiesDocumentResult,\n HealthcareLROResult,\n HealthcareRelation,\n HealthcareRelationEntity,\n InnerErrorModel,\n KeyPhraseExtractionLROResult,\n KeyPhraseTaskResult,\n KnownAnalyzeTextLROResultsKind,\n LanguageDetectionInput,\n LanguageDetectionTaskResult,\n PiiEntityRecognitionLROResult,\n PiiTaskResult,\n SentenceTarget,\n SentimentLROResult,\n SentimentTaskResult,\n SingleClassificationDocumentResult,\n TargetRelation,\n TextDocumentInput,\n} from \"./generated\";\nimport {\n AssessmentIndex,\n parseAssessmentIndex,\n parseHealthcareEntityIndex,\n sortResponseIdObjects,\n} from \"./util\";\nimport { RestError } from \"@azure/core-rest-pipeline\";\n\n/**\n * Helper function for converting nested service error to the unified\n * TextAnalysisError\n */\nfunction toTextAnalysisError(errorModel: ErrorModel | InnerErrorModel): TextAnalysisError {\n // Return the deepest error.\n if (errorModel.innererror !== undefined) {\n return toTextAnalysisError(errorModel.innererror);\n }\n\n return {\n ...errorModel,\n };\n}\n\nfunction makeTextAnalysisErrorResult(id: string, error: ErrorModel): TextAnalysisErrorResult {\n return {\n id,\n error: toTextAnalysisError(error),\n };\n}\n\n/**\n * combines successful and erroneous results into a single array of results and\n * sort them so that the IDs order match that of the input documents array.\n * @param input - the array of documents sent to the service for processing.\n * @param response - the response received from the service.\n * @param options - an options bag that includes functions to process the results.\n */\nfunction transformDocumentResults<\n DocumentSuccess extends TextAnalysisSuccessResult,\n PublicDocumentSuccess extends TextAnalysisSuccessResult = DocumentSuccess,\n TError extends TextAnalysisErrorResult = TextAnalysisErrorResult\n>(\n input: TextDocumentInput[],\n response: {\n documents: DocumentSuccess[];\n errors: DocumentError[];\n },\n options?: {\n processSuccess?: (successResult: DocumentSuccess) => PublicDocumentSuccess;\n processError?: (id: string, error: ErrorModel) => TError;\n }\n): (PublicDocumentSuccess | TextAnalysisErrorResult)[] {\n const { processError = makeTextAnalysisErrorResult, processSuccess } = options || {};\n const successResults = processSuccess\n ? 
response.documents.map(processSuccess)\n : response.documents;\n const unsortedResults = (\n successResults as (PublicDocumentSuccess | TextAnalysisErrorResult)[]\n ).concat(response.errors.map((error) => processError(error.id, error.error)));\n\n return sortResponseIdObjects(input, unsortedResults);\n}\n\nfunction toLanguageDetectionResult(\n documents: LanguageDetectionInput[],\n results: GeneratedLanguageDetectionResult\n): LanguageDetectionResult[] {\n return transformDocumentResults(documents, results, {\n processSuccess: ({ detectedLanguage, ...rest }) => ({\n primaryLanguage: detectedLanguage,\n ...rest,\n }),\n });\n}\n\nfunction toPiiEntityRecognitionResult(\n documents: TextDocumentInput[],\n results: GeneratedPiiEntityRecognitionResult\n): PiiEntityRecognitionResult[] {\n return transformDocumentResults(documents, results);\n}\n\nfunction toSentimentAnalysisResult(\n documents: TextDocumentInput[],\n results: GeneratedSentimentAnalysisResult\n): SentimentAnalysisResult[] {\n return transformDocumentResults(documents, results, {\n processSuccess: ({ sentences, ...rest }) => ({\n ...rest,\n sentences: sentences.map((sentence) =>\n convertGeneratedSentenceSentiment(sentence, sentences)\n ),\n }),\n });\n}\n\n/**\n * Converts a sentence sentiment object returned by the service to another that\n * is user-friendly.\n *\n * @param sentence - The sentence sentiment object to be converted.\n * @param response - The entire response returned by the service.\n * @returns The user-friendly sentence sentiment object.\n * @internal\n */\nfunction convertGeneratedSentenceSentiment(\n { targets, assessments: _, ...rest }: GeneratedSentenceSentiment,\n sentences: GeneratedSentenceSentiment[]\n): SentenceSentiment {\n return {\n ...rest,\n opinions:\n targets?.map(\n // eslint-disable-next-line @typescript-eslint/no-shadow\n ({ relations, ...rest }: SentenceTarget): Opinion => ({\n target: rest,\n assessments: relations\n .filter((relation) => relation.relationType === \"assessment\")\n .map((relation) => convertTargetRelationToAssessmentSentiment(relation, sentences)),\n })\n ) ?? 
[],\n };\n}\n\n/**\n * Converts a target relation object returned by the service to an assessment\n * sentiment object where JSON pointers in the former are realized in the\n * latter.\n *\n * @param targetRelation - The target relation object to be converted.\n * @param response - The entire response returned by the service.\n * @returns The user-friendly assessment sentiment object.\n * @internal\n */\nfunction convertTargetRelationToAssessmentSentiment(\n targetRelation: TargetRelation,\n sentences: GeneratedSentenceSentiment[]\n): AssessmentSentiment {\n const assessmentPtr = targetRelation.ref;\n const assessmentIndex: AssessmentIndex = parseAssessmentIndex(assessmentPtr);\n const assessment =\n sentences?.[assessmentIndex.sentence].assessments?.[assessmentIndex.assessment];\n if (assessment !== undefined) {\n return assessment;\n } else {\n throw new Error(`Pointer \"${assessmentPtr}\" is not a valid Assessment pointer`);\n }\n}\n\nfunction toEntityLinkingResult(\n documents: TextDocumentInput[],\n results: GeneratedEntityLinkingResult\n): EntityLinkingResult[] {\n return transformDocumentResults(documents, results);\n}\n\nfunction toKeyPhraseExtractionResult(\n documents: TextDocumentInput[],\n results: GeneratedKeyPhraseExtractionResult\n): KeyPhraseExtractionResult[] {\n return transformDocumentResults(documents, results);\n}\n\nfunction toEntityRecognitionResult(\n documents: TextDocumentInput[],\n results: GeneratedEntityRecognitionResult\n): EntityRecognitionResult[] {\n return transformDocumentResults(documents, results);\n}\n\n/**\n * @internal\n */\nexport function transformActionResult<ActionName extends AnalyzeActionName>(\n actionName: ActionName,\n input: TextDocumentInput[] | LanguageDetectionInput[],\n response: AnalyzeResponse\n): AnalyzeResult<AnalyzeActionName> {\n switch (response.kind) {\n case \"EntityLinkingResults\": {\n return toEntityLinkingResult(input, (response as EntityLinkingTaskResult).results);\n }\n case \"EntityRecognitionResults\": {\n return toEntityRecognitionResult(input, (response as EntitiesTaskResult).results);\n }\n case \"KeyPhraseExtractionResults\": {\n return toKeyPhraseExtractionResult(input, (response as KeyPhraseTaskResult).results);\n }\n case \"PiiEntityRecognitionResults\": {\n return toPiiEntityRecognitionResult(input, (response as PiiTaskResult).results);\n }\n case \"SentimentAnalysisResults\": {\n return toSentimentAnalysisResult(input, (response as SentimentTaskResult).results);\n }\n case \"LanguageDetectionResults\": {\n return toLanguageDetectionResult(input, (response as LanguageDetectionTaskResult).results);\n }\n default: {\n const __exhaust: never = response;\n throw new Error(`Unsupported results kind: ${__exhaust} for an action of type ${actionName}`);\n }\n }\n}\n\nfunction appendReadableErrorMessage(currentMessage: string, innerMessage: string): string {\n let message = currentMessage;\n if (message.slice(-1) !== \".\") {\n message = message + \".\";\n }\n return message + \" \" + innerMessage;\n}\n\n/**\n * @internal\n * parses incoming errors from the service/\n * @param error - the incoming error\n */\nfunction transformError(errorResponse: unknown): any {\n const strongErrorResponse = errorResponse as {\n response: {\n parsedBody?: ErrorResponse;\n };\n statusCode: number;\n };\n if (!strongErrorResponse.response) {\n throw errorResponse;\n }\n const topLevelError = strongErrorResponse.response.parsedBody?.error;\n if (!topLevelError) return errorResponse;\n let errorMessage = topLevelError.message;\n let code = 
topLevelError.code;\n function unwrap(error: ErrorModel | InnerErrorModel): ErrorModel {\n const innerError = error.innererror;\n if (innerError) {\n if (innerError.message) {\n errorMessage = appendReadableErrorMessage(errorMessage, innerError.message);\n }\n if (innerError.code) {\n code = innerError.code;\n }\n return unwrap(innerError);\n }\n return error as ErrorModel;\n }\n unwrap(topLevelError);\n return new RestError(errorMessage, {\n code,\n statusCode: strongErrorResponse.statusCode,\n });\n}\n\nexport async function throwError<T>(p: Promise<T>): Promise<T> {\n try {\n return await p;\n } catch (e: unknown) {\n throw transformError(e);\n }\n}\n\nfunction toHealthcareResult(\n documents: TextDocumentInput[],\n results: GeneratedHealthcareResult\n): HealthcareResult[] {\n function makeHealthcareEntity(entity: GeneratedHealthcareEntity): HealthcareEntity {\n const { dataSources, ...rest } = entity;\n return {\n dataSources: dataSources ?? [],\n ...rest,\n };\n }\n function makeHealthcareRelation(\n entities: HealthcareEntity[]\n ): (relation: HealthcareRelation) => HealthcareEntityRelation {\n return (relation: HealthcareRelation): HealthcareEntityRelation => ({\n relationType: relation.relationType,\n roles: relation.entities.map(\n (role: HealthcareRelationEntity): HealthcareEntityRelationRole => ({\n entity: entities[parseHealthcareEntityIndex(role.ref)],\n name: role.role,\n })\n ),\n });\n }\n return transformDocumentResults<HealthcareEntitiesDocumentResult, HealthcareSuccessResult>(\n documents,\n results,\n {\n processSuccess: ({ entities, relations, ...rest }) => {\n const newEntities = entities.map(makeHealthcareEntity);\n return {\n entities: newEntities,\n entityRelations: relations.map(makeHealthcareRelation(newEntities)),\n ...rest,\n };\n },\n }\n );\n}\n\nfunction toCustomSingleLabelClassificationResult(\n documents: TextDocumentInput[],\n results: GeneratedCustomSingleLabelClassificationResult\n): CustomSingleLabelClassificationResult[] {\n return transformDocumentResults<\n SingleClassificationDocumentResult,\n CustomSingleLabelClassificationSuccessResult\n >(documents, results, {\n processSuccess: ({ classification, ...rest }) => {\n return {\n classifications: [classification],\n ...rest,\n };\n },\n });\n}\n\n/**\n * @internal\n */\nexport function transformAnalyzeBatchResults(\n documents: TextDocumentInput[],\n response: AnalyzeTextLROResultUnion[] = []\n): AnalyzeBatchResult[] {\n return response.map((actionData) => {\n const { lastUpdateDateTime: completedOn, actionName, kind } = actionData;\n switch (kind as KnownAnalyzeTextLROResultsKind) {\n case \"SentimentAnalysisLROResults\": {\n const { results } = actionData as SentimentLROResult;\n const { modelVersion, statistics } = results;\n return {\n kind: \"SentimentAnalysis\",\n results: toSentimentAnalysisResult(documents, results),\n completedOn,\n ...(actionName ? { actionName } : {}),\n ...(statistics ? { statistics } : {}),\n modelVersion,\n };\n }\n case \"EntityRecognitionLROResults\": {\n const { results } = actionData as EntityRecognitionLROResult;\n const { modelVersion, statistics } = results;\n return {\n kind: \"EntityRecognition\",\n results: toEntityRecognitionResult(documents, results),\n completedOn,\n ...(actionName ? { actionName } : {}),\n ...(statistics ? 
{ statistics } : {}),\n modelVersion,\n };\n }\n case \"PiiEntityRecognitionLROResults\": {\n const { results } = actionData as PiiEntityRecognitionLROResult;\n const { modelVersion, statistics } = results;\n return {\n kind: \"PiiEntityRecognition\",\n results: toPiiEntityRecognitionResult(documents, results),\n completedOn,\n ...(actionName ? { actionName } : {}),\n ...(statistics ? { statistics } : {}),\n modelVersion,\n };\n }\n case \"KeyPhraseExtractionLROResults\": {\n const { results } = actionData as KeyPhraseExtractionLROResult;\n const { modelVersion, statistics } = results;\n return {\n kind: \"KeyPhraseExtraction\",\n results: toKeyPhraseExtractionResult(documents, results),\n completedOn,\n ...(actionName ? { actionName } : {}),\n ...(statistics ? { statistics } : {}),\n modelVersion,\n };\n }\n case \"EntityLinkingLROResults\": {\n const { results } = actionData as EntityLinkingLROResult;\n const { modelVersion, statistics } = results;\n return {\n kind: \"EntityLinking\",\n results: toEntityLinkingResult(documents, results),\n completedOn,\n ...(actionName ? { actionName } : {}),\n ...(statistics ? { statistics } : {}),\n modelVersion,\n };\n }\n case \"HealthcareLROResults\": {\n const { results } = actionData as HealthcareLROResult;\n const { modelVersion, statistics } = results;\n return {\n kind: \"Healthcare\",\n results: toHealthcareResult(documents, results),\n completedOn,\n ...(actionName ? { actionName } : {}),\n ...(statistics ? { statistics } : {}),\n modelVersion,\n };\n }\n case \"CustomEntityRecognitionLROResults\": {\n const { results } = actionData as CustomEntityRecognitionLROResult;\n const { deploymentName, projectName, statistics } = results;\n return {\n kind: \"CustomEntityRecognition\",\n results: transformDocumentResults(documents, results),\n completedOn,\n ...(actionName ? { actionName } : {}),\n ...(statistics ? { statistics } : {}),\n deploymentName,\n projectName,\n };\n }\n case \"CustomSingleLabelClassificationLROResults\": {\n const { results } = actionData as CustomSingleLabelClassificationLROResult;\n const { deploymentName, projectName, statistics } = results;\n return {\n kind: \"CustomSingleLabelClassification\",\n results: toCustomSingleLabelClassificationResult(documents, results),\n completedOn,\n ...(actionName ? { actionName } : {}),\n ...(statistics ? { statistics } : {}),\n deploymentName,\n projectName,\n };\n }\n case \"CustomMultiLabelClassificationLROResults\": {\n const { results } = actionData as CustomMultiLabelClassificationLROResult;\n const { deploymentName, projectName, statistics } = results;\n return {\n kind: \"CustomMultiLabelClassification\",\n results: transformDocumentResults(documents, results),\n completedOn,\n ...(actionName ? { actionName } : {}),\n ...(statistics ? { statistics } : {}),\n deploymentName,\n projectName,\n };\n }\n case \"ExtractiveSummarizationLROResults\": {\n const { results } = actionData as ExtractiveSummarizationLROResult;\n const { modelVersion, statistics } = results;\n return {\n kind: \"ExtractiveSummarization\",\n results: transformDocumentResults(documents, results),\n completedOn,\n ...(actionName ? { actionName } : {}),\n ...(statistics ? { statistics } : {}),\n modelVersion,\n };\n }\n default: {\n throw new Error(`Unsupported results kind: ${kind}`);\n }\n }\n });\n}\n"]}
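The embedded transforms.ts source in the map above introduces a throwError helper that awaits a promise and rethrows any failure after converting the parsed service error body, so callers no longer need per-call try/catch blocks. Below is a minimal, standalone sketch of that promise-wrapping pattern; mapServiceError is a hypothetical stand-in for the SDK's internal transformError (which builds a RestError), not the library's actual implementation.

// Sketch of the promise-wrapping error pattern shown in the embedded source.
// `mapServiceError` is a hypothetical simplification of the SDK's internal
// transformError; it only illustrates unwrapping a structured error body.
async function throwError<T>(p: Promise<T>): Promise<T> {
  try {
    return await p;
  } catch (e: unknown) {
    throw mapServiceError(e);
  }
}

function mapServiceError(e: unknown): Error {
  const parsed = (e as { response?: { parsedBody?: { error?: { code?: string; message?: string } } } })
    .response?.parsedBody?.error;
  // Fall back to the original error when there is no structured error body.
  return parsed ? new Error(`${parsed.code ?? "UnknownError"}: ${parsed.message ?? ""}`) : (e as Error);
}

// Usage (illustrative): callers see one normalized error shape.
// const result = await throwError(client.analyze(request, updatedOptions));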
package/package.json CHANGED
@@ -3,7 +3,7 @@
  "sdk-type": "client",
  "author": "Microsoft Corporation",
  "description": "An isomorphic client library for the text analysis features in the Azure Cognitive Language Service.",
- "version": "6.0.0-alpha.20220526.1",
+ "version": "6.0.0-alpha.20220627.1",
  "keywords": [
  "node",
  "azure",
@@ -87,7 +87,7 @@
  "@azure/core-auth": "^1.3.0",
  "@azure/core-client": "^1.0.0",
  "@azure/core-rest-pipeline": "^1.8.1",
- "@azure/core-lro": ">=2.3.0-alpha <2.3.0-alphb",
+ "@azure/core-lro": "2.3.0-beta.1",
  "@azure/core-paging": "^1.3.0",
  "@azure/core-tracing": "1.0.0",
  "@azure/logger": "^1.0.0",