@azure/ai-form-recognizer 4.1.0-beta.1 → 5.0.0

This diff shows the changes between publicly released versions of the package as they appear in their respective public registries. It is provided for informational purposes only.
Files changed (42)
  1. package/README.md +25 -35
  2. package/dist/index.js +98 -302
  3. package/dist/index.js.map +1 -1
  4. package/dist-esm/src/constants.js +2 -1
  5. package/dist-esm/src/constants.js.map +1 -1
  6. package/dist-esm/src/documentAnalysisClient.js +5 -7
  7. package/dist-esm/src/documentAnalysisClient.js.map +1 -1
  8. package/dist-esm/src/documentModel.js.map +1 -1
  9. package/dist-esm/src/documentModelAdministrationClient.js +13 -55
  10. package/dist-esm/src/documentModelAdministrationClient.js.map +1 -1
  11. package/dist-esm/src/generated/generatedClient.js +2 -2
  12. package/dist-esm/src/generated/generatedClient.js.map +1 -1
  13. package/dist-esm/src/generated/models/index.js +10 -26
  14. package/dist-esm/src/generated/models/index.js.map +1 -1
  15. package/dist-esm/src/generated/models/mappers.js +4 -131
  16. package/dist-esm/src/generated/models/mappers.js.map +1 -1
  17. package/dist-esm/src/generated/models/parameters.js +1 -19
  18. package/dist-esm/src/generated/models/parameters.js.map +1 -1
  19. package/dist-esm/src/generated/operations/documentClassifiers.js +12 -5
  20. package/dist-esm/src/generated/operations/documentClassifiers.js.map +1 -1
  21. package/dist-esm/src/generated/operations/documentModels.js +15 -11
  22. package/dist-esm/src/generated/operations/documentModels.js.map +1 -1
  23. package/dist-esm/src/generated/operations/miscellaneous.js +12 -5
  24. package/dist-esm/src/generated/operations/miscellaneous.js.map +1 -1
  25. package/dist-esm/src/index.js +1 -1
  26. package/dist-esm/src/index.js.map +1 -1
  27. package/dist-esm/src/lro/analysis.js +2 -2
  28. package/dist-esm/src/lro/analysis.js.map +1 -1
  29. package/dist-esm/src/models/contentSource.js +4 -0
  30. package/dist-esm/src/models/contentSource.js.map +1 -0
  31. package/dist-esm/src/models/documentElements.js.map +1 -1
  32. package/dist-esm/src/models/index.js.map +1 -1
  33. package/dist-esm/src/options/AnalyzeDocumentOptions.js +19 -8
  34. package/dist-esm/src/options/AnalyzeDocumentOptions.js.map +1 -1
  35. package/dist-esm/src/options/FormRecognizerClientOptions.js +0 -27
  36. package/dist-esm/src/options/FormRecognizerClientOptions.js.map +1 -1
  37. package/dist-esm/src/options/index.js +2 -2
  38. package/dist-esm/src/options/index.js.map +1 -1
  39. package/dist-esm/src/util.js +2 -4
  40. package/dist-esm/src/util.js.map +1 -1
  41. package/package.json +6 -6
  42. package/types/ai-form-recognizer.d.ts +140 -171
package/dist-esm/src/constants.js
@@ -8,5 +8,6 @@ export const DEFAULT_COGNITIVE_SCOPE = "https://cognitiveservices.azure.com/.def
 /**
  * @internal
  */
-export const SDK_VERSION = "4.1.0-beta.1";
+export const SDK_VERSION = "5.0.0";
+export const FORM_RECOGNIZER_API_VERSION = "2023-07-31";
 //# sourceMappingURL=constants.js.map
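
The net effect of this hunk: in 5.0.0 the Form Recognizer REST API version is pinned at build time to "2023-07-31" instead of being configurable per client. A minimal before/after sketch of client construction (the endpoint and key are placeholders, and the 4.x apiVersion value shown is illustrative):

import { AzureKeyCredential, DocumentAnalysisClient } from "@azure/ai-form-recognizer";

const endpoint = "https://<resource name>.cognitiveservices.azure.com";
const credential = new AzureKeyCredential("<api key>");

// 4.1.0-beta.1: the service API version was a client option with a generated default.
// const client = new DocumentAnalysisClient(endpoint, credential, { apiVersion: "2023-02-28-preview" });

// 5.0.0: there is no apiVersion option; every client uses FORM_RECOGNIZER_API_VERSION ("2023-07-31").
const client = new DocumentAnalysisClient(endpoint, credential);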
package/dist-esm/src/constants.js.map
@@ -1 +1 @@
-{"version":3,"file":"constants.js","sourceRoot":"","sources":["../../src/constants.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAElC;;;GAGG;AACH,MAAM,CAAC,MAAM,uBAAuB,GAAG,8CAA8C,CAAC;AAEtF;;GAEG;AACH,MAAM,CAAC,MAAM,WAAW,GAAG,cAAc,CAAC","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\n/**\n * The default AAD permissions scope for Cognitive Services.\n * @internal\n */\nexport const DEFAULT_COGNITIVE_SCOPE = \"https://cognitiveservices.azure.com/.default\";\n\n/**\n * @internal\n */\nexport const SDK_VERSION = \"4.1.0-beta.1\";\n"]}
+{"version":3,"file":"constants.js","sourceRoot":"","sources":["../../src/constants.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAElC;;;GAGG;AACH,MAAM,CAAC,MAAM,uBAAuB,GAAG,8CAA8C,CAAC;AAEtF;;GAEG;AACH,MAAM,CAAC,MAAM,WAAW,GAAG,OAAO,CAAC;AAEnC,MAAM,CAAC,MAAM,2BAA2B,GAAG,YAAY,CAAC","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\n/**\n * The default AAD permissions scope for Cognitive Services.\n * @internal\n */\nexport const DEFAULT_COGNITIVE_SCOPE = \"https://cognitiveservices.azure.com/.default\";\n\n/**\n * @internal\n */\nexport const SDK_VERSION = \"5.0.0\";\n\nexport const FORM_RECOGNIZER_API_VERSION = \"2023-07-31\";\n"]}
package/dist-esm/src/documentAnalysisClient.js
@@ -1,11 +1,10 @@
 // Copyright (c) Microsoft Corporation.
 // Licensed under the MIT license.
 import { createTracingClient } from "@azure/core-tracing";
-import { SDK_VERSION } from "./constants";
+import { FORM_RECOGNIZER_API_VERSION, SDK_VERSION } from "./constants";
 import { accept1 } from "./generated/models/parameters";
 import { toAnalyzeResultFromGenerated, toDocumentAnalysisPollOperationState, } from "./lro/analysis";
 import { lro } from "./lro/util/poller";
-import { DEFAULT_GENERATED_CLIENT_OPTIONS, } from "./options/FormRecognizerClientOptions";
 import { makeServiceClient, Mappers, SERIALIZER } from "./util";
 /**
  * A client for interacting with the Form Recognizer service's analysis features.
@@ -39,14 +38,12 @@ import { makeServiceClient, Mappers, SERIALIZER } from "./util";
  */
 export class DocumentAnalysisClient {
     constructor(endpoint, credential, options = {}) {
-        var _a;
         this._restClient = makeServiceClient(endpoint, credential, options);
         this._tracing = createTracingClient({
             packageName: "@azure/ai-form-recognizer",
             packageVersion: SDK_VERSION,
             namespace: "Microsoft.CognitiveServices",
         });
-        this._apiVersion = (_a = options.apiVersion) !== null && _a !== void 0 ? _a : DEFAULT_GENERATED_CLIENT_OPTIONS.apiVersion;
     }
     async beginAnalyzeDocument(model, document, options = {}) {
         return this._tracing.withSpan("DocumentAnalysisClient.beginAnalyzeDocument", options,
@@ -69,11 +66,12 @@ export class DocumentAnalysisClient {
         const { modelId: initialModelId, apiVersion: requestApiVersion, transformResult, } = typeof model === "string"
             ? { modelId: model, apiVersion: undefined, transformResult: (v) => v }
             : model;
-        if (requestApiVersion && requestApiVersion !== this._apiVersion) {
+        if (requestApiVersion && requestApiVersion !== FORM_RECOGNIZER_API_VERSION) {
             throw new Error([
-                `API Version mismatch: the provided model wants version: ${requestApiVersion}, but the client is using ${this._apiVersion}.`,
+                `API Version mismatch: the provided model wants version: ${requestApiVersion},`,
+                `but the client is using ${FORM_RECOGNIZER_API_VERSION}.`,
                 "The API version of the model must match the client's API version.",
-            ].join("\n"));
+            ].join(" "));
         }
         return this.createUnifiedPoller((abortSignal) => {
             const [contentType, analyzeRequest] = toAnalyzeRequest(input);
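
Because the mismatch check now compares against the pinned constant (and the message is joined with spaces onto a single line), a DocumentModel built for a different service version fails fast when analysis begins. A sketch of that failure path, assuming a hypothetical stale model object (real DocumentModels come from the prebuilt samples referenced in the package docs):

import { DocumentAnalysisClient } from "@azure/ai-form-recognizer";
import { DefaultAzureCredential } from "@azure/identity";

const client = new DocumentAnalysisClient(
  "https://<resource name>.cognitiveservices.azure.com",
  new DefaultAzureCredential()
);

// Hypothetical DocumentModel pinned to an older service version.
const staleModel = {
  modelId: "prebuilt-receipt",
  apiVersion: "2022-08-31", // does not match FORM_RECOGNIZER_API_VERSION ("2023-07-31")
  transformResult: (v: unknown) => v,
};

// Rejects with the single-line error: "API Version mismatch: the provided model wants
// version: 2022-08-31, but the client is using 2023-07-31. The API version of the
// model must match the client's API version."
await client.beginAnalyzeDocument(staleModel as any, "<document url>");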
package/dist-esm/src/documentAnalysisClient.js.map
@@ -1 +1 @@
(single-line source map regenerated: the mappings and embedded sourcesContent are replaced to match the documentAnalysisClient.js changes above)
These fields constitute a receipt, because we used the receipt model\n * const [{ fields: receipt }] = documents;\n *\n * // Since we used the strongly-typed PrebuiltReceiptModel object instead of the \"prebuilt-receipt\" model ID\n * // string, the fields of the receipt are strongly-typed and have camelCase names (as opposed to PascalCase).\n * console.log(\"The type of this receipt is:\", receipt.receiptType?.value);\n * ```\n *\n * @param model - a {@link DocumentModel} representing the model to use for analysis and the expected output type\n * @param document - a {@link FormRecognizerRequestBody} that will be uploaded with the request\n * @param options - optional settings for the analysis operation and poller\n * @returns a long-running operation (poller) that will eventually produce an `AnalyzeResult` with documents that have\n * the result type associated with the input model\n */\n public async beginAnalyzeDocument<Result>(\n model: DocumentModel<Result>,\n document: FormRecognizerRequestBody,\n // eslint-disable-next-line @azure/azure-sdk/ts-naming-options\n options?: AnalyzeDocumentOptions<Result>\n ): Promise<AnalysisPoller<Result>>;\n public async beginAnalyzeDocument(\n model: string | DocumentModel<unknown>,\n document: FormRecognizerRequestBody,\n options: AnalyzeDocumentOptions<unknown> = {}\n ): Promise<AnalysisPoller<unknown>> {\n return this._tracing.withSpan(\n \"DocumentAnalysisClient.beginAnalyzeDocument\",\n options,\n // In the first version of the SDK, the document input was treated as a URL if it was a string, and we preserve\n // this behavior to avoid introducing a breaking change.\n this.analyze.bind(\n this,\n model,\n typeof document === \"string\" ? source(\"url\", document) : source(\"body\", document)\n )\n );\n }\n\n /**\n * Extract data from an input using a model given by its unique ID.\n *\n * This operation supports custom as well as prebuilt models. For example, to use the prebuilt invoice model, provide\n * the model ID \"prebuilt-invoice\", or to use the simpler prebuilt layout model, provide the model ID\n * \"prebuilt-layout\".\n *\n * The fields produced in the `AnalyzeResult` depend on the model that is used for analysis, and the values in any\n * extracted documents' fields depend on the document types in the model (if any) and their corresponding field\n * schemas.\n *\n * ### Examples\n *\n * This method supports extracting data from a file at a given URL. The Form Recognizer service will attempt to\n * download a file using the submitted URL, so the URL must be accessible from the public internet. For example, a SAS\n * token can be used to grant read access to a blob in Azure Storage, and the service will use the SAS-encoded URL to\n * request the file.\n *\n * ```javascript\n * // the URL must be publicly accessible\n * const url = \"<receipt document url>\";\n *\n * // The model that is passed to the following function call determines the type of the eventual result. In the\n * // example, we will use the prebuilt receipt model, but you could use a custom model ID/name instead.\n * const poller = await client.beginAnalyzeDocument(\"prebuilt-receipt\", url);\n *\n * // The result is a long-running operation (poller), which must itself be polled until the operation completes\n * const {\n * pages, // pages extracted from the document, which contain lines and words\n * tables, // extracted tables, organized into cells that contain their contents\n * styles, // text styles (ex. 
handwriting) that were observed in the document\n * keyValuePairs, // extracted pairs of elements (directed associations from one element in the input to another)\n *\n * documents // extracted documents (instances of one of the model's document types and its field schema)\n * } = await poller.pollUntilDone();\n *\n * // Extract the fields of the first document. These fields constitute a receipt, because we used the receipt model\n * const [{ fields: receipt }] = documents;\n *\n * // The fields correspond to the model's document types and their field schemas. Refer to the Form Recognizer\n * // documentation for information about the document types and field schemas within a model, or use the `getModel`\n * // operation to view this information programmatically.\n * console.log(\"The type of this receipt is:\", receipt?.[\"ReceiptType\"]?.value);\n * ```\n *\n * @param modelId - the unique ID (name) of the model within this client's resource\n * @param documentUrl - a URL (string) to an input document accessible from the public internet\n * @param options - optional settings for the analysis operation and poller\n * @returns a long-running operation (poller) that will eventually produce an `AnalyzeResult`\n */\n public async beginAnalyzeDocumentFromUrl(\n modelId: string,\n documentUrl: string,\n // eslint-disable-next-line @azure/azure-sdk/ts-naming-options\n options?: AnalyzeDocumentOptions\n ): Promise<AnalysisPoller>;\n /**\n * Extract data from an input using a model that has a known, strongly-typed document schema (a {@link DocumentModel}).\n *\n * The fields produced in the `AnalyzeResult` depend on the model that is used for analysis. In TypeScript, the type\n * of the result for this method overload is inferred from the type of the input `DocumentModel`.\n *\n * ### Examples\n *\n * This method supports extracting data from a file at a given URL. The Form Recognizer service will attempt to\n * download a file using the submitted URL, so the URL must be accessible from the public internet. For example, a SAS\n * token can be used to grant read access to a blob in Azure Storage, and the service will use the SAS-encoded URL to\n * request the file.\n *\n * ```typescript\n * // See the `prebuilt` folder in the SDK samples (http://aka.ms/azsdk/formrecognizer/js/samples) for examples of\n * // DocumentModels for known prebuilts.\n * import { PrebuiltReceiptModel } from \"./prebuilt-receipt.ts\";\n *\n * // the URL must be publicly accessible\n * const url = \"<receipt document url>\";\n *\n * // The model that is passed to the following function call determines the type of the eventual result. In the\n * // example, we will use the prebuilt receipt model.\n * const poller = await client.beginAnalyzeDocument(PrebuiltReceiptModel, url);\n *\n * // The result is a long-running operation (poller), which must itself be polled until the operation completes\n * const {\n * pages, // pages extracted from the document, which contain lines and words\n * tables, // extracted tables, organized into cells that contain their contents\n * styles, // text styles (ex. handwriting) that were observed in the document\n * keyValuePairs, // extracted pairs of elements (directed associations from one element in the input to another)\n *\n * documents // extracted documents (instances of one of the model's document types and its field schema)\n * } = await poller.pollUntilDone();\n *\n * // Extract the fields of the first document. 
These fields constitute a receipt, because we used the receipt model\n * const [{ fields: receipt }] = documents;\n *\n * // Since we used the strongly-typed PrebuiltReceiptModel object instead of the \"prebuilt-receipt\" model ID\n * // string, the fields of the receipt are strongly-typed and have camelCase names (as opposed to PascalCase).\n * console.log(\"The type of this receipt is:\", receipt.receiptType?.value);\n * ```\n *\n * @param model - a {@link DocumentModel} representing the model to use for analysis and the expected output type\n * @param documentUrl - a URL (string) to an input document accessible from the public internet\n * @param options - optional settings for the analysis operation and poller\n * @returns a long-running operation (poller) that will eventually produce an `AnalyzeResult`\n */\n public async beginAnalyzeDocumentFromUrl<Result>(\n model: DocumentModel<Result>,\n documentUrl: string,\n // eslint-disable-next-line @azure/azure-sdk/ts-naming-options\n options?: AnalyzeDocumentOptions<Result>\n ): Promise<AnalysisPoller<Result>>;\n public async beginAnalyzeDocumentFromUrl(\n model: string | DocumentModel<unknown>,\n documentUrl: string,\n options: AnalyzeDocumentOptions<unknown> = {}\n ): Promise<AnalysisPoller<unknown>> {\n return this._tracing.withSpan(\n \"DocumentAnalysisClient.beginAnalyzeDocumentFromUrl\",\n options,\n this.analyze.bind(this, model, source(\"url\", documentUrl))\n );\n }\n\n /**\n * A helper method for running analysis polymorphically.\n *\n * @param model - the model ID or DocumentModel to use for analysis\n * @param input - the string URL or request body to use\n * @param options - analysis options\n * @returns - an analysis poller\n */\n private analyze(\n model: string | DocumentModel<unknown>,\n input: DocumentSource,\n options: AnalyzeDocumentOptions<unknown>\n ) {\n const {\n modelId: initialModelId,\n apiVersion: requestApiVersion,\n transformResult,\n } = typeof model === \"string\"\n ? { modelId: model, apiVersion: undefined, transformResult: (v: AnalyzeResult) => v }\n : model;\n\n if (requestApiVersion && requestApiVersion !== FORM_RECOGNIZER_API_VERSION) {\n throw new Error(\n [\n `API Version mismatch: the provided model wants version: ${requestApiVersion},`,\n `but the client is using ${FORM_RECOGNIZER_API_VERSION}.`,\n \"The API version of the model must match the client's API version.\",\n ].join(\" \")\n );\n }\n\n return this.createUnifiedPoller<unknown>(\n (abortSignal) => {\n const [contentType, analyzeRequest] = toAnalyzeRequest(input);\n\n return this._restClient.documentModels.analyzeDocument(initialModelId, contentType as any, {\n ...options,\n abortSignal,\n analyzeRequest,\n });\n },\n {\n initialModelId,\n options,\n transformResult: (result) => transformResult(toAnalyzeResultFromGenerated(result)),\n }\n );\n }\n\n /**\n * Classify a document using a custom classifier given by its ID.\n *\n * This method produces a long-running operation (poller) that will eventually produce an `AnalyzeResult`. This is the\n * same type as `beginAnalyzeDocument` and `beginAnalyzeDocumentFromUrl`, but the result will only contain a small\n * subset of its fields. Only the `documents` field and `pages` field will be populated, and only minimal page\n * information will be returned. 
The `documents` field will contain information about all the identified documents and\n * the `docType` that they were classified as.\n *\n * ### Example\n *\n * This method supports streamable request bodies ({@link FormRecognizerRequestBody}) such as Node.JS `ReadableStream`\n * objects, browser `Blob`s, and `ArrayBuffer`s. The contents of the body will be uploaded to the service for analysis.\n *\n * ```typescript\n * import * as fs from \"fs\";\n *\n * const file = fs.createReadStream(\"path/to/file.pdf\");\n *\n * const poller = await client.beginClassifyDocument(\"<classifier ID>\", file);\n *\n * // The result is a long-running operation (poller), which must itself be polled until the operation completes\n * const {\n * pages, // pages extracted from the document, which contain only basic information for classifiers\n * documents // extracted documents and their types\n * } = await poller.pollUntilDone();\n *\n * // We'll print the documents and their types\n * for (const { docType } of documents) {\n * console.log(\"The type of this document is:\", docType);\n * }\n * ```\n *\n * @param classifierId - the ID of the custom classifier to use for analysis\n * @param document - the document to classify\n * @param options - options for the classification operation\n * @returns a long-running operation (poller) that will eventually produce an `AnalyzeResult`\n */\n public async beginClassifyDocument(\n classifierId: string,\n document: FormRecognizerRequestBody,\n // eslint-disable-next-line @azure/azure-sdk/ts-naming-options\n options: ClassifyDocumentOptions = {}\n ): Promise<AnalysisPoller> {\n return this._tracing.withSpan(\n \"DocumentAnalysisClient.beginClassifyDocument\",\n options,\n this.classify.bind(this, classifierId, source(\"body\", document))\n );\n }\n\n /**\n * Classify a document from a URL using a custom classifier given by its ID.\n *\n * This method produces a long-running operation (poller) that will eventually produce an `AnalyzeResult`. This is the\n * same type as `beginAnalyzeDocument` and `beginAnalyzeDocumentFromUrl`, but the result will only contain a small\n * subset of its fields. Only the `documents` field and `pages` field will be populated, and only minimal page\n * information will be returned. The `documents` field will contain information about all the identified documents and\n * the `docType` that they were classified as.\n *\n * ### Example\n *\n * This method supports extracting data from a file at a given URL. The Form Recognizer service will attempt to\n * download a file using the submitted URL, so the URL must be accessible from the public internet. 
For example, a SAS\n * token can be used to grant read access to a blob in Azure Storage, and the service will use the SAS-encoded URL to\n * request the file.\n *\n * ```typescript\n * // the URL must be publicly accessible\n * const url = \"<file url>\";\n *\n * const poller = await client.beginClassifyDocument(\"<classifier ID>\", url);\n *\n * // The result is a long-running operation (poller), which must itself be polled until the operation completes\n * const {\n * pages, // pages extracted from the document, which contain only basic information for classifiers\n * documents // extracted documents and their types\n * } = await poller.pollUntilDone();\n *\n * // We'll print the documents and their types\n * for (const { docType } of documents) {\n * console.log(\"The type of this document is:\", docType);\n * }\n * ```\n * @param classifierId - the ID of the custom classifier to use for analysis\n * @param documentUrl - the URL of the document to classify\n * @param options -\n * @returns\n */\n public async beginClassifyDocumentFromUrl(\n classifierId: string,\n documentUrl: string,\n // eslint-disable-next-line @azure/azure-sdk/ts-naming-options\n options: ClassifyDocumentOptions = {}\n ): Promise<AnalysisPoller> {\n return this._tracing.withSpan(\n \"DocumentAnalysisClient.beginClassifyDocumentFromUrl\",\n options,\n this.classify.bind(this, classifierId, source(\"url\", documentUrl))\n );\n }\n\n /**\n * A helper method for running classification polymorphically.\n * @param classifierId - the ID of the classifier to use\n * @param input - the string URL or request body to use\n * @param options - analysis options\n * @returns an analysis poller\n */\n private classify(\n classifierId: string,\n input: DocumentSource,\n options: ClassifyDocumentOptions\n ): Promise<AnalysisPoller> {\n return this.createUnifiedPoller(\n async (abortSignal) => {\n const [contentType, classifyRequest] = toAnalyzeRequest(input);\n\n return this._restClient.documentClassifiers.classifyDocument(\n classifierId,\n contentType as any,\n {\n ...options,\n abortSignal,\n classifyRequest,\n }\n );\n },\n {\n initialModelId: classifierId,\n options,\n transformResult: toAnalyzeResultFromGenerated,\n }\n );\n }\n\n /**\n * Create an LRO poller that handles analysis operations.\n *\n * This is the meat of all analysis polling operations.\n *\n * @param startOperation - function that starts the operation and returns the operation location\n * @param definition - operation definition (initial model ID, operation transforms, request options)\n * @returns - an analysis poller that produces the given return types according to the operation spec\n */\n private async createUnifiedPoller<Result>(\n startOperation: (\n abortSignal: AbortSignalLike | undefined\n ) => Promise<{ operationLocation?: string }>,\n definition: AnalysisOperationDefinition<Result>\n ): Promise<AnalysisPoller<Result>> {\n const { resumeFrom } = definition.options;\n\n // TODO: what should we do if resumeFrom.modelId is different from initialModelId?\n // And what do we do with the redundant input??\n\n const getAnalyzeResult = (\n ctx: OperationContext,\n operationLocation: string\n ): Promise<AnalyzeResultOperation> =>\n this._tracing.withSpan(\n \"DocumentAnalysisClient.createAnalysisPoller-getAnalyzeResult\",\n definition.options,\n (finalOptions) =>\n this._restClient.sendOperationRequest<AnalyzeResultOperation>(\n {\n options: {\n onResponse: async (rawResponse, ...args) => {\n // Capture the `Retry-After` header if it was sent.\n const 
retryAfterHeader = rawResponse.headers.get(\"retry-after\");\n // Convert the header value to milliseconds. If the header is not a valid number, then it is an HTTP\n // date.\n if (retryAfterHeader) {\n const retryAfterMs = Number(retryAfterHeader) * 1000;\n if (!Number.isNaN(retryAfterMs)) {\n ctx.updateDelay(retryAfterMs);\n } else {\n ctx.updateDelay(Date.parse(retryAfterHeader) - Date.now());\n }\n } else {\n ctx.updateDelay(undefined);\n }\n\n // Forward the `onResponse` callback if it was provided.\n return finalOptions.onResponse?.(rawResponse, ...args);\n },\n ...finalOptions,\n // We need to pass the abort signal from the context rather than from the options, since the user could\n // poll the LRO with a different AbortSignal than it was instantiated with.\n abortSignal: ctx.abortSignal,\n },\n },\n {\n path: operationLocation,\n httpMethod: \"GET\",\n responses: {\n 200: {\n bodyMapper: Mappers.AnalyzeResultOperation,\n },\n default: {\n bodyMapper: Mappers.ErrorResponse,\n },\n },\n // URL is fully-formed, so we don't need any query parameters\n headerParameters: [accept1],\n serializer: SERIALIZER,\n }\n )\n );\n\n const toInit =\n // If the user gave us a stored token, we'll poll it again\n resumeFrom !== undefined\n ? async (ctx: OperationContext) =>\n this._tracing.withSpan(\n \"DocumentAnalysisClient.createAnalysisPoller-resume\",\n definition.options,\n async () => {\n const { clientVersion, operationLocation, modelId } = JSON.parse(resumeFrom) as {\n clientVersion?: string;\n operationLocation: string;\n modelId: string;\n };\n\n if (!clientVersion || clientVersion !== SDK_VERSION) {\n throw new Error(\n [\n \"Cannot restore poller from a serialized state from a different version of the client\",\n `library (restoreFrom: '${clientVersion}', current: '${SDK_VERSION}').`,\n ].join(\" \")\n );\n }\n\n const result = await getAnalyzeResult(ctx, operationLocation);\n\n return toDocumentAnalysisPollOperationState(\n definition,\n modelId,\n operationLocation,\n result\n );\n }\n )\n : // Otherwise, we'll start a new operation from the initialModelId\n async (ctx: OperationContext) =>\n this._tracing.withSpan(\n \"DocumentAnalysisClient.createAnalysisPoller-start\",\n definition.options,\n async () => {\n const { operationLocation } = await startOperation(ctx.abortSignal);\n\n if (operationLocation === undefined) {\n throw new Error(\n \"Unable to start analysis operation: no Operation-Location received.\"\n );\n }\n\n const result = await getAnalyzeResult(ctx, operationLocation);\n\n return toDocumentAnalysisPollOperationState(\n definition,\n definition.initialModelId,\n operationLocation,\n result\n );\n }\n );\n\n const poller = await lro<Result, DocumentAnalysisPollOperationState<Result>>(\n {\n init: toInit,\n poll: async (ctx, { operationLocation, modelId }) =>\n this._tracing.withSpan(\n \"DocumentAnalysisClient.createAnalysisPoller-poll\",\n {},\n async () => {\n const result = await getAnalyzeResult(ctx, operationLocation);\n\n return toDocumentAnalysisPollOperationState(\n definition,\n modelId,\n operationLocation,\n result\n );\n }\n ),\n serialize: ({ operationLocation, modelId }) =>\n JSON.stringify({ clientVersion: SDK_VERSION, id: modelId, operationLocation }),\n },\n definition.options.updateIntervalInMs,\n definition.options.abortSignal\n );\n\n if (definition.options.onProgress !== undefined) {\n poller.onProgress(definition.options.onProgress);\n definition.options.onProgress(poller.getOperationState());\n }\n\n return poller;\n }\n\n // 
#endregion\n}\n\n/**\n * Produce an appropriate pair of content-type and analyzeRequest value for the analysis request.\n * @internal\n */\nfunction toAnalyzeRequest(\n input: DocumentSource\n): [\"application/json\", AnalyzeDocumentRequest] | [ContentType, FormRecognizerRequestBody] {\n switch (input.kind) {\n case \"body\":\n return [\"application/octet-stream\", input.body];\n case \"url\":\n return [\"application/json\", { urlSource: input.url }];\n case \"base64\":\n return [\"application/json\", { base64Source: input.base64 }];\n default: {\n const __exhaust: never = input;\n throw new Error(`Unreachable 'toAnalyzeRequest' case: ${__exhaust}`);\n }\n }\n}\n\n/**\n * The input to a document analysis operation.\n */\n// type DocumentSource = DocumentBodySource | DocumentUrlSource | DocumentBase64Source;\n\nfunction source<K extends DocumentSource[\"kind\"]>(\n kind: K,\n value: Extract<DocumentSource, { kind: K }>[K & keyof Extract<DocumentSource, { kind: K }>]\n): DocumentSource {\n return {\n kind,\n [kind]: value,\n } as unknown as DocumentSource;\n}\n\n/**\n * The input to a document analysis operation.\n *\n * @internal\n */\ntype DocumentSource = {\n [K in keyof DocumentSourceTypes]: {\n /** The input kind. */\n kind: K;\n } & { [_ in K]: DocumentSourceTypes[K] };\n}[keyof DocumentSourceTypes];\n\n/**\n * A map of input discriminants to concrete input types.\n *\n * @internal\n */\ninterface DocumentSourceTypes {\n /**\n * A document buffer or stream to be uploaded in the request body.\n */\n body: FormRecognizerRequestBody;\n\n /**\n * A URL to a document to be analyzed.\n */\n url: string;\n\n /**\n * The data of a document to be analyzed. This is NOT base64-encoded, but will\n * be base64-encoded by the client before uploading.\n *\n * NOTE: This is never used by the client because it is inefficient compared to direct uploads and does not currently\n * support any features that `body` does not.\n */\n base64: Uint8Array;\n}\n"]}
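Buried in the `sourcesContent` of the regenerated map above is the client's new polling behavior: when the service returns a `Retry-After` header, the poller adjusts its delay, treating a numeric value as seconds and anything else as an HTTP date. A minimal self-contained sketch of that conversion (the `applyRetryAfter` wrapper and its `updateDelay` parameter are stand-ins for the poller context's hook in the embedded source):

```typescript
// Sketch of the Retry-After handling embedded in the new client source above.
// `updateDelay` stands in for the poller context hook (ctx.updateDelay); it is
// a parameter here so the sketch is self-contained.
function applyRetryAfter(
  headerValue: string | undefined,
  updateDelay: (delayMs: number | undefined) => void
): void {
  if (!headerValue) {
    // No Retry-After header: fall back to the poller's default interval.
    updateDelay(undefined);
    return;
  }
  // Numeric Retry-After values are given in seconds; convert to milliseconds.
  const retryAfterMs = Number(headerValue) * 1000;
  if (!Number.isNaN(retryAfterMs)) {
    updateDelay(retryAfterMs);
  } else {
    // Otherwise the value is an HTTP date; delay until that instant.
    updateDelay(Date.parse(headerValue) - Date.now());
  }
}
```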
@@ -1 +1 @@
- {"version":3,"file":"documentModel.js","sourceRoot":"","sources":["../../src/documentModel.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAMlC,OAAO,EAAE,WAAW,EAAE,YAAY,EAAE,MAAM,QAAQ,CAAC;AAuBnD;;;;;;;GAOG;AACH,SAAS,YAAY,CACnB,SAAiB,EACjB,MAA2B,EAC3B,KAAoB;IAEpB,IAAI,MAAM,CAAC,IAAI,KAAK,KAAK,CAAC,IAAI,EAAE;QAC9B,MAAM,IAAI,KAAK,CACb,qBAAqB,SAAS,cAAc,KAAK,CAAC,IAAI,oBAAoB,MAAM,CAAC,IAAI,GAAG,CACzF,CAAC;KACH;IAED,kFAAkF;IAClF,IAAI,KAAK,CAAC,IAAI,KAAK,QAAQ,EAAE;QAC3B,MAAM,MAAM,GAAQ,EAAE,CAAC;QAEvB,KAAK,MAAM,CAAC,YAAY,EAAE,cAAc,CAAC,IAAI,MAAM,CAAC,OAAO,CAAC,MAAM,CAAC,UAAW,CAAC,EAAE;YAC/E,IAAI,KAAK,CAAC,UAAU,CAAC,YAAY,CAAC,KAAK,SAAS,IAAI,KAAK,CAAC,UAAU,CAAC,YAAY,CAAC,KAAK,IAAI,EAAE;gBAC3F,MAAM,aAAa,GAAG,CACpB,WAAW,CAAC,YAAY,CAAC,CAAC,CAAC,CAAC,YAAY,CAAC,CAAC,CAAC,YAAY,CAAC,YAAY,CAAC,CACtE,CAAC,OAAO,CAAC,KAAK,EAAE,EAAE,CAAC,CAAC;gBAErB,MAAM,CAAC,aAAa,CAAC,GAAG,YAAY,CAClC,SAAS,GAAG,GAAG,GAAG,YAAY,EAC9B,cAAc,EACd,KAAK,CAAC,UAAU,CAAC,YAAY,CAAE,CAChC,CAAC;aACH;SACF;QAED,uCACK,KAAK,KACR,UAAU,EAAE,MAAM,IAClB;KACH;SAAM,IAAI,KAAK,CAAC,IAAI,KAAK,OAAO,EAAE;QACjC,uCACK,KAAK,KACR,MAAM,EAAE,KAAK,CAAC,MAAM,CAAC,GAAG,CAAC,CAAC,GAAG,EAAE,GAAG,EAAE,EAAE,CACpC,YAAY,CAAC,SAAS,GAAG,GAAG,GAAG,GAAG,GAAG,GAAG,EAAE,MAAM,CAAC,KAAM,EAAE,GAAG,CAAC,CAC9D,IACD;KACH;;QAAM,OAAO,KAAK,CAAC;AACtB,CAAC;AAED;;;;;;;;;GASG;AACH,MAAM,UAAU,qBAAqB,CACnC,MAA+C;IAE/C,OAAO;QACL,OAAO,EAAE,MAAM,CAAC,OAAO;QACvB,UAAU,EAAE,MAAM,CAAC,UAAsC;QACzD,eAAe,CAAC,UAAyB;;YACvC,MAAM,YAAY,GAAG,MAAM,CAAC,OAAO,CAAC,MAAA,MAAM,CAAC,QAAQ,mCAAI,EAAE,CAAC,CAAC,MAAM,GAAG,CAAC,CAAC;YAEtE,uCACK,UAAU,KACb,SAAS,EAAE,YAAY;oBACrB,CAAC,CAAC,MAAA,UAAU,CAAC,SAAS,0CAAE,GAAG,CAAC,UAAU,CAAC;oBACvC,CAAC,CAAC,MAAA,UAAU,CAAC,SAAS,mCAAI,EAAE,IAC9B;YAEF,SAAS,UAAU,CAAC,QAA0B;;gBAC5C,MAAM,MAAM,GAA4B,EAAE,CAAC;gBAC3C,MAAM,KAAK,GAAG,MAAA,MAAM,CAAC,QAAQ,0CAAG,QAAQ,CAAC,OAAO,CAAC,CAAC;gBAElD,IAAI,KAAK,KAAK,SAAS,EAAE;oBACvB,MAAM,IAAI,KAAK,CACb,6BAA6B,QAAQ,CAAC,OAAO,4BAA4B,MAAM,CAAC,OAAO,GAAG,CAC3F,CAAC;iBACH;gBACD,KAAK,MAAM,CAAC,SAAS,EAAE,WAAW,CAAC,IAAI,MAAM,CAAC,OAAO,CAAC,KAAK,CAAC,WAAW,CAAC,EAAE;oBACxE,IACE,QAAQ,CAAC,MAAM;wBACf,QAAQ,CAAC,MAAM,CAAC,SAAS,CAAC,KAAK,SAAS;wBACxC,QAAQ,CAAC,MAAM,CAAC,SAAS,CAAC,KAAK,IAAI,EACnC;wBACA,MAAM,aAAa,GAAG,CACpB,WAAW,CAAC,SAAS,CAAC,CAAC,CAAC,CAAC,SAAS,CAAC,CAAC,CAAC,YAAY,CAAC,SAAS,CAAC,CAC7D,CAAC,OAAO,CAAC,KAAK,EAAE,EAAE,CAAC,CAAC;wBACrB,MAAM,CAAC,aAAa,CAAC,GAAG,YAAY,CAClC,SAAS,EACT,WAAW,EACX,QAAQ,CAAC,MAAM,CAAC,SAAS,CAAC,CAC3B,CAAC;qBACH;iBACF;gBAED,uCACK,QAAQ,KACX,MAAM,EAAE,MAAM,IACd;YACJ,CAAC;QACH,CAAC;KACF,CAAC;AACJ,CAAC","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport { DocumentFieldSchema, DocumentModelDetails } from \"./generated\";\nimport { AnalyzedDocument, AnalyzeResult } from \"./lro/analysis\";\nimport { DocumentField } from \"./models/fields\";\nimport { FormRecognizerApiVersion } from \"./options\";\nimport { isAcronymic, uncapitalize } from \"./util\";\n\n/**\n * A well-known model specification that supports extracting structured documents.\n *\n * See the `beginAnalyzeDocument` method of {@link DocumentAnalysisClient}, which supports consuming these\n * `DocumentModel` objects instead of model ID strings to provide stronger result types.\n */\nexport interface DocumentModel<Result> {\n /**\n * The unique ID of this model.\n */\n modelId: string;\n /**\n * The API version of the model.\n */\n apiVersion?: FormRecognizerApiVersion;\n /**\n * An associated transformation that is used to conver the base (weak) Result type to the strong version.\n */\n transformResult: 
(input: AnalyzeResult) => Result;\n}\n\n/**\n * Checks a field value against a schema and converts it into a strong idiomatic DocumentField,\n * @internal\n * @param fieldName - the name of the field (used in diagnostics)\n * @param schema - the field's schema\n * @param field - the raw DocumentField value\n * @returns\n */\nfunction extractField(\n fieldName: string,\n schema: DocumentFieldSchema,\n field: DocumentField\n): DocumentField {\n if (schema.type !== field.kind) {\n throw new Error(\n `Schema violation: ${fieldName} had type \"${field.kind}\", but expected \"${schema.type}\"`\n );\n }\n\n // Objects need to be handled specially, so that we can camelCase the field names.\n if (field.kind === \"object\") {\n const result: any = {};\n\n for (const [subFieldName, subFieldSchema] of Object.entries(schema.properties!)) {\n if (field.properties[subFieldName] !== undefined && field.properties[subFieldName] !== null) {\n const trueFieldName = (\n isAcronymic(subFieldName) ? subFieldName : uncapitalize(subFieldName)\n ).replace(/\\s/g, \"\");\n\n result[trueFieldName] = extractField(\n fieldName + \".\" + subFieldName,\n subFieldSchema,\n field.properties[subFieldName]!\n );\n }\n }\n\n return {\n ...field,\n properties: result,\n };\n } else if (field.kind === \"array\") {\n return {\n ...field,\n values: field.values.map((val, idx) =>\n extractField(fieldName + \"[\" + idx + \"]\", schema.items!, val)\n ),\n };\n } else return field;\n}\n\n/**\n * Create a DocumentModel that performs analysis using the given schema.\n *\n * The types of `documents` are created from the schema, so they are `unknown` unless they are asserted to be a\n * different type.\n *\n * @hidden\n * @param schema - model schema contents\n * @returns - a DocumentModel that encodes the schema\n */\nexport function createModelFromSchema(\n schema: Omit<DocumentModelDetails, \"createdOn\">\n): DocumentModel<AnalyzeResult<unknown>> {\n return {\n modelId: schema.modelId,\n apiVersion: schema.apiVersion as FormRecognizerApiVersion,\n transformResult(baseResult: AnalyzeResult): AnalyzeResult<unknown> {\n const hasDocuments = Object.entries(schema.docTypes ?? {}).length > 0;\n\n return {\n ...baseResult,\n documents: hasDocuments\n ? baseResult.documents?.map(toDocument)\n : baseResult.documents ?? [],\n };\n\n function toDocument(document: AnalyzedDocument): unknown {\n const result: Record<string, unknown> = {};\n const model = schema.docTypes?.[document.docType];\n\n if (model === undefined) {\n throw new Error(\n `Unexpected document type \"${document.docType}\" in result using model \"${schema.modelId}\"`\n );\n }\n for (const [fieldName, fieldSchema] of Object.entries(model.fieldSchema)) {\n if (\n document.fields &&\n document.fields[fieldName] !== undefined &&\n document.fields[fieldName] !== null\n ) {\n const trueFieldName = (\n isAcronymic(fieldName) ? fieldName : uncapitalize(fieldName)\n ).replace(/\\s/g, \"\");\n result[trueFieldName] = extractField(\n fieldName,\n fieldSchema,\n document.fields[fieldName]\n );\n }\n }\n\n return {\n ...document,\n fields: result,\n };\n }\n },\n };\n}\n"]}
+ {"version":3,"file":"documentModel.js","sourceRoot":"","sources":["../../src/documentModel.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAKlC,OAAO,EAAE,WAAW,EAAE,YAAY,EAAE,MAAM,QAAQ,CAAC;AAuBnD;;;;;;;GAOG;AACH,SAAS,YAAY,CACnB,SAAiB,EACjB,MAA2B,EAC3B,KAAoB;IAEpB,IAAI,MAAM,CAAC,IAAI,KAAK,KAAK,CAAC,IAAI,EAAE;QAC9B,MAAM,IAAI,KAAK,CACb,qBAAqB,SAAS,cAAc,KAAK,CAAC,IAAI,oBAAoB,MAAM,CAAC,IAAI,GAAG,CACzF,CAAC;KACH;IAED,kFAAkF;IAClF,IAAI,KAAK,CAAC,IAAI,KAAK,QAAQ,EAAE;QAC3B,MAAM,MAAM,GAAQ,EAAE,CAAC;QAEvB,KAAK,MAAM,CAAC,YAAY,EAAE,cAAc,CAAC,IAAI,MAAM,CAAC,OAAO,CAAC,MAAM,CAAC,UAAW,CAAC,EAAE;YAC/E,IAAI,KAAK,CAAC,UAAU,CAAC,YAAY,CAAC,KAAK,SAAS,IAAI,KAAK,CAAC,UAAU,CAAC,YAAY,CAAC,KAAK,IAAI,EAAE;gBAC3F,MAAM,aAAa,GAAG,CACpB,WAAW,CAAC,YAAY,CAAC,CAAC,CAAC,CAAC,YAAY,CAAC,CAAC,CAAC,YAAY,CAAC,YAAY,CAAC,CACtE,CAAC,OAAO,CAAC,KAAK,EAAE,EAAE,CAAC,CAAC;gBAErB,MAAM,CAAC,aAAa,CAAC,GAAG,YAAY,CAClC,SAAS,GAAG,GAAG,GAAG,YAAY,EAC9B,cAAc,EACd,KAAK,CAAC,UAAU,CAAC,YAAY,CAAE,CAChC,CAAC;aACH;SACF;QAED,uCACK,KAAK,KACR,UAAU,EAAE,MAAM,IAClB;KACH;SAAM,IAAI,KAAK,CAAC,IAAI,KAAK,OAAO,EAAE;QACjC,uCACK,KAAK,KACR,MAAM,EAAE,KAAK,CAAC,MAAM,CAAC,GAAG,CAAC,CAAC,GAAG,EAAE,GAAG,EAAE,EAAE,CACpC,YAAY,CAAC,SAAS,GAAG,GAAG,GAAG,GAAG,GAAG,GAAG,EAAE,MAAM,CAAC,KAAM,EAAE,GAAG,CAAC,CAC9D,IACD;KACH;;QAAM,OAAO,KAAK,CAAC;AACtB,CAAC;AAED;;;;;;;;;GASG;AACH,MAAM,UAAU,qBAAqB,CACnC,MAA+C;IAE/C,OAAO;QACL,OAAO,EAAE,MAAM,CAAC,OAAO;QACvB,UAAU,EAAE,MAAM,CAAC,UAAU;QAC7B,eAAe,CAAC,UAAyB;;YACvC,MAAM,YAAY,GAAG,MAAM,CAAC,OAAO,CAAC,MAAA,MAAM,CAAC,QAAQ,mCAAI,EAAE,CAAC,CAAC,MAAM,GAAG,CAAC,CAAC;YAEtE,uCACK,UAAU,KACb,SAAS,EAAE,YAAY;oBACrB,CAAC,CAAC,MAAA,UAAU,CAAC,SAAS,0CAAE,GAAG,CAAC,UAAU,CAAC;oBACvC,CAAC,CAAC,MAAA,UAAU,CAAC,SAAS,mCAAI,EAAE,IAC9B;YAEF,SAAS,UAAU,CAAC,QAA0B;;gBAC5C,MAAM,MAAM,GAA4B,EAAE,CAAC;gBAC3C,MAAM,KAAK,GAAG,MAAA,MAAM,CAAC,QAAQ,0CAAG,QAAQ,CAAC,OAAO,CAAC,CAAC;gBAElD,IAAI,KAAK,KAAK,SAAS,EAAE;oBACvB,MAAM,IAAI,KAAK,CACb,6BAA6B,QAAQ,CAAC,OAAO,4BAA4B,MAAM,CAAC,OAAO,GAAG,CAC3F,CAAC;iBACH;gBACD,KAAK,MAAM,CAAC,SAAS,EAAE,WAAW,CAAC,IAAI,MAAM,CAAC,OAAO,CAAC,KAAK,CAAC,WAAW,CAAC,EAAE;oBACxE,IACE,QAAQ,CAAC,MAAM;wBACf,QAAQ,CAAC,MAAM,CAAC,SAAS,CAAC,KAAK,SAAS;wBACxC,QAAQ,CAAC,MAAM,CAAC,SAAS,CAAC,KAAK,IAAI,EACnC;wBACA,MAAM,aAAa,GAAG,CACpB,WAAW,CAAC,SAAS,CAAC,CAAC,CAAC,CAAC,SAAS,CAAC,CAAC,CAAC,YAAY,CAAC,SAAS,CAAC,CAC7D,CAAC,OAAO,CAAC,KAAK,EAAE,EAAE,CAAC,CAAC;wBACrB,MAAM,CAAC,aAAa,CAAC,GAAG,YAAY,CAClC,SAAS,EACT,WAAW,EACX,QAAQ,CAAC,MAAM,CAAC,SAAS,CAAC,CAC3B,CAAC;qBACH;iBACF;gBAED,uCACK,QAAQ,KACX,MAAM,EAAE,MAAM,IACd;YACJ,CAAC;QACH,CAAC;KACF,CAAC;AACJ,CAAC","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport { DocumentFieldSchema, DocumentModelDetails } from \"./generated\";\nimport { AnalyzedDocument, AnalyzeResult } from \"./lro/analysis\";\nimport { DocumentField } from \"./models/fields\";\nimport { isAcronymic, uncapitalize } from \"./util\";\n\n/**\n * A well-known model specification that supports extracting structured documents.\n *\n * See the `beginAnalyzeDocument` method of {@link DocumentAnalysisClient}, which supports consuming these\n * `DocumentModel` objects instead of model ID strings to provide stronger result types.\n */\nexport interface DocumentModel<Result> {\n /**\n * The unique ID of this model.\n */\n modelId: string;\n /**\n * The API version of the model.\n */\n apiVersion?: string;\n /**\n * An associated transformation that is used to conver the base (weak) Result type to the strong version.\n */\n transformResult: (input: AnalyzeResult) => Result;\n}\n\n/**\n * Checks a field value against a 
schema and converts it into a strong idiomatic DocumentField,\n * @internal\n * @param fieldName - the name of the field (used in diagnostics)\n * @param schema - the field's schema\n * @param field - the raw DocumentField value\n * @returns\n */\nfunction extractField(\n fieldName: string,\n schema: DocumentFieldSchema,\n field: DocumentField\n): DocumentField {\n if (schema.type !== field.kind) {\n throw new Error(\n `Schema violation: ${fieldName} had type \"${field.kind}\", but expected \"${schema.type}\"`\n );\n }\n\n // Objects need to be handled specially, so that we can camelCase the field names.\n if (field.kind === \"object\") {\n const result: any = {};\n\n for (const [subFieldName, subFieldSchema] of Object.entries(schema.properties!)) {\n if (field.properties[subFieldName] !== undefined && field.properties[subFieldName] !== null) {\n const trueFieldName = (\n isAcronymic(subFieldName) ? subFieldName : uncapitalize(subFieldName)\n ).replace(/\\s/g, \"\");\n\n result[trueFieldName] = extractField(\n fieldName + \".\" + subFieldName,\n subFieldSchema,\n field.properties[subFieldName]!\n );\n }\n }\n\n return {\n ...field,\n properties: result,\n };\n } else if (field.kind === \"array\") {\n return {\n ...field,\n values: field.values.map((val, idx) =>\n extractField(fieldName + \"[\" + idx + \"]\", schema.items!, val)\n ),\n };\n } else return field;\n}\n\n/**\n * Create a DocumentModel that performs analysis using the given schema.\n *\n * The types of `documents` are created from the schema, so they are `unknown` unless they are asserted to be a\n * different type.\n *\n * @hidden\n * @param schema - model schema contents\n * @returns - a DocumentModel that encodes the schema\n */\nexport function createModelFromSchema(\n schema: Omit<DocumentModelDetails, \"createdOn\">\n): DocumentModel<AnalyzeResult<unknown>> {\n return {\n modelId: schema.modelId,\n apiVersion: schema.apiVersion,\n transformResult(baseResult: AnalyzeResult): AnalyzeResult<unknown> {\n const hasDocuments = Object.entries(schema.docTypes ?? {}).length > 0;\n\n return {\n ...baseResult,\n documents: hasDocuments\n ? baseResult.documents?.map(toDocument)\n : baseResult.documents ?? [],\n };\n\n function toDocument(document: AnalyzedDocument): unknown {\n const result: Record<string, unknown> = {};\n const model = schema.docTypes?.[document.docType];\n\n if (model === undefined) {\n throw new Error(\n `Unexpected document type \"${document.docType}\" in result using model \"${schema.modelId}\"`\n );\n }\n for (const [fieldName, fieldSchema] of Object.entries(model.fieldSchema)) {\n if (\n document.fields &&\n document.fields[fieldName] !== undefined &&\n document.fields[fieldName] !== null\n ) {\n const trueFieldName = (\n isAcronymic(fieldName) ? fieldName : uncapitalize(fieldName)\n ).replace(/\\s/g, \"\");\n result[trueFieldName] = extractField(\n fieldName,\n fieldSchema,\n document.fields[fieldName]\n );\n }\n }\n\n return {\n ...document,\n fields: result,\n };\n }\n },\n };\n}\n"]}
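Both versions of the embedded `documentModel.ts` normalize the service's PascalCase field names into camelCase result keys, leaving acronyms intact and stripping whitespace; the only substantive change in this map is that `DocumentModel.apiVersion` is now a plain `string` rather than the removed `FormRecognizerApiVersion` union. A minimal sketch of the naming rule, with hypothetical stand-ins for the package's internal `isAcronymic` and `uncapitalize` helpers (their exact internals are assumptions):

```typescript
// Hypothetical stand-ins for the internal helpers referenced by
// documentModel.ts; only the composition below is taken from the source.
const isAcronymic = (name: string): boolean => /^[A-Z0-9]+$/.test(name);
const uncapitalize = (name: string): string =>
  name.charAt(0).toLowerCase() + name.slice(1);

// Acronyms are kept as-is, other names are camelCased, and whitespace is
// removed -- mirroring the logic in extractField/toDocument above.
function normalizeFieldName(name: string): string {
  return (isAcronymic(name) ? name : uncapitalize(name)).replace(/\s/g, "");
}

// e.g. normalizeFieldName("ReceiptType") === "receiptType"
//      normalizeFieldName("VAT") === "VAT"
```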
@@ -44,60 +44,17 @@ export class DocumentModelAdministrationClient {
             namespace: "Microsoft.CognitiveServices",
         });
     }
-    // #region Model Creation
-    /**
-     * Build a new model with a given ID from a set of input documents and labeled fields.
-     *
-     * The Model ID can consist of any text, so long as it does not begin with "prebuilt-" (as these models refer to
-     * prebuilt Form Recognizer models that are common to all resources), and so long as it does not already exist within
-     * the resource.
-     *
-     * The Form Recognizer service reads the training data set from an Azure Storage container, given as a URL to the
-     * container with a SAS token that allows the service backend to communicate with the container. At a minimum, the
-     * "read" and "list" permissions are required. In addition, the data in the given container must be organized
-     * according to a particular convention, which is documented in [the service's documentation for building custom
-     * models](https://aka.ms/form-recognizer/custom).
-     *
-     * ### Example
-     *
-     * ```javascript
-     * const modelId = "aNewModel";
-     * const containerUrl = "<training data container SAS URL>";
-     *
-     * const poller = await client.beginBuildDocumentModel(modelId, containerUrl, {
-     *   // Optionally, a text description may be attached to the model
-     *   description: "This is an example model!"
-     * });
-     *
-     * // Model building, like all other model creation operations, returns a poller that eventually produces a ModelDetails
-     * // object
-     * const modelDetails = await poller.pollUntilDone();
-     *
-     * const {
-     *   modelId, // identical to the modelId given when creating the model
-     *   description, // identical to the description given when creating the model
-     *   createdOn, // the Date (timestamp) that the model was created
-     *   docTypes // information about the document types in the model and their field schemas
-     * } = modelDetails;
-     * ```
-     *
-     * @param modelId - the unique ID of the model to create
-     * @param containerUrl - SAS-encoded URL to an Azure Storage container holding the training data set
-     * @param buildMode - the mode to use when building the model (see `DocumentModelBuildMode`)
-     * @param options - optional settings for the model build operation
-     * @returns a long-running operation (poller) that will eventually produce the created model information or an error
-     */
-    async beginBuildDocumentModel(modelId, containerUrl, buildMode, options = {}) {
-        return this._tracing.withSpan("DocumentModelAdministrationClient.beginBuildDocumentModel", options, (finalOptions) => this.createAdministrationPoller({
-            options: finalOptions,
-            start: (ctx) => this._restClient.documentModels.buildModel({
-                modelId,
-                description: finalOptions.description,
+    async beginBuildDocumentModel(modelId, urlOrSource, buildMode, options = {}) {
+        const sourceInfo = typeof urlOrSource === "string"
+            ? {
                 azureBlobSource: {
-                    containerUrl,
+                    containerUrl: urlOrSource,
                 },
-                buildMode,
-            }, Object.assign(Object.assign({}, finalOptions), { abortSignal: ctx.abortSignal })),
+            }
+            : urlOrSource;
+        return this._tracing.withSpan("DocumentModelAdministrationClient.beginBuildDocumentModel", options, (finalOptions) => this.createAdministrationPoller({
+            options: finalOptions,
+            start: (ctx) => this._restClient.documentModels.buildModel(Object.assign(Object.assign({ modelId, description: finalOptions.description }, sourceInfo), { buildMode }), Object.assign(Object.assign({}, finalOptions), { abortSignal: ctx.abortSignal })),
         }));
     }
     /**
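The hunk above captures the 5.0.0 signature change: `beginBuildDocumentModel` now takes a `urlOrSource` argument, treating a string as shorthand for an Azure Blob container SAS URL and passing any other content-source object through to `buildModel`. A minimal usage sketch under that reading (the endpoint, credential, and container URL are placeholders, and the `azureBlobSource` shape is taken from the hunk):

```typescript
import { DocumentModelAdministrationClient } from "@azure/ai-form-recognizer";
import { DefaultAzureCredential } from "@azure/identity";

const client = new DocumentModelAdministrationClient(
  "https://<resource name>.cognitiveservices.azure.com",
  new DefaultAzureCredential()
);

// Passing a string is shorthand for an Azure Blob content source...
const poller = await client.beginBuildDocumentModel(
  "aNewModel",
  "<training data container SAS URL>",
  "template"
);

// ...and is equivalent to passing the source object directly:
// await client.beginBuildDocumentModel(
//   "aNewModel",
//   { azureBlobSource: { containerUrl: "<training data container SAS URL>" } },
//   "template"
// );

const { modelId, docTypes } = await poller.pollUntilDone();
```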
@@ -283,17 +240,18 @@ export class DocumentModelAdministrationClient {
      * ```
      *
      * @param classifierId - the unique ID of the classifier to create
-     * @param docTypes - the document types to include in the classifier (a map of document type names to `ClassifierDocumentTypeDetails`)
+     * @param docTypeSources - the document types to include in the classifier and their sources (a map of document type
+     * names to `ClassifierDocumentTypeDetails`)
      * @param options - optional settings for the classifier build operation
      * @returns a long-running operation (poller) that will eventually produce the created classifier details or an error
      */
-    async beginBuildDocumentClassifier(classifierId, docTypes, options = {}) {
+    async beginBuildDocumentClassifier(classifierId, docTypeSources, options = {}) {
         return this._tracing.withSpan("DocumentModelAdministrationClient.beginBuildDocumentClassifier", options, (finalOptions) => this.createAdministrationPoller({
             options: finalOptions,
             start: () => this._restClient.documentClassifiers.buildClassifier({
                 classifierId,
                 description: finalOptions.description,
-                docTypes,
+                docTypes: docTypeSources,
             }, finalOptions),
         }));
     }
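This second hunk is a rename at the API surface only (`docTypes` becomes `docTypeSources`); the request body still sends the map under `docTypes`. A short companion sketch, reusing `client` from the previous example and assuming each `ClassifierDocumentTypeDetails` entry takes the same `azureBlobSource` shape shown in the model-building hunk:

```typescript
// Each document type maps to a ClassifierDocumentTypeDetails entry describing
// where its training documents live (container URLs are placeholders).
const classifierPoller = await client.beginBuildDocumentClassifier(
  "aNewClassifier",
  {
    invoice: {
      azureBlobSource: { containerUrl: "<invoice training data SAS URL>" },
    },
    receipt: {
      azureBlobSource: { containerUrl: "<receipt training data SAS URL>" },
    },
  }
);

const { classifierId, docTypes } = await classifierPoller.pollUntilDone();
```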