@azure/ai-form-recognizer 5.1.0-alpha.20250618.1 → 5.1.0-alpha.20250718.2
This diff represents the content of publicly available package versions that have been released to one of the supported registries. It is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/browser/bin/defaultFields.js +14 -13
- package/dist/browser/bin/defaultFields.js.map +1 -1
- package/dist/browser/bin/writeModelCode.js +19 -19
- package/dist/browser/bin/writeModelCode.js.map +1 -1
- package/dist/browser/documentAnalysisClient.js +29 -13
- package/dist/browser/documentAnalysisClient.js.map +1 -1
- package/dist/browser/documentModel.js +20 -10
- package/dist/browser/documentModel.js.map +1 -1
- package/dist/browser/documentModelAdministrationClient.js +33 -10
- package/dist/browser/documentModelAdministrationClient.js.map +1 -1
- package/dist/browser/error.js +15 -2
- package/dist/browser/error.js.map +1 -1
- package/dist/browser/generated/generatedClient.js +14 -4
- package/dist/browser/generated/generatedClient.js.map +1 -1
- package/dist/browser/generated/models/mappers.js +20 -8
- package/dist/browser/generated/models/mappers.js.map +1 -1
- package/dist/browser/generated/operations/documentClassifiers.js +14 -31
- package/dist/browser/generated/operations/documentClassifiers.js.map +1 -1
- package/dist/browser/generated/operations/documentModels.js +14 -31
- package/dist/browser/generated/operations/documentModels.js.map +1 -1
- package/dist/browser/generated/operations/miscellaneous.js +14 -31
- package/dist/browser/generated/operations/miscellaneous.js.map +1 -1
- package/dist/browser/lro/administration.js +1 -2
- package/dist/browser/lro/administration.js.map +1 -1
- package/dist/browser/lro/analysis.js +37 -17
- package/dist/browser/lro/analysis.js.map +1 -1
- package/dist/browser/lro/util/delayMs.js +3 -3
- package/dist/browser/lro/util/delayMs.js.map +1 -1
- package/dist/browser/lro/util/poller.js +7 -7
- package/dist/browser/lro/util/poller.js.map +1 -1
- package/dist/browser/models/fields.js +11 -5
- package/dist/browser/models/fields.js.map +1 -1
- package/dist/browser/transforms/polygon.js +16 -5
- package/dist/browser/transforms/polygon.js.map +1 -1
- package/dist/browser/util.js +6 -3
- package/dist/browser/util.js.map +1 -1
- package/dist/commonjs/bin/defaultFields.js +14 -13
- package/dist/commonjs/bin/defaultFields.js.map +1 -1
- package/dist/commonjs/bin/writeModelCode.js +19 -19
- package/dist/commonjs/bin/writeModelCode.js.map +1 -1
- package/dist/commonjs/documentAnalysisClient.js +29 -13
- package/dist/commonjs/documentAnalysisClient.js.map +1 -1
- package/dist/commonjs/documentModel.js +20 -10
- package/dist/commonjs/documentModel.js.map +1 -1
- package/dist/commonjs/documentModelAdministrationClient.js +33 -10
- package/dist/commonjs/documentModelAdministrationClient.js.map +1 -1
- package/dist/commonjs/error.js +15 -2
- package/dist/commonjs/error.js.map +1 -1
- package/dist/commonjs/generated/generatedClient.js +14 -4
- package/dist/commonjs/generated/generatedClient.js.map +1 -1
- package/dist/commonjs/generated/models/mappers.js +20 -8
- package/dist/commonjs/generated/models/mappers.js.map +1 -1
- package/dist/commonjs/generated/operations/documentClassifiers.js +14 -30
- package/dist/commonjs/generated/operations/documentClassifiers.js.map +1 -1
- package/dist/commonjs/generated/operations/documentModels.js +14 -30
- package/dist/commonjs/generated/operations/documentModels.js.map +1 -1
- package/dist/commonjs/generated/operations/miscellaneous.js +14 -30
- package/dist/commonjs/generated/operations/miscellaneous.js.map +1 -1
- package/dist/commonjs/lro/administration.js +1 -2
- package/dist/commonjs/lro/administration.js.map +1 -1
- package/dist/commonjs/lro/analysis.js +37 -17
- package/dist/commonjs/lro/analysis.js.map +1 -1
- package/dist/commonjs/lro/util/delayMs.js +3 -3
- package/dist/commonjs/lro/util/delayMs.js.map +1 -1
- package/dist/commonjs/lro/util/poller.js +7 -7
- package/dist/commonjs/lro/util/poller.js.map +1 -1
- package/dist/commonjs/models/fields.js +11 -5
- package/dist/commonjs/models/fields.js.map +1 -1
- package/dist/commonjs/transforms/polygon.js +16 -5
- package/dist/commonjs/transforms/polygon.js.map +1 -1
- package/dist/commonjs/tsdoc-metadata.json +11 -11
- package/dist/commonjs/util.js +6 -3
- package/dist/commonjs/util.js.map +1 -1
- package/dist/esm/bin/defaultFields.js +14 -13
- package/dist/esm/bin/defaultFields.js.map +1 -1
- package/dist/esm/bin/writeModelCode.js +19 -19
- package/dist/esm/bin/writeModelCode.js.map +1 -1
- package/dist/esm/documentAnalysisClient.js +29 -13
- package/dist/esm/documentAnalysisClient.js.map +1 -1
- package/dist/esm/documentModel.js +20 -10
- package/dist/esm/documentModel.js.map +1 -1
- package/dist/esm/documentModelAdministrationClient.js +33 -10
- package/dist/esm/documentModelAdministrationClient.js.map +1 -1
- package/dist/esm/error.js +15 -2
- package/dist/esm/error.js.map +1 -1
- package/dist/esm/generated/generatedClient.js +14 -4
- package/dist/esm/generated/generatedClient.js.map +1 -1
- package/dist/esm/generated/models/mappers.js +20 -8
- package/dist/esm/generated/models/mappers.js.map +1 -1
- package/dist/esm/generated/operations/documentClassifiers.js +14 -31
- package/dist/esm/generated/operations/documentClassifiers.js.map +1 -1
- package/dist/esm/generated/operations/documentModels.js +14 -31
- package/dist/esm/generated/operations/documentModels.js.map +1 -1
- package/dist/esm/generated/operations/miscellaneous.js +14 -31
- package/dist/esm/generated/operations/miscellaneous.js.map +1 -1
- package/dist/esm/lro/administration.js +1 -2
- package/dist/esm/lro/administration.js.map +1 -1
- package/dist/esm/lro/analysis.js +37 -17
- package/dist/esm/lro/analysis.js.map +1 -1
- package/dist/esm/lro/util/delayMs.js +3 -3
- package/dist/esm/lro/util/delayMs.js.map +1 -1
- package/dist/esm/lro/util/poller.js +7 -7
- package/dist/esm/lro/util/poller.js.map +1 -1
- package/dist/esm/models/fields.js +11 -5
- package/dist/esm/models/fields.js.map +1 -1
- package/dist/esm/transforms/polygon.js +16 -5
- package/dist/esm/transforms/polygon.js.map +1 -1
- package/dist/esm/util.js +6 -3
- package/dist/esm/util.js.map +1 -1
- package/dist/react-native/bin/defaultFields.js +14 -13
- package/dist/react-native/bin/defaultFields.js.map +1 -1
- package/dist/react-native/bin/writeModelCode.js +19 -19
- package/dist/react-native/bin/writeModelCode.js.map +1 -1
- package/dist/react-native/documentAnalysisClient.js +29 -13
- package/dist/react-native/documentAnalysisClient.js.map +1 -1
- package/dist/react-native/documentModel.js +20 -10
- package/dist/react-native/documentModel.js.map +1 -1
- package/dist/react-native/documentModelAdministrationClient.js +33 -10
- package/dist/react-native/documentModelAdministrationClient.js.map +1 -1
- package/dist/react-native/error.js +15 -2
- package/dist/react-native/error.js.map +1 -1
- package/dist/react-native/generated/generatedClient.js +14 -4
- package/dist/react-native/generated/generatedClient.js.map +1 -1
- package/dist/react-native/generated/models/mappers.js +20 -8
- package/dist/react-native/generated/models/mappers.js.map +1 -1
- package/dist/react-native/generated/operations/documentClassifiers.js +14 -31
- package/dist/react-native/generated/operations/documentClassifiers.js.map +1 -1
- package/dist/react-native/generated/operations/documentModels.js +14 -31
- package/dist/react-native/generated/operations/documentModels.js.map +1 -1
- package/dist/react-native/generated/operations/miscellaneous.js +14 -31
- package/dist/react-native/generated/operations/miscellaneous.js.map +1 -1
- package/dist/react-native/lro/administration.js +1 -2
- package/dist/react-native/lro/administration.js.map +1 -1
- package/dist/react-native/lro/analysis.js +37 -17
- package/dist/react-native/lro/analysis.js.map +1 -1
- package/dist/react-native/lro/util/delayMs.js +3 -3
- package/dist/react-native/lro/util/delayMs.js.map +1 -1
- package/dist/react-native/lro/util/poller.js +7 -7
- package/dist/react-native/lro/util/poller.js.map +1 -1
- package/dist/react-native/models/fields.js +11 -5
- package/dist/react-native/models/fields.js.map +1 -1
- package/dist/react-native/transforms/polygon.js +16 -5
- package/dist/react-native/transforms/polygon.js.map +1 -1
- package/dist/react-native/util.js +6 -3
- package/dist/react-native/util.js.map +1 -1
- package/package.json +2 -2
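
Many of the per-file deltas below follow one pattern visible in the hunks: the emitted JavaScript no longer downlevels nullish coalescing (`??`) and optional chaining (`?.`), so the `var _a, _b, ...` temporaries disappear and the expressions are inlined. A minimal sketch of the two emit styles, assuming a made-up `ModelLike` shape (the real downleveled emit omits the type annotation on `_a`):

```ts
// Illustrative only -- not taken from the package itself.
interface ModelLike {
  docTypes?: Record<string, unknown>;
}

// Older-target emit (the shape of the removed `var _a` lines):
function docTypesDownleveled(model: ModelLike): Record<string, unknown> {
  var _a: Record<string, unknown> | undefined;
  return (_a = model.docTypes) !== null && _a !== void 0 ? _a : {};
}

// Modern-target emit (matches the added lines in the hunks below):
function docTypesNative(model: ModelLike): Record<string, unknown> {
  return model.docTypes ?? {};
}
```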
bin/defaultFields.js:

```diff
@@ -66,18 +66,19 @@ const documentFeatures = [...layoutFeatures, "keyValuePairs"];
  * @returns the list of features supported by the model
  */
 export function getFeatures(model) {
-    [13 removed lines (old lines 69-81): content not shown in the source diff view]
+    return (model.features ??
+        {
+            "prebuilt-read": [...textFeatures, "languages"],
+            "prebuilt-layout": layoutFeatures,
+            "prebuilt-document": [...documentFeatures, "unconstrainedDocuments"],
+            "prebuilt-receipt": [...textFeatures, "keyValuePairs", "_documents"],
+            "prebuilt-invoice": [...layoutFeatures, "keyValuePairs", "_documents"],
+            "prebuilt-idDocument": [...textFeatures, "keyValuePairs", "_documents"],
+            "prebuilt-businessCard": [...textFeatures, "keyValuePairs", "_documents"],
+            "prebuilt-tax.us.w2": [...textFeatures, "keyValuePairs", "_documents"],
+            "prebuilt-vaccinationCard": [...textFeatures, "keyValuePairs", "_documents"],
+            "prebuilt-healthInsuranceCard.us": [...textFeatures, "keyValuePairs", "_documents"],
+        }[model.modelId] ??
+        (model.modelId.startsWith("prebuilt-") ? allFeatures : [...documentFeatures, "_documents"]));
 }
 //# sourceMappingURL=defaultFields.js.map
```
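The rewritten `getFeatures` above reads as a three-step fallback. A self-contained sketch of that lookup order, with an abbreviated table and an assumed `ModelLike` shape (not the package's actual declarations):

```ts
interface ModelLike {
  modelId: string;
  features?: string[];
}

const textFeatures = ["pages", "paragraphs", "styles"];
const layoutFeatures = [...textFeatures, "tables"];
const documentFeatures = [...layoutFeatures, "keyValuePairs"];
const allFeatures = [...documentFeatures, "languages", "_documents"];

const prebuiltFeatures: Record<string, string[] | undefined> = {
  "prebuilt-read": [...textFeatures, "languages"],
  "prebuilt-layout": layoutFeatures,
  // ...the real table also covers receipt, invoice, idDocument, etc.
};

function getFeaturesSketch(model: ModelLike): string[] {
  return (
    // 1. An explicit feature list on the model wins outright.
    model.features ??
    // 2. Otherwise, known prebuilt models come from the lookup table.
    prebuiltFeatures[model.modelId] ??
    // 3. Unknown prebuilts get every feature; custom models get the
    //    document features plus extracted documents.
    (model.modelId.startsWith("prebuilt-") ? allFeatures : [...documentFeatures, "_documents"])
  );
}

// A custom model falls through to the final branch:
getFeaturesSketch({ modelId: "myCustomModel" });
// -> ["pages", "paragraphs", "styles", "tables", "keyValuePairs", "_documents"]
```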
bin/defaultFields.js.map:

```diff
@@ -1 +1 @@
 [single-line source map regenerated to match the new emit; the full mappings and embedded sourcesContent payloads are omitted]
```
bin/writeModelCode.js:

```diff
@@ -116,12 +116,16 @@ export async function writeModelCode(model, test) {
 }
 // Get the doc type variants of a model.
 function extractModelVariants(model, _rootSlug) {
-    var _a;
     const result = {};
-    for (const [docType, info] of Object.entries(
+    for (const [docType, info] of Object.entries(model.docTypes ?? {})) {
         const slug = docType.split(".");
         const docTypeName = camelCase(slug);
-        result[docTypeName] =
+        result[docTypeName] = {
+            ...info,
+            name: docTypeName,
+            originalDocType: docType,
+            slug,
+        };
     }
     return result;
 }
```
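The `extractModelVariants` change replaces a downleveled emit with a native object spread that tacks naming metadata onto each doc type. A sketch of the resulting shape, where `DocTypeInfo` and the `camelCase` helper are stand-ins rather than the package's own definitions:

```ts
interface DocTypeInfo {
  description?: string;
}

interface DocTypeVariant extends DocTypeInfo {
  name: string;            // camelCased key used in generated code
  originalDocType: string; // the service's doc type identifier
  slug: string[];          // the dot-separated parts of that identifier
}

// Stand-in helper; the package has its own camelCase in ./utils.js.
function camelCase(parts: string[]): string {
  return parts.map((p, i) => (i === 0 ? p : p[0].toUpperCase() + p.slice(1))).join("");
}

function extractVariantsSketch(
  docTypes: Record<string, DocTypeInfo> | undefined,
): Record<string, DocTypeVariant> {
  const result: Record<string, DocTypeVariant> = {};
  // `?? {}` makes a model with no docTypes yield an empty result
  // instead of throwing inside Object.entries.
  for (const [docType, info] of Object.entries(docTypes ?? {})) {
    const slug = docType.split(".");
    const docTypeName = camelCase(slug);
    result[docTypeName] = { ...info, name: docTypeName, originalDocType: docType, slug };
  }
  return result;
}

extractVariantsSketch({ "receipt.retailMeal": {} });
// -> { receiptRetailMeal: { name: "receiptRetailMeal",
//      originalDocType: "receipt.retailMeal", slug: ["receipt", "retailMeal"] } }
```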
bin/writeModelCode.js (continued):

```diff
@@ -183,22 +187,21 @@ function* writeFieldsInterfaces(docType) {
     yield* writeInterfaceDeclaration(name, docType.description, docType.fieldSchema);
     // Recursively visit all child interfaces and write them.
     yield* (function* collectNestedInterfaces(fields, namingContext) {
-        var _a, _b, _c, _d, _e, _f, _g;
         for (const [fieldName, schema] of Object.entries(fields)) {
-            if (schema.type === "array" &&
+            if (schema.type === "array" && schema.items?.type === "object") {
                 // Generate element interface and recur if the nested type is an object
                 const nextNamingContext = namingContext + fieldName + "Element";
                 yield "";
                 yield "";
-                yield* writeInterfaceDeclaration(nextNamingContext, schema.description,
-                yield* collectNestedInterfaces(
+                yield* writeInterfaceDeclaration(nextNamingContext, schema.description, schema.items?.properties ?? {});
+                yield* collectNestedInterfaces(schema.items?.properties ?? {}, nextNamingContext);
             }
             else if (schema.type === "object") {
                 // Generate named interface and recur
                 yield "";
                 yield "";
-                yield* writeInterfaceDeclaration(namingContext + fieldName, schema.description,
-                yield* collectNestedInterfaces(
+                yield* writeInterfaceDeclaration(namingContext + fieldName, schema.description, schema.properties ?? {});
+                yield* collectNestedInterfaces(schema.properties ?? {}, namingContext + fieldName);
             }
         }
     })(docType.fieldSchema, docType.name);
```
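In `collectNestedInterfaces`, the guard and the recursive calls now read the nested schema with optional chaining instead of the removed `_a`...`_g` temporaries. A small sketch of why `schema.items?.type` is safe on arrays with no `items`, assuming a minimal `FieldSchema` shape:

```ts
interface FieldSchema {
  type: string;
  items?: FieldSchema;
  properties?: Record<string, FieldSchema>;
}

function isObjectArray(schema: FieldSchema): boolean {
  // `schema.items?.type` evaluates to undefined when `items` is absent,
  // so the comparison is simply false instead of a TypeError.
  return schema.type === "array" && schema.items?.type === "object";
}

isObjectArray({ type: "array" });                            // false
isObjectArray({ type: "array", items: { type: "object" } }); // true
```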
bin/writeModelCode.js (continued):

```diff
@@ -225,16 +228,13 @@ function* writeFieldsInterfaces(docType) {
     }
     else {
         yield prefix;
-        yield* indent(flatMap(fieldEntries.map(([fieldName, schema]) => {
-        [6 removed lines (old lines 229-234): content not shown in the source diff view]
-            optional: true,
-        });
-        }), writeField));
+        yield* indent(flatMap(fieldEntries.map(([fieldName, schema]) => ({
+            // Uncapitalize the field name and remove all whitespace
+            name: uncapitalize(fieldName).replace(/\s/g, ""),
+            type: writeType(schema, fieldName, docType.slug),
+            docContents: schema.description ?? `\`${docType.name}\` "${fieldName}" field`,
+            optional: true,
+        })), writeField));
         yield suffix;
     }
 }
```
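This last hunk folds the field mapper into an arrow function that returns an object literal directly; the parentheses around the braces keep them from being parsed as a function body. A reduced sketch of the `([fieldName, schema]) => ({ ... })` pattern, with a stand-in for the package's `uncapitalize` helper:

```ts
// Stand-in for the uncapitalize helper from ./utils.js.
function uncapitalizeSketch(s: string): string {
  return s.charAt(0).toLowerCase() + s.slice(1);
}

const fieldEntries: Array<[string, { description?: string }]> = [
  ["Merchant Name", {}],
];

const fields = fieldEntries.map(([fieldName, schema]) => ({
  // Uncapitalize the field name and remove all whitespace.
  name: uncapitalizeSketch(fieldName).replace(/\s/g, ""),
  docContents: schema.description ?? `"${fieldName}" field`,
  optional: true as const,
}));

fields[0].name; // "merchantName"
```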
bin/writeModelCode.js.map:

```diff
@@ -1 +1 @@
 [single-line source map regenerated to match the new emit; the full mappings and embedded sourcesContent payloads are omitted]
```
@@ -39,6 +39,8 @@ import { makeServiceClient, Mappers, SERIALIZER } from "./util.js";
  * ```
  */
 export class DocumentAnalysisClient {
+    _restClient;
+    _tracing;
     constructor(endpoint, credential, options = {}) {
         this._restClient = makeServiceClient(endpoint, credential, options);
         this._tracing = createTracingClient({
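The two added lines declare `_restClient` and `_tracing` as bare class fields rather than leaving them to be created by assignment in the constructor. The TypeScript source embedded in the source map below is unchanged, so this looks like a compile-target change (native ES2022 class fields) rather than a behavioral one. A minimal sketch of the emit difference, assuming a `target`/`useDefineForClassFields` bump — the diff itself does not show the compiler settings:

```ts
// TypeScript source (identical in both package versions):
class Client {
  private _value: number;
  constructor() {
    this._value = 1;
  }
}

// Older targets erase the field declaration entirely; the property only
// comes into being when the constructor assigns it. ES2022+ targets emit
// a real field declaration, matching the `+    _restClient;` lines above:
//
//   class Client {
//       _value;
//       constructor() {
//           this._value = 1;
//       }
//   }
```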
@@ -82,12 +84,18 @@ export class DocumentAnalysisClient {
         return this.createUnifiedPoller((abortSignal) => {
             const [contentType, analyzeRequest] = toAnalyzeRequest(input);
             if (contentType === "application/json") {
-                return this._restClient.documentModels.analyzeDocument(initialModelId, contentType,
-
+                return this._restClient.documentModels.analyzeDocument(initialModelId, contentType, {
+                    ...options,
+                    abortSignal,
+                    analyzeRequest,
+                });
             }
             else {
-                return this._restClient.documentModels.analyzeDocument(initialModelId, contentType,
-
+                return this._restClient.documentModels.analyzeDocument(initialModelId, contentType, {
+                    ...options,
+                    abortSignal,
+                    analyzeRequest,
+                });
             }
         }, {
             initialModelId,
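The replaced call sites pass the same three request options; only the emit changes. The old code routed them through a downlevel object-merge helper (its long continuation lines are truncated to blanks in this view), while the new code uses native object spread. The same substitution recurs in the `classifyDocument` hunk below. A small equivalence sketch with hypothetical placeholder values:

```ts
// Hypothetical inputs, for illustration only.
const options = { timeout: 30_000 };
const abortSignal = undefined;
const analyzeRequest = { urlSource: "https://example.invalid/doc.pdf" };

// New emit: native object spread (ES2018+).
const viaSpread = { ...options, abortSignal, analyzeRequest };

// Typical downlevel equivalent produced for older targets.
const viaAssign = Object.assign(Object.assign({}, options), {
  abortSignal,
  analyzeRequest,
});

// Both build the same options object.
console.log(JSON.stringify(viaSpread) === JSON.stringify(viaAssign)); // true
```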
@@ -212,12 +220,18 @@ export class DocumentAnalysisClient {
         return this.createUnifiedPoller(async (abortSignal) => {
             const [contentType, classifyRequest] = toAnalyzeRequest(input);
             if (contentType === "application/json") {
-                return this._restClient.documentClassifiers.classifyDocument(classifierId, contentType,
-
+                return this._restClient.documentClassifiers.classifyDocument(classifierId, contentType, {
+                    ...options,
+                    abortSignal,
+                    classifyRequest,
+                });
             }
             else {
-                return this._restClient.documentClassifiers.classifyDocument(classifierId, contentType,
-
+                return this._restClient.documentClassifiers.classifyDocument(classifierId, contentType, {
+                    ...options,
+                    abortSignal,
+                    classifyRequest,
+                });
             }
         }, {
             initialModelId: classifierId,
@@ -239,8 +253,8 @@ export class DocumentAnalysisClient {
         // TODO: what should we do if resumeFrom.modelId is different from initialModelId?
         // And what do we do with the redundant input??
         const getAnalyzeResult = (ctx, operationLocation) => this._tracing.withSpan("DocumentAnalysisClient.createAnalysisPoller-getAnalyzeResult", definition.options, (finalOptions) => this._restClient.sendOperationRequest({
-            options:
-
+            options: {
+                onResponse: async (rawResponse, ...args) => {
                     // Capture the `Retry-After` header if it was sent.
                     const retryAfterHeader = rawResponse.headers.get("retry-after");
                     // Convert the header value to milliseconds. If the header is not a valid number, then it is an HTTP
@@ -258,11 +272,13 @@ export class DocumentAnalysisClient {
                         ctx.updateDelay(undefined);
                     }
                     // Forward the `onResponse` callback if it was provided.
-                    return
-                }
+                    return finalOptions.onResponse?.(rawResponse, ...args);
+                },
+                ...finalOptions,
                 // We need to pass the abort signal from the context rather than from the options, since the user could
                 // poll the LRO with a different AbortSignal than it was instantiated with.
-                abortSignal: ctx.abortSignal
+                abortSignal: ctx.abortSignal,
+            },
         }, {
             path: operationLocation,
             httpMethod: "GET",
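These two hunks rebuild the `options` object passed to `sendOperationRequest`. Functionally the callback is the same in both versions: it reads the `Retry-After` header to tune the polling delay, then forwards any caller-supplied `onResponse`; the deleted lines (truncated here) held the old helper-based emit of that logic, now replaced with native spread and optional chaining. A standalone sketch of the header conversion the callback performs — `retryAfterToMs` is an illustrative name, not an export of this package:

```ts
// A numeric Retry-After value is a delay in seconds; any other value is
// treated as an HTTP date and converted to a delta from the current time.
function retryAfterToMs(header: string | null): number | undefined {
  if (!header) {
    return undefined; // no header: let the poller use its default delay
  }
  const asMs = Number(header) * 1000;
  return Number.isNaN(asMs) ? Date.parse(header) - Date.now() : asMs;
}

console.log(retryAfterToMs("5")); // 5000
console.log(retryAfterToMs(null)); // undefined
```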
@@ -1 +1 @@
-
{"version":3,"file":"documentAnalysisClient.js","sourceRoot":"","sources":["../../src/documentAnalysisClient.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAGlC,OAAO,EAAE,mBAAmB,EAAE,MAAM,qBAAqB,CAAC;AAE1D,OAAO,EAAE,2BAA2B,EAAE,WAAW,EAAE,MAAM,gBAAgB,CAAC;AAM1E,OAAO,EAAE,OAAO,EAAE,MAAM,kCAAkC,CAAC;AAQ3D,OAAO,EACL,4BAA4B,EAC5B,oCAAoC,GACrC,MAAM,mBAAmB,CAAC;AAE3B,OAAO,EAAE,GAAG,EAAE,MAAM,sBAAsB,CAAC;AAI3C,OAAO,EAAE,iBAAiB,EAAE,OAAO,EAAE,UAAU,EAAE,MAAM,WAAW,CAAC;AAInE;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;GA+BG;AACH,MAAM,OAAO,sBAAsB;IAgEjC,YACE,QAAgB,EAChB,UAA2C,EAC3C,UAAyC,EAAE;QAE3C,IAAI,CAAC,WAAW,GAAG,iBAAiB,CAAC,QAAQ,EAAE,UAAU,EAAE,OAAO,CAAC,CAAC;QACpE,IAAI,CAAC,QAAQ,GAAG,mBAAmB,CAAC;YAClC,WAAW,EAAE,2BAA2B;YACxC,cAAc,EAAE,WAAW;YAC3B,SAAS,EAAE,6BAA6B;SACzC,CAAC,CAAC;IACL,CAAC;IA2JM,KAAK,CAAC,oBAAoB,CAC/B,KAAsC,EACtC,QAAmC;IACnC,8DAA8D;IAC9D,UAA2C,EAAE;QAE7C,OAAO,IAAI,CAAC,QAAQ,CAAC,QAAQ,CAC3B,6CAA6C,EAC7C,OAAO;QACP,+GAA+G;QAC/G,wDAAwD;QACxD,IAAI,CAAC,OAAO,CAAC,IAAI,CACf,IAAI,EACJ,KAAK,EACL,OAAO,QAAQ,KAAK,QAAQ,CAAC,CAAC,CAAC,MAAM,CAAC,KAAK,EAAE,QAAQ,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,MAAM,EAAE,QAAQ,CAAC,CAClF,CACF,CAAC;IACJ,CAAC;IA4IM,KAAK,CAAC,2BAA2B,CACtC,KAAsC,EACtC,WAAmB;IACnB,8DAA8D;IAC9D,UAA2C,EAAE;QAE7C,OAAO,IAAI,CAAC,QAAQ,CAAC,QAAQ,CAC3B,oDAAoD,EACpD,OAAO,EACP,IAAI,CAAC,OAAO,CAAC,IAAI,CAAC,IAAI,EAAE,KAAK,EAAE,MAAM,CAAC,KAAK,EAAE,WAAW,CAAC,CAAC,CAC3D,CAAC;IACJ,CAAC;IAED;;;;;;;OAOG;IACK,OAAO,CACb,KAAsC,EACtC,KAAqB,EACrB,OAAwC;QAExC,MAAM,EACJ,OAAO,EAAE,cAAc,EACvB,UAAU,EAAE,iBAAiB,EAC7B,eAAe,GAChB,GAAG,OAAO,KAAK,KAAK,QAAQ;YAC3B,CAAC,CAAC,EAAE,OAAO,EAAE,KAAK,EAAE,UAAU,EAAE,SAAS,EAAE,eAAe,EAAE,CAAC,CAAgB,EAAE,EAAE,CAAC,CAAC,EAAE;YACrF,CAAC,CAAC,KAAK,CAAC;QAEV,IAAI,iBAAiB,IAAI,iBAAiB,KAAK,2BAA2B,EAAE,CAAC;YAC3E,MAAM,IAAI,KAAK,CACb;gBACE,2DAA2D,iBAAiB,GAAG;gBAC/E,2BAA2B,2BAA2B,GAAG;gBACzD,mEAAmE;aACpE,CAAC,IAAI,CAAC,GAAG,CAAC,CACZ,CAAC;QACJ,CAAC;QAED,OAAO,IAAI,CAAC,mBAAmB,CAC7B,CAAC,WAAW,EAAE,EAAE;YACd,MAAM,CAAC,WAAW,EAAE,cAAc,CAAC,GAAG,gBAAgB,CAAC,KAAK,CAAC,CAAC;YAE9D,IAAI,WAAW,KAAK,kBAAkB,EAAE,CAAC;gBACvC,OAAO,IAAI,CAAC,WAAW,CAAC,cAAc,CAAC,eAAe,CAAC,cAAc,EAAE,WAAW,kCAC7E,OAAO,KACV,WAAW;oBACX,cAAc,IACd,CAAC;YACL,CAAC;iBAAM,CAAC;gBACN,OAAO,IAAI,CAAC,WAAW,CAAC,cAAc,CAAC,eAAe,CAAC,cAAc,EAAE,WAAW,kCAC7E,OAAO,KACV,WAAW;oBACX,cAAc,IACd,CAAC;YACL,CAAC;QACH,CAAC,EACD;YACE,cAAc;YACd,OAAO;YACP,eAAe,EAAE,CAAC,MAAM,EAAE,EAAE,CAAC,eAAe,CAAC,4BAA4B,CAAC,MAAM,CAAC,CAAC;SACnF,CACF,CAAC;IACJ,CAAC;IAED;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;OA+CG;IACI,KAAK,CAAC,qBAAqB,CAChC,YAAoB,EACpB,QAAmC;IACnC,8DAA8D;IAC9D,UAAmC,EAAE;QAErC,OAAO,IAAI,CAAC,QAAQ,CAAC,QAAQ,CAC3B,8CAA8C,EAC9C,OAAO,EACP,IAAI,CAAC,QAAQ,CAAC,IAAI,CAAC,IAAI,EAAE,YAAY,EAAE,MAAM,CAAC,MAAM,EAAE,QAAQ,CAAC,CAAC,CACjE,CAAC;IACJ,CAAC;IAED;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;OA+CG;IACI,KAAK,CAAC,4BAA4B,CACvC,YAAoB,EACpB,WAAmB;IACnB,8DAA8D;IAC9D,UAAmC,EAAE;QAErC,OAAO,IAAI,CAAC,QAAQ,CAAC,QAAQ,CAC3B,qDAAqD,EACrD,OAAO,EACP,IAAI,CAAC,QAAQ,CAAC,IAAI,CAAC,IAAI,EAAE,YAAY,EAAE,MAAM,CAAC,KAAK,EAAE,WAAW,CAAC,CAAC,CACnE,CAAC;IACJ,CAAC;IAED;;;;;;OAMG;IACK,QAAQ,CACd,YAAoB,EACpB,KAAqB,EACrB,OAAgC;QAEhC,OAAO,IAAI,CAAC,mBAAmB,CAC7B,KAAK,EAAE,WAAW,EAAE,EAAE;YACpB,MAAM,CAAC,WAAW,EAAE,eAAe,CAAC,GAAG,gBAAgB,CAAC,KAAK,CAAC,CAAC;YAE/D,IAAI,WAAW,KAAK,kBAAkB,EAAE,CAAC;gBACvC,OAAO,IAAI,CAAC,WAAW,CAAC,mBAAmB,CAAC,gBAAgB,CAC1D,YAAY,EACZ,WAAkB,kCAEb,OAAO,KACV,WAAW;oBACX,eAAe,IAElB,CAAC;YACJ,CAAC;iBAAM,CAAC;gBACN,OAAO,IAAI,CAAC,WAAW,CAAC,mBAAmB,CAAC,gBAAgB,CAC1D,YAAY,EACZ,WAAkB,kCAEb,OAAO,KACV,WAAW;oBACX,eAAe,IAElB,CAAC;YACJ,CAAC;QACH,CAAC,EACD;YACE,cAAc,EAAE,YAAY;YAC5B,OAA
O;YACP,eAAe,EAAE,4BAA4B;SAC9C,CACF,CAAC;IACJ,CAAC;IAED;;;;;;;;OAQG;IACK,KAAK,CAAC,mBAAmB,CAC/B,cAE4C,EAC5C,UAA+C;QAE/C,MAAM,EAAE,UAAU,EAAE,GAAG,UAAU,CAAC,OAAO,CAAC;QAE1C,kFAAkF;QAClF,+CAA+C;QAE/C,MAAM,gBAAgB,GAAG,CACvB,GAAqB,EACrB,iBAAyB,EACQ,EAAE,CACnC,IAAI,CAAC,QAAQ,CAAC,QAAQ,CACpB,8DAA8D,EAC9D,UAAU,CAAC,OAAO,EAClB,CAAC,YAAY,EAAE,EAAE,CACf,IAAI,CAAC,WAAW,CAAC,oBAAoB,CACnC;YACE,OAAO,gCACL,UAAU,EAAE,KAAK,EAAE,WAAW,EAAE,GAAG,IAAI,EAAE,EAAE;;oBACzC,mDAAmD;oBACnD,MAAM,gBAAgB,GAAG,WAAW,CAAC,OAAO,CAAC,GAAG,CAAC,aAAa,CAAC,CAAC;oBAChE,oGAAoG;oBACpG,QAAQ;oBACR,IAAI,gBAAgB,EAAE,CAAC;wBACrB,MAAM,YAAY,GAAG,MAAM,CAAC,gBAAgB,CAAC,GAAG,IAAI,CAAC;wBACrD,IAAI,CAAC,MAAM,CAAC,KAAK,CAAC,YAAY,CAAC,EAAE,CAAC;4BAChC,GAAG,CAAC,WAAW,CAAC,YAAY,CAAC,CAAC;wBAChC,CAAC;6BAAM,CAAC;4BACN,GAAG,CAAC,WAAW,CAAC,IAAI,CAAC,KAAK,CAAC,gBAAgB,CAAC,GAAG,IAAI,CAAC,GAAG,EAAE,CAAC,CAAC;wBAC7D,CAAC;oBACH,CAAC;yBAAM,CAAC;wBACN,GAAG,CAAC,WAAW,CAAC,SAAS,CAAC,CAAC;oBAC7B,CAAC;oBAED,wDAAwD;oBACxD,OAAO,MAAA,YAAY,CAAC,UAAU,6DAAG,WAAW,EAAE,GAAG,IAAI,CAAC,CAAC;gBACzD,CAAC,IACE,YAAY;gBACf,uGAAuG;gBACvG,2EAA2E;gBAC3E,WAAW,EAAE,GAAG,CAAC,WAAW,GAC7B;SACF,EACD;YACE,IAAI,EAAE,iBAAiB;YACvB,UAAU,EAAE,KAAK;YACjB,SAAS,EAAE;gBACT,GAAG,EAAE;oBACH,UAAU,EAAE,OAAO,CAAC,sBAAsB;iBAC3C;gBACD,OAAO,EAAE;oBACP,UAAU,EAAE,OAAO,CAAC,aAAa;iBAClC;aACF;YACD,6DAA6D;YAC7D,gBAAgB,EAAE,CAAC,OAAO,CAAC;YAC3B,UAAU,EAAE,UAAU;SACvB,CACF,CACJ,CAAC;QAEJ,MAAM,MAAM;QACV,0DAA0D;QAC1D,UAAU,KAAK,SAAS;YACtB,CAAC,CAAC,KAAK,EAAE,GAAqB,EAAE,EAAE,CAC9B,IAAI,CAAC,QAAQ,CAAC,QAAQ,CACpB,oDAAoD,EACpD,UAAU,CAAC,OAAO,EAClB,KAAK,IAAI,EAAE;gBACT,MAAM,EAAE,aAAa,EAAE,iBAAiB,EAAE,OAAO,EAAE,GAAG,IAAI,CAAC,KAAK,CAAC,UAAU,CAI1E,CAAC;gBAEF,IAAI,CAAC,aAAa,IAAI,aAAa,KAAK,WAAW,EAAE,CAAC;oBACpD,MAAM,IAAI,KAAK,CACb;wBACE,sFAAsF;wBACtF,0BAA0B,aAAa,gBAAgB,WAAW,KAAK;qBACxE,CAAC,IAAI,CAAC,GAAG,CAAC,CACZ,CAAC;gBACJ,CAAC;gBAED,MAAM,MAAM,GAAG,MAAM,gBAAgB,CAAC,GAAG,EAAE,iBAAiB,CAAC,CAAC;gBAE9D,OAAO,oCAAoC,CACzC,UAAU,EACV,OAAO,EACP,iBAAiB,EACjB,MAAM,CACP,CAAC;YACJ,CAAC,CACF;YACL,CAAC,CAAC,iEAAiE;gBACjE,KAAK,EAAE,GAAqB,EAAE,EAAE,CAC9B,IAAI,CAAC,QAAQ,CAAC,QAAQ,CACpB,mDAAmD,EACnD,UAAU,CAAC,OAAO,EAClB,KAAK,IAAI,EAAE;oBACT,MAAM,EAAE,iBAAiB,EAAE,GAAG,MAAM,cAAc,CAAC,GAAG,CAAC,WAAW,CAAC,CAAC;oBAEpE,IAAI,iBAAiB,KAAK,SAAS,EAAE,CAAC;wBACpC,MAAM,IAAI,KAAK,CACb,qEAAqE,CACtE,CAAC;oBACJ,CAAC;oBAED,MAAM,MAAM,GAAG,MAAM,gBAAgB,CAAC,GAAG,EAAE,iBAAiB,CAAC,CAAC;oBAE9D,OAAO,oCAAoC,CACzC,UAAU,EACV,UAAU,CAAC,cAAc,EACzB,iBAAiB,EACjB,MAAM,CACP,CAAC;gBACJ,CAAC,CACF,CAAC;QAEV,MAAM,MAAM,GAAG,MAAM,GAAG,CACtB;YACE,IAAI,EAAE,MAAM;YACZ,IAAI,EAAE,KAAK,EAAE,GAAG,EAAE,EAAE,iBAAiB,EAAE,OAAO,EAAE,EAAE,EAAE,CAClD,IAAI,CAAC,QAAQ,CAAC,QAAQ,CACpB,kDAAkD,EAClD,EAAE,EACF,KAAK,IAAI,EAAE;gBACT,MAAM,MAAM,GAAG,MAAM,gBAAgB,CAAC,GAAG,EAAE,iBAAiB,CAAC,CAAC;gBAE9D,OAAO,oCAAoC,CACzC,UAAU,EACV,OAAO,EACP,iBAAiB,EACjB,MAAM,CACP,CAAC;YACJ,CAAC,CACF;YACH,SAAS,EAAE,CAAC,EAAE,iBAAiB,EAAE,OAAO,EAAE,EAAE,EAAE,CAC5C,IAAI,CAAC,SAAS,CAAC,EAAE,aAAa,EAAE,WAAW,EAAE,EAAE,EAAE,OAAO,EAAE,iBAAiB,EAAE,CAAC;SACjF,EACD,UAAU,CAAC,OAAO,CAAC,kBAAkB,EACrC,UAAU,CAAC,OAAO,CAAC,WAAW,CAC/B,CAAC;QAEF,IAAI,UAAU,CAAC,OAAO,CAAC,UAAU,KAAK,SAAS,EAAE,CAAC;YAChD,MAAM,CAAC,UAAU,CAAC,UAAU,CAAC,OAAO,CAAC,UAAU,CAAC,CAAC;YACjD,UAAU,CAAC,OAAO,CAAC,UAAU,CAAC,MAAM,CAAC,iBAAiB,EAAE,CAAC,CAAC;QAC5D,CAAC;QAED,OAAO,MAAM,CAAC;IAChB,CAAC;CAGF;AAED;;;GAGG;AACH,SAAS,gBAAgB,CACvB,KAAqB;IAIrB,QAAQ,KAAK,CAAC,IAAI,EAAE,CAAC;QACnB,KAAK,MAAM;YACT,OAAO,CAAC,0BAA0B,EAAE,KAAK,CAAC,IAAI,CAAC,CAAC;QAClD,KAAK,KAAK;YACR,OAAO,CAAC,kBAAkB,EAAE,EAAE,SAAS,EAAE,KAAK,CAAC,GAAG,EAAE,CAAC,CAAC;QACxD,KAAK,QAAQ;YACX,OAAO,CAAC,kBAAkB,EAAE,EAAE,YAAY
,EAAE,KAAK,CAAC,MAAM,EAAE,CAAC,CAAC;QAC9D,OAAO,CAAC,CAAC,CAAC;YACR,MAAM,SAAS,GAAU,KAAK,CAAC;YAC/B,MAAM,IAAI,KAAK,CAAC,wCAAwC,SAAS,EAAE,CAAC,CAAC;QACvE,CAAC;IACH,CAAC;AACH,CAAC;AAED;;GAEG;AACH,uFAAuF;AAEvF,SAAS,MAAM,CACb,IAAO,EACP,KAA2F;IAE3F,OAAO;QACL,IAAI;QACJ,CAAC,IAAI,CAAC,EAAE,KAAK;KACe,CAAC;AACjC,CAAC","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT License.\n\nimport type { KeyCredential, TokenCredential } from \"@azure/core-auth\";\nimport { createTracingClient } from \"@azure/core-tracing\";\nimport type { TracingClient } from \"@azure/core-tracing\";\nimport { FORM_RECOGNIZER_API_VERSION, SDK_VERSION } from \"./constants.js\";\nimport type {\n AnalyzeDocumentRequest,\n AnalyzeResultOperation,\n GeneratedClient,\n} from \"./generated/index.js\";\nimport { accept1 } from \"./generated/models/parameters.js\";\nimport type {\n AnalysisOperationDefinition,\n AnalysisPoller,\n AnalyzeResult,\n DocumentAnalysisPollOperationState,\n FormRecognizerRequestBody,\n} from \"./lro/analysis.js\";\nimport {\n toAnalyzeResultFromGenerated,\n toDocumentAnalysisPollOperationState,\n} from \"./lro/analysis.js\";\nimport type { OperationContext } from \"./lro/util/poller.js\";\nimport { lro } from \"./lro/util/poller.js\";\nimport type { AnalyzeDocumentOptions } from \"./options/AnalyzeDocumentOptions.js\";\nimport type { DocumentAnalysisClientOptions } from \"./options/FormRecognizerClientOptions.js\";\nimport type { DocumentModel } from \"./documentModel.js\";\nimport { makeServiceClient, Mappers, SERIALIZER } from \"./util.js\";\nimport type { AbortSignalLike } from \"@azure/abort-controller\";\nimport type { ClassifyDocumentOptions } from \"./options/ClassifyDocumentOptions.js\";\n\n/**\n * A client for interacting with the Form Recognizer service's analysis features.\n *\n * ### Examples:\n *\n * The Form Recognizer service and clients support two means of authentication:\n *\n * #### Azure Active Directory\n *\n * ```ts snippet:ReadmeSampleCreateClient_TokenCredential\n * import { DefaultAzureCredential } from \"@azure/identity\";\n * import { DocumentAnalysisClient } from \"@azure/ai-form-recognizer\";\n *\n * const credential = new DefaultAzureCredential();\n * const client = new DocumentAnalysisClient(\n * \"https://<resource name>.cognitiveservices.azure.com\",\n * credential,\n * );\n * ```\n *\n * #### API Key (Subscription Key)\n *\n * ```ts snippet:ReadmeSampleCreateClient_KeyCredential\n * import { AzureKeyCredential, DocumentAnalysisClient } from \"@azure/ai-form-recognizer\";\n *\n * const credential = new AzureKeyCredential(\"<API key>\");\n * const client = new DocumentAnalysisClient(\n * \"https://<resource name>.cognitiveservices.azure.com\",\n * credential,\n * );\n * ```\n */\nexport class DocumentAnalysisClient {\n private _restClient: GeneratedClient;\n private _tracing: TracingClient;\n\n /**\n * Create a `DocumentAnalysisClient` instance from a resource endpoint and a an Azure Identity `TokenCredential`.\n *\n * See the [`@azure/identity`](https://npmjs.com/package/\\@azure/identity) package for more information about\n * authenticating with Azure Active Directory.\n *\n * ### Example:\n *\n * ```ts snippet:ReadmeSampleCreateClient_TokenCredential\n * import { DefaultAzureCredential } from \"@azure/identity\";\n * import { DocumentAnalysisClient } from \"@azure/ai-form-recognizer\";\n *\n * const credential = new DefaultAzureCredential();\n * const client = new DocumentAnalysisClient(\n * \"https://<resource 
name>.cognitiveservices.azure.com\",\n * credential,\n * );\n * ```\n *\n * @param endpoint - the endpoint URL of an Azure Cognitive Services instance\n * @param credential - a TokenCredential instance from the `@azure/identity` package\n * @param options - optional settings for configuring all methods in the client\n */\n public constructor(\n endpoint: string,\n credential: TokenCredential,\n options?: DocumentAnalysisClientOptions,\n );\n /**\n * Create a `DocumentAnalysisClient` instance from a resource endpoint and a static API key (`KeyCredential`),\n *\n * ### Example:\n *\n * ```ts snippet:ReadmeSampleCreateClient_KeyCredential\n * import { AzureKeyCredential, DocumentAnalysisClient } from \"@azure/ai-form-recognizer\";\n *\n * const credential = new AzureKeyCredential(\"<API key>\");\n * const client = new DocumentAnalysisClient(\n * \"https://<resource name>.cognitiveservices.azure.com\",\n * credential,\n * );\n * ```\n *\n * @param endpoint - the endpoint URL of an Azure Cognitive Services instance\n * @param credential - a KeyCredential containing the Cognitive Services instance subscription key\n * @param options - optional settings for configuring all methods in the client\n */\n public constructor(\n endpoint: string,\n credential: KeyCredential,\n options?: DocumentAnalysisClientOptions,\n );\n /**\n * @hidden\n */\n public constructor(\n endpoint: string,\n credential: KeyCredential | TokenCredential,\n options?: DocumentAnalysisClientOptions,\n );\n public constructor(\n endpoint: string,\n credential: KeyCredential | TokenCredential,\n options: DocumentAnalysisClientOptions = {},\n ) {\n this._restClient = makeServiceClient(endpoint, credential, options);\n this._tracing = createTracingClient({\n packageName: \"@azure/ai-form-recognizer\",\n packageVersion: SDK_VERSION,\n namespace: \"Microsoft.CognitiveServices\",\n });\n }\n\n // #region Analysis\n\n /**\n * Extract data from an input using a model given by its unique ID.\n *\n * This operation supports custom as well as prebuilt models. For example, to use the prebuilt invoice model, provide\n * the model ID \"prebuilt-invoice\", or to use the simpler prebuilt layout model, provide the model ID\n * \"prebuilt-layout\".\n *\n * The fields produced in the `AnalyzeResult` depend on the model that is used for analysis, and the values in any\n * extracted documents' fields depend on the document types in the model (if any) and their corresponding field\n * schemas.\n *\n * ### Examples\n *\n * This method supports streamable request bodies ({@link FormRecognizerRequestBody}) such as Node.JS `ReadableStream`\n * objects, browser `Blob`s, and `ArrayBuffer`s. 
The contents of the body will be uploaded to the service for analysis.\n *\n * ```ts snippet:ReadmeSamplePrebuiltReceipt\n * import { DefaultAzureCredential } from \"@azure/identity\";\n * import { DocumentAnalysisClient } from \"@azure/ai-form-recognizer\";\n * import { createReadStream } from \"node:fs\";\n * import { PrebuiltReceiptModel } from \"../samples-dev/prebuilt/prebuilt-receipt.js\";\n *\n * const credential = new DefaultAzureCredential();\n * const client = new DocumentAnalysisClient(\n * \"https://<resource name>.cognitiveservices.azure.com\",\n * credential,\n * );\n *\n * const path = \"<path to a document>\";\n * const readStream = createReadStream(path);\n *\n * // The PrebuiltReceiptModel `DocumentModel` instance encodes both the model ID and a stronger return type for the operation\n * const poller = await client.beginAnalyzeDocument(PrebuiltReceiptModel, readStream, {\n * onProgress: ({ status }) => {\n * console.log(`status: ${status}`);\n * },\n * });\n *\n * const {\n * documents: [receiptDocument],\n * } = await poller.pollUntilDone();\n *\n * // The fields of the document constitute the extracted receipt data.\n * const receipt = receiptDocument.fields;\n *\n * if (receipt === undefined) {\n * throw new Error(\"Expected at least one receipt in analysis result.\");\n * }\n *\n * console.log(`Receipt data (${receiptDocument.docType})`);\n * console.log(\" Merchant Name:\", receipt.merchantName?.value);\n *\n * // The items of the receipt are an example of a `DocumentArrayValue`\n * if (receipt.items !== undefined) {\n * console.log(\"Items:\");\n * for (const { properties: item } of receipt.items.values) {\n * console.log(\"- Description:\", item.description?.value);\n * console.log(\" Total Price:\", item.totalPrice?.value);\n * }\n * }\n *\n * console.log(\" Total:\", receipt.total?.value);\n * ```\n *\n *\n * @param modelId - the unique ID (name) of the model within this client's resource\n * @param document - a {@link FormRecognizerRequestBody} that will be uploaded with the request\n * @param options - optional settings for the analysis operation and poller\n * @returns a long-running operation (poller) that will eventually produce an `AnalyzeResult`\n */\n public async beginAnalyzeDocument(\n modelId: string,\n document: FormRecognizerRequestBody,\n // eslint-disable-next-line @azure/azure-sdk/ts-naming-options\n options?: AnalyzeDocumentOptions,\n ): Promise<AnalysisPoller>;\n /**\n * Extract data from an input using a model that has a known, strongly-typed document schema (a {@link DocumentModel}).\n *\n * The fields produced in the `AnalyzeResult` depend on the model that is used for analysis. In TypeScript, the type\n * of the result for this method overload is inferred from the type of the input `DocumentModel`.\n *\n * ### Examples\n *\n * This method supports streamable request bodies ({@link FormRecognizerRequestBody}) such as Node.JS `ReadableStream`\n * objects, browser `Blob`s, and `ArrayBuffer`s. The contents of the body will be uploaded to the service for analysis.\n *\n * If the input provided is a string, it will be treated as a URL to the location of a document to be analyzed. See the\n * {@link beginAnalyzeDocumentFromUrl} method for more information. 
Use of that method is preferred when using URLs,\n * and URL support is only provided in this method for backwards compatibility.\n *\n * ```ts snippet:ReadmeSamplePrebuiltReceipt\n * import { DefaultAzureCredential } from \"@azure/identity\";\n * import { DocumentAnalysisClient } from \"@azure/ai-form-recognizer\";\n * import { createReadStream } from \"node:fs\";\n * import { PrebuiltReceiptModel } from \"../samples-dev/prebuilt/prebuilt-receipt.js\";\n *\n * const credential = new DefaultAzureCredential();\n * const client = new DocumentAnalysisClient(\n * \"https://<resource name>.cognitiveservices.azure.com\",\n * credential,\n * );\n *\n * const path = \"<path to a document>\";\n * const readStream = createReadStream(path);\n *\n * // The PrebuiltReceiptModel `DocumentModel` instance encodes both the model ID and a stronger return type for the operation\n * const poller = await client.beginAnalyzeDocument(PrebuiltReceiptModel, readStream, {\n * onProgress: ({ status }) => {\n * console.log(`status: ${status}`);\n * },\n * });\n *\n * const {\n * documents: [receiptDocument],\n * } = await poller.pollUntilDone();\n *\n * // The fields of the document constitute the extracted receipt data.\n * const receipt = receiptDocument.fields;\n *\n * if (receipt === undefined) {\n * throw new Error(\"Expected at least one receipt in analysis result.\");\n * }\n *\n * console.log(`Receipt data (${receiptDocument.docType})`);\n * console.log(\" Merchant Name:\", receipt.merchantName?.value);\n *\n * // The items of the receipt are an example of a `DocumentArrayValue`\n * if (receipt.items !== undefined) {\n * console.log(\"Items:\");\n * for (const { properties: item } of receipt.items.values) {\n * console.log(\"- Description:\", item.description?.value);\n * console.log(\" Total Price:\", item.totalPrice?.value);\n * }\n * }\n *\n * console.log(\" Total:\", receipt.total?.value);\n * ```\n *\n * @param model - a {@link DocumentModel} representing the model to use for analysis and the expected output type\n * @param document - a {@link FormRecognizerRequestBody} that will be uploaded with the request\n * @param options - optional settings for the analysis operation and poller\n * @returns a long-running operation (poller) that will eventually produce an `AnalyzeResult` with documents that have\n * the result type associated with the input model\n */\n public async beginAnalyzeDocument<Result>(\n model: DocumentModel<Result>,\n document: FormRecognizerRequestBody,\n // eslint-disable-next-line @azure/azure-sdk/ts-naming-options\n options?: AnalyzeDocumentOptions<Result>,\n ): Promise<AnalysisPoller<Result>>;\n public async beginAnalyzeDocument(\n model: string | DocumentModel<unknown>,\n document: FormRecognizerRequestBody,\n // eslint-disable-next-line @azure/azure-sdk/ts-naming-options\n options: AnalyzeDocumentOptions<unknown> = {},\n ): Promise<AnalysisPoller<unknown>> {\n return this._tracing.withSpan(\n \"DocumentAnalysisClient.beginAnalyzeDocument\",\n options,\n // In the first version of the SDK, the document input was treated as a URL if it was a string, and we preserve\n // this behavior to avoid introducing a breaking change.\n this.analyze.bind(\n this,\n model,\n typeof document === \"string\" ? source(\"url\", document) : source(\"body\", document),\n ),\n );\n }\n\n /**\n * Extract data from an input using a model given by its unique ID.\n *\n * This operation supports custom as well as prebuilt models. 
For example, to use the prebuilt invoice model, provide\n * the model ID \"prebuilt-invoice\", or to use the simpler prebuilt layout model, provide the model ID\n * \"prebuilt-layout\".\n *\n * The fields produced in the `AnalyzeResult` depend on the model that is used for analysis, and the values in any\n * extracted documents' fields depend on the document types in the model (if any) and their corresponding field\n * schemas.\n *\n * ### Examples\n *\n * This method supports extracting data from a file at a given URL. The Form Recognizer service will attempt to\n * download a file using the submitted URL, so the URL must be accessible from the public internet. For example, a SAS\n * token can be used to grant read access to a blob in Azure Storage, and the service will use the SAS-encoded URL to\n * request the file.\n *\n * ```ts snippet:ReadmeSampleReceiptModelID_URL\n * import { DefaultAzureCredential } from \"@azure/identity\";\n * import {\n * DocumentAnalysisClient,\n * DocumentStringField,\n * DocumentArrayField,\n * DocumentObjectField,\n * } from \"@azure/ai-form-recognizer\";\n *\n * const credential = new DefaultAzureCredential();\n * const client = new DocumentAnalysisClient(\n * \"https://<resource name>.cognitiveservices.azure.com\",\n * credential,\n * );\n *\n * const poller = await client.beginAnalyzeDocumentFromUrl(\n * \"prebuilt-receipt\",\n * // The Document Intelligence service will access the following URL to a receipt image and extract data from it\n * \"https://raw.githubusercontent.com/Azure/azure-sdk-for-js/main/sdk/formrecognizer/ai-form-recognizer/assets/receipt/contoso-receipt.png\",\n * );\n * poller.onProgress((state) => console.log(\"Operation:\", state.modelId, state.status));\n *\n * const { documents } = await poller.pollUntilDone();\n *\n * const result = documents && documents[0];\n * if (result) {\n * const receipt = result.fields;\n * console.log(\"=== Receipt Information ===\");\n * console.log(\"Type:\", result.docType);\n * console.log(\"Merchant:\", (receipt[\"MerchantName\"] as DocumentStringField).value);\n *\n * console.log(\"Items:\");\n * for (const { properties: item } of ((receipt[\"Items\"] as DocumentArrayField).values ||\n * []) as DocumentObjectField[]) {\n * console.log(\"- Description:\", (item[\"Description\"] as DocumentStringField).value);\n * console.log(\" Total Price:\", (item[\"TotalPrice\"] as DocumentStringField).value);\n * }\n * } else {\n * throw new Error(\"Expected at least one receipt in the result.\");\n * }\n * ```\n *\n * @param modelId - the unique ID (name) of the model within this client's resource\n * @param documentUrl - a URL (string) to an input document accessible from the public internet\n * @param options - optional settings for the analysis operation and poller\n * @returns a long-running operation (poller) that will eventually produce an `AnalyzeResult`\n */\n public async beginAnalyzeDocumentFromUrl(\n modelId: string,\n documentUrl: string,\n // eslint-disable-next-line @azure/azure-sdk/ts-naming-options\n options?: AnalyzeDocumentOptions,\n ): Promise<AnalysisPoller>;\n /**\n * Extract data from an input using a model that has a known, strongly-typed document schema (a {@link DocumentModel}).\n *\n * The fields produced in the `AnalyzeResult` depend on the model that is used for analysis. 
In TypeScript, the type\n * of the result for this method overload is inferred from the type of the input `DocumentModel`.\n *\n * ### Examples\n *\n * This method supports extracting data from a file at a given URL. The Form Recognizer service will attempt to\n * download a file using the submitted URL, so the URL must be accessible from the public internet. For example, a SAS\n * token can be used to grant read access to a blob in Azure Storage, and the service will use the SAS-encoded URL to\n * request the file.\n *\n * ```ts snippet:ReadmeSampleReceiptPrebuilt_URL\n * import { DefaultAzureCredential } from \"@azure/identity\";\n * import { DocumentAnalysisClient } from \"@azure/ai-form-recognizer\";\n * import { PrebuiltReceiptModel } from \"../samples-dev/prebuilt/prebuilt-receipt.js\";\n *\n * const credential = new DefaultAzureCredential();\n * const client = new DocumentAnalysisClient(\n * \"https://<resource name>.cognitiveservices.azure.com\",\n * credential,\n * );\n *\n * const poller = await client.beginAnalyzeDocumentFromUrl(\n * PrebuiltReceiptModel,\n * // The Document Intelligence service will access the following URL to a receipt image and extract data from it\n * \"https://raw.githubusercontent.com/Azure/azure-sdk-for-js/main/sdk/formrecognizer/ai-form-recognizer/assets/receipt/contoso-receipt.png\",\n * );\n *\n * const {\n * documents: [document],\n * } = await poller.pollUntilDone();\n *\n * // Use of PrebuiltModels.Receipt above (rather than the raw model ID), as it adds strong typing of the model's output\n * if (document) {\n * const { merchantName, items, total } = document.fields;\n *\n * console.log(\"=== Receipt Information ===\");\n * console.log(\"Type:\", document.docType);\n * console.log(\"Merchant:\", merchantName && merchantName.value);\n *\n * console.log(\"Items:\");\n * for (const item of (items && items.values) || []) {\n * const { description, totalPrice } = item.properties;\n *\n * console.log(\"- Description:\", description && description.value);\n * console.log(\" Total Price:\", totalPrice && totalPrice.value);\n * }\n *\n * console.log(\"Total:\", total && total.value);\n * } else {\n * throw new Error(\"Expected at least one receipt in the result.\");\n * }\n * ```\n *\n * @param model - a {@link DocumentModel} representing the model to use for analysis and the expected output type\n * @param documentUrl - a URL (string) to an input document accessible from the public internet\n * @param options - optional settings for the analysis operation and poller\n * @returns a long-running operation (poller) that will eventually produce an `AnalyzeResult`\n */\n public async beginAnalyzeDocumentFromUrl<Result>(\n model: DocumentModel<Result>,\n documentUrl: string,\n // eslint-disable-next-line @azure/azure-sdk/ts-naming-options\n options?: AnalyzeDocumentOptions<Result>,\n ): Promise<AnalysisPoller<Result>>;\n public async beginAnalyzeDocumentFromUrl(\n model: string | DocumentModel<unknown>,\n documentUrl: string,\n // eslint-disable-next-line @azure/azure-sdk/ts-naming-options\n options: AnalyzeDocumentOptions<unknown> = {},\n ): Promise<AnalysisPoller<unknown>> {\n return this._tracing.withSpan(\n \"DocumentAnalysisClient.beginAnalyzeDocumentFromUrl\",\n options,\n this.analyze.bind(this, model, source(\"url\", documentUrl)),\n );\n }\n\n /**\n * A helper method for running analysis polymorphically.\n *\n * @param model - the model ID or DocumentModel to use for analysis\n * @param input - the string URL or request body to use\n * @param options - 
analysis options\n * @returns - an analysis poller\n */\n private analyze(\n model: string | DocumentModel<unknown>,\n input: DocumentSource,\n options: AnalyzeDocumentOptions<unknown>,\n ): Promise<AnalysisPoller<unknown>> {\n const {\n modelId: initialModelId,\n apiVersion: requestApiVersion,\n transformResult,\n } = typeof model === \"string\"\n ? { modelId: model, apiVersion: undefined, transformResult: (v: AnalyzeResult) => v }\n : model;\n\n if (requestApiVersion && requestApiVersion !== FORM_RECOGNIZER_API_VERSION) {\n throw new Error(\n [\n `API Version mismatch: the provided model wants version: ${requestApiVersion},`,\n `but the client is using ${FORM_RECOGNIZER_API_VERSION}.`,\n \"The API version of the model must match the client's API version.\",\n ].join(\" \"),\n );\n }\n\n return this.createUnifiedPoller<unknown>(\n (abortSignal) => {\n const [contentType, analyzeRequest] = toAnalyzeRequest(input);\n\n if (contentType === \"application/json\") {\n return this._restClient.documentModels.analyzeDocument(initialModelId, contentType, {\n ...options,\n abortSignal,\n analyzeRequest,\n });\n } else {\n return this._restClient.documentModels.analyzeDocument(initialModelId, contentType, {\n ...options,\n abortSignal,\n analyzeRequest,\n });\n }\n },\n {\n initialModelId,\n options,\n transformResult: (result) => transformResult(toAnalyzeResultFromGenerated(result)),\n },\n );\n }\n\n /**\n * Classify a document using a custom classifier given by its ID.\n *\n * This method produces a long-running operation (poller) that will eventually produce an `AnalyzeResult`. This is the\n * same type as `beginAnalyzeDocument` and `beginAnalyzeDocumentFromUrl`, but the result will only contain a small\n * subset of its fields. Only the `documents` field and `pages` field will be populated, and only minimal page\n * information will be returned. The `documents` field will contain information about all the identified documents and\n * the `docType` that they were classified as.\n *\n * ### Example\n *\n * This method supports streamable request bodies ({@link FormRecognizerRequestBody}) such as Node.JS `ReadableStream`\n * objects, browser `Blob`s, and `ArrayBuffer`s. 
The contents of the body will be uploaded to the service for analysis.\n *\n * ```ts snippet:ReadmeSampleClassifyDocument_File\n * import { DefaultAzureCredential } from \"@azure/identity\";\n * import { DocumentAnalysisClient } from \"@azure/ai-form-recognizer\";\n * import { createReadStream } from \"node:fs\";\n *\n * const credential = new DefaultAzureCredential();\n * const client = new DocumentAnalysisClient(\n * \"https://<resource name>.cognitiveservices.azure.com\",\n * credential,\n * );\n *\n * const path = \"<path to a document>\";\n * const readStream = createReadStream(path);\n *\n * const poller = await client.beginClassifyDocument(\"<classifier id>\", readStream);\n *\n * const result = await poller.pollUntilDone();\n *\n * if (result?.documents?.length === 0) {\n * throw new Error(\"Failed to extract any documents.\");\n * }\n *\n * for (const document of result.documents) {\n * console.log(\n * `Extracted a document with type '${document.docType}' on page ${document.boundingRegions?.[0].pageNumber} (confidence: ${document.confidence})`,\n * );\n * }\n * ```\n *\n * @param classifierId - the ID of the custom classifier to use for analysis\n * @param document - the document to classify\n * @param options - options for the classification operation\n * @returns a long-running operation (poller) that will eventually produce an `AnalyzeResult`\n */\n public async beginClassifyDocument(\n classifierId: string,\n document: FormRecognizerRequestBody,\n // eslint-disable-next-line @azure/azure-sdk/ts-naming-options\n options: ClassifyDocumentOptions = {},\n ): Promise<AnalysisPoller> {\n return this._tracing.withSpan(\n \"DocumentAnalysisClient.beginClassifyDocument\",\n options,\n this.classify.bind(this, classifierId, source(\"body\", document)),\n );\n }\n\n /**\n * Classify a document from a URL using a custom classifier given by its ID.\n *\n * This method produces a long-running operation (poller) that will eventually produce an `AnalyzeResult`. This is the\n * same type as `beginAnalyzeDocument` and `beginAnalyzeDocumentFromUrl`, but the result will only contain a small\n * subset of its fields. Only the `documents` field and `pages` field will be populated, and only minimal page\n * information will be returned. The `documents` field will contain information about all the identified documents and\n * the `docType` that they were classified as.\n *\n * ### Example\n *\n * This method supports extracting data from a file at a given URL. The Form Recognizer service will attempt to\n * download a file using the submitted URL, so the URL must be accessible from the public internet. 
For example, a SAS\n * token can be used to grant read access to a blob in Azure Storage, and the service will use the SAS-encoded URL to\n * request the file.\n *\n * ```ts snippet:ReadmeSampleClassifyDocument\n * import { DefaultAzureCredential } from \"@azure/identity\";\n * import { DocumentAnalysisClient } from \"@azure/ai-form-recognizer\";\n *\n * const credential = new DefaultAzureCredential();\n * const client = new DocumentAnalysisClient(\n * \"https://<resource name>.cognitiveservices.azure.com\",\n * credential,\n * );\n *\n * const documentUrl =\n * \"https://raw.githubusercontent.com/Azure/azure-sdk-for-js/main/sdk/formrecognizer/ai-form-recognizer/assets/invoice/Invoice_1.pdf\";\n *\n * const poller = await client.beginClassifyDocumentFromUrl(\"<classifier id>\", documentUrl);\n *\n * const result = await poller.pollUntilDone();\n *\n * if (result?.documents?.length === 0) {\n * throw new Error(\"Failed to extract any documents.\");\n * }\n *\n * for (const document of result.documents) {\n * console.log(\n * `Extracted a document with type '${document.docType}' on page ${document.boundingRegions?.[0].pageNumber} (confidence: ${document.confidence})`,\n * );\n * }\n * ```\n * @param classifierId - the ID of the custom classifier to use for analysis\n * @param documentUrl - the URL of the document to classify\n * @param options -\n * @returns\n */\n public async beginClassifyDocumentFromUrl(\n classifierId: string,\n documentUrl: string,\n // eslint-disable-next-line @azure/azure-sdk/ts-naming-options\n options: ClassifyDocumentOptions = {},\n ): Promise<AnalysisPoller> {\n return this._tracing.withSpan(\n \"DocumentAnalysisClient.beginClassifyDocumentFromUrl\",\n options,\n this.classify.bind(this, classifierId, source(\"url\", documentUrl)),\n );\n }\n\n /**\n * A helper method for running classification polymorphically.\n * @param classifierId - the ID of the classifier to use\n * @param input - the string URL or request body to use\n * @param options - analysis options\n * @returns an analysis poller\n */\n private classify(\n classifierId: string,\n input: DocumentSource,\n options: ClassifyDocumentOptions,\n ): Promise<AnalysisPoller> {\n return this.createUnifiedPoller(\n async (abortSignal) => {\n const [contentType, classifyRequest] = toAnalyzeRequest(input);\n\n if (contentType === \"application/json\") {\n return this._restClient.documentClassifiers.classifyDocument(\n classifierId,\n contentType as any,\n {\n ...options,\n abortSignal,\n classifyRequest,\n },\n );\n } else {\n return this._restClient.documentClassifiers.classifyDocument(\n classifierId,\n contentType as any,\n {\n ...options,\n abortSignal,\n classifyRequest,\n },\n );\n }\n },\n {\n initialModelId: classifierId,\n options,\n transformResult: toAnalyzeResultFromGenerated,\n },\n );\n }\n\n /**\n * Create an LRO poller that handles analysis operations.\n *\n * This is the meat of all analysis polling operations.\n *\n * @param startOperation - function that starts the operation and returns the operation location\n * @param definition - operation definition (initial model ID, operation transforms, request options)\n * @returns - an analysis poller that produces the given return types according to the operation spec\n */\n private async createUnifiedPoller<Result>(\n startOperation: (\n abortSignal: AbortSignalLike | undefined,\n ) => Promise<{ operationLocation?: string }>,\n definition: AnalysisOperationDefinition<Result>,\n ): Promise<AnalysisPoller<Result>> {\n const { resumeFrom } = 
definition.options;\n\n // TODO: what should we do if resumeFrom.modelId is different from initialModelId?\n // And what do we do with the redundant input??\n\n const getAnalyzeResult = (\n ctx: OperationContext,\n operationLocation: string,\n ): Promise<AnalyzeResultOperation> =>\n this._tracing.withSpan(\n \"DocumentAnalysisClient.createAnalysisPoller-getAnalyzeResult\",\n definition.options,\n (finalOptions) =>\n this._restClient.sendOperationRequest<AnalyzeResultOperation>(\n {\n options: {\n onResponse: async (rawResponse, ...args) => {\n // Capture the `Retry-After` header if it was sent.\n const retryAfterHeader = rawResponse.headers.get(\"retry-after\");\n // Convert the header value to milliseconds. If the header is not a valid number, then it is an HTTP\n // date.\n if (retryAfterHeader) {\n const retryAfterMs = Number(retryAfterHeader) * 1000;\n if (!Number.isNaN(retryAfterMs)) {\n ctx.updateDelay(retryAfterMs);\n } else {\n ctx.updateDelay(Date.parse(retryAfterHeader) - Date.now());\n }\n } else {\n ctx.updateDelay(undefined);\n }\n\n // Forward the `onResponse` callback if it was provided.\n return finalOptions.onResponse?.(rawResponse, ...args);\n },\n ...finalOptions,\n // We need to pass the abort signal from the context rather than from the options, since the user could\n // poll the LRO with a different AbortSignal than it was instantiated with.\n abortSignal: ctx.abortSignal,\n },\n },\n {\n path: operationLocation,\n httpMethod: \"GET\",\n responses: {\n 200: {\n bodyMapper: Mappers.AnalyzeResultOperation,\n },\n default: {\n bodyMapper: Mappers.ErrorResponse,\n },\n },\n // URL is fully-formed, so we don't need any query parameters\n headerParameters: [accept1],\n serializer: SERIALIZER,\n },\n ),\n );\n\n const toInit =\n // If the user gave us a stored token, we'll poll it again\n resumeFrom !== undefined\n ? 
async (ctx: OperationContext) =>\n this._tracing.withSpan(\n \"DocumentAnalysisClient.createAnalysisPoller-resume\",\n definition.options,\n async () => {\n const { clientVersion, operationLocation, modelId } = JSON.parse(resumeFrom) as {\n clientVersion?: string;\n operationLocation: string;\n modelId: string;\n };\n\n if (!clientVersion || clientVersion !== SDK_VERSION) {\n throw new Error(\n [\n \"Cannot restore poller from a serialized state from a different version of the client\",\n `library (restoreFrom: '${clientVersion}', current: '${SDK_VERSION}').`,\n ].join(\" \"),\n );\n }\n\n const result = await getAnalyzeResult(ctx, operationLocation);\n\n return toDocumentAnalysisPollOperationState(\n definition,\n modelId,\n operationLocation,\n result,\n );\n },\n )\n : // Otherwise, we'll start a new operation from the initialModelId\n async (ctx: OperationContext) =>\n this._tracing.withSpan(\n \"DocumentAnalysisClient.createAnalysisPoller-start\",\n definition.options,\n async () => {\n const { operationLocation } = await startOperation(ctx.abortSignal);\n\n if (operationLocation === undefined) {\n throw new Error(\n \"Unable to start analysis operation: no Operation-Location received.\",\n );\n }\n\n const result = await getAnalyzeResult(ctx, operationLocation);\n\n return toDocumentAnalysisPollOperationState(\n definition,\n definition.initialModelId,\n operationLocation,\n result,\n );\n },\n );\n\n const poller = await lro<Result, DocumentAnalysisPollOperationState<Result>>(\n {\n init: toInit,\n poll: async (ctx, { operationLocation, modelId }) =>\n this._tracing.withSpan(\n \"DocumentAnalysisClient.createAnalysisPoller-poll\",\n {},\n async () => {\n const result = await getAnalyzeResult(ctx, operationLocation);\n\n return toDocumentAnalysisPollOperationState(\n definition,\n modelId,\n operationLocation,\n result,\n );\n },\n ),\n serialize: ({ operationLocation, modelId }) =>\n JSON.stringify({ clientVersion: SDK_VERSION, id: modelId, operationLocation }),\n },\n definition.options.updateIntervalInMs,\n definition.options.abortSignal,\n );\n\n if (definition.options.onProgress !== undefined) {\n poller.onProgress(definition.options.onProgress);\n definition.options.onProgress(poller.getOperationState());\n }\n\n return poller;\n }\n\n // #endregion\n}\n\n/**\n * Produce an appropriate pair of content-type and analyzeRequest value for the analysis request.\n * @internal\n */\nfunction toAnalyzeRequest(\n input: DocumentSource,\n):\n | [\"application/json\", AnalyzeDocumentRequest]\n | [\"application/octet-stream\", FormRecognizerRequestBody] {\n switch (input.kind) {\n case \"body\":\n return [\"application/octet-stream\", input.body];\n case \"url\":\n return [\"application/json\", { urlSource: input.url }];\n case \"base64\":\n return [\"application/json\", { base64Source: input.base64 }];\n default: {\n const __exhaust: never = input;\n throw new Error(`Unreachable 'toAnalyzeRequest' case: ${__exhaust}`);\n }\n }\n}\n\n/**\n * The input to a document analysis operation.\n */\n// type DocumentSource = DocumentBodySource | DocumentUrlSource | DocumentBase64Source;\n\nfunction source<K extends DocumentSource[\"kind\"]>(\n kind: K,\n value: Extract<DocumentSource, { kind: K }>[K & keyof Extract<DocumentSource, { kind: K }>],\n): DocumentSource {\n return {\n kind,\n [kind]: value,\n } as unknown as DocumentSource;\n}\n\n/**\n * The input to a document analysis operation.\n *\n * @internal\n */\ntype DocumentSource = {\n [K in keyof DocumentSourceTypes]: {\n /** The input kind. 
*/\n kind: K;\n } & { [_ in K]: DocumentSourceTypes[K] };\n}[keyof DocumentSourceTypes];\n\n/**\n * A map of input discriminants to concrete input types.\n *\n * @internal\n */\ninterface DocumentSourceTypes {\n /**\n * A document buffer or stream to be uploaded in the request body.\n */\n body: FormRecognizerRequestBody;\n\n /**\n * A URL to a document to be analyzed.\n */\n url: string;\n\n /**\n * The data of a document to be analyzed. This is NOT base64-encoded, but will\n * be base64-encoded by the client before uploading.\n *\n * NOTE: This is never used by the client because it is inefficient compared to direct uploads and does not currently\n * support any features that `body` does not.\n */\n base64: Uint8Array;\n}\n"]}
+
{"version":3,"file":"documentAnalysisClient.js","sourceRoot":"","sources":["../../src/documentAnalysisClient.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAGlC,OAAO,EAAE,mBAAmB,EAAE,MAAM,qBAAqB,CAAC;AAE1D,OAAO,EAAE,2BAA2B,EAAE,WAAW,EAAE,MAAM,gBAAgB,CAAC;AAM1E,OAAO,EAAE,OAAO,EAAE,MAAM,kCAAkC,CAAC;AAQ3D,OAAO,EACL,4BAA4B,EAC5B,oCAAoC,GACrC,MAAM,mBAAmB,CAAC;AAE3B,OAAO,EAAE,GAAG,EAAE,MAAM,sBAAsB,CAAC;AAI3C,OAAO,EAAE,iBAAiB,EAAE,OAAO,EAAE,UAAU,EAAE,MAAM,WAAW,CAAC;AAInE;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;GA+BG;AACH,MAAM,OAAO,sBAAsB;IACzB,WAAW,CAAkB;IAC7B,QAAQ,CAAgB;IA8DhC,YACE,QAAgB,EAChB,UAA2C,EAC3C,UAAyC,EAAE;QAE3C,IAAI,CAAC,WAAW,GAAG,iBAAiB,CAAC,QAAQ,EAAE,UAAU,EAAE,OAAO,CAAC,CAAC;QACpE,IAAI,CAAC,QAAQ,GAAG,mBAAmB,CAAC;YAClC,WAAW,EAAE,2BAA2B;YACxC,cAAc,EAAE,WAAW;YAC3B,SAAS,EAAE,6BAA6B;SACzC,CAAC,CAAC;IACL,CAAC;IA2JM,KAAK,CAAC,oBAAoB,CAC/B,KAAsC,EACtC,QAAmC;IACnC,8DAA8D;IAC9D,UAA2C,EAAE;QAE7C,OAAO,IAAI,CAAC,QAAQ,CAAC,QAAQ,CAC3B,6CAA6C,EAC7C,OAAO;QACP,+GAA+G;QAC/G,wDAAwD;QACxD,IAAI,CAAC,OAAO,CAAC,IAAI,CACf,IAAI,EACJ,KAAK,EACL,OAAO,QAAQ,KAAK,QAAQ,CAAC,CAAC,CAAC,MAAM,CAAC,KAAK,EAAE,QAAQ,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,MAAM,EAAE,QAAQ,CAAC,CAClF,CACF,CAAC;IACJ,CAAC;IA4IM,KAAK,CAAC,2BAA2B,CACtC,KAAsC,EACtC,WAAmB;IACnB,8DAA8D;IAC9D,UAA2C,EAAE;QAE7C,OAAO,IAAI,CAAC,QAAQ,CAAC,QAAQ,CAC3B,oDAAoD,EACpD,OAAO,EACP,IAAI,CAAC,OAAO,CAAC,IAAI,CAAC,IAAI,EAAE,KAAK,EAAE,MAAM,CAAC,KAAK,EAAE,WAAW,CAAC,CAAC,CAC3D,CAAC;IACJ,CAAC;IAED;;;;;;;OAOG;IACK,OAAO,CACb,KAAsC,EACtC,KAAqB,EACrB,OAAwC;QAExC,MAAM,EACJ,OAAO,EAAE,cAAc,EACvB,UAAU,EAAE,iBAAiB,EAC7B,eAAe,GAChB,GAAG,OAAO,KAAK,KAAK,QAAQ;YAC3B,CAAC,CAAC,EAAE,OAAO,EAAE,KAAK,EAAE,UAAU,EAAE,SAAS,EAAE,eAAe,EAAE,CAAC,CAAgB,EAAE,EAAE,CAAC,CAAC,EAAE;YACrF,CAAC,CAAC,KAAK,CAAC;QAEV,IAAI,iBAAiB,IAAI,iBAAiB,KAAK,2BAA2B,EAAE,CAAC;YAC3E,MAAM,IAAI,KAAK,CACb;gBACE,2DAA2D,iBAAiB,GAAG;gBAC/E,2BAA2B,2BAA2B,GAAG;gBACzD,mEAAmE;aACpE,CAAC,IAAI,CAAC,GAAG,CAAC,CACZ,CAAC;QACJ,CAAC;QAED,OAAO,IAAI,CAAC,mBAAmB,CAC7B,CAAC,WAAW,EAAE,EAAE;YACd,MAAM,CAAC,WAAW,EAAE,cAAc,CAAC,GAAG,gBAAgB,CAAC,KAAK,CAAC,CAAC;YAE9D,IAAI,WAAW,KAAK,kBAAkB,EAAE,CAAC;gBACvC,OAAO,IAAI,CAAC,WAAW,CAAC,cAAc,CAAC,eAAe,CAAC,cAAc,EAAE,WAAW,EAAE;oBAClF,GAAG,OAAO;oBACV,WAAW;oBACX,cAAc;iBACf,CAAC,CAAC;YACL,CAAC;iBAAM,CAAC;gBACN,OAAO,IAAI,CAAC,WAAW,CAAC,cAAc,CAAC,eAAe,CAAC,cAAc,EAAE,WAAW,EAAE;oBAClF,GAAG,OAAO;oBACV,WAAW;oBACX,cAAc;iBACf,CAAC,CAAC;YACL,CAAC;QACH,CAAC,EACD;YACE,cAAc;YACd,OAAO;YACP,eAAe,EAAE,CAAC,MAAM,EAAE,EAAE,CAAC,eAAe,CAAC,4BAA4B,CAAC,MAAM,CAAC,CAAC;SACnF,CACF,CAAC;IACJ,CAAC;IAED;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;OA+CG;IACI,KAAK,CAAC,qBAAqB,CAChC,YAAoB,EACpB,QAAmC;IACnC,8DAA8D;IAC9D,UAAmC,EAAE;QAErC,OAAO,IAAI,CAAC,QAAQ,CAAC,QAAQ,CAC3B,8CAA8C,EAC9C,OAAO,EACP,IAAI,CAAC,QAAQ,CAAC,IAAI,CAAC,IAAI,EAAE,YAAY,EAAE,MAAM,CAAC,MAAM,EAAE,QAAQ,CAAC,CAAC,CACjE,CAAC;IACJ,CAAC;IAED;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;OA+CG;IACI,KAAK,CAAC,4BAA4B,CACvC,YAAoB,EACpB,WAAmB;IACnB,8DAA8D;IAC9D,UAAmC,EAAE;QAErC,OAAO,IAAI,CAAC,QAAQ,CAAC,QAAQ,CAC3B,qDAAqD,EACrD,OAAO,EACP,IAAI,CAAC,QAAQ,CAAC,IAAI,CAAC,IAAI,EAAE,YAAY,EAAE,MAAM,CAAC,KAAK,EAAE,WAAW,CAAC,CAAC,CACnE,CAAC;IACJ,CAAC;IAED;;;;;;OAMG;IACK,QAAQ,CACd,YAAoB,EACpB,KAAqB,EACrB,OAAgC;QAEhC,OAAO,IAAI,CAAC,mBAAmB,CAC7B,KAAK,EAAE,WAAW,EAAE,EAAE;YACpB,MAAM,CAAC,WAAW,EAAE,eAAe,CAAC,GAAG,gBAAgB,CAAC,KAAK,CAAC,CAAC;YAE/D,IAAI,WAAW,KAAK,kBAAkB,EAAE,CAAC;gBACvC,OAAO,IAAI,CAAC,WAAW,CAAC,mBAAmB,CAAC,gBAAgB,CAC1D,YAAY,EACZ,WAAkB,EAClB;oBACE,GAAG,OAAO;oBACV,WAAW;oBACX,eAAe;iBAChB,CACF,CAAC;YACJ,CAAC;iBAAM,CAAC;gBACN,OAAO,IAAI,CAAC,WAAW,CAAC,mBAAmB,CAAC,gBAAgB,CAC1D,YAAY,EACZ,WAAkB,EAClB;oBACE
,GAAG,OAAO;oBACV,WAAW;oBACX,eAAe;iBAChB,CACF,CAAC;YACJ,CAAC;QACH,CAAC,EACD;YACE,cAAc,EAAE,YAAY;YAC5B,OAAO;YACP,eAAe,EAAE,4BAA4B;SAC9C,CACF,CAAC;IACJ,CAAC;IAED;;;;;;;;OAQG;IACK,KAAK,CAAC,mBAAmB,CAC/B,cAE4C,EAC5C,UAA+C;QAE/C,MAAM,EAAE,UAAU,EAAE,GAAG,UAAU,CAAC,OAAO,CAAC;QAE1C,kFAAkF;QAClF,+CAA+C;QAE/C,MAAM,gBAAgB,GAAG,CACvB,GAAqB,EACrB,iBAAyB,EACQ,EAAE,CACnC,IAAI,CAAC,QAAQ,CAAC,QAAQ,CACpB,8DAA8D,EAC9D,UAAU,CAAC,OAAO,EAClB,CAAC,YAAY,EAAE,EAAE,CACf,IAAI,CAAC,WAAW,CAAC,oBAAoB,CACnC;YACE,OAAO,EAAE;gBACP,UAAU,EAAE,KAAK,EAAE,WAAW,EAAE,GAAG,IAAI,EAAE,EAAE;oBACzC,mDAAmD;oBACnD,MAAM,gBAAgB,GAAG,WAAW,CAAC,OAAO,CAAC,GAAG,CAAC,aAAa,CAAC,CAAC;oBAChE,oGAAoG;oBACpG,QAAQ;oBACR,IAAI,gBAAgB,EAAE,CAAC;wBACrB,MAAM,YAAY,GAAG,MAAM,CAAC,gBAAgB,CAAC,GAAG,IAAI,CAAC;wBACrD,IAAI,CAAC,MAAM,CAAC,KAAK,CAAC,YAAY,CAAC,EAAE,CAAC;4BAChC,GAAG,CAAC,WAAW,CAAC,YAAY,CAAC,CAAC;wBAChC,CAAC;6BAAM,CAAC;4BACN,GAAG,CAAC,WAAW,CAAC,IAAI,CAAC,KAAK,CAAC,gBAAgB,CAAC,GAAG,IAAI,CAAC,GAAG,EAAE,CAAC,CAAC;wBAC7D,CAAC;oBACH,CAAC;yBAAM,CAAC;wBACN,GAAG,CAAC,WAAW,CAAC,SAAS,CAAC,CAAC;oBAC7B,CAAC;oBAED,wDAAwD;oBACxD,OAAO,YAAY,CAAC,UAAU,EAAE,CAAC,WAAW,EAAE,GAAG,IAAI,CAAC,CAAC;gBACzD,CAAC;gBACD,GAAG,YAAY;gBACf,uGAAuG;gBACvG,2EAA2E;gBAC3E,WAAW,EAAE,GAAG,CAAC,WAAW;aAC7B;SACF,EACD;YACE,IAAI,EAAE,iBAAiB;YACvB,UAAU,EAAE,KAAK;YACjB,SAAS,EAAE;gBACT,GAAG,EAAE;oBACH,UAAU,EAAE,OAAO,CAAC,sBAAsB;iBAC3C;gBACD,OAAO,EAAE;oBACP,UAAU,EAAE,OAAO,CAAC,aAAa;iBAClC;aACF;YACD,6DAA6D;YAC7D,gBAAgB,EAAE,CAAC,OAAO,CAAC;YAC3B,UAAU,EAAE,UAAU;SACvB,CACF,CACJ,CAAC;QAEJ,MAAM,MAAM;QACV,0DAA0D;QAC1D,UAAU,KAAK,SAAS;YACtB,CAAC,CAAC,KAAK,EAAE,GAAqB,EAAE,EAAE,CAC9B,IAAI,CAAC,QAAQ,CAAC,QAAQ,CACpB,oDAAoD,EACpD,UAAU,CAAC,OAAO,EAClB,KAAK,IAAI,EAAE;gBACT,MAAM,EAAE,aAAa,EAAE,iBAAiB,EAAE,OAAO,EAAE,GAAG,IAAI,CAAC,KAAK,CAAC,UAAU,CAI1E,CAAC;gBAEF,IAAI,CAAC,aAAa,IAAI,aAAa,KAAK,WAAW,EAAE,CAAC;oBACpD,MAAM,IAAI,KAAK,CACb;wBACE,sFAAsF;wBACtF,0BAA0B,aAAa,gBAAgB,WAAW,KAAK;qBACxE,CAAC,IAAI,CAAC,GAAG,CAAC,CACZ,CAAC;gBACJ,CAAC;gBAED,MAAM,MAAM,GAAG,MAAM,gBAAgB,CAAC,GAAG,EAAE,iBAAiB,CAAC,CAAC;gBAE9D,OAAO,oCAAoC,CACzC,UAAU,EACV,OAAO,EACP,iBAAiB,EACjB,MAAM,CACP,CAAC;YACJ,CAAC,CACF;YACL,CAAC,CAAC,iEAAiE;gBACjE,KAAK,EAAE,GAAqB,EAAE,EAAE,CAC9B,IAAI,CAAC,QAAQ,CAAC,QAAQ,CACpB,mDAAmD,EACnD,UAAU,CAAC,OAAO,EAClB,KAAK,IAAI,EAAE;oBACT,MAAM,EAAE,iBAAiB,EAAE,GAAG,MAAM,cAAc,CAAC,GAAG,CAAC,WAAW,CAAC,CAAC;oBAEpE,IAAI,iBAAiB,KAAK,SAAS,EAAE,CAAC;wBACpC,MAAM,IAAI,KAAK,CACb,qEAAqE,CACtE,CAAC;oBACJ,CAAC;oBAED,MAAM,MAAM,GAAG,MAAM,gBAAgB,CAAC,GAAG,EAAE,iBAAiB,CAAC,CAAC;oBAE9D,OAAO,oCAAoC,CACzC,UAAU,EACV,UAAU,CAAC,cAAc,EACzB,iBAAiB,EACjB,MAAM,CACP,CAAC;gBACJ,CAAC,CACF,CAAC;QAEV,MAAM,MAAM,GAAG,MAAM,GAAG,CACtB;YACE,IAAI,EAAE,MAAM;YACZ,IAAI,EAAE,KAAK,EAAE,GAAG,EAAE,EAAE,iBAAiB,EAAE,OAAO,EAAE,EAAE,EAAE,CAClD,IAAI,CAAC,QAAQ,CAAC,QAAQ,CACpB,kDAAkD,EAClD,EAAE,EACF,KAAK,IAAI,EAAE;gBACT,MAAM,MAAM,GAAG,MAAM,gBAAgB,CAAC,GAAG,EAAE,iBAAiB,CAAC,CAAC;gBAE9D,OAAO,oCAAoC,CACzC,UAAU,EACV,OAAO,EACP,iBAAiB,EACjB,MAAM,CACP,CAAC;YACJ,CAAC,CACF;YACH,SAAS,EAAE,CAAC,EAAE,iBAAiB,EAAE,OAAO,EAAE,EAAE,EAAE,CAC5C,IAAI,CAAC,SAAS,CAAC,EAAE,aAAa,EAAE,WAAW,EAAE,EAAE,EAAE,OAAO,EAAE,iBAAiB,EAAE,CAAC;SACjF,EACD,UAAU,CAAC,OAAO,CAAC,kBAAkB,EACrC,UAAU,CAAC,OAAO,CAAC,WAAW,CAC/B,CAAC;QAEF,IAAI,UAAU,CAAC,OAAO,CAAC,UAAU,KAAK,SAAS,EAAE,CAAC;YAChD,MAAM,CAAC,UAAU,CAAC,UAAU,CAAC,OAAO,CAAC,UAAU,CAAC,CAAC;YACjD,UAAU,CAAC,OAAO,CAAC,UAAU,CAAC,MAAM,CAAC,iBAAiB,EAAE,CAAC,CAAC;QAC5D,CAAC;QAED,OAAO,MAAM,CAAC;IAChB,CAAC;CAGF;AAED;;;GAGG;AACH,SAAS,gBAAgB,CACvB,KAAqB;IAIrB,QAAQ,KAAK,CAAC,IAAI,EAAE,CAAC;QACnB,KAAK,MAAM;YACT,OAAO,CAAC,0BAA0B,EAAE,KAAK,CAAC,IAAI,CAAC,CAAC;QAClD,KAAK,KAAK;YACR,OAAO,C
AAC,kBAAkB,EAAE,EAAE,SAAS,EAAE,KAAK,CAAC,GAAG,EAAE,CAAC,CAAC;QACxD,KAAK,QAAQ;YACX,OAAO,CAAC,kBAAkB,EAAE,EAAE,YAAY,EAAE,KAAK,CAAC,MAAM,EAAE,CAAC,CAAC;QAC9D,OAAO,CAAC,CAAC,CAAC;YACR,MAAM,SAAS,GAAU,KAAK,CAAC;YAC/B,MAAM,IAAI,KAAK,CAAC,wCAAwC,SAAS,EAAE,CAAC,CAAC;QACvE,CAAC;IACH,CAAC;AACH,CAAC;AAED;;GAEG;AACH,uFAAuF;AAEvF,SAAS,MAAM,CACb,IAAO,EACP,KAA2F;IAE3F,OAAO;QACL,IAAI;QACJ,CAAC,IAAI,CAAC,EAAE,KAAK;KACe,CAAC;AACjC,CAAC","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT License.\n\nimport type { KeyCredential, TokenCredential } from \"@azure/core-auth\";\nimport { createTracingClient } from \"@azure/core-tracing\";\nimport type { TracingClient } from \"@azure/core-tracing\";\nimport { FORM_RECOGNIZER_API_VERSION, SDK_VERSION } from \"./constants.js\";\nimport type {\n AnalyzeDocumentRequest,\n AnalyzeResultOperation,\n GeneratedClient,\n} from \"./generated/index.js\";\nimport { accept1 } from \"./generated/models/parameters.js\";\nimport type {\n AnalysisOperationDefinition,\n AnalysisPoller,\n AnalyzeResult,\n DocumentAnalysisPollOperationState,\n FormRecognizerRequestBody,\n} from \"./lro/analysis.js\";\nimport {\n toAnalyzeResultFromGenerated,\n toDocumentAnalysisPollOperationState,\n} from \"./lro/analysis.js\";\nimport type { OperationContext } from \"./lro/util/poller.js\";\nimport { lro } from \"./lro/util/poller.js\";\nimport type { AnalyzeDocumentOptions } from \"./options/AnalyzeDocumentOptions.js\";\nimport type { DocumentAnalysisClientOptions } from \"./options/FormRecognizerClientOptions.js\";\nimport type { DocumentModel } from \"./documentModel.js\";\nimport { makeServiceClient, Mappers, SERIALIZER } from \"./util.js\";\nimport type { AbortSignalLike } from \"@azure/abort-controller\";\nimport type { ClassifyDocumentOptions } from \"./options/ClassifyDocumentOptions.js\";\n\n/**\n * A client for interacting with the Form Recognizer service's analysis features.\n *\n * ### Examples:\n *\n * The Form Recognizer service and clients support two means of authentication:\n *\n * #### Azure Active Directory\n *\n * ```ts snippet:ReadmeSampleCreateClient_TokenCredential\n * import { DefaultAzureCredential } from \"@azure/identity\";\n * import { DocumentAnalysisClient } from \"@azure/ai-form-recognizer\";\n *\n * const credential = new DefaultAzureCredential();\n * const client = new DocumentAnalysisClient(\n * \"https://<resource name>.cognitiveservices.azure.com\",\n * credential,\n * );\n * ```\n *\n * #### API Key (Subscription Key)\n *\n * ```ts snippet:ReadmeSampleCreateClient_KeyCredential\n * import { AzureKeyCredential, DocumentAnalysisClient } from \"@azure/ai-form-recognizer\";\n *\n * const credential = new AzureKeyCredential(\"<API key>\");\n * const client = new DocumentAnalysisClient(\n * \"https://<resource name>.cognitiveservices.azure.com\",\n * credential,\n * );\n * ```\n */\nexport class DocumentAnalysisClient {\n private _restClient: GeneratedClient;\n private _tracing: TracingClient;\n\n /**\n * Create a `DocumentAnalysisClient` instance from a resource endpoint and a an Azure Identity `TokenCredential`.\n *\n * See the [`@azure/identity`](https://npmjs.com/package/\\@azure/identity) package for more information about\n * authenticating with Azure Active Directory.\n *\n * ### Example:\n *\n * ```ts snippet:ReadmeSampleCreateClient_TokenCredential\n * import { DefaultAzureCredential } from \"@azure/identity\";\n * import { DocumentAnalysisClient } from \"@azure/ai-form-recognizer\";\n *\n * const credential = new 
DefaultAzureCredential();\n * const client = new DocumentAnalysisClient(\n * \"https://<resource name>.cognitiveservices.azure.com\",\n * credential,\n * );\n * ```\n *\n * @param endpoint - the endpoint URL of an Azure Cognitive Services instance\n * @param credential - a TokenCredential instance from the `@azure/identity` package\n * @param options - optional settings for configuring all methods in the client\n */\n public constructor(\n endpoint: string,\n credential: TokenCredential,\n options?: DocumentAnalysisClientOptions,\n );\n /**\n * Create a `DocumentAnalysisClient` instance from a resource endpoint and a static API key (`KeyCredential`),\n *\n * ### Example:\n *\n * ```ts snippet:ReadmeSampleCreateClient_KeyCredential\n * import { AzureKeyCredential, DocumentAnalysisClient } from \"@azure/ai-form-recognizer\";\n *\n * const credential = new AzureKeyCredential(\"<API key>\");\n * const client = new DocumentAnalysisClient(\n * \"https://<resource name>.cognitiveservices.azure.com\",\n * credential,\n * );\n * ```\n *\n * @param endpoint - the endpoint URL of an Azure Cognitive Services instance\n * @param credential - a KeyCredential containing the Cognitive Services instance subscription key\n * @param options - optional settings for configuring all methods in the client\n */\n public constructor(\n endpoint: string,\n credential: KeyCredential,\n options?: DocumentAnalysisClientOptions,\n );\n /**\n * @hidden\n */\n public constructor(\n endpoint: string,\n credential: KeyCredential | TokenCredential,\n options?: DocumentAnalysisClientOptions,\n );\n public constructor(\n endpoint: string,\n credential: KeyCredential | TokenCredential,\n options: DocumentAnalysisClientOptions = {},\n ) {\n this._restClient = makeServiceClient(endpoint, credential, options);\n this._tracing = createTracingClient({\n packageName: \"@azure/ai-form-recognizer\",\n packageVersion: SDK_VERSION,\n namespace: \"Microsoft.CognitiveServices\",\n });\n }\n\n // #region Analysis\n\n /**\n * Extract data from an input using a model given by its unique ID.\n *\n * This operation supports custom as well as prebuilt models. For example, to use the prebuilt invoice model, provide\n * the model ID \"prebuilt-invoice\", or to use the simpler prebuilt layout model, provide the model ID\n * \"prebuilt-layout\".\n *\n * The fields produced in the `AnalyzeResult` depend on the model that is used for analysis, and the values in any\n * extracted documents' fields depend on the document types in the model (if any) and their corresponding field\n * schemas.\n *\n * ### Examples\n *\n * This method supports streamable request bodies ({@link FormRecognizerRequestBody}) such as Node.JS `ReadableStream`\n * objects, browser `Blob`s, and `ArrayBuffer`s. 
The contents of the body will be uploaded to the service for analysis.\n *\n * ```ts snippet:ReadmeSamplePrebuiltReceipt\n * import { DefaultAzureCredential } from \"@azure/identity\";\n * import { DocumentAnalysisClient } from \"@azure/ai-form-recognizer\";\n * import { createReadStream } from \"node:fs\";\n * import { PrebuiltReceiptModel } from \"../samples-dev/prebuilt/prebuilt-receipt.js\";\n *\n * const credential = new DefaultAzureCredential();\n * const client = new DocumentAnalysisClient(\n * \"https://<resource name>.cognitiveservices.azure.com\",\n * credential,\n * );\n *\n * const path = \"<path to a document>\";\n * const readStream = createReadStream(path);\n *\n * // The PrebuiltReceiptModel `DocumentModel` instance encodes both the model ID and a stronger return type for the operation\n * const poller = await client.beginAnalyzeDocument(PrebuiltReceiptModel, readStream, {\n * onProgress: ({ status }) => {\n * console.log(`status: ${status}`);\n * },\n * });\n *\n * const {\n * documents: [receiptDocument],\n * } = await poller.pollUntilDone();\n *\n * // The fields of the document constitute the extracted receipt data.\n * const receipt = receiptDocument.fields;\n *\n * if (receipt === undefined) {\n * throw new Error(\"Expected at least one receipt in analysis result.\");\n * }\n *\n * console.log(`Receipt data (${receiptDocument.docType})`);\n * console.log(\" Merchant Name:\", receipt.merchantName?.value);\n *\n * // The items of the receipt are an example of a `DocumentArrayValue`\n * if (receipt.items !== undefined) {\n * console.log(\"Items:\");\n * for (const { properties: item } of receipt.items.values) {\n * console.log(\"- Description:\", item.description?.value);\n * console.log(\" Total Price:\", item.totalPrice?.value);\n * }\n * }\n *\n * console.log(\" Total:\", receipt.total?.value);\n * ```\n *\n *\n * @param modelId - the unique ID (name) of the model within this client's resource\n * @param document - a {@link FormRecognizerRequestBody} that will be uploaded with the request\n * @param options - optional settings for the analysis operation and poller\n * @returns a long-running operation (poller) that will eventually produce an `AnalyzeResult`\n */\n public async beginAnalyzeDocument(\n modelId: string,\n document: FormRecognizerRequestBody,\n // eslint-disable-next-line @azure/azure-sdk/ts-naming-options\n options?: AnalyzeDocumentOptions,\n ): Promise<AnalysisPoller>;\n /**\n * Extract data from an input using a model that has a known, strongly-typed document schema (a {@link DocumentModel}).\n *\n * The fields produced in the `AnalyzeResult` depend on the model that is used for analysis. In TypeScript, the type\n * of the result for this method overload is inferred from the type of the input `DocumentModel`.\n *\n * ### Examples\n *\n * This method supports streamable request bodies ({@link FormRecognizerRequestBody}) such as Node.JS `ReadableStream`\n * objects, browser `Blob`s, and `ArrayBuffer`s. The contents of the body will be uploaded to the service for analysis.\n *\n * If the input provided is a string, it will be treated as a URL to the location of a document to be analyzed. See the\n * {@link beginAnalyzeDocumentFromUrl} method for more information. 
Use of that method is preferred when using URLs,\n * and URL support is only provided in this method for backwards compatibility.\n *\n * ```ts snippet:ReadmeSamplePrebuiltReceipt\n * import { DefaultAzureCredential } from \"@azure/identity\";\n * import { DocumentAnalysisClient } from \"@azure/ai-form-recognizer\";\n * import { createReadStream } from \"node:fs\";\n * import { PrebuiltReceiptModel } from \"../samples-dev/prebuilt/prebuilt-receipt.js\";\n *\n * const credential = new DefaultAzureCredential();\n * const client = new DocumentAnalysisClient(\n * \"https://<resource name>.cognitiveservices.azure.com\",\n * credential,\n * );\n *\n * const path = \"<path to a document>\";\n * const readStream = createReadStream(path);\n *\n * // The PrebuiltReceiptModel `DocumentModel` instance encodes both the model ID and a stronger return type for the operation\n * const poller = await client.beginAnalyzeDocument(PrebuiltReceiptModel, readStream, {\n * onProgress: ({ status }) => {\n * console.log(`status: ${status}`);\n * },\n * });\n *\n * const {\n * documents: [receiptDocument],\n * } = await poller.pollUntilDone();\n *\n * // The fields of the document constitute the extracted receipt data.\n * const receipt = receiptDocument.fields;\n *\n * if (receipt === undefined) {\n * throw new Error(\"Expected at least one receipt in analysis result.\");\n * }\n *\n * console.log(`Receipt data (${receiptDocument.docType})`);\n * console.log(\" Merchant Name:\", receipt.merchantName?.value);\n *\n * // The items of the receipt are an example of a `DocumentArrayValue`\n * if (receipt.items !== undefined) {\n * console.log(\"Items:\");\n * for (const { properties: item } of receipt.items.values) {\n * console.log(\"- Description:\", item.description?.value);\n * console.log(\" Total Price:\", item.totalPrice?.value);\n * }\n * }\n *\n * console.log(\" Total:\", receipt.total?.value);\n * ```\n *\n * @param model - a {@link DocumentModel} representing the model to use for analysis and the expected output type\n * @param document - a {@link FormRecognizerRequestBody} that will be uploaded with the request\n * @param options - optional settings for the analysis operation and poller\n * @returns a long-running operation (poller) that will eventually produce an `AnalyzeResult` with documents that have\n * the result type associated with the input model\n */\n public async beginAnalyzeDocument<Result>(\n model: DocumentModel<Result>,\n document: FormRecognizerRequestBody,\n // eslint-disable-next-line @azure/azure-sdk/ts-naming-options\n options?: AnalyzeDocumentOptions<Result>,\n ): Promise<AnalysisPoller<Result>>;\n public async beginAnalyzeDocument(\n model: string | DocumentModel<unknown>,\n document: FormRecognizerRequestBody,\n // eslint-disable-next-line @azure/azure-sdk/ts-naming-options\n options: AnalyzeDocumentOptions<unknown> = {},\n ): Promise<AnalysisPoller<unknown>> {\n return this._tracing.withSpan(\n \"DocumentAnalysisClient.beginAnalyzeDocument\",\n options,\n // In the first version of the SDK, the document input was treated as a URL if it was a string, and we preserve\n // this behavior to avoid introducing a breaking change.\n this.analyze.bind(\n this,\n model,\n typeof document === \"string\" ? source(\"url\", document) : source(\"body\", document),\n ),\n );\n }\n\n /**\n * Extract data from an input using a model given by its unique ID.\n *\n * This operation supports custom as well as prebuilt models. 
For example, to use the prebuilt invoice model, provide\n * the model ID \"prebuilt-invoice\", or to use the simpler prebuilt layout model, provide the model ID\n * \"prebuilt-layout\".\n *\n * The fields produced in the `AnalyzeResult` depend on the model that is used for analysis, and the values in any\n * extracted documents' fields depend on the document types in the model (if any) and their corresponding field\n * schemas.\n *\n * ### Examples\n *\n * This method supports extracting data from a file at a given URL. The Form Recognizer service will attempt to\n * download a file using the submitted URL, so the URL must be accessible from the public internet. For example, a SAS\n * token can be used to grant read access to a blob in Azure Storage, and the service will use the SAS-encoded URL to\n * request the file.\n *\n * ```ts snippet:ReadmeSampleReceiptModelID_URL\n * import { DefaultAzureCredential } from \"@azure/identity\";\n * import {\n * DocumentAnalysisClient,\n * DocumentStringField,\n * DocumentArrayField,\n * DocumentObjectField,\n * } from \"@azure/ai-form-recognizer\";\n *\n * const credential = new DefaultAzureCredential();\n * const client = new DocumentAnalysisClient(\n * \"https://<resource name>.cognitiveservices.azure.com\",\n * credential,\n * );\n *\n * const poller = await client.beginAnalyzeDocumentFromUrl(\n * \"prebuilt-receipt\",\n * // The Document Intelligence service will access the following URL to a receipt image and extract data from it\n * \"https://raw.githubusercontent.com/Azure/azure-sdk-for-js/main/sdk/formrecognizer/ai-form-recognizer/assets/receipt/contoso-receipt.png\",\n * );\n * poller.onProgress((state) => console.log(\"Operation:\", state.modelId, state.status));\n *\n * const { documents } = await poller.pollUntilDone();\n *\n * const result = documents && documents[0];\n * if (result) {\n * const receipt = result.fields;\n * console.log(\"=== Receipt Information ===\");\n * console.log(\"Type:\", result.docType);\n * console.log(\"Merchant:\", (receipt[\"MerchantName\"] as DocumentStringField).value);\n *\n * console.log(\"Items:\");\n * for (const { properties: item } of ((receipt[\"Items\"] as DocumentArrayField).values ||\n * []) as DocumentObjectField[]) {\n * console.log(\"- Description:\", (item[\"Description\"] as DocumentStringField).value);\n * console.log(\" Total Price:\", (item[\"TotalPrice\"] as DocumentStringField).value);\n * }\n * } else {\n * throw new Error(\"Expected at least one receipt in the result.\");\n * }\n * ```\n *\n * @param modelId - the unique ID (name) of the model within this client's resource\n * @param documentUrl - a URL (string) to an input document accessible from the public internet\n * @param options - optional settings for the analysis operation and poller\n * @returns a long-running operation (poller) that will eventually produce an `AnalyzeResult`\n */\n public async beginAnalyzeDocumentFromUrl(\n modelId: string,\n documentUrl: string,\n // eslint-disable-next-line @azure/azure-sdk/ts-naming-options\n options?: AnalyzeDocumentOptions,\n ): Promise<AnalysisPoller>;\n /**\n * Extract data from an input using a model that has a known, strongly-typed document schema (a {@link DocumentModel}).\n *\n * The fields produced in the `AnalyzeResult` depend on the model that is used for analysis. 
In TypeScript, the type\n * of the result for this method overload is inferred from the type of the input `DocumentModel`.\n *\n * ### Examples\n *\n * This method supports extracting data from a file at a given URL. The Form Recognizer service will attempt to\n * download a file using the submitted URL, so the URL must be accessible from the public internet. For example, a SAS\n * token can be used to grant read access to a blob in Azure Storage, and the service will use the SAS-encoded URL to\n * request the file.\n *\n * ```ts snippet:ReadmeSampleReceiptPrebuilt_URL\n * import { DefaultAzureCredential } from \"@azure/identity\";\n * import { DocumentAnalysisClient } from \"@azure/ai-form-recognizer\";\n * import { PrebuiltReceiptModel } from \"../samples-dev/prebuilt/prebuilt-receipt.js\";\n *\n * const credential = new DefaultAzureCredential();\n * const client = new DocumentAnalysisClient(\n * \"https://<resource name>.cognitiveservices.azure.com\",\n * credential,\n * );\n *\n * const poller = await client.beginAnalyzeDocumentFromUrl(\n * PrebuiltReceiptModel,\n * // The Document Intelligence service will access the following URL to a receipt image and extract data from it\n * \"https://raw.githubusercontent.com/Azure/azure-sdk-for-js/main/sdk/formrecognizer/ai-form-recognizer/assets/receipt/contoso-receipt.png\",\n * );\n *\n * const {\n * documents: [document],\n * } = await poller.pollUntilDone();\n *\n * // Use of PrebuiltModels.Receipt above (rather than the raw model ID), as it adds strong typing of the model's output\n * if (document) {\n * const { merchantName, items, total } = document.fields;\n *\n * console.log(\"=== Receipt Information ===\");\n * console.log(\"Type:\", document.docType);\n * console.log(\"Merchant:\", merchantName && merchantName.value);\n *\n * console.log(\"Items:\");\n * for (const item of (items && items.values) || []) {\n * const { description, totalPrice } = item.properties;\n *\n * console.log(\"- Description:\", description && description.value);\n * console.log(\" Total Price:\", totalPrice && totalPrice.value);\n * }\n *\n * console.log(\"Total:\", total && total.value);\n * } else {\n * throw new Error(\"Expected at least one receipt in the result.\");\n * }\n * ```\n *\n * @param model - a {@link DocumentModel} representing the model to use for analysis and the expected output type\n * @param documentUrl - a URL (string) to an input document accessible from the public internet\n * @param options - optional settings for the analysis operation and poller\n * @returns a long-running operation (poller) that will eventually produce an `AnalyzeResult`\n */\n public async beginAnalyzeDocumentFromUrl<Result>(\n model: DocumentModel<Result>,\n documentUrl: string,\n // eslint-disable-next-line @azure/azure-sdk/ts-naming-options\n options?: AnalyzeDocumentOptions<Result>,\n ): Promise<AnalysisPoller<Result>>;\n public async beginAnalyzeDocumentFromUrl(\n model: string | DocumentModel<unknown>,\n documentUrl: string,\n // eslint-disable-next-line @azure/azure-sdk/ts-naming-options\n options: AnalyzeDocumentOptions<unknown> = {},\n ): Promise<AnalysisPoller<unknown>> {\n return this._tracing.withSpan(\n \"DocumentAnalysisClient.beginAnalyzeDocumentFromUrl\",\n options,\n this.analyze.bind(this, model, source(\"url\", documentUrl)),\n );\n }\n\n /**\n * A helper method for running analysis polymorphically.\n *\n * @param model - the model ID or DocumentModel to use for analysis\n * @param input - the string URL or request body to use\n * @param options - 
analysis options\n * @returns - an analysis poller\n */\n private analyze(\n model: string | DocumentModel<unknown>,\n input: DocumentSource,\n options: AnalyzeDocumentOptions<unknown>,\n ): Promise<AnalysisPoller<unknown>> {\n const {\n modelId: initialModelId,\n apiVersion: requestApiVersion,\n transformResult,\n } = typeof model === \"string\"\n ? { modelId: model, apiVersion: undefined, transformResult: (v: AnalyzeResult) => v }\n : model;\n\n if (requestApiVersion && requestApiVersion !== FORM_RECOGNIZER_API_VERSION) {\n throw new Error(\n [\n `API Version mismatch: the provided model wants version: ${requestApiVersion},`,\n `but the client is using ${FORM_RECOGNIZER_API_VERSION}.`,\n \"The API version of the model must match the client's API version.\",\n ].join(\" \"),\n );\n }\n\n return this.createUnifiedPoller<unknown>(\n (abortSignal) => {\n const [contentType, analyzeRequest] = toAnalyzeRequest(input);\n\n if (contentType === \"application/json\") {\n return this._restClient.documentModels.analyzeDocument(initialModelId, contentType, {\n ...options,\n abortSignal,\n analyzeRequest,\n });\n } else {\n return this._restClient.documentModels.analyzeDocument(initialModelId, contentType, {\n ...options,\n abortSignal,\n analyzeRequest,\n });\n }\n },\n {\n initialModelId,\n options,\n transformResult: (result) => transformResult(toAnalyzeResultFromGenerated(result)),\n },\n );\n }\n\n /**\n * Classify a document using a custom classifier given by its ID.\n *\n * This method produces a long-running operation (poller) that will eventually produce an `AnalyzeResult`. This is the\n * same type as `beginAnalyzeDocument` and `beginAnalyzeDocumentFromUrl`, but the result will only contain a small\n * subset of its fields. Only the `documents` field and `pages` field will be populated, and only minimal page\n * information will be returned. The `documents` field will contain information about all the identified documents and\n * the `docType` that they were classified as.\n *\n * ### Example\n *\n * This method supports streamable request bodies ({@link FormRecognizerRequestBody}) such as Node.JS `ReadableStream`\n * objects, browser `Blob`s, and `ArrayBuffer`s. 
The contents of the body will be uploaded to the service for analysis.\n *\n * ```ts snippet:ReadmeSampleClassifyDocument_File\n * import { DefaultAzureCredential } from \"@azure/identity\";\n * import { DocumentAnalysisClient } from \"@azure/ai-form-recognizer\";\n * import { createReadStream } from \"node:fs\";\n *\n * const credential = new DefaultAzureCredential();\n * const client = new DocumentAnalysisClient(\n * \"https://<resource name>.cognitiveservices.azure.com\",\n * credential,\n * );\n *\n * const path = \"<path to a document>\";\n * const readStream = createReadStream(path);\n *\n * const poller = await client.beginClassifyDocument(\"<classifier id>\", readStream);\n *\n * const result = await poller.pollUntilDone();\n *\n * if (result?.documents?.length === 0) {\n * throw new Error(\"Failed to extract any documents.\");\n * }\n *\n * for (const document of result.documents) {\n * console.log(\n * `Extracted a document with type '${document.docType}' on page ${document.boundingRegions?.[0].pageNumber} (confidence: ${document.confidence})`,\n * );\n * }\n * ```\n *\n * @param classifierId - the ID of the custom classifier to use for analysis\n * @param document - the document to classify\n * @param options - options for the classification operation\n * @returns a long-running operation (poller) that will eventually produce an `AnalyzeResult`\n */\n public async beginClassifyDocument(\n classifierId: string,\n document: FormRecognizerRequestBody,\n // eslint-disable-next-line @azure/azure-sdk/ts-naming-options\n options: ClassifyDocumentOptions = {},\n ): Promise<AnalysisPoller> {\n return this._tracing.withSpan(\n \"DocumentAnalysisClient.beginClassifyDocument\",\n options,\n this.classify.bind(this, classifierId, source(\"body\", document)),\n );\n }\n\n /**\n * Classify a document from a URL using a custom classifier given by its ID.\n *\n * This method produces a long-running operation (poller) that will eventually produce an `AnalyzeResult`. This is the\n * same type as `beginAnalyzeDocument` and `beginAnalyzeDocumentFromUrl`, but the result will only contain a small\n * subset of its fields. Only the `documents` field and `pages` field will be populated, and only minimal page\n * information will be returned. The `documents` field will contain information about all the identified documents and\n * the `docType` that they were classified as.\n *\n * ### Example\n *\n * This method supports extracting data from a file at a given URL. The Form Recognizer service will attempt to\n * download a file using the submitted URL, so the URL must be accessible from the public internet. 
For example, a SAS\n * token can be used to grant read access to a blob in Azure Storage, and the service will use the SAS-encoded URL to\n * request the file.\n *\n * ```ts snippet:ReadmeSampleClassifyDocument\n * import { DefaultAzureCredential } from \"@azure/identity\";\n * import { DocumentAnalysisClient } from \"@azure/ai-form-recognizer\";\n *\n * const credential = new DefaultAzureCredential();\n * const client = new DocumentAnalysisClient(\n * \"https://<resource name>.cognitiveservices.azure.com\",\n * credential,\n * );\n *\n * const documentUrl =\n * \"https://raw.githubusercontent.com/Azure/azure-sdk-for-js/main/sdk/formrecognizer/ai-form-recognizer/assets/invoice/Invoice_1.pdf\";\n *\n * const poller = await client.beginClassifyDocumentFromUrl(\"<classifier id>\", documentUrl);\n *\n * const result = await poller.pollUntilDone();\n *\n * if (result?.documents?.length === 0) {\n * throw new Error(\"Failed to extract any documents.\");\n * }\n *\n * for (const document of result.documents) {\n * console.log(\n * `Extracted a document with type '${document.docType}' on page ${document.boundingRegions?.[0].pageNumber} (confidence: ${document.confidence})`,\n * );\n * }\n * ```\n * @param classifierId - the ID of the custom classifier to use for analysis\n * @param documentUrl - the URL of the document to classify\n * @param options -\n * @returns\n */\n public async beginClassifyDocumentFromUrl(\n classifierId: string,\n documentUrl: string,\n // eslint-disable-next-line @azure/azure-sdk/ts-naming-options\n options: ClassifyDocumentOptions = {},\n ): Promise<AnalysisPoller> {\n return this._tracing.withSpan(\n \"DocumentAnalysisClient.beginClassifyDocumentFromUrl\",\n options,\n this.classify.bind(this, classifierId, source(\"url\", documentUrl)),\n );\n }\n\n /**\n * A helper method for running classification polymorphically.\n * @param classifierId - the ID of the classifier to use\n * @param input - the string URL or request body to use\n * @param options - analysis options\n * @returns an analysis poller\n */\n private classify(\n classifierId: string,\n input: DocumentSource,\n options: ClassifyDocumentOptions,\n ): Promise<AnalysisPoller> {\n return this.createUnifiedPoller(\n async (abortSignal) => {\n const [contentType, classifyRequest] = toAnalyzeRequest(input);\n\n if (contentType === \"application/json\") {\n return this._restClient.documentClassifiers.classifyDocument(\n classifierId,\n contentType as any,\n {\n ...options,\n abortSignal,\n classifyRequest,\n },\n );\n } else {\n return this._restClient.documentClassifiers.classifyDocument(\n classifierId,\n contentType as any,\n {\n ...options,\n abortSignal,\n classifyRequest,\n },\n );\n }\n },\n {\n initialModelId: classifierId,\n options,\n transformResult: toAnalyzeResultFromGenerated,\n },\n );\n }\n\n /**\n * Create an LRO poller that handles analysis operations.\n *\n * This is the meat of all analysis polling operations.\n *\n * @param startOperation - function that starts the operation and returns the operation location\n * @param definition - operation definition (initial model ID, operation transforms, request options)\n * @returns - an analysis poller that produces the given return types according to the operation spec\n */\n private async createUnifiedPoller<Result>(\n startOperation: (\n abortSignal: AbortSignalLike | undefined,\n ) => Promise<{ operationLocation?: string }>,\n definition: AnalysisOperationDefinition<Result>,\n ): Promise<AnalysisPoller<Result>> {\n const { resumeFrom } = 
definition.options;\n\n // TODO: what should we do if resumeFrom.modelId is different from initialModelId?\n // And what do we do with the redundant input??\n\n const getAnalyzeResult = (\n ctx: OperationContext,\n operationLocation: string,\n ): Promise<AnalyzeResultOperation> =>\n this._tracing.withSpan(\n \"DocumentAnalysisClient.createAnalysisPoller-getAnalyzeResult\",\n definition.options,\n (finalOptions) =>\n this._restClient.sendOperationRequest<AnalyzeResultOperation>(\n {\n options: {\n onResponse: async (rawResponse, ...args) => {\n // Capture the `Retry-After` header if it was sent.\n const retryAfterHeader = rawResponse.headers.get(\"retry-after\");\n // Convert the header value to milliseconds. If the header is not a valid number, then it is an HTTP\n // date.\n if (retryAfterHeader) {\n const retryAfterMs = Number(retryAfterHeader) * 1000;\n if (!Number.isNaN(retryAfterMs)) {\n ctx.updateDelay(retryAfterMs);\n } else {\n ctx.updateDelay(Date.parse(retryAfterHeader) - Date.now());\n }\n } else {\n ctx.updateDelay(undefined);\n }\n\n // Forward the `onResponse` callback if it was provided.\n return finalOptions.onResponse?.(rawResponse, ...args);\n },\n ...finalOptions,\n // We need to pass the abort signal from the context rather than from the options, since the user could\n // poll the LRO with a different AbortSignal than it was instantiated with.\n abortSignal: ctx.abortSignal,\n },\n },\n {\n path: operationLocation,\n httpMethod: \"GET\",\n responses: {\n 200: {\n bodyMapper: Mappers.AnalyzeResultOperation,\n },\n default: {\n bodyMapper: Mappers.ErrorResponse,\n },\n },\n // URL is fully-formed, so we don't need any query parameters\n headerParameters: [accept1],\n serializer: SERIALIZER,\n },\n ),\n );\n\n const toInit =\n // If the user gave us a stored token, we'll poll it again\n resumeFrom !== undefined\n ? 
async (ctx: OperationContext) =>\n this._tracing.withSpan(\n \"DocumentAnalysisClient.createAnalysisPoller-resume\",\n definition.options,\n async () => {\n const { clientVersion, operationLocation, modelId } = JSON.parse(resumeFrom) as {\n clientVersion?: string;\n operationLocation: string;\n modelId: string;\n };\n\n if (!clientVersion || clientVersion !== SDK_VERSION) {\n throw new Error(\n [\n \"Cannot restore poller from a serialized state from a different version of the client\",\n `library (restoreFrom: '${clientVersion}', current: '${SDK_VERSION}').`,\n ].join(\" \"),\n );\n }\n\n const result = await getAnalyzeResult(ctx, operationLocation);\n\n return toDocumentAnalysisPollOperationState(\n definition,\n modelId,\n operationLocation,\n result,\n );\n },\n )\n : // Otherwise, we'll start a new operation from the initialModelId\n async (ctx: OperationContext) =>\n this._tracing.withSpan(\n \"DocumentAnalysisClient.createAnalysisPoller-start\",\n definition.options,\n async () => {\n const { operationLocation } = await startOperation(ctx.abortSignal);\n\n if (operationLocation === undefined) {\n throw new Error(\n \"Unable to start analysis operation: no Operation-Location received.\",\n );\n }\n\n const result = await getAnalyzeResult(ctx, operationLocation);\n\n return toDocumentAnalysisPollOperationState(\n definition,\n definition.initialModelId,\n operationLocation,\n result,\n );\n },\n );\n\n const poller = await lro<Result, DocumentAnalysisPollOperationState<Result>>(\n {\n init: toInit,\n poll: async (ctx, { operationLocation, modelId }) =>\n this._tracing.withSpan(\n \"DocumentAnalysisClient.createAnalysisPoller-poll\",\n {},\n async () => {\n const result = await getAnalyzeResult(ctx, operationLocation);\n\n return toDocumentAnalysisPollOperationState(\n definition,\n modelId,\n operationLocation,\n result,\n );\n },\n ),\n serialize: ({ operationLocation, modelId }) =>\n JSON.stringify({ clientVersion: SDK_VERSION, id: modelId, operationLocation }),\n },\n definition.options.updateIntervalInMs,\n definition.options.abortSignal,\n );\n\n if (definition.options.onProgress !== undefined) {\n poller.onProgress(definition.options.onProgress);\n definition.options.onProgress(poller.getOperationState());\n }\n\n return poller;\n }\n\n // #endregion\n}\n\n/**\n * Produce an appropriate pair of content-type and analyzeRequest value for the analysis request.\n * @internal\n */\nfunction toAnalyzeRequest(\n input: DocumentSource,\n):\n | [\"application/json\", AnalyzeDocumentRequest]\n | [\"application/octet-stream\", FormRecognizerRequestBody] {\n switch (input.kind) {\n case \"body\":\n return [\"application/octet-stream\", input.body];\n case \"url\":\n return [\"application/json\", { urlSource: input.url }];\n case \"base64\":\n return [\"application/json\", { base64Source: input.base64 }];\n default: {\n const __exhaust: never = input;\n throw new Error(`Unreachable 'toAnalyzeRequest' case: ${__exhaust}`);\n }\n }\n}\n\n/**\n * The input to a document analysis operation.\n */\n// type DocumentSource = DocumentBodySource | DocumentUrlSource | DocumentBase64Source;\n\nfunction source<K extends DocumentSource[\"kind\"]>(\n kind: K,\n value: Extract<DocumentSource, { kind: K }>[K & keyof Extract<DocumentSource, { kind: K }>],\n): DocumentSource {\n return {\n kind,\n [kind]: value,\n } as unknown as DocumentSource;\n}\n\n/**\n * The input to a document analysis operation.\n *\n * @internal\n */\ntype DocumentSource = {\n [K in keyof DocumentSourceTypes]: {\n /** The input kind. 
*/\n kind: K;\n } & { [_ in K]: DocumentSourceTypes[K] };\n}[keyof DocumentSourceTypes];\n\n/**\n * A map of input discriminants to concrete input types.\n *\n * @internal\n */\ninterface DocumentSourceTypes {\n /**\n * A document buffer or stream to be uploaded in the request body.\n */\n body: FormRecognizerRequestBody;\n\n /**\n * A URL to a document to be analyzed.\n */\n url: string;\n\n /**\n * The data of a document to be analyzed. This is NOT base64-encoded, but will\n * be base64-encoded by the client before uploading.\n *\n * NOTE: This is never used by the client because it is inefficient compared to direct uploads and does not currently\n * support any features that `body` does not.\n */\n base64: Uint8Array;\n}\n"]}