@azure/ai-content-understanding 1.0.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/LICENSE +21 -0
- package/README.md +591 -0
- package/dist/browser/api/contentUnderstandingContext.d.ts +15 -0
- package/dist/browser/api/contentUnderstandingContext.js +27 -0
- package/dist/browser/api/contentUnderstandingContext.js.map +1 -0
- package/dist/browser/api/index.d.ts +4 -0
- package/dist/browser/api/index.js +5 -0
- package/dist/browser/api/index.js.map +1 -0
- package/dist/browser/api/operations.d.ts +67 -0
- package/dist/browser/api/operations.js +488 -0
- package/dist/browser/api/operations.js.map +1 -0
- package/dist/browser/api/options.d.ts +94 -0
- package/dist/browser/api/options.js +4 -0
- package/dist/browser/api/options.js.map +1 -0
- package/dist/browser/contentUnderstandingClient.d.ts +65 -0
- package/dist/browser/contentUnderstandingClient.js +161 -0
- package/dist/browser/contentUnderstandingClient.js.map +1 -0
- package/dist/browser/index.d.ts +6 -0
- package/dist/browser/index.js +14 -0
- package/dist/browser/index.js.map +1 -0
- package/dist/browser/logger.d.ts +2 -0
- package/dist/browser/logger.js +5 -0
- package/dist/browser/logger.js.map +1 -0
- package/dist/browser/models/index.d.ts +2 -0
- package/dist/browser/models/index.js +4 -0
- package/dist/browser/models/index.js.map +1 -0
- package/dist/browser/models/models.d.ts +846 -0
- package/dist/browser/models/models.js +1005 -0
- package/dist/browser/models/models.js.map +1 -0
- package/dist/browser/package.json +3 -0
- package/dist/browser/static-helpers/pagingHelpers.d.ts +74 -0
- package/dist/browser/static-helpers/pagingHelpers.js +143 -0
- package/dist/browser/static-helpers/pagingHelpers.js.map +1 -0
- package/dist/browser/static-helpers/pollingHelpers.d.ts +34 -0
- package/dist/browser/static-helpers/pollingHelpers.js +95 -0
- package/dist/browser/static-helpers/pollingHelpers.js.map +1 -0
- package/dist/browser/static-helpers/serialization/get-binary-response-browser.mjs.map +1 -0
- package/dist/browser/static-helpers/serialization/get-binary-response.d.ts +10 -0
- package/dist/browser/static-helpers/serialization/get-binary-response.js +19 -0
- package/dist/browser/static-helpers/serialization/serialize-record.d.ts +2 -0
- package/dist/browser/static-helpers/serialization/serialize-record.js +22 -0
- package/dist/browser/static-helpers/serialization/serialize-record.js.map +1 -0
- package/dist/browser/static-helpers/urlTemplate.d.ts +5 -0
- package/dist/browser/static-helpers/urlTemplate.js +200 -0
- package/dist/browser/static-helpers/urlTemplate.js.map +1 -0
- package/dist/commonjs/api/contentUnderstandingContext.d.ts +15 -0
- package/dist/commonjs/api/contentUnderstandingContext.js +30 -0
- package/dist/commonjs/api/contentUnderstandingContext.js.map +1 -0
- package/dist/commonjs/api/index.d.ts +4 -0
- package/dist/commonjs/api/index.js +24 -0
- package/dist/commonjs/api/index.js.map +1 -0
- package/dist/commonjs/api/operations.d.ts +67 -0
- package/dist/commonjs/api/operations.js +535 -0
- package/dist/commonjs/api/operations.js.map +1 -0
- package/dist/commonjs/api/options.d.ts +94 -0
- package/dist/commonjs/api/options.js +5 -0
- package/dist/commonjs/api/options.js.map +1 -0
- package/dist/commonjs/contentUnderstandingClient.d.ts +65 -0
- package/dist/commonjs/contentUnderstandingClient.js +160 -0
- package/dist/commonjs/contentUnderstandingClient.js.map +1 -0
- package/dist/commonjs/index.d.ts +6 -0
- package/dist/commonjs/index.js +19 -0
- package/dist/commonjs/index.js.map +1 -0
- package/dist/commonjs/logger.d.ts +2 -0
- package/dist/commonjs/logger.js +8 -0
- package/dist/commonjs/logger.js.map +1 -0
- package/dist/commonjs/models/index.d.ts +2 -0
- package/dist/commonjs/models/index.js +8 -0
- package/dist/commonjs/models/index.js.map +1 -0
- package/dist/commonjs/models/models.d.ts +846 -0
- package/dist/commonjs/models/models.js +1103 -0
- package/dist/commonjs/models/models.js.map +1 -0
- package/dist/commonjs/package.json +3 -0
- package/dist/commonjs/static-helpers/pagingHelpers.d.ts +74 -0
- package/dist/commonjs/static-helpers/pagingHelpers.js +146 -0
- package/dist/commonjs/static-helpers/pagingHelpers.js.map +1 -0
- package/dist/commonjs/static-helpers/pollingHelpers.d.ts +34 -0
- package/dist/commonjs/static-helpers/pollingHelpers.js +98 -0
- package/dist/commonjs/static-helpers/pollingHelpers.js.map +1 -0
- package/dist/commonjs/static-helpers/serialization/get-binary-response.d.ts +10 -0
- package/dist/commonjs/static-helpers/serialization/get-binary-response.js +26 -0
- package/dist/commonjs/static-helpers/serialization/get-binary-response.js.map +1 -0
- package/dist/commonjs/static-helpers/serialization/serialize-record.d.ts +2 -0
- package/dist/commonjs/static-helpers/serialization/serialize-record.js +25 -0
- package/dist/commonjs/static-helpers/serialization/serialize-record.js.map +1 -0
- package/dist/commonjs/static-helpers/urlTemplate.d.ts +5 -0
- package/dist/commonjs/static-helpers/urlTemplate.js +203 -0
- package/dist/commonjs/static-helpers/urlTemplate.js.map +1 -0
- package/dist/commonjs/tsdoc-metadata.json +11 -0
- package/dist/esm/api/contentUnderstandingContext.d.ts +15 -0
- package/dist/esm/api/contentUnderstandingContext.js +27 -0
- package/dist/esm/api/contentUnderstandingContext.js.map +1 -0
- package/dist/esm/api/index.d.ts +4 -0
- package/dist/esm/api/index.js +5 -0
- package/dist/esm/api/index.js.map +1 -0
- package/dist/esm/api/operations.d.ts +67 -0
- package/dist/esm/api/operations.js +488 -0
- package/dist/esm/api/operations.js.map +1 -0
- package/dist/esm/api/options.d.ts +94 -0
- package/dist/esm/api/options.js +4 -0
- package/dist/esm/api/options.js.map +1 -0
- package/dist/esm/contentUnderstandingClient.d.ts +65 -0
- package/dist/esm/contentUnderstandingClient.js +161 -0
- package/dist/esm/contentUnderstandingClient.js.map +1 -0
- package/dist/esm/index.d.ts +6 -0
- package/dist/esm/index.js +14 -0
- package/dist/esm/index.js.map +1 -0
- package/dist/esm/logger.d.ts +2 -0
- package/dist/esm/logger.js +5 -0
- package/dist/esm/logger.js.map +1 -0
- package/dist/esm/models/index.d.ts +2 -0
- package/dist/esm/models/index.js +4 -0
- package/dist/esm/models/index.js.map +1 -0
- package/dist/esm/models/models.d.ts +846 -0
- package/dist/esm/models/models.js +1005 -0
- package/dist/esm/models/models.js.map +1 -0
- package/dist/esm/package.json +3 -0
- package/dist/esm/static-helpers/pagingHelpers.d.ts +74 -0
- package/dist/esm/static-helpers/pagingHelpers.js +143 -0
- package/dist/esm/static-helpers/pagingHelpers.js.map +1 -0
- package/dist/esm/static-helpers/pollingHelpers.d.ts +34 -0
- package/dist/esm/static-helpers/pollingHelpers.js +95 -0
- package/dist/esm/static-helpers/pollingHelpers.js.map +1 -0
- package/dist/esm/static-helpers/serialization/get-binary-response.d.ts +10 -0
- package/dist/esm/static-helpers/serialization/get-binary-response.js +23 -0
- package/dist/esm/static-helpers/serialization/get-binary-response.js.map +1 -0
- package/dist/esm/static-helpers/serialization/serialize-record.d.ts +2 -0
- package/dist/esm/static-helpers/serialization/serialize-record.js +22 -0
- package/dist/esm/static-helpers/serialization/serialize-record.js.map +1 -0
- package/dist/esm/static-helpers/urlTemplate.d.ts +5 -0
- package/dist/esm/static-helpers/urlTemplate.js +200 -0
- package/dist/esm/static-helpers/urlTemplate.js.map +1 -0
- package/dist/react-native/api/contentUnderstandingContext.d.ts +15 -0
- package/dist/react-native/api/contentUnderstandingContext.js +27 -0
- package/dist/react-native/api/contentUnderstandingContext.js.map +1 -0
- package/dist/react-native/api/index.d.ts +4 -0
- package/dist/react-native/api/index.js +5 -0
- package/dist/react-native/api/index.js.map +1 -0
- package/dist/react-native/api/operations.d.ts +67 -0
- package/dist/react-native/api/operations.js +488 -0
- package/dist/react-native/api/operations.js.map +1 -0
- package/dist/react-native/api/options.d.ts +94 -0
- package/dist/react-native/api/options.js +4 -0
- package/dist/react-native/api/options.js.map +1 -0
- package/dist/react-native/contentUnderstandingClient.d.ts +65 -0
- package/dist/react-native/contentUnderstandingClient.js +161 -0
- package/dist/react-native/contentUnderstandingClient.js.map +1 -0
- package/dist/react-native/index.d.ts +6 -0
- package/dist/react-native/index.js +14 -0
- package/dist/react-native/index.js.map +1 -0
- package/dist/react-native/logger.d.ts +2 -0
- package/dist/react-native/logger.js +5 -0
- package/dist/react-native/logger.js.map +1 -0
- package/dist/react-native/models/index.d.ts +2 -0
- package/dist/react-native/models/index.js +4 -0
- package/dist/react-native/models/index.js.map +1 -0
- package/dist/react-native/models/models.d.ts +846 -0
- package/dist/react-native/models/models.js +1005 -0
- package/dist/react-native/models/models.js.map +1 -0
- package/dist/react-native/package.json +3 -0
- package/dist/react-native/static-helpers/pagingHelpers.d.ts +74 -0
- package/dist/react-native/static-helpers/pagingHelpers.js +143 -0
- package/dist/react-native/static-helpers/pagingHelpers.js.map +1 -0
- package/dist/react-native/static-helpers/pollingHelpers.d.ts +34 -0
- package/dist/react-native/static-helpers/pollingHelpers.js +95 -0
- package/dist/react-native/static-helpers/pollingHelpers.js.map +1 -0
- package/dist/react-native/static-helpers/serialization/get-binary-response.d.ts +10 -0
- package/dist/react-native/static-helpers/serialization/get-binary-response.js +23 -0
- package/dist/react-native/static-helpers/serialization/get-binary-response.js.map +1 -0
- package/dist/react-native/static-helpers/serialization/serialize-record.d.ts +2 -0
- package/dist/react-native/static-helpers/serialization/serialize-record.js +22 -0
- package/dist/react-native/static-helpers/serialization/serialize-record.js.map +1 -0
- package/dist/react-native/static-helpers/urlTemplate.d.ts +5 -0
- package/dist/react-native/static-helpers/urlTemplate.js +200 -0
- package/dist/react-native/static-helpers/urlTemplate.js.map +1 -0
- package/package.json +180 -0
package/LICENSE
ADDED
|
@@ -0,0 +1,21 @@
|
|
|
1
|
+
Copyright (c) Microsoft Corporation.
|
|
2
|
+
|
|
3
|
+
MIT License
|
|
4
|
+
|
|
5
|
+
Permission is hereby granted, free of charge, to any person obtaining a copy
|
|
6
|
+
of this software and associated documentation files (the "Software"), to deal
|
|
7
|
+
in the Software without restriction, including without limitation the rights
|
|
8
|
+
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
|
9
|
+
copies of the Software, and to permit persons to whom the Software is
|
|
10
|
+
furnished to do so, subject to the following conditions:
|
|
11
|
+
|
|
12
|
+
The above copyright notice and this permission notice shall be included in all
|
|
13
|
+
copies or substantial portions of the Software.
|
|
14
|
+
|
|
15
|
+
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
|
16
|
+
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
|
17
|
+
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
|
18
|
+
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
|
19
|
+
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
|
20
|
+
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
|
21
|
+
SOFTWARE.
|
package/README.md
ADDED
|
@@ -0,0 +1,591 @@
|
|
|
1
|
+
# Azure AI Content Understanding client library for JavaScript
|
|
2
|
+
|
|
3
|
+
Azure AI Content Understanding is a multimodal AI service that extracts semantic content from documents, video, audio, and image files. It transforms unstructured content into structured, machine-readable data optimized for retrieval-augmented generation (RAG) and automated workflows.
|
|
4
|
+
|
|
5
|
+
Use the client library for Azure AI Content Understanding to:
|
|
6
|
+
|
|
7
|
+
* **Extract document content** - Extract text, tables, figures, layout information, and structured markdown from documents (PDF, images with text or hand-written text, Office documents and more)
|
|
8
|
+
* **Transcribe and analyze audio** - Convert audio content into searchable transcripts with speaker diarization and timing information
|
|
9
|
+
* **Analyze video content** - Extract visual frames, transcribe audio tracks, and generate structured summaries from video files
|
|
10
|
+
* **Leverage prebuilt analyzers** - Use production-ready prebuilt analyzers across industries including finance and tax (invoices, receipts, tax forms), identity verification (passports, driver's licenses), mortgage and lending (loan applications, appraisals), procurement and contracts (purchase orders, agreements), and utilities (billing statements)
|
|
11
|
+
* **Create custom analyzers** - Build domain-specific analyzers for specialized content extraction needs across all four modalities (documents, video, audio, and images)
|
|
12
|
+
* **Classify documents and video** - Automatically categorize and extract information from documents and video by type
|
|
13
|
+
|
|
14
|
+
Key links:
|
|
15
|
+
|
|
16
|
+
- [Source code](https://github.com/Azure/azure-sdk-for-js/tree/main/sdk/contentunderstanding/ai-content-understanding)
|
|
17
|
+
- Package (NPM)
|
|
18
|
+
- [Product documentation][product_docs]
|
|
19
|
+
- [Samples][samples_directory]
|
|
20
|
+
|
|
21
|
+
## Getting started
|
|
22
|
+
|
|
23
|
+
### Currently supported environments
|
|
24
|
+
|
|
25
|
+
- [LTS versions of Node.js](https://github.com/nodejs/release#release-schedule)
|
|
26
|
+
- Latest versions of Safari, Chrome, Edge and Firefox.
|
|
27
|
+
|
|
28
|
+
See our [support policy](https://github.com/Azure/azure-sdk-for-js/blob/main/SUPPORT.md) for more details.
|
|
29
|
+
|
|
30
|
+
### Prerequisites
|
|
31
|
+
|
|
32
|
+
- An [Azure subscription][azure_sub]
|
|
33
|
+
- A [Microsoft Foundry resource][cu_quickstart] created in a [supported region][cu_region_support]
|
|
34
|
+
|
|
35
|
+
### Install the `@azure/ai-content-understanding` package
|
|
36
|
+
|
|
37
|
+
Install the Azure Content Understanding client library for JavaScript with `npm`:
|
|
38
|
+
|
|
39
|
+
```bash
|
|
40
|
+
npm install @azure/ai-content-understanding
|
|
41
|
+
```
|
|
42
|
+
|
|
43
|
+
### Configure your Microsoft Foundry resource
|
|
44
|
+
|
|
45
|
+
Before using the Content Understanding SDK, you need to set up a Microsoft Foundry resource and deploy the required large language models. Content Understanding currently uses OpenAI GPT models (such as gpt-4.1, gpt-4.1-mini, and text-embedding-3-large).
|
|
46
|
+
|
|
47
|
+
#### Step 1: Create Microsoft Foundry resource
|
|
48
|
+
|
|
49
|
+
> **Important:** You must create your Microsoft Foundry resource in a region that supports Content Understanding. For a list of available regions, see [Azure Content Understanding region and language support][cu_region_support].
|
|
50
|
+
|
|
51
|
+
1. Follow the steps in the [Azure Content Understanding quickstart][cu_quickstart] to create a Microsoft Foundry resource in the Azure portal
|
|
52
|
+
2. Get your Foundry resource's endpoint URL from Azure Portal:
|
|
53
|
+
- Go to [Azure Portal][azure_portal]
|
|
54
|
+
- Navigate to your Microsoft Foundry resource
|
|
55
|
+
- Go to **Resource Management** > **Keys and Endpoint**
|
|
56
|
+
- Copy the **Endpoint** URL (typically `https://<your-resource-name>.services.ai.azure.com/`)
|
|
57
|
+
|
|
58
|
+
**Important: Grant Required Permissions**
|
|
59
|
+
|
|
60
|
+
After creating your Microsoft Foundry resource, you must grant yourself the **Cognitive Services User** role to enable API calls for setting default model deployments:
|
|
61
|
+
|
|
62
|
+
1. Go to [Azure Portal][azure_portal]
|
|
63
|
+
2. Navigate to your Microsoft Foundry resource
|
|
64
|
+
3. Go to **Access Control (IAM)** in the left menu
|
|
65
|
+
4. Click **Add** > **Add role assignment**
|
|
66
|
+
5. Select the **Cognitive Services User** role
|
|
67
|
+
6. Assign it to yourself (or the user/service principal that will run the application)
|
|
68
|
+
|
|
69
|
+
> **Note:** This role assignment is required even if you are the owner of the resource. Without this role, you will not be able to call the Content Understanding API to configure model deployments for prebuilt analyzers.
|
|
70
|
+
|
|
71
|
+
#### Step 2: Deploy required models
|
|
72
|
+
|
|
73
|
+
**Important:** The prebuilt and custom analyzers require large language model deployments. You must deploy at least these models before using prebuilt analyzers and custom analyzers:
|
|
74
|
+
- `prebuilt-documentSearch`, `prebuilt-imageSearch`, `prebuilt-audioSearch`, `prebuilt-videoSearch` require **gpt-4.1-mini** and **text-embedding-3-large**
|
|
75
|
+
- Other prebuilt analyzers like `prebuilt-invoice`, `prebuilt-receipt` require **gpt-4.1** and **text-embedding-3-large**
|
|
76
|
+
|
|
77
|
+
To deploy a model:
|
|
78
|
+
|
|
79
|
+
1. In Microsoft Foundry, go to **Deployments** > **Deploy model** > **Deploy base model**
|
|
80
|
+
2. Search for and select the model you want to deploy. Currently, prebuilt analyzers require models such as `gpt-4.1`, `gpt-4.1-mini`, and `text-embedding-3-large`
|
|
81
|
+
3. Complete the deployment with your preferred settings
|
|
82
|
+
4. Note the deployment name you chose (by convention, use the model name as the deployment name, e.g., `gpt-4.1` for the `gpt-4.1` model)
|
|
83
|
+
|
|
84
|
+
Repeat this process for each model required by your prebuilt analyzers.
|
|
85
|
+
|
|
86
|
+
For more information on deploying models, see [Create model deployments in Microsoft Foundry portal][deploy_models_docs].
|
|
87
|
+
|
|
88
|
+
#### Step 3: Configure model deployments (required for prebuilt analyzers)
|
|
89
|
+
|
|
90
|
+
> **IMPORTANT:** This is a **one-time setup per Microsoft Foundry resource** that maps your deployed models to those required by the prebuilt analyzers and custom models. If you have multiple Microsoft Foundry resources, you need to configure each one separately.
|
|
91
|
+
|
|
92
|
+
You need to configure the default model mappings in your Microsoft Foundry resource. This can be done programmatically using the SDK. The configuration maps your deployed models (currently gpt-4.1, gpt-4.1-mini, and text-embedding-3-large) to the large language models required by prebuilt analyzers.
|
|
93
|
+
|
|
94
|
+
To configure model deployments using code, see the [Update Defaults sample][sample_update_defaults] for a complete example. Here's a quick overview:
|
|
95
|
+
|
|
96
|
+
```typescript snippet:ignore
|
|
97
|
+
import { ContentUnderstandingClient } from "@azure/ai-content-understanding";
|
|
98
|
+
import { DefaultAzureCredential } from "@azure/identity";
|
|
99
|
+
|
|
100
|
+
const endpoint = process.env["CONTENTUNDERSTANDING_ENDPOINT"]!;
|
|
101
|
+
const client = new ContentUnderstandingClient(endpoint, new DefaultAzureCredential());
|
|
102
|
+
|
|
103
|
+
// Map your deployed models to the models required by prebuilt analyzers
|
|
104
|
+
const updatedDefaults = await client.updateDefaults({
|
|
105
|
+
modelDeployments: {
|
|
106
|
+
"gpt-4.1": process.env["GPT_4_1_DEPLOYMENT"]!,
|
|
107
|
+
"gpt-4.1-mini": process.env["GPT_4_1_MINI_DEPLOYMENT"]!,
|
|
108
|
+
"text-embedding-3-large": process.env["TEXT_EMBEDDING_3_LARGE_DEPLOYMENT"]!,
|
|
109
|
+
},
|
|
110
|
+
});
|
|
111
|
+
|
|
112
|
+
console.log("Model deployments configured successfully!");
|
|
113
|
+
```
|
|
114
|
+
|
|
115
|
+
> **Note:** The configuration is persisted in your Microsoft Foundry resource, so you only need to run this once per resource (or whenever you change your deployment names).
|
|
116
|
+
|
|
117
|
+
### Authenticate the client
|
|
118
|
+
|
|
119
|
+
To authenticate the client, you need your Microsoft Foundry resource endpoint and credentials. You can use either an API key or Microsoft Entra ID authentication.
|
|
120
|
+
|
|
121
|
+
#### Using DefaultAzureCredential
|
|
122
|
+
|
|
123
|
+
The simplest way to authenticate is using `DefaultAzureCredential`, which supports multiple authentication methods and works well in both local development and production environments.
|
|
124
|
+
|
|
125
|
+
To use the [DefaultAzureCredential][defaultazurecredential] provider shown below, or other credential providers provided with the Azure SDK, please install the `@azure/identity` package:
|
|
126
|
+
|
|
127
|
+
```bash
|
|
128
|
+
npm install @azure/identity
|
|
129
|
+
```
|
|
130
|
+
|
|
131
|
+
Using Node.js and Node-like environments, you can use the `DefaultAzureCredential` class to authenticate the client.
|
|
132
|
+
|
|
133
|
+
```ts snippet:ReadmeSampleCreateClient_Node
|
|
134
|
+
import { ContentUnderstandingClient } from "@azure/ai-content-understanding";
|
|
135
|
+
import { DefaultAzureCredential } from "@azure/identity";
|
|
136
|
+
|
|
137
|
+
const client = new ContentUnderstandingClient("<endpoint>", new DefaultAzureCredential());
|
|
138
|
+
```
|
|
139
|
+
|
|
140
|
+
For browser environments, use the `InteractiveBrowserCredential` from the `@azure/identity` package to authenticate.
|
|
141
|
+
|
|
142
|
+
```ts snippet:ReadmeSampleCreateClient_Browser
|
|
143
|
+
import { InteractiveBrowserCredential } from "@azure/identity";
|
|
144
|
+
import { ContentUnderstandingClient } from "@azure/ai-content-understanding";
|
|
145
|
+
|
|
146
|
+
const credential = new InteractiveBrowserCredential({
|
|
147
|
+
tenantId: "<YOUR_TENANT_ID>",
|
|
148
|
+
clientId: "<YOUR_CLIENT_ID>",
|
|
149
|
+
});
|
|
150
|
+
const client = new ContentUnderstandingClient("<endpoint>", credential);
|
|
151
|
+
```
|
|
152
|
+
|
|
153
|
+
#### Using API key
|
|
154
|
+
|
|
155
|
+
You can also authenticate using an API key from your Microsoft Foundry resource:
|
|
156
|
+
|
|
157
|
+
```typescript snippet:ignore
|
|
158
|
+
import { ContentUnderstandingClient } from "@azure/ai-content-understanding";
|
|
159
|
+
import { AzureKeyCredential } from "@azure/core-auth";
|
|
160
|
+
|
|
161
|
+
const endpoint = process.env["CONTENTUNDERSTANDING_ENDPOINT"]!;
|
|
162
|
+
const apiKey = process.env["CONTENTUNDERSTANDING_KEY"]!;
|
|
163
|
+
const client = new ContentUnderstandingClient(endpoint, new AzureKeyCredential(apiKey));
|
|
164
|
+
```
|
|
165
|
+
|
|
166
|
+
To get your API key:
|
|
167
|
+
1. Go to [Azure Portal][azure_portal]
|
|
168
|
+
2. Navigate to your Microsoft Foundry resource
|
|
169
|
+
3. Go to **Resource Management** > **Keys and Endpoint**
|
|
170
|
+
4. Copy one of the **Keys** (Key1 or Key2)
|
|
171
|
+
|
|
172
|
+
For more information on authentication, see [Azure Identity client library][azure_identity].
|
|
173
|
+
|
|
174
|
+
### JavaScript Bundle
|
|
175
|
+
To use this client library in the browser, first you need to use a bundler. For details on how to do this, please refer to our [bundling documentation](https://aka.ms/AzureSDKBundling).
|
|
176
|
+
|
|
177
|
+
## Key concepts
|
|
178
|
+
|
|
179
|
+
### Prebuilt analyzers
|
|
180
|
+
|
|
181
|
+
Content Understanding provides a rich set of prebuilt analyzers that are ready to use without any configuration. These analyzers are powered by knowledge bases of thousands of real-world document examples, enabling them to understand document structure and adapt to variations in format and content.
|
|
182
|
+
|
|
183
|
+
Prebuilt analyzers are organized into several categories:
|
|
184
|
+
|
|
185
|
+
* **RAG analyzers** - Optimized for retrieval-augmented generation scenarios with semantic analysis and markdown extraction. These analyzers return markdown and a one-paragraph `Summary` for each content item:
|
|
186
|
+
* **`prebuilt-documentSearch`** - Extracts content from documents (PDF, images, Office documents) with layout preservation, table detection, figure analysis, and structured markdown output. Optimized for RAG scenarios.
|
|
187
|
+
* **`prebuilt-imageSearch`** - Analyzes standalone images and returns a one-paragraph description of the image content. Optimized for image understanding and search scenarios. For images that contain text (including hand-written text), use `prebuilt-documentSearch`.
|
|
188
|
+
* **`prebuilt-audioSearch`** - Transcribes audio content with speaker diarization, timing information, and conversation summaries. Supports multilingual transcription.
|
|
189
|
+
* **`prebuilt-videoSearch`** - Analyzes video content with visual frame extraction, audio transcription, and structured summaries. Provides temporal alignment of visual and audio content and can return multiple segments per video.
|
|
190
|
+
* **Content extraction analyzers** - Focus on OCR and layout analysis (e.g., `prebuilt-read`, `prebuilt-layout`)
|
|
191
|
+
* **Base analyzers** - Fundamental content processing capabilities used as parent analyzers for custom analyzers (e.g., `prebuilt-document`, `prebuilt-image`, `prebuilt-audio`, `prebuilt-video`)
|
|
192
|
+
* **Domain-specific analyzers** - Preconfigured analyzers for common document categories including financial documents (invoices, receipts, bank statements), identity documents (passports, driver's licenses), tax forms, mortgage documents, and contracts, and utilities (billing statements)
|
|
193
|
+
* **Utility analyzers** - Specialized tools for schema generation and field extraction (e.g., `prebuilt-documentFieldSchema`, `prebuilt-documentFields`)
|
|
194
|
+
|
|
195
|
+
For a complete list of available prebuilt analyzers and their capabilities, see the [Prebuilt analyzers documentation][prebuilt_analyzers_docs].
|
|
196
|
+
|
|
197
|
+
### Custom analyzers
|
|
198
|
+
|
|
199
|
+
You can create custom analyzers with specific field schemas for multi-modal content processing (documents, images, audio, video). Custom analyzers allow you to extract domain-specific information tailored to your use case.
|
|
200
|
+
|
|
201
|
+
### Content types
|
|
202
|
+
|
|
203
|
+
The API returns different content types based on the input:
|
|
204
|
+
|
|
205
|
+
* **`document`** - For document files (PDF, HTML, images, Office documents such as Word, Excel, PowerPoint, and more). Provides basic information such as page count and MIME type. Retrieve detailed information including pages, tables, figures, paragraphs, and many others.
|
|
206
|
+
* **`audioVisual`** - For audio and video files. Provides basic information such as timing information (start/end times) and frame dimensions (for video). Retrieve detailed information including transcript phrases, timing information, and for video, key frame references and more.
|
|
207
|
+
|
|
208
|
+
### Asynchronous operations
|
|
209
|
+
|
|
210
|
+
Content Understanding operations are asynchronous long-running operations. The workflow is:
|
|
211
|
+
|
|
212
|
+
1. **Begin Analysis** - Start the analysis operation (returns immediately with an operation location)
|
|
213
|
+
2. **Poll for Results** - Poll the operation location until the analysis completes
|
|
214
|
+
3. **Process Results** - Extract and display the structured results
|
|
215
|
+
|
|
216
|
+
The SDK provides poller types that handle polling automatically when using `pollUntilDone()`. For analysis operations, the SDK returns a poller that provides access to the operation ID. This operation ID can be used with `getResultFile` and `deleteResult` methods.
|
|
217
|
+
|
|
218
|
+
### Main classes
|
|
219
|
+
|
|
220
|
+
* **`ContentUnderstandingClient`** - The main client for analyzing content, as well as creating, managing, and configuring analyzers
|
|
221
|
+
* **`AnalysisResult`** - Contains the structured results of an analysis operation, including content elements, markdown, and metadata
|
|
222
|
+
|
|
223
|
+
### Thread safety
|
|
224
|
+
|
|
225
|
+
We guarantee that all client instance methods are thread-safe and independent of each other. This ensures that the recommendation of reusing client instances is always safe, even across threads.
|
|
226
|
+
|
|
227
|
+
### Additional concepts
|
|
228
|
+
|
|
229
|
+
[Client options][client_options] |
|
|
230
|
+
[Accessing the response][accessing_response] |
|
|
231
|
+
[Long-running operations][long_running_operations] |
|
|
232
|
+
[Handling failures][handling_failures] |
|
|
233
|
+
[Diagnostics][diagnostics] |
|
|
234
|
+
[Client lifetime][client_lifetime]
|
|
235
|
+
|
|
236
|
+
## Examples
|
|
237
|
+
|
|
238
|
+
You can familiarize yourself with different APIs using [Samples][samples_directory].
|
|
239
|
+
|
|
240
|
+
The samples demonstrate:
|
|
241
|
+
|
|
242
|
+
* **Configuration** - Configure model deployment defaults for prebuilt analyzers and custom analyzers
|
|
243
|
+
* **Document Content Extraction** - Extract structured markdown content from PDFs and images using `prebuilt-documentSearch`, optimized for RAG (Retrieval-Augmented Generation) applications
|
|
244
|
+
* **Multi-Modal Content Analysis** - Analyze content from URLs across all modalities: extract markdown and summaries from documents, images, audio, and video using `prebuilt-documentSearch`, `prebuilt-imageSearch`, `prebuilt-audioSearch`, and `prebuilt-videoSearch`
|
|
245
|
+
* **Domain-Specific Analysis** - Extract structured fields from invoices using `prebuilt-invoice`
|
|
246
|
+
* **Advanced Document Features** - Extract charts, hyperlinks, formulas, and annotations from documents
|
|
247
|
+
* **Custom Analyzers** - Create custom analyzers with field schemas for specialized extraction needs
|
|
248
|
+
* **Document Classification** - Create and use classifiers to categorize documents
|
|
249
|
+
* **Analyzer Management** - Get, list, update, copy, and delete analyzers
|
|
250
|
+
* **Result Management** - Retrieve result files from video analysis and delete analysis results
|
|
251
|
+
|
|
252
|
+
### Extract markdown content from documents
|
|
253
|
+
|
|
254
|
+
Use the `prebuilt-documentSearch` analyzer to extract markdown content from documents:
|
|
255
|
+
|
|
256
|
+
```typescript snippet:ignore
|
|
257
|
+
import { ContentUnderstandingClient } from "@azure/ai-content-understanding";
|
|
258
|
+
import { DefaultAzureCredential } from "@azure/identity";
|
|
259
|
+
|
|
260
|
+
const endpoint = process.env["CONTENTUNDERSTANDING_ENDPOINT"]!;
|
|
261
|
+
const client = new ContentUnderstandingClient(endpoint, new DefaultAzureCredential());
|
|
262
|
+
|
|
263
|
+
const documentUrl = "https://example.com/sample_invoice.pdf";
|
|
264
|
+
|
|
265
|
+
// Analyze document using prebuilt-documentSearch
|
|
266
|
+
const poller = client.analyze("prebuilt-documentSearch", [{ url: documentUrl }]);
|
|
267
|
+
const result = await poller.pollUntilDone();
|
|
268
|
+
|
|
269
|
+
// Extract markdown content
|
|
270
|
+
if (result.contents && result.contents.length > 0) {
|
|
271
|
+
const content = result.contents[0];
|
|
272
|
+
console.log("Markdown Content:");
|
|
273
|
+
console.log(content.markdown);
|
|
274
|
+
|
|
275
|
+
// Access document-specific properties
|
|
276
|
+
if (content.kind === "document") {
|
|
277
|
+
console.log(`Pages: ${content.startPageNumber} - ${content.endPageNumber}`);
|
|
278
|
+
}
|
|
279
|
+
}
|
|
280
|
+
```
|
|
281
|
+
|
|
282
|
+
### Extract structured fields from invoices
|
|
283
|
+
|
|
284
|
+
Use the `prebuilt-invoice` analyzer to extract structured invoice fields:
|
|
285
|
+
|
|
286
|
+
```typescript snippet:ignore
|
|
287
|
+
import {
|
|
288
|
+
ContentUnderstandingClient,
|
|
289
|
+
type DocumentContent,
|
|
290
|
+
type ContentFieldUnion,
|
|
291
|
+
} from "@azure/ai-content-understanding";
|
|
292
|
+
import { DefaultAzureCredential } from "@azure/identity";
|
|
293
|
+
|
|
294
|
+
const endpoint = process.env["CONTENTUNDERSTANDING_ENDPOINT"]!;
|
|
295
|
+
const client = new ContentUnderstandingClient(endpoint, new DefaultAzureCredential());
|
|
296
|
+
|
|
297
|
+
const invoiceUrl = "https://example.com/invoice.pdf";
|
|
298
|
+
|
|
299
|
+
// Analyze invoice using prebuilt-invoice analyzer
|
|
300
|
+
const poller = client.analyze("prebuilt-invoice", [{ url: invoiceUrl }]);
|
|
301
|
+
const result = await poller.pollUntilDone();
|
|
302
|
+
|
|
303
|
+
if (result.contents && result.contents.length > 0) {
|
|
304
|
+
const content = result.contents[0] as DocumentContent;
|
|
305
|
+
|
|
306
|
+
// Helper function to extract field values
|
|
307
|
+
const getFieldValue = (field: ContentFieldUnion | undefined): string | undefined => {
|
|
308
|
+
if (!field) return undefined;
|
|
309
|
+
if ("valueString" in field) return field.valueString;
|
|
310
|
+
if ("valueDate" in field) return field.valueDate;
|
|
311
|
+
if ("valueNumber" in field) return String(field.valueNumber);
|
|
312
|
+
return undefined;
|
|
313
|
+
};
|
|
314
|
+
|
|
315
|
+
// Extract invoice fields
|
|
316
|
+
const customerName = getFieldValue(content.fields?.["CustomerName"]);
|
|
317
|
+
const invoiceTotal = getFieldValue(content.fields?.["InvoiceTotal"]);
|
|
318
|
+
const invoiceDate = getFieldValue(content.fields?.["InvoiceDate"]);
|
|
319
|
+
|
|
320
|
+
console.log(`Customer Name: ${customerName ?? "(None)"}`);
|
|
321
|
+
console.log(`Invoice Total: ${invoiceTotal ?? "(None)"}`);
|
|
322
|
+
console.log(`Invoice Date: ${invoiceDate ?? "(None)"}`);
|
|
323
|
+
}
|
|
324
|
+
```
|
|
325
|
+
|
|
326
|
+
See the [samples directory][samples_directory] for complete examples.
|
|
327
|
+
|
|
328
|
+
## Troubleshooting
|
|
329
|
+
|
|
330
|
+
### Common issues
|
|
331
|
+
|
|
332
|
+
**Error: "Access denied due to invalid subscription key or wrong API endpoint"**
|
|
333
|
+
- Verify your endpoint URL is correct and includes the trailing slash
|
|
334
|
+
- Ensure your API key is valid or that your Microsoft Entra ID credentials have the correct permissions
|
|
335
|
+
- Make sure you have the **Cognitive Services User** role assigned to your account
|
|
336
|
+
|
|
337
|
+
**Error: "Model deployment not found" or "Default model deployment not configured"**
|
|
338
|
+
- Ensure you have deployed the required models (gpt-4.1, gpt-4.1-mini, text-embedding-3-large) in Microsoft Foundry
|
|
339
|
+
- Verify you have configured the default model deployments (see [Configure Model Deployments](#step-3-configure-model-deployments-required-for-prebuilt-analyzers))
|
|
340
|
+
- Check that your deployment names match what you configured in the defaults
|
|
341
|
+
|
|
342
|
+
**Error: "Operation failed" or timeout**
|
|
343
|
+
- Content Understanding operations are asynchronous and may take time to complete
|
|
344
|
+
- Ensure you are properly polling for results using `pollUntilDone()` on the poller object
|
|
345
|
+
- Check the operation status for more details about the failure
|
|
346
|
+
|
|
347
|
+
### Logging
|
|
348
|
+
|
|
349
|
+
Enabling logging may help uncover useful information about failures. In order to see a log of HTTP requests and responses, set the `AZURE_LOG_LEVEL` environment variable to `info`. Alternatively, logging can be enabled at runtime by calling `setLogLevel` from the `@azure/logger` package:
|
|
350
|
+
|
|
351
|
+
```ts snippet:SetLogLevel
|
|
352
|
+
import { setLogLevel } from "@azure/logger";
|
|
353
|
+
|
|
354
|
+
setLogLevel("info");
|
|
355
|
+
```
|
|
356
|
+
|
|
357
|
+
For more detailed instructions on how to enable logs, you can look at the [@azure/logger package docs](https://github.com/Azure/azure-sdk-for-js/tree/main/sdk/core/logger).
|
|
358
|
+
|
|
359
|
+
## Testing
|
|
360
|
+
|
|
361
|
+
This SDK includes comprehensive tests that can be run in different modes.
|
|
362
|
+
|
|
363
|
+
### Quick start
|
|
364
|
+
|
|
365
|
+
```bash
|
|
366
|
+
# Install dependencies
|
|
367
|
+
pnpm install
|
|
368
|
+
|
|
369
|
+
# Build the SDK
|
|
370
|
+
npx turbo build --filter=@azure/ai-content-understanding...
|
|
371
|
+
|
|
372
|
+
# Run tests in playback mode (no Azure resources needed)
|
|
373
|
+
pnpm test
|
|
374
|
+
```
|
|
375
|
+
|
|
376
|
+
### Test modes
|
|
377
|
+
|
|
378
|
+
- **Playback Mode** (default): Uses pre-recorded HTTP interactions, no Azure resources required
|
|
379
|
+
- **Record Mode**: Runs against live Azure services and records interactions for future playback
|
|
380
|
+
- **Live Mode**: Runs against live Azure services without recording
|
|
381
|
+
|
|
382
|
+
### Setting up the environment for live/record tests
|
|
383
|
+
|
|
384
|
+
1. Copy `test/sample.env` to `test/.env`:
|
|
385
|
+
|
|
386
|
+
```bash
|
|
387
|
+
cp test/sample.env test/.env
|
|
388
|
+
```
|
|
389
|
+
|
|
390
|
+
2. Edit `test/.env` and fill in your actual values:
|
|
391
|
+
- `CONTENTUNDERSTANDING_ENDPOINT`: Your Microsoft Foundry resource endpoint
|
|
392
|
+
- `CONTENTUNDERSTANDING_KEY`: Your API key (optional if using DefaultAzureCredential)
|
|
393
|
+
- Model deployment names (required for prebuilt analyzers)
|
|
394
|
+
|
|
395
|
+
### Run tests in record mode
|
|
396
|
+
|
|
397
|
+
To record new test interactions or update existing ones:
|
|
398
|
+
|
|
399
|
+
```bash
|
|
400
|
+
# Run tests in record mode
|
|
401
|
+
TEST_MODE=record pnpm test
|
|
402
|
+
```
|
|
403
|
+
|
|
404
|
+
### Run tests in playback mode
|
|
405
|
+
|
|
406
|
+
To run tests without Azure resources (using pre-recorded interactions):
|
|
407
|
+
|
|
408
|
+
```bash
|
|
409
|
+
# Simply run tests (playback is the default mode)
|
|
410
|
+
pnpm test
|
|
411
|
+
|
|
412
|
+
# Or explicitly set playback mode
|
|
413
|
+
TEST_MODE=playback pnpm test
|
|
414
|
+
```
|
|
415
|
+
|
|
416
|
+
### Package-scoped / faster workflows
|
|
417
|
+
|
|
418
|
+
- Build only this package and its dependencies:
|
|
419
|
+
|
|
420
|
+
```bash
|
|
421
|
+
npx turbo build --filter=@azure/ai-content-understanding... --token 1
|
|
422
|
+
```
|
|
423
|
+
|
|
424
|
+
- Run only Node tests for faster iteration (skip browser tests):
|
|
425
|
+
|
|
426
|
+
```bash
|
|
427
|
+
TEST_MODE=record pnpm test:node # or TEST_MODE=playback pnpm test:node
|
|
428
|
+
```
|
|
429
|
+
|
|
430
|
+
### Environment variables
|
|
431
|
+
|
|
432
|
+
You can set credentials in multiple ways:
|
|
433
|
+
|
|
434
|
+
1. **Preferred**: Create `test/.env` by copying `test/sample.env` and filling your values
|
|
435
|
+
2. **Fallback**: Place a `.env` at the package root (same directory as `package.json`)
|
|
436
|
+
3. **Shell export**: Export credentials directly in your shell:
|
|
437
|
+
|
|
438
|
+
```bash
|
|
439
|
+
export CONTENTUNDERSTANDING_ENDPOINT="https://<your-resource>.services.ai.azure.com/"
|
|
440
|
+
export CONTENTUNDERSTANDING_KEY="<your_key_here>"
|
|
441
|
+
TEST_MODE=record pnpm test:node
|
|
442
|
+
```
|
|
443
|
+
|
|
444
|
+
### Debug tips
|
|
445
|
+
|
|
446
|
+
When running tests in record mode, watch for debug lines printed by the test setup:
|
|
447
|
+
|
|
448
|
+
```
|
|
449
|
+
DEBUG ENV ENDPOINT DEFINED: true
|
|
450
|
+
DEBUG ENV KEY DEFINED: true
|
|
451
|
+
```
|
|
452
|
+
|
|
453
|
+
> **Important:** Do NOT commit real keys. Keep `test/sample.env` as the template and ensure `test/.env` is in your `.gitignore`.
|
|
454
|
+
|
|
455
|
+
### Troubleshooting tests
|
|
456
|
+
|
|
457
|
+
- **"key must be a non-empty string"**: The test process couldn't find your `CONTENTUNDERSTANDING_KEY`. Ensure `test/.env` or package-root `.env` is present and contains the key (or export it in your shell) before running tests.
|
|
458
|
+
- **"Invalid request" LRO errors**: Ensure your service/region supports the analyzer used by the tests and that network access is available for URL-based inputs.
|
|
459
|
+
|
|
460
|
+
### Running Samples Locally
|
|
461
|
+
|
|
462
|
+
The samples directories are excluded from the pnpm workspace to avoid dependency conflicts. To run samples with the local development version of the package:
|
|
463
|
+
|
|
464
|
+
> **Note:** Running `pnpm link` and `pnpm install` inside the samples folders will update local files like `package.json` and `pnpm-lock.yaml` under the samples directories. These changes are only for local testing and should not be checked in. If you accidentally modify them, use `git restore <path>` to revert.
|
|
465
|
+
|
|
466
|
+
1. Build the package:
|
|
467
|
+
|
|
468
|
+
```bash
|
|
469
|
+
npx turbo build --filter=@azure/ai-content-understanding...
|
|
470
|
+
```
|
|
471
|
+
|
|
472
|
+
2. Link the local package in the samples directories:
|
|
473
|
+
|
|
474
|
+
```bash
|
|
475
|
+
cd sdk/contentunderstanding/ai-content-understanding/samples/v1/typescript
|
|
476
|
+
pnpm link ../../../
|
|
477
|
+
cd ../javascript
|
|
478
|
+
pnpm link ../../../
|
|
479
|
+
```
|
|
480
|
+
|
|
481
|
+
3. Install dependencies in the samples directories:
|
|
482
|
+
|
|
483
|
+
```bash
|
|
484
|
+
cd sdk/contentunderstanding/ai-content-understanding/samples/v1/typescript
|
|
485
|
+
pnpm install
|
|
486
|
+
cd ../javascript
|
|
487
|
+
pnpm install
|
|
488
|
+
```
|
|
489
|
+
|
|
490
|
+
#### Alternative (no package.json/lockfile changes)
|
|
491
|
+
|
|
492
|
+
If you want to use the local package without modifying sample `package.json` or `pnpm-lock.yaml`, install from a packed tarball without saving:
|
|
493
|
+
|
|
494
|
+
1. Build the package:
|
|
495
|
+
|
|
496
|
+
```bash
|
|
497
|
+
npx turbo build --filter=@azure/ai-content-understanding...
|
|
498
|
+
```
|
|
499
|
+
|
|
500
|
+
2. Create a local tarball:
|
|
501
|
+
|
|
502
|
+
```bash
|
|
503
|
+
cd sdk/contentunderstanding/ai-content-understanding
|
|
504
|
+
pnpm pack --pack-destination /tmp
|
|
505
|
+
```
|
|
506
|
+
|
|
507
|
+
3. Install the tarball in the samples (no save, no lockfile):
|
|
508
|
+
|
|
509
|
+
```bash
|
|
510
|
+
cd sdk/contentunderstanding/ai-content-understanding/samples/v1/typescript
|
|
511
|
+
npm install --no-save --no-package-lock /tmp/azure-ai-content-understanding-*.tgz
|
|
512
|
+
cd ../javascript
|
|
513
|
+
npm install --no-save --no-package-lock /tmp/azure-ai-content-understanding-*.tgz
|
|
514
|
+
```
|
|
515
|
+
|
|
516
|
+
#### Running a sample
|
|
517
|
+
|
|
518
|
+
After installing dependencies, you can run individual samples.
|
|
519
|
+
|
|
520
|
+
**Setting up environment variables:**
|
|
521
|
+
|
|
522
|
+
Copy the `sample.env` file to create a `.env` file in the sample directory root. Run the following commands from the package root (`sdk/contentunderstanding/ai-content-understanding`):
|
|
523
|
+
|
|
524
|
+
```bash
|
|
525
|
+
# For TypeScript samples
|
|
526
|
+
cp sample.env samples/v1/typescript/.env
|
|
527
|
+
|
|
528
|
+
# For JavaScript samples
|
|
529
|
+
cp sample.env samples/v1/javascript/.env
|
|
530
|
+
```
|
|
531
|
+
|
|
532
|
+
Then edit the `.env` file and fill in your actual values:
|
|
533
|
+
|
|
534
|
+
```bash
|
|
535
|
+
CONTENTUNDERSTANDING_ENDPOINT=https://<your-resource>.services.ai.azure.com/
|
|
536
|
+
CONTENTUNDERSTANDING_KEY=<your-api-key>
|
|
537
|
+
```
|
|
538
|
+
|
|
539
|
+
> **Note:** The `.env` file should be at the sample folder root (same level as `package.json`), not inside `src/` or `dist/`.
|
|
540
|
+
|
|
541
|
+
**TypeScript samples:**
|
|
542
|
+
|
|
543
|
+
```bash
|
|
544
|
+
cd samples/v1/typescript
|
|
545
|
+
npm run build
|
|
546
|
+
node dist/analyzeBinary.js
|
|
547
|
+
```
|
|
548
|
+
|
|
549
|
+
**JavaScript samples:**
|
|
550
|
+
|
|
551
|
+
```bash
|
|
552
|
+
cd samples/v1/javascript
|
|
553
|
+
node analyzeBinary.js
|
|
554
|
+
```
|
|
555
|
+
|
|
556
|
+
For full setup instructions and available samples, see:
|
|
557
|
+
|
|
558
|
+
- [TypeScript samples README](https://github.com/Azure/azure-sdk-for-js/tree/main/sdk/contentunderstanding/ai-content-understanding/samples/v1/typescript/README.md)
|
|
559
|
+
- [JavaScript samples README](https://github.com/Azure/azure-sdk-for-js/tree/main/sdk/contentunderstanding/ai-content-understanding/samples/v1/javascript/README.md)
|
|
560
|
+
|
|
561
|
+
## Next steps
|
|
562
|
+
|
|
563
|
+
* Explore the [samples directory][samples_directory] for complete code examples
|
|
564
|
+
* Read the [Azure AI Content Understanding documentation][product_docs] for detailed service information
|
|
565
|
+
|
|
566
|
+
## Contributing
|
|
567
|
+
|
|
568
|
+
If you'd like to contribute to this library, please read the [contributing guide](https://github.com/Azure/azure-sdk-for-js/blob/main/CONTRIBUTING.md) to learn more about how to build and test the code.
|
|
569
|
+
|
|
570
|
+
## Related projects
|
|
571
|
+
|
|
572
|
+
- [Microsoft Azure SDK for JavaScript](https://github.com/Azure/azure-sdk-for-js)
|
|
573
|
+
|
|
574
|
+
<!-- LINKS -->
|
|
575
|
+
[azure_sub]: https://azure.microsoft.com/free/
|
|
576
|
+
[azure_portal]: https://portal.azure.com
|
|
577
|
+
[azure_identity]: https://github.com/Azure/azure-sdk-for-js/tree/main/sdk/identity/identity
|
|
578
|
+
[defaultazurecredential]: https://github.com/Azure/azure-sdk-for-js/tree/main/sdk/identity/identity#defaultazurecredential
|
|
579
|
+
[product_docs]: https://learn.microsoft.com/azure/ai-services/content-understanding/
|
|
580
|
+
[cu_quickstart]: https://learn.microsoft.com/azure/ai-services/content-understanding/quickstart/use-rest-api?tabs=portal%2Cdocument
|
|
581
|
+
[cu_region_support]: https://learn.microsoft.com/azure/ai-services/content-understanding/language-region-support
|
|
582
|
+
[deploy_models_docs]: https://learn.microsoft.com/azure/ai-studio/how-to/deploy-models-openai
|
|
583
|
+
[prebuilt_analyzers_docs]: https://learn.microsoft.com/azure/ai-services/content-understanding/concepts/prebuilt-analyzers
|
|
584
|
+
[samples_directory]: https://github.com/Azure/azure-sdk-for-js/tree/main/sdk/contentunderstanding/ai-content-understanding/samples
|
|
585
|
+
[sample_update_defaults]: https://github.com/Azure/azure-sdk-for-js/tree/main/sdk/contentunderstanding/ai-content-understanding/samples-dev/updateDefaults.ts
|
|
586
|
+
[client_options]: https://learn.microsoft.com/javascript/api/@azure/core-rest-pipeline/pipelineoptions?view=azure-node-latest
|
|
587
|
+
[accessing_response]: https://learn.microsoft.com/javascript/api/@azure/core-rest-pipeline/pipelineresponse?view=azure-node-latest
|
|
588
|
+
[long_running_operations]: https://learn.microsoft.com/javascript/api/@azure/core-lro?view=azure-node-latest
|
|
589
|
+
[handling_failures]: https://learn.microsoft.com/javascript/api/@azure/core-rest-pipeline/resterror?view=azure-node-latest
|
|
590
|
+
[diagnostics]: https://learn.microsoft.com/javascript/api/@azure/logger?view=azure-node-latest
|
|
591
|
+
[client_lifetime]: https://learn.microsoft.com/azure/developer/javascript/sdk/use-azure-sdk
|
|
@@ -0,0 +1,15 @@
|
|
|
1
|
+
import type { Client, ClientOptions } from "@azure-rest/core-client";
import type { KeyCredential, TokenCredential } from "@azure/core-auth";
/**
 * Low-level client context for the Content Understanding service,
 * produced by {@link createContentUnderstanding}.
 */
export interface ContentUnderstandingContext extends Client {
  /**
   * The API version to use for this operation.
   * Known values of {@link KnownVersions} that the service accepts.
   */
  apiVersion?: string;
}
/** Optional parameters for the client. */
export interface ContentUnderstandingClientOptionalParams extends ClientOptions {
  /**
   * The API version to use for this operation.
   * Known values of {@link KnownVersions} that the service accepts.
   */
  apiVersion?: string;
}
/**
 * Creates a client context bound to the given endpoint and credential.
 * Accepts either an API key ({@link KeyCredential}) or a Microsoft Entra ID
 * token credential ({@link TokenCredential}).
 */
export declare function createContentUnderstanding(endpoint: string, credential: KeyCredential | TokenCredential, options?: ContentUnderstandingClientOptionalParams): ContentUnderstandingContext;
//# sourceMappingURL=contentUnderstandingContext.d.ts.map
|
|
@@ -0,0 +1,27 @@
|
|
|
1
|
+
// Copyright (c) Microsoft Corporation.
|
|
2
|
+
// Licensed under the MIT License.
|
|
3
|
+
import { logger } from "../logger.js";
|
|
4
|
+
import { getClient } from "@azure-rest/core-client";
|
|
5
|
+
// CUSTOMIZATION: EMITTER-FIX: Renamed 'endpointParam' to 'endpoint' for clarity and consistency.
|
|
6
|
+
// The emitter generates 'endpointParam' but 'endpoint' is the standard name.
|
|
7
|
+
/**
 * Creates the low-level client context for the Content Understanding service.
 *
 * @param endpoint - The Microsoft Foundry resource endpoint, e.g.
 *   "https://<your-resource>.services.ai.azure.com/". A trailing slash is
 *   tolerated (the README documents endpoints with one); it is stripped before
 *   the "/contentunderstanding" path segment is appended so the resulting URL
 *   does not contain a double slash.
 * @param credential - A KeyCredential (sent via the "Ocp-Apim-Subscription-Key"
 *   header by default) or a TokenCredential (scoped by default to
 *   "https://cognitiveservices.azure.com/.default").
 * @param options - Optional client settings. `options.endpoint` overrides the
 *   computed service URL entirely; `options.apiVersion` is carried on the
 *   returned context rather than forwarded to getClient.
 * @returns The pipeline client context with `apiVersion` attached.
 */
export function createContentUnderstanding(endpoint, credential, options = {}) {
  // Strip any trailing slash so endpoints documented as "https://.../" do not
  // produce "...//contentunderstanding" in the service URL.
  const endpointUrl =
    options.endpoint ?? `${endpoint.replace(/\/+$/, "")}/contentunderstanding`;
  const prefixFromOptions = options?.userAgentOptions?.userAgentPrefix;
  const userAgentInfo = `azsdk-js-ai-content-understanding/1.0.0`;
  const userAgentPrefix = prefixFromOptions
    ? `${prefixFromOptions} azsdk-js-api ${userAgentInfo}`
    : `azsdk-js-api ${userAgentInfo}`;
  // Destructure `apiVersion` out of the options forwarded to getClient: it is
  // surfaced on the returned context instead of being applied per-request here.
  const { apiVersion: _, ...updatedOptions } = {
    ...options,
    userAgentOptions: { userAgentPrefix },
    loggingOptions: { logger: options.loggingOptions?.logger ?? logger.info },
    credentials: {
      scopes: options.credentials?.scopes ?? ["https://cognitiveservices.azure.com/.default"],
      apiKeyHeaderName: options.credentials?.apiKeyHeaderName ?? "Ocp-Apim-Subscription-Key",
    },
  };
  const clientContext = getClient(endpointUrl, credential, updatedOptions);
  const apiVersion = options.apiVersion;
  return { ...clientContext, apiVersion };
}
|
|
27
|
+
//# sourceMappingURL=contentUnderstandingContext.js.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"contentUnderstandingContext.js","sourceRoot":"","sources":["../../../src/api/contentUnderstandingContext.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAElC,OAAO,EAAE,MAAM,EAAE,MAAM,cAAc,CAAC;AAGtC,OAAO,EAAE,SAAS,EAAE,MAAM,yBAAyB,CAAC;AAgBpD,iGAAiG;AACjG,6EAA6E;AAC7E,MAAM,UAAU,0BAA0B,CACxC,QAAgB,EAChB,UAA2C,EAC3C,UAAoD,EAAE;IAEtD,MAAM,WAAW,GAAG,OAAO,CAAC,QAAQ,IAAI,GAAG,QAAQ,uBAAuB,CAAC;IAC3E,MAAM,iBAAiB,GAAG,OAAO,EAAE,gBAAgB,EAAE,eAAe,CAAC;IACrE,MAAM,aAAa,GAAG,yCAAyC,CAAC;IAChE,MAAM,eAAe,GAAG,iBAAiB;QACvC,CAAC,CAAC,GAAG,iBAAiB,iBAAiB,aAAa,EAAE;QACtD,CAAC,CAAC,gBAAgB,aAAa,EAAE,CAAC;IACpC,MAAM,EAAE,UAAU,EAAE,CAAC,EAAE,GAAG,cAAc,EAAE,GAAG;QAC3C,GAAG,OAAO;QACV,gBAAgB,EAAE,EAAE,eAAe,EAAE;QACrC,cAAc,EAAE,EAAE,MAAM,EAAE,OAAO,CAAC,cAAc,EAAE,MAAM,IAAI,MAAM,CAAC,IAAI,EAAE;QACzE,WAAW,EAAE;YACX,MAAM,EAAE,OAAO,CAAC,WAAW,EAAE,MAAM,IAAI,CAAC,8CAA8C,CAAC;YACvF,gBAAgB,EAAE,OAAO,CAAC,WAAW,EAAE,gBAAgB,IAAI,2BAA2B;SACvF;KACF,CAAC;IACF,MAAM,aAAa,GAAG,SAAS,CAAC,WAAW,EAAE,UAAU,EAAE,cAAc,CAAC,CAAC;IACzE,MAAM,UAAU,GAAG,OAAO,CAAC,UAAU,CAAC;IACtC,OAAO,EAAE,GAAG,aAAa,EAAE,UAAU,EAAiC,CAAC;AACzE,CAAC","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT License.\n\nimport { logger } from \"../logger.js\";\nimport { KnownVersions } from \"../models/models.js\";\nimport type { Client, ClientOptions } from \"@azure-rest/core-client\";\nimport { getClient } from \"@azure-rest/core-client\";\nimport type { KeyCredential, TokenCredential } from \"@azure/core-auth\";\n\nexport interface ContentUnderstandingContext extends Client {\n /** The API version to use for this operation. */\n /** Known values of {@link KnownVersions} that the service accepts. */\n apiVersion?: string;\n}\n\n/** Optional parameters for the client. */\nexport interface ContentUnderstandingClientOptionalParams extends ClientOptions {\n /** The API version to use for this operation. */\n /** Known values of {@link KnownVersions} that the service accepts. 
*/\n apiVersion?: string;\n}\n\n// CUSTOMIZATION: EMITTER-FIX: Renamed 'endpointParam' to 'endpoint' for clarity and consistency.\n// The emitter generates 'endpointParam' but 'endpoint' is the standard name.\nexport function createContentUnderstanding(\n endpoint: string,\n credential: KeyCredential | TokenCredential,\n options: ContentUnderstandingClientOptionalParams = {},\n): ContentUnderstandingContext {\n const endpointUrl = options.endpoint ?? `${endpoint}/contentunderstanding`;\n const prefixFromOptions = options?.userAgentOptions?.userAgentPrefix;\n const userAgentInfo = `azsdk-js-ai-content-understanding/1.0.0`;\n const userAgentPrefix = prefixFromOptions\n ? `${prefixFromOptions} azsdk-js-api ${userAgentInfo}`\n : `azsdk-js-api ${userAgentInfo}`;\n const { apiVersion: _, ...updatedOptions } = {\n ...options,\n userAgentOptions: { userAgentPrefix },\n loggingOptions: { logger: options.loggingOptions?.logger ?? logger.info },\n credentials: {\n scopes: options.credentials?.scopes ?? [\"https://cognitiveservices.azure.com/.default\"],\n apiKeyHeaderName: options.credentials?.apiKeyHeaderName ?? \"Ocp-Apim-Subscription-Key\",\n },\n };\n const clientContext = getClient(endpointUrl, credential, updatedOptions);\n const apiVersion = options.apiVersion;\n return { ...clientContext, apiVersion } as ContentUnderstandingContext;\n}\n"]}
|
|
@@ -0,0 +1,4 @@
|
|
|
1
|
+
export { createContentUnderstanding, ContentUnderstandingContext, ContentUnderstandingClientOptionalParams, } from "./contentUnderstandingContext.js";
|
|
2
|
+
export { updateDefaults, updateAnalyzer, listAnalyzers, grantCopyAuthorization, getResultFile, getResult, getOperationStatus, getDefaults, getAnalyzer, deleteResult, deleteAnalyzer, createAnalyzer, copyAnalyzer, analyzeBinary, analyze, } from "./operations.js";
|
|
3
|
+
export { UpdateDefaultsOptionalParams, UpdateAnalyzerOptionalParams, ListAnalyzersOptionalParams, GrantCopyAuthorizationOptionalParams, GetResultFileOptionalParams, GetResultOptionalParams, GetOperationStatusOptionalParams, GetDefaultsOptionalParams, GetAnalyzerOptionalParams, DeleteResultOptionalParams, DeleteAnalyzerOptionalParams, CreateAnalyzerOptionalParams, CopyAnalyzerOptionalParams, AnalyzeBinaryOptionalParams, AnalyzeOptionalParams, } from "./options.js";
|
|
4
|
+
//# sourceMappingURL=index.d.ts.map
|