@ai-sdk/azure 3.0.18 → 3.0.19

This diff shows the contents of publicly released package versions as they appear in their public registries. It is provided for informational purposes only.
package/CHANGELOG.md CHANGED
@@ -1,5 +1,20 @@
  # @ai-sdk/azure
 
+ ## 3.0.19
+
+ ### Patch Changes
+
+ - 04c89b1: Provide Responses API providerMetadata types at the message / reasoning level.
+
+   - Export the following types for use in client code:
+     - `OpenaiResponsesProviderMetadata`
+     - `OpenaiResponsesReasoningProviderMetadata`
+     - `AzureResponsesProviderMetadata`
+     - `AzureResponsesReasoningProviderMetadata`
+
+ - Updated dependencies [04c89b1]
+   - @ai-sdk/openai@3.0.19
+
  ## 3.0.18
 
  ### Patch Changes
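For consumers, these exports make the otherwise loosely typed `providerMetadata` record narrowable. A minimal sketch of the intended usage (mirroring the documentation updates later in this diff; the deployment name and prompt are placeholders):

```ts
import { azure, type AzureResponsesProviderMetadata } from '@ai-sdk/azure';
import { generateText } from 'ai';

// The default azure(...) model uses the Responses API, so its providerMetadata
// can be narrowed with the newly exported wrapper type.
const result = await generateText({
  model: azure('your-deployment-name'),
  prompt: 'Hello!',
});

const metadata = result.providerMetadata as
  | AzureResponsesProviderMetadata
  | undefined;

console.log(metadata?.azure.responseId);
```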
package/dist/index.d.mts CHANGED
@@ -1,7 +1,7 @@
  export { OpenAIChatLanguageModelOptions, OpenAIResponsesProviderOptions } from '@ai-sdk/openai';
  import { ProviderV3, LanguageModelV3, EmbeddingModelV3, ImageModelV3, TranscriptionModelV3, SpeechModelV3 } from '@ai-sdk/provider';
  import { FetchFunction } from '@ai-sdk/provider-utils';
- import { codeInterpreter, fileSearch, imageGeneration, webSearchPreview, ResponsesTextProviderMetadata, ResponsesSourceDocumentProviderMetadata } from '@ai-sdk/openai/internal';
+ import { codeInterpreter, fileSearch, imageGeneration, webSearchPreview, ResponsesProviderMetadata, ResponsesReasoningProviderMetadata, ResponsesTextProviderMetadata, ResponsesSourceDocumentProviderMetadata } from '@ai-sdk/openai/internal';
 
  declare const azureOpenaiTools: {
    codeInterpreter: typeof codeInterpreter;
@@ -112,6 +112,12 @@ Default Azure OpenAI provider instance.
  */
  declare const azure: AzureOpenAIProvider;
 
+ type AzureResponsesProviderMetadata = {
+   azure: ResponsesProviderMetadata;
+ };
+ type AzureResponsesReasoningProviderMetadata = {
+   azure: ResponsesReasoningProviderMetadata;
+ };
  type AzureResponsesTextProviderMetadata = {
    azure: ResponsesTextProviderMetadata;
  };
@@ -121,4 +127,4 @@ type AzureResponsesSourceDocumentProviderMetadata = {
 
  declare const VERSION: string;
 
- export { type AzureOpenAIProvider, type AzureOpenAIProviderSettings, type AzureResponsesSourceDocumentProviderMetadata, type AzureResponsesTextProviderMetadata, VERSION, azure, createAzure };
+ export { type AzureOpenAIProvider, type AzureOpenAIProviderSettings, type AzureResponsesProviderMetadata, type AzureResponsesReasoningProviderMetadata, type AzureResponsesSourceDocumentProviderMetadata, type AzureResponsesTextProviderMetadata, VERSION, azure, createAzure };
package/dist/index.d.ts CHANGED
@@ -1,7 +1,7 @@
  export { OpenAIChatLanguageModelOptions, OpenAIResponsesProviderOptions } from '@ai-sdk/openai';
  import { ProviderV3, LanguageModelV3, EmbeddingModelV3, ImageModelV3, TranscriptionModelV3, SpeechModelV3 } from '@ai-sdk/provider';
  import { FetchFunction } from '@ai-sdk/provider-utils';
- import { codeInterpreter, fileSearch, imageGeneration, webSearchPreview, ResponsesTextProviderMetadata, ResponsesSourceDocumentProviderMetadata } from '@ai-sdk/openai/internal';
+ import { codeInterpreter, fileSearch, imageGeneration, webSearchPreview, ResponsesProviderMetadata, ResponsesReasoningProviderMetadata, ResponsesTextProviderMetadata, ResponsesSourceDocumentProviderMetadata } from '@ai-sdk/openai/internal';
 
  declare const azureOpenaiTools: {
    codeInterpreter: typeof codeInterpreter;
@@ -112,6 +112,12 @@ Default Azure OpenAI provider instance.
  */
  declare const azure: AzureOpenAIProvider;
 
+ type AzureResponsesProviderMetadata = {
+   azure: ResponsesProviderMetadata;
+ };
+ type AzureResponsesReasoningProviderMetadata = {
+   azure: ResponsesReasoningProviderMetadata;
+ };
  type AzureResponsesTextProviderMetadata = {
    azure: ResponsesTextProviderMetadata;
  };
@@ -121,4 +127,4 @@ type AzureResponsesSourceDocumentProviderMetadata = {
 
  declare const VERSION: string;
 
- export { type AzureOpenAIProvider, type AzureOpenAIProviderSettings, type AzureResponsesSourceDocumentProviderMetadata, type AzureResponsesTextProviderMetadata, VERSION, azure, createAzure };
+ export { type AzureOpenAIProvider, type AzureOpenAIProviderSettings, type AzureResponsesProviderMetadata, type AzureResponsesReasoningProviderMetadata, type AzureResponsesSourceDocumentProviderMetadata, type AzureResponsesTextProviderMetadata, VERSION, azure, createAzure };
package/dist/index.js CHANGED
@@ -40,7 +40,7 @@ var azureOpenaiTools = {
  };
 
  // src/version.ts
- var VERSION = true ? "3.0.18" : "0.0.0-test";
+ var VERSION = true ? "3.0.19" : "0.0.0-test";
 
  // src/azure-openai-provider.ts
  function createAzure(options = {}) {
package/dist/index.js.map CHANGED
@@ -1 +1 @@
- {"version":3,"sources":["../src/index.ts","../src/azure-openai-provider.ts","../src/azure-openai-tools.ts","../src/version.ts"],"sourcesContent":["export type {\n OpenAIResponsesProviderOptions,\n OpenAIChatLanguageModelOptions,\n} from '@ai-sdk/openai';\n\nexport { azure, createAzure } from './azure-openai-provider';\nexport type {\n AzureOpenAIProvider,\n AzureOpenAIProviderSettings,\n} from './azure-openai-provider';\nexport type {\n AzureResponsesTextProviderMetadata,\n AzureResponsesSourceDocumentProviderMetadata,\n} from './azure-openai-provider-metadata';\nexport { VERSION } from './version';\n","import {\n OpenAIChatLanguageModel,\n OpenAICompletionLanguageModel,\n OpenAIEmbeddingModel,\n OpenAIImageModel,\n OpenAIResponsesLanguageModel,\n OpenAISpeechModel,\n OpenAITranscriptionModel,\n} from '@ai-sdk/openai/internal';\nimport {\n EmbeddingModelV3,\n LanguageModelV3,\n ProviderV3,\n ImageModelV3,\n SpeechModelV3,\n TranscriptionModelV3,\n} from '@ai-sdk/provider';\nimport {\n FetchFunction,\n loadApiKey,\n loadSetting,\n withUserAgentSuffix,\n} from '@ai-sdk/provider-utils';\nimport { azureOpenaiTools } from './azure-openai-tools';\nimport { VERSION } from './version';\n\nexport interface AzureOpenAIProvider extends ProviderV3 {\n (deploymentId: string): LanguageModelV3;\n\n /**\n Creates an Azure OpenAI responses API model for text generation.\n */\n languageModel(deploymentId: string): LanguageModelV3;\n\n /**\nCreates an Azure OpenAI chat model for text generation.\n */\n chat(deploymentId: string): LanguageModelV3;\n\n /**\nCreates an Azure OpenAI responses API model for text generation.\n */\n responses(deploymentId: string): LanguageModelV3;\n\n /**\nCreates an Azure OpenAI completion model for text generation.\n */\n completion(deploymentId: string): LanguageModelV3;\n\n /**\n * Creates an Azure OpenAI model for text embeddings.\n */\n embedding(deploymentId: string): EmbeddingModelV3;\n\n /**\n * Creates an Azure OpenAI model for text embeddings.\n */\n embeddingModel(deploymentId: string): EmbeddingModelV3;\n\n /**\n * @deprecated Use `embedding` instead.\n */\n textEmbedding(deploymentId: string): EmbeddingModelV3;\n\n /**\n * @deprecated Use `embeddingModel` instead.\n */\n textEmbeddingModel(deploymentId: string): EmbeddingModelV3;\n\n /**\n * Creates an Azure OpenAI DALL-E model for image generation.\n */\n image(deploymentId: string): ImageModelV3;\n\n /**\n * Creates an Azure OpenAI DALL-E model for image generation.\n */\n imageModel(deploymentId: string): ImageModelV3;\n\n /**\n * Creates an Azure OpenAI model for audio transcription.\n */\n transcription(deploymentId: string): TranscriptionModelV3;\n\n /**\n * Creates an Azure OpenAI model for speech generation.\n */\n speech(deploymentId: string): SpeechModelV3;\n\n /**\n * AzureOpenAI-specific tools.\n */\n tools: typeof azureOpenaiTools;\n}\n\nexport interface AzureOpenAIProviderSettings {\n /**\nName of the Azure OpenAI resource. Either this or `baseURL` can be used.\n\nThe resource name is used in the assembled URL: `https://{resourceName}.openai.azure.com/openai/v1{path}`.\n */\n resourceName?: string;\n\n /**\nUse a different URL prefix for API calls, e.g. to use proxy servers. 
Either this or `resourceName` can be used.\nWhen a baseURL is provided, the resourceName is ignored.\n\nWith a baseURL, the resolved URL is `{baseURL}/v1{path}`.\n */\n baseURL?: string;\n\n /**\nAPI key for authenticating requests.\n */\n apiKey?: string;\n\n /**\nCustom headers to include in the requests.\n */\n headers?: Record<string, string>;\n\n /**\nCustom fetch implementation. You can use it as a middleware to intercept requests,\nor to provide a custom fetch implementation for e.g. testing.\n */\n fetch?: FetchFunction;\n\n /**\nCustom api version to use. Defaults to `preview`.\n */\n apiVersion?: string;\n\n /**\nUse deployment-based URLs for specific model types. Set to true to use legacy deployment format:\n`{baseURL}/deployments/{deploymentId}{path}?api-version={apiVersion}` instead of\n`{baseURL}/v1{path}?api-version={apiVersion}`.\n */\n useDeploymentBasedUrls?: boolean;\n}\n\n/**\nCreate an Azure OpenAI provider instance.\n */\nexport function createAzure(\n options: AzureOpenAIProviderSettings = {},\n): AzureOpenAIProvider {\n const getHeaders = () => {\n const baseHeaders = {\n 'api-key': loadApiKey({\n apiKey: options.apiKey,\n environmentVariableName: 'AZURE_API_KEY',\n description: 'Azure OpenAI',\n }),\n ...options.headers,\n };\n return withUserAgentSuffix(baseHeaders, `ai-sdk/azure/${VERSION}`);\n };\n\n const getResourceName = () =>\n loadSetting({\n settingValue: options.resourceName,\n settingName: 'resourceName',\n environmentVariableName: 'AZURE_RESOURCE_NAME',\n description: 'Azure OpenAI resource name',\n });\n\n const apiVersion = options.apiVersion ?? 'v1';\n\n const url = ({ path, modelId }: { path: string; modelId: string }) => {\n const baseUrlPrefix =\n options.baseURL ?? `https://${getResourceName()}.openai.azure.com/openai`;\n\n let fullUrl: URL;\n if (options.useDeploymentBasedUrls) {\n // Use deployment-based format for compatibility with certain Azure OpenAI models\n fullUrl = new URL(`${baseUrlPrefix}/deployments/${modelId}${path}`);\n } else {\n // Use v1 API format - no deployment ID in URL\n fullUrl = new URL(`${baseUrlPrefix}/v1${path}`);\n }\n\n fullUrl.searchParams.set('api-version', apiVersion);\n return fullUrl.toString();\n };\n\n const createChatModel = (deploymentName: string) =>\n new OpenAIChatLanguageModel(deploymentName, {\n provider: 'azure.chat',\n url,\n headers: getHeaders,\n fetch: options.fetch,\n });\n\n const createCompletionModel = (modelId: string) =>\n new OpenAICompletionLanguageModel(modelId, {\n provider: 'azure.completion',\n url,\n headers: getHeaders,\n fetch: options.fetch,\n });\n\n const createEmbeddingModel = (modelId: string) =>\n new OpenAIEmbeddingModel(modelId, {\n provider: 'azure.embeddings',\n headers: getHeaders,\n url,\n fetch: options.fetch,\n });\n\n const createResponsesModel = (modelId: string) =>\n new OpenAIResponsesLanguageModel(modelId, {\n provider: 'azure.responses',\n url,\n headers: getHeaders,\n fetch: options.fetch,\n fileIdPrefixes: ['assistant-'],\n });\n\n const createImageModel = (modelId: string) =>\n new OpenAIImageModel(modelId, {\n provider: 'azure.image',\n url,\n headers: getHeaders,\n fetch: options.fetch,\n });\n\n const createTranscriptionModel = (modelId: string) =>\n new OpenAITranscriptionModel(modelId, {\n provider: 'azure.transcription',\n url,\n headers: getHeaders,\n fetch: options.fetch,\n });\n\n const createSpeechModel = (modelId: string) =>\n new OpenAISpeechModel(modelId, {\n provider: 'azure.speech',\n url,\n headers: getHeaders,\n fetch: options.fetch,\n });\n\n 
const provider = function (deploymentId: string) {\n if (new.target) {\n throw new Error(\n 'The Azure OpenAI model function cannot be called with the new keyword.',\n );\n }\n\n return createResponsesModel(deploymentId);\n };\n\n provider.specificationVersion = 'v3' as const;\n provider.languageModel = createResponsesModel;\n provider.chat = createChatModel;\n provider.completion = createCompletionModel;\n provider.embedding = createEmbeddingModel;\n provider.embeddingModel = createEmbeddingModel;\n provider.textEmbedding = createEmbeddingModel;\n provider.textEmbeddingModel = createEmbeddingModel;\n provider.image = createImageModel;\n provider.imageModel = createImageModel;\n provider.responses = createResponsesModel;\n provider.transcription = createTranscriptionModel;\n provider.speech = createSpeechModel;\n provider.tools = azureOpenaiTools;\n return provider;\n}\n\n/**\nDefault Azure OpenAI provider instance.\n */\nexport const azure = createAzure();\n","import {\n codeInterpreter,\n fileSearch,\n imageGeneration,\n webSearchPreview,\n} from '@ai-sdk/openai/internal';\n\nexport const azureOpenaiTools: {\n codeInterpreter: typeof codeInterpreter;\n fileSearch: typeof fileSearch;\n imageGeneration: typeof imageGeneration;\n webSearchPreview: typeof webSearchPreview;\n} = {\n codeInterpreter,\n fileSearch,\n imageGeneration,\n webSearchPreview,\n};\n","// Version string of this package injected at build time.\ndeclare const __PACKAGE_VERSION__: string | undefined;\nexport const VERSION: string =\n typeof __PACKAGE_VERSION__ !== 'undefined'\n ? __PACKAGE_VERSION__\n : '0.0.0-test';\n"],"mappings":";;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;;;ACAA,IAAAA,mBAQO;AASP,4BAKO;;;ACtBP,sBAKO;AAEA,IAAM,mBAKT;AAAA,EACF;AAAA,EACA;AAAA,EACA;AAAA,EACA;AACF;;;ACfO,IAAM,UACX,OACI,WACA;;;AF0IC,SAAS,YACd,UAAuC,CAAC,GACnB;AAjJvB;AAkJE,QAAM,aAAa,MAAM;AACvB,UAAM,cAAc;AAAA,MAClB,eAAW,kCAAW;AAAA,QACpB,QAAQ,QAAQ;AAAA,QAChB,yBAAyB;AAAA,QACzB,aAAa;AAAA,MACf,CAAC;AAAA,MACD,GAAG,QAAQ;AAAA,IACb;AACA,eAAO,2CAAoB,aAAa,gBAAgB,OAAO,EAAE;AAAA,EACnE;AAEA,QAAM,kBAAkB,UACtB,mCAAY;AAAA,IACV,cAAc,QAAQ;AAAA,IACtB,aAAa;AAAA,IACb,yBAAyB;AAAA,IACzB,aAAa;AAAA,EACf,CAAC;AAEH,QAAM,cAAa,aAAQ,eAAR,YAAsB;AAEzC,QAAM,MAAM,CAAC,EAAE,MAAM,QAAQ,MAAyC;AAxKxE,QAAAC;AAyKI,UAAM,iBACJA,MAAA,QAAQ,YAAR,OAAAA,MAAmB,WAAW,gBAAgB,CAAC;AAEjD,QAAI;AACJ,QAAI,QAAQ,wBAAwB;AAElC,gBAAU,IAAI,IAAI,GAAG,aAAa,gBAAgB,OAAO,GAAG,IAAI,EAAE;AAAA,IACpE,OAAO;AAEL,gBAAU,IAAI,IAAI,GAAG,aAAa,MAAM,IAAI,EAAE;AAAA,IAChD;AAEA,YAAQ,aAAa,IAAI,eAAe,UAAU;AAClD,WAAO,QAAQ,SAAS;AAAA,EAC1B;AAEA,QAAM,kBAAkB,CAAC,mBACvB,IAAI,yCAAwB,gBAAgB;AAAA,IAC1C,UAAU;AAAA,IACV;AAAA,IACA,SAAS;AAAA,IACT,OAAO,QAAQ;AAAA,EACjB,CAAC;AAEH,QAAM,wBAAwB,CAAC,YAC7B,IAAI,+CAA8B,SAAS;AAAA,IACzC,UAAU;AAAA,IACV;AAAA,IACA,SAAS;AAAA,IACT,OAAO,QAAQ;AAAA,EACjB,CAAC;AAEH,QAAM,uBAAuB,CAAC,YAC5B,IAAI,sCAAqB,SAAS;AAAA,IAChC,UAAU;AAAA,IACV,SAAS;AAAA,IACT;AAAA,IACA,OAAO,QAAQ;AAAA,EACjB,CAAC;AAEH,QAAM,uBAAuB,CAAC,YAC5B,IAAI,8CAA6B,SAAS;AAAA,IACxC,UAAU;AAAA,IACV;AAAA,IACA,SAAS;AAAA,IACT,OAAO,QAAQ;AAAA,IACf,gBAAgB,CAAC,YAAY;AAAA,EAC/B,CAAC;AAEH,QAAM,mBAAmB,CAAC,YACxB,IAAI,kCAAiB,SAAS;AAAA,IAC5B,UAAU;AAAA,IACV;AAAA,IACA,SAAS;AAAA,IACT,OAAO,QAAQ;AAAA,EACjB,CAAC;AAEH,QAAM,2BAA2B,CAAC,YAChC,IAAI,0CAAyB,SAAS;AAAA,IACpC,UAAU;AAAA,IACV;AAAA,IACA,SAAS;AAAA,IACT,OAAO,QAAQ;AAAA,EACjB,CAAC;AAEH,QAAM,oBAAoB,CAAC,YACzB,IAAI,mCAAkB,SAAS;AAAA,IAC7B,UAAU;AAAA,IACV;AAAA,IACA,SAAS;AAAA,IACT,OAAO,QAAQ;AAAA,EACjB,CAAC;AAEH,QAAM,WAAW,SAAU,cAAsB;AAC/C,QAAI,YAAY;AACd,YAAM,IAAI;AAAA,QACR;AAAA,MACF;AAAA,IACF;AAEA,WAAO,qBAAqB,YAAY;AAAA,EAC
1C;AAEA,WAAS,uBAAuB;AAChC,WAAS,gBAAgB;AACzB,WAAS,OAAO;AAChB,WAAS,aAAa;AACtB,WAAS,YAAY;AACrB,WAAS,iBAAiB;AAC1B,WAAS,gBAAgB;AACzB,WAAS,qBAAqB;AAC9B,WAAS,QAAQ;AACjB,WAAS,aAAa;AACtB,WAAS,YAAY;AACrB,WAAS,gBAAgB;AACzB,WAAS,SAAS;AAClB,WAAS,QAAQ;AACjB,SAAO;AACT;AAKO,IAAM,QAAQ,YAAY;","names":["import_internal","_a"]}
+ {"version":3,"sources":["../src/index.ts","../src/azure-openai-provider.ts","../src/azure-openai-tools.ts","../src/version.ts"],"sourcesContent":["export type {\n OpenAIResponsesProviderOptions,\n OpenAIChatLanguageModelOptions,\n} from '@ai-sdk/openai';\n\nexport { azure, createAzure } from './azure-openai-provider';\nexport type {\n AzureOpenAIProvider,\n AzureOpenAIProviderSettings,\n} from './azure-openai-provider';\nexport type {\n AzureResponsesProviderMetadata,\n AzureResponsesReasoningProviderMetadata,\n AzureResponsesTextProviderMetadata,\n AzureResponsesSourceDocumentProviderMetadata,\n} from './azure-openai-provider-metadata';\nexport { VERSION } from './version';\n","import {\n OpenAIChatLanguageModel,\n OpenAICompletionLanguageModel,\n OpenAIEmbeddingModel,\n OpenAIImageModel,\n OpenAIResponsesLanguageModel,\n OpenAISpeechModel,\n OpenAITranscriptionModel,\n} from '@ai-sdk/openai/internal';\nimport {\n EmbeddingModelV3,\n LanguageModelV3,\n ProviderV3,\n ImageModelV3,\n SpeechModelV3,\n TranscriptionModelV3,\n} from '@ai-sdk/provider';\nimport {\n FetchFunction,\n loadApiKey,\n loadSetting,\n withUserAgentSuffix,\n} from '@ai-sdk/provider-utils';\nimport { azureOpenaiTools } from './azure-openai-tools';\nimport { VERSION } from './version';\n\nexport interface AzureOpenAIProvider extends ProviderV3 {\n (deploymentId: string): LanguageModelV3;\n\n /**\n Creates an Azure OpenAI responses API model for text generation.\n */\n languageModel(deploymentId: string): LanguageModelV3;\n\n /**\nCreates an Azure OpenAI chat model for text generation.\n */\n chat(deploymentId: string): LanguageModelV3;\n\n /**\nCreates an Azure OpenAI responses API model for text generation.\n */\n responses(deploymentId: string): LanguageModelV3;\n\n /**\nCreates an Azure OpenAI completion model for text generation.\n */\n completion(deploymentId: string): LanguageModelV3;\n\n /**\n * Creates an Azure OpenAI model for text embeddings.\n */\n embedding(deploymentId: string): EmbeddingModelV3;\n\n /**\n * Creates an Azure OpenAI model for text embeddings.\n */\n embeddingModel(deploymentId: string): EmbeddingModelV3;\n\n /**\n * @deprecated Use `embedding` instead.\n */\n textEmbedding(deploymentId: string): EmbeddingModelV3;\n\n /**\n * @deprecated Use `embeddingModel` instead.\n */\n textEmbeddingModel(deploymentId: string): EmbeddingModelV3;\n\n /**\n * Creates an Azure OpenAI DALL-E model for image generation.\n */\n image(deploymentId: string): ImageModelV3;\n\n /**\n * Creates an Azure OpenAI DALL-E model for image generation.\n */\n imageModel(deploymentId: string): ImageModelV3;\n\n /**\n * Creates an Azure OpenAI model for audio transcription.\n */\n transcription(deploymentId: string): TranscriptionModelV3;\n\n /**\n * Creates an Azure OpenAI model for speech generation.\n */\n speech(deploymentId: string): SpeechModelV3;\n\n /**\n * AzureOpenAI-specific tools.\n */\n tools: typeof azureOpenaiTools;\n}\n\nexport interface AzureOpenAIProviderSettings {\n /**\nName of the Azure OpenAI resource. Either this or `baseURL` can be used.\n\nThe resource name is used in the assembled URL: `https://{resourceName}.openai.azure.com/openai/v1{path}`.\n */\n resourceName?: string;\n\n /**\nUse a different URL prefix for API calls, e.g. to use proxy servers. 
Either this or `resourceName` can be used.\nWhen a baseURL is provided, the resourceName is ignored.\n\nWith a baseURL, the resolved URL is `{baseURL}/v1{path}`.\n */\n baseURL?: string;\n\n /**\nAPI key for authenticating requests.\n */\n apiKey?: string;\n\n /**\nCustom headers to include in the requests.\n */\n headers?: Record<string, string>;\n\n /**\nCustom fetch implementation. You can use it as a middleware to intercept requests,\nor to provide a custom fetch implementation for e.g. testing.\n */\n fetch?: FetchFunction;\n\n /**\nCustom api version to use. Defaults to `preview`.\n */\n apiVersion?: string;\n\n /**\nUse deployment-based URLs for specific model types. Set to true to use legacy deployment format:\n`{baseURL}/deployments/{deploymentId}{path}?api-version={apiVersion}` instead of\n`{baseURL}/v1{path}?api-version={apiVersion}`.\n */\n useDeploymentBasedUrls?: boolean;\n}\n\n/**\nCreate an Azure OpenAI provider instance.\n */\nexport function createAzure(\n options: AzureOpenAIProviderSettings = {},\n): AzureOpenAIProvider {\n const getHeaders = () => {\n const baseHeaders = {\n 'api-key': loadApiKey({\n apiKey: options.apiKey,\n environmentVariableName: 'AZURE_API_KEY',\n description: 'Azure OpenAI',\n }),\n ...options.headers,\n };\n return withUserAgentSuffix(baseHeaders, `ai-sdk/azure/${VERSION}`);\n };\n\n const getResourceName = () =>\n loadSetting({\n settingValue: options.resourceName,\n settingName: 'resourceName',\n environmentVariableName: 'AZURE_RESOURCE_NAME',\n description: 'Azure OpenAI resource name',\n });\n\n const apiVersion = options.apiVersion ?? 'v1';\n\n const url = ({ path, modelId }: { path: string; modelId: string }) => {\n const baseUrlPrefix =\n options.baseURL ?? `https://${getResourceName()}.openai.azure.com/openai`;\n\n let fullUrl: URL;\n if (options.useDeploymentBasedUrls) {\n // Use deployment-based format for compatibility with certain Azure OpenAI models\n fullUrl = new URL(`${baseUrlPrefix}/deployments/${modelId}${path}`);\n } else {\n // Use v1 API format - no deployment ID in URL\n fullUrl = new URL(`${baseUrlPrefix}/v1${path}`);\n }\n\n fullUrl.searchParams.set('api-version', apiVersion);\n return fullUrl.toString();\n };\n\n const createChatModel = (deploymentName: string) =>\n new OpenAIChatLanguageModel(deploymentName, {\n provider: 'azure.chat',\n url,\n headers: getHeaders,\n fetch: options.fetch,\n });\n\n const createCompletionModel = (modelId: string) =>\n new OpenAICompletionLanguageModel(modelId, {\n provider: 'azure.completion',\n url,\n headers: getHeaders,\n fetch: options.fetch,\n });\n\n const createEmbeddingModel = (modelId: string) =>\n new OpenAIEmbeddingModel(modelId, {\n provider: 'azure.embeddings',\n headers: getHeaders,\n url,\n fetch: options.fetch,\n });\n\n const createResponsesModel = (modelId: string) =>\n new OpenAIResponsesLanguageModel(modelId, {\n provider: 'azure.responses',\n url,\n headers: getHeaders,\n fetch: options.fetch,\n fileIdPrefixes: ['assistant-'],\n });\n\n const createImageModel = (modelId: string) =>\n new OpenAIImageModel(modelId, {\n provider: 'azure.image',\n url,\n headers: getHeaders,\n fetch: options.fetch,\n });\n\n const createTranscriptionModel = (modelId: string) =>\n new OpenAITranscriptionModel(modelId, {\n provider: 'azure.transcription',\n url,\n headers: getHeaders,\n fetch: options.fetch,\n });\n\n const createSpeechModel = (modelId: string) =>\n new OpenAISpeechModel(modelId, {\n provider: 'azure.speech',\n url,\n headers: getHeaders,\n fetch: options.fetch,\n });\n\n 
const provider = function (deploymentId: string) {\n if (new.target) {\n throw new Error(\n 'The Azure OpenAI model function cannot be called with the new keyword.',\n );\n }\n\n return createResponsesModel(deploymentId);\n };\n\n provider.specificationVersion = 'v3' as const;\n provider.languageModel = createResponsesModel;\n provider.chat = createChatModel;\n provider.completion = createCompletionModel;\n provider.embedding = createEmbeddingModel;\n provider.embeddingModel = createEmbeddingModel;\n provider.textEmbedding = createEmbeddingModel;\n provider.textEmbeddingModel = createEmbeddingModel;\n provider.image = createImageModel;\n provider.imageModel = createImageModel;\n provider.responses = createResponsesModel;\n provider.transcription = createTranscriptionModel;\n provider.speech = createSpeechModel;\n provider.tools = azureOpenaiTools;\n return provider;\n}\n\n/**\nDefault Azure OpenAI provider instance.\n */\nexport const azure = createAzure();\n","import {\n codeInterpreter,\n fileSearch,\n imageGeneration,\n webSearchPreview,\n} from '@ai-sdk/openai/internal';\n\nexport const azureOpenaiTools: {\n codeInterpreter: typeof codeInterpreter;\n fileSearch: typeof fileSearch;\n imageGeneration: typeof imageGeneration;\n webSearchPreview: typeof webSearchPreview;\n} = {\n codeInterpreter,\n fileSearch,\n imageGeneration,\n webSearchPreview,\n};\n","// Version string of this package injected at build time.\ndeclare const __PACKAGE_VERSION__: string | undefined;\nexport const VERSION: string =\n typeof __PACKAGE_VERSION__ !== 'undefined'\n ? __PACKAGE_VERSION__\n : '0.0.0-test';\n"],"mappings":";;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;;;ACAA,IAAAA,mBAQO;AASP,4BAKO;;;ACtBP,sBAKO;AAEA,IAAM,mBAKT;AAAA,EACF;AAAA,EACA;AAAA,EACA;AAAA,EACA;AACF;;;ACfO,IAAM,UACX,OACI,WACA;;;AF0IC,SAAS,YACd,UAAuC,CAAC,GACnB;AAjJvB;AAkJE,QAAM,aAAa,MAAM;AACvB,UAAM,cAAc;AAAA,MAClB,eAAW,kCAAW;AAAA,QACpB,QAAQ,QAAQ;AAAA,QAChB,yBAAyB;AAAA,QACzB,aAAa;AAAA,MACf,CAAC;AAAA,MACD,GAAG,QAAQ;AAAA,IACb;AACA,eAAO,2CAAoB,aAAa,gBAAgB,OAAO,EAAE;AAAA,EACnE;AAEA,QAAM,kBAAkB,UACtB,mCAAY;AAAA,IACV,cAAc,QAAQ;AAAA,IACtB,aAAa;AAAA,IACb,yBAAyB;AAAA,IACzB,aAAa;AAAA,EACf,CAAC;AAEH,QAAM,cAAa,aAAQ,eAAR,YAAsB;AAEzC,QAAM,MAAM,CAAC,EAAE,MAAM,QAAQ,MAAyC;AAxKxE,QAAAC;AAyKI,UAAM,iBACJA,MAAA,QAAQ,YAAR,OAAAA,MAAmB,WAAW,gBAAgB,CAAC;AAEjD,QAAI;AACJ,QAAI,QAAQ,wBAAwB;AAElC,gBAAU,IAAI,IAAI,GAAG,aAAa,gBAAgB,OAAO,GAAG,IAAI,EAAE;AAAA,IACpE,OAAO;AAEL,gBAAU,IAAI,IAAI,GAAG,aAAa,MAAM,IAAI,EAAE;AAAA,IAChD;AAEA,YAAQ,aAAa,IAAI,eAAe,UAAU;AAClD,WAAO,QAAQ,SAAS;AAAA,EAC1B;AAEA,QAAM,kBAAkB,CAAC,mBACvB,IAAI,yCAAwB,gBAAgB;AAAA,IAC1C,UAAU;AAAA,IACV;AAAA,IACA,SAAS;AAAA,IACT,OAAO,QAAQ;AAAA,EACjB,CAAC;AAEH,QAAM,wBAAwB,CAAC,YAC7B,IAAI,+CAA8B,SAAS;AAAA,IACzC,UAAU;AAAA,IACV;AAAA,IACA,SAAS;AAAA,IACT,OAAO,QAAQ;AAAA,EACjB,CAAC;AAEH,QAAM,uBAAuB,CAAC,YAC5B,IAAI,sCAAqB,SAAS;AAAA,IAChC,UAAU;AAAA,IACV,SAAS;AAAA,IACT;AAAA,IACA,OAAO,QAAQ;AAAA,EACjB,CAAC;AAEH,QAAM,uBAAuB,CAAC,YAC5B,IAAI,8CAA6B,SAAS;AAAA,IACxC,UAAU;AAAA,IACV;AAAA,IACA,SAAS;AAAA,IACT,OAAO,QAAQ;AAAA,IACf,gBAAgB,CAAC,YAAY;AAAA,EAC/B,CAAC;AAEH,QAAM,mBAAmB,CAAC,YACxB,IAAI,kCAAiB,SAAS;AAAA,IAC5B,UAAU;AAAA,IACV;AAAA,IACA,SAAS;AAAA,IACT,OAAO,QAAQ;AAAA,EACjB,CAAC;AAEH,QAAM,2BAA2B,CAAC,YAChC,IAAI,0CAAyB,SAAS;AAAA,IACpC,UAAU;AAAA,IACV;AAAA,IACA,SAAS;AAAA,IACT,OAAO,QAAQ;AAAA,EACjB,CAAC;AAEH,QAAM,oBAAoB,CAAC,YACzB,IAAI,mCAAkB,SAAS;AAAA,IAC7B,UAAU;AAAA,IACV;AAAA,IACA,SAAS;AAAA,IACT,OAAO,QAAQ;AAAA,EACjB,CAAC;AAEH,QAAM,WAAW,SAAU,cAAsB;AAC/C,QAAI,YAAY;AACd,YAAM,IAAI;AAAA,QACR;AAAA,MACF;AAAA,IACF;AAEA,WAAO,qBAAqB,YAAY;AAAA,EAC
1C;AAEA,WAAS,uBAAuB;AAChC,WAAS,gBAAgB;AACzB,WAAS,OAAO;AAChB,WAAS,aAAa;AACtB,WAAS,YAAY;AACrB,WAAS,iBAAiB;AAC1B,WAAS,gBAAgB;AACzB,WAAS,qBAAqB;AAC9B,WAAS,QAAQ;AACjB,WAAS,aAAa;AACtB,WAAS,YAAY;AACrB,WAAS,gBAAgB;AACzB,WAAS,SAAS;AAClB,WAAS,QAAQ;AACjB,SAAO;AACT;AAKO,IAAM,QAAQ,YAAY;","names":["import_internal","_a"]}
package/dist/index.mjs CHANGED
@@ -29,7 +29,7 @@ var azureOpenaiTools = {
  };
 
  // src/version.ts
- var VERSION = true ? "3.0.18" : "0.0.0-test";
+ var VERSION = true ? "3.0.19" : "0.0.0-test";
 
  // src/azure-openai-provider.ts
  function createAzure(options = {}) {
package/docs/04-azure.mdx CHANGED
@@ -322,24 +322,34 @@ The following provider options are available:
 
  The Azure OpenAI provider also returns provider-specific metadata:
 
+ For Responses models (`azure(deploymentName)`), you can type this metadata using `AzureResponsesProviderMetadata`:
+
  ```ts
- const { providerMetadata } = await generateText({
+ import { azure, type AzureResponsesProviderMetadata } from '@ai-sdk/azure';
+ import { generateText } from 'ai';
+
+ const result = await generateText({
    model: azure('your-deployment-name'),
  });
 
- const openaiMetadata = providerMetadata?.openai;
- ```
+ const providerMetadata = result.providerMetadata as
+   | AzureResponsesProviderMetadata
+   | undefined;
 
- The following OpenAI-specific metadata is returned:
+ const { responseId, logprobs, serviceTier } = providerMetadata?.azure ?? {};
 
- - **responseId** _string_
-   The ID of the response. Can be used to continue a conversation.
+ // responseId can be used to continue a conversation (previousResponseId).
+ console.log(responseId);
+ ```
 
- - **cachedPromptTokens** _number_
-   The number of prompt tokens that were a cache hit.
+ The following Azure-specific metadata may be returned:
 
- - **reasoningTokens** _number_
-   The number of reasoning tokens that the model generated.
+ - **responseId** _string | null | undefined_
+   The ID of the response. Can be used to continue a conversation.
+ - **logprobs** _(optional)_
+   Log probabilities of output tokens (when enabled).
+ - **serviceTier** _(optional)_
+   Service tier information returned by the API.
 
  <Note>
    The providerMetadata is only returned with the default responses API, and is
@@ -652,6 +662,51 @@ for (const part of result.content) {
    API.
  </Note>
 
+ #### Typed providerMetadata in Reasoning Parts
+
+ When using the Azure OpenAI Responses API, reasoning output parts can include provider metadata.
+ To handle this metadata in a type-safe way, use `AzureResponsesReasoningProviderMetadata`.
+
+ For reasoning parts, when `part.type === 'reasoning'`, the `providerMetadata` is provided in the form of `AzureResponsesReasoningProviderMetadata`.
+
+ This metadata includes the following fields:
+
+ - `itemId`
+   The ID of the reasoning item in the Responses API.
+ - `reasoningEncryptedContent` (optional)
+   Encrypted reasoning content (only returned when requested via `include: ['reasoning.encrypted_content']`).
+
+ ```ts
+ import {
+   azure,
+   type AzureResponsesReasoningProviderMetadata,
+   type OpenAIResponsesProviderOptions,
+ } from '@ai-sdk/azure';
+ import { generateText } from 'ai';
+
+ const result = await generateText({
+   model: azure('your-deployment-name'),
+   prompt: 'How many "r"s are in the word "strawberry"?',
+   providerOptions: {
+     azure: {
+       store: false,
+       include: ['reasoning.encrypted_content'],
+     } satisfies OpenAIResponsesProviderOptions,
+   },
+ });
+
+ for (const part of result.content) {
+   if (part.type === 'reasoning') {
+     const providerMetadata = part.providerMetadata as
+       | AzureResponsesReasoningProviderMetadata
+       | undefined;
+
+     const { itemId, reasoningEncryptedContent } = providerMetadata?.azure ?? {};
+     console.log(itemId, reasoningEncryptedContent);
+   }
+ }
+ ```
+
  #### Typed providerMetadata in Source Document Parts
 
  For source document parts, when `part.type === 'source'` and `sourceType === 'document'`, the `providerMetadata` is provided as `AzureResponsesSourceDocumentProviderMetadata`.
package/package.json CHANGED
@@ -1,6 +1,6 @@
  {
    "name": "@ai-sdk/azure",
-   "version": "3.0.18",
+   "version": "3.0.19",
    "license": "Apache-2.0",
    "sideEffects": false,
    "main": "./dist/index.js",
@@ -29,9 +29,9 @@
      }
    },
    "dependencies": {
-     "@ai-sdk/openai": "3.0.18",
      "@ai-sdk/provider": "3.0.5",
-     "@ai-sdk/provider-utils": "4.0.9"
+     "@ai-sdk/provider-utils": "4.0.9",
+     "@ai-sdk/openai": "3.0.19"
    },
    "devDependencies": {
      "@types/node": "20.17.24",
package/src/azure-openai-provider-metadata.ts CHANGED
@@ -1,8 +1,18 @@
  import {
+   ResponsesProviderMetadata,
+   ResponsesReasoningProviderMetadata,
    ResponsesSourceDocumentProviderMetadata,
    ResponsesTextProviderMetadata,
  } from '@ai-sdk/openai/internal';
 
+ export type AzureResponsesProviderMetadata = {
+   azure: ResponsesProviderMetadata;
+ };
+
+ export type AzureResponsesReasoningProviderMetadata = {
+   azure: ResponsesReasoningProviderMetadata;
+ };
+
  export type AzureResponsesTextProviderMetadata = {
    azure: ResponsesTextProviderMetadata;
  };
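These wrapper types simply re-key the internal OpenAI Responses metadata under the `azure` provider entry. A small illustrative sketch of how client code might use that (the `reasoningMetadata` helper is hypothetical and not part of the package):

```ts
import type { AzureResponsesReasoningProviderMetadata } from '@ai-sdk/azure';

// Hypothetical helper: pull the azure-keyed reasoning metadata off a content part.
function reasoningMetadata(
  providerMetadata: Record<string, unknown> | undefined,
): AzureResponsesReasoningProviderMetadata['azure'] | undefined {
  return (providerMetadata as AzureResponsesReasoningProviderMetadata | undefined)
    ?.azure;
}

// Usage with a reasoning content part from generateText:
// if (part.type === 'reasoning') console.log(reasoningMetadata(part.providerMetadata)?.itemId);
```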
package/src/index.ts CHANGED
@@ -9,6 +9,8 @@ export type {
    AzureOpenAIProviderSettings,
  } from './azure-openai-provider';
  export type {
+   AzureResponsesProviderMetadata,
+   AzureResponsesReasoningProviderMetadata,
    AzureResponsesTextProviderMetadata,
    AzureResponsesSourceDocumentProviderMetadata,
  } from './azure-openai-provider-metadata';