@ai-sdk/azure 2.0.9 → 2.0.11

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/CHANGELOG.md CHANGED
@@ -1,5 +1,34 @@
  # @ai-sdk/azure

+ ## 2.0.11
+
+ ### Patch Changes
+
+ - 097b452: feat(openai, azure): add configurable file ID prefixes for Responses API
+
+   - Added `fileIdPrefixes` option to OpenAI Responses API configuration
+   - Azure OpenAI now supports `assistant-` prefixed file IDs (replacing previous `file-` prefix support)
+   - OpenAI maintains backward compatibility with default `file-` prefix
+   - File ID detection is disabled when `fileIdPrefixes` is undefined, gracefully falling back to base64 processing
+
+ - 0853d6b: fix(provider/azure): add deployment-based URL support for transcription compatibility
+ - Updated dependencies [097b452]
+ - Updated dependencies [87cf954]
+ - Updated dependencies [a3d98a9]
+ - Updated dependencies [38ac190]
+ - Updated dependencies [110d167]
+ - Updated dependencies [8d3c747]
+   - @ai-sdk/openai@2.0.11
+   - @ai-sdk/provider-utils@3.0.2
+
+ ## 2.0.10
+
+ ### Patch Changes
+
+ - Updated dependencies [a274b01]
+ - Updated dependencies [b48e0ff]
+   - @ai-sdk/openai@2.0.10
+
  ## 2.0.9

  ### Patch Changes
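To illustrate the `fileIdPrefixes` change described in the 2.0.11 entry: the Azure responses model now treats strings starting with `assistant-` as references to files already uploaded to the Azure OpenAI resource instead of base64-encoding them. A minimal sketch of exercising that through the AI SDK, assuming a hypothetical file ID `assistant-abc123`, a hypothetical deployment `my-gpt-4o-deployment`, and `AZURE_RESOURCE_NAME` / `AZURE_API_KEY` being set in the environment:

```ts
import { generateText } from 'ai';
import { azure } from '@ai-sdk/azure';

// Sketch only: 'assistant-abc123' is a hypothetical ID of a file previously
// uploaded to the Azure OpenAI resource. With fileIdPrefixes: ['assistant-']
// configured on the responses model, this string should be forwarded as a
// file reference rather than being treated as inline base64 data.
const { text } = await generateText({
  model: azure.responses('my-gpt-4o-deployment'),
  messages: [
    {
      role: 'user',
      content: [
        { type: 'text', text: 'Summarize the attached document.' },
        { type: 'file', data: 'assistant-abc123', mediaType: 'application/pdf' },
      ],
    },
  ],
});

console.log(text);
```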
package/dist/index.d.mts CHANGED
@@ -76,6 +76,12 @@ interface AzureOpenAIProviderSettings {
   Custom api version to use. Defaults to `preview`.
   */
  apiVersion?: string;
+ /**
+ Use deployment-based URLs for specific model types. Set to true to use legacy deployment format:
+ `{baseURL}/deployments/{deploymentId}{path}?api-version={apiVersion}` instead of
+ `{baseURL}/v1{path}?api-version={apiVersion}`.
+ */
+ useDeploymentBasedUrls?: boolean;
  }
  /**
  Create an Azure OpenAI provider instance.
package/dist/index.d.ts CHANGED
@@ -76,6 +76,12 @@ interface AzureOpenAIProviderSettings {
   Custom api version to use. Defaults to `preview`.
   */
  apiVersion?: string;
+ /**
+ Use deployment-based URLs for specific model types. Set to true to use legacy deployment format:
+ `{baseURL}/deployments/{deploymentId}{path}?api-version={apiVersion}` instead of
+ `{baseURL}/v1{path}?api-version={apiVersion}`.
+ */
+ useDeploymentBasedUrls?: boolean;
  }
  /**
  Create an Azure OpenAI provider instance.
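The `useDeploymentBasedUrls` option declared above opts the provider back into the legacy `/deployments/{deploymentId}` URL shape, which the changelog associates with transcription compatibility. A minimal sketch of enabling it; resource name, deployment name, and api-version values are placeholders, not taken from the package's documentation:

```ts
import { createAzure } from '@ai-sdk/azure';

// All identifiers below are illustrative placeholders.
const azure = createAzure({
  resourceName: 'my-resource',
  apiKey: process.env.AZURE_API_KEY,
  apiVersion: '2024-06-01', // a dated api-version; the default 'preview' targets the /v1 routes
  useDeploymentBasedUrls: true,
});

// Requests for this model should now resolve to URLs of the form
// https://my-resource.openai.azure.com/openai/deployments/my-whisper-deployment/...?api-version=2024-06-01
const transcriptionModel = azure.transcription('my-whisper-deployment');
```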
package/dist/index.js CHANGED
@@ -48,7 +48,12 @@ function createAzure(options = {}) {
  const url = ({ path, modelId }) => {
    var _a2;
    const baseUrlPrefix = (_a2 = options.baseURL) != null ? _a2 : `https://${getResourceName()}.openai.azure.com/openai`;
-   const fullUrl = new URL(`${baseUrlPrefix}/v1${path}`);
+   let fullUrl;
+   if (options.useDeploymentBasedUrls) {
+     fullUrl = new URL(`${baseUrlPrefix}/deployments/${modelId}${path}`);
+   } else {
+     fullUrl = new URL(`${baseUrlPrefix}/v1${path}`);
+   }
    fullUrl.searchParams.set("api-version", apiVersion);
    return fullUrl.toString();
  };
@@ -74,7 +79,8 @@ function createAzure(options = {}) {
    provider: "azure.responses",
    url,
    headers: getHeaders,
-   fetch: options.fetch
+   fetch: options.fetch,
+   fileIdPrefixes: ["assistant-"]
  });
  const createImageModel = (modelId) => new import_internal.OpenAIImageModel(modelId, {
    provider: "azure.image",
package/dist/index.js.map CHANGED
@@ -1 +1 @@
- {"version":3,"sources":["../src/index.ts","../src/azure-openai-provider.ts"],"sourcesContent":[… 2.0.9 source …],"mappings":"…"}
+ {"version":3,"sources":["../src/index.ts","../src/azure-openai-provider.ts"],"sourcesContent":[… 2.0.11 source, including `useDeploymentBasedUrls` and `fileIdPrefixes: ['assistant-']` …],"mappings":"…"}
package/dist/index.mjs CHANGED
@@ -29,7 +29,12 @@ function createAzure(options = {}) {
  const url = ({ path, modelId }) => {
    var _a2;
    const baseUrlPrefix = (_a2 = options.baseURL) != null ? _a2 : `https://${getResourceName()}.openai.azure.com/openai`;
-   const fullUrl = new URL(`${baseUrlPrefix}/v1${path}`);
+   let fullUrl;
+   if (options.useDeploymentBasedUrls) {
+     fullUrl = new URL(`${baseUrlPrefix}/deployments/${modelId}${path}`);
+   } else {
+     fullUrl = new URL(`${baseUrlPrefix}/v1${path}`);
+   }
    fullUrl.searchParams.set("api-version", apiVersion);
    return fullUrl.toString();
  };
@@ -55,7 +60,8 @@ function createAzure(options = {}) {
    provider: "azure.responses",
    url,
    headers: getHeaders,
-   fetch: options.fetch
+   fetch: options.fetch,
+   fileIdPrefixes: ["assistant-"]
  });
  const createImageModel = (modelId) => new OpenAIImageModel(modelId, {
    provider: "azure.image",
package/dist/index.mjs.map CHANGED
@@ -1 +1 @@
- {"version":3,"sources":["../src/azure-openai-provider.ts"],"sourcesContent":[… 2.0.9 source …],"mappings":"…"}
+ {"version":3,"sources":["../src/azure-openai-provider.ts"],"sourcesContent":[… 2.0.11 source, including `useDeploymentBasedUrls` and `fileIdPrefixes: ['assistant-']` …],"mappings":"…"}
package/package.json CHANGED
@@ -1,6 +1,6 @@
  {
    "name": "@ai-sdk/azure",
-   "version": "2.0.9",
+   "version": "2.0.11",
    "license": "Apache-2.0",
    "sideEffects": false,
    "main": "./dist/index.js",
@@ -19,9 +19,9 @@
    }
  },
  "dependencies": {
-   "@ai-sdk/openai": "2.0.9",
+   "@ai-sdk/openai": "2.0.11",
    "@ai-sdk/provider": "2.0.0",
-   "@ai-sdk/provider-utils": "3.0.1"
+   "@ai-sdk/provider-utils": "3.0.2"
  },
  "devDependencies": {
    "@types/node": "20.17.24",