@ai-sdk/azure 3.0.7 → 3.0.8
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +17 -0
- package/dist/index.d.mts +9 -2
- package/dist/index.d.ts +9 -2
- package/dist/index.js +1 -1
- package/dist/index.js.map +1 -1
- package/dist/index.mjs +1 -1
- package/package.json +2 -2
package/CHANGELOG.md
CHANGED
@@ -1,5 +1,22 @@
 # @ai-sdk/azure
 
+## 3.0.8
+
+### Patch Changes
+
+- 330bd92: Fix Responses `code_interpreter` annotations and add typed providerMetadata
+
+  - Align Responses API `code_interpreter` annotation types with the official spec.
+  - Add tests to ensure the overlapping parts of the Zod schemas used by `doGenerate` and `doStream` stay in sync.
+  - Export the following types for use in client code:
+    - `OpenaiResponsesTextProviderMetadata`
+    - `OpenaiResponsesSourceDocumentProviderMetadata`
+    - `AzureResponsesTextProviderMetadata`
+    - `AzureResponsesSourceDocumentProviderMetadata`
+
+- Updated dependencies [330bd92]
+  - @ai-sdk/openai@3.0.8
+
 ## 3.0.7
 
 ### Patch Changes
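The newly exported metadata types let client code keep `providerMetadata` lookups typed without redeclaring the shapes locally. A minimal sketch, assuming the usual `generateText` result shape from the `ai` package; the deployment name and prompt are placeholders:

```ts
import { generateText } from 'ai';
import { azure, type AzureResponsesTextProviderMetadata } from '@ai-sdk/azure';

const result = await generateText({
  model: azure('my-gpt-deployment'), // placeholder deployment name
  prompt: 'Use the code interpreter to compute 2 ** 10.',
});

for (const part of result.content) {
  if (part.type === 'text') {
    // providerMetadata is an untyped provider-keyed record by default; the
    // exported type narrows it to the Azure-specific `{ azure: ... }` shape
    // when inspecting Responses annotations such as code_interpreter results.
    const metadata = part.providerMetadata as
      | AzureResponsesTextProviderMetadata
      | undefined;
    console.log(metadata?.azure);
  }
}
```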
package/dist/index.d.mts
CHANGED
@@ -1,7 +1,7 @@
 export { OpenAIChatLanguageModelOptions, OpenAIResponsesProviderOptions } from '@ai-sdk/openai';
 import { ProviderV3, LanguageModelV3, EmbeddingModelV3, ImageModelV3, TranscriptionModelV3, SpeechModelV3 } from '@ai-sdk/provider';
 import { FetchFunction } from '@ai-sdk/provider-utils';
-import { codeInterpreter, fileSearch, imageGeneration, webSearchPreview } from '@ai-sdk/openai/internal';
+import { codeInterpreter, fileSearch, imageGeneration, webSearchPreview, ResponsesTextProviderMetadata, ResponsesSourceDocumentProviderMetadata } from '@ai-sdk/openai/internal';
 
 declare const azureOpenaiTools: {
     codeInterpreter: typeof codeInterpreter;
@@ -112,6 +112,13 @@ Default Azure OpenAI provider instance.
 */
 declare const azure: AzureOpenAIProvider;
 
+type AzureResponsesTextProviderMetadata = {
+    azure: ResponsesTextProviderMetadata;
+};
+type AzureResponsesSourceDocumentProviderMetadata = {
+    azure: ResponsesSourceDocumentProviderMetadata;
+};
+
 declare const VERSION: string;
 
-export { type AzureOpenAIProvider, type AzureOpenAIProviderSettings, VERSION, azure, createAzure };
+export { type AzureOpenAIProvider, type AzureOpenAIProviderSettings, type AzureResponsesSourceDocumentProviderMetadata, type AzureResponsesTextProviderMetadata, VERSION, azure, createAzure };
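The new aliases simply wrap the OpenAI Responses metadata payloads under an `azure` key, matching the provider id used at runtime. A small type-level sketch of that relationship; the lookup helper below is hypothetical, not part of the package:

```ts
import type {
  AzureResponsesTextProviderMetadata,
  AzureResponsesSourceDocumentProviderMetadata,
} from '@ai-sdk/azure';

// The payload types are the OpenAI Responses shapes, re-keyed for Azure:
export type TextPayload = AzureResponsesTextProviderMetadata['azure'];
export type SourcePayload =
  AzureResponsesSourceDocumentProviderMetadata['azure'];

// Hypothetical helper: pull the Azure entry out of a generic provider
// metadata record while keeping the typed payload.
export function getAzureTextMetadata(
  providerMetadata: Record<string, unknown> | undefined,
): TextPayload | undefined {
  return (providerMetadata as AzureResponsesTextProviderMetadata | undefined)
    ?.azure;
}
```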
package/dist/index.d.ts
CHANGED
@@ -1,7 +1,7 @@
 export { OpenAIChatLanguageModelOptions, OpenAIResponsesProviderOptions } from '@ai-sdk/openai';
 import { ProviderV3, LanguageModelV3, EmbeddingModelV3, ImageModelV3, TranscriptionModelV3, SpeechModelV3 } from '@ai-sdk/provider';
 import { FetchFunction } from '@ai-sdk/provider-utils';
-import { codeInterpreter, fileSearch, imageGeneration, webSearchPreview } from '@ai-sdk/openai/internal';
+import { codeInterpreter, fileSearch, imageGeneration, webSearchPreview, ResponsesTextProviderMetadata, ResponsesSourceDocumentProviderMetadata } from '@ai-sdk/openai/internal';
 
 declare const azureOpenaiTools: {
     codeInterpreter: typeof codeInterpreter;
@@ -112,6 +112,13 @@ Default Azure OpenAI provider instance.
 */
 declare const azure: AzureOpenAIProvider;
 
+type AzureResponsesTextProviderMetadata = {
+    azure: ResponsesTextProviderMetadata;
+};
+type AzureResponsesSourceDocumentProviderMetadata = {
+    azure: ResponsesSourceDocumentProviderMetadata;
+};
+
 declare const VERSION: string;
 
-export { type AzureOpenAIProvider, type AzureOpenAIProviderSettings, VERSION, azure, createAzure };
+export { type AzureOpenAIProvider, type AzureOpenAIProviderSettings, type AzureResponsesSourceDocumentProviderMetadata, type AzureResponsesTextProviderMetadata, VERSION, azure, createAzure };
package/dist/index.js
CHANGED
package/dist/index.js.map
CHANGED
@@ -1 +1 @@
-
{"version":3,"sources":["../src/index.ts","../src/azure-openai-provider.ts","../src/azure-openai-tools.ts","../src/version.ts"],"sourcesContent":["export type {\n OpenAIResponsesProviderOptions,\n OpenAIChatLanguageModelOptions,\n} from '@ai-sdk/openai';\n\nexport { azure, createAzure } from './azure-openai-provider';\nexport type {\n AzureOpenAIProvider,\n AzureOpenAIProviderSettings,\n} from './azure-openai-provider';\nexport { VERSION } from './version';\n","import {\n OpenAIChatLanguageModel,\n OpenAICompletionLanguageModel,\n OpenAIEmbeddingModel,\n OpenAIImageModel,\n OpenAIResponsesLanguageModel,\n OpenAISpeechModel,\n OpenAITranscriptionModel,\n} from '@ai-sdk/openai/internal';\nimport {\n EmbeddingModelV3,\n LanguageModelV3,\n ProviderV3,\n ImageModelV3,\n SpeechModelV3,\n TranscriptionModelV3,\n} from '@ai-sdk/provider';\nimport {\n FetchFunction,\n loadApiKey,\n loadSetting,\n withUserAgentSuffix,\n} from '@ai-sdk/provider-utils';\nimport { azureOpenaiTools } from './azure-openai-tools';\nimport { VERSION } from './version';\n\nexport interface AzureOpenAIProvider extends ProviderV3 {\n (deploymentId: string): LanguageModelV3;\n\n /**\n Creates an Azure OpenAI responses API model for text generation.\n */\n languageModel(deploymentId: string): LanguageModelV3;\n\n /**\nCreates an Azure OpenAI chat model for text generation.\n */\n chat(deploymentId: string): LanguageModelV3;\n\n /**\nCreates an Azure OpenAI responses API model for text generation.\n */\n responses(deploymentId: string): LanguageModelV3;\n\n /**\nCreates an Azure OpenAI completion model for text generation.\n */\n completion(deploymentId: string): LanguageModelV3;\n\n /**\n * Creates an Azure OpenAI model for text embeddings.\n */\n embedding(deploymentId: string): EmbeddingModelV3;\n\n /**\n * Creates an Azure OpenAI model for text embeddings.\n */\n embeddingModel(deploymentId: string): EmbeddingModelV3;\n\n /**\n * @deprecated Use `embedding` instead.\n */\n textEmbedding(deploymentId: string): EmbeddingModelV3;\n\n /**\n * @deprecated Use `embeddingModel` instead.\n */\n textEmbeddingModel(deploymentId: string): EmbeddingModelV3;\n\n /**\n * Creates an Azure OpenAI DALL-E model for image generation.\n */\n image(deploymentId: string): ImageModelV3;\n\n /**\n * Creates an Azure OpenAI DALL-E model for image generation.\n */\n imageModel(deploymentId: string): ImageModelV3;\n\n /**\n * Creates an Azure OpenAI model for audio transcription.\n */\n transcription(deploymentId: string): TranscriptionModelV3;\n\n /**\n * Creates an Azure OpenAI model for speech generation.\n */\n speech(deploymentId: string): SpeechModelV3;\n\n /**\n * AzureOpenAI-specific tools.\n */\n tools: typeof azureOpenaiTools;\n}\n\nexport interface AzureOpenAIProviderSettings {\n /**\nName of the Azure OpenAI resource. Either this or `baseURL` can be used.\n\nThe resource name is used in the assembled URL: `https://{resourceName}.openai.azure.com/openai/v1{path}`.\n */\n resourceName?: string;\n\n /**\nUse a different URL prefix for API calls, e.g. to use proxy servers. Either this or `resourceName` can be used.\nWhen a baseURL is provided, the resourceName is ignored.\n\nWith a baseURL, the resolved URL is `{baseURL}/v1{path}`.\n */\n baseURL?: string;\n\n /**\nAPI key for authenticating requests.\n */\n apiKey?: string;\n\n /**\nCustom headers to include in the requests.\n */\n headers?: Record<string, string>;\n\n /**\nCustom fetch implementation. 
You can use it as a middleware to intercept requests,\nor to provide a custom fetch implementation for e.g. testing.\n */\n fetch?: FetchFunction;\n\n /**\nCustom api version to use. Defaults to `preview`.\n */\n apiVersion?: string;\n\n /**\nUse deployment-based URLs for specific model types. Set to true to use legacy deployment format:\n`{baseURL}/deployments/{deploymentId}{path}?api-version={apiVersion}` instead of\n`{baseURL}/v1{path}?api-version={apiVersion}`.\n */\n useDeploymentBasedUrls?: boolean;\n}\n\n/**\nCreate an Azure OpenAI provider instance.\n */\nexport function createAzure(\n options: AzureOpenAIProviderSettings = {},\n): AzureOpenAIProvider {\n const getHeaders = () => {\n const baseHeaders = {\n 'api-key': loadApiKey({\n apiKey: options.apiKey,\n environmentVariableName: 'AZURE_API_KEY',\n description: 'Azure OpenAI',\n }),\n ...options.headers,\n };\n return withUserAgentSuffix(baseHeaders, `ai-sdk/azure/${VERSION}`);\n };\n\n const getResourceName = () =>\n loadSetting({\n settingValue: options.resourceName,\n settingName: 'resourceName',\n environmentVariableName: 'AZURE_RESOURCE_NAME',\n description: 'Azure OpenAI resource name',\n });\n\n const apiVersion = options.apiVersion ?? 'v1';\n\n const url = ({ path, modelId }: { path: string; modelId: string }) => {\n const baseUrlPrefix =\n options.baseURL ?? `https://${getResourceName()}.openai.azure.com/openai`;\n\n let fullUrl: URL;\n if (options.useDeploymentBasedUrls) {\n // Use deployment-based format for compatibility with certain Azure OpenAI models\n fullUrl = new URL(`${baseUrlPrefix}/deployments/${modelId}${path}`);\n } else {\n // Use v1 API format - no deployment ID in URL\n fullUrl = new URL(`${baseUrlPrefix}/v1${path}`);\n }\n\n fullUrl.searchParams.set('api-version', apiVersion);\n return fullUrl.toString();\n };\n\n const createChatModel = (deploymentName: string) =>\n new OpenAIChatLanguageModel(deploymentName, {\n provider: 'azure.chat',\n url,\n headers: getHeaders,\n fetch: options.fetch,\n });\n\n const createCompletionModel = (modelId: string) =>\n new OpenAICompletionLanguageModel(modelId, {\n provider: 'azure.completion',\n url,\n headers: getHeaders,\n fetch: options.fetch,\n });\n\n const createEmbeddingModel = (modelId: string) =>\n new OpenAIEmbeddingModel(modelId, {\n provider: 'azure.embeddings',\n headers: getHeaders,\n url,\n fetch: options.fetch,\n });\n\n const createResponsesModel = (modelId: string) =>\n new OpenAIResponsesLanguageModel(modelId, {\n provider: 'azure.responses',\n url,\n headers: getHeaders,\n fetch: options.fetch,\n fileIdPrefixes: ['assistant-'],\n });\n\n const createImageModel = (modelId: string) =>\n new OpenAIImageModel(modelId, {\n provider: 'azure.image',\n url,\n headers: getHeaders,\n fetch: options.fetch,\n });\n\n const createTranscriptionModel = (modelId: string) =>\n new OpenAITranscriptionModel(modelId, {\n provider: 'azure.transcription',\n url,\n headers: getHeaders,\n fetch: options.fetch,\n });\n\n const createSpeechModel = (modelId: string) =>\n new OpenAISpeechModel(modelId, {\n provider: 'azure.speech',\n url,\n headers: getHeaders,\n fetch: options.fetch,\n });\n\n const provider = function (deploymentId: string) {\n if (new.target) {\n throw new Error(\n 'The Azure OpenAI model function cannot be called with the new keyword.',\n );\n }\n\n return createResponsesModel(deploymentId);\n };\n\n provider.specificationVersion = 'v3' as const;\n provider.languageModel = createResponsesModel;\n provider.chat = createChatModel;\n provider.completion = 
createCompletionModel;\n provider.embedding = createEmbeddingModel;\n provider.embeddingModel = createEmbeddingModel;\n provider.textEmbedding = createEmbeddingModel;\n provider.textEmbeddingModel = createEmbeddingModel;\n provider.image = createImageModel;\n provider.imageModel = createImageModel;\n provider.responses = createResponsesModel;\n provider.transcription = createTranscriptionModel;\n provider.speech = createSpeechModel;\n provider.tools = azureOpenaiTools;\n return provider;\n}\n\n/**\nDefault Azure OpenAI provider instance.\n */\nexport const azure = createAzure();\n","import {\n codeInterpreter,\n fileSearch,\n imageGeneration,\n webSearchPreview,\n} from '@ai-sdk/openai/internal';\n\nexport const azureOpenaiTools: {\n codeInterpreter: typeof codeInterpreter;\n fileSearch: typeof fileSearch;\n imageGeneration: typeof imageGeneration;\n webSearchPreview: typeof webSearchPreview;\n} = {\n codeInterpreter,\n fileSearch,\n imageGeneration,\n webSearchPreview,\n};\n","// Version string of this package injected at build time.\ndeclare const __PACKAGE_VERSION__: string | undefined;\nexport const VERSION: string =\n typeof __PACKAGE_VERSION__ !== 'undefined'\n ? __PACKAGE_VERSION__\n : '0.0.0-test';\n"],"mappings":";;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;;;ACAA,IAAAA,mBAQO;AASP,4BAKO;;;ACtBP,sBAKO;AAEA,IAAM,mBAKT;AAAA,EACF;AAAA,EACA;AAAA,EACA;AAAA,EACA;AACF;;;ACfO,IAAM,UACX,OACI,UACA;;;AF0IC,SAAS,YACd,UAAuC,CAAC,GACnB;AAjJvB;AAkJE,QAAM,aAAa,MAAM;AACvB,UAAM,cAAc;AAAA,MAClB,eAAW,kCAAW;AAAA,QACpB,QAAQ,QAAQ;AAAA,QAChB,yBAAyB;AAAA,QACzB,aAAa;AAAA,MACf,CAAC;AAAA,MACD,GAAG,QAAQ;AAAA,IACb;AACA,eAAO,2CAAoB,aAAa,gBAAgB,OAAO,EAAE;AAAA,EACnE;AAEA,QAAM,kBAAkB,UACtB,mCAAY;AAAA,IACV,cAAc,QAAQ;AAAA,IACtB,aAAa;AAAA,IACb,yBAAyB;AAAA,IACzB,aAAa;AAAA,EACf,CAAC;AAEH,QAAM,cAAa,aAAQ,eAAR,YAAsB;AAEzC,QAAM,MAAM,CAAC,EAAE,MAAM,QAAQ,MAAyC;AAxKxE,QAAAC;AAyKI,UAAM,iBACJA,MAAA,QAAQ,YAAR,OAAAA,MAAmB,WAAW,gBAAgB,CAAC;AAEjD,QAAI;AACJ,QAAI,QAAQ,wBAAwB;AAElC,gBAAU,IAAI,IAAI,GAAG,aAAa,gBAAgB,OAAO,GAAG,IAAI,EAAE;AAAA,IACpE,OAAO;AAEL,gBAAU,IAAI,IAAI,GAAG,aAAa,MAAM,IAAI,EAAE;AAAA,IAChD;AAEA,YAAQ,aAAa,IAAI,eAAe,UAAU;AAClD,WAAO,QAAQ,SAAS;AAAA,EAC1B;AAEA,QAAM,kBAAkB,CAAC,mBACvB,IAAI,yCAAwB,gBAAgB;AAAA,IAC1C,UAAU;AAAA,IACV;AAAA,IACA,SAAS;AAAA,IACT,OAAO,QAAQ;AAAA,EACjB,CAAC;AAEH,QAAM,wBAAwB,CAAC,YAC7B,IAAI,+CAA8B,SAAS;AAAA,IACzC,UAAU;AAAA,IACV;AAAA,IACA,SAAS;AAAA,IACT,OAAO,QAAQ;AAAA,EACjB,CAAC;AAEH,QAAM,uBAAuB,CAAC,YAC5B,IAAI,sCAAqB,SAAS;AAAA,IAChC,UAAU;AAAA,IACV,SAAS;AAAA,IACT;AAAA,IACA,OAAO,QAAQ;AAAA,EACjB,CAAC;AAEH,QAAM,uBAAuB,CAAC,YAC5B,IAAI,8CAA6B,SAAS;AAAA,IACxC,UAAU;AAAA,IACV;AAAA,IACA,SAAS;AAAA,IACT,OAAO,QAAQ;AAAA,IACf,gBAAgB,CAAC,YAAY;AAAA,EAC/B,CAAC;AAEH,QAAM,mBAAmB,CAAC,YACxB,IAAI,kCAAiB,SAAS;AAAA,IAC5B,UAAU;AAAA,IACV;AAAA,IACA,SAAS;AAAA,IACT,OAAO,QAAQ;AAAA,EACjB,CAAC;AAEH,QAAM,2BAA2B,CAAC,YAChC,IAAI,0CAAyB,SAAS;AAAA,IACpC,UAAU;AAAA,IACV;AAAA,IACA,SAAS;AAAA,IACT,OAAO,QAAQ;AAAA,EACjB,CAAC;AAEH,QAAM,oBAAoB,CAAC,YACzB,IAAI,mCAAkB,SAAS;AAAA,IAC7B,UAAU;AAAA,IACV;AAAA,IACA,SAAS;AAAA,IACT,OAAO,QAAQ;AAAA,EACjB,CAAC;AAEH,QAAM,WAAW,SAAU,cAAsB;AAC/C,QAAI,YAAY;AACd,YAAM,IAAI;AAAA,QACR;AAAA,MACF;AAAA,IACF;AAEA,WAAO,qBAAqB,YAAY;AAAA,EAC1C;AAEA,WAAS,uBAAuB;AAChC,WAAS,gBAAgB;AACzB,WAAS,OAAO;AAChB,WAAS,aAAa;AACtB,WAAS,YAAY;AACrB,WAAS,iBAAiB;AAC1B,WAAS,gBAAgB;AACzB,WAAS,qBAAqB;AAC9B,WAAS,QAAQ;AACjB,WAAS,aAAa;AACtB,WAAS,YAAY;AACrB,WAAS,gBAAgB;AACzB,WAAS,SAAS;AAClB,WAAS,QAAQ;AACjB,SAAO;AACT;AAKO,IAAM,QAAQ,YAAY;","names":["import_internal","_a"]}
+
{"version":3,"sources":["../src/index.ts","../src/azure-openai-provider.ts","../src/azure-openai-tools.ts","../src/version.ts"],"sourcesContent":["export type {\n OpenAIResponsesProviderOptions,\n OpenAIChatLanguageModelOptions,\n} from '@ai-sdk/openai';\n\nexport { azure, createAzure } from './azure-openai-provider';\nexport type {\n AzureOpenAIProvider,\n AzureOpenAIProviderSettings,\n} from './azure-openai-provider';\nexport type {\n AzureResponsesTextProviderMetadata,\n AzureResponsesSourceDocumentProviderMetadata,\n} from './azure-openai-provider-metadata';\nexport { VERSION } from './version';\n","import {\n OpenAIChatLanguageModel,\n OpenAICompletionLanguageModel,\n OpenAIEmbeddingModel,\n OpenAIImageModel,\n OpenAIResponsesLanguageModel,\n OpenAISpeechModel,\n OpenAITranscriptionModel,\n} from '@ai-sdk/openai/internal';\nimport {\n EmbeddingModelV3,\n LanguageModelV3,\n ProviderV3,\n ImageModelV3,\n SpeechModelV3,\n TranscriptionModelV3,\n} from '@ai-sdk/provider';\nimport {\n FetchFunction,\n loadApiKey,\n loadSetting,\n withUserAgentSuffix,\n} from '@ai-sdk/provider-utils';\nimport { azureOpenaiTools } from './azure-openai-tools';\nimport { VERSION } from './version';\n\nexport interface AzureOpenAIProvider extends ProviderV3 {\n (deploymentId: string): LanguageModelV3;\n\n /**\n Creates an Azure OpenAI responses API model for text generation.\n */\n languageModel(deploymentId: string): LanguageModelV3;\n\n /**\nCreates an Azure OpenAI chat model for text generation.\n */\n chat(deploymentId: string): LanguageModelV3;\n\n /**\nCreates an Azure OpenAI responses API model for text generation.\n */\n responses(deploymentId: string): LanguageModelV3;\n\n /**\nCreates an Azure OpenAI completion model for text generation.\n */\n completion(deploymentId: string): LanguageModelV3;\n\n /**\n * Creates an Azure OpenAI model for text embeddings.\n */\n embedding(deploymentId: string): EmbeddingModelV3;\n\n /**\n * Creates an Azure OpenAI model for text embeddings.\n */\n embeddingModel(deploymentId: string): EmbeddingModelV3;\n\n /**\n * @deprecated Use `embedding` instead.\n */\n textEmbedding(deploymentId: string): EmbeddingModelV3;\n\n /**\n * @deprecated Use `embeddingModel` instead.\n */\n textEmbeddingModel(deploymentId: string): EmbeddingModelV3;\n\n /**\n * Creates an Azure OpenAI DALL-E model for image generation.\n */\n image(deploymentId: string): ImageModelV3;\n\n /**\n * Creates an Azure OpenAI DALL-E model for image generation.\n */\n imageModel(deploymentId: string): ImageModelV3;\n\n /**\n * Creates an Azure OpenAI model for audio transcription.\n */\n transcription(deploymentId: string): TranscriptionModelV3;\n\n /**\n * Creates an Azure OpenAI model for speech generation.\n */\n speech(deploymentId: string): SpeechModelV3;\n\n /**\n * AzureOpenAI-specific tools.\n */\n tools: typeof azureOpenaiTools;\n}\n\nexport interface AzureOpenAIProviderSettings {\n /**\nName of the Azure OpenAI resource. Either this or `baseURL` can be used.\n\nThe resource name is used in the assembled URL: `https://{resourceName}.openai.azure.com/openai/v1{path}`.\n */\n resourceName?: string;\n\n /**\nUse a different URL prefix for API calls, e.g. to use proxy servers. 
Either this or `resourceName` can be used.\nWhen a baseURL is provided, the resourceName is ignored.\n\nWith a baseURL, the resolved URL is `{baseURL}/v1{path}`.\n */\n baseURL?: string;\n\n /**\nAPI key for authenticating requests.\n */\n apiKey?: string;\n\n /**\nCustom headers to include in the requests.\n */\n headers?: Record<string, string>;\n\n /**\nCustom fetch implementation. You can use it as a middleware to intercept requests,\nor to provide a custom fetch implementation for e.g. testing.\n */\n fetch?: FetchFunction;\n\n /**\nCustom api version to use. Defaults to `preview`.\n */\n apiVersion?: string;\n\n /**\nUse deployment-based URLs for specific model types. Set to true to use legacy deployment format:\n`{baseURL}/deployments/{deploymentId}{path}?api-version={apiVersion}` instead of\n`{baseURL}/v1{path}?api-version={apiVersion}`.\n */\n useDeploymentBasedUrls?: boolean;\n}\n\n/**\nCreate an Azure OpenAI provider instance.\n */\nexport function createAzure(\n options: AzureOpenAIProviderSettings = {},\n): AzureOpenAIProvider {\n const getHeaders = () => {\n const baseHeaders = {\n 'api-key': loadApiKey({\n apiKey: options.apiKey,\n environmentVariableName: 'AZURE_API_KEY',\n description: 'Azure OpenAI',\n }),\n ...options.headers,\n };\n return withUserAgentSuffix(baseHeaders, `ai-sdk/azure/${VERSION}`);\n };\n\n const getResourceName = () =>\n loadSetting({\n settingValue: options.resourceName,\n settingName: 'resourceName',\n environmentVariableName: 'AZURE_RESOURCE_NAME',\n description: 'Azure OpenAI resource name',\n });\n\n const apiVersion = options.apiVersion ?? 'v1';\n\n const url = ({ path, modelId }: { path: string; modelId: string }) => {\n const baseUrlPrefix =\n options.baseURL ?? `https://${getResourceName()}.openai.azure.com/openai`;\n\n let fullUrl: URL;\n if (options.useDeploymentBasedUrls) {\n // Use deployment-based format for compatibility with certain Azure OpenAI models\n fullUrl = new URL(`${baseUrlPrefix}/deployments/${modelId}${path}`);\n } else {\n // Use v1 API format - no deployment ID in URL\n fullUrl = new URL(`${baseUrlPrefix}/v1${path}`);\n }\n\n fullUrl.searchParams.set('api-version', apiVersion);\n return fullUrl.toString();\n };\n\n const createChatModel = (deploymentName: string) =>\n new OpenAIChatLanguageModel(deploymentName, {\n provider: 'azure.chat',\n url,\n headers: getHeaders,\n fetch: options.fetch,\n });\n\n const createCompletionModel = (modelId: string) =>\n new OpenAICompletionLanguageModel(modelId, {\n provider: 'azure.completion',\n url,\n headers: getHeaders,\n fetch: options.fetch,\n });\n\n const createEmbeddingModel = (modelId: string) =>\n new OpenAIEmbeddingModel(modelId, {\n provider: 'azure.embeddings',\n headers: getHeaders,\n url,\n fetch: options.fetch,\n });\n\n const createResponsesModel = (modelId: string) =>\n new OpenAIResponsesLanguageModel(modelId, {\n provider: 'azure.responses',\n url,\n headers: getHeaders,\n fetch: options.fetch,\n fileIdPrefixes: ['assistant-'],\n });\n\n const createImageModel = (modelId: string) =>\n new OpenAIImageModel(modelId, {\n provider: 'azure.image',\n url,\n headers: getHeaders,\n fetch: options.fetch,\n });\n\n const createTranscriptionModel = (modelId: string) =>\n new OpenAITranscriptionModel(modelId, {\n provider: 'azure.transcription',\n url,\n headers: getHeaders,\n fetch: options.fetch,\n });\n\n const createSpeechModel = (modelId: string) =>\n new OpenAISpeechModel(modelId, {\n provider: 'azure.speech',\n url,\n headers: getHeaders,\n fetch: options.fetch,\n });\n\n 
const provider = function (deploymentId: string) {\n if (new.target) {\n throw new Error(\n 'The Azure OpenAI model function cannot be called with the new keyword.',\n );\n }\n\n return createResponsesModel(deploymentId);\n };\n\n provider.specificationVersion = 'v3' as const;\n provider.languageModel = createResponsesModel;\n provider.chat = createChatModel;\n provider.completion = createCompletionModel;\n provider.embedding = createEmbeddingModel;\n provider.embeddingModel = createEmbeddingModel;\n provider.textEmbedding = createEmbeddingModel;\n provider.textEmbeddingModel = createEmbeddingModel;\n provider.image = createImageModel;\n provider.imageModel = createImageModel;\n provider.responses = createResponsesModel;\n provider.transcription = createTranscriptionModel;\n provider.speech = createSpeechModel;\n provider.tools = azureOpenaiTools;\n return provider;\n}\n\n/**\nDefault Azure OpenAI provider instance.\n */\nexport const azure = createAzure();\n","import {\n codeInterpreter,\n fileSearch,\n imageGeneration,\n webSearchPreview,\n} from '@ai-sdk/openai/internal';\n\nexport const azureOpenaiTools: {\n codeInterpreter: typeof codeInterpreter;\n fileSearch: typeof fileSearch;\n imageGeneration: typeof imageGeneration;\n webSearchPreview: typeof webSearchPreview;\n} = {\n codeInterpreter,\n fileSearch,\n imageGeneration,\n webSearchPreview,\n};\n","// Version string of this package injected at build time.\ndeclare const __PACKAGE_VERSION__: string | undefined;\nexport const VERSION: string =\n typeof __PACKAGE_VERSION__ !== 'undefined'\n ? __PACKAGE_VERSION__\n : '0.0.0-test';\n"],"mappings":";;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;;;ACAA,IAAAA,mBAQO;AASP,4BAKO;;;ACtBP,sBAKO;AAEA,IAAM,mBAKT;AAAA,EACF;AAAA,EACA;AAAA,EACA;AAAA,EACA;AACF;;;ACfO,IAAM,UACX,OACI,UACA;;;AF0IC,SAAS,YACd,UAAuC,CAAC,GACnB;AAjJvB;AAkJE,QAAM,aAAa,MAAM;AACvB,UAAM,cAAc;AAAA,MAClB,eAAW,kCAAW;AAAA,QACpB,QAAQ,QAAQ;AAAA,QAChB,yBAAyB;AAAA,QACzB,aAAa;AAAA,MACf,CAAC;AAAA,MACD,GAAG,QAAQ;AAAA,IACb;AACA,eAAO,2CAAoB,aAAa,gBAAgB,OAAO,EAAE;AAAA,EACnE;AAEA,QAAM,kBAAkB,UACtB,mCAAY;AAAA,IACV,cAAc,QAAQ;AAAA,IACtB,aAAa;AAAA,IACb,yBAAyB;AAAA,IACzB,aAAa;AAAA,EACf,CAAC;AAEH,QAAM,cAAa,aAAQ,eAAR,YAAsB;AAEzC,QAAM,MAAM,CAAC,EAAE,MAAM,QAAQ,MAAyC;AAxKxE,QAAAC;AAyKI,UAAM,iBACJA,MAAA,QAAQ,YAAR,OAAAA,MAAmB,WAAW,gBAAgB,CAAC;AAEjD,QAAI;AACJ,QAAI,QAAQ,wBAAwB;AAElC,gBAAU,IAAI,IAAI,GAAG,aAAa,gBAAgB,OAAO,GAAG,IAAI,EAAE;AAAA,IACpE,OAAO;AAEL,gBAAU,IAAI,IAAI,GAAG,aAAa,MAAM,IAAI,EAAE;AAAA,IAChD;AAEA,YAAQ,aAAa,IAAI,eAAe,UAAU;AAClD,WAAO,QAAQ,SAAS;AAAA,EAC1B;AAEA,QAAM,kBAAkB,CAAC,mBACvB,IAAI,yCAAwB,gBAAgB;AAAA,IAC1C,UAAU;AAAA,IACV;AAAA,IACA,SAAS;AAAA,IACT,OAAO,QAAQ;AAAA,EACjB,CAAC;AAEH,QAAM,wBAAwB,CAAC,YAC7B,IAAI,+CAA8B,SAAS;AAAA,IACzC,UAAU;AAAA,IACV;AAAA,IACA,SAAS;AAAA,IACT,OAAO,QAAQ;AAAA,EACjB,CAAC;AAEH,QAAM,uBAAuB,CAAC,YAC5B,IAAI,sCAAqB,SAAS;AAAA,IAChC,UAAU;AAAA,IACV,SAAS;AAAA,IACT;AAAA,IACA,OAAO,QAAQ;AAAA,EACjB,CAAC;AAEH,QAAM,uBAAuB,CAAC,YAC5B,IAAI,8CAA6B,SAAS;AAAA,IACxC,UAAU;AAAA,IACV;AAAA,IACA,SAAS;AAAA,IACT,OAAO,QAAQ;AAAA,IACf,gBAAgB,CAAC,YAAY;AAAA,EAC/B,CAAC;AAEH,QAAM,mBAAmB,CAAC,YACxB,IAAI,kCAAiB,SAAS;AAAA,IAC5B,UAAU;AAAA,IACV;AAAA,IACA,SAAS;AAAA,IACT,OAAO,QAAQ;AAAA,EACjB,CAAC;AAEH,QAAM,2BAA2B,CAAC,YAChC,IAAI,0CAAyB,SAAS;AAAA,IACpC,UAAU;AAAA,IACV;AAAA,IACA,SAAS;AAAA,IACT,OAAO,QAAQ;AAAA,EACjB,CAAC;AAEH,QAAM,oBAAoB,CAAC,YACzB,IAAI,mCAAkB,SAAS;AAAA,IAC7B,UAAU;AAAA,IACV;AAAA,IACA,SAAS;AAAA,IACT,OAAO,QAAQ;AAAA,EACjB,CAAC;AAEH,QAAM,WAAW,SAAU,cAAsB;AAC/C,QAAI,YAAY;AACd,YAAM,IAAI;AAAA,QACR;AAAA,MACF;AAAA,IACF;AAEA,WAAO,qBAAqB,YAAY;AAAA,EAC
1C;AAEA,WAAS,uBAAuB;AAChC,WAAS,gBAAgB;AACzB,WAAS,OAAO;AAChB,WAAS,aAAa;AACtB,WAAS,YAAY;AACrB,WAAS,iBAAiB;AAC1B,WAAS,gBAAgB;AACzB,WAAS,qBAAqB;AAC9B,WAAS,QAAQ;AACjB,WAAS,aAAa;AACtB,WAAS,YAAY;AACrB,WAAS,gBAAgB;AACzB,WAAS,SAAS;AAClB,WAAS,QAAQ;AACjB,SAAO;AACT;AAKO,IAAM,QAAQ,YAAY;","names":["import_internal","_a"]}
package/dist/index.mjs
CHANGED
package/package.json
CHANGED
@@ -1,6 +1,6 @@
 {
   "name": "@ai-sdk/azure",
-  "version": "3.0.7",
+  "version": "3.0.8",
   "license": "Apache-2.0",
   "sideEffects": false,
   "main": "./dist/index.js",
@@ -20,7 +20,7 @@
     }
   },
   "dependencies": {
-    "@ai-sdk/openai": "3.0.7",
+    "@ai-sdk/openai": "3.0.8",
     "@ai-sdk/provider": "3.0.2",
     "@ai-sdk/provider-utils": "4.0.4"
   },