@ai-sdk/azure 0.0.24 → 0.0.26
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/index.d.mts +2 -1
- package/dist/index.d.ts +2 -1
- package/dist/index.js.map +1 -1
- package/dist/index.mjs +2 -2
- package/dist/index.mjs.map +1 -1
- package/package.json +5 -5
package/dist/index.d.mts
CHANGED

@@ -1,4 +1,5 @@
 import { OpenAIChatSettings, OpenAIChatLanguageModel, OpenAIEmbeddingSettings, OpenAIEmbeddingModel, OpenAICompletionSettings, OpenAICompletionLanguageModel } from '@ai-sdk/openai/internal';
+import { FetchFunction } from '@ai-sdk/provider-utils';

 interface AzureOpenAIProvider {
 (deploymentId: string, settings?: OpenAIChatSettings): OpenAIChatLanguageModel;
@@ -49,7 +50,7 @@ interface AzureOpenAIProviderSettings {
 Custom fetch implementation. You can use it as a middleware to intercept requests,
 or to provide a custom fetch implementation for e.g. testing.
 */
-fetch?:
+fetch?: FetchFunction;
 }
 /**
 Create an Azure OpenAI provider instance.
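
The only typing change here is that the optional fetch setting on AzureOpenAIProviderSettings is now declared as FetchFunction from @ai-sdk/provider-utils (the removed declaration is truncated in this view). Below is a minimal sketch of how a consumer might pass a custom fetch, assuming FetchFunction is compatible with a standard (input, init) => Promise<Response> function; the resource and deployment names are placeholders.

import { createAzure } from '@ai-sdk/azure';

// Wrap the global fetch to log outgoing requests before forwarding them.
// Assumption: FetchFunction accepts a standard fetch-compatible function.
const azure = createAzure({
  resourceName: 'my-resource', // placeholder; AZURE_RESOURCE_NAME is used when omitted
  // apiKey omitted: it is read from the AZURE_API_KEY environment variable
  fetch: async (input, init) => {
    console.log('Azure OpenAI request:', String(input));
    return fetch(input, init);
  },
});

const chatModel = azure('my-deployment'); // callable form returns an OpenAIChatLanguageModel
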
package/dist/index.d.ts
CHANGED

@@ -1,4 +1,5 @@
 import { OpenAIChatSettings, OpenAIChatLanguageModel, OpenAIEmbeddingSettings, OpenAIEmbeddingModel, OpenAICompletionSettings, OpenAICompletionLanguageModel } from '@ai-sdk/openai/internal';
+import { FetchFunction } from '@ai-sdk/provider-utils';

 interface AzureOpenAIProvider {
 (deploymentId: string, settings?: OpenAIChatSettings): OpenAIChatLanguageModel;
@@ -49,7 +50,7 @@ interface AzureOpenAIProviderSettings {
 Custom fetch implementation. You can use it as a middleware to intercept requests,
 or to provide a custom fetch implementation for e.g. testing.
 */
-fetch?:
+fetch?: FetchFunction;
 }
 /**
 Create an Azure OpenAI provider instance.

package/dist/index.js.map
CHANGED

@@ -1 +1 @@
-{"version":3,"sources":["../src/index.ts","../src/azure-openai-provider.ts"],"sourcesContent":["export * from './azure-openai-provider';\n","import {\n OpenAIChatLanguageModel,\n OpenAIChatSettings,\n
+
{"version":3,"sources":["../src/index.ts","../src/azure-openai-provider.ts"],"sourcesContent":["export * from './azure-openai-provider';\n","import {\n OpenAIChatLanguageModel,\n OpenAIChatSettings,\n OpenAICompletionLanguageModel,\n OpenAICompletionSettings,\n OpenAIEmbeddingModel,\n OpenAIEmbeddingSettings,\n} from '@ai-sdk/openai/internal';\nimport { FetchFunction, loadApiKey, loadSetting } from '@ai-sdk/provider-utils';\n\nexport interface AzureOpenAIProvider {\n (\n deploymentId: string,\n settings?: OpenAIChatSettings,\n ): OpenAIChatLanguageModel;\n\n /**\nCreates an Azure OpenAI chat model for text generation.\n */\n languageModel(\n deploymentId: string,\n settings?: OpenAIChatSettings,\n ): OpenAIChatLanguageModel;\n\n /**\nCreates an Azure OpenAI chat model for text generation.\n */\n chat(\n deploymentId: string,\n settings?: OpenAIChatSettings,\n ): OpenAIChatLanguageModel;\n\n /**\nCreates an Azure OpenAI model for text embeddings.\n */\n embedding(\n deploymentId: string,\n settings?: OpenAIEmbeddingSettings,\n ): OpenAIEmbeddingModel;\n\n /**\nCreates an Azure OpenAI model for text embeddings.\n */\n textEmbedding(\n deploymentId: string,\n settings?: OpenAIEmbeddingSettings,\n ): OpenAIEmbeddingModel;\n\n /**\n * Creates an Azure OpenAI completion model for text generation.\n */\n completion(\n deploymentId: string,\n settings?: OpenAICompletionSettings,\n ): OpenAICompletionLanguageModel;\n}\n\nexport interface AzureOpenAIProviderSettings {\n /**\nName of the Azure OpenAI resource. Either this or `baseURL` can be used.\n\nThe resource name is used in the assembled URL: `https://{resourceName}.openai.azure.com/openai/deployments/{modelId}{path}`.\n */\n resourceName?: string;\n\n /**\nUse a different URL prefix for API calls, e.g. to use proxy servers. Either this or `resourceName` can be used.\nWhen a baseURL is provided, the resourceName is ignored.\n\nWith a baseURL, the resolved URL is `{baseURL}/{modelId}{path}`.\n */\n baseURL?: string;\n\n /**\nAPI key for authenticating requests.\n */\n apiKey?: string;\n\n /**\nCustom headers to include in the requests.\n */\n headers?: Record<string, string>;\n\n /**\nCustom fetch implementation. You can use it as a middleware to intercept requests,\nor to provide a custom fetch implementation for e.g. testing.\n */\n fetch?: FetchFunction;\n}\n\n/**\nCreate an Azure OpenAI provider instance.\n */\nexport function createAzure(\n options: AzureOpenAIProviderSettings = {},\n): AzureOpenAIProvider {\n const getHeaders = () => ({\n 'api-key': loadApiKey({\n apiKey: options.apiKey,\n environmentVariableName: 'AZURE_API_KEY',\n description: 'Azure OpenAI',\n }),\n ...options.headers,\n });\n\n const getResourceName = () =>\n loadSetting({\n settingValue: options.resourceName,\n settingName: 'resourceName',\n environmentVariableName: 'AZURE_RESOURCE_NAME',\n description: 'Azure OpenAI resource name',\n });\n\n const url = ({ path, modelId }: { path: string; modelId: string }) =>\n options.baseURL\n ? 
`${options.baseURL}/${modelId}${path}?api-version=2024-06-01`\n : `https://${getResourceName()}.openai.azure.com/openai/deployments/${modelId}${path}?api-version=2024-06-01`;\n\n const createChatModel = (\n deploymentName: string,\n settings: OpenAIChatSettings = {},\n ) =>\n new OpenAIChatLanguageModel(deploymentName, settings, {\n provider: 'azure-openai.chat',\n url,\n headers: getHeaders,\n compatibility: 'compatible',\n fetch: options.fetch,\n });\n\n const createCompletionModel = (\n modelId: string,\n settings: OpenAICompletionSettings = {},\n ) =>\n new OpenAICompletionLanguageModel(modelId, settings, {\n provider: 'azure-openai.completion',\n url,\n compatibility: 'compatible',\n headers: getHeaders,\n fetch: options.fetch,\n });\n\n const createEmbeddingModel = (\n modelId: string,\n settings: OpenAIEmbeddingSettings = {},\n ) =>\n new OpenAIEmbeddingModel(modelId, settings, {\n provider: 'azure-openai.embeddings',\n headers: getHeaders,\n url,\n fetch: options.fetch,\n });\n\n const provider = function (\n deploymentId: string,\n settings?: OpenAIChatSettings | OpenAICompletionSettings,\n ) {\n if (new.target) {\n throw new Error(\n 'The Azure OpenAI model function cannot be called with the new keyword.',\n );\n }\n\n return createChatModel(deploymentId, settings as OpenAIChatSettings);\n };\n\n provider.languageModel = createChatModel;\n provider.chat = createChatModel;\n provider.completion = createCompletionModel;\n provider.embedding = createEmbeddingModel;\n provider.textEmbedding = createEmbeddingModel;\n\n return provider as AzureOpenAIProvider;\n}\n\n/**\nDefault Azure OpenAI provider instance.\n */\nexport const azure = createAzure({});\n"],"mappings":";;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;;;ACAA,sBAOO;AACP,4BAAuD;AAqFhD,SAAS,YACd,UAAuC,CAAC,GACnB;AACrB,QAAM,aAAa,OAAO;AAAA,IACxB,eAAW,kCAAW;AAAA,MACpB,QAAQ,QAAQ;AAAA,MAChB,yBAAyB;AAAA,MACzB,aAAa;AAAA,IACf,CAAC;AAAA,IACD,GAAG,QAAQ;AAAA,EACb;AAEA,QAAM,kBAAkB,UACtB,mCAAY;AAAA,IACV,cAAc,QAAQ;AAAA,IACtB,aAAa;AAAA,IACb,yBAAyB;AAAA,IACzB,aAAa;AAAA,EACf,CAAC;AAEH,QAAM,MAAM,CAAC,EAAE,MAAM,QAAQ,MAC3B,QAAQ,UACJ,GAAG,QAAQ,OAAO,IAAI,OAAO,GAAG,IAAI,4BACpC,WAAW,gBAAgB,CAAC,wCAAwC,OAAO,GAAG,IAAI;AAExF,QAAM,kBAAkB,CACtB,gBACA,WAA+B,CAAC,MAEhC,IAAI,wCAAwB,gBAAgB,UAAU;AAAA,IACpD,UAAU;AAAA,IACV;AAAA,IACA,SAAS;AAAA,IACT,eAAe;AAAA,IACf,OAAO,QAAQ;AAAA,EACjB,CAAC;AAEH,QAAM,wBAAwB,CAC5B,SACA,WAAqC,CAAC,MAEtC,IAAI,8CAA8B,SAAS,UAAU;AAAA,IACnD,UAAU;AAAA,IACV;AAAA,IACA,eAAe;AAAA,IACf,SAAS;AAAA,IACT,OAAO,QAAQ;AAAA,EACjB,CAAC;AAEH,QAAM,uBAAuB,CAC3B,SACA,WAAoC,CAAC,MAErC,IAAI,qCAAqB,SAAS,UAAU;AAAA,IAC1C,UAAU;AAAA,IACV,SAAS;AAAA,IACT;AAAA,IACA,OAAO,QAAQ;AAAA,EACjB,CAAC;AAEH,QAAM,WAAW,SACf,cACA,UACA;AACA,QAAI,YAAY;AACd,YAAM,IAAI;AAAA,QACR;AAAA,MACF;AAAA,IACF;AAEA,WAAO,gBAAgB,cAAc,QAA8B;AAAA,EACrE;AAEA,WAAS,gBAAgB;AACzB,WAAS,OAAO;AAChB,WAAS,aAAa;AACtB,WAAS,YAAY;AACrB,WAAS,gBAAgB;AAEzB,SAAO;AACT;AAKO,IAAM,QAAQ,YAAY,CAAC,CAAC;","names":[]}
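
The regenerated source map embeds the full azure-openai-provider.ts source, which spells out how requests are routed: the API key falls back to the AZURE_API_KEY environment variable, the resource name to AZURE_RESOURCE_NAME, and every request URL carries api-version=2024-06-01. The sketch below shows the two configuration styles described there; all names and URLs are placeholders.

import { createAzure } from '@ai-sdk/azure';

// Resource-name style: requests resolve to
// https://my-resource.openai.azure.com/openai/deployments/<deploymentId><path>?api-version=2024-06-01
const azureByResource = createAzure({
  resourceName: 'my-resource',
  apiKey: 'my-api-key', // placeholder; omit to fall back to AZURE_API_KEY
});

// baseURL style: resourceName is ignored and requests resolve to
// <baseURL>/<deploymentId><path>?api-version=2024-06-01 (e.g. for proxy servers)
const azureViaProxy = createAzure({
  baseURL: 'https://my-proxy.example.com/openai/deployments', // placeholder proxy URL
  apiKey: 'my-api-key',
});

const model = azureByResource('my-deployment'); // OpenAIChatLanguageModel for that deployment
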
package/dist/index.mjs
CHANGED

@@ -1,8 +1,8 @@
 // src/azure-openai-provider.ts
 import {
 OpenAIChatLanguageModel,
-
-
+OpenAICompletionLanguageModel,
+OpenAIEmbeddingModel
 } from "@ai-sdk/openai/internal";
 import { loadApiKey, loadSetting } from "@ai-sdk/provider-utils";
 function createAzure(options = {}) {
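
In the rebuilt ESM entry point, the import block from @ai-sdk/openai/internal now lists OpenAICompletionLanguageModel and OpenAIEmbeddingModel (the removed lines are truncated in this view). These classes back the provider methods shown in the embedded source above; a brief sketch follows, with placeholder deployment names.

import { createAzure } from '@ai-sdk/azure';

// Credentials and resource name fall back to AZURE_API_KEY / AZURE_RESOURCE_NAME.
const azure = createAzure({});

const chat = azure.chat('my-chat-deployment');                 // OpenAIChatLanguageModel
const completion = azure.completion('my-instruct-deployment'); // OpenAICompletionLanguageModel
const embeddings = azure.embedding('my-embedding-deployment'); // OpenAIEmbeddingModel
// languageModel and textEmbedding are aliases for chat and embedding, respectively.
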
package/dist/index.mjs.map
CHANGED

@@ -1 +1 @@
-{"version":3,"sources":["../src/azure-openai-provider.ts"],"sourcesContent":["import {\n OpenAIChatLanguageModel,\n OpenAIChatSettings,\n
+
{"version":3,"sources":["../src/azure-openai-provider.ts"],"sourcesContent":["import {\n OpenAIChatLanguageModel,\n OpenAIChatSettings,\n OpenAICompletionLanguageModel,\n OpenAICompletionSettings,\n OpenAIEmbeddingModel,\n OpenAIEmbeddingSettings,\n} from '@ai-sdk/openai/internal';\nimport { FetchFunction, loadApiKey, loadSetting } from '@ai-sdk/provider-utils';\n\nexport interface AzureOpenAIProvider {\n (\n deploymentId: string,\n settings?: OpenAIChatSettings,\n ): OpenAIChatLanguageModel;\n\n /**\nCreates an Azure OpenAI chat model for text generation.\n */\n languageModel(\n deploymentId: string,\n settings?: OpenAIChatSettings,\n ): OpenAIChatLanguageModel;\n\n /**\nCreates an Azure OpenAI chat model for text generation.\n */\n chat(\n deploymentId: string,\n settings?: OpenAIChatSettings,\n ): OpenAIChatLanguageModel;\n\n /**\nCreates an Azure OpenAI model for text embeddings.\n */\n embedding(\n deploymentId: string,\n settings?: OpenAIEmbeddingSettings,\n ): OpenAIEmbeddingModel;\n\n /**\nCreates an Azure OpenAI model for text embeddings.\n */\n textEmbedding(\n deploymentId: string,\n settings?: OpenAIEmbeddingSettings,\n ): OpenAIEmbeddingModel;\n\n /**\n * Creates an Azure OpenAI completion model for text generation.\n */\n completion(\n deploymentId: string,\n settings?: OpenAICompletionSettings,\n ): OpenAICompletionLanguageModel;\n}\n\nexport interface AzureOpenAIProviderSettings {\n /**\nName of the Azure OpenAI resource. Either this or `baseURL` can be used.\n\nThe resource name is used in the assembled URL: `https://{resourceName}.openai.azure.com/openai/deployments/{modelId}{path}`.\n */\n resourceName?: string;\n\n /**\nUse a different URL prefix for API calls, e.g. to use proxy servers. Either this or `resourceName` can be used.\nWhen a baseURL is provided, the resourceName is ignored.\n\nWith a baseURL, the resolved URL is `{baseURL}/{modelId}{path}`.\n */\n baseURL?: string;\n\n /**\nAPI key for authenticating requests.\n */\n apiKey?: string;\n\n /**\nCustom headers to include in the requests.\n */\n headers?: Record<string, string>;\n\n /**\nCustom fetch implementation. You can use it as a middleware to intercept requests,\nor to provide a custom fetch implementation for e.g. testing.\n */\n fetch?: FetchFunction;\n}\n\n/**\nCreate an Azure OpenAI provider instance.\n */\nexport function createAzure(\n options: AzureOpenAIProviderSettings = {},\n): AzureOpenAIProvider {\n const getHeaders = () => ({\n 'api-key': loadApiKey({\n apiKey: options.apiKey,\n environmentVariableName: 'AZURE_API_KEY',\n description: 'Azure OpenAI',\n }),\n ...options.headers,\n });\n\n const getResourceName = () =>\n loadSetting({\n settingValue: options.resourceName,\n settingName: 'resourceName',\n environmentVariableName: 'AZURE_RESOURCE_NAME',\n description: 'Azure OpenAI resource name',\n });\n\n const url = ({ path, modelId }: { path: string; modelId: string }) =>\n options.baseURL\n ? 
`${options.baseURL}/${modelId}${path}?api-version=2024-06-01`\n : `https://${getResourceName()}.openai.azure.com/openai/deployments/${modelId}${path}?api-version=2024-06-01`;\n\n const createChatModel = (\n deploymentName: string,\n settings: OpenAIChatSettings = {},\n ) =>\n new OpenAIChatLanguageModel(deploymentName, settings, {\n provider: 'azure-openai.chat',\n url,\n headers: getHeaders,\n compatibility: 'compatible',\n fetch: options.fetch,\n });\n\n const createCompletionModel = (\n modelId: string,\n settings: OpenAICompletionSettings = {},\n ) =>\n new OpenAICompletionLanguageModel(modelId, settings, {\n provider: 'azure-openai.completion',\n url,\n compatibility: 'compatible',\n headers: getHeaders,\n fetch: options.fetch,\n });\n\n const createEmbeddingModel = (\n modelId: string,\n settings: OpenAIEmbeddingSettings = {},\n ) =>\n new OpenAIEmbeddingModel(modelId, settings, {\n provider: 'azure-openai.embeddings',\n headers: getHeaders,\n url,\n fetch: options.fetch,\n });\n\n const provider = function (\n deploymentId: string,\n settings?: OpenAIChatSettings | OpenAICompletionSettings,\n ) {\n if (new.target) {\n throw new Error(\n 'The Azure OpenAI model function cannot be called with the new keyword.',\n );\n }\n\n return createChatModel(deploymentId, settings as OpenAIChatSettings);\n };\n\n provider.languageModel = createChatModel;\n provider.chat = createChatModel;\n provider.completion = createCompletionModel;\n provider.embedding = createEmbeddingModel;\n provider.textEmbedding = createEmbeddingModel;\n\n return provider as AzureOpenAIProvider;\n}\n\n/**\nDefault Azure OpenAI provider instance.\n */\nexport const azure = createAzure({});\n"],"mappings":";AAAA;AAAA,EACE;AAAA,EAEA;AAAA,EAEA;AAAA,OAEK;AACP,SAAwB,YAAY,mBAAmB;AAqFhD,SAAS,YACd,UAAuC,CAAC,GACnB;AACrB,QAAM,aAAa,OAAO;AAAA,IACxB,WAAW,WAAW;AAAA,MACpB,QAAQ,QAAQ;AAAA,MAChB,yBAAyB;AAAA,MACzB,aAAa;AAAA,IACf,CAAC;AAAA,IACD,GAAG,QAAQ;AAAA,EACb;AAEA,QAAM,kBAAkB,MACtB,YAAY;AAAA,IACV,cAAc,QAAQ;AAAA,IACtB,aAAa;AAAA,IACb,yBAAyB;AAAA,IACzB,aAAa;AAAA,EACf,CAAC;AAEH,QAAM,MAAM,CAAC,EAAE,MAAM,QAAQ,MAC3B,QAAQ,UACJ,GAAG,QAAQ,OAAO,IAAI,OAAO,GAAG,IAAI,4BACpC,WAAW,gBAAgB,CAAC,wCAAwC,OAAO,GAAG,IAAI;AAExF,QAAM,kBAAkB,CACtB,gBACA,WAA+B,CAAC,MAEhC,IAAI,wBAAwB,gBAAgB,UAAU;AAAA,IACpD,UAAU;AAAA,IACV;AAAA,IACA,SAAS;AAAA,IACT,eAAe;AAAA,IACf,OAAO,QAAQ;AAAA,EACjB,CAAC;AAEH,QAAM,wBAAwB,CAC5B,SACA,WAAqC,CAAC,MAEtC,IAAI,8BAA8B,SAAS,UAAU;AAAA,IACnD,UAAU;AAAA,IACV;AAAA,IACA,eAAe;AAAA,IACf,SAAS;AAAA,IACT,OAAO,QAAQ;AAAA,EACjB,CAAC;AAEH,QAAM,uBAAuB,CAC3B,SACA,WAAoC,CAAC,MAErC,IAAI,qBAAqB,SAAS,UAAU;AAAA,IAC1C,UAAU;AAAA,IACV,SAAS;AAAA,IACT;AAAA,IACA,OAAO,QAAQ;AAAA,EACjB,CAAC;AAEH,QAAM,WAAW,SACf,cACA,UACA;AACA,QAAI,YAAY;AACd,YAAM,IAAI;AAAA,QACR;AAAA,MACF;AAAA,IACF;AAEA,WAAO,gBAAgB,cAAc,QAA8B;AAAA,EACrE;AAEA,WAAS,gBAAgB;AACzB,WAAS,OAAO;AAChB,WAAS,aAAa;AACtB,WAAS,YAAY;AACrB,WAAS,gBAAgB;AAEzB,SAAO;AACT;AAKO,IAAM,QAAQ,YAAY,CAAC,CAAC;","names":[]}

package/package.json
CHANGED

@@ -1,6 +1,6 @@
 {
 "name": "@ai-sdk/azure",
-"version": "0.0.
+"version": "0.0.26",
 "license": "Apache-2.0",
 "sideEffects": false,
 "main": "./dist/index.js",
@@ -18,14 +18,14 @@
 }
 },
 "dependencies": {
-"@ai-sdk/openai": "0.0.
-"@ai-sdk/provider": "0.0.
-"@ai-sdk/provider-utils": "1.0.
+"@ai-sdk/openai": "0.0.48",
+"@ai-sdk/provider": "0.0.20",
+"@ai-sdk/provider-utils": "1.0.13"
 },
 "devDependencies": {
 "@types/node": "^18",
 "tsup": "^8",
-"typescript": "5.
+"typescript": "5.5.4",
 "zod": "3.23.8",
 "@vercel/ai-tsconfig": "0.0.0"
 },