@ai-sdk/deepinfra 0.0.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +9 -0
- package/LICENSE +13 -0
- package/README.md +36 -0
- package/dist/index.d.mts +58 -0
- package/dist/index.d.ts +58 -0
- package/dist/index.js +78 -0
- package/dist/index.js.map +1 -0
- package/dist/index.mjs +57 -0
- package/dist/index.mjs.map +1 -0
- package/package.json +64 -0
package/CHANGELOG.md
ADDED
package/LICENSE
ADDED
|
@@ -0,0 +1,13 @@
|
|
|
1
|
+
Copyright 2023 Vercel, Inc.
|
|
2
|
+
|
|
3
|
+
Licensed under the Apache License, Version 2.0 (the "License");
|
|
4
|
+
you may not use this file except in compliance with the License.
|
|
5
|
+
You may obtain a copy of the License at
|
|
6
|
+
|
|
7
|
+
http://www.apache.org/licenses/LICENSE-2.0
|
|
8
|
+
|
|
9
|
+
Unless required by applicable law or agreed to in writing, software
|
|
10
|
+
distributed under the License is distributed on an "AS IS" BASIS,
|
|
11
|
+
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
|
12
|
+
See the License for the specific language governing permissions and
|
|
13
|
+
limitations under the License.
|
package/README.md
ADDED
|
@@ -0,0 +1,36 @@
|
|
|
1
|
+
# AI SDK - DeepInfra Provider
|
|
2
|
+
|
|
3
|
+
The **[DeepInfra provider](https://sdk.vercel.ai/providers/ai-sdk-providers/deepinfra)** for the [AI SDK](https://sdk.vercel.ai/docs)
|
|
4
|
+
contains language model support for the DeepInfra API, giving you access to models like Llama 3, Mixtral, and other state-of-the-art LLMs.
|
|
5
|
+
|
|
6
|
+
## Setup
|
|
7
|
+
|
|
8
|
+
The DeepInfra provider is available in the `@ai-sdk/deepinfra` module. You can install it with:
|
|
9
|
+
|
|
10
|
+
```bash
|
|
11
|
+
npm i @ai-sdk/deepinfra
|
|
12
|
+
```
|
|
13
|
+
|
|
14
|
+
## Provider Instance
|
|
15
|
+
|
|
16
|
+
You can import the default provider instance `deepinfra` from `@ai-sdk/deepinfra`:
|
|
17
|
+
|
|
18
|
+
```ts
|
|
19
|
+
import { deepinfra } from '@ai-sdk/deepinfra';
|
|
20
|
+
```
|
|
21
|
+
|
|
22
|
+
## Example
|
|
23
|
+
|
|
24
|
+
```ts
|
|
25
|
+
import { deepinfra } from '@ai-sdk/deepinfra';
|
|
26
|
+
import { generateText } from 'ai';
|
|
27
|
+
|
|
28
|
+
const { text } = await generateText({
|
|
29
|
+
model: deepinfra('meta-llama/Llama-3.3-70B-Instruct'),
|
|
30
|
+
prompt: 'Write a vegetarian lasagna recipe for 4 people.',
|
|
31
|
+
});
|
|
32
|
+
```
|
|
33
|
+
|
|
34
|
+
## Documentation
|
|
35
|
+
|
|
36
|
+
Please check out the **[DeepInfra provider documentation](https://sdk.vercel.ai/providers/ai-sdk-providers/deepinfra)** for more information.
|
package/dist/index.d.mts
ADDED
|
@@ -0,0 +1,58 @@
|
|
|
1
|
+
import { LanguageModelV1, EmbeddingModelV1 } from '@ai-sdk/provider';
import { FetchFunction } from '@ai-sdk/provider-utils';
import { OpenAICompatibleChatSettings, OpenAICompatibleEmbeddingSettings, OpenAICompatibleCompletionSettings } from '@ai-sdk/openai-compatible';
export { OpenAICompatibleErrorData as DeepInfraErrorData } from '@ai-sdk/openai-compatible';

/**
Known DeepInfra chat model ids. The trailing `(string & {})` member keeps the
type open-ended: any other model id string is accepted at the call site, while
the listed literals are still offered by editor autocompletion.
*/
type DeepInfraChatModelId = 'meta-llama/Llama-3.3-70B-Instruct' | 'meta-llama/Llama-3.3-70B-Instruct-Turbo' | 'meta-llama/Meta-Llama-3.1-70B-Instruct' | 'meta-llama/Meta-Llama-3.1-8B-Instruct' | 'meta-llama/Meta-Llama-3.1-405B-Instruct' | 'Qwen/QwQ-32B-Preview' | 'meta-llama/Meta-Llama-3.1-8B-Instruct-Turbo' | 'meta-llama/Meta-Llama-3.1-70B-Instruct-Turbo' | 'Qwen/Qwen2.5-Coder-32B-Instruct' | 'nvidia/Llama-3.1-Nemotron-70B-Instruct' | 'Qwen/Qwen2.5-72B-Instruct' | 'meta-llama/Llama-3.2-90B-Vision-Instruct' | 'meta-llama/Llama-3.2-11B-Vision-Instruct' | 'microsoft/WizardLM-2-8x22B' | '01-ai/Yi-34B-Chat' | 'Austism/chronos-hermes-13b-v2' | 'Gryphe/MythoMax-L2-13b' | 'Gryphe/MythoMax-L2-13b-turbo' | 'HuggingFaceH4/zephyr-orpo-141b-A35b-v0.1' | 'KoboldAI/LLaMA2-13B-Tiefighter' | 'NousResearch/Hermes-3-Llama-3.1-405B' | 'Phind/Phind-CodeLlama-34B-v2' | 'Qwen/Qwen2-72B-Instruct' | 'Qwen/Qwen2-7B-Instruct' | 'Qwen/Qwen2.5-7B-Instruct' | 'Qwen/Qwen2.5-Coder-7B' | 'Sao10K/L3-70B-Euryale-v2.1' | 'Sao10K/L3-8B-Lunaris-v1' | 'Sao10K/L3.1-70B-Euryale-v2.2' | 'bigcode/starcoder2-15b' | 'bigcode/starcoder2-15b-instruct-v0.1' | 'codellama/CodeLlama-34b-Instruct-hf' | 'codellama/CodeLlama-70b-Instruct-hf' | 'cognitivecomputations/dolphin-2.6-mixtral-8x7b' | 'cognitivecomputations/dolphin-2.9.1-llama-3-70b' | 'databricks/dbrx-instruct' | 'deepinfra/airoboros-70b' | 'google/codegemma-7b-it' | 'google/gemma-1.1-7b-it' | 'google/gemma-2-27b-it' | 'google/gemma-2-9b-it' | 'lizpreciatior/lzlv_70b_fp16_hf' | 'mattshumer/Reflection-Llama-3.1-70B' | 'meta-llama/Llama-2-13b-chat-hf' | 'meta-llama/Llama-2-70b-chat-hf' | 'meta-llama/Llama-2-7b-chat-hf' | 'meta-llama/Llama-3.2-1B-Instruct' | 'meta-llama/Llama-3.2-3B-Instruct' | 'meta-llama/Meta-Llama-3-70B-Instruct' | 'meta-llama/Meta-Llama-3-8B-Instruct' | 'microsoft/Phi-3-medium-4k-instruct' | 'microsoft/WizardLM-2-7B' | 'mistralai/Mistral-7B-Instruct-v0.1' | 'mistralai/Mistral-7B-Instruct-v0.2' | 'mistralai/Mistral-7B-Instruct-v0.3' | 'mistralai/Mistral-Nemo-Instruct-2407' | 'mistralai/Mixtral-8x22B-Instruct-v0.1' | 'mistralai/Mixtral-8x22B-v0.1' | 'mistralai/Mixtral-8x7B-Instruct-v0.1' | 'nvidia/Nemotron-4-340B-Instruct' | 'openbmb/MiniCPM-Llama3-V-2_5' | 'openchat/openchat-3.6-8b' | 'openchat/openchat_3.5' | (string & {});
/**
Chat settings; currently identical to the shared OpenAI-compatible chat settings.
*/
interface DeepInfraChatSettings extends OpenAICompatibleChatSettings {
}

/**
Known DeepInfra embedding model ids (open-ended — see DeepInfraChatModelId).
*/
type DeepInfraEmbeddingModelId = 'BAAI/bge-base-en-v1.5' | 'BAAI/bge-large-en-v1.5' | 'BAAI/bge-m3' | 'intfloat/e5-base-v2' | 'intfloat/e5-large-v2' | 'intfloat/multilingual-e5-large' | 'sentence-transformers/all-MiniLM-L12-v2' | 'sentence-transformers/all-MiniLM-L6-v2' | 'sentence-transformers/all-mpnet-base-v2' | 'sentence-transformers/clip-ViT-B-32' | 'sentence-transformers/clip-ViT-B-32-multilingual-v1' | 'sentence-transformers/multi-qa-mpnet-base-dot-v1' | 'sentence-transformers/paraphrase-MiniLM-L6-v2' | 'shibing624/text2vec-base-chinese' | 'thenlper/gte-base' | 'thenlper/gte-large' | (string & {});
/**
Embedding settings; currently identical to the shared OpenAI-compatible embedding settings.
*/
interface DeepInfraEmbeddingSettings extends OpenAICompatibleEmbeddingSettings {
}

/**
Completion models share the chat model id space.
*/
type DeepInfraCompletionModelId = DeepInfraChatModelId;
/**
Completion settings; currently identical to the shared OpenAI-compatible completion settings.
*/
interface DeepInfraCompletionSettings extends OpenAICompatibleCompletionSettings {
}

/**
Configuration options accepted by `createDeepInfra`.
*/
interface DeepInfraProviderSettings {
    /**
  DeepInfra API key.
  */
    apiKey?: string;
    /**
  Base URL for the API calls.
  */
    baseURL?: string;
    /**
  Custom headers to include in the requests.
  */
    headers?: Record<string, string>;
    /**
  Custom fetch implementation. You can use it as a middleware to intercept requests,
  or to provide a custom fetch implementation for e.g. testing.
  */
    fetch?: FetchFunction;
}
/**
A DeepInfra provider: callable directly to create a chat model, with
explicit factory methods for chat, completion, and embedding models.
*/
interface DeepInfraProvider {
    /**
  Creates a model for text generation.
  */
    (modelId: DeepInfraChatModelId, settings?: DeepInfraChatSettings): LanguageModelV1;
    /**
  Creates a chat model for text generation.
  */
    chatModel(modelId: DeepInfraChatModelId, settings?: DeepInfraChatSettings): LanguageModelV1;
    /**
  Creates a completion model for text generation.
  */
    completionModel(modelId: DeepInfraCompletionModelId, settings?: DeepInfraCompletionSettings): LanguageModelV1;
    /**
  Creates a text embedding model for text generation.
  */
    textEmbeddingModel(modelId: DeepInfraEmbeddingModelId, settings?: DeepInfraEmbeddingSettings): EmbeddingModelV1<string>;
}
/**
Creates a DeepInfra provider instance with the given settings.
*/
declare function createDeepInfra(options?: DeepInfraProviderSettings): DeepInfraProvider;
/**
Default DeepInfra provider instance, created with default settings.
*/
declare const deepinfra: DeepInfraProvider;

export { type DeepInfraProvider, type DeepInfraProviderSettings, createDeepInfra, deepinfra };
|
package/dist/index.d.ts
ADDED
|
@@ -0,0 +1,58 @@
|
|
|
1
|
+
import { LanguageModelV1, EmbeddingModelV1 } from '@ai-sdk/provider';
import { FetchFunction } from '@ai-sdk/provider-utils';
import { OpenAICompatibleChatSettings, OpenAICompatibleEmbeddingSettings, OpenAICompatibleCompletionSettings } from '@ai-sdk/openai-compatible';
export { OpenAICompatibleErrorData as DeepInfraErrorData } from '@ai-sdk/openai-compatible';

/**
Known DeepInfra chat model ids. The trailing `(string & {})` member keeps the
type open-ended: any other model id string is accepted at the call site, while
the listed literals are still offered by editor autocompletion.
*/
type DeepInfraChatModelId = 'meta-llama/Llama-3.3-70B-Instruct' | 'meta-llama/Llama-3.3-70B-Instruct-Turbo' | 'meta-llama/Meta-Llama-3.1-70B-Instruct' | 'meta-llama/Meta-Llama-3.1-8B-Instruct' | 'meta-llama/Meta-Llama-3.1-405B-Instruct' | 'Qwen/QwQ-32B-Preview' | 'meta-llama/Meta-Llama-3.1-8B-Instruct-Turbo' | 'meta-llama/Meta-Llama-3.1-70B-Instruct-Turbo' | 'Qwen/Qwen2.5-Coder-32B-Instruct' | 'nvidia/Llama-3.1-Nemotron-70B-Instruct' | 'Qwen/Qwen2.5-72B-Instruct' | 'meta-llama/Llama-3.2-90B-Vision-Instruct' | 'meta-llama/Llama-3.2-11B-Vision-Instruct' | 'microsoft/WizardLM-2-8x22B' | '01-ai/Yi-34B-Chat' | 'Austism/chronos-hermes-13b-v2' | 'Gryphe/MythoMax-L2-13b' | 'Gryphe/MythoMax-L2-13b-turbo' | 'HuggingFaceH4/zephyr-orpo-141b-A35b-v0.1' | 'KoboldAI/LLaMA2-13B-Tiefighter' | 'NousResearch/Hermes-3-Llama-3.1-405B' | 'Phind/Phind-CodeLlama-34B-v2' | 'Qwen/Qwen2-72B-Instruct' | 'Qwen/Qwen2-7B-Instruct' | 'Qwen/Qwen2.5-7B-Instruct' | 'Qwen/Qwen2.5-Coder-7B' | 'Sao10K/L3-70B-Euryale-v2.1' | 'Sao10K/L3-8B-Lunaris-v1' | 'Sao10K/L3.1-70B-Euryale-v2.2' | 'bigcode/starcoder2-15b' | 'bigcode/starcoder2-15b-instruct-v0.1' | 'codellama/CodeLlama-34b-Instruct-hf' | 'codellama/CodeLlama-70b-Instruct-hf' | 'cognitivecomputations/dolphin-2.6-mixtral-8x7b' | 'cognitivecomputations/dolphin-2.9.1-llama-3-70b' | 'databricks/dbrx-instruct' | 'deepinfra/airoboros-70b' | 'google/codegemma-7b-it' | 'google/gemma-1.1-7b-it' | 'google/gemma-2-27b-it' | 'google/gemma-2-9b-it' | 'lizpreciatior/lzlv_70b_fp16_hf' | 'mattshumer/Reflection-Llama-3.1-70B' | 'meta-llama/Llama-2-13b-chat-hf' | 'meta-llama/Llama-2-70b-chat-hf' | 'meta-llama/Llama-2-7b-chat-hf' | 'meta-llama/Llama-3.2-1B-Instruct' | 'meta-llama/Llama-3.2-3B-Instruct' | 'meta-llama/Meta-Llama-3-70B-Instruct' | 'meta-llama/Meta-Llama-3-8B-Instruct' | 'microsoft/Phi-3-medium-4k-instruct' | 'microsoft/WizardLM-2-7B' | 'mistralai/Mistral-7B-Instruct-v0.1' | 'mistralai/Mistral-7B-Instruct-v0.2' | 'mistralai/Mistral-7B-Instruct-v0.3' | 'mistralai/Mistral-Nemo-Instruct-2407' | 'mistralai/Mixtral-8x22B-Instruct-v0.1' | 'mistralai/Mixtral-8x22B-v0.1' | 'mistralai/Mixtral-8x7B-Instruct-v0.1' | 'nvidia/Nemotron-4-340B-Instruct' | 'openbmb/MiniCPM-Llama3-V-2_5' | 'openchat/openchat-3.6-8b' | 'openchat/openchat_3.5' | (string & {});
/**
Chat settings; currently identical to the shared OpenAI-compatible chat settings.
*/
interface DeepInfraChatSettings extends OpenAICompatibleChatSettings {
}

/**
Known DeepInfra embedding model ids (open-ended — see DeepInfraChatModelId).
*/
type DeepInfraEmbeddingModelId = 'BAAI/bge-base-en-v1.5' | 'BAAI/bge-large-en-v1.5' | 'BAAI/bge-m3' | 'intfloat/e5-base-v2' | 'intfloat/e5-large-v2' | 'intfloat/multilingual-e5-large' | 'sentence-transformers/all-MiniLM-L12-v2' | 'sentence-transformers/all-MiniLM-L6-v2' | 'sentence-transformers/all-mpnet-base-v2' | 'sentence-transformers/clip-ViT-B-32' | 'sentence-transformers/clip-ViT-B-32-multilingual-v1' | 'sentence-transformers/multi-qa-mpnet-base-dot-v1' | 'sentence-transformers/paraphrase-MiniLM-L6-v2' | 'shibing624/text2vec-base-chinese' | 'thenlper/gte-base' | 'thenlper/gte-large' | (string & {});
/**
Embedding settings; currently identical to the shared OpenAI-compatible embedding settings.
*/
interface DeepInfraEmbeddingSettings extends OpenAICompatibleEmbeddingSettings {
}

/**
Completion models share the chat model id space.
*/
type DeepInfraCompletionModelId = DeepInfraChatModelId;
/**
Completion settings; currently identical to the shared OpenAI-compatible completion settings.
*/
interface DeepInfraCompletionSettings extends OpenAICompatibleCompletionSettings {
}

/**
Configuration options accepted by `createDeepInfra`.
*/
interface DeepInfraProviderSettings {
    /**
  DeepInfra API key.
  */
    apiKey?: string;
    /**
  Base URL for the API calls.
  */
    baseURL?: string;
    /**
  Custom headers to include in the requests.
  */
    headers?: Record<string, string>;
    /**
  Custom fetch implementation. You can use it as a middleware to intercept requests,
  or to provide a custom fetch implementation for e.g. testing.
  */
    fetch?: FetchFunction;
}
/**
A DeepInfra provider: callable directly to create a chat model, with
explicit factory methods for chat, completion, and embedding models.
*/
interface DeepInfraProvider {
    /**
  Creates a model for text generation.
  */
    (modelId: DeepInfraChatModelId, settings?: DeepInfraChatSettings): LanguageModelV1;
    /**
  Creates a chat model for text generation.
  */
    chatModel(modelId: DeepInfraChatModelId, settings?: DeepInfraChatSettings): LanguageModelV1;
    /**
  Creates a completion model for text generation.
  */
    completionModel(modelId: DeepInfraCompletionModelId, settings?: DeepInfraCompletionSettings): LanguageModelV1;
    /**
  Creates a text embedding model for text generation.
  */
    textEmbeddingModel(modelId: DeepInfraEmbeddingModelId, settings?: DeepInfraEmbeddingSettings): EmbeddingModelV1<string>;
}
/**
Creates a DeepInfra provider instance with the given settings.
*/
declare function createDeepInfra(options?: DeepInfraProviderSettings): DeepInfraProvider;
/**
Default DeepInfra provider instance, created with default settings.
*/
declare const deepinfra: DeepInfraProvider;

export { type DeepInfraProvider, type DeepInfraProviderSettings, createDeepInfra, deepinfra };
|
package/dist/index.js
ADDED
|
@@ -0,0 +1,78 @@
|
|
|
1
|
+
"use strict";
|
|
2
|
+
var __defProp = Object.defineProperty;
|
|
3
|
+
var __getOwnPropDesc = Object.getOwnPropertyDescriptor;
|
|
4
|
+
var __getOwnPropNames = Object.getOwnPropertyNames;
|
|
5
|
+
var __hasOwnProp = Object.prototype.hasOwnProperty;
|
|
6
|
+
var __export = (target, all) => {
|
|
7
|
+
for (var name in all)
|
|
8
|
+
__defProp(target, name, { get: all[name], enumerable: true });
|
|
9
|
+
};
|
|
10
|
+
var __copyProps = (to, from, except, desc) => {
|
|
11
|
+
if (from && typeof from === "object" || typeof from === "function") {
|
|
12
|
+
for (let key of __getOwnPropNames(from))
|
|
13
|
+
if (!__hasOwnProp.call(to, key) && key !== except)
|
|
14
|
+
__defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable });
|
|
15
|
+
}
|
|
16
|
+
return to;
|
|
17
|
+
};
|
|
18
|
+
var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod);
|
|
19
|
+
|
|
20
|
+
// src/index.ts
|
|
21
|
+
var src_exports = {};
|
|
22
|
+
__export(src_exports, {
|
|
23
|
+
createDeepInfra: () => createDeepInfra,
|
|
24
|
+
deepinfra: () => deepinfra
|
|
25
|
+
});
|
|
26
|
+
module.exports = __toCommonJS(src_exports);
|
|
27
|
+
|
|
28
|
+
// src/deepinfra-provider.ts
var import_openai_compatible = require("@ai-sdk/openai-compatible");
var import_provider_utils = require("@ai-sdk/provider-utils");
// Creates a DeepInfra provider. The returned value is a callable (defaults to
// a chat model) that also carries chatModel/completionModel/textEmbeddingModel
// factory methods. `options` may supply apiKey, baseURL, headers, and fetch.
function createDeepInfra(options = {}) {
  var _a;
  // Downleveled `options.baseURL ?? <default>`: use the caller's base URL or
  // the public DeepInfra OpenAI-compatible endpoint, without a trailing slash.
  const baseURL = (0, import_provider_utils.withoutTrailingSlash)(
    (_a = options.baseURL) != null ? _a : "https://api.deepinfra.com/v1/openai"
  );
  // Headers are built per request; loadApiKey falls back to the
  // DEEPINFRA_API_KEY environment variable when options.apiKey is unset.
  // Caller-supplied headers are spread last so they can override Authorization.
  const getHeaders = () => ({
    Authorization: `Bearer ${(0, import_provider_utils.loadApiKey)({
      apiKey: options.apiKey,
      environmentVariableName: "DEEPINFRA_API_KEY",
      description: "DeepInfra's API key"
    })}`,
    ...options.headers
  });
  // Shared model configuration, parameterized by the model flavor name that
  // becomes part of the provider id (e.g. "deepinfra.chat").
  const getCommonModelConfig = (modelType) => ({
    provider: `deepinfra.${modelType}`,
    url: ({ path }) => `${baseURL}${path}`,
    headers: getHeaders,
    fetch: options.fetch
  });
  const createChatModel = (modelId, settings = {}) => {
    return new import_openai_compatible.OpenAICompatibleChatLanguageModel(modelId, settings, {
      ...getCommonModelConfig("chat"),
      // Structured output is requested via JSON mode by default.
      defaultObjectGenerationMode: "json"
    });
  };
  const createCompletionModel = (modelId, settings = {}) => new import_openai_compatible.OpenAICompatibleCompletionLanguageModel(
    modelId,
    settings,
    getCommonModelConfig("completion")
  );
  const createTextEmbeddingModel = (modelId, settings = {}) => new import_openai_compatible.OpenAICompatibleEmbeddingModel(
    modelId,
    settings,
    getCommonModelConfig("embedding")
  );
  // Calling the provider directly is shorthand for creating a chat model.
  const provider = (modelId, settings) => createChatModel(modelId, settings);
  provider.completionModel = createCompletionModel;
  provider.chatModel = createChatModel;
  provider.textEmbeddingModel = createTextEmbeddingModel;
  return provider;
}
// Default provider instance, created with default settings.
var deepinfra = createDeepInfra();
// Annotate the CommonJS export names for ESM import in node:
0 && (module.exports = {
  createDeepInfra,
  deepinfra
});
//# sourceMappingURL=index.js.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"sources":["../src/index.ts","../src/deepinfra-provider.ts"],"sourcesContent":["import { OpenAICompatibleErrorData } from '@ai-sdk/openai-compatible';\n\nexport { createDeepInfra, deepinfra } from './deepinfra-provider';\nexport type {\n DeepInfraProvider,\n DeepInfraProviderSettings,\n} from './deepinfra-provider';\nexport type { OpenAICompatibleErrorData as DeepInfraErrorData } from '@ai-sdk/openai-compatible';\n","import { LanguageModelV1, EmbeddingModelV1 } from '@ai-sdk/provider';\nimport {\n OpenAICompatibleChatLanguageModel,\n OpenAICompatibleCompletionLanguageModel,\n OpenAICompatibleEmbeddingModel,\n} from '@ai-sdk/openai-compatible';\nimport {\n FetchFunction,\n loadApiKey,\n withoutTrailingSlash,\n} from '@ai-sdk/provider-utils';\nimport {\n DeepInfraChatModelId,\n DeepInfraChatSettings,\n} from './deepinfra-chat-settings';\nimport {\n DeepInfraEmbeddingModelId,\n DeepInfraEmbeddingSettings,\n} from './deepinfra-embedding-settings';\nimport {\n DeepInfraCompletionModelId,\n DeepInfraCompletionSettings,\n} from './deepinfra-completion-settings';\n\nexport interface DeepInfraProviderSettings {\n /**\nDeepInfra API key.\n*/\n apiKey?: string;\n /**\nBase URL for the API calls.\n*/\n baseURL?: string;\n /**\nCustom headers to include in the requests.\n*/\n headers?: Record<string, string>;\n /**\nCustom fetch implementation. You can use it as a middleware to intercept requests,\nor to provide a custom fetch implementation for e.g. 
testing.\n*/\n fetch?: FetchFunction;\n}\n\nexport interface DeepInfraProvider {\n /**\nCreates a model for text generation.\n*/\n (\n modelId: DeepInfraChatModelId,\n settings?: DeepInfraChatSettings,\n ): LanguageModelV1;\n\n /**\nCreates a chat model for text generation.\n*/\n chatModel(\n modelId: DeepInfraChatModelId,\n settings?: DeepInfraChatSettings,\n ): LanguageModelV1;\n\n /**\nCreates a completion model for text generation.\n*/\n completionModel(\n modelId: DeepInfraCompletionModelId,\n settings?: DeepInfraCompletionSettings,\n ): LanguageModelV1;\n\n /**\nCreates a text embedding model for text generation.\n*/\n textEmbeddingModel(\n modelId: DeepInfraEmbeddingModelId,\n settings?: DeepInfraEmbeddingSettings,\n ): EmbeddingModelV1<string>;\n}\n\nexport function createDeepInfra(\n options: DeepInfraProviderSettings = {},\n): DeepInfraProvider {\n const baseURL = withoutTrailingSlash(\n options.baseURL ?? 'https://api.deepinfra.com/v1/openai',\n );\n const getHeaders = () => ({\n Authorization: `Bearer ${loadApiKey({\n apiKey: options.apiKey,\n environmentVariableName: 'DEEPINFRA_API_KEY',\n description: \"DeepInfra's API key\",\n })}`,\n ...options.headers,\n });\n\n interface CommonModelConfig {\n provider: string;\n url: ({ path }: { path: string }) => string;\n headers: () => Record<string, string>;\n fetch?: FetchFunction;\n }\n\n const getCommonModelConfig = (modelType: string): CommonModelConfig => ({\n provider: `deepinfra.${modelType}`,\n url: ({ path }) => `${baseURL}${path}`,\n headers: getHeaders,\n fetch: options.fetch,\n });\n\n const createChatModel = (\n modelId: DeepInfraChatModelId,\n settings: DeepInfraChatSettings = {},\n ) => {\n return new OpenAICompatibleChatLanguageModel(modelId, settings, {\n ...getCommonModelConfig('chat'),\n defaultObjectGenerationMode: 'json',\n });\n };\n\n const createCompletionModel = (\n modelId: DeepInfraCompletionModelId,\n settings: DeepInfraCompletionSettings = {},\n ) =>\n new 
OpenAICompatibleCompletionLanguageModel(\n modelId,\n settings,\n getCommonModelConfig('completion'),\n );\n\n const createTextEmbeddingModel = (\n modelId: DeepInfraEmbeddingModelId,\n settings: DeepInfraEmbeddingSettings = {},\n ) =>\n new OpenAICompatibleEmbeddingModel(\n modelId,\n settings,\n getCommonModelConfig('embedding'),\n );\n\n const provider = (\n modelId: DeepInfraChatModelId,\n settings?: DeepInfraChatSettings,\n ) => createChatModel(modelId, settings);\n\n provider.completionModel = createCompletionModel;\n provider.chatModel = createChatModel;\n provider.textEmbeddingModel = createTextEmbeddingModel;\n\n return provider as DeepInfraProvider;\n}\n\nexport const deepinfra = createDeepInfra();\n"],"mappings":";;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;;;ACCA,+BAIO;AACP,4BAIO;AAoEA,SAAS,gBACd,UAAqC,CAAC,GACnB;AAhFrB;AAiFE,QAAM,cAAU;AAAA,KACd,aAAQ,YAAR,YAAmB;AAAA,EACrB;AACA,QAAM,aAAa,OAAO;AAAA,IACxB,eAAe,cAAU,kCAAW;AAAA,MAClC,QAAQ,QAAQ;AAAA,MAChB,yBAAyB;AAAA,MACzB,aAAa;AAAA,IACf,CAAC,CAAC;AAAA,IACF,GAAG,QAAQ;AAAA,EACb;AASA,QAAM,uBAAuB,CAAC,eAA0C;AAAA,IACtE,UAAU,aAAa,SAAS;AAAA,IAChC,KAAK,CAAC,EAAE,KAAK,MAAM,GAAG,OAAO,GAAG,IAAI;AAAA,IACpC,SAAS;AAAA,IACT,OAAO,QAAQ;AAAA,EACjB;AAEA,QAAM,kBAAkB,CACtB,SACA,WAAkC,CAAC,MAChC;AACH,WAAO,IAAI,2DAAkC,SAAS,UAAU;AAAA,MAC9D,GAAG,qBAAqB,MAAM;AAAA,MAC9B,6BAA6B;AAAA,IAC/B,CAAC;AAAA,EACH;AAEA,QAAM,wBAAwB,CAC5B,SACA,WAAwC,CAAC,MAEzC,IAAI;AAAA,IACF;AAAA,IACA;AAAA,IACA,qBAAqB,YAAY;AAAA,EACnC;AAEF,QAAM,2BAA2B,CAC/B,SACA,WAAuC,CAAC,MAExC,IAAI;AAAA,IACF;AAAA,IACA;AAAA,IACA,qBAAqB,WAAW;AAAA,EAClC;AAEF,QAAM,WAAW,CACf,SACA,aACG,gBAAgB,SAAS,QAAQ;AAEtC,WAAS,kBAAkB;AAC3B,WAAS,YAAY;AACrB,WAAS,qBAAqB;AAE9B,SAAO;AACT;AAEO,IAAM,YAAY,gBAAgB;","names":[]}
|
package/dist/index.mjs
ADDED
|
@@ -0,0 +1,57 @@
|
|
|
1
|
+
// src/deepinfra-provider.ts
|
|
2
|
+
import {
|
|
3
|
+
OpenAICompatibleChatLanguageModel,
|
|
4
|
+
OpenAICompatibleCompletionLanguageModel,
|
|
5
|
+
OpenAICompatibleEmbeddingModel
|
|
6
|
+
} from "@ai-sdk/openai-compatible";
|
|
7
|
+
import {
|
|
8
|
+
loadApiKey,
|
|
9
|
+
withoutTrailingSlash
|
|
10
|
+
} from "@ai-sdk/provider-utils";
|
|
11
|
+
/**
 * Creates a DeepInfra provider.
 *
 * The returned provider is callable (shorthand for a chat model) and also
 * exposes explicit factory methods for chat, completion, and embedding models.
 * `options` may supply apiKey, baseURL, headers, and a custom fetch.
 */
function createDeepInfra(options = {}) {
  var _a;
  // Base URL: the caller's override, or the public DeepInfra
  // OpenAI-compatible endpoint, normalized to have no trailing slash.
  const rawBase = (_a = options.baseURL) != null ? _a : "https://api.deepinfra.com/v1/openai";
  const baseURL = withoutTrailingSlash(rawBase);
  // Headers are built lazily, per request; loadApiKey falls back to the
  // DEEPINFRA_API_KEY environment variable when options.apiKey is unset.
  // Caller-supplied headers are spread last and may override Authorization.
  function buildHeaders() {
    const key = loadApiKey({
      apiKey: options.apiKey,
      environmentVariableName: "DEEPINFRA_API_KEY",
      description: "DeepInfra's API key"
    });
    return {
      Authorization: `Bearer ${key}`,
      ...options.headers
    };
  }
  // Shared model configuration, parameterized by the model flavor that
  // becomes part of the provider id (e.g. "deepinfra.chat").
  function configFor(modelType) {
    return {
      provider: `deepinfra.${modelType}`,
      url: ({ path }) => `${baseURL}${path}`,
      headers: buildHeaders,
      fetch: options.fetch
    };
  }
  function makeChatModel(modelId, settings = {}) {
    return new OpenAICompatibleChatLanguageModel(modelId, settings, {
      ...configFor("chat"),
      // Structured output is requested via JSON mode by default.
      defaultObjectGenerationMode: "json"
    });
  }
  function makeCompletionModel(modelId, settings = {}) {
    return new OpenAICompatibleCompletionLanguageModel(modelId, settings, configFor("completion"));
  }
  function makeEmbeddingModel(modelId, settings = {}) {
    return new OpenAICompatibleEmbeddingModel(modelId, settings, configFor("embedding"));
  }
  // Calling the provider directly is shorthand for creating a chat model.
  const provider = function (modelId, settings) {
    return makeChatModel(modelId, settings);
  };
  provider.completionModel = makeCompletionModel;
  provider.chatModel = makeChatModel;
  provider.textEmbeddingModel = makeEmbeddingModel;
  return provider;
}
|
|
52
|
+
// Default provider instance, created with default settings.
var deepinfra = createDeepInfra();
export {
  createDeepInfra,
  deepinfra
};
//# sourceMappingURL=index.mjs.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"sources":["../src/deepinfra-provider.ts"],"sourcesContent":["import { LanguageModelV1, EmbeddingModelV1 } from '@ai-sdk/provider';\nimport {\n OpenAICompatibleChatLanguageModel,\n OpenAICompatibleCompletionLanguageModel,\n OpenAICompatibleEmbeddingModel,\n} from '@ai-sdk/openai-compatible';\nimport {\n FetchFunction,\n loadApiKey,\n withoutTrailingSlash,\n} from '@ai-sdk/provider-utils';\nimport {\n DeepInfraChatModelId,\n DeepInfraChatSettings,\n} from './deepinfra-chat-settings';\nimport {\n DeepInfraEmbeddingModelId,\n DeepInfraEmbeddingSettings,\n} from './deepinfra-embedding-settings';\nimport {\n DeepInfraCompletionModelId,\n DeepInfraCompletionSettings,\n} from './deepinfra-completion-settings';\n\nexport interface DeepInfraProviderSettings {\n /**\nDeepInfra API key.\n*/\n apiKey?: string;\n /**\nBase URL for the API calls.\n*/\n baseURL?: string;\n /**\nCustom headers to include in the requests.\n*/\n headers?: Record<string, string>;\n /**\nCustom fetch implementation. You can use it as a middleware to intercept requests,\nor to provide a custom fetch implementation for e.g. 
testing.\n*/\n fetch?: FetchFunction;\n}\n\nexport interface DeepInfraProvider {\n /**\nCreates a model for text generation.\n*/\n (\n modelId: DeepInfraChatModelId,\n settings?: DeepInfraChatSettings,\n ): LanguageModelV1;\n\n /**\nCreates a chat model for text generation.\n*/\n chatModel(\n modelId: DeepInfraChatModelId,\n settings?: DeepInfraChatSettings,\n ): LanguageModelV1;\n\n /**\nCreates a completion model for text generation.\n*/\n completionModel(\n modelId: DeepInfraCompletionModelId,\n settings?: DeepInfraCompletionSettings,\n ): LanguageModelV1;\n\n /**\nCreates a text embedding model for text generation.\n*/\n textEmbeddingModel(\n modelId: DeepInfraEmbeddingModelId,\n settings?: DeepInfraEmbeddingSettings,\n ): EmbeddingModelV1<string>;\n}\n\nexport function createDeepInfra(\n options: DeepInfraProviderSettings = {},\n): DeepInfraProvider {\n const baseURL = withoutTrailingSlash(\n options.baseURL ?? 'https://api.deepinfra.com/v1/openai',\n );\n const getHeaders = () => ({\n Authorization: `Bearer ${loadApiKey({\n apiKey: options.apiKey,\n environmentVariableName: 'DEEPINFRA_API_KEY',\n description: \"DeepInfra's API key\",\n })}`,\n ...options.headers,\n });\n\n interface CommonModelConfig {\n provider: string;\n url: ({ path }: { path: string }) => string;\n headers: () => Record<string, string>;\n fetch?: FetchFunction;\n }\n\n const getCommonModelConfig = (modelType: string): CommonModelConfig => ({\n provider: `deepinfra.${modelType}`,\n url: ({ path }) => `${baseURL}${path}`,\n headers: getHeaders,\n fetch: options.fetch,\n });\n\n const createChatModel = (\n modelId: DeepInfraChatModelId,\n settings: DeepInfraChatSettings = {},\n ) => {\n return new OpenAICompatibleChatLanguageModel(modelId, settings, {\n ...getCommonModelConfig('chat'),\n defaultObjectGenerationMode: 'json',\n });\n };\n\n const createCompletionModel = (\n modelId: DeepInfraCompletionModelId,\n settings: DeepInfraCompletionSettings = {},\n ) =>\n new 
OpenAICompatibleCompletionLanguageModel(\n modelId,\n settings,\n getCommonModelConfig('completion'),\n );\n\n const createTextEmbeddingModel = (\n modelId: DeepInfraEmbeddingModelId,\n settings: DeepInfraEmbeddingSettings = {},\n ) =>\n new OpenAICompatibleEmbeddingModel(\n modelId,\n settings,\n getCommonModelConfig('embedding'),\n );\n\n const provider = (\n modelId: DeepInfraChatModelId,\n settings?: DeepInfraChatSettings,\n ) => createChatModel(modelId, settings);\n\n provider.completionModel = createCompletionModel;\n provider.chatModel = createChatModel;\n provider.textEmbeddingModel = createTextEmbeddingModel;\n\n return provider as DeepInfraProvider;\n}\n\nexport const deepinfra = createDeepInfra();\n"],"mappings":";AACA;AAAA,EACE;AAAA,EACA;AAAA,EACA;AAAA,OACK;AACP;AAAA,EAEE;AAAA,EACA;AAAA,OACK;AAoEA,SAAS,gBACd,UAAqC,CAAC,GACnB;AAhFrB;AAiFE,QAAM,UAAU;AAAA,KACd,aAAQ,YAAR,YAAmB;AAAA,EACrB;AACA,QAAM,aAAa,OAAO;AAAA,IACxB,eAAe,UAAU,WAAW;AAAA,MAClC,QAAQ,QAAQ;AAAA,MAChB,yBAAyB;AAAA,MACzB,aAAa;AAAA,IACf,CAAC,CAAC;AAAA,IACF,GAAG,QAAQ;AAAA,EACb;AASA,QAAM,uBAAuB,CAAC,eAA0C;AAAA,IACtE,UAAU,aAAa,SAAS;AAAA,IAChC,KAAK,CAAC,EAAE,KAAK,MAAM,GAAG,OAAO,GAAG,IAAI;AAAA,IACpC,SAAS;AAAA,IACT,OAAO,QAAQ;AAAA,EACjB;AAEA,QAAM,kBAAkB,CACtB,SACA,WAAkC,CAAC,MAChC;AACH,WAAO,IAAI,kCAAkC,SAAS,UAAU;AAAA,MAC9D,GAAG,qBAAqB,MAAM;AAAA,MAC9B,6BAA6B;AAAA,IAC/B,CAAC;AAAA,EACH;AAEA,QAAM,wBAAwB,CAC5B,SACA,WAAwC,CAAC,MAEzC,IAAI;AAAA,IACF;AAAA,IACA;AAAA,IACA,qBAAqB,YAAY;AAAA,EACnC;AAEF,QAAM,2BAA2B,CAC/B,SACA,WAAuC,CAAC,MAExC,IAAI;AAAA,IACF;AAAA,IACA;AAAA,IACA,qBAAqB,WAAW;AAAA,EAClC;AAEF,QAAM,WAAW,CACf,SACA,aACG,gBAAgB,SAAS,QAAQ;AAEtC,WAAS,kBAAkB;AAC3B,WAAS,YAAY;AACrB,WAAS,qBAAqB;AAE9B,SAAO;AACT;AAEO,IAAM,YAAY,gBAAgB;","names":[]}
|
package/package.json
ADDED
|
@@ -0,0 +1,64 @@
|
|
|
1
|
+
{
|
|
2
|
+
"name": "@ai-sdk/deepinfra",
|
|
3
|
+
"version": "0.0.1",
|
|
4
|
+
"license": "Apache-2.0",
|
|
5
|
+
"sideEffects": false,
|
|
6
|
+
"main": "./dist/index.js",
|
|
7
|
+
"module": "./dist/index.mjs",
|
|
8
|
+
"types": "./dist/index.d.ts",
|
|
9
|
+
"files": [
|
|
10
|
+
"dist/**/*",
|
|
11
|
+
"CHANGELOG.md"
|
|
12
|
+
],
|
|
13
|
+
"exports": {
|
|
14
|
+
"./package.json": "./package.json",
|
|
15
|
+
".": {
|
|
16
|
+
"types": "./dist/index.d.ts",
|
|
17
|
+
"import": "./dist/index.mjs",
|
|
18
|
+
"require": "./dist/index.js"
|
|
19
|
+
}
|
|
20
|
+
},
|
|
21
|
+
"dependencies": {
|
|
22
|
+
"@ai-sdk/openai-compatible": "0.0.10",
|
|
23
|
+
"@ai-sdk/provider": "1.0.2",
|
|
24
|
+
"@ai-sdk/provider-utils": "2.0.4"
|
|
25
|
+
},
|
|
26
|
+
"devDependencies": {
|
|
27
|
+
"@types/node": "^18",
|
|
28
|
+
"tsup": "^8",
|
|
29
|
+
"typescript": "5.6.3",
|
|
30
|
+
"zod": "3.23.8",
|
|
31
|
+
"@vercel/ai-tsconfig": "0.0.0"
|
|
32
|
+
},
|
|
33
|
+
"peerDependencies": {
|
|
34
|
+
"zod": "^3.0.0"
|
|
35
|
+
},
|
|
36
|
+
"engines": {
|
|
37
|
+
"node": ">=18"
|
|
38
|
+
},
|
|
39
|
+
"publishConfig": {
|
|
40
|
+
"access": "public"
|
|
41
|
+
},
|
|
42
|
+
"homepage": "https://sdk.vercel.ai/docs",
|
|
43
|
+
"repository": {
|
|
44
|
+
"type": "git",
|
|
45
|
+
"url": "git+https://github.com/vercel/ai.git"
|
|
46
|
+
},
|
|
47
|
+
"bugs": {
|
|
48
|
+
"url": "https://github.com/vercel/ai/issues"
|
|
49
|
+
},
|
|
50
|
+
"keywords": [
|
|
51
|
+
"ai"
|
|
52
|
+
],
|
|
53
|
+
"scripts": {
|
|
54
|
+
"build": "tsup",
|
|
55
|
+
"build:watch": "tsup --watch",
|
|
56
|
+
"clean": "rm -rf dist",
|
|
57
|
+
"lint": "eslint \"./**/*.ts*\"",
|
|
58
|
+
"type-check": "tsc --noEmit",
|
|
59
|
+
"prettier-check": "prettier --check \"./**/*.ts*\"",
|
|
60
|
+
"test": "pnpm test:node && pnpm test:edge",
|
|
61
|
+
"test:edge": "vitest --config vitest.edge.config.js --run",
|
|
62
|
+
"test:node": "vitest --config vitest.node.config.js --run"
|
|
63
|
+
}
|
|
64
|
+
}
|