@ai-sdk/deepinfra 0.0.4 → 0.0.5

This diff compares the contents of publicly available package versions released to a supported registry. It is provided for informational purposes only and reflects the package versions as they appear in their public registry.
package/CHANGELOG.md CHANGED
@@ -1,5 +1,11 @@
  # @ai-sdk/deepinfra

+ ## 0.0.5
+
+ ### Patch Changes
+
+ - 32742bb: feat (providers): Add DeepSeek-V3 model to deepinfra/togetherai.
+
  ## 0.0.4

  ### Patch Changes
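The only functional change in this release is the new `deepseek-ai/DeepSeek-V3` chat model id referenced in the changelog entry above. A minimal usage sketch, assuming the AI SDK core package (`ai`) is installed alongside the provider and `DEEPINFRA_API_KEY` is set in the environment; the prompt is purely illustrative:

```ts
import { generateText } from 'ai';
import { deepinfra } from '@ai-sdk/deepinfra';

// The default `deepinfra` provider instance reads DEEPINFRA_API_KEY from the environment.
const { text } = await generateText({
  model: deepinfra('deepseek-ai/DeepSeek-V3'), // model id added in 0.0.5
  prompt: 'Explain what a unified diff is in one sentence.',
});

console.log(text);
```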
package/dist/index.d.mts CHANGED
@@ -3,7 +3,7 @@ import { FetchFunction } from '@ai-sdk/provider-utils';
  import { OpenAICompatibleChatSettings, OpenAICompatibleEmbeddingSettings, OpenAICompatibleCompletionSettings } from '@ai-sdk/openai-compatible';
  export { OpenAICompatibleErrorData as DeepInfraErrorData } from '@ai-sdk/openai-compatible';
 
- type DeepInfraChatModelId = 'meta-llama/Llama-3.3-70B-Instruct' | 'meta-llama/Llama-3.3-70B-Instruct-Turbo' | 'meta-llama/Meta-Llama-3.1-70B-Instruct' | 'meta-llama/Meta-Llama-3.1-8B-Instruct' | 'meta-llama/Meta-Llama-3.1-405B-Instruct' | 'Qwen/QwQ-32B-Preview' | 'meta-llama/Meta-Llama-3.1-8B-Instruct-Turbo' | 'meta-llama/Meta-Llama-3.1-70B-Instruct-Turbo' | 'Qwen/Qwen2.5-Coder-32B-Instruct' | 'nvidia/Llama-3.1-Nemotron-70B-Instruct' | 'Qwen/Qwen2.5-72B-Instruct' | 'meta-llama/Llama-3.2-90B-Vision-Instruct' | 'meta-llama/Llama-3.2-11B-Vision-Instruct' | 'microsoft/WizardLM-2-8x22B' | '01-ai/Yi-34B-Chat' | 'Austism/chronos-hermes-13b-v2' | 'Gryphe/MythoMax-L2-13b' | 'Gryphe/MythoMax-L2-13b-turbo' | 'HuggingFaceH4/zephyr-orpo-141b-A35b-v0.1' | 'KoboldAI/LLaMA2-13B-Tiefighter' | 'NousResearch/Hermes-3-Llama-3.1-405B' | 'Phind/Phind-CodeLlama-34B-v2' | 'Qwen/Qwen2-72B-Instruct' | 'Qwen/Qwen2-7B-Instruct' | 'Qwen/Qwen2.5-7B-Instruct' | 'Qwen/Qwen2.5-Coder-7B' | 'Sao10K/L3-70B-Euryale-v2.1' | 'Sao10K/L3-8B-Lunaris-v1' | 'Sao10K/L3.1-70B-Euryale-v2.2' | 'bigcode/starcoder2-15b' | 'bigcode/starcoder2-15b-instruct-v0.1' | 'codellama/CodeLlama-34b-Instruct-hf' | 'codellama/CodeLlama-70b-Instruct-hf' | 'cognitivecomputations/dolphin-2.6-mixtral-8x7b' | 'cognitivecomputations/dolphin-2.9.1-llama-3-70b' | 'databricks/dbrx-instruct' | 'deepinfra/airoboros-70b' | 'google/codegemma-7b-it' | 'google/gemma-1.1-7b-it' | 'google/gemma-2-27b-it' | 'google/gemma-2-9b-it' | 'lizpreciatior/lzlv_70b_fp16_hf' | 'mattshumer/Reflection-Llama-3.1-70B' | 'meta-llama/Llama-2-13b-chat-hf' | 'meta-llama/Llama-2-70b-chat-hf' | 'meta-llama/Llama-2-7b-chat-hf' | 'meta-llama/Llama-3.2-1B-Instruct' | 'meta-llama/Llama-3.2-3B-Instruct' | 'meta-llama/Meta-Llama-3-70B-Instruct' | 'meta-llama/Meta-Llama-3-8B-Instruct' | 'microsoft/Phi-3-medium-4k-instruct' | 'microsoft/WizardLM-2-7B' | 'mistralai/Mistral-7B-Instruct-v0.1' | 'mistralai/Mistral-7B-Instruct-v0.2' | 'mistralai/Mistral-7B-Instruct-v0.3' | 'mistralai/Mistral-Nemo-Instruct-2407' | 'mistralai/Mixtral-8x22B-Instruct-v0.1' | 'mistralai/Mixtral-8x22B-v0.1' | 'mistralai/Mixtral-8x7B-Instruct-v0.1' | 'nvidia/Nemotron-4-340B-Instruct' | 'openbmb/MiniCPM-Llama3-V-2_5' | 'openchat/openchat-3.6-8b' | 'openchat/openchat_3.5' | (string & {});
+ type DeepInfraChatModelId = 'meta-llama/Llama-3.3-70B-Instruct' | 'meta-llama/Llama-3.3-70B-Instruct-Turbo' | 'meta-llama/Meta-Llama-3.1-70B-Instruct' | 'meta-llama/Meta-Llama-3.1-8B-Instruct' | 'meta-llama/Meta-Llama-3.1-405B-Instruct' | 'Qwen/QwQ-32B-Preview' | 'meta-llama/Meta-Llama-3.1-8B-Instruct-Turbo' | 'meta-llama/Meta-Llama-3.1-70B-Instruct-Turbo' | 'Qwen/Qwen2.5-Coder-32B-Instruct' | 'nvidia/Llama-3.1-Nemotron-70B-Instruct' | 'Qwen/Qwen2.5-72B-Instruct' | 'meta-llama/Llama-3.2-90B-Vision-Instruct' | 'meta-llama/Llama-3.2-11B-Vision-Instruct' | 'microsoft/WizardLM-2-8x22B' | '01-ai/Yi-34B-Chat' | 'Austism/chronos-hermes-13b-v2' | 'Gryphe/MythoMax-L2-13b' | 'Gryphe/MythoMax-L2-13b-turbo' | 'HuggingFaceH4/zephyr-orpo-141b-A35b-v0.1' | 'KoboldAI/LLaMA2-13B-Tiefighter' | 'NousResearch/Hermes-3-Llama-3.1-405B' | 'Phind/Phind-CodeLlama-34B-v2' | 'Qwen/Qwen2-72B-Instruct' | 'Qwen/Qwen2-7B-Instruct' | 'Qwen/Qwen2.5-7B-Instruct' | 'Qwen/Qwen2.5-Coder-7B' | 'Sao10K/L3-70B-Euryale-v2.1' | 'Sao10K/L3-8B-Lunaris-v1' | 'Sao10K/L3.1-70B-Euryale-v2.2' | 'bigcode/starcoder2-15b' | 'bigcode/starcoder2-15b-instruct-v0.1' | 'codellama/CodeLlama-34b-Instruct-hf' | 'codellama/CodeLlama-70b-Instruct-hf' | 'cognitivecomputations/dolphin-2.6-mixtral-8x7b' | 'cognitivecomputations/dolphin-2.9.1-llama-3-70b' | 'databricks/dbrx-instruct' | 'deepinfra/airoboros-70b' | 'deepseek-ai/DeepSeek-V3' | 'google/codegemma-7b-it' | 'google/gemma-1.1-7b-it' | 'google/gemma-2-27b-it' | 'google/gemma-2-9b-it' | 'lizpreciatior/lzlv_70b_fp16_hf' | 'mattshumer/Reflection-Llama-3.1-70B' | 'meta-llama/Llama-2-13b-chat-hf' | 'meta-llama/Llama-2-70b-chat-hf' | 'meta-llama/Llama-2-7b-chat-hf' | 'meta-llama/Llama-3.2-1B-Instruct' | 'meta-llama/Llama-3.2-3B-Instruct' | 'meta-llama/Meta-Llama-3-70B-Instruct' | 'meta-llama/Meta-Llama-3-8B-Instruct' | 'microsoft/Phi-3-medium-4k-instruct' | 'microsoft/WizardLM-2-7B' | 'mistralai/Mistral-7B-Instruct-v0.1' | 'mistralai/Mistral-7B-Instruct-v0.2' | 'mistralai/Mistral-7B-Instruct-v0.3' | 'mistralai/Mistral-Nemo-Instruct-2407' | 'mistralai/Mixtral-8x22B-Instruct-v0.1' | 'mistralai/Mixtral-8x22B-v0.1' | 'mistralai/Mixtral-8x7B-Instruct-v0.1' | 'nvidia/Nemotron-4-340B-Instruct' | 'openbmb/MiniCPM-Llama3-V-2_5' | 'openchat/openchat-3.6-8b' | 'openchat/openchat_3.5' | (string & {});
  interface DeepInfraChatSettings extends OpenAICompatibleChatSettings {
  }
 
package/dist/index.d.ts CHANGED
@@ -3,7 +3,7 @@ import { FetchFunction } from '@ai-sdk/provider-utils';
  import { OpenAICompatibleChatSettings, OpenAICompatibleEmbeddingSettings, OpenAICompatibleCompletionSettings } from '@ai-sdk/openai-compatible';
  export { OpenAICompatibleErrorData as DeepInfraErrorData } from '@ai-sdk/openai-compatible';
 
- type DeepInfraChatModelId = 'meta-llama/Llama-3.3-70B-Instruct' | 'meta-llama/Llama-3.3-70B-Instruct-Turbo' | 'meta-llama/Meta-Llama-3.1-70B-Instruct' | 'meta-llama/Meta-Llama-3.1-8B-Instruct' | 'meta-llama/Meta-Llama-3.1-405B-Instruct' | 'Qwen/QwQ-32B-Preview' | 'meta-llama/Meta-Llama-3.1-8B-Instruct-Turbo' | 'meta-llama/Meta-Llama-3.1-70B-Instruct-Turbo' | 'Qwen/Qwen2.5-Coder-32B-Instruct' | 'nvidia/Llama-3.1-Nemotron-70B-Instruct' | 'Qwen/Qwen2.5-72B-Instruct' | 'meta-llama/Llama-3.2-90B-Vision-Instruct' | 'meta-llama/Llama-3.2-11B-Vision-Instruct' | 'microsoft/WizardLM-2-8x22B' | '01-ai/Yi-34B-Chat' | 'Austism/chronos-hermes-13b-v2' | 'Gryphe/MythoMax-L2-13b' | 'Gryphe/MythoMax-L2-13b-turbo' | 'HuggingFaceH4/zephyr-orpo-141b-A35b-v0.1' | 'KoboldAI/LLaMA2-13B-Tiefighter' | 'NousResearch/Hermes-3-Llama-3.1-405B' | 'Phind/Phind-CodeLlama-34B-v2' | 'Qwen/Qwen2-72B-Instruct' | 'Qwen/Qwen2-7B-Instruct' | 'Qwen/Qwen2.5-7B-Instruct' | 'Qwen/Qwen2.5-Coder-7B' | 'Sao10K/L3-70B-Euryale-v2.1' | 'Sao10K/L3-8B-Lunaris-v1' | 'Sao10K/L3.1-70B-Euryale-v2.2' | 'bigcode/starcoder2-15b' | 'bigcode/starcoder2-15b-instruct-v0.1' | 'codellama/CodeLlama-34b-Instruct-hf' | 'codellama/CodeLlama-70b-Instruct-hf' | 'cognitivecomputations/dolphin-2.6-mixtral-8x7b' | 'cognitivecomputations/dolphin-2.9.1-llama-3-70b' | 'databricks/dbrx-instruct' | 'deepinfra/airoboros-70b' | 'google/codegemma-7b-it' | 'google/gemma-1.1-7b-it' | 'google/gemma-2-27b-it' | 'google/gemma-2-9b-it' | 'lizpreciatior/lzlv_70b_fp16_hf' | 'mattshumer/Reflection-Llama-3.1-70B' | 'meta-llama/Llama-2-13b-chat-hf' | 'meta-llama/Llama-2-70b-chat-hf' | 'meta-llama/Llama-2-7b-chat-hf' | 'meta-llama/Llama-3.2-1B-Instruct' | 'meta-llama/Llama-3.2-3B-Instruct' | 'meta-llama/Meta-Llama-3-70B-Instruct' | 'meta-llama/Meta-Llama-3-8B-Instruct' | 'microsoft/Phi-3-medium-4k-instruct' | 'microsoft/WizardLM-2-7B' | 'mistralai/Mistral-7B-Instruct-v0.1' | 'mistralai/Mistral-7B-Instruct-v0.2' | 'mistralai/Mistral-7B-Instruct-v0.3' | 'mistralai/Mistral-Nemo-Instruct-2407' | 'mistralai/Mixtral-8x22B-Instruct-v0.1' | 'mistralai/Mixtral-8x22B-v0.1' | 'mistralai/Mixtral-8x7B-Instruct-v0.1' | 'nvidia/Nemotron-4-340B-Instruct' | 'openbmb/MiniCPM-Llama3-V-2_5' | 'openchat/openchat-3.6-8b' | 'openchat/openchat_3.5' | (string & {});
+ type DeepInfraChatModelId = 'meta-llama/Llama-3.3-70B-Instruct' | 'meta-llama/Llama-3.3-70B-Instruct-Turbo' | 'meta-llama/Meta-Llama-3.1-70B-Instruct' | 'meta-llama/Meta-Llama-3.1-8B-Instruct' | 'meta-llama/Meta-Llama-3.1-405B-Instruct' | 'Qwen/QwQ-32B-Preview' | 'meta-llama/Meta-Llama-3.1-8B-Instruct-Turbo' | 'meta-llama/Meta-Llama-3.1-70B-Instruct-Turbo' | 'Qwen/Qwen2.5-Coder-32B-Instruct' | 'nvidia/Llama-3.1-Nemotron-70B-Instruct' | 'Qwen/Qwen2.5-72B-Instruct' | 'meta-llama/Llama-3.2-90B-Vision-Instruct' | 'meta-llama/Llama-3.2-11B-Vision-Instruct' | 'microsoft/WizardLM-2-8x22B' | '01-ai/Yi-34B-Chat' | 'Austism/chronos-hermes-13b-v2' | 'Gryphe/MythoMax-L2-13b' | 'Gryphe/MythoMax-L2-13b-turbo' | 'HuggingFaceH4/zephyr-orpo-141b-A35b-v0.1' | 'KoboldAI/LLaMA2-13B-Tiefighter' | 'NousResearch/Hermes-3-Llama-3.1-405B' | 'Phind/Phind-CodeLlama-34B-v2' | 'Qwen/Qwen2-72B-Instruct' | 'Qwen/Qwen2-7B-Instruct' | 'Qwen/Qwen2.5-7B-Instruct' | 'Qwen/Qwen2.5-Coder-7B' | 'Sao10K/L3-70B-Euryale-v2.1' | 'Sao10K/L3-8B-Lunaris-v1' | 'Sao10K/L3.1-70B-Euryale-v2.2' | 'bigcode/starcoder2-15b' | 'bigcode/starcoder2-15b-instruct-v0.1' | 'codellama/CodeLlama-34b-Instruct-hf' | 'codellama/CodeLlama-70b-Instruct-hf' | 'cognitivecomputations/dolphin-2.6-mixtral-8x7b' | 'cognitivecomputations/dolphin-2.9.1-llama-3-70b' | 'databricks/dbrx-instruct' | 'deepinfra/airoboros-70b' | 'deepseek-ai/DeepSeek-V3' | 'google/codegemma-7b-it' | 'google/gemma-1.1-7b-it' | 'google/gemma-2-27b-it' | 'google/gemma-2-9b-it' | 'lizpreciatior/lzlv_70b_fp16_hf' | 'mattshumer/Reflection-Llama-3.1-70B' | 'meta-llama/Llama-2-13b-chat-hf' | 'meta-llama/Llama-2-70b-chat-hf' | 'meta-llama/Llama-2-7b-chat-hf' | 'meta-llama/Llama-3.2-1B-Instruct' | 'meta-llama/Llama-3.2-3B-Instruct' | 'meta-llama/Meta-Llama-3-70B-Instruct' | 'meta-llama/Meta-Llama-3-8B-Instruct' | 'microsoft/Phi-3-medium-4k-instruct' | 'microsoft/WizardLM-2-7B' | 'mistralai/Mistral-7B-Instruct-v0.1' | 'mistralai/Mistral-7B-Instruct-v0.2' | 'mistralai/Mistral-7B-Instruct-v0.3' | 'mistralai/Mistral-Nemo-Instruct-2407' | 'mistralai/Mixtral-8x22B-Instruct-v0.1' | 'mistralai/Mixtral-8x22B-v0.1' | 'mistralai/Mixtral-8x7B-Instruct-v0.1' | 'nvidia/Nemotron-4-340B-Instruct' | 'openbmb/MiniCPM-Llama3-V-2_5' | 'openchat/openchat-3.6-8b' | 'openchat/openchat_3.5' | (string & {});
  interface DeepInfraChatSettings extends OpenAICompatibleChatSettings {
  }
 
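Both declaration bundles (`index.d.mts` and `index.d.ts`) add the `'deepseek-ai/DeepSeek-V3'` literal to the `DeepInfraChatModelId` union. The trailing `(string & {})` member keeps the union open: the listed ids get editor autocompletion, while any other string still type-checks. A small sketch of that behavior; the second model id is hypothetical:

```ts
import { deepinfra } from '@ai-sdk/deepinfra';

// Listed members of DeepInfraChatModelId autocomplete in the editor:
const deepseek = deepinfra('deepseek-ai/DeepSeek-V3');

// The `(string & {})` member still accepts ids that are not in the union:
const custom = deepinfra('my-org/some-private-model'); // hypothetical id
```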
package/dist/index.js.map CHANGED
@@ -1 +1 @@
- {"version":3,"sources":["../src/index.ts","../src/deepinfra-provider.ts"],"sourcesContent":["import { OpenAICompatibleErrorData } from '@ai-sdk/openai-compatible';\n\nexport { createDeepInfra, deepinfra } from './deepinfra-provider';\nexport type {\n DeepInfraProvider,\n DeepInfraProviderSettings,\n} from './deepinfra-provider';\nexport type { OpenAICompatibleErrorData as DeepInfraErrorData } from '@ai-sdk/openai-compatible';\n","import { LanguageModelV1, EmbeddingModelV1 } from '@ai-sdk/provider';\nimport {\n OpenAICompatibleChatLanguageModel,\n OpenAICompatibleCompletionLanguageModel,\n OpenAICompatibleEmbeddingModel,\n} from '@ai-sdk/openai-compatible';\nimport {\n FetchFunction,\n loadApiKey,\n withoutTrailingSlash,\n} from '@ai-sdk/provider-utils';\nimport {\n DeepInfraChatModelId,\n DeepInfraChatSettings,\n} from './deepinfra-chat-settings';\nimport {\n DeepInfraEmbeddingModelId,\n DeepInfraEmbeddingSettings,\n} from './deepinfra-embedding-settings';\nimport {\n DeepInfraCompletionModelId,\n DeepInfraCompletionSettings,\n} from './deepinfra-completion-settings';\n\nexport interface DeepInfraProviderSettings {\n /**\nDeepInfra API key.\n*/\n apiKey?: string;\n /**\nBase URL for the API calls.\n*/\n baseURL?: string;\n /**\nCustom headers to include in the requests.\n*/\n headers?: Record<string, string>;\n /**\nCustom fetch implementation. You can use it as a middleware to intercept requests,\nor to provide a custom fetch implementation for e.g. testing.\n*/\n fetch?: FetchFunction;\n}\n\nexport interface DeepInfraProvider {\n /**\nCreates a model for text generation.\n*/\n (\n modelId: DeepInfraChatModelId,\n settings?: DeepInfraChatSettings,\n ): LanguageModelV1;\n\n /**\nCreates a chat model for text generation.\n*/\n chatModel(\n modelId: DeepInfraChatModelId,\n settings?: DeepInfraChatSettings,\n ): LanguageModelV1;\n\n /**\nCreates a completion model for text generation.\n*/\n completionModel(\n modelId: DeepInfraCompletionModelId,\n settings?: DeepInfraCompletionSettings,\n ): LanguageModelV1;\n\n /**\nCreates a text embedding model for text generation.\n*/\n textEmbeddingModel(\n modelId: DeepInfraEmbeddingModelId,\n settings?: DeepInfraEmbeddingSettings,\n ): EmbeddingModelV1<string>;\n}\n\nexport function createDeepInfra(\n options: DeepInfraProviderSettings = {},\n): DeepInfraProvider {\n const baseURL = withoutTrailingSlash(\n options.baseURL ?? 
'https://api.deepinfra.com/v1/openai',\n );\n const getHeaders = () => ({\n Authorization: `Bearer ${loadApiKey({\n apiKey: options.apiKey,\n environmentVariableName: 'DEEPINFRA_API_KEY',\n description: \"DeepInfra's API key\",\n })}`,\n ...options.headers,\n });\n\n interface CommonModelConfig {\n provider: string;\n url: ({ path }: { path: string }) => string;\n headers: () => Record<string, string>;\n fetch?: FetchFunction;\n }\n\n const getCommonModelConfig = (modelType: string): CommonModelConfig => ({\n provider: `deepinfra.${modelType}`,\n url: ({ path }) => `${baseURL}${path}`,\n headers: getHeaders,\n fetch: options.fetch,\n });\n\n const createChatModel = (\n modelId: DeepInfraChatModelId,\n settings: DeepInfraChatSettings = {},\n ) => {\n return new OpenAICompatibleChatLanguageModel(modelId, settings, {\n ...getCommonModelConfig('chat'),\n defaultObjectGenerationMode: 'json',\n });\n };\n\n const createCompletionModel = (\n modelId: DeepInfraCompletionModelId,\n settings: DeepInfraCompletionSettings = {},\n ) =>\n new OpenAICompatibleCompletionLanguageModel(\n modelId,\n settings,\n getCommonModelConfig('completion'),\n );\n\n const createTextEmbeddingModel = (\n modelId: DeepInfraEmbeddingModelId,\n settings: DeepInfraEmbeddingSettings = {},\n ) =>\n new OpenAICompatibleEmbeddingModel(\n modelId,\n settings,\n getCommonModelConfig('embedding'),\n );\n\n const provider = (\n modelId: DeepInfraChatModelId,\n settings?: DeepInfraChatSettings,\n ) => createChatModel(modelId, settings);\n\n provider.completionModel = createCompletionModel;\n provider.chatModel = createChatModel;\n provider.textEmbeddingModel = createTextEmbeddingModel;\n\n return provider as DeepInfraProvider;\n}\n\nexport const deepinfra = createDeepInfra();\n"],"mappings":";;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;;;ACCA,+BAIO;AACP,4BAIO;AAoEA,SAAS,gBACd,UAAqC,CAAC,GACnB;AAhFrB;AAiFE,QAAM,cAAU;AAAA,KACd,aAAQ,YAAR,YAAmB;AAAA,EACrB;AACA,QAAM,aAAa,OAAO;AAAA,IACxB,eAAe,cAAU,kCAAW;AAAA,MAClC,QAAQ,QAAQ;AAAA,MAChB,yBAAyB;AAAA,MACzB,aAAa;AAAA,IACf,CAAC,CAAC;AAAA,IACF,GAAG,QAAQ;AAAA,EACb;AASA,QAAM,uBAAuB,CAAC,eAA0C;AAAA,IACtE,UAAU,aAAa,SAAS;AAAA,IAChC,KAAK,CAAC,EAAE,KAAK,MAAM,GAAG,OAAO,GAAG,IAAI;AAAA,IACpC,SAAS;AAAA,IACT,OAAO,QAAQ;AAAA,EACjB;AAEA,QAAM,kBAAkB,CACtB,SACA,WAAkC,CAAC,MAChC;AACH,WAAO,IAAI,2DAAkC,SAAS,UAAU;AAAA,MAC9D,GAAG,qBAAqB,MAAM;AAAA,MAC9B,6BAA6B;AAAA,IAC/B,CAAC;AAAA,EACH;AAEA,QAAM,wBAAwB,CAC5B,SACA,WAAwC,CAAC,MAEzC,IAAI;AAAA,IACF;AAAA,IACA;AAAA,IACA,qBAAqB,YAAY;AAAA,EACnC;AAEF,QAAM,2BAA2B,CAC/B,SACA,WAAuC,CAAC,MAExC,IAAI;AAAA,IACF;AAAA,IACA;AAAA,IACA,qBAAqB,WAAW;AAAA,EAClC;AAEF,QAAM,WAAW,CACf,SACA,aACG,gBAAgB,SAAS,QAAQ;AAEtC,WAAS,kBAAkB;AAC3B,WAAS,YAAY;AACrB,WAAS,qBAAqB;AAE9B,SAAO;AACT;AAEO,IAAM,YAAY,gBAAgB;","names":[]}
+ {"version":3,"sources":["../src/index.ts","../src/deepinfra-provider.ts"],"sourcesContent":["export { createDeepInfra, deepinfra } from './deepinfra-provider';\nexport type {\n DeepInfraProvider,\n DeepInfraProviderSettings,\n} from './deepinfra-provider';\nexport type { OpenAICompatibleErrorData as DeepInfraErrorData } from '@ai-sdk/openai-compatible';\n","import { LanguageModelV1, EmbeddingModelV1 } from '@ai-sdk/provider';\nimport {\n OpenAICompatibleChatLanguageModel,\n OpenAICompatibleCompletionLanguageModel,\n OpenAICompatibleEmbeddingModel,\n} from '@ai-sdk/openai-compatible';\nimport {\n FetchFunction,\n loadApiKey,\n withoutTrailingSlash,\n} from '@ai-sdk/provider-utils';\nimport {\n DeepInfraChatModelId,\n DeepInfraChatSettings,\n} from './deepinfra-chat-settings';\nimport {\n DeepInfraEmbeddingModelId,\n DeepInfraEmbeddingSettings,\n} from './deepinfra-embedding-settings';\nimport {\n DeepInfraCompletionModelId,\n DeepInfraCompletionSettings,\n} from './deepinfra-completion-settings';\n\nexport interface DeepInfraProviderSettings {\n /**\nDeepInfra API key.\n*/\n apiKey?: string;\n /**\nBase URL for the API calls.\n*/\n baseURL?: string;\n /**\nCustom headers to include in the requests.\n*/\n headers?: Record<string, string>;\n /**\nCustom fetch implementation. You can use it as a middleware to intercept requests,\nor to provide a custom fetch implementation for e.g. testing.\n*/\n fetch?: FetchFunction;\n}\n\nexport interface DeepInfraProvider {\n /**\nCreates a model for text generation.\n*/\n (\n modelId: DeepInfraChatModelId,\n settings?: DeepInfraChatSettings,\n ): LanguageModelV1;\n\n /**\nCreates a chat model for text generation.\n*/\n chatModel(\n modelId: DeepInfraChatModelId,\n settings?: DeepInfraChatSettings,\n ): LanguageModelV1;\n\n /**\nCreates a completion model for text generation.\n*/\n completionModel(\n modelId: DeepInfraCompletionModelId,\n settings?: DeepInfraCompletionSettings,\n ): LanguageModelV1;\n\n /**\nCreates a text embedding model for text generation.\n*/\n textEmbeddingModel(\n modelId: DeepInfraEmbeddingModelId,\n settings?: DeepInfraEmbeddingSettings,\n ): EmbeddingModelV1<string>;\n}\n\nexport function createDeepInfra(\n options: DeepInfraProviderSettings = {},\n): DeepInfraProvider {\n const baseURL = withoutTrailingSlash(\n options.baseURL ?? 
'https://api.deepinfra.com/v1/openai',\n );\n const getHeaders = () => ({\n Authorization: `Bearer ${loadApiKey({\n apiKey: options.apiKey,\n environmentVariableName: 'DEEPINFRA_API_KEY',\n description: \"DeepInfra's API key\",\n })}`,\n ...options.headers,\n });\n\n interface CommonModelConfig {\n provider: string;\n url: ({ path }: { path: string }) => string;\n headers: () => Record<string, string>;\n fetch?: FetchFunction;\n }\n\n const getCommonModelConfig = (modelType: string): CommonModelConfig => ({\n provider: `deepinfra.${modelType}`,\n url: ({ path }) => `${baseURL}${path}`,\n headers: getHeaders,\n fetch: options.fetch,\n });\n\n const createChatModel = (\n modelId: DeepInfraChatModelId,\n settings: DeepInfraChatSettings = {},\n ) => {\n return new OpenAICompatibleChatLanguageModel(modelId, settings, {\n ...getCommonModelConfig('chat'),\n defaultObjectGenerationMode: 'json',\n });\n };\n\n const createCompletionModel = (\n modelId: DeepInfraCompletionModelId,\n settings: DeepInfraCompletionSettings = {},\n ) =>\n new OpenAICompatibleCompletionLanguageModel(\n modelId,\n settings,\n getCommonModelConfig('completion'),\n );\n\n const createTextEmbeddingModel = (\n modelId: DeepInfraEmbeddingModelId,\n settings: DeepInfraEmbeddingSettings = {},\n ) =>\n new OpenAICompatibleEmbeddingModel(\n modelId,\n settings,\n getCommonModelConfig('embedding'),\n );\n\n const provider = (\n modelId: DeepInfraChatModelId,\n settings?: DeepInfraChatSettings,\n ) => createChatModel(modelId, settings);\n\n provider.completionModel = createCompletionModel;\n provider.chatModel = createChatModel;\n provider.textEmbeddingModel = createTextEmbeddingModel;\n\n return provider as DeepInfraProvider;\n}\n\nexport const deepinfra = createDeepInfra();\n"],"mappings":";;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;;;ACCA,+BAIO;AACP,4BAIO;AAoEA,SAAS,gBACd,UAAqC,CAAC,GACnB;AAhFrB;AAiFE,QAAM,cAAU;AAAA,KACd,aAAQ,YAAR,YAAmB;AAAA,EACrB;AACA,QAAM,aAAa,OAAO;AAAA,IACxB,eAAe,cAAU,kCAAW;AAAA,MAClC,QAAQ,QAAQ;AAAA,MAChB,yBAAyB;AAAA,MACzB,aAAa;AAAA,IACf,CAAC,CAAC;AAAA,IACF,GAAG,QAAQ;AAAA,EACb;AASA,QAAM,uBAAuB,CAAC,eAA0C;AAAA,IACtE,UAAU,aAAa,SAAS;AAAA,IAChC,KAAK,CAAC,EAAE,KAAK,MAAM,GAAG,OAAO,GAAG,IAAI;AAAA,IACpC,SAAS;AAAA,IACT,OAAO,QAAQ;AAAA,EACjB;AAEA,QAAM,kBAAkB,CACtB,SACA,WAAkC,CAAC,MAChC;AACH,WAAO,IAAI,2DAAkC,SAAS,UAAU;AAAA,MAC9D,GAAG,qBAAqB,MAAM;AAAA,MAC9B,6BAA6B;AAAA,IAC/B,CAAC;AAAA,EACH;AAEA,QAAM,wBAAwB,CAC5B,SACA,WAAwC,CAAC,MAEzC,IAAI;AAAA,IACF;AAAA,IACA;AAAA,IACA,qBAAqB,YAAY;AAAA,EACnC;AAEF,QAAM,2BAA2B,CAC/B,SACA,WAAuC,CAAC,MAExC,IAAI;AAAA,IACF;AAAA,IACA;AAAA,IACA,qBAAqB,WAAW;AAAA,EAClC;AAEF,QAAM,WAAW,CACf,SACA,aACG,gBAAgB,SAAS,QAAQ;AAEtC,WAAS,kBAAkB;AAC3B,WAAS,YAAY;AACrB,WAAS,qBAAqB;AAE9B,SAAO;AACT;AAEO,IAAM,YAAY,gBAAgB;","names":[]}
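The source map change reflects `src/index.ts` no longer importing `OpenAICompatibleErrorData` as a value; the type-only re-export as `DeepInfraErrorData` remains. The embedded provider source also documents the `createDeepInfra` options (`apiKey`, `baseURL`, `headers`, `fetch`) and the `chatModel`, `completionModel`, and `textEmbeddingModel` factories. A minimal configuration sketch; the custom header is hypothetical:

```ts
import { createDeepInfra } from '@ai-sdk/deepinfra';

// All options are optional; when apiKey is omitted, the provider reads
// the DEEPINFRA_API_KEY environment variable (see loadApiKey in the source above).
const provider = createDeepInfra({
  baseURL: 'https://api.deepinfra.com/v1/openai', // default from the embedded source
  headers: { 'x-example-header': 'diff-review' }, // hypothetical custom header
});

// The provider is callable (chat model by default) and exposes explicit factories:
const viaCall = provider('deepseek-ai/DeepSeek-V3');
const viaFactory = provider.chatModel('deepseek-ai/DeepSeek-V3');
```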
package/package.json CHANGED
@@ -1,6 +1,6 @@
  {
  "name": "@ai-sdk/deepinfra",
- "version": "0.0.4",
+ "version": "0.0.5",
  "license": "Apache-2.0",
  "sideEffects": false,
  "main": "./dist/index.js",