@chainfuse/ai-tools 0.13.26 → 0.13.28
This diff covers publicly available package versions as published to one of the supported registries. It is provided for informational purposes only and reflects the changes between those versions as they appear in their respective public registries.
@@ -1,4 +1,3 @@
-import type { GoogleGenerativeAIProvider } from '@ai-sdk/google';
 import { AiBase } from '../base.mjs';
 import type { AiRequestConfig } from '../types.mjs';
 export declare class AiCustomProviders extends AiBase {
@@ -6,7 +5,7 @@ export declare class AiCustomProviders extends AiBase {
 azOpenai(args: AiRequestConfig): Promise<import("./types.mts").AzureOpenAIProvider>;
 anthropic(args: AiRequestConfig): Promise<import("@ai-sdk/anthropic").AnthropicProvider>;
 private static workersAiIsRest;
-
cfWorkersAi(args: AiRequestConfig): Promise<import("@ai-sdk/openai-compatible").OpenAICompatibleProvider<"@cf/qwen/qwen1.5-0.5b-chat" | "@cf/google/gemma-2b-it-lora" | "@hf/nexusflow/starling-lm-7b-beta" | "@cf/meta/llama-3-8b-instruct" | "@cf/meta/llama-3.2-3b-instruct" | "@hf/thebloke/llamaguard-7b-awq" | "@hf/thebloke/neural-chat-7b-v3-1-awq" | "@cf/meta/llama-guard-3-8b" | "@cf/meta/llama-2-7b-chat-fp16" | "@cf/mistral/mistral-7b-instruct-v0.1" | "@cf/mistral/mistral-7b-instruct-v0.2-lora" | "@cf/tinyllama/tinyllama-1.1b-chat-v1.0" | "@hf/mistral/mistral-7b-instruct-v0.2" | "@cf/fblgit/una-cybertron-7b-v2-bf16" | "@cf/deepseek-ai/deepseek-r1-distill-qwen-32b" | "@cf/thebloke/discolm-german-7b-v1-awq" | "@cf/meta/llama-2-7b-chat-int8" | "@cf/meta/llama-3.1-8b-instruct-fp8" | "@hf/thebloke/mistral-7b-instruct-v0.1-awq" | "@cf/qwen/qwen1.5-7b-chat-awq" | "@cf/meta/llama-3.2-1b-instruct" | "@hf/thebloke/llama-2-13b-chat-awq" | "@hf/thebloke/deepseek-coder-6.7b-base-awq" | "@cf/meta-llama/llama-2-7b-chat-hf-lora" | "@cf/meta/llama-3.3-70b-instruct-fp8-fast" | "@hf/thebloke/openhermes-2.5-mistral-7b-awq" | "@hf/thebloke/deepseek-coder-6.7b-instruct-awq" | "@cf/qwen/qwen2.5-coder-32b-instruct" | "@cf/deepseek-ai/deepseek-math-7b-instruct" | "@cf/tiiuae/falcon-7b-instruct" | "@hf/nousresearch/hermes-2-pro-mistral-7b" | "@cf/meta/llama-3.1-8b-instruct-awq" | "@hf/thebloke/zephyr-7b-beta-awq" | "@cf/google/gemma-7b-it-lora" | "@cf/qwen/qwen1.5-1.8b-chat" | "@cf/mistralai/mistral-small-3.1-24b-instruct" | "@cf/meta/llama-3-8b-instruct-awq" | "@cf/meta/llama-3.2-11b-vision-instruct" | "@cf/defog/sqlcoder-7b-2" | "@cf/microsoft/phi-2" | "@hf/meta-llama/meta-llama-3-8b-instruct" | "@hf/google/gemma-7b-it" | "@cf/qwen/qwen1.5-14b-chat-awq" | "@cf/openchat/openchat-3.5-0106" | "@cf/meta/llama-4-scout-17b-16e-instruct" | "@cf/google/gemma-3-12b-it" | "@cf/qwen/qwq-32b", "@cf/qwen/qwen1.5-0.5b-chat" | "@cf/google/gemma-2b-it-lora" | "@hf/nexusflow/starling-lm-7b-beta" | "@cf/meta/llama-3-8b-instruct" | "@cf/meta/llama-3.2-3b-instruct" | "@hf/thebloke/llamaguard-7b-awq" | "@hf/thebloke/neural-chat-7b-v3-1-awq" | "@cf/meta/llama-guard-3-8b" | "@cf/meta/llama-2-7b-chat-fp16" | "@cf/mistral/mistral-7b-instruct-v0.1" | "@cf/mistral/mistral-7b-instruct-v0.2-lora" | "@cf/tinyllama/tinyllama-1.1b-chat-v1.0" | "@hf/mistral/mistral-7b-instruct-v0.2" | "@cf/fblgit/una-cybertron-7b-v2-bf16" | "@cf/deepseek-ai/deepseek-r1-distill-qwen-32b" | "@cf/thebloke/discolm-german-7b-v1-awq" | "@cf/meta/llama-2-7b-chat-int8" | "@cf/meta/llama-3.1-8b-instruct-fp8" | "@hf/thebloke/mistral-7b-instruct-v0.1-awq" | "@cf/qwen/qwen1.5-7b-chat-awq" | "@cf/meta/llama-3.2-1b-instruct" | "@hf/thebloke/llama-2-13b-chat-awq" | "@hf/thebloke/deepseek-coder-6.7b-base-awq" | "@cf/meta-llama/llama-2-7b-chat-hf-lora" | "@cf/meta/llama-3.3-70b-instruct-fp8-fast" | "@hf/thebloke/openhermes-2.5-mistral-7b-awq" | "@hf/thebloke/deepseek-coder-6.7b-instruct-awq" | "@cf/qwen/qwen2.5-coder-32b-instruct" | "@cf/deepseek-ai/deepseek-math-7b-instruct" | "@cf/tiiuae/falcon-7b-instruct" | "@hf/nousresearch/hermes-2-pro-mistral-7b" | "@cf/meta/llama-3.1-8b-instruct-awq" | "@hf/thebloke/zephyr-7b-beta-awq" | "@cf/google/gemma-7b-it-lora" | "@cf/qwen/qwen1.5-1.8b-chat" | "@cf/mistralai/mistral-small-3.1-24b-instruct" | "@cf/meta/llama-3-8b-instruct-awq" | "@cf/meta/llama-3.2-11b-vision-instruct" | "@cf/defog/sqlcoder-7b-2" | "@cf/microsoft/phi-2" | "@hf/meta-llama/meta-llama-3-8b-instruct" | "@hf/google/gemma-7b-it" | "@cf/qwen/qwen1.5-14b-chat-awq" | 
"@cf/openchat/openchat-3.5-0106" | "@cf/meta/llama-4-scout-17b-16e-instruct" | "@cf/google/gemma-3-12b-it" | "@cf/qwen/qwq-32b",
+
cfWorkersAi(args: AiRequestConfig): Promise<import("@ai-sdk/openai-compatible").OpenAICompatibleProvider<"@cf/openai/gpt-oss-120b" | "@cf/qwen/qwen1.5-0.5b-chat" | "@cf/google/gemma-2b-it-lora" | "@hf/nexusflow/starling-lm-7b-beta" | "@cf/meta/llama-3-8b-instruct" | "@cf/meta/llama-3.2-3b-instruct" | "@hf/thebloke/llamaguard-7b-awq" | "@hf/thebloke/neural-chat-7b-v3-1-awq" | "@cf/meta/llama-guard-3-8b" | "@cf/meta/llama-2-7b-chat-fp16" | "@cf/mistral/mistral-7b-instruct-v0.1" | "@cf/mistral/mistral-7b-instruct-v0.2-lora" | "@cf/tinyllama/tinyllama-1.1b-chat-v1.0" | "@hf/mistral/mistral-7b-instruct-v0.2" | "@cf/fblgit/una-cybertron-7b-v2-bf16" | "@cf/deepseek-ai/deepseek-r1-distill-qwen-32b" | "@cf/thebloke/discolm-german-7b-v1-awq" | "@cf/meta/llama-2-7b-chat-int8" | "@cf/meta/llama-3.1-8b-instruct-fp8" | "@hf/thebloke/mistral-7b-instruct-v0.1-awq" | "@cf/qwen/qwen1.5-7b-chat-awq" | "@cf/meta/llama-3.2-1b-instruct" | "@hf/thebloke/llama-2-13b-chat-awq" | "@hf/thebloke/deepseek-coder-6.7b-base-awq" | "@cf/meta-llama/llama-2-7b-chat-hf-lora" | "@cf/meta/llama-3.3-70b-instruct-fp8-fast" | "@hf/thebloke/openhermes-2.5-mistral-7b-awq" | "@hf/thebloke/deepseek-coder-6.7b-instruct-awq" | "@cf/qwen/qwen2.5-coder-32b-instruct" | "@cf/deepseek-ai/deepseek-math-7b-instruct" | "@cf/tiiuae/falcon-7b-instruct" | "@hf/nousresearch/hermes-2-pro-mistral-7b" | "@cf/meta/llama-3.1-8b-instruct-awq" | "@hf/thebloke/zephyr-7b-beta-awq" | "@cf/google/gemma-7b-it-lora" | "@cf/qwen/qwen1.5-1.8b-chat" | "@cf/mistralai/mistral-small-3.1-24b-instruct" | "@cf/meta/llama-3-8b-instruct-awq" | "@cf/meta/llama-3.2-11b-vision-instruct" | "@cf/defog/sqlcoder-7b-2" | "@cf/microsoft/phi-2" | "@hf/meta-llama/meta-llama-3-8b-instruct" | "@cf/openai/gpt-oss-20b" | "@hf/google/gemma-7b-it" | "@cf/qwen/qwen1.5-14b-chat-awq" | "@cf/openchat/openchat-3.5-0106" | "@cf/meta/llama-4-scout-17b-16e-instruct" | "@cf/google/gemma-3-12b-it" | "@cf/qwen/qwq-32b", "@cf/openai/gpt-oss-120b" | "@cf/qwen/qwen1.5-0.5b-chat" | "@cf/google/gemma-2b-it-lora" | "@hf/nexusflow/starling-lm-7b-beta" | "@cf/meta/llama-3-8b-instruct" | "@cf/meta/llama-3.2-3b-instruct" | "@hf/thebloke/llamaguard-7b-awq" | "@hf/thebloke/neural-chat-7b-v3-1-awq" | "@cf/meta/llama-guard-3-8b" | "@cf/meta/llama-2-7b-chat-fp16" | "@cf/mistral/mistral-7b-instruct-v0.1" | "@cf/mistral/mistral-7b-instruct-v0.2-lora" | "@cf/tinyllama/tinyllama-1.1b-chat-v1.0" | "@hf/mistral/mistral-7b-instruct-v0.2" | "@cf/fblgit/una-cybertron-7b-v2-bf16" | "@cf/deepseek-ai/deepseek-r1-distill-qwen-32b" | "@cf/thebloke/discolm-german-7b-v1-awq" | "@cf/meta/llama-2-7b-chat-int8" | "@cf/meta/llama-3.1-8b-instruct-fp8" | "@hf/thebloke/mistral-7b-instruct-v0.1-awq" | "@cf/qwen/qwen1.5-7b-chat-awq" | "@cf/meta/llama-3.2-1b-instruct" | "@hf/thebloke/llama-2-13b-chat-awq" | "@hf/thebloke/deepseek-coder-6.7b-base-awq" | "@cf/meta-llama/llama-2-7b-chat-hf-lora" | "@cf/meta/llama-3.3-70b-instruct-fp8-fast" | "@hf/thebloke/openhermes-2.5-mistral-7b-awq" | "@hf/thebloke/deepseek-coder-6.7b-instruct-awq" | "@cf/qwen/qwen2.5-coder-32b-instruct" | "@cf/deepseek-ai/deepseek-math-7b-instruct" | "@cf/tiiuae/falcon-7b-instruct" | "@hf/nousresearch/hermes-2-pro-mistral-7b" | "@cf/meta/llama-3.1-8b-instruct-awq" | "@hf/thebloke/zephyr-7b-beta-awq" | "@cf/google/gemma-7b-it-lora" | "@cf/qwen/qwen1.5-1.8b-chat" | "@cf/mistralai/mistral-small-3.1-24b-instruct" | "@cf/meta/llama-3-8b-instruct-awq" | "@cf/meta/llama-3.2-11b-vision-instruct" | "@cf/defog/sqlcoder-7b-2" | "@cf/microsoft/phi-2" | 
"@hf/meta-llama/meta-llama-3-8b-instruct" | "@cf/openai/gpt-oss-20b" | "@hf/google/gemma-7b-it" | "@cf/qwen/qwen1.5-14b-chat-awq" | "@cf/openchat/openchat-3.5-0106" | "@cf/meta/llama-4-scout-17b-16e-instruct" | "@cf/google/gemma-3-12b-it" | "@cf/qwen/qwq-32b", string, string>>;
 custom(args: AiRequestConfig): Promise<import("@ai-sdk/openai-compatible").OpenAICompatibleProvider<string, string, string, string>>;
-googleAi(args: AiRequestConfig): Promise<GoogleGenerativeAIProvider>;
+googleAi(args: AiRequestConfig): Promise<import("@ai-sdk/google").GoogleGenerativeAIProvider>;
 }
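The substantive change in this declaration file is the model-id union behind cfWorkersAi(), which now includes "@cf/openai/gpt-oss-120b" and "@cf/openai/gpt-oss-20b", plus the switch to an inline import type for googleAi()'s return value. A minimal sketch of what the widened union means for callers; the import path and the way `providers`/`args` are obtained are assumptions, since the AiBase config shape is not part of this diff:

```ts
// Sketch only: entry point and construction are assumed, not taken from this diff.
import { generateText } from 'ai';
import type { AiCustomProviders } from '@chainfuse/ai-tools'; // assumed export path

declare const providers: AiCustomProviders; // construction depends on the AiBase config (not shown here)
declare const args: Parameters<AiCustomProviders['cfWorkersAi']>[0]; // i.e. AiRequestConfig

const workersAi = await providers.cfWorkersAi(args);

// '@cf/openai/gpt-oss-120b' (and '@cf/openai/gpt-oss-20b') only type-check from 0.13.28 onwards.
const { text } = await generateText({
  model: workersAi('@cf/openai/gpt-oss-120b'),
  prompt: 'Say hello.',
});
```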
@@ -1,5 +1,4 @@
 import { Helpers } from '@chainfuse/helpers';
-import { AiModels } from '@chainfuse/types/ai-tools';
 import { enabledCloudflareLlmProviders } from '@chainfuse/types/ai-tools/workers-ai';
 import { customProvider, TypeValidationError, wrapLanguageModel } from 'ai';
 import { ZodError } from 'zod/v3';
@@ -53,7 +52,8 @@ export class AiCustomProviders extends AiBase {
 .forEach((choice) => {
 controller.enqueue({
 type: 'text-delta',
-
+delta: choice.delta.content,
+id: '',
 });
 });
 }
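The enqueue change tracks the AI SDK v5 stream-part shape: a 'text-delta' part now carries the chunk text in `delta` together with an `id` field (the package passes an empty string). A minimal sketch of that mapping, assuming the input is the choices array of an OpenAI-compatible chat-completion chunk:

```ts
import type { LanguageModelV2StreamPart } from '@ai-sdk/provider';

// Map OpenAI-compatible streaming choices to AI SDK v5 text-delta stream parts.
function toTextDeltaParts(choices: Array<{ delta: { content?: string | null } }>): LanguageModelV2StreamPart[] {
  return choices
    .filter((choice): choice is { delta: { content: string } } => typeof choice.delta.content === 'string')
    .map((choice) => ({
      type: 'text-delta',
      delta: choice.delta.content, // the chunk text now lives in `delta`
      id: '', // v5 parts are keyed by an id; the diff above passes an empty string
    }));
}
```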
@@ -90,7 +90,10 @@ export class AiCustomProviders extends AiBase {
 * 2. Remove start and end
 * 3. Trim again to remove any leading/trailing whitespace
 */
-
+content: result.content.map((content) => ({
+...content,
+...('text' in content && { text: content.text.trim().replace(codeFenceStart, '').replace(codefenceEnd, '').trim() }),
+})),
 };
 }
 return result;
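The new mapping applies the trim / strip-fences / trim sequence described in the comment to every text part of the result. `codeFenceStart` and `codefenceEnd` are defined elsewhere in the file and are not shown in this diff, so the regexes below are illustrative stand-ins only:

```ts
// Illustrative regexes; the package's actual codeFenceStart/codefenceEnd definitions are not in this diff.
const codeFenceStart = /^```[\w-]*\r?\n?/;
const codefenceEnd = /```\s*$/;

function stripCodeFence(text: string): string {
  // 1. Trim, 2. remove start and end fences, 3. trim again — mirrors the comment in the source.
  return text.trim().replace(codeFenceStart, '').replace(codefenceEnd, '').trim();
}

stripCodeFence('```json\n{"ok":true}\n```'); // -> '{"ok":true}'
```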
@@ -111,14 +114,6 @@ export class AiCustomProviders extends AiBase {
 return new AiRawProviders(this.config).custom(args);
 }
 async googleAi(args) {
-
-return customProvider({
-languageModels: {
-[AiModels.LanguageModels.GoogleGenerativeAi.gemini_flash_lite_search.split(':').slice(1).join(':')]: fallbackProvider(AiModels.LanguageModels.GoogleGenerativeAi.gemini_flash_lite_search.split(':')[1], { useSearchGrounding: true }),
-[AiModels.LanguageModels.GoogleGenerativeAi.gemini_flash_search.split(':').slice(1).join(':')]: fallbackProvider(AiModels.LanguageModels.GoogleGenerativeAi.gemini_flash_search.split(':')[1], { useSearchGrounding: true }),
-[AiModels.LanguageModels.GoogleGenerativeAi.gemini_pro_search.split(':').slice(1).join(':')]: fallbackProvider(AiModels.LanguageModels.GoogleGenerativeAi.gemini_pro_search.split(':')[1], { useSearchGrounding: true }),
-},
-fallbackProvider,
-});
+return new AiRawProviders(this.config).googleAi(args);
 }
 }
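After this change googleAi() no longer wraps the Google provider in a customProvider with search-grounded Gemini aliases; it simply returns the raw @ai-sdk/google provider from AiRawProviders, matching the updated declaration above. A usage sketch under the same assumptions as earlier (instance construction and the AiRequestConfig contents are outside this diff; the model id is illustrative):

```ts
import { generateText } from 'ai';
import type { AiCustomProviders } from '@chainfuse/ai-tools'; // assumed entry point

declare const providers: AiCustomProviders;
declare const args: Parameters<AiCustomProviders['googleAi']>[0];

// Now resolves to a plain GoogleGenerativeAIProvider; the *_search aliases registered by the old
// customProvider block are gone, so callers pass ordinary Gemini model ids.
const google = await providers.googleAi(args);
const { text } = await generateText({
  model: google('gemini-2.0-flash'), // illustrative model id
  prompt: 'One-line summary of this release, please.',
});
```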
@@ -13,5 +13,5 @@ export declare class AiRawProviders extends AiBase {
 custom(args: AiRequestConfig): Promise<OpenAICompatibleProvider<string, string, string, string>>;
 googleAi(args: AiRequestConfig): Promise<import("@ai-sdk/google").GoogleGenerativeAIProvider>;
 restWorkersAi(args: AiRequestConfig): Promise<OpenAICompatibleProvider<cloudflareModelPossibilities<'Text Generation'>, cloudflareModelPossibilities<'Text Generation'>, cloudflareModelPossibilities<'Text Embeddings'>>>;
-
bindingWorkersAi(args: AiRequestConfig): Promise<OpenAICompatibleProvider<"@cf/qwen/qwen1.5-0.5b-chat" | "@cf/google/gemma-2b-it-lora" | "@hf/nexusflow/starling-lm-7b-beta" | "@cf/meta/llama-3-8b-instruct" | "@cf/meta/llama-3.2-3b-instruct" | "@hf/thebloke/llamaguard-7b-awq" | "@hf/thebloke/neural-chat-7b-v3-1-awq" | "@cf/meta/llama-guard-3-8b" | "@cf/meta/llama-2-7b-chat-fp16" | "@cf/mistral/mistral-7b-instruct-v0.1" | "@cf/mistral/mistral-7b-instruct-v0.2-lora" | "@cf/tinyllama/tinyllama-1.1b-chat-v1.0" | "@hf/mistral/mistral-7b-instruct-v0.2" | "@cf/fblgit/una-cybertron-7b-v2-bf16" | "@cf/deepseek-ai/deepseek-r1-distill-qwen-32b" | "@cf/thebloke/discolm-german-7b-v1-awq" | "@cf/meta/llama-2-7b-chat-int8" | "@cf/meta/llama-3.1-8b-instruct-fp8" | "@hf/thebloke/mistral-7b-instruct-v0.1-awq" | "@cf/qwen/qwen1.5-7b-chat-awq" | "@cf/meta/llama-3.2-1b-instruct" | "@hf/thebloke/llama-2-13b-chat-awq" | "@hf/thebloke/deepseek-coder-6.7b-base-awq" | "@cf/meta-llama/llama-2-7b-chat-hf-lora" | "@cf/meta/llama-3.3-70b-instruct-fp8-fast" | "@hf/thebloke/openhermes-2.5-mistral-7b-awq" | "@hf/thebloke/deepseek-coder-6.7b-instruct-awq" | "@cf/qwen/qwen2.5-coder-32b-instruct" | "@cf/deepseek-ai/deepseek-math-7b-instruct" | "@cf/tiiuae/falcon-7b-instruct" | "@hf/nousresearch/hermes-2-pro-mistral-7b" | "@cf/meta/llama-3.1-8b-instruct-awq" | "@hf/thebloke/zephyr-7b-beta-awq" | "@cf/google/gemma-7b-it-lora" | "@cf/qwen/qwen1.5-1.8b-chat" | "@cf/mistralai/mistral-small-3.1-24b-instruct" | "@cf/meta/llama-3-8b-instruct-awq" | "@cf/meta/llama-3.2-11b-vision-instruct" | "@cf/defog/sqlcoder-7b-2" | "@cf/microsoft/phi-2" | "@hf/meta-llama/meta-llama-3-8b-instruct" | "@hf/google/gemma-7b-it" | "@cf/qwen/qwen1.5-14b-chat-awq" | "@cf/openchat/openchat-3.5-0106" | "@cf/meta/llama-4-scout-17b-16e-instruct" | "@cf/google/gemma-3-12b-it" | "@cf/qwen/qwq-32b", "@cf/qwen/qwen1.5-0.5b-chat" | "@cf/google/gemma-2b-it-lora" | "@hf/nexusflow/starling-lm-7b-beta" | "@cf/meta/llama-3-8b-instruct" | "@cf/meta/llama-3.2-3b-instruct" | "@hf/thebloke/llamaguard-7b-awq" | "@hf/thebloke/neural-chat-7b-v3-1-awq" | "@cf/meta/llama-guard-3-8b" | "@cf/meta/llama-2-7b-chat-fp16" | "@cf/mistral/mistral-7b-instruct-v0.1" | "@cf/mistral/mistral-7b-instruct-v0.2-lora" | "@cf/tinyllama/tinyllama-1.1b-chat-v1.0" | "@hf/mistral/mistral-7b-instruct-v0.2" | "@cf/fblgit/una-cybertron-7b-v2-bf16" | "@cf/deepseek-ai/deepseek-r1-distill-qwen-32b" | "@cf/thebloke/discolm-german-7b-v1-awq" | "@cf/meta/llama-2-7b-chat-int8" | "@cf/meta/llama-3.1-8b-instruct-fp8" | "@hf/thebloke/mistral-7b-instruct-v0.1-awq" | "@cf/qwen/qwen1.5-7b-chat-awq" | "@cf/meta/llama-3.2-1b-instruct" | "@hf/thebloke/llama-2-13b-chat-awq" | "@hf/thebloke/deepseek-coder-6.7b-base-awq" | "@cf/meta-llama/llama-2-7b-chat-hf-lora" | "@cf/meta/llama-3.3-70b-instruct-fp8-fast" | "@hf/thebloke/openhermes-2.5-mistral-7b-awq" | "@hf/thebloke/deepseek-coder-6.7b-instruct-awq" | "@cf/qwen/qwen2.5-coder-32b-instruct" | "@cf/deepseek-ai/deepseek-math-7b-instruct" | "@cf/tiiuae/falcon-7b-instruct" | "@hf/nousresearch/hermes-2-pro-mistral-7b" | "@cf/meta/llama-3.1-8b-instruct-awq" | "@hf/thebloke/zephyr-7b-beta-awq" | "@cf/google/gemma-7b-it-lora" | "@cf/qwen/qwen1.5-1.8b-chat" | "@cf/mistralai/mistral-small-3.1-24b-instruct" | "@cf/meta/llama-3-8b-instruct-awq" | "@cf/meta/llama-3.2-11b-vision-instruct" | "@cf/defog/sqlcoder-7b-2" | "@cf/microsoft/phi-2" | "@hf/meta-llama/meta-llama-3-8b-instruct" | "@hf/google/gemma-7b-it" | "@cf/qwen/qwen1.5-14b-chat-awq" | "@cf/openchat/openchat-3.5-0106" | 
"@cf/meta/llama-4-scout-17b-16e-instruct" | "@cf/google/gemma-3-12b-it" | "@cf/qwen/qwq-32b", string, string>>;
+
bindingWorkersAi(args: AiRequestConfig): Promise<OpenAICompatibleProvider<"@cf/openai/gpt-oss-120b" | "@cf/qwen/qwen1.5-0.5b-chat" | "@cf/google/gemma-2b-it-lora" | "@hf/nexusflow/starling-lm-7b-beta" | "@cf/meta/llama-3-8b-instruct" | "@cf/meta/llama-3.2-3b-instruct" | "@hf/thebloke/llamaguard-7b-awq" | "@hf/thebloke/neural-chat-7b-v3-1-awq" | "@cf/meta/llama-guard-3-8b" | "@cf/meta/llama-2-7b-chat-fp16" | "@cf/mistral/mistral-7b-instruct-v0.1" | "@cf/mistral/mistral-7b-instruct-v0.2-lora" | "@cf/tinyllama/tinyllama-1.1b-chat-v1.0" | "@hf/mistral/mistral-7b-instruct-v0.2" | "@cf/fblgit/una-cybertron-7b-v2-bf16" | "@cf/deepseek-ai/deepseek-r1-distill-qwen-32b" | "@cf/thebloke/discolm-german-7b-v1-awq" | "@cf/meta/llama-2-7b-chat-int8" | "@cf/meta/llama-3.1-8b-instruct-fp8" | "@hf/thebloke/mistral-7b-instruct-v0.1-awq" | "@cf/qwen/qwen1.5-7b-chat-awq" | "@cf/meta/llama-3.2-1b-instruct" | "@hf/thebloke/llama-2-13b-chat-awq" | "@hf/thebloke/deepseek-coder-6.7b-base-awq" | "@cf/meta-llama/llama-2-7b-chat-hf-lora" | "@cf/meta/llama-3.3-70b-instruct-fp8-fast" | "@hf/thebloke/openhermes-2.5-mistral-7b-awq" | "@hf/thebloke/deepseek-coder-6.7b-instruct-awq" | "@cf/qwen/qwen2.5-coder-32b-instruct" | "@cf/deepseek-ai/deepseek-math-7b-instruct" | "@cf/tiiuae/falcon-7b-instruct" | "@hf/nousresearch/hermes-2-pro-mistral-7b" | "@cf/meta/llama-3.1-8b-instruct-awq" | "@hf/thebloke/zephyr-7b-beta-awq" | "@cf/google/gemma-7b-it-lora" | "@cf/qwen/qwen1.5-1.8b-chat" | "@cf/mistralai/mistral-small-3.1-24b-instruct" | "@cf/meta/llama-3-8b-instruct-awq" | "@cf/meta/llama-3.2-11b-vision-instruct" | "@cf/defog/sqlcoder-7b-2" | "@cf/microsoft/phi-2" | "@hf/meta-llama/meta-llama-3-8b-instruct" | "@cf/openai/gpt-oss-20b" | "@hf/google/gemma-7b-it" | "@cf/qwen/qwen1.5-14b-chat-awq" | "@cf/openchat/openchat-3.5-0106" | "@cf/meta/llama-4-scout-17b-16e-instruct" | "@cf/google/gemma-3-12b-it" | "@cf/qwen/qwq-32b", "@cf/openai/gpt-oss-120b" | "@cf/qwen/qwen1.5-0.5b-chat" | "@cf/google/gemma-2b-it-lora" | "@hf/nexusflow/starling-lm-7b-beta" | "@cf/meta/llama-3-8b-instruct" | "@cf/meta/llama-3.2-3b-instruct" | "@hf/thebloke/llamaguard-7b-awq" | "@hf/thebloke/neural-chat-7b-v3-1-awq" | "@cf/meta/llama-guard-3-8b" | "@cf/meta/llama-2-7b-chat-fp16" | "@cf/mistral/mistral-7b-instruct-v0.1" | "@cf/mistral/mistral-7b-instruct-v0.2-lora" | "@cf/tinyllama/tinyllama-1.1b-chat-v1.0" | "@hf/mistral/mistral-7b-instruct-v0.2" | "@cf/fblgit/una-cybertron-7b-v2-bf16" | "@cf/deepseek-ai/deepseek-r1-distill-qwen-32b" | "@cf/thebloke/discolm-german-7b-v1-awq" | "@cf/meta/llama-2-7b-chat-int8" | "@cf/meta/llama-3.1-8b-instruct-fp8" | "@hf/thebloke/mistral-7b-instruct-v0.1-awq" | "@cf/qwen/qwen1.5-7b-chat-awq" | "@cf/meta/llama-3.2-1b-instruct" | "@hf/thebloke/llama-2-13b-chat-awq" | "@hf/thebloke/deepseek-coder-6.7b-base-awq" | "@cf/meta-llama/llama-2-7b-chat-hf-lora" | "@cf/meta/llama-3.3-70b-instruct-fp8-fast" | "@hf/thebloke/openhermes-2.5-mistral-7b-awq" | "@hf/thebloke/deepseek-coder-6.7b-instruct-awq" | "@cf/qwen/qwen2.5-coder-32b-instruct" | "@cf/deepseek-ai/deepseek-math-7b-instruct" | "@cf/tiiuae/falcon-7b-instruct" | "@hf/nousresearch/hermes-2-pro-mistral-7b" | "@cf/meta/llama-3.1-8b-instruct-awq" | "@hf/thebloke/zephyr-7b-beta-awq" | "@cf/google/gemma-7b-it-lora" | "@cf/qwen/qwen1.5-1.8b-chat" | "@cf/mistralai/mistral-small-3.1-24b-instruct" | "@cf/meta/llama-3-8b-instruct-awq" | "@cf/meta/llama-3.2-11b-vision-instruct" | "@cf/defog/sqlcoder-7b-2" | "@cf/microsoft/phi-2" | "@hf/meta-llama/meta-llama-3-8b-instruct" | 
"@cf/openai/gpt-oss-20b" | "@hf/google/gemma-7b-it" | "@cf/qwen/qwen1.5-14b-chat-awq" | "@cf/openchat/openchat-3.5-0106" | "@cf/meta/llama-4-scout-17b-16e-instruct" | "@cf/google/gemma-3-12b-it" | "@cf/qwen/qwq-32b", string, string>>;
 }
@@ -68,7 +68,6 @@ export class AiRawProviders extends AiBase {
 ...(args.cache && { 'cf-aig-cache-ttl': (typeof args.cache === 'boolean' ? (args.cache ? this.cacheTtl : 0) : args.cache).toString() }),
 ...(args.skipCache && { 'cf-aig-skip-cache': 'true' }),
 },
-compatibility: 'strict',
 fetch: async (input, rawInit) => {
 const startRoundTrip = performance.now();
 const headers = new Headers(rawInit?.headers);
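The dropped `compatibility: 'strict'` line was a v4-era provider option that the v5-era provider factories no longer accept; the AI Gateway cache headers and the timing fetch wrapper stay. A sketch of the surviving settings shape, assuming an OpenAI-compatible factory — the concrete factory, base URL, and key used by the package are not visible in this hunk:

```ts
import { createOpenAICompatible } from '@ai-sdk/openai-compatible';

// Illustrative settings: name/baseURL/apiKey are placeholders, the header mirrors the cf-aig-* lines
// above, and `compatibility` is simply absent — the option does not exist on the v5-era factories.
const provider = createOpenAICompatible({
  name: 'example-gateway',
  baseURL: 'https://gateway.example.invalid/v1',
  apiKey: 'placeholder',
  headers: { 'cf-aig-skip-cache': 'true' },
  fetch: async (input, init) => {
    const startRoundTrip = performance.now();
    const response = await fetch(input, init);
    console.debug(`round trip took ${Math.round(performance.now() - startRoundTrip)}ms`);
    return response;
  },
});
```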
@@ -1,15 +1,32 @@
-import type {
-import type { EmbeddingModelV1, LanguageModelV1 } from '@ai-sdk/provider';
+import type { EmbeddingModelV2, LanguageModelV2, ProviderV2 } from '@ai-sdk/provider';
 import type { AzureChatModels, AzureEmbeddingModels } from '@chainfuse/types/ai-tools/azure';
-
-
-(deploymentId: AzureChatModels, settings?: OpenAIChatSettings): LanguageModelV1;
+export interface AzureOpenAIProvider extends ProviderV2 {
+(deploymentId: AzureChatModels): LanguageModelV2;
 /**
 * Creates an Azure OpenAI chat model for text generation.
 */
-languageModel(deploymentId: AzureChatModels
+languageModel(deploymentId: AzureChatModels): LanguageModelV2;
+/**
+* Creates an Azure OpenAI chat model for text generation.
+*/
+chat(deploymentId: AzureChatModels): LanguageModelV2;
+/**
+* Creates an Azure OpenAI responses API model for text generation.
+*/
+responses(deploymentId: AzureChatModels): LanguageModelV2;
+/**
+* Creates an Azure OpenAI completion model for text generation.
+*/
+completion(deploymentId: AzureChatModels): LanguageModelV2;
+/**
+* Creates an Azure OpenAI DALL-E model for image generation.
+*/
+/**
+* Creates an Azure OpenAI DALL-E model for image generation.
+*/
+textEmbedding(deploymentId: AzureEmbeddingModels): EmbeddingModelV2<string>;
 /**
 * Creates an Azure OpenAI model for text embeddings.
 */
-textEmbeddingModel(deploymentId: AzureEmbeddingModels
+textEmbeddingModel(deploymentId: AzureEmbeddingModels): EmbeddingModelV2<string>;
 }
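The rewritten AzureOpenAIProvider interface moves from the V1 to the V2 model types and exposes the chat, responses, completion, and embedding entry points declared above. A consumption sketch, with the same construction caveats as the earlier examples; the deployment ids must be members of AzureChatModels / AzureEmbeddingModels and are illustrative here:

```ts
import { embed, generateText } from 'ai';
import type { AiCustomProviders } from '@chainfuse/ai-tools'; // assumed entry point

declare const providers: AiCustomProviders;
declare const args: Parameters<AiCustomProviders['azOpenai']>[0];

const azure = await providers.azOpenai(args); // AzureOpenAIProvider per the declaration above

// Each entry point now yields a LanguageModelV2 / EmbeddingModelV2<string>.
const { text } = await generateText({
  model: azure.responses('gpt-4o'), // illustrative deployment id
  prompt: 'ping',
});
const { embedding } = await embed({
  model: azure.textEmbeddingModel('text-embedding-3-small'), // illustrative deployment id
  value: 'ping',
});
```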
package/dist/registry.d.mts
CHANGED
@@ -7,7 +7,7 @@ export declare class AiRegistry extends AiBase {
 anthropic: import("@ai-sdk/anthropic").AnthropicProvider;
 custom: import("@ai-sdk/openai-compatible").OpenAICompatibleProvider<string, string, string, string>;
 'google.generative-ai': import("@ai-sdk/google").GoogleGenerativeAIProvider;
-
workersai: import("@ai-sdk/openai-compatible").OpenAICompatibleProvider<"@cf/qwen/qwen1.5-0.5b-chat" | "@cf/google/gemma-2b-it-lora" | "@hf/nexusflow/starling-lm-7b-beta" | "@cf/meta/llama-3-8b-instruct" | "@cf/meta/llama-3.2-3b-instruct" | "@hf/thebloke/llamaguard-7b-awq" | "@hf/thebloke/neural-chat-7b-v3-1-awq" | "@cf/meta/llama-guard-3-8b" | "@cf/meta/llama-2-7b-chat-fp16" | "@cf/mistral/mistral-7b-instruct-v0.1" | "@cf/mistral/mistral-7b-instruct-v0.2-lora" | "@cf/tinyllama/tinyllama-1.1b-chat-v1.0" | "@hf/mistral/mistral-7b-instruct-v0.2" | "@cf/fblgit/una-cybertron-7b-v2-bf16" | "@cf/deepseek-ai/deepseek-r1-distill-qwen-32b" | "@cf/thebloke/discolm-german-7b-v1-awq" | "@cf/meta/llama-2-7b-chat-int8" | "@cf/meta/llama-3.1-8b-instruct-fp8" | "@hf/thebloke/mistral-7b-instruct-v0.1-awq" | "@cf/qwen/qwen1.5-7b-chat-awq" | "@cf/meta/llama-3.2-1b-instruct" | "@hf/thebloke/llama-2-13b-chat-awq" | "@hf/thebloke/deepseek-coder-6.7b-base-awq" | "@cf/meta-llama/llama-2-7b-chat-hf-lora" | "@cf/meta/llama-3.3-70b-instruct-fp8-fast" | "@hf/thebloke/openhermes-2.5-mistral-7b-awq" | "@hf/thebloke/deepseek-coder-6.7b-instruct-awq" | "@cf/qwen/qwen2.5-coder-32b-instruct" | "@cf/deepseek-ai/deepseek-math-7b-instruct" | "@cf/tiiuae/falcon-7b-instruct" | "@hf/nousresearch/hermes-2-pro-mistral-7b" | "@cf/meta/llama-3.1-8b-instruct-awq" | "@hf/thebloke/zephyr-7b-beta-awq" | "@cf/google/gemma-7b-it-lora" | "@cf/qwen/qwen1.5-1.8b-chat" | "@cf/mistralai/mistral-small-3.1-24b-instruct" | "@cf/meta/llama-3-8b-instruct-awq" | "@cf/meta/llama-3.2-11b-vision-instruct" | "@cf/defog/sqlcoder-7b-2" | "@cf/microsoft/phi-2" | "@hf/meta-llama/meta-llama-3-8b-instruct" | "@hf/google/gemma-7b-it" | "@cf/qwen/qwen1.5-14b-chat-awq" | "@cf/openchat/openchat-3.5-0106" | "@cf/meta/llama-4-scout-17b-16e-instruct" | "@cf/google/gemma-3-12b-it" | "@cf/qwen/qwq-32b", "@cf/qwen/qwen1.5-0.5b-chat" | "@cf/google/gemma-2b-it-lora" | "@hf/nexusflow/starling-lm-7b-beta" | "@cf/meta/llama-3-8b-instruct" | "@cf/meta/llama-3.2-3b-instruct" | "@hf/thebloke/llamaguard-7b-awq" | "@hf/thebloke/neural-chat-7b-v3-1-awq" | "@cf/meta/llama-guard-3-8b" | "@cf/meta/llama-2-7b-chat-fp16" | "@cf/mistral/mistral-7b-instruct-v0.1" | "@cf/mistral/mistral-7b-instruct-v0.2-lora" | "@cf/tinyllama/tinyllama-1.1b-chat-v1.0" | "@hf/mistral/mistral-7b-instruct-v0.2" | "@cf/fblgit/una-cybertron-7b-v2-bf16" | "@cf/deepseek-ai/deepseek-r1-distill-qwen-32b" | "@cf/thebloke/discolm-german-7b-v1-awq" | "@cf/meta/llama-2-7b-chat-int8" | "@cf/meta/llama-3.1-8b-instruct-fp8" | "@hf/thebloke/mistral-7b-instruct-v0.1-awq" | "@cf/qwen/qwen1.5-7b-chat-awq" | "@cf/meta/llama-3.2-1b-instruct" | "@hf/thebloke/llama-2-13b-chat-awq" | "@hf/thebloke/deepseek-coder-6.7b-base-awq" | "@cf/meta-llama/llama-2-7b-chat-hf-lora" | "@cf/meta/llama-3.3-70b-instruct-fp8-fast" | "@hf/thebloke/openhermes-2.5-mistral-7b-awq" | "@hf/thebloke/deepseek-coder-6.7b-instruct-awq" | "@cf/qwen/qwen2.5-coder-32b-instruct" | "@cf/deepseek-ai/deepseek-math-7b-instruct" | "@cf/tiiuae/falcon-7b-instruct" | "@hf/nousresearch/hermes-2-pro-mistral-7b" | "@cf/meta/llama-3.1-8b-instruct-awq" | "@hf/thebloke/zephyr-7b-beta-awq" | "@cf/google/gemma-7b-it-lora" | "@cf/qwen/qwen1.5-1.8b-chat" | "@cf/mistralai/mistral-small-3.1-24b-instruct" | "@cf/meta/llama-3-8b-instruct-awq" | "@cf/meta/llama-3.2-11b-vision-instruct" | "@cf/defog/sqlcoder-7b-2" | "@cf/microsoft/phi-2" | "@hf/meta-llama/meta-llama-3-8b-instruct" | "@hf/google/gemma-7b-it" | "@cf/qwen/qwen1.5-14b-chat-awq" | "@cf/openchat/openchat-3.5-0106" | 
"@cf/meta/llama-4-scout-17b-16e-instruct" | "@cf/google/gemma-3-12b-it" | "@cf/qwen/qwq-32b",
+
workersai: import("@ai-sdk/openai-compatible").OpenAICompatibleProvider<"@cf/openai/gpt-oss-120b" | "@cf/qwen/qwen1.5-0.5b-chat" | "@cf/google/gemma-2b-it-lora" | "@hf/nexusflow/starling-lm-7b-beta" | "@cf/meta/llama-3-8b-instruct" | "@cf/meta/llama-3.2-3b-instruct" | "@hf/thebloke/llamaguard-7b-awq" | "@hf/thebloke/neural-chat-7b-v3-1-awq" | "@cf/meta/llama-guard-3-8b" | "@cf/meta/llama-2-7b-chat-fp16" | "@cf/mistral/mistral-7b-instruct-v0.1" | "@cf/mistral/mistral-7b-instruct-v0.2-lora" | "@cf/tinyllama/tinyllama-1.1b-chat-v1.0" | "@hf/mistral/mistral-7b-instruct-v0.2" | "@cf/fblgit/una-cybertron-7b-v2-bf16" | "@cf/deepseek-ai/deepseek-r1-distill-qwen-32b" | "@cf/thebloke/discolm-german-7b-v1-awq" | "@cf/meta/llama-2-7b-chat-int8" | "@cf/meta/llama-3.1-8b-instruct-fp8" | "@hf/thebloke/mistral-7b-instruct-v0.1-awq" | "@cf/qwen/qwen1.5-7b-chat-awq" | "@cf/meta/llama-3.2-1b-instruct" | "@hf/thebloke/llama-2-13b-chat-awq" | "@hf/thebloke/deepseek-coder-6.7b-base-awq" | "@cf/meta-llama/llama-2-7b-chat-hf-lora" | "@cf/meta/llama-3.3-70b-instruct-fp8-fast" | "@hf/thebloke/openhermes-2.5-mistral-7b-awq" | "@hf/thebloke/deepseek-coder-6.7b-instruct-awq" | "@cf/qwen/qwen2.5-coder-32b-instruct" | "@cf/deepseek-ai/deepseek-math-7b-instruct" | "@cf/tiiuae/falcon-7b-instruct" | "@hf/nousresearch/hermes-2-pro-mistral-7b" | "@cf/meta/llama-3.1-8b-instruct-awq" | "@hf/thebloke/zephyr-7b-beta-awq" | "@cf/google/gemma-7b-it-lora" | "@cf/qwen/qwen1.5-1.8b-chat" | "@cf/mistralai/mistral-small-3.1-24b-instruct" | "@cf/meta/llama-3-8b-instruct-awq" | "@cf/meta/llama-3.2-11b-vision-instruct" | "@cf/defog/sqlcoder-7b-2" | "@cf/microsoft/phi-2" | "@hf/meta-llama/meta-llama-3-8b-instruct" | "@cf/openai/gpt-oss-20b" | "@hf/google/gemma-7b-it" | "@cf/qwen/qwen1.5-14b-chat-awq" | "@cf/openchat/openchat-3.5-0106" | "@cf/meta/llama-4-scout-17b-16e-instruct" | "@cf/google/gemma-3-12b-it" | "@cf/qwen/qwq-32b", "@cf/openai/gpt-oss-120b" | "@cf/qwen/qwen1.5-0.5b-chat" | "@cf/google/gemma-2b-it-lora" | "@hf/nexusflow/starling-lm-7b-beta" | "@cf/meta/llama-3-8b-instruct" | "@cf/meta/llama-3.2-3b-instruct" | "@hf/thebloke/llamaguard-7b-awq" | "@hf/thebloke/neural-chat-7b-v3-1-awq" | "@cf/meta/llama-guard-3-8b" | "@cf/meta/llama-2-7b-chat-fp16" | "@cf/mistral/mistral-7b-instruct-v0.1" | "@cf/mistral/mistral-7b-instruct-v0.2-lora" | "@cf/tinyllama/tinyllama-1.1b-chat-v1.0" | "@hf/mistral/mistral-7b-instruct-v0.2" | "@cf/fblgit/una-cybertron-7b-v2-bf16" | "@cf/deepseek-ai/deepseek-r1-distill-qwen-32b" | "@cf/thebloke/discolm-german-7b-v1-awq" | "@cf/meta/llama-2-7b-chat-int8" | "@cf/meta/llama-3.1-8b-instruct-fp8" | "@hf/thebloke/mistral-7b-instruct-v0.1-awq" | "@cf/qwen/qwen1.5-7b-chat-awq" | "@cf/meta/llama-3.2-1b-instruct" | "@hf/thebloke/llama-2-13b-chat-awq" | "@hf/thebloke/deepseek-coder-6.7b-base-awq" | "@cf/meta-llama/llama-2-7b-chat-hf-lora" | "@cf/meta/llama-3.3-70b-instruct-fp8-fast" | "@hf/thebloke/openhermes-2.5-mistral-7b-awq" | "@hf/thebloke/deepseek-coder-6.7b-instruct-awq" | "@cf/qwen/qwen2.5-coder-32b-instruct" | "@cf/deepseek-ai/deepseek-math-7b-instruct" | "@cf/tiiuae/falcon-7b-instruct" | "@hf/nousresearch/hermes-2-pro-mistral-7b" | "@cf/meta/llama-3.1-8b-instruct-awq" | "@hf/thebloke/zephyr-7b-beta-awq" | "@cf/google/gemma-7b-it-lora" | "@cf/qwen/qwen1.5-1.8b-chat" | "@cf/mistralai/mistral-small-3.1-24b-instruct" | "@cf/meta/llama-3-8b-instruct-awq" | "@cf/meta/llama-3.2-11b-vision-instruct" | "@cf/defog/sqlcoder-7b-2" | "@cf/microsoft/phi-2" | "@hf/meta-llama/meta-llama-3-8b-instruct" | 
"@cf/openai/gpt-oss-20b" | "@hf/google/gemma-7b-it" | "@cf/qwen/qwen1.5-14b-chat-awq" | "@cf/openchat/openchat-3.5-0106" | "@cf/meta/llama-4-scout-17b-16e-instruct" | "@cf/google/gemma-3-12b-it" | "@cf/qwen/qwq-32b", string, string>;
 }>>;
 registry(args: AiRequestConfig): Promise<import("ai").ProviderRegistryProvider<Readonly<{
 openai: import("@ai-sdk/openai").OpenAIProvider;
@@ -15,6 +15,6 @@ export declare class AiRegistry extends AiBase {
 anthropic: import("@ai-sdk/anthropic").AnthropicProvider;
 custom: import("@ai-sdk/openai-compatible").OpenAICompatibleProvider<string, string, string, string>;
 'google.generative-ai': import("@ai-sdk/google").GoogleGenerativeAIProvider;
-
workersai: import("@ai-sdk/openai-compatible").OpenAICompatibleProvider<"@cf/qwen/qwen1.5-0.5b-chat" | "@cf/google/gemma-2b-it-lora" | "@hf/nexusflow/starling-lm-7b-beta" | "@cf/meta/llama-3-8b-instruct" | "@cf/meta/llama-3.2-3b-instruct" | "@hf/thebloke/llamaguard-7b-awq" | "@hf/thebloke/neural-chat-7b-v3-1-awq" | "@cf/meta/llama-guard-3-8b" | "@cf/meta/llama-2-7b-chat-fp16" | "@cf/mistral/mistral-7b-instruct-v0.1" | "@cf/mistral/mistral-7b-instruct-v0.2-lora" | "@cf/tinyllama/tinyllama-1.1b-chat-v1.0" | "@hf/mistral/mistral-7b-instruct-v0.2" | "@cf/fblgit/una-cybertron-7b-v2-bf16" | "@cf/deepseek-ai/deepseek-r1-distill-qwen-32b" | "@cf/thebloke/discolm-german-7b-v1-awq" | "@cf/meta/llama-2-7b-chat-int8" | "@cf/meta/llama-3.1-8b-instruct-fp8" | "@hf/thebloke/mistral-7b-instruct-v0.1-awq" | "@cf/qwen/qwen1.5-7b-chat-awq" | "@cf/meta/llama-3.2-1b-instruct" | "@hf/thebloke/llama-2-13b-chat-awq" | "@hf/thebloke/deepseek-coder-6.7b-base-awq" | "@cf/meta-llama/llama-2-7b-chat-hf-lora" | "@cf/meta/llama-3.3-70b-instruct-fp8-fast" | "@hf/thebloke/openhermes-2.5-mistral-7b-awq" | "@hf/thebloke/deepseek-coder-6.7b-instruct-awq" | "@cf/qwen/qwen2.5-coder-32b-instruct" | "@cf/deepseek-ai/deepseek-math-7b-instruct" | "@cf/tiiuae/falcon-7b-instruct" | "@hf/nousresearch/hermes-2-pro-mistral-7b" | "@cf/meta/llama-3.1-8b-instruct-awq" | "@hf/thebloke/zephyr-7b-beta-awq" | "@cf/google/gemma-7b-it-lora" | "@cf/qwen/qwen1.5-1.8b-chat" | "@cf/mistralai/mistral-small-3.1-24b-instruct" | "@cf/meta/llama-3-8b-instruct-awq" | "@cf/meta/llama-3.2-11b-vision-instruct" | "@cf/defog/sqlcoder-7b-2" | "@cf/microsoft/phi-2" | "@hf/meta-llama/meta-llama-3-8b-instruct" | "@hf/google/gemma-7b-it" | "@cf/qwen/qwen1.5-14b-chat-awq" | "@cf/openchat/openchat-3.5-0106" | "@cf/meta/llama-4-scout-17b-16e-instruct" | "@cf/google/gemma-3-12b-it" | "@cf/qwen/qwq-32b", "@cf/qwen/qwen1.5-0.5b-chat" | "@cf/google/gemma-2b-it-lora" | "@hf/nexusflow/starling-lm-7b-beta" | "@cf/meta/llama-3-8b-instruct" | "@cf/meta/llama-3.2-3b-instruct" | "@hf/thebloke/llamaguard-7b-awq" | "@hf/thebloke/neural-chat-7b-v3-1-awq" | "@cf/meta/llama-guard-3-8b" | "@cf/meta/llama-2-7b-chat-fp16" | "@cf/mistral/mistral-7b-instruct-v0.1" | "@cf/mistral/mistral-7b-instruct-v0.2-lora" | "@cf/tinyllama/tinyllama-1.1b-chat-v1.0" | "@hf/mistral/mistral-7b-instruct-v0.2" | "@cf/fblgit/una-cybertron-7b-v2-bf16" | "@cf/deepseek-ai/deepseek-r1-distill-qwen-32b" | "@cf/thebloke/discolm-german-7b-v1-awq" | "@cf/meta/llama-2-7b-chat-int8" | "@cf/meta/llama-3.1-8b-instruct-fp8" | "@hf/thebloke/mistral-7b-instruct-v0.1-awq" | "@cf/qwen/qwen1.5-7b-chat-awq" | "@cf/meta/llama-3.2-1b-instruct" | "@hf/thebloke/llama-2-13b-chat-awq" | "@hf/thebloke/deepseek-coder-6.7b-base-awq" | "@cf/meta-llama/llama-2-7b-chat-hf-lora" | "@cf/meta/llama-3.3-70b-instruct-fp8-fast" | "@hf/thebloke/openhermes-2.5-mistral-7b-awq" | "@hf/thebloke/deepseek-coder-6.7b-instruct-awq" | "@cf/qwen/qwen2.5-coder-32b-instruct" | "@cf/deepseek-ai/deepseek-math-7b-instruct" | "@cf/tiiuae/falcon-7b-instruct" | "@hf/nousresearch/hermes-2-pro-mistral-7b" | "@cf/meta/llama-3.1-8b-instruct-awq" | "@hf/thebloke/zephyr-7b-beta-awq" | "@cf/google/gemma-7b-it-lora" | "@cf/qwen/qwen1.5-1.8b-chat" | "@cf/mistralai/mistral-small-3.1-24b-instruct" | "@cf/meta/llama-3-8b-instruct-awq" | "@cf/meta/llama-3.2-11b-vision-instruct" | "@cf/defog/sqlcoder-7b-2" | "@cf/microsoft/phi-2" | "@hf/meta-llama/meta-llama-3-8b-instruct" | "@hf/google/gemma-7b-it" | "@cf/qwen/qwen1.5-14b-chat-awq" | "@cf/openchat/openchat-3.5-0106" | 
"@cf/meta/llama-4-scout-17b-16e-instruct" | "@cf/google/gemma-3-12b-it" | "@cf/qwen/qwq-32b",
+
workersai: import("@ai-sdk/openai-compatible").OpenAICompatibleProvider<"@cf/openai/gpt-oss-120b" | "@cf/qwen/qwen1.5-0.5b-chat" | "@cf/google/gemma-2b-it-lora" | "@hf/nexusflow/starling-lm-7b-beta" | "@cf/meta/llama-3-8b-instruct" | "@cf/meta/llama-3.2-3b-instruct" | "@hf/thebloke/llamaguard-7b-awq" | "@hf/thebloke/neural-chat-7b-v3-1-awq" | "@cf/meta/llama-guard-3-8b" | "@cf/meta/llama-2-7b-chat-fp16" | "@cf/mistral/mistral-7b-instruct-v0.1" | "@cf/mistral/mistral-7b-instruct-v0.2-lora" | "@cf/tinyllama/tinyllama-1.1b-chat-v1.0" | "@hf/mistral/mistral-7b-instruct-v0.2" | "@cf/fblgit/una-cybertron-7b-v2-bf16" | "@cf/deepseek-ai/deepseek-r1-distill-qwen-32b" | "@cf/thebloke/discolm-german-7b-v1-awq" | "@cf/meta/llama-2-7b-chat-int8" | "@cf/meta/llama-3.1-8b-instruct-fp8" | "@hf/thebloke/mistral-7b-instruct-v0.1-awq" | "@cf/qwen/qwen1.5-7b-chat-awq" | "@cf/meta/llama-3.2-1b-instruct" | "@hf/thebloke/llama-2-13b-chat-awq" | "@hf/thebloke/deepseek-coder-6.7b-base-awq" | "@cf/meta-llama/llama-2-7b-chat-hf-lora" | "@cf/meta/llama-3.3-70b-instruct-fp8-fast" | "@hf/thebloke/openhermes-2.5-mistral-7b-awq" | "@hf/thebloke/deepseek-coder-6.7b-instruct-awq" | "@cf/qwen/qwen2.5-coder-32b-instruct" | "@cf/deepseek-ai/deepseek-math-7b-instruct" | "@cf/tiiuae/falcon-7b-instruct" | "@hf/nousresearch/hermes-2-pro-mistral-7b" | "@cf/meta/llama-3.1-8b-instruct-awq" | "@hf/thebloke/zephyr-7b-beta-awq" | "@cf/google/gemma-7b-it-lora" | "@cf/qwen/qwen1.5-1.8b-chat" | "@cf/mistralai/mistral-small-3.1-24b-instruct" | "@cf/meta/llama-3-8b-instruct-awq" | "@cf/meta/llama-3.2-11b-vision-instruct" | "@cf/defog/sqlcoder-7b-2" | "@cf/microsoft/phi-2" | "@hf/meta-llama/meta-llama-3-8b-instruct" | "@cf/openai/gpt-oss-20b" | "@hf/google/gemma-7b-it" | "@cf/qwen/qwen1.5-14b-chat-awq" | "@cf/openchat/openchat-3.5-0106" | "@cf/meta/llama-4-scout-17b-16e-instruct" | "@cf/google/gemma-3-12b-it" | "@cf/qwen/qwq-32b", "@cf/openai/gpt-oss-120b" | "@cf/qwen/qwen1.5-0.5b-chat" | "@cf/google/gemma-2b-it-lora" | "@hf/nexusflow/starling-lm-7b-beta" | "@cf/meta/llama-3-8b-instruct" | "@cf/meta/llama-3.2-3b-instruct" | "@hf/thebloke/llamaguard-7b-awq" | "@hf/thebloke/neural-chat-7b-v3-1-awq" | "@cf/meta/llama-guard-3-8b" | "@cf/meta/llama-2-7b-chat-fp16" | "@cf/mistral/mistral-7b-instruct-v0.1" | "@cf/mistral/mistral-7b-instruct-v0.2-lora" | "@cf/tinyllama/tinyllama-1.1b-chat-v1.0" | "@hf/mistral/mistral-7b-instruct-v0.2" | "@cf/fblgit/una-cybertron-7b-v2-bf16" | "@cf/deepseek-ai/deepseek-r1-distill-qwen-32b" | "@cf/thebloke/discolm-german-7b-v1-awq" | "@cf/meta/llama-2-7b-chat-int8" | "@cf/meta/llama-3.1-8b-instruct-fp8" | "@hf/thebloke/mistral-7b-instruct-v0.1-awq" | "@cf/qwen/qwen1.5-7b-chat-awq" | "@cf/meta/llama-3.2-1b-instruct" | "@hf/thebloke/llama-2-13b-chat-awq" | "@hf/thebloke/deepseek-coder-6.7b-base-awq" | "@cf/meta-llama/llama-2-7b-chat-hf-lora" | "@cf/meta/llama-3.3-70b-instruct-fp8-fast" | "@hf/thebloke/openhermes-2.5-mistral-7b-awq" | "@hf/thebloke/deepseek-coder-6.7b-instruct-awq" | "@cf/qwen/qwen2.5-coder-32b-instruct" | "@cf/deepseek-ai/deepseek-math-7b-instruct" | "@cf/tiiuae/falcon-7b-instruct" | "@hf/nousresearch/hermes-2-pro-mistral-7b" | "@cf/meta/llama-3.1-8b-instruct-awq" | "@hf/thebloke/zephyr-7b-beta-awq" | "@cf/google/gemma-7b-it-lora" | "@cf/qwen/qwen1.5-1.8b-chat" | "@cf/mistralai/mistral-small-3.1-24b-instruct" | "@cf/meta/llama-3-8b-instruct-awq" | "@cf/meta/llama-3.2-11b-vision-instruct" | "@cf/defog/sqlcoder-7b-2" | "@cf/microsoft/phi-2" | "@hf/meta-llama/meta-llama-3-8b-instruct" | 
"@cf/openai/gpt-oss-20b" | "@hf/google/gemma-7b-it" | "@cf/qwen/qwen1.5-14b-chat-awq" | "@cf/openchat/openchat-3.5-0106" | "@cf/meta/llama-4-scout-17b-16e-instruct" | "@cf/google/gemma-3-12b-it" | "@cf/qwen/qwq-32b", string, string>;
 }>, ":">>;
 }
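The registry declaration picks up the same widened Workers AI model union; provider key and model id are joined with ':' per the ProviderRegistryProvider separator shown above. A lookup sketch, with the same construction caveats as the earlier examples:

```ts
import { generateText } from 'ai';
import type { AiRegistry } from '@chainfuse/ai-tools'; // assumed entry point

declare const aiRegistry: AiRegistry;
declare const args: Parameters<AiRegistry['registry']>[0];

const registry = await aiRegistry.registry(args);

// 'workersai' is the provider key and ':' the separator from the declaration above;
// the gpt-oss ids resolve only from 0.13.28 onwards.
const model = registry.languageModel('workersai:@cf/openai/gpt-oss-120b');
const { text } = await generateText({ model, prompt: 'Hello from Workers AI.' });
```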
package/package.json
CHANGED
@@ -1,6 +1,6 @@
 {
 "name": "@chainfuse/ai-tools",
-"version": "0.13.
+"version": "0.13.28",
 "description": "",
 "author": "ChainFuse",
 "homepage": "https://github.com/ChainFuse/packages/tree/main/packages/ai-tools#readme",
@@ -48,21 +48,22 @@
 },
 "prettier": "@demosjarco/prettier-config",
 "dependencies": {
-"@ai-sdk/anthropic": "^
-"@ai-sdk/azure": "^
-"@ai-sdk/google": "^
-"@ai-sdk/openai": "^
-"@ai-sdk/openai-compatible": "^0.2
-"@chainfuse/helpers": "^3.4.
-"@chainfuse/types": "^2.10.
-"ai": "^
-"chalk": "^5.
+"@ai-sdk/anthropic": "^2.0.1",
+"@ai-sdk/azure": "^2.0.5",
+"@ai-sdk/google": "^2.0.3",
+"@ai-sdk/openai": "^2.0.5",
+"@ai-sdk/openai-compatible": "^1.0.2",
+"@chainfuse/helpers": "^3.4.8",
+"@chainfuse/types": "^2.10.29",
+"ai": "^5.0.8",
+"chalk": "^5.5.0",
 "haversine-distance": "^1.2.4",
 "workers-ai-provider": "^0.7.2"
 },
 "devDependencies": {
-"@
-"
+"@ai-sdk/provider": "^2.0.0",
+"@cloudflare/workers-types": "^4.20250807.0",
+"openai": "^5.12.1"
 },
-"gitHead": "
+"gitHead": "541693735b92bd92dc0d6c77235bcebe4ac62741"
 }