@chainfuse/ai-tools 0.11.1 → 0.11.2
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/models.mjs +6 -1
- package/dist/providers/customProviders.d.mts +2 -2
- package/dist/providers/customProviders.mjs +9 -5
- package/dist/providers/rawProviders.d.mts +1 -1
- package/dist/registry.d.mts +10 -3
- package/dist/registry.mjs +1 -1
- package/dist/serverSelector.d.mts +1 -1
- package/package.json +9 -9
package/dist/models.mjs
CHANGED
@@ -4,12 +4,17 @@ export class AiModel extends AiBase {
         return import('./registry.mjs')
             .then(({ AiRegistry }) => new AiRegistry(this.config).registry(args))
             .then((registry) => import('ai').then(({ extractReasoningMiddleware, wrapLanguageModel }) => wrapLanguageModel({
+            // @ts-expect-error types are or-ed, but correct
             model: registry.languageModel(model ? `${modelOrProvider}:${model}` : modelOrProvider),
             middleware: [extractReasoningMiddleware({ tagName: 'think' }), this.middleware],
         })));
     }
     wrappedTextEmbeddingModel(args, modelOrProvider, model) {
-        return import('./registry.mjs')
+        return import('./registry.mjs')
+            .then(({ AiRegistry }) => new AiRegistry(this.config).registry(args))
+            .then((registry) =>
+            // @ts-expect-error types are or-ed, but correct
+            registry.textEmbeddingModel(model ? `${modelOrProvider}:${model}` : modelOrProvider));
     }
     get middleware() {
         return {};
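Both wrapped methods now resolve their model through the AI SDK provider registry using a 'provider:model' id, and the language-model path wraps the result with extractReasoningMiddleware so <think> content is split out as reasoning. A minimal, self-contained sketch of that wrapping pattern (not this package's code; the openai provider and 'gpt-4o-mini' id are placeholders):

import { extractReasoningMiddleware, generateText, wrapLanguageModel } from 'ai';
import { openai } from '@ai-sdk/openai';

// Placeholder model; the package resolves its model via registry.languageModel('provider:model') instead.
const model = wrapLanguageModel({
    model: openai('gpt-4o-mini'),
    // Moves text wrapped in <think>…</think> tags into the `reasoning` field of the result.
    middleware: extractReasoningMiddleware({ tagName: 'think' }),
});

const { text, reasoning } = await generateText({ model, prompt: 'Briefly explain unified diffs.' });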
package/dist/providers/customProviders.d.mts
CHANGED
@@ -8,7 +8,7 @@ export declare class AiCustomProviders extends AiBase {
     azOpenai(args: AiRequestConfig, filteredServers?: AzureServers): Promise<AzureOpenAIProvider>;
     anthropic(args: AiRequestConfig): Promise<import("@ai-sdk/anthropic").AnthropicProvider>;
     private static workersAiIsRest;
-    cfWorkersAi(args: AiRequestConfig): Promise<WorkersAIProvider | OpenAICompatibleProvider<"@cf/qwen/qwen1.5-0.5b-chat" | "@cf/google/gemma-2b-it-lora" | "@hf/nexusflow/starling-lm-7b-beta" | "@cf/meta/llama-3-8b-instruct" | "@cf/meta/llama-3.2-3b-instruct" | "@hf/thebloke/llamaguard-7b-awq" | "@hf/thebloke/neural-chat-7b-v3-1-awq" | "@cf/meta/llama-guard-3-8b" | "@cf/meta/llama-2-7b-chat-fp16" | "@cf/mistral/mistral-7b-instruct-v0.1" | "@cf/mistral/mistral-7b-instruct-v0.2-lora" | "@cf/tinyllama/tinyllama-1.1b-chat-v1.0" | "@hf/mistral/mistral-7b-instruct-v0.2" | "@cf/fblgit/una-cybertron-7b-v2-bf16" | "@cf/deepseek-ai/deepseek-r1-distill-qwen-32b" | "@cf/thebloke/discolm-german-7b-v1-awq" | "@cf/meta/llama-2-7b-chat-int8" | "@cf/meta/llama-3.1-8b-instruct-fp8" | "@hf/thebloke/mistral-7b-instruct-v0.1-awq" | "@cf/qwen/qwen1.5-7b-chat-awq" | "@cf/meta/llama-3.2-1b-instruct" | "@hf/thebloke/llama-2-13b-chat-awq" | "@hf/thebloke/deepseek-coder-6.7b-base-awq" | "@cf/meta-llama/llama-2-7b-chat-hf-lora" | "@cf/meta/llama-3.3-70b-instruct-fp8-fast" | "@hf/thebloke/openhermes-2.5-mistral-7b-awq" | "@hf/thebloke/deepseek-coder-6.7b-instruct-awq" | "@cf/deepseek-ai/deepseek-math-7b-instruct" | "@cf/tiiuae/falcon-7b-instruct" | "@hf/nousresearch/hermes-2-pro-mistral-7b" | "@cf/meta/llama-3.1-8b-instruct" | "@cf/meta/llama-3.1-8b-instruct-awq" | "@hf/thebloke/zephyr-7b-beta-awq" | "@cf/google/gemma-7b-it-lora" | "@cf/qwen/qwen1.5-1.8b-chat" | "@cf/meta/llama-3-8b-instruct-awq" | "@cf/meta/llama-3.2-11b-vision-instruct" | "@cf/defog/sqlcoder-7b-2" | "@cf/microsoft/phi-2" | "@hf/meta-llama/meta-llama-3-8b-instruct" | "@hf/google/gemma-7b-it" | "@cf/qwen/qwen1.5-14b-chat-awq" | "@cf/openchat/openchat-3.5-0106", "@cf/qwen/qwen1.5-0.5b-chat" | "@cf/google/gemma-2b-it-lora" | "@hf/nexusflow/starling-lm-7b-beta" | "@cf/meta/llama-3-8b-instruct" | "@cf/meta/llama-3.2-3b-instruct" | "@hf/thebloke/llamaguard-7b-awq" | "@hf/thebloke/neural-chat-7b-v3-1-awq" | "@cf/meta/llama-guard-3-8b" | "@cf/meta/llama-2-7b-chat-fp16" | "@cf/mistral/mistral-7b-instruct-v0.1" | "@cf/mistral/mistral-7b-instruct-v0.2-lora" | "@cf/tinyllama/tinyllama-1.1b-chat-v1.0" | "@hf/mistral/mistral-7b-instruct-v0.2" | "@cf/fblgit/una-cybertron-7b-v2-bf16" | "@cf/deepseek-ai/deepseek-r1-distill-qwen-32b" | "@cf/thebloke/discolm-german-7b-v1-awq" | "@cf/meta/llama-2-7b-chat-int8" | "@cf/meta/llama-3.1-8b-instruct-fp8" | "@hf/thebloke/mistral-7b-instruct-v0.1-awq" | "@cf/qwen/qwen1.5-7b-chat-awq" | "@cf/meta/llama-3.2-1b-instruct" | "@hf/thebloke/llama-2-13b-chat-awq" | "@hf/thebloke/deepseek-coder-6.7b-base-awq" | "@cf/meta-llama/llama-2-7b-chat-hf-lora" | "@cf/meta/llama-3.3-70b-instruct-fp8-fast" | "@hf/thebloke/openhermes-2.5-mistral-7b-awq" | "@hf/thebloke/deepseek-coder-6.7b-instruct-awq" | "@cf/deepseek-ai/deepseek-math-7b-instruct" | "@cf/tiiuae/falcon-7b-instruct" | "@hf/nousresearch/hermes-2-pro-mistral-7b" | "@cf/meta/llama-3.1-8b-instruct" | "@cf/meta/llama-3.1-8b-instruct-awq" | "@hf/thebloke/zephyr-7b-beta-awq" | "@cf/google/gemma-7b-it-lora" | "@cf/qwen/qwen1.5-1.8b-chat" | "@cf/meta/llama-3-8b-instruct-awq" | "@cf/meta/llama-3.2-11b-vision-instruct" | "@cf/defog/sqlcoder-7b-2" | "@cf/microsoft/phi-2" | "@hf/meta-llama/meta-llama-3-8b-instruct" | "@hf/google/gemma-7b-it" | "@cf/qwen/qwen1.5-14b-chat-awq" | "@cf/openchat/openchat-3.5-0106", "@cf/baai/bge-m3" | "@cf/baai/bge-small-en-v1.5" | "@cf/baai/bge-base-en-v1.5" | "@cf/baai/bge-large-en-v1.5">>;
-    custom(args: AiRequestConfig): Promise<OpenAICompatibleProvider<string, string, string>>;
+    cfWorkersAi(args: AiRequestConfig): Promise<WorkersAIProvider | OpenAICompatibleProvider<"@cf/qwen/qwen1.5-0.5b-chat" | "@cf/google/gemma-2b-it-lora" | "@hf/nexusflow/starling-lm-7b-beta" | "@cf/meta/llama-3-8b-instruct" | "@cf/meta/llama-3.2-3b-instruct" | "@hf/thebloke/llamaguard-7b-awq" | "@hf/thebloke/neural-chat-7b-v3-1-awq" | "@cf/meta/llama-guard-3-8b" | "@cf/meta/llama-2-7b-chat-fp16" | "@cf/mistral/mistral-7b-instruct-v0.1" | "@cf/mistral/mistral-7b-instruct-v0.2-lora" | "@cf/tinyllama/tinyllama-1.1b-chat-v1.0" | "@hf/mistral/mistral-7b-instruct-v0.2" | "@cf/fblgit/una-cybertron-7b-v2-bf16" | "@cf/deepseek-ai/deepseek-r1-distill-qwen-32b" | "@cf/thebloke/discolm-german-7b-v1-awq" | "@cf/meta/llama-2-7b-chat-int8" | "@cf/meta/llama-3.1-8b-instruct-fp8" | "@hf/thebloke/mistral-7b-instruct-v0.1-awq" | "@cf/qwen/qwen1.5-7b-chat-awq" | "@cf/meta/llama-3.2-1b-instruct" | "@hf/thebloke/llama-2-13b-chat-awq" | "@hf/thebloke/deepseek-coder-6.7b-base-awq" | "@cf/meta-llama/llama-2-7b-chat-hf-lora" | "@cf/meta/llama-3.3-70b-instruct-fp8-fast" | "@hf/thebloke/openhermes-2.5-mistral-7b-awq" | "@hf/thebloke/deepseek-coder-6.7b-instruct-awq" | "@cf/deepseek-ai/deepseek-math-7b-instruct" | "@cf/tiiuae/falcon-7b-instruct" | "@hf/nousresearch/hermes-2-pro-mistral-7b" | "@cf/meta/llama-3.1-8b-instruct" | "@cf/meta/llama-3.1-8b-instruct-awq" | "@hf/thebloke/zephyr-7b-beta-awq" | "@cf/google/gemma-7b-it-lora" | "@cf/qwen/qwen1.5-1.8b-chat" | "@cf/meta/llama-3-8b-instruct-awq" | "@cf/meta/llama-3.2-11b-vision-instruct" | "@cf/defog/sqlcoder-7b-2" | "@cf/microsoft/phi-2" | "@hf/meta-llama/meta-llama-3-8b-instruct" | "@hf/google/gemma-7b-it" | "@cf/qwen/qwen1.5-14b-chat-awq" | "@cf/openchat/openchat-3.5-0106", "@cf/qwen/qwen1.5-0.5b-chat" | "@cf/google/gemma-2b-it-lora" | "@hf/nexusflow/starling-lm-7b-beta" | "@cf/meta/llama-3-8b-instruct" | "@cf/meta/llama-3.2-3b-instruct" | "@hf/thebloke/llamaguard-7b-awq" | "@hf/thebloke/neural-chat-7b-v3-1-awq" | "@cf/meta/llama-guard-3-8b" | "@cf/meta/llama-2-7b-chat-fp16" | "@cf/mistral/mistral-7b-instruct-v0.1" | "@cf/mistral/mistral-7b-instruct-v0.2-lora" | "@cf/tinyllama/tinyllama-1.1b-chat-v1.0" | "@hf/mistral/mistral-7b-instruct-v0.2" | "@cf/fblgit/una-cybertron-7b-v2-bf16" | "@cf/deepseek-ai/deepseek-r1-distill-qwen-32b" | "@cf/thebloke/discolm-german-7b-v1-awq" | "@cf/meta/llama-2-7b-chat-int8" | "@cf/meta/llama-3.1-8b-instruct-fp8" | "@hf/thebloke/mistral-7b-instruct-v0.1-awq" | "@cf/qwen/qwen1.5-7b-chat-awq" | "@cf/meta/llama-3.2-1b-instruct" | "@hf/thebloke/llama-2-13b-chat-awq" | "@hf/thebloke/deepseek-coder-6.7b-base-awq" | "@cf/meta-llama/llama-2-7b-chat-hf-lora" | "@cf/meta/llama-3.3-70b-instruct-fp8-fast" | "@hf/thebloke/openhermes-2.5-mistral-7b-awq" | "@hf/thebloke/deepseek-coder-6.7b-instruct-awq" | "@cf/deepseek-ai/deepseek-math-7b-instruct" | "@cf/tiiuae/falcon-7b-instruct" | "@hf/nousresearch/hermes-2-pro-mistral-7b" | "@cf/meta/llama-3.1-8b-instruct" | "@cf/meta/llama-3.1-8b-instruct-awq" | "@hf/thebloke/zephyr-7b-beta-awq" | "@cf/google/gemma-7b-it-lora" | "@cf/qwen/qwen1.5-1.8b-chat" | "@cf/meta/llama-3-8b-instruct-awq" | "@cf/meta/llama-3.2-11b-vision-instruct" | "@cf/defog/sqlcoder-7b-2" | "@cf/microsoft/phi-2" | "@hf/meta-llama/meta-llama-3-8b-instruct" | "@hf/google/gemma-7b-it" | "@cf/qwen/qwen1.5-14b-chat-awq" | "@cf/openchat/openchat-3.5-0106", "@cf/baai/bge-m3" | "@cf/baai/bge-small-en-v1.5" | "@cf/baai/bge-base-en-v1.5" | "@cf/baai/bge-large-en-v1.5", string>>;
+    custom(args: AiRequestConfig): Promise<OpenAICompatibleProvider<string, string, string, string>>;
     googleAi(args: AiRequestConfig): Promise<GoogleGenerativeAIProvider>;
 }
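The only change in these declarations is OpenAICompatibleProvider picking up a fourth type parameter (string in the untyped custom case), matching the wider generic signature in recent @ai-sdk/openai-compatible releases. A hedged sketch of building such a provider; the name, baseURL, and model ids below are placeholders, not values from this package:

import { createOpenAICompatible } from '@ai-sdk/openai-compatible';

// Any OpenAI-compatible endpoint works here; this URL is illustrative only.
const custom = createOpenAICompatible({
    name: 'custom',
    baseURL: 'https://example.com/v1',
});

// With free-form string ids, this provider satisfies OpenAICompatibleProvider<string, string, string, string>.
const chatModel = custom.chatModel('some-chat-model');
const embeddingModel = custom.textEmbeddingModel('some-embedding-model');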
package/dist/providers/customProviders.mjs
CHANGED
@@ -10,8 +10,7 @@ export class AiCustomProviders extends AiBase {
         return new AiRawProviders(this.config).oaiOpenai(args);
     }
     async azOpenai(args, filteredServers) {
-
-        filteredServers = await new ServerSelector(this.config).closestServers(await import('@chainfuse/types/ai-tools/catalog/azure').then(({ azureCatalog }) => azureCatalog));
+        filteredServers ??= await new ServerSelector(this.config).closestServers(await import('@chainfuse/types/ai-tools/catalog/azure').then(({ azureCatalog }) => azureCatalog));
         const [server, ...servers] = filteredServers;
         const raw = new AiRawProviders(this.config);
         return customProvider({
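The substantive fix here is swapping plain assignment for ??=, so an explicitly passed filteredServers list is no longer clobbered by the geo-based lookup. The same pattern in isolation, with hypothetical names standing in for the ServerSelector call:

// Hypothetical stand-ins; selectClosestServers represents the ServerSelector round trip.
interface Server { host: string }
declare function selectClosestServers(): Promise<Server[]>;

async function pickServers(filteredServers?: Server[]): Promise<Server[]> {
    // ??= assigns only when the argument is null or undefined,
    // so a caller-provided list skips the expensive lookup entirely.
    filteredServers ??= await selectClosestServers();
    return filteredServers;
}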
@@ -20,7 +19,12 @@ export class AiCustomProviders extends AiBase {
             const acc = await accPromise;
             // @ts-expect-error override for types
             acc[model.name] = wrapLanguageModel({
-                model: (await raw.azOpenai(args, server, 'inputTokenCost' in model || 'outputTokenCost' in model
+                model: (await raw.azOpenai(args, server, 'inputTokenCost' in model || 'outputTokenCost' in model
+                    ? {
+                        inputTokenCost: 'inputTokenCost' in model && !isNaN(model.inputTokenCost) ? model.inputTokenCost : undefined,
+                        outputTokenCost: 'outputTokenCost' in model && !isNaN(model.outputTokenCost) ? model.outputTokenCost : undefined,
+                    }
+                    : undefined))(model.name),
                 middleware: {
                     wrapGenerate: async ({ doGenerate, model, params }) => {
                         try {
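The new cost object is only built when the catalog entry carries cost fields, and each field is kept only when it is a real number. A small sketch of that guard with a hypothetical catalog record (the package's own code checks 'inputTokenCost' in model before calling isNaN):

// Hypothetical catalog entry; cost fields may be missing or NaN.
interface ModelEntry {
    name: string;
    inputTokenCost?: number;
    outputTokenCost?: number;
}

function tokenCosts(model: ModelEntry) {
    return {
        // Keep a cost only when it is present and a valid number; anything else becomes undefined.
        inputTokenCost: typeof model.inputTokenCost === 'number' && !isNaN(model.inputTokenCost) ? model.inputTokenCost : undefined,
        outputTokenCost: typeof model.outputTokenCost === 'number' && !isNaN(model.outputTokenCost) ? model.outputTokenCost : undefined,
    };
}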
@@ -47,8 +51,8 @@ export class AiCustomProviders extends AiBase {
                         const foundModel = server.languageModelAvailability.find((languageModel) => languageModel.name === model.modelId);
                         if (foundModel && ('inputTokenCost' in foundModel || 'outputTokenCost' in foundModel)) {
                             return {
-                                inputTokenCost:
-                                outputTokenCost:
+                                inputTokenCost: 'inputTokenCost' in model && !isNaN(model.inputTokenCost) ? model.inputTokenCost : undefined,
+                                outputTokenCost: 'outputTokenCost' in model && !isNaN(model.outputTokenCost) ? model.outputTokenCost : undefined,
                             };
                         }
                         else {
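This hunk sits inside a wrapGenerate middleware that, after each generate call, looks up the served model in the Azure catalog and applies the same NaN guard to its per-token costs. A rough sketch of that middleware shape, assuming the ai SDK's LanguageModelV1Middleware type; the cost table and model id are hypothetical:

import { wrapLanguageModel, type LanguageModelV1Middleware } from 'ai';
import { openai } from '@ai-sdk/openai';

// Hypothetical per-model cost table standing in for server.languageModelAvailability.
const costTable: Record<string, { inputTokenCost?: number; outputTokenCost?: number }> = {
    'gpt-4o-mini': { inputTokenCost: 0.15, outputTokenCost: 0.6 },
};

const costMiddleware: LanguageModelV1Middleware = {
    wrapGenerate: async ({ doGenerate, model }) => {
        const result = await doGenerate();
        const costs = costTable[model.modelId];
        if (costs) {
            // Illustration only: surface the looked-up costs next to token usage.
            console.log(model.modelId, result.usage, costs);
        }
        return result;
    },
};

const model = wrapLanguageModel({ model: openai('gpt-4o-mini'), middleware: costMiddleware });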
package/dist/providers/rawProviders.d.mts
CHANGED
@@ -12,7 +12,7 @@ export declare class AiRawProviders extends AiBase {
         outputTokenCost?: number;
     }): Promise<import("@ai-sdk/azure").AzureOpenAIProvider>;
     anthropic(args: AiRequestConfig): Promise<import("@ai-sdk/anthropic").AnthropicProvider>;
-    custom(args: AiRequestConfig): Promise<OpenAICompatibleProvider<string, string, string>>;
+    custom(args: AiRequestConfig): Promise<OpenAICompatibleProvider<string, string, string, string>>;
     googleAi(args: AiRequestConfig): Promise<import("@ai-sdk/google").GoogleGenerativeAIProvider>;
     restWorkersAi(args: AiRequestConfig): Promise<OpenAICompatibleProvider<cloudflareModelPossibilities<'Text Generation'>, cloudflareModelPossibilities<'Text Generation'>, cloudflareModelPossibilities<'Text Embeddings'>>>;
     bindingWorkersAi(args: AiRequestConfig): Promise<WorkersAIProvider>;
package/dist/registry.d.mts
CHANGED
@@ -5,9 +5,16 @@ export declare class AiRegistry extends AiBase {
     openai: import("@ai-sdk/openai").OpenAIProvider;
     azure: import("./providers/types.mts").AzureOpenAIProvider;
     anthropic: import("@ai-sdk/anthropic").AnthropicProvider;
-    custom: import("@ai-sdk/openai-compatible").OpenAICompatibleProvider<string, string, string>;
+    custom: import("@ai-sdk/openai-compatible").OpenAICompatibleProvider<string, string, string, string>;
     'google.generative-ai': import("@ai-sdk/google").GoogleGenerativeAIProvider;
-    workersai: import("./providers/types.mts").WorkersAIProvider | import("@ai-sdk/openai-compatible").OpenAICompatibleProvider<"@cf/qwen/qwen1.5-0.5b-chat" | "@cf/google/gemma-2b-it-lora" | "@hf/nexusflow/starling-lm-7b-beta" | "@cf/meta/llama-3-8b-instruct" | "@cf/meta/llama-3.2-3b-instruct" | "@hf/thebloke/llamaguard-7b-awq" | "@hf/thebloke/neural-chat-7b-v3-1-awq" | "@cf/meta/llama-guard-3-8b" | "@cf/meta/llama-2-7b-chat-fp16" | "@cf/mistral/mistral-7b-instruct-v0.1" | "@cf/mistral/mistral-7b-instruct-v0.2-lora" | "@cf/tinyllama/tinyllama-1.1b-chat-v1.0" | "@hf/mistral/mistral-7b-instruct-v0.2" | "@cf/fblgit/una-cybertron-7b-v2-bf16" | "@cf/deepseek-ai/deepseek-r1-distill-qwen-32b" | "@cf/thebloke/discolm-german-7b-v1-awq" | "@cf/meta/llama-2-7b-chat-int8" | "@cf/meta/llama-3.1-8b-instruct-fp8" | "@hf/thebloke/mistral-7b-instruct-v0.1-awq" | "@cf/qwen/qwen1.5-7b-chat-awq" | "@cf/meta/llama-3.2-1b-instruct" | "@hf/thebloke/llama-2-13b-chat-awq" | "@hf/thebloke/deepseek-coder-6.7b-base-awq" | "@cf/meta-llama/llama-2-7b-chat-hf-lora" | "@cf/meta/llama-3.3-70b-instruct-fp8-fast" | "@hf/thebloke/openhermes-2.5-mistral-7b-awq" | "@hf/thebloke/deepseek-coder-6.7b-instruct-awq" | "@cf/deepseek-ai/deepseek-math-7b-instruct" | "@cf/tiiuae/falcon-7b-instruct" | "@hf/nousresearch/hermes-2-pro-mistral-7b" | "@cf/meta/llama-3.1-8b-instruct" | "@cf/meta/llama-3.1-8b-instruct-awq" | "@hf/thebloke/zephyr-7b-beta-awq" | "@cf/google/gemma-7b-it-lora" | "@cf/qwen/qwen1.5-1.8b-chat" | "@cf/meta/llama-3-8b-instruct-awq" | "@cf/meta/llama-3.2-11b-vision-instruct" | "@cf/defog/sqlcoder-7b-2" | "@cf/microsoft/phi-2" | "@hf/meta-llama/meta-llama-3-8b-instruct" | "@hf/google/gemma-7b-it" | "@cf/qwen/qwen1.5-14b-chat-awq" | "@cf/openchat/openchat-3.5-0106", "@cf/qwen/qwen1.5-0.5b-chat" | "@cf/google/gemma-2b-it-lora" | "@hf/nexusflow/starling-lm-7b-beta" | "@cf/meta/llama-3-8b-instruct" | "@cf/meta/llama-3.2-3b-instruct" | "@hf/thebloke/llamaguard-7b-awq" | "@hf/thebloke/neural-chat-7b-v3-1-awq" | "@cf/meta/llama-guard-3-8b" | "@cf/meta/llama-2-7b-chat-fp16" | "@cf/mistral/mistral-7b-instruct-v0.1" | "@cf/mistral/mistral-7b-instruct-v0.2-lora" | "@cf/tinyllama/tinyllama-1.1b-chat-v1.0" | "@hf/mistral/mistral-7b-instruct-v0.2" | "@cf/fblgit/una-cybertron-7b-v2-bf16" | "@cf/deepseek-ai/deepseek-r1-distill-qwen-32b" | "@cf/thebloke/discolm-german-7b-v1-awq" | "@cf/meta/llama-2-7b-chat-int8" | "@cf/meta/llama-3.1-8b-instruct-fp8" | "@hf/thebloke/mistral-7b-instruct-v0.1-awq" | "@cf/qwen/qwen1.5-7b-chat-awq" | "@cf/meta/llama-3.2-1b-instruct" | "@hf/thebloke/llama-2-13b-chat-awq" | "@hf/thebloke/deepseek-coder-6.7b-base-awq" | "@cf/meta-llama/llama-2-7b-chat-hf-lora" | "@cf/meta/llama-3.3-70b-instruct-fp8-fast" | "@hf/thebloke/openhermes-2.5-mistral-7b-awq" | "@hf/thebloke/deepseek-coder-6.7b-instruct-awq" | "@cf/deepseek-ai/deepseek-math-7b-instruct" | "@cf/tiiuae/falcon-7b-instruct" | "@hf/nousresearch/hermes-2-pro-mistral-7b" | "@cf/meta/llama-3.1-8b-instruct" | "@cf/meta/llama-3.1-8b-instruct-awq" | "@hf/thebloke/zephyr-7b-beta-awq" | "@cf/google/gemma-7b-it-lora" | "@cf/qwen/qwen1.5-1.8b-chat" | "@cf/meta/llama-3-8b-instruct-awq" | "@cf/meta/llama-3.2-11b-vision-instruct" | "@cf/defog/sqlcoder-7b-2" | "@cf/microsoft/phi-2" | "@hf/meta-llama/meta-llama-3-8b-instruct" | "@hf/google/gemma-7b-it" | "@cf/qwen/qwen1.5-14b-chat-awq" | "@cf/openchat/openchat-3.5-0106", "@cf/baai/bge-m3" | "@cf/baai/bge-small-en-v1.5" | "@cf/baai/bge-base-en-v1.5" | "@cf/baai/bge-large-en-v1.5">;
+    workersai: import("./providers/types.mts").WorkersAIProvider | import("@ai-sdk/openai-compatible").OpenAICompatibleProvider<"@cf/qwen/qwen1.5-0.5b-chat" | "@cf/google/gemma-2b-it-lora" | "@hf/nexusflow/starling-lm-7b-beta" | "@cf/meta/llama-3-8b-instruct" | "@cf/meta/llama-3.2-3b-instruct" | "@hf/thebloke/llamaguard-7b-awq" | "@hf/thebloke/neural-chat-7b-v3-1-awq" | "@cf/meta/llama-guard-3-8b" | "@cf/meta/llama-2-7b-chat-fp16" | "@cf/mistral/mistral-7b-instruct-v0.1" | "@cf/mistral/mistral-7b-instruct-v0.2-lora" | "@cf/tinyllama/tinyllama-1.1b-chat-v1.0" | "@hf/mistral/mistral-7b-instruct-v0.2" | "@cf/fblgit/una-cybertron-7b-v2-bf16" | "@cf/deepseek-ai/deepseek-r1-distill-qwen-32b" | "@cf/thebloke/discolm-german-7b-v1-awq" | "@cf/meta/llama-2-7b-chat-int8" | "@cf/meta/llama-3.1-8b-instruct-fp8" | "@hf/thebloke/mistral-7b-instruct-v0.1-awq" | "@cf/qwen/qwen1.5-7b-chat-awq" | "@cf/meta/llama-3.2-1b-instruct" | "@hf/thebloke/llama-2-13b-chat-awq" | "@hf/thebloke/deepseek-coder-6.7b-base-awq" | "@cf/meta-llama/llama-2-7b-chat-hf-lora" | "@cf/meta/llama-3.3-70b-instruct-fp8-fast" | "@hf/thebloke/openhermes-2.5-mistral-7b-awq" | "@hf/thebloke/deepseek-coder-6.7b-instruct-awq" | "@cf/deepseek-ai/deepseek-math-7b-instruct" | "@cf/tiiuae/falcon-7b-instruct" | "@hf/nousresearch/hermes-2-pro-mistral-7b" | "@cf/meta/llama-3.1-8b-instruct" | "@cf/meta/llama-3.1-8b-instruct-awq" | "@hf/thebloke/zephyr-7b-beta-awq" | "@cf/google/gemma-7b-it-lora" | "@cf/qwen/qwen1.5-1.8b-chat" | "@cf/meta/llama-3-8b-instruct-awq" | "@cf/meta/llama-3.2-11b-vision-instruct" | "@cf/defog/sqlcoder-7b-2" | "@cf/microsoft/phi-2" | "@hf/meta-llama/meta-llama-3-8b-instruct" | "@hf/google/gemma-7b-it" | "@cf/qwen/qwen1.5-14b-chat-awq" | "@cf/openchat/openchat-3.5-0106", "@cf/qwen/qwen1.5-0.5b-chat" | "@cf/google/gemma-2b-it-lora" | "@hf/nexusflow/starling-lm-7b-beta" | "@cf/meta/llama-3-8b-instruct" | "@cf/meta/llama-3.2-3b-instruct" | "@hf/thebloke/llamaguard-7b-awq" | "@hf/thebloke/neural-chat-7b-v3-1-awq" | "@cf/meta/llama-guard-3-8b" | "@cf/meta/llama-2-7b-chat-fp16" | "@cf/mistral/mistral-7b-instruct-v0.1" | "@cf/mistral/mistral-7b-instruct-v0.2-lora" | "@cf/tinyllama/tinyllama-1.1b-chat-v1.0" | "@hf/mistral/mistral-7b-instruct-v0.2" | "@cf/fblgit/una-cybertron-7b-v2-bf16" | "@cf/deepseek-ai/deepseek-r1-distill-qwen-32b" | "@cf/thebloke/discolm-german-7b-v1-awq" | "@cf/meta/llama-2-7b-chat-int8" | "@cf/meta/llama-3.1-8b-instruct-fp8" | "@hf/thebloke/mistral-7b-instruct-v0.1-awq" | "@cf/qwen/qwen1.5-7b-chat-awq" | "@cf/meta/llama-3.2-1b-instruct" | "@hf/thebloke/llama-2-13b-chat-awq" | "@hf/thebloke/deepseek-coder-6.7b-base-awq" | "@cf/meta-llama/llama-2-7b-chat-hf-lora" | "@cf/meta/llama-3.3-70b-instruct-fp8-fast" | "@hf/thebloke/openhermes-2.5-mistral-7b-awq" | "@hf/thebloke/deepseek-coder-6.7b-instruct-awq" | "@cf/deepseek-ai/deepseek-math-7b-instruct" | "@cf/tiiuae/falcon-7b-instruct" | "@hf/nousresearch/hermes-2-pro-mistral-7b" | "@cf/meta/llama-3.1-8b-instruct" | "@cf/meta/llama-3.1-8b-instruct-awq" | "@hf/thebloke/zephyr-7b-beta-awq" | "@cf/google/gemma-7b-it-lora" | "@cf/qwen/qwen1.5-1.8b-chat" | "@cf/meta/llama-3-8b-instruct-awq" | "@cf/meta/llama-3.2-11b-vision-instruct" | "@cf/defog/sqlcoder-7b-2" | "@cf/microsoft/phi-2" | "@hf/meta-llama/meta-llama-3-8b-instruct" | "@hf/google/gemma-7b-it" | "@cf/qwen/qwen1.5-14b-chat-awq" | "@cf/openchat/openchat-3.5-0106", "@cf/baai/bge-m3" | "@cf/baai/bge-small-en-v1.5" | "@cf/baai/bge-base-en-v1.5" | "@cf/baai/bge-large-en-v1.5", string>;
     }>>;
-    registry(args: AiRequestConfig): Promise<import("ai").
+    registry(args: AiRequestConfig): Promise<import("ai").ProviderRegistryProvider<Readonly<{
+        openai: import("@ai-sdk/openai").OpenAIProvider;
+        azure: import("./providers/types.mts").AzureOpenAIProvider;
+        anthropic: import("@ai-sdk/anthropic").AnthropicProvider;
+        custom: import("@ai-sdk/openai-compatible").OpenAICompatibleProvider<string, string, string, string>;
+        'google.generative-ai': import("@ai-sdk/google").GoogleGenerativeAIProvider;
+        workersai: import("./providers/types.mts").WorkersAIProvider | import("@ai-sdk/openai-compatible").OpenAICompatibleProvider<"@cf/qwen/qwen1.5-0.5b-chat" | "@cf/google/gemma-2b-it-lora" | "@hf/nexusflow/starling-lm-7b-beta" | "@cf/meta/llama-3-8b-instruct" | "@cf/meta/llama-3.2-3b-instruct" | "@hf/thebloke/llamaguard-7b-awq" | "@hf/thebloke/neural-chat-7b-v3-1-awq" | "@cf/meta/llama-guard-3-8b" | "@cf/meta/llama-2-7b-chat-fp16" | "@cf/mistral/mistral-7b-instruct-v0.1" | "@cf/mistral/mistral-7b-instruct-v0.2-lora" | "@cf/tinyllama/tinyllama-1.1b-chat-v1.0" | "@hf/mistral/mistral-7b-instruct-v0.2" | "@cf/fblgit/una-cybertron-7b-v2-bf16" | "@cf/deepseek-ai/deepseek-r1-distill-qwen-32b" | "@cf/thebloke/discolm-german-7b-v1-awq" | "@cf/meta/llama-2-7b-chat-int8" | "@cf/meta/llama-3.1-8b-instruct-fp8" | "@hf/thebloke/mistral-7b-instruct-v0.1-awq" | "@cf/qwen/qwen1.5-7b-chat-awq" | "@cf/meta/llama-3.2-1b-instruct" | "@hf/thebloke/llama-2-13b-chat-awq" | "@hf/thebloke/deepseek-coder-6.7b-base-awq" | "@cf/meta-llama/llama-2-7b-chat-hf-lora" | "@cf/meta/llama-3.3-70b-instruct-fp8-fast" | "@hf/thebloke/openhermes-2.5-mistral-7b-awq" | "@hf/thebloke/deepseek-coder-6.7b-instruct-awq" | "@cf/deepseek-ai/deepseek-math-7b-instruct" | "@cf/tiiuae/falcon-7b-instruct" | "@hf/nousresearch/hermes-2-pro-mistral-7b" | "@cf/meta/llama-3.1-8b-instruct" | "@cf/meta/llama-3.1-8b-instruct-awq" | "@hf/thebloke/zephyr-7b-beta-awq" | "@cf/google/gemma-7b-it-lora" | "@cf/qwen/qwen1.5-1.8b-chat" | "@cf/meta/llama-3-8b-instruct-awq" | "@cf/meta/llama-3.2-11b-vision-instruct" | "@cf/defog/sqlcoder-7b-2" | "@cf/microsoft/phi-2" | "@hf/meta-llama/meta-llama-3-8b-instruct" | "@hf/google/gemma-7b-it" | "@cf/qwen/qwen1.5-14b-chat-awq" | "@cf/openchat/openchat-3.5-0106", "@cf/qwen/qwen1.5-0.5b-chat" | "@cf/google/gemma-2b-it-lora" | "@hf/nexusflow/starling-lm-7b-beta" | "@cf/meta/llama-3-8b-instruct" | "@cf/meta/llama-3.2-3b-instruct" | "@hf/thebloke/llamaguard-7b-awq" | "@hf/thebloke/neural-chat-7b-v3-1-awq" | "@cf/meta/llama-guard-3-8b" | "@cf/meta/llama-2-7b-chat-fp16" | "@cf/mistral/mistral-7b-instruct-v0.1" | "@cf/mistral/mistral-7b-instruct-v0.2-lora" | "@cf/tinyllama/tinyllama-1.1b-chat-v1.0" | "@hf/mistral/mistral-7b-instruct-v0.2" | "@cf/fblgit/una-cybertron-7b-v2-bf16" | "@cf/deepseek-ai/deepseek-r1-distill-qwen-32b" | "@cf/thebloke/discolm-german-7b-v1-awq" | "@cf/meta/llama-2-7b-chat-int8" | "@cf/meta/llama-3.1-8b-instruct-fp8" | "@hf/thebloke/mistral-7b-instruct-v0.1-awq" | "@cf/qwen/qwen1.5-7b-chat-awq" | "@cf/meta/llama-3.2-1b-instruct" | "@hf/thebloke/llama-2-13b-chat-awq" | "@hf/thebloke/deepseek-coder-6.7b-base-awq" | "@cf/meta-llama/llama-2-7b-chat-hf-lora" | "@cf/meta/llama-3.3-70b-instruct-fp8-fast" | "@hf/thebloke/openhermes-2.5-mistral-7b-awq" | "@hf/thebloke/deepseek-coder-6.7b-instruct-awq" | "@cf/deepseek-ai/deepseek-math-7b-instruct" | "@cf/tiiuae/falcon-7b-instruct" | "@hf/nousresearch/hermes-2-pro-mistral-7b" | "@cf/meta/llama-3.1-8b-instruct" | "@cf/meta/llama-3.1-8b-instruct-awq" | "@hf/thebloke/zephyr-7b-beta-awq" | "@cf/google/gemma-7b-it-lora" | "@cf/qwen/qwen1.5-1.8b-chat" | "@cf/meta/llama-3-8b-instruct-awq" | "@cf/meta/llama-3.2-11b-vision-instruct" | "@cf/defog/sqlcoder-7b-2" | "@cf/microsoft/phi-2" | "@hf/meta-llama/meta-llama-3-8b-instruct" | "@hf/google/gemma-7b-it" | "@cf/qwen/qwen1.5-14b-chat-awq" | "@cf/openchat/openchat-3.5-0106", "@cf/baai/bge-m3" | "@cf/baai/bge-small-en-v1.5" | "@cf/baai/bge-base-en-v1.5" | "@cf/baai/bge-large-en-v1.5", string>;
+    }>, ":">>;
 }
package/dist/registry.mjs
CHANGED
@@ -11,6 +11,6 @@ export class AiRegistry extends AiBase {
         }));
     }
     registry(args) {
-        return import('ai').then(async ({
+        return import('ai').then(async ({ createProviderRegistry }) => createProviderRegistry(await this.providers(args)));
     }
 }
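registry() now resolves the per-request providers map and hands it straight to the AI SDK's createProviderRegistry, which is what backs the 'provider:model' lookups in models.mjs. A condensed sketch with a hypothetical static two-provider map (the package builds its map per request, with credentials):

import { createProviderRegistry } from 'ai';
import { openai } from '@ai-sdk/openai';
import { anthropic } from '@ai-sdk/anthropic';

// Hypothetical static map; the keys become the prefix before ':' in model ids.
const registry = createProviderRegistry({ openai, anthropic });

// ':' is the default separator, matching ProviderRegistryProvider<…, ":"> in the new declaration.
const chat = registry.languageModel('anthropic:claude-3-5-sonnet-latest');
const embedder = registry.textEmbeddingModel('openai:text-embedding-3-small');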
package/dist/serverSelector.d.mts
CHANGED
@@ -3,7 +3,7 @@ import type { IncomingRequestCfProperties } from '@cloudflare/workers-types/experimental';
 import { AiBase } from './base.mts';
 import type { PrivacyRegion, Servers } from './types.mjs';
 export declare class ServerSelector extends AiBase {
-    static determinePrivacyRegion(country?: IncomingRequestCfProperties['country'], continent?: IncomingRequestCfProperties['continent']): ("APPs" | "LGPD" | "
+    static determinePrivacyRegion(country?: IncomingRequestCfProperties['country'], continent?: IncomingRequestCfProperties['continent']): ("APPs" | "LGPD" | "PIPEDA" | "revFADP" | "GDPR" | "PDP" | "APPI" | "PIPA" | "NPDA" | "UK-GDPR" | "PoPIA")[];
     determineLocation(geoRouting?: {
         userCoordinate?: Coordinate;
         country?: IncomingRequestCfProperties["country"];
package/package.json
CHANGED
@@ -1,6 +1,6 @@
 {
     "name": "@chainfuse/ai-tools",
-    "version": "0.11.1",
+    "version": "0.11.2",
     "description": "",
     "author": "ChainFuse",
     "homepage": "https://github.com/ChainFuse/packages/tree/main/packages/ai-tools#readme",
@@ -48,21 +48,21 @@
     },
     "prettier": "@demosjarco/prettier-config",
     "dependencies": {
-        "@ai-sdk/anthropic": "^1.2.
-        "@ai-sdk/azure": "^1.3.
+        "@ai-sdk/anthropic": "^1.2.2",
+        "@ai-sdk/azure": "^1.3.3",
         "@ai-sdk/google": "^1.2.3",
         "@ai-sdk/openai": "^1.0.5",
-        "@ai-sdk/openai-compatible": "^0.2.
-        "@chainfuse/helpers": "^2.2.
-        "@chainfuse/types": "^2.
-        "ai": "^4.2.
+        "@ai-sdk/openai-compatible": "^0.2.2",
+        "@chainfuse/helpers": "^2.2.2",
+        "@chainfuse/types": "^2.2.1",
+        "ai": "^4.2.6",
         "chalk": "^5.4.1",
         "haversine-distance": "^1.2.3",
         "workers-ai-provider": "^0.2.0"
     },
     "devDependencies": {
         "@cloudflare/workers-types": "^4.20250321.0",
-        "openai": "^4.89.
+        "openai": "^4.89.1"
     },
-    "gitHead": "
+    "gitHead": "34dfde6bed7a2935ed783fed477c6357b3fa9b5e"
 }