@chainfuse/ai-tools 0.2.2 → 0.2.3

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -5,10 +5,10 @@ import type { AiRequestConfig } from '../types.mjs';
  import type { AzureOpenAIProvider } from './types.mjs';
  export declare class AiCustomProviders extends AiBase {
  oaiOpenai(args: AiRequestConfig): Promise<import("@ai-sdk/openai").OpenAIProvider>;
- azOpenai(args: AiRequestConfig, [server, ...servers]?: import("../serverSelector/types.mjs").Server[]): Promise<AzureOpenAIProvider>;
+ azOpenai(args: AiRequestConfig, [server, ...servers]?: import("../serverSelector/types.mts").Server[]): Promise<AzureOpenAIProvider>;
  anthropic(args: AiRequestConfig): Promise<import("@ai-sdk/anthropic").AnthropicProvider>;
  private static workersAiIsRest;
- cfWorkersAi(args: AiRequestConfig): Promise<OpenAICompatibleProvider<"@cf/qwen/qwen1.5-0.5b-chat" | "@cf/google/gemma-2b-it-lora" | "@hf/nexusflow/starling-lm-7b-beta" | "@cf/meta/llama-3-8b-instruct" | "@cf/meta/llama-3.2-3b-instruct" | "@hf/thebloke/llamaguard-7b-awq" | "@hf/thebloke/neural-chat-7b-v3-1-awq" | "@cf/meta/llama-2-7b-chat-fp16" | "@cf/mistral/mistral-7b-instruct-v0.1" | "@cf/mistral/mistral-7b-instruct-v0.2-lora" | "@cf/tinyllama/tinyllama-1.1b-chat-v1.0" | "@hf/mistral/mistral-7b-instruct-v0.2" | "@cf/fblgit/una-cybertron-7b-v2-bf16" | "@cf/thebloke/discolm-german-7b-v1-awq" | "@cf/meta/llama-2-7b-chat-int8" | "@cf/meta/llama-3.1-8b-instruct-fp8" | "@hf/thebloke/mistral-7b-instruct-v0.1-awq" | "@cf/qwen/qwen1.5-7b-chat-awq" | "@cf/meta/llama-3.2-1b-instruct" | "@hf/thebloke/llama-2-13b-chat-awq" | "@hf/thebloke/deepseek-coder-6.7b-base-awq" | "@cf/meta-llama/llama-2-7b-chat-hf-lora" | "@cf/meta/llama-3.3-70b-instruct-fp8-fast" | "@hf/thebloke/openhermes-2.5-mistral-7b-awq" | "@hf/thebloke/deepseek-coder-6.7b-instruct-awq" | "@cf/deepseek-ai/deepseek-math-7b-instruct" | "@cf/tiiuae/falcon-7b-instruct" | "@hf/nousresearch/hermes-2-pro-mistral-7b" | "@cf/meta/llama-3.1-8b-instruct" | "@cf/meta/llama-3.1-8b-instruct-awq" | "@hf/thebloke/zephyr-7b-beta-awq" | "@cf/google/gemma-7b-it-lora" | "@cf/qwen/qwen1.5-1.8b-chat" | "@cf/meta/llama-3-8b-instruct-awq" | "@cf/meta/llama-3.2-11b-vision-instruct" | "@cf/defog/sqlcoder-7b-2" | "@cf/microsoft/phi-2" | "@hf/meta-llama/meta-llama-3-8b-instruct" | "@hf/google/gemma-7b-it" | "@cf/qwen/qwen1.5-14b-chat-awq" | "@cf/openchat/openchat-3.5-0106", "@cf/qwen/qwen1.5-0.5b-chat" | "@cf/google/gemma-2b-it-lora" | "@hf/nexusflow/starling-lm-7b-beta" | "@cf/meta/llama-3-8b-instruct" | "@cf/meta/llama-3.2-3b-instruct" | "@hf/thebloke/llamaguard-7b-awq" | "@hf/thebloke/neural-chat-7b-v3-1-awq" | "@cf/meta/llama-2-7b-chat-fp16" | "@cf/mistral/mistral-7b-instruct-v0.1" | "@cf/mistral/mistral-7b-instruct-v0.2-lora" | "@cf/tinyllama/tinyllama-1.1b-chat-v1.0" | "@hf/mistral/mistral-7b-instruct-v0.2" | "@cf/fblgit/una-cybertron-7b-v2-bf16" | "@cf/thebloke/discolm-german-7b-v1-awq" | "@cf/meta/llama-2-7b-chat-int8" | "@cf/meta/llama-3.1-8b-instruct-fp8" | "@hf/thebloke/mistral-7b-instruct-v0.1-awq" | "@cf/qwen/qwen1.5-7b-chat-awq" | "@cf/meta/llama-3.2-1b-instruct" | "@hf/thebloke/llama-2-13b-chat-awq" | "@hf/thebloke/deepseek-coder-6.7b-base-awq" | "@cf/meta-llama/llama-2-7b-chat-hf-lora" | "@cf/meta/llama-3.3-70b-instruct-fp8-fast" | "@hf/thebloke/openhermes-2.5-mistral-7b-awq" | "@hf/thebloke/deepseek-coder-6.7b-instruct-awq" | "@cf/deepseek-ai/deepseek-math-7b-instruct" | "@cf/tiiuae/falcon-7b-instruct" | "@hf/nousresearch/hermes-2-pro-mistral-7b" | "@cf/meta/llama-3.1-8b-instruct" | "@cf/meta/llama-3.1-8b-instruct-awq" | "@hf/thebloke/zephyr-7b-beta-awq" | "@cf/google/gemma-7b-it-lora" | "@cf/qwen/qwen1.5-1.8b-chat" | "@cf/meta/llama-3-8b-instruct-awq" | "@cf/meta/llama-3.2-11b-vision-instruct" | "@cf/defog/sqlcoder-7b-2" | "@cf/microsoft/phi-2" | "@hf/meta-llama/meta-llama-3-8b-instruct" | "@hf/google/gemma-7b-it" | "@cf/qwen/qwen1.5-14b-chat-awq" | "@cf/openchat/openchat-3.5-0106", "@cf/baai/bge-small-en-v1.5" | "@cf/baai/bge-base-en-v1.5" | "@cf/baai/bge-large-en-v1.5">>;
+ cfWorkersAi(args: AiRequestConfig): Promise<OpenAICompatibleProvider<"@cf/meta/llama-2-7b-chat-int8" | "@cf/mistral/mistral-7b-instruct-v0.1" | "@cf/meta/llama-2-7b-chat-fp16" | "@hf/thebloke/llama-2-13b-chat-awq" | "@hf/thebloke/mistral-7b-instruct-v0.1-awq" | "@hf/thebloke/zephyr-7b-beta-awq" | "@hf/thebloke/openhermes-2.5-mistral-7b-awq" | "@hf/thebloke/neural-chat-7b-v3-1-awq" | "@hf/thebloke/llamaguard-7b-awq" | "@hf/thebloke/deepseek-coder-6.7b-base-awq" | "@hf/thebloke/deepseek-coder-6.7b-instruct-awq" | "@cf/deepseek-ai/deepseek-math-7b-instruct" | "@cf/defog/sqlcoder-7b-2" | "@cf/openchat/openchat-3.5-0106" | "@cf/tiiuae/falcon-7b-instruct" | "@cf/thebloke/discolm-german-7b-v1-awq" | "@cf/qwen/qwen1.5-0.5b-chat" | "@cf/qwen/qwen1.5-7b-chat-awq" | "@cf/qwen/qwen1.5-14b-chat-awq" | "@cf/tinyllama/tinyllama-1.1b-chat-v1.0" | "@cf/microsoft/phi-2" | "@cf/qwen/qwen1.5-1.8b-chat" | "@cf/mistral/mistral-7b-instruct-v0.2-lora" | "@hf/nousresearch/hermes-2-pro-mistral-7b" | "@hf/nexusflow/starling-lm-7b-beta" | "@hf/google/gemma-7b-it" | "@cf/meta-llama/llama-2-7b-chat-hf-lora" | "@cf/google/gemma-2b-it-lora" | "@cf/google/gemma-7b-it-lora" | "@hf/mistral/mistral-7b-instruct-v0.2" | "@cf/meta/llama-3-8b-instruct" | "@cf/fblgit/una-cybertron-7b-v2-bf16" | "@cf/meta/llama-3-8b-instruct-awq" | "@hf/meta-llama/meta-llama-3-8b-instruct" | "@cf/meta/llama-3.1-8b-instruct" | "@cf/meta/llama-3.1-8b-instruct-fp8" | "@cf/meta/llama-3.1-8b-instruct-awq" | "@cf/meta/llama-3.2-3b-instruct" | "@cf/meta/llama-3.2-1b-instruct" | "@cf/meta/llama-3.3-70b-instruct-fp8-fast" | "@cf/meta/llama-3.2-11b-vision-instruct", "@cf/meta/llama-2-7b-chat-int8" | "@cf/mistral/mistral-7b-instruct-v0.1" | "@cf/meta/llama-2-7b-chat-fp16" | "@hf/thebloke/llama-2-13b-chat-awq" | "@hf/thebloke/mistral-7b-instruct-v0.1-awq" | "@hf/thebloke/zephyr-7b-beta-awq" | "@hf/thebloke/openhermes-2.5-mistral-7b-awq" | "@hf/thebloke/neural-chat-7b-v3-1-awq" | "@hf/thebloke/llamaguard-7b-awq" | "@hf/thebloke/deepseek-coder-6.7b-base-awq" | "@hf/thebloke/deepseek-coder-6.7b-instruct-awq" | "@cf/deepseek-ai/deepseek-math-7b-instruct" | "@cf/defog/sqlcoder-7b-2" | "@cf/openchat/openchat-3.5-0106" | "@cf/tiiuae/falcon-7b-instruct" | "@cf/thebloke/discolm-german-7b-v1-awq" | "@cf/qwen/qwen1.5-0.5b-chat" | "@cf/qwen/qwen1.5-7b-chat-awq" | "@cf/qwen/qwen1.5-14b-chat-awq" | "@cf/tinyllama/tinyllama-1.1b-chat-v1.0" | "@cf/microsoft/phi-2" | "@cf/qwen/qwen1.5-1.8b-chat" | "@cf/mistral/mistral-7b-instruct-v0.2-lora" | "@hf/nousresearch/hermes-2-pro-mistral-7b" | "@hf/nexusflow/starling-lm-7b-beta" | "@hf/google/gemma-7b-it" | "@cf/meta-llama/llama-2-7b-chat-hf-lora" | "@cf/google/gemma-2b-it-lora" | "@cf/google/gemma-7b-it-lora" | "@hf/mistral/mistral-7b-instruct-v0.2" | "@cf/meta/llama-3-8b-instruct" | "@cf/fblgit/una-cybertron-7b-v2-bf16" | "@cf/meta/llama-3-8b-instruct-awq" | "@hf/meta-llama/meta-llama-3-8b-instruct" | "@cf/meta/llama-3.1-8b-instruct" | "@cf/meta/llama-3.1-8b-instruct-fp8" | "@cf/meta/llama-3.1-8b-instruct-awq" | "@cf/meta/llama-3.2-3b-instruct" | "@cf/meta/llama-3.2-1b-instruct" | "@cf/meta/llama-3.3-70b-instruct-fp8-fast" | "@cf/meta/llama-3.2-11b-vision-instruct", "@cf/baai/bge-base-en-v1.5" | "@cf/baai/bge-small-en-v1.5" | "@cf/baai/bge-large-en-v1.5">>;
  custom(args: AiRequestConfig): Promise<OpenAICompatibleProvider<string, string, string>>;
  googleAi(args: AiRequestConfig): Promise<GoogleGenerativeAIProvider>;
  }
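
Note: the azOpenai signature above destructures its optional second parameter into a preferred server plus fallbacks. A minimal TypeScript sketch of that parameter pattern only; the Server shape below is a stand-in, not the package's actual ../serverSelector/types.mts type:

    // Sketch of the `[server, ...servers]?: Server[]` parameter pattern.
    // `Server` here is hypothetical; the real type lives in ../serverSelector/types.mts.
    interface Server {
    	url: string;
    }

    function pickServer([primary, ...fallbacks]: Server[] = []): Server | undefined {
    	// With no argument the default `[]` kicks in, so callers may omit the list entirely.
    	return primary ?? fallbacks[0];
    }

    pickServer(); // undefined
    pickServer([{ url: 'https://a.example' }, { url: 'https://b.example' }]); // first entry wins
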
@@ -1,4 +1,4 @@
- import { BufferHelpers, CryptoHelpers, DnsHelpers, Helpers } from '@chainfuse/helpers';
+ import { BufferHelpers, CryptoHelpers, DnsHelpers, Helpers, NetHelpers } from '@chainfuse/helpers';
  import haversine from 'haversine-distance';
  import { z } from 'zod';
  import { AiBase } from '../base.mjs';
@@ -6,25 +6,19 @@ export class AiRawProviders extends AiBase {
  // 2628288 seconds is what cf defines as 1 month in their cache rules
  cacheTtl = 2628288;
  async updateGatewayLog(response, metadataHeader, startRoundTrip, modelTime) {
- /**
- * @todo `cloudflare` rest package not updated to this endpoint yet
- */
- const updateMetadata = fetch(new URL(['client', 'v4', 'accounts', this.config.gateway.accountId, 'ai-gateway', 'gateways', this.config.environment, 'logs', response.headers.get('cf-aig-log-id')].join('/'), 'https://api.cloudflare.com'), {
- method: 'PATCH',
- headers: {
- Authorization: `Bearer ${this.config.gateway.apiToken}`,
- 'Content-Type': 'application/json',
+ const updateMetadata = NetHelpers.cfApi(this.config.gateway.apiToken).aiGateway.logs.edit(this.config.environment, response.headers.get('cf-aig-log-id'), {
+ account_id: this.config.gateway.accountId,
+ metadata: {
+ ...Object.entries(metadataHeader).reduce((acc, [key, value]) => {
+ acc[key] = typeof value === 'string' ? value : JSON.stringify(value);
+ return acc;
+ }, {}),
+ timing: JSON.stringify({
+ fromCache: response.headers.get('cf-aig-cache-status')?.toLowerCase() === 'hit',
+ totalRoundtripTime: performance.now() - startRoundTrip,
+ modelTime,
+ }),
  },
- body: JSON.stringify({
- metadata: {
- ...metadataHeader,
- timing: JSON.stringify({
- fromCache: response.headers.get('cf-aig-cache-status')?.toLowerCase() === 'hit',
- totalRoundtripTime: performance.now() - startRoundTrip,
- modelTime,
- }),
- },
- }),
  });
  if (this.config.backgroundContext) {
  this.config.backgroundContext.waitUntil(updateMetadata);
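
Note: this hunk hands the pending logs.edit promise to backgroundContext.waitUntil, so the gateway-log update completes after the response is returned rather than blocking it. A rough sketch of that Workers pattern, assuming backgroundContext is a standard ExecutionContext (the surrounding config shape is not part of this diff, and the PATCH target below is a placeholder):

    // Sketch: defer non-critical work past the response via ExecutionContext.waitUntil.
    export default {
    	async fetch(request: Request, env: unknown, ctx: ExecutionContext): Promise<Response> {
    		// Placeholder update call; in the package this is the AI Gateway logs.edit promise.
    		const updateMetadata = fetch('https://example.com/log', { method: 'PATCH' });
    		// waitUntil keeps the worker alive until the promise settles,
    		// without delaying the Response below.
    		ctx.waitUntil(updateMetadata);
    		return new Response('ok');
    	},
    };
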
@@ -294,7 +288,7 @@ export class AiRawProviders extends AiBase {
  }
  else {
  // This always gets called, only throw error if actually being used
- return import('@ai-sdk/openai-compatible').then(async ({ createOpenAICompatible }) => createOpenAICompatible({
+ return import('@ai-sdk/openai-compatible').then(({ createOpenAICompatible }) => createOpenAICompatible({
  // Dummy url that'll never be hit
  baseURL: 'https://sushidata.com',
  name: 'custom',
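
Note: the comment in this hunk explains that the custom branch is always constructed but should only fail when actually used, hence the dummy baseURL. A small sketch of that "fail only on use" idea in plain TypeScript, independent of the @ai-sdk/openai-compatible API:

    // Sketch: construct the client eagerly, surface the configuration error only at call time.
    function makeLazyFailingClient(configured: boolean) {
    	return {
    		complete(prompt: string): Promise<string> {
    			if (!configured) {
    				// Creating the client never throws; invoking it does.
    				return Promise.reject(new Error('custom provider was selected but never configured'));
    			}
    			return Promise.resolve(`echo: ${prompt}`);
    		},
    	};
    }

    const client = makeLazyFailingClient(false); // safe to create
    // client.complete('hi') would reject only when called.
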
@@ -3,11 +3,11 @@ import type { AiRequestConfig } from './types.mjs';
  export declare class AiRegistry extends AiBase {
  providers(args: AiRequestConfig): Promise<Readonly<{
  openai: import("@ai-sdk/openai").OpenAIProvider;
- azure: import("./providers/types.mjs").AzureOpenAIProvider;
+ azure: import("./providers/types.mts").AzureOpenAIProvider;
  anthropic: import("@ai-sdk/anthropic").AnthropicProvider;
  custom: import("@ai-sdk/openai-compatible").OpenAICompatibleProvider<string, string, string>;
  'google.generative-ai': import("@ai-sdk/google").GoogleGenerativeAIProvider;
- workersai: import("@ai-sdk/openai-compatible").OpenAICompatibleProvider<"@cf/qwen/qwen1.5-0.5b-chat" | "@cf/google/gemma-2b-it-lora" | "@hf/nexusflow/starling-lm-7b-beta" | "@cf/meta/llama-3-8b-instruct" | "@cf/meta/llama-3.2-3b-instruct" | "@hf/thebloke/llamaguard-7b-awq" | "@hf/thebloke/neural-chat-7b-v3-1-awq" | "@cf/meta/llama-2-7b-chat-fp16" | "@cf/mistral/mistral-7b-instruct-v0.1" | "@cf/mistral/mistral-7b-instruct-v0.2-lora" | "@cf/tinyllama/tinyllama-1.1b-chat-v1.0" | "@hf/mistral/mistral-7b-instruct-v0.2" | "@cf/fblgit/una-cybertron-7b-v2-bf16" | "@cf/thebloke/discolm-german-7b-v1-awq" | "@cf/meta/llama-2-7b-chat-int8" | "@cf/meta/llama-3.1-8b-instruct-fp8" | "@hf/thebloke/mistral-7b-instruct-v0.1-awq" | "@cf/qwen/qwen1.5-7b-chat-awq" | "@cf/meta/llama-3.2-1b-instruct" | "@hf/thebloke/llama-2-13b-chat-awq" | "@hf/thebloke/deepseek-coder-6.7b-base-awq" | "@cf/meta-llama/llama-2-7b-chat-hf-lora" | "@cf/meta/llama-3.3-70b-instruct-fp8-fast" | "@hf/thebloke/openhermes-2.5-mistral-7b-awq" | "@hf/thebloke/deepseek-coder-6.7b-instruct-awq" | "@cf/deepseek-ai/deepseek-math-7b-instruct" | "@cf/tiiuae/falcon-7b-instruct" | "@hf/nousresearch/hermes-2-pro-mistral-7b" | "@cf/meta/llama-3.1-8b-instruct" | "@cf/meta/llama-3.1-8b-instruct-awq" | "@hf/thebloke/zephyr-7b-beta-awq" | "@cf/google/gemma-7b-it-lora" | "@cf/qwen/qwen1.5-1.8b-chat" | "@cf/meta/llama-3-8b-instruct-awq" | "@cf/meta/llama-3.2-11b-vision-instruct" | "@cf/defog/sqlcoder-7b-2" | "@cf/microsoft/phi-2" | "@hf/meta-llama/meta-llama-3-8b-instruct" | "@hf/google/gemma-7b-it" | "@cf/qwen/qwen1.5-14b-chat-awq" | "@cf/openchat/openchat-3.5-0106", "@cf/qwen/qwen1.5-0.5b-chat" | "@cf/google/gemma-2b-it-lora" | "@hf/nexusflow/starling-lm-7b-beta" | "@cf/meta/llama-3-8b-instruct" | "@cf/meta/llama-3.2-3b-instruct" | "@hf/thebloke/llamaguard-7b-awq" | "@hf/thebloke/neural-chat-7b-v3-1-awq" | "@cf/meta/llama-2-7b-chat-fp16" | "@cf/mistral/mistral-7b-instruct-v0.1" | "@cf/mistral/mistral-7b-instruct-v0.2-lora" | "@cf/tinyllama/tinyllama-1.1b-chat-v1.0" | "@hf/mistral/mistral-7b-instruct-v0.2" | "@cf/fblgit/una-cybertron-7b-v2-bf16" | "@cf/thebloke/discolm-german-7b-v1-awq" | "@cf/meta/llama-2-7b-chat-int8" | "@cf/meta/llama-3.1-8b-instruct-fp8" | "@hf/thebloke/mistral-7b-instruct-v0.1-awq" | "@cf/qwen/qwen1.5-7b-chat-awq" | "@cf/meta/llama-3.2-1b-instruct" | "@hf/thebloke/llama-2-13b-chat-awq" | "@hf/thebloke/deepseek-coder-6.7b-base-awq" | "@cf/meta-llama/llama-2-7b-chat-hf-lora" | "@cf/meta/llama-3.3-70b-instruct-fp8-fast" | "@hf/thebloke/openhermes-2.5-mistral-7b-awq" | "@hf/thebloke/deepseek-coder-6.7b-instruct-awq" | "@cf/deepseek-ai/deepseek-math-7b-instruct" | "@cf/tiiuae/falcon-7b-instruct" | "@hf/nousresearch/hermes-2-pro-mistral-7b" | "@cf/meta/llama-3.1-8b-instruct" | "@cf/meta/llama-3.1-8b-instruct-awq" | "@hf/thebloke/zephyr-7b-beta-awq" | "@cf/google/gemma-7b-it-lora" | "@cf/qwen/qwen1.5-1.8b-chat" | "@cf/meta/llama-3-8b-instruct-awq" | "@cf/meta/llama-3.2-11b-vision-instruct" | "@cf/defog/sqlcoder-7b-2" | "@cf/microsoft/phi-2" | "@hf/meta-llama/meta-llama-3-8b-instruct" | "@hf/google/gemma-7b-it" | "@cf/qwen/qwen1.5-14b-chat-awq" | "@cf/openchat/openchat-3.5-0106", "@cf/baai/bge-small-en-v1.5" | "@cf/baai/bge-base-en-v1.5" | "@cf/baai/bge-large-en-v1.5">;
+ workersai: import("@ai-sdk/openai-compatible").OpenAICompatibleProvider<"@cf/meta/llama-2-7b-chat-int8" | "@cf/mistral/mistral-7b-instruct-v0.1" | "@cf/meta/llama-2-7b-chat-fp16" | "@hf/thebloke/llama-2-13b-chat-awq" | "@hf/thebloke/mistral-7b-instruct-v0.1-awq" | "@hf/thebloke/zephyr-7b-beta-awq" | "@hf/thebloke/openhermes-2.5-mistral-7b-awq" | "@hf/thebloke/neural-chat-7b-v3-1-awq" | "@hf/thebloke/llamaguard-7b-awq" | "@hf/thebloke/deepseek-coder-6.7b-base-awq" | "@hf/thebloke/deepseek-coder-6.7b-instruct-awq" | "@cf/deepseek-ai/deepseek-math-7b-instruct" | "@cf/defog/sqlcoder-7b-2" | "@cf/openchat/openchat-3.5-0106" | "@cf/tiiuae/falcon-7b-instruct" | "@cf/thebloke/discolm-german-7b-v1-awq" | "@cf/qwen/qwen1.5-0.5b-chat" | "@cf/qwen/qwen1.5-7b-chat-awq" | "@cf/qwen/qwen1.5-14b-chat-awq" | "@cf/tinyllama/tinyllama-1.1b-chat-v1.0" | "@cf/microsoft/phi-2" | "@cf/qwen/qwen1.5-1.8b-chat" | "@cf/mistral/mistral-7b-instruct-v0.2-lora" | "@hf/nousresearch/hermes-2-pro-mistral-7b" | "@hf/nexusflow/starling-lm-7b-beta" | "@hf/google/gemma-7b-it" | "@cf/meta-llama/llama-2-7b-chat-hf-lora" | "@cf/google/gemma-2b-it-lora" | "@cf/google/gemma-7b-it-lora" | "@hf/mistral/mistral-7b-instruct-v0.2" | "@cf/meta/llama-3-8b-instruct" | "@cf/fblgit/una-cybertron-7b-v2-bf16" | "@cf/meta/llama-3-8b-instruct-awq" | "@hf/meta-llama/meta-llama-3-8b-instruct" | "@cf/meta/llama-3.1-8b-instruct" | "@cf/meta/llama-3.1-8b-instruct-fp8" | "@cf/meta/llama-3.1-8b-instruct-awq" | "@cf/meta/llama-3.2-3b-instruct" | "@cf/meta/llama-3.2-1b-instruct" | "@cf/meta/llama-3.3-70b-instruct-fp8-fast" | "@cf/meta/llama-3.2-11b-vision-instruct", "@cf/meta/llama-2-7b-chat-int8" | "@cf/mistral/mistral-7b-instruct-v0.1" | "@cf/meta/llama-2-7b-chat-fp16" | "@hf/thebloke/llama-2-13b-chat-awq" | "@hf/thebloke/mistral-7b-instruct-v0.1-awq" | "@hf/thebloke/zephyr-7b-beta-awq" | "@hf/thebloke/openhermes-2.5-mistral-7b-awq" | "@hf/thebloke/neural-chat-7b-v3-1-awq" | "@hf/thebloke/llamaguard-7b-awq" | "@hf/thebloke/deepseek-coder-6.7b-base-awq" | "@hf/thebloke/deepseek-coder-6.7b-instruct-awq" | "@cf/deepseek-ai/deepseek-math-7b-instruct" | "@cf/defog/sqlcoder-7b-2" | "@cf/openchat/openchat-3.5-0106" | "@cf/tiiuae/falcon-7b-instruct" | "@cf/thebloke/discolm-german-7b-v1-awq" | "@cf/qwen/qwen1.5-0.5b-chat" | "@cf/qwen/qwen1.5-7b-chat-awq" | "@cf/qwen/qwen1.5-14b-chat-awq" | "@cf/tinyllama/tinyllama-1.1b-chat-v1.0" | "@cf/microsoft/phi-2" | "@cf/qwen/qwen1.5-1.8b-chat" | "@cf/mistral/mistral-7b-instruct-v0.2-lora" | "@hf/nousresearch/hermes-2-pro-mistral-7b" | "@hf/nexusflow/starling-lm-7b-beta" | "@hf/google/gemma-7b-it" | "@cf/meta-llama/llama-2-7b-chat-hf-lora" | "@cf/google/gemma-2b-it-lora" | "@cf/google/gemma-7b-it-lora" | "@hf/mistral/mistral-7b-instruct-v0.2" | "@cf/meta/llama-3-8b-instruct" | "@cf/fblgit/una-cybertron-7b-v2-bf16" | "@cf/meta/llama-3-8b-instruct-awq" | "@hf/meta-llama/meta-llama-3-8b-instruct" | "@cf/meta/llama-3.1-8b-instruct" | "@cf/meta/llama-3.1-8b-instruct-fp8" | "@cf/meta/llama-3.1-8b-instruct-awq" | "@cf/meta/llama-3.2-3b-instruct" | "@cf/meta/llama-3.2-1b-instruct" | "@cf/meta/llama-3.3-70b-instruct-fp8-fast" | "@cf/meta/llama-3.2-11b-vision-instruct", "@cf/baai/bge-base-en-v1.5" | "@cf/baai/bge-small-en-v1.5" | "@cf/baai/bge-large-en-v1.5">;
  }>>;
  registry(args: AiRequestConfig): Promise<import("ai").Provider>;
  }
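
Note: registry(args) resolves to a Provider from the ai package, whose languageModel(id) method looks models up by id. A hedged usage sketch only; how an AiRegistry instance is obtained and the 'providerId:modelId' id format are assumptions, not shown in this diff:

    import { generateText, type Provider } from 'ai';

    // Assumes `provider` came from something like `await aiRegistry.registry(args)`.
    async function ask(provider: Provider, prompt: string) {
    	const { text } = await generateText({
    		// Id format assumed to follow the usual registry convention of 'providerId:modelId'.
    		model: provider.languageModel('openai:gpt-4o-mini'),
    		prompt,
    	});
    	return text;
    }
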
package/dist/types.d.mts CHANGED
@@ -109,11 +109,12 @@ export interface AiRequestMetadataTiming {
  }
  export interface AiRequestMetadata {
  dataspaceId: AiRequestConfig['dataspaceId'];
- serverInfo: AiRequestMetadataServerInfo | AiRequestMetadataServerInfoWithLocation | string;
+ serverInfo: AiRequestMetadataServerInfo | AiRequestMetadataServerInfoWithLocation;
  idempotencyId: AiRequestIdempotencyId;
- executor: AiRequestExecutor | string;
- timing: AiRequestMetadataTiming | string;
+ executor: AiRequestExecutor;
+ timing: AiRequestMetadataTiming;
  }
+ export type AiRequestMetadataStringified = Record<keyof AiRequestMetadata, string>;
  /**
  * Extracts the chunk type from an asynchronous iterable.
  */
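
Note: the new AiRequestMetadataStringified alias maps every AiRequestMetadata key to a string, matching the reduce in updateGatewayLog that JSON.stringifies non-string values before sending them to the gateway log. A minimal sketch of that conversion under those assumptions; it is generic rather than tied to the package's exact types:

    // Sketch: serialize metadata into its Record<keyof T, string> form,
    // mirroring `typeof value === 'string' ? value : JSON.stringify(value)` from the diff above.
    type Stringified<T> = Record<keyof T, string>;

    function stringifyMetadata<T extends Record<string, unknown>>(metadata: T): Stringified<T> {
    	return Object.fromEntries(
    		Object.entries(metadata).map(([key, value]) => [key, typeof value === 'string' ? value : JSON.stringify(value)]),
    	) as Stringified<T>;
    }

    // stringifyMetadata({ dataspaceId: 'abc', timing: { modelTime: 12 } })
    // -> { dataspaceId: 'abc', timing: '{"modelTime":12}' }
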
package/package.json CHANGED
@@ -1,6 +1,6 @@
  {
  "name": "@chainfuse/ai-tools",
- "version": "0.2.2",
+ "version": "0.2.3",
  "description": "",
  "author": "ChainFuse",
  "homepage": "https://github.com/ChainFuse/packages/tree/main/packages/ai-tools#readme",
@@ -49,13 +49,13 @@
  "prettier": "@demosjarco/prettier-config",
  "dependencies": {
  "@ai-sdk/anthropic": "^1.0.8",
- "@ai-sdk/azure": "^1.0.19",
- "@ai-sdk/google": "^1.0.15",
+ "@ai-sdk/azure": "^1.0.21",
+ "@ai-sdk/google": "^1.0.16",
  "@ai-sdk/openai": "^1.0.5",
- "@ai-sdk/openai-compatible": "^0.0.16",
- "@chainfuse/helpers": "^0.7.0",
- "@chainfuse/types": "^1.4.2",
- "ai": "^4.0.33",
+ "@ai-sdk/openai-compatible": "^0.0.17",
+ "@chainfuse/helpers": "^1.0.0",
+ "@chainfuse/types": "^1.5.0",
+ "ai": "^4.0.36",
  "chalk": "^5.4.1",
  "haversine-distance": "^1.2.3",
  "workers-ai-provider": "^0.0.10"
@@ -64,5 +64,5 @@
  "@cloudflare/workers-types": "^4.20250109.0",
  "openai": "^4.78.1"
  },
- "gitHead": "abed5b9d3a63bfe419045c38f7e1402de09f11a6"
+ "gitHead": "b035c50ca30c59049413ee8c45376181920fea55"
  }