@chainfuse/ai-tools 0.2.2 → 0.2.4
This diff shows the changes between publicly released versions of the package as they appear in their public registry, and is provided for informational purposes only.
@@ -5,7 +5,7 @@ import type { AiRequestConfig } from '../types.mjs';
 import type { AzureOpenAIProvider } from './types.mjs';
 export declare class AiCustomProviders extends AiBase {
     oaiOpenai(args: AiRequestConfig): Promise<import("@ai-sdk/openai").OpenAIProvider>;
-    azOpenai(args: AiRequestConfig, [server, ...servers]?: import("../serverSelector/types.
+    azOpenai(args: AiRequestConfig, [server, ...servers]?: import("../serverSelector/types.mts").Server[]): Promise<AzureOpenAIProvider>;
     anthropic(args: AiRequestConfig): Promise<import("@ai-sdk/anthropic").AnthropicProvider>;
     private static workersAiIsRest;
     cfWorkersAi(args: AiRequestConfig): Promise<OpenAICompatibleProvider<"@cf/qwen/qwen1.5-0.5b-chat" | "@cf/google/gemma-2b-it-lora" | "@hf/nexusflow/starling-lm-7b-beta" | "@cf/meta/llama-3-8b-instruct" | "@cf/meta/llama-3.2-3b-instruct" | "@hf/thebloke/llamaguard-7b-awq" | "@hf/thebloke/neural-chat-7b-v3-1-awq" | "@cf/meta/llama-2-7b-chat-fp16" | "@cf/mistral/mistral-7b-instruct-v0.1" | "@cf/mistral/mistral-7b-instruct-v0.2-lora" | "@cf/tinyllama/tinyllama-1.1b-chat-v1.0" | "@hf/mistral/mistral-7b-instruct-v0.2" | "@cf/fblgit/una-cybertron-7b-v2-bf16" | "@cf/thebloke/discolm-german-7b-v1-awq" | "@cf/meta/llama-2-7b-chat-int8" | "@cf/meta/llama-3.1-8b-instruct-fp8" | "@hf/thebloke/mistral-7b-instruct-v0.1-awq" | "@cf/qwen/qwen1.5-7b-chat-awq" | "@cf/meta/llama-3.2-1b-instruct" | "@hf/thebloke/llama-2-13b-chat-awq" | "@hf/thebloke/deepseek-coder-6.7b-base-awq" | "@cf/meta-llama/llama-2-7b-chat-hf-lora" | "@cf/meta/llama-3.3-70b-instruct-fp8-fast" | "@hf/thebloke/openhermes-2.5-mistral-7b-awq" | "@hf/thebloke/deepseek-coder-6.7b-instruct-awq" | "@cf/deepseek-ai/deepseek-math-7b-instruct" | "@cf/tiiuae/falcon-7b-instruct" | "@hf/nousresearch/hermes-2-pro-mistral-7b" | "@cf/meta/llama-3.1-8b-instruct" | "@cf/meta/llama-3.1-8b-instruct-awq" | "@hf/thebloke/zephyr-7b-beta-awq" | "@cf/google/gemma-7b-it-lora" | "@cf/qwen/qwen1.5-1.8b-chat" | "@cf/meta/llama-3-8b-instruct-awq" | "@cf/meta/llama-3.2-11b-vision-instruct" | "@cf/defog/sqlcoder-7b-2" | "@cf/microsoft/phi-2" | "@hf/meta-llama/meta-llama-3-8b-instruct" | "@hf/google/gemma-7b-it" | "@cf/qwen/qwen1.5-14b-chat-awq" | "@cf/openchat/openchat-3.5-0106", "@cf/qwen/qwen1.5-0.5b-chat" | "@cf/google/gemma-2b-it-lora" | "@hf/nexusflow/starling-lm-7b-beta" | "@cf/meta/llama-3-8b-instruct" | "@cf/meta/llama-3.2-3b-instruct" | "@hf/thebloke/llamaguard-7b-awq" | "@hf/thebloke/neural-chat-7b-v3-1-awq" | "@cf/meta/llama-2-7b-chat-fp16" | "@cf/mistral/mistral-7b-instruct-v0.1" | "@cf/mistral/mistral-7b-instruct-v0.2-lora" | "@cf/tinyllama/tinyllama-1.1b-chat-v1.0" | "@hf/mistral/mistral-7b-instruct-v0.2" | "@cf/fblgit/una-cybertron-7b-v2-bf16" | "@cf/thebloke/discolm-german-7b-v1-awq" | "@cf/meta/llama-2-7b-chat-int8" | "@cf/meta/llama-3.1-8b-instruct-fp8" | "@hf/thebloke/mistral-7b-instruct-v0.1-awq" | "@cf/qwen/qwen1.5-7b-chat-awq" | "@cf/meta/llama-3.2-1b-instruct" | "@hf/thebloke/llama-2-13b-chat-awq" | "@hf/thebloke/deepseek-coder-6.7b-base-awq" | "@cf/meta-llama/llama-2-7b-chat-hf-lora" | "@cf/meta/llama-3.3-70b-instruct-fp8-fast" | "@hf/thebloke/openhermes-2.5-mistral-7b-awq" | "@hf/thebloke/deepseek-coder-6.7b-instruct-awq" | "@cf/deepseek-ai/deepseek-math-7b-instruct" | "@cf/tiiuae/falcon-7b-instruct" | "@hf/nousresearch/hermes-2-pro-mistral-7b" | "@cf/meta/llama-3.1-8b-instruct" | "@cf/meta/llama-3.1-8b-instruct-awq" | "@hf/thebloke/zephyr-7b-beta-awq" | "@cf/google/gemma-7b-it-lora" | "@cf/qwen/qwen1.5-1.8b-chat" | "@cf/meta/llama-3-8b-instruct-awq" | "@cf/meta/llama-3.2-11b-vision-instruct" | "@cf/defog/sqlcoder-7b-2" | "@cf/microsoft/phi-2" | "@hf/meta-llama/meta-llama-3-8b-instruct" | "@hf/google/gemma-7b-it" | "@cf/qwen/qwen1.5-14b-chat-awq" | "@cf/openchat/openchat-3.5-0106", "@cf/baai/bge-small-en-v1.5" | "@cf/baai/bge-base-en-v1.5" | "@cf/baai/bge-large-en-v1.5">>;
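
For context, a minimal usage sketch of the updated azOpenai() signature. Only the method signature comes from the declaration above; the import paths, the AiRequestConfig value, the deployment id, and the assumption that the returned AzureOpenAIProvider is the callable provider from @ai-sdk/azure are illustrative, not confirmed by the package.

// Hypothetical caller of AiCustomProviders.azOpenai(); import paths and values are assumed.
import type { AiRequestConfig } from '@chainfuse/ai-tools'; // assumed re-export
import { AiCustomProviders } from '@chainfuse/ai-tools';    // assumed export

declare const providers: AiCustomProviders; // construction omitted; depends on package config
declare const config: AiRequestConfig;

// The optional second argument is now a typed Server[] from serverSelector/types.mts;
// omitting it lets the package select servers itself.
const azure = await providers.azOpenai(config);
const model = azure('my-gpt-4o-deployment'); // assuming the provider is callable with a deployment id, as in @ai-sdk/azure
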
@@ -1,4 +1,4 @@
-import { BufferHelpers, CryptoHelpers, DnsHelpers, Helpers } from '@chainfuse/helpers';
+import { BufferHelpers, CryptoHelpers, DnsHelpers, Helpers, NetHelpers } from '@chainfuse/helpers';
 import haversine from 'haversine-distance';
 import { z } from 'zod';
 import { AiBase } from '../base.mjs';
@@ -6,25 +6,19 @@ export class AiRawProviders extends AiBase {
     // 2628288 seconds is what cf defines as 1 month in their cache rules
     cacheTtl = 2628288;
     async updateGatewayLog(response, metadataHeader, startRoundTrip, modelTime) {
-
-
-
-
-
-
-
-
+        const updateMetadata = NetHelpers.cfApi(this.config.gateway.apiToken).aiGateway.logs.edit(this.config.environment, response.headers.get('cf-aig-log-id'), {
+            account_id: this.config.gateway.accountId,
+            metadata: {
+                ...Object.entries(metadataHeader).reduce((acc, [key, value]) => {
+                    acc[key] = typeof value === 'string' ? value : JSON.stringify(value);
+                    return acc;
+                }, {}),
+                timing: JSON.stringify({
+                    fromCache: response.headers.get('cf-aig-cache-status')?.toLowerCase() === 'hit',
+                    totalRoundtripTime: performance.now() - startRoundTrip,
+                    modelTime,
+                }),
             },
-            body: JSON.stringify({
-                metadata: {
-                    ...metadataHeader,
-                    timing: JSON.stringify({
-                        fromCache: response.headers.get('cf-aig-cache-status')?.toLowerCase() === 'hit',
-                        totalRoundtripTime: performance.now() - startRoundTrip,
-                        modelTime,
-                    }),
-                },
-            }),
         });
         if (this.config.backgroundContext) {
             this.config.backgroundContext.waitUntil(updateMetadata);
@@ -294,7 +288,7 @@ export class AiRawProviders extends AiBase {
         }
         else {
             // This always gets called, only throw error if actually being used
-            return import('@ai-sdk/openai-compatible').then(
+            return import('@ai-sdk/openai-compatible').then(({ createOpenAICompatible }) => createOpenAICompatible({
                 // Dummy url that'll never be hit
                 baseURL: 'https://sushidata.com',
                 name: 'custom',
package/dist/registry.d.mts
CHANGED
@@ -3,7 +3,7 @@ import type { AiRequestConfig } from './types.mjs';
 export declare class AiRegistry extends AiBase {
     providers(args: AiRequestConfig): Promise<Readonly<{
         openai: import("@ai-sdk/openai").OpenAIProvider;
-        azure: import("./providers/types.
+        azure: import("./providers/types.mts").AzureOpenAIProvider;
         anthropic: import("@ai-sdk/anthropic").AnthropicProvider;
         custom: import("@ai-sdk/openai-compatible").OpenAICompatibleProvider<string, string, string>;
         'google.generative-ai': import("@ai-sdk/google").GoogleGenerativeAIProvider;
package/dist/types.d.mts
CHANGED
@@ -109,11 +109,12 @@ export interface AiRequestMetadataTiming {
 }
 export interface AiRequestMetadata {
     dataspaceId: AiRequestConfig['dataspaceId'];
-    serverInfo: AiRequestMetadataServerInfo | AiRequestMetadataServerInfoWithLocation
+    serverInfo: AiRequestMetadataServerInfo | AiRequestMetadataServerInfoWithLocation;
     idempotencyId: AiRequestIdempotencyId;
-    executor: AiRequestExecutor
-    timing: AiRequestMetadataTiming
+    executor: AiRequestExecutor;
+    timing: AiRequestMetadataTiming;
 }
+export type AiRequestMetadataStringified = Record<keyof AiRequestMetadata, string>;
 /**
  * Extracts the chunk type from an asynchronous iterable.
  */
package/package.json
CHANGED
@@ -1,6 +1,6 @@
 {
     "name": "@chainfuse/ai-tools",
-    "version": "0.2.2",
+    "version": "0.2.4",
     "description": "",
     "author": "ChainFuse",
     "homepage": "https://github.com/ChainFuse/packages/tree/main/packages/ai-tools#readme",
@@ -48,21 +48,21 @@
     },
     "prettier": "@demosjarco/prettier-config",
     "dependencies": {
-        "@ai-sdk/anthropic": "^1.0
-        "@ai-sdk/azure": "^1.0
-        "@ai-sdk/google": "^1.0
+        "@ai-sdk/anthropic": "^1.1.0",
+        "@ai-sdk/azure": "^1.1.0",
+        "@ai-sdk/google": "^1.1.0",
         "@ai-sdk/openai": "^1.0.5",
-        "@ai-sdk/openai-compatible": "^0.0
-        "@chainfuse/helpers": "^0.
-        "@chainfuse/types": "^1.
-        "ai": "^4.0
+        "@ai-sdk/openai-compatible": "^0.1.0",
+        "@chainfuse/helpers": "^1.0.1",
+        "@chainfuse/types": "^1.5.1",
+        "ai": "^4.1.0",
         "chalk": "^5.4.1",
         "haversine-distance": "^1.2.3",
         "workers-ai-provider": "^0.0.10"
     },
     "devDependencies": {
-        "@cloudflare/workers-types": "^4.
-        "openai": "^4.
+        "@cloudflare/workers-types": "^4.20250121.0",
+        "openai": "^4.79.4"
     },
-    "gitHead": "
+    "gitHead": "e5a8e425f75ed15d265f3acb2f8986e1200dec14"
 }