@clinebot/llms 0.0.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +198 -0
- package/dist/config-browser.d.ts +3 -0
- package/dist/config.d.ts +3 -0
- package/dist/index.browser.d.ts +4 -0
- package/dist/index.browser.js +1 -0
- package/dist/index.d.ts +5 -0
- package/dist/index.js +7 -0
- package/dist/models/generated-access.d.ts +4 -0
- package/dist/models/generated-provider-loaders.d.ts +13 -0
- package/dist/models/generated.d.ts +14 -0
- package/dist/models/index.d.ts +43 -0
- package/dist/models/models-dev-catalog.d.ts +32 -0
- package/dist/models/providers/aihubmix.d.ts +5 -0
- package/dist/models/providers/anthropic.d.ts +53 -0
- package/dist/models/providers/asksage.d.ts +5 -0
- package/dist/models/providers/baseten.d.ts +5 -0
- package/dist/models/providers/bedrock.d.ts +7 -0
- package/dist/models/providers/cerebras.d.ts +7 -0
- package/dist/models/providers/claude-code.d.ts +4 -0
- package/dist/models/providers/cline.d.ts +34 -0
- package/dist/models/providers/deepseek.d.ts +8 -0
- package/dist/models/providers/dify.d.ts +5 -0
- package/dist/models/providers/doubao.d.ts +7 -0
- package/dist/models/providers/fireworks.d.ts +8 -0
- package/dist/models/providers/gemini.d.ts +9 -0
- package/dist/models/providers/groq.d.ts +8 -0
- package/dist/models/providers/hicap.d.ts +5 -0
- package/dist/models/providers/huawei-cloud-maas.d.ts +5 -0
- package/dist/models/providers/huggingface.d.ts +6 -0
- package/dist/models/providers/index.d.ts +45 -0
- package/dist/models/providers/litellm.d.ts +5 -0
- package/dist/models/providers/lmstudio.d.ts +5 -0
- package/dist/models/providers/minimax.d.ts +7 -0
- package/dist/models/providers/mistral.d.ts +5 -0
- package/dist/models/providers/moonshot.d.ts +7 -0
- package/dist/models/providers/nebius.d.ts +7 -0
- package/dist/models/providers/nous-research.d.ts +7 -0
- package/dist/models/providers/oca.d.ts +9 -0
- package/dist/models/providers/ollama.d.ts +5 -0
- package/dist/models/providers/openai-codex.d.ts +10 -0
- package/dist/models/providers/openai.d.ts +9 -0
- package/dist/models/providers/opencode.d.ts +10 -0
- package/dist/models/providers/openrouter.d.ts +7 -0
- package/dist/models/providers/qwen-code.d.ts +7 -0
- package/dist/models/providers/qwen.d.ts +7 -0
- package/dist/models/providers/requesty.d.ts +6 -0
- package/dist/models/providers/sambanova.d.ts +7 -0
- package/dist/models/providers/sapaicore.d.ts +7 -0
- package/dist/models/providers/together.d.ts +8 -0
- package/dist/models/providers/vercel-ai-gateway.d.ts +5 -0
- package/dist/models/providers/vertex.d.ts +7 -0
- package/dist/models/providers/xai.d.ts +8 -0
- package/dist/models/providers/zai.d.ts +7 -0
- package/dist/models/query.d.ts +181 -0
- package/dist/models/registry.d.ts +123 -0
- package/dist/models/schemas/index.d.ts +7 -0
- package/dist/models/schemas/model.d.ts +340 -0
- package/dist/models/schemas/query.d.ts +191 -0
- package/dist/providers/handlers/ai-sdk-community.d.ts +46 -0
- package/dist/providers/handlers/ai-sdk-provider-base.d.ts +32 -0
- package/dist/providers/handlers/anthropic-base.d.ts +26 -0
- package/dist/providers/handlers/asksage.d.ts +12 -0
- package/dist/providers/handlers/auth.d.ts +5 -0
- package/dist/providers/handlers/base.d.ts +55 -0
- package/dist/providers/handlers/bedrock-base.d.ts +23 -0
- package/dist/providers/handlers/bedrock-client.d.ts +4 -0
- package/dist/providers/handlers/community-sdk.d.ts +97 -0
- package/dist/providers/handlers/fetch-base.d.ts +18 -0
- package/dist/providers/handlers/gemini-base.d.ts +25 -0
- package/dist/providers/handlers/index.d.ts +19 -0
- package/dist/providers/handlers/openai-base.d.ts +54 -0
- package/dist/providers/handlers/openai-responses.d.ts +64 -0
- package/dist/providers/handlers/providers.d.ts +43 -0
- package/dist/providers/handlers/r1-base.d.ts +62 -0
- package/dist/providers/handlers/registry.d.ts +106 -0
- package/dist/providers/handlers/vertex.d.ts +32 -0
- package/dist/providers/index.d.ts +100 -0
- package/dist/providers/public.browser.d.ts +2 -0
- package/dist/providers/public.d.ts +3 -0
- package/dist/providers/shared/openai-compatible.d.ts +10 -0
- package/dist/providers/transform/ai-sdk-community-format.d.ts +9 -0
- package/dist/providers/transform/anthropic-format.d.ts +24 -0
- package/dist/providers/transform/content-format.d.ts +3 -0
- package/dist/providers/transform/gemini-format.d.ts +19 -0
- package/dist/providers/transform/index.d.ts +10 -0
- package/dist/providers/transform/openai-format.d.ts +36 -0
- package/dist/providers/transform/r1-format.d.ts +26 -0
- package/dist/providers/types/config.d.ts +261 -0
- package/dist/providers/types/handler.d.ts +71 -0
- package/dist/providers/types/index.d.ts +11 -0
- package/dist/providers/types/messages.d.ts +139 -0
- package/dist/providers/types/model-info.d.ts +32 -0
- package/dist/providers/types/provider-ids.d.ts +63 -0
- package/dist/providers/types/settings.d.ts +308 -0
- package/dist/providers/types/stream.d.ts +106 -0
- package/dist/providers/utils/index.d.ts +7 -0
- package/dist/providers/utils/retry.d.ts +38 -0
- package/dist/providers/utils/stream-processor.d.ts +110 -0
- package/dist/providers/utils/tool-processor.d.ts +34 -0
- package/dist/sdk.d.ts +18 -0
- package/dist/types.d.ts +60 -0
- package/package.json +66 -0
- package/src/catalog.ts +20 -0
- package/src/config-browser.ts +11 -0
- package/src/config.ts +49 -0
- package/src/index.browser.ts +9 -0
- package/src/index.ts +10 -0
- package/src/live-providers.test.ts +137 -0
- package/src/models/generated-access.ts +41 -0
- package/src/models/generated-provider-loaders.ts +166 -0
- package/src/models/generated.ts +11997 -0
- package/src/models/index.ts +271 -0
- package/src/models/models-dev-catalog.test.ts +161 -0
- package/src/models/models-dev-catalog.ts +161 -0
- package/src/models/providers/aihubmix.ts +19 -0
- package/src/models/providers/anthropic.ts +60 -0
- package/src/models/providers/asksage.ts +19 -0
- package/src/models/providers/baseten.ts +21 -0
- package/src/models/providers/bedrock.ts +30 -0
- package/src/models/providers/cerebras.ts +24 -0
- package/src/models/providers/claude-code.ts +51 -0
- package/src/models/providers/cline.ts +25 -0
- package/src/models/providers/deepseek.ts +33 -0
- package/src/models/providers/dify.ts +17 -0
- package/src/models/providers/doubao.ts +33 -0
- package/src/models/providers/fireworks.ts +34 -0
- package/src/models/providers/gemini.ts +43 -0
- package/src/models/providers/groq.ts +33 -0
- package/src/models/providers/hicap.ts +18 -0
- package/src/models/providers/huawei-cloud-maas.ts +18 -0
- package/src/models/providers/huggingface.ts +22 -0
- package/src/models/providers/index.ts +162 -0
- package/src/models/providers/litellm.ts +19 -0
- package/src/models/providers/lmstudio.ts +22 -0
- package/src/models/providers/minimax.ts +34 -0
- package/src/models/providers/mistral.ts +19 -0
- package/src/models/providers/moonshot.ts +34 -0
- package/src/models/providers/nebius.ts +24 -0
- package/src/models/providers/nous-research.ts +21 -0
- package/src/models/providers/oca.ts +30 -0
- package/src/models/providers/ollama.ts +18 -0
- package/src/models/providers/openai-codex.ts +30 -0
- package/src/models/providers/openai.ts +43 -0
- package/src/models/providers/opencode.ts +28 -0
- package/src/models/providers/openrouter.ts +24 -0
- package/src/models/providers/qwen-code.ts +33 -0
- package/src/models/providers/qwen.ts +34 -0
- package/src/models/providers/requesty.ts +23 -0
- package/src/models/providers/sambanova.ts +23 -0
- package/src/models/providers/sapaicore.ts +34 -0
- package/src/models/providers/together.ts +35 -0
- package/src/models/providers/vercel-ai-gateway.ts +23 -0
- package/src/models/providers/vertex.ts +36 -0
- package/src/models/providers/xai.ts +34 -0
- package/src/models/providers/zai.ts +25 -0
- package/src/models/query.ts +407 -0
- package/src/models/registry.ts +511 -0
- package/src/models/schemas/index.ts +62 -0
- package/src/models/schemas/model.ts +308 -0
- package/src/models/schemas/query.ts +336 -0
- package/src/providers/browser.ts +4 -0
- package/src/providers/handlers/ai-sdk-community.ts +226 -0
- package/src/providers/handlers/ai-sdk-provider-base.ts +193 -0
- package/src/providers/handlers/anthropic-base.ts +372 -0
- package/src/providers/handlers/asksage.test.ts +103 -0
- package/src/providers/handlers/asksage.ts +138 -0
- package/src/providers/handlers/auth.test.ts +19 -0
- package/src/providers/handlers/auth.ts +121 -0
- package/src/providers/handlers/base.test.ts +46 -0
- package/src/providers/handlers/base.ts +160 -0
- package/src/providers/handlers/bedrock-base.ts +390 -0
- package/src/providers/handlers/bedrock-client.ts +100 -0
- package/src/providers/handlers/codex.test.ts +123 -0
- package/src/providers/handlers/community-sdk.test.ts +288 -0
- package/src/providers/handlers/community-sdk.ts +392 -0
- package/src/providers/handlers/fetch-base.ts +68 -0
- package/src/providers/handlers/gemini-base.ts +302 -0
- package/src/providers/handlers/index.ts +67 -0
- package/src/providers/handlers/openai-base.ts +277 -0
- package/src/providers/handlers/openai-responses.ts +598 -0
- package/src/providers/handlers/providers.test.ts +120 -0
- package/src/providers/handlers/providers.ts +563 -0
- package/src/providers/handlers/r1-base.ts +280 -0
- package/src/providers/handlers/registry.ts +185 -0
- package/src/providers/handlers/vertex.test.ts +124 -0
- package/src/providers/handlers/vertex.ts +292 -0
- package/src/providers/index.ts +534 -0
- package/src/providers/public.browser.ts +20 -0
- package/src/providers/public.ts +51 -0
- package/src/providers/shared/openai-compatible.ts +63 -0
- package/src/providers/transform/ai-sdk-community-format.test.ts +73 -0
- package/src/providers/transform/ai-sdk-community-format.ts +115 -0
- package/src/providers/transform/anthropic-format.ts +218 -0
- package/src/providers/transform/content-format.ts +34 -0
- package/src/providers/transform/format-conversion.test.ts +310 -0
- package/src/providers/transform/gemini-format.ts +167 -0
- package/src/providers/transform/index.ts +22 -0
- package/src/providers/transform/openai-format.ts +247 -0
- package/src/providers/transform/r1-format.ts +287 -0
- package/src/providers/types/config.ts +388 -0
- package/src/providers/types/handler.ts +87 -0
- package/src/providers/types/index.ts +120 -0
- package/src/providers/types/messages.ts +158 -0
- package/src/providers/types/model-info.test.ts +57 -0
- package/src/providers/types/model-info.ts +65 -0
- package/src/providers/types/provider-ids.test.ts +12 -0
- package/src/providers/types/provider-ids.ts +89 -0
- package/src/providers/types/settings.test.ts +49 -0
- package/src/providers/types/settings.ts +533 -0
- package/src/providers/types/stream.ts +117 -0
- package/src/providers/utils/index.ts +27 -0
- package/src/providers/utils/retry.test.ts +140 -0
- package/src/providers/utils/retry.ts +188 -0
- package/src/providers/utils/stream-processor.test.ts +232 -0
- package/src/providers/utils/stream-processor.ts +472 -0
- package/src/providers/utils/tool-processor.test.ts +34 -0
- package/src/providers/utils/tool-processor.ts +111 -0
- package/src/sdk.ts +264 -0
- package/src/types.ts +79 -0
|
@@ -0,0 +1,100 @@
|
|
|
1
|
+
import type { ProviderConfig } from "../types";
|
|
2
|
+
|
|
3
|
+
// Factory shape returned by the Bedrock provider: maps a model id to an
// AI SDK language-model instance. Typed as `unknown` so this module does not
// take a compile-time dependency on @ai-sdk/amazon-bedrock's types.
type BedrockFactory = (modelId: string) => unknown;

// Module-level memo for the dynamically imported `createAmazonBedrock`
// factory from "@ai-sdk/amazon-bedrock". Stays undefined until the first
// successful load; the option shape mirrors what createBedrockClient passes.
let cachedCreateAmazonBedrock:
  | ((options: {
      region?: string;
      accessKeyId?: string;
      secretAccessKey?: string;
      sessionToken?: string;
      apiKey?: string;
      baseURL?: string;
      headers?: Record<string, string>;
      // Async credential resolver (e.g. AWS profile / default chain).
      credentialProvider?: () => PromiseLike<{
        accessKeyId: string;
        secretAccessKey: string;
        sessionToken?: string;
      }>;
    }) => BedrockFactory)
  | undefined;

// Module-level memo for `fromNodeProviderChain` from
// "@aws-sdk/credential-providers"; populated on first successful load.
let cachedFromNodeProviderChain:
  | ((options?: { profile?: string }) => () => PromiseLike<{
      accessKeyId: string;
      secretAccessKey: string;
      sessionToken?: string;
    }>)
  | undefined;
|
|
29
|
+
|
|
30
|
+
async function loadCreateAmazonBedrock() {
|
|
31
|
+
if (cachedCreateAmazonBedrock) {
|
|
32
|
+
return cachedCreateAmazonBedrock;
|
|
33
|
+
}
|
|
34
|
+
|
|
35
|
+
const moduleName = "@ai-sdk/amazon-bedrock";
|
|
36
|
+
const mod = (await import(moduleName)) as {
|
|
37
|
+
createAmazonBedrock?: typeof cachedCreateAmazonBedrock;
|
|
38
|
+
};
|
|
39
|
+
if (!mod.createAmazonBedrock) {
|
|
40
|
+
throw new Error(`Failed to load createAmazonBedrock from ${moduleName}`);
|
|
41
|
+
}
|
|
42
|
+
|
|
43
|
+
cachedCreateAmazonBedrock = mod.createAmazonBedrock;
|
|
44
|
+
return cachedCreateAmazonBedrock;
|
|
45
|
+
}
|
|
46
|
+
|
|
47
|
+
async function loadFromNodeProviderChain() {
|
|
48
|
+
if (cachedFromNodeProviderChain) {
|
|
49
|
+
return cachedFromNodeProviderChain;
|
|
50
|
+
}
|
|
51
|
+
|
|
52
|
+
const moduleName = "@aws-sdk/credential-providers";
|
|
53
|
+
const mod = (await import(moduleName)) as {
|
|
54
|
+
fromNodeProviderChain?: typeof cachedFromNodeProviderChain;
|
|
55
|
+
};
|
|
56
|
+
if (!mod.fromNodeProviderChain) {
|
|
57
|
+
throw new Error(`Failed to load fromNodeProviderChain from ${moduleName}`);
|
|
58
|
+
}
|
|
59
|
+
|
|
60
|
+
cachedFromNodeProviderChain = mod.fromNodeProviderChain;
|
|
61
|
+
return cachedFromNodeProviderChain;
|
|
62
|
+
}
|
|
63
|
+
|
|
64
|
+
export async function createBedrockClient(
|
|
65
|
+
config: ProviderConfig,
|
|
66
|
+
defaultHeaders: Record<string, string>,
|
|
67
|
+
): Promise<BedrockFactory> {
|
|
68
|
+
const createAmazonBedrock = await loadCreateAmazonBedrock();
|
|
69
|
+
|
|
70
|
+
const region = config.region ?? "us-east-1";
|
|
71
|
+
const authentication = config.aws?.authentication;
|
|
72
|
+
const hasExplicitKeys = Boolean(
|
|
73
|
+
config.aws?.accessKey && config.aws?.secretKey,
|
|
74
|
+
);
|
|
75
|
+
const shouldUseCredentialChain =
|
|
76
|
+
authentication === "profile" ||
|
|
77
|
+
authentication === "iam" ||
|
|
78
|
+
(!authentication && !hasExplicitKeys && !config.apiKey);
|
|
79
|
+
|
|
80
|
+
let credentialProvider:
|
|
81
|
+
| ReturnType<Exclude<typeof cachedFromNodeProviderChain, undefined>>
|
|
82
|
+
| undefined;
|
|
83
|
+
if (shouldUseCredentialChain) {
|
|
84
|
+
const fromNodeProviderChain = await loadFromNodeProviderChain();
|
|
85
|
+
credentialProvider = fromNodeProviderChain({
|
|
86
|
+
profile: config.aws?.profile,
|
|
87
|
+
});
|
|
88
|
+
}
|
|
89
|
+
|
|
90
|
+
return createAmazonBedrock({
|
|
91
|
+
region,
|
|
92
|
+
accessKeyId: config.aws?.accessKey ?? undefined,
|
|
93
|
+
secretAccessKey: config.aws?.secretKey ?? undefined,
|
|
94
|
+
sessionToken: config.aws?.sessionToken ?? undefined,
|
|
95
|
+
apiKey: config.apiKey ?? undefined,
|
|
96
|
+
baseURL: config.aws?.endpoint ?? config.baseUrl ?? undefined,
|
|
97
|
+
headers: defaultHeaders,
|
|
98
|
+
credentialProvider,
|
|
99
|
+
});
|
|
100
|
+
}
|
|
@@ -0,0 +1,123 @@
|
|
|
1
|
+
import { beforeEach, describe, expect, it, vi } from "vitest";
|
|
2
|
+
import { CodexHandler } from "./community-sdk";
|
|
3
|
+
|
|
4
|
+
// Spy capturing the argument object passed to the AI SDK's `streamText`.
const streamTextSpy = vi.fn();
// Spy recording which model id the codex-cli provider factory was asked for.
const codexCliSpy = vi.fn((modelId: string) => ({ modelId }));
// Captures the options given to the most recent `createCodexCli` call so
// tests can inspect env mapping (e.g. OPENAI_API_KEY) after streaming.
let lastCreateCodexCliOptions: Record<string, unknown> | undefined;

// Replace the AI SDK so `streamText` routes through the spy above.
vi.mock("ai", () => ({
  streamText: (input: unknown) => streamTextSpy(input),
}));

// Replace the codex-cli provider package: `createCodexCli` records its
// options and returns a factory that forwards the model id to `codexCliSpy`.
vi.mock("ai-sdk-provider-codex-cli", () => ({
  codexCli: (modelId: string) => codexCliSpy(modelId),
  createCodexCli: (options?: Record<string, unknown>) => {
    lastCreateCodexCliOptions = options;
    return (modelId: string) => codexCliSpy(modelId);
  },
}));
|
|
19
|
+
|
|
20
|
+
async function* makeStreamParts(parts: unknown[]) {
|
|
21
|
+
for (const part of parts) {
|
|
22
|
+
yield part;
|
|
23
|
+
}
|
|
24
|
+
}
|
|
25
|
+
|
|
26
|
+
describe("CodexHandler", () => {
  beforeEach(() => {
    // Reset spies and captured createCodexCli options between tests.
    vi.clearAllMocks();
    lastCreateCodexCliOptions = undefined;
  });

  it("streams text and usage through AI SDK fullStream", async () => {
    // Simulate a fullStream that emits one text delta then a finish part
    // carrying token usage.
    streamTextSpy.mockReturnValue({
      fullStream: makeStreamParts([
        { type: "text-delta", textDelta: "Hello" },
        {
          type: "finish",
          usage: { inputTokens: 12, outputTokens: 4 },
        },
      ]),
    });

    const handler = new CodexHandler({
      providerId: "openai-codex",
      modelId: "gpt-5.2-codex",
    });

    // Drain the handler's stream, collecting every emitted chunk.
    const chunks: Array<Record<string, unknown>> = [];
    for await (const chunk of handler.createMessage("System", [
      { role: "user", content: "Hi" },
    ])) {
      chunks.push(chunk as unknown as Record<string, unknown>);
    }

    // The configured model id must reach the codex-cli provider unchanged.
    expect(codexCliSpy).toHaveBeenCalledWith("gpt-5.2-codex");
    // Expect text, then usage, then a terminating done chunk.
    expect(chunks.map((chunk) => chunk.type)).toEqual([
      "text",
      "usage",
      "done",
    ]);
    expect(chunks[0]?.text).toBe("Hello");
    expect(chunks[1]?.inputTokens).toBe(12);
    expect(chunks[1]?.outputTokens).toBe(4);
  });

  it("uses a fallback model id when model is missing", () => {
    const handler = new CodexHandler({
      providerId: "openai-codex",
      modelId: "",
    });

    // Empty model id resolves to the handler's documented default.
    expect(handler.getModel().id).toBe("gpt-5.3-codex");
  });

  it("does not map OAuth access tokens to OPENAI_API_KEY env", async () => {
    streamTextSpy.mockReturnValue({
      fullStream: makeStreamParts([{ type: "finish" }]),
    });

    // apiKey here holds an OAuth token shorthand alongside an accessToken;
    // neither should be forwarded as an OpenAI API key.
    const handler = new CodexHandler({
      providerId: "openai-codex",
      modelId: "gpt-5.3-codex",
      apiKey: "oauth-token-shorthand",
      accessToken: "oauth-access-token",
    });

    for await (const _chunk of handler.createMessage("System", [
      { role: "user", content: "Hi" },
    ])) {
      // consume stream
    }

    const createOptions = lastCreateCodexCliOptions as
      | { defaultSettings?: { env?: Record<string, string> } }
      | undefined;
    expect(createOptions?.defaultSettings?.env?.OPENAI_API_KEY).toBeUndefined();
  });

  it("maps explicit OpenAI API keys to OPENAI_API_KEY env", async () => {
    streamTextSpy.mockReturnValue({
      fullStream: makeStreamParts([{ type: "finish" }]),
    });

    // A conventional "sk-" API key (no accessToken) should be exported to
    // the codex-cli subprocess environment.
    const handler = new CodexHandler({
      providerId: "openai-codex",
      modelId: "gpt-5.3-codex",
      apiKey: "sk-test-key",
    });

    for await (const _chunk of handler.createMessage("System", [
      { role: "user", content: "Hi" },
    ])) {
      // consume stream
    }

    const createOptions = lastCreateCodexCliOptions as
      | { defaultSettings?: { env?: Record<string, string> } }
      | undefined;
    expect(createOptions?.defaultSettings?.env?.OPENAI_API_KEY).toBe(
      "sk-test-key",
    );
  });
});
|
|
@@ -0,0 +1,288 @@
|
|
|
1
|
+
import { beforeEach, describe, expect, it, vi } from "vitest";
|
|
2
|
+
import type { ApiStreamChunk } from "../types";
|
|
3
|
+
import {
|
|
4
|
+
ClaudeCodeHandler,
|
|
5
|
+
DifyHandler,
|
|
6
|
+
MistralHandler,
|
|
7
|
+
OpenCodeHandler,
|
|
8
|
+
SapAiCoreHandler,
|
|
9
|
+
} from "./community-sdk";
|
|
10
|
+
|
|
11
|
+
// Spy capturing the argument object passed to the AI SDK's `streamText`.
const streamTextSpy = vi.fn();
// Per-provider spies recording which model id each provider factory receives.
const claudeCodeSpy = vi.fn((modelId: string) => ({ modelId }));
const opencodeSpy = vi.fn((modelId: string) => ({ modelId }));
const mistralSpy = vi.fn((modelId: string) => ({ modelId }));
// Dify additionally receives per-model settings (responseMode, apiKey, …).
const difySpy = vi.fn(
  (modelId: string, settings?: Record<string, unknown>) => ({
    modelId,
    settings,
  }),
);
const sapAiProviderSpy = vi.fn((modelId: string) => ({ modelId }));
// Capture the options handed to each provider's create* function so tests
// can assert on the mapped configuration after streaming.
let lastCreateSapAiProviderOptions: Record<string, unknown> | undefined;
let lastCreateDifyProviderOptions: Record<string, unknown> | undefined;

// Route the AI SDK's `streamText` through the spy above.
vi.mock("ai", () => ({
  streamText: (input: unknown) => streamTextSpy(input),
}));

vi.mock("ai-sdk-provider-claude-code", () => ({
  claudeCode: (modelId: string) => claudeCodeSpy(modelId),
  createClaudeCode: () => (modelId: string) => claudeCodeSpy(modelId),
}));

vi.mock("ai-sdk-provider-opencode-sdk", () => ({
  opencode: (modelId: string) => opencodeSpy(modelId),
  createOpencode: () => (modelId: string) => opencodeSpy(modelId),
}));

vi.mock("@ai-sdk/mistral", () => ({
  mistral: (modelId: string) => mistralSpy(modelId),
  createMistral: () => (modelId: string) => mistralSpy(modelId),
}));

// `createDifyProvider` records its options, then forwards model id and
// settings to `difySpy`.
vi.mock("dify-ai-provider", () => ({
  difyProvider: (modelId: string, settings?: Record<string, unknown>) =>
    difySpy(modelId, settings),
  createDifyProvider: (options?: Record<string, unknown>) => {
    lastCreateDifyProviderOptions = options;
    return (modelId: string, settings?: Record<string, unknown>) =>
      difySpy(modelId, settings);
  },
}));

// `createSAPAIProvider` records its options before returning the factory.
vi.mock("@jerome-benoit/sap-ai-provider", () => ({
  sapai: (modelId: string) => sapAiProviderSpy(modelId),
  createSAPAIProvider: (options?: Record<string, unknown>) => {
    lastCreateSapAiProviderOptions = options;
    return (modelId: string) => sapAiProviderSpy(modelId);
  },
}));
|
|
61
|
+
|
|
62
|
+
async function* makeStreamParts(parts: unknown[]) {
|
|
63
|
+
for (const part of parts) {
|
|
64
|
+
yield part;
|
|
65
|
+
}
|
|
66
|
+
}
|
|
67
|
+
|
|
68
|
+
describe("Community SDK handlers", () => {
  beforeEach(() => {
    // Reset spies and captured create-options between tests.
    vi.clearAllMocks();
    lastCreateSapAiProviderOptions = undefined;
    lastCreateDifyProviderOptions = undefined;
  });

  describe("ClaudeCodeHandler", () => {
    it("streams text and usage through AI SDK fullStream", async () => {
      // Simulate a fullStream emitting one text delta then a finish part
      // carrying token usage.
      streamTextSpy.mockReturnValue({
        fullStream: makeStreamParts([
          { type: "text-delta", textDelta: "Hello" },
          {
            type: "finish",
            usage: { inputTokens: 10, outputTokens: 3 },
          },
        ]),
      });

      const handler = new ClaudeCodeHandler({
        providerId: "claude-code",
        modelId: "sonnet",
      });

      // Drain the handler's stream, collecting every emitted chunk.
      const chunks: ApiStreamChunk[] = [];
      for await (const chunk of handler.createMessage("System", [
        { role: "user", content: "Hi" },
      ])) {
        chunks.push(chunk);
      }

      // Model id must reach the claude-code provider unchanged.
      expect(claudeCodeSpy).toHaveBeenCalledWith("sonnet");
      expect(chunks.map((chunk) => chunk.type)).toEqual([
        "text",
        "usage",
        "done",
      ]);
      // Narrow to the specific chunk variants before asserting fields.
      const textChunk = chunks.find(
        (chunk): chunk is Extract<ApiStreamChunk, { type: "text" }> =>
          chunk.type === "text",
      );
      const usageChunk = chunks.find(
        (chunk): chunk is Extract<ApiStreamChunk, { type: "usage" }> =>
          chunk.type === "usage",
      );
      expect(textChunk?.text).toBe("Hello");
      expect(usageChunk?.inputTokens).toBe(10);
      expect(usageChunk?.outputTokens).toBe(3);
    });

    it("uses a fallback model id when model is missing", () => {
      const handler = new ClaudeCodeHandler({
        providerId: "claude-code",
        modelId: "",
      });

      // Empty model id resolves to the handler's default alias.
      expect(handler.getModel().id).toBe("sonnet");
    });
  });

  describe("MistralHandler", () => {
    it("uses a fallback model id when model is missing", () => {
      const handler = new MistralHandler({
        providerId: "mistral",
        modelId: "",
      });

      expect(handler.getModel().id).toBe("mistral-medium-latest");
    });
  });

  describe("DifyHandler", () => {
    it("passes baseURL and apiKey model settings to dify provider", async () => {
      streamTextSpy.mockReturnValue({
        fullStream: makeStreamParts([{ type: "finish", usage: {} }]),
      });

      const handler = new DifyHandler({
        providerId: "dify",
        modelId: "workflow-123",
        apiKey: "dify-key",
        baseUrl: "https://dify.example.com/v1",
      });

      for await (const _chunk of handler.createMessage("System", [
        { role: "user", content: "Hi" },
      ])) {
        // noop
      }

      // baseURL goes to the provider constructor; apiKey travels with the
      // per-model settings alongside the blocking response mode.
      expect(lastCreateDifyProviderOptions).toEqual({
        baseURL: "https://dify.example.com/v1",
      });
      expect(difySpy).toHaveBeenCalledWith("workflow-123", {
        responseMode: "blocking",
        apiKey: "dify-key",
      });
    });
  });

  describe("OpenCodeHandler", () => {
    it("streams text and usage through AI SDK fullStream", async () => {
      streamTextSpy.mockReturnValue({
        fullStream: makeStreamParts([
          { type: "text-delta", textDelta: "Hello" },
          {
            type: "finish",
            usage: { inputTokens: 10, outputTokens: 3 },
          },
        ]),
      });

      const handler = new OpenCodeHandler({
        providerId: "opencode",
        modelId: "gpt-5.1-codex",
      });

      const chunks: ApiStreamChunk[] = [];
      for await (const chunk of handler.createMessage("System", [
        { role: "user", content: "Hi" },
      ])) {
        chunks.push(chunk);
      }

      // A bare model id is prefixed with its provider namespace.
      expect(opencodeSpy).toHaveBeenCalledWith("openai/gpt-5.1-codex");
      expect(chunks.map((chunk) => chunk.type)).toEqual([
        "text",
        "usage",
        "done",
      ]);
      const textChunk = chunks.find(
        (chunk): chunk is Extract<ApiStreamChunk, { type: "text" }> =>
          chunk.type === "text",
      );
      const usageChunk = chunks.find(
        (chunk): chunk is Extract<ApiStreamChunk, { type: "usage" }> =>
          chunk.type === "usage",
      );
      expect(textChunk?.text).toBe("Hello");
      expect(usageChunk?.inputTokens).toBe(10);
      expect(usageChunk?.outputTokens).toBe(3);
    });

    it("uses full model IDs without changes", async () => {
      streamTextSpy.mockReturnValue({
        fullStream: makeStreamParts([{ type: "finish", usage: {} }]),
      });

      // A model id that already carries a "provider/" prefix passes through.
      const handler = new OpenCodeHandler({
        providerId: "opencode",
        modelId: "openai/gpt-5.1-codex-max",
      });

      for await (const _chunk of handler.createMessage("System", [
        { role: "user", content: "Hi" },
      ])) {
        // noop
      }

      expect(opencodeSpy).toHaveBeenCalledWith("openai/gpt-5.1-codex-max");
    });
  });

  describe("SapAiCoreHandler", () => {
    it("uses a fallback model id when model is missing", () => {
      const handler = new SapAiCoreHandler({
        providerId: "sapaicore",
        modelId: "",
      });

      expect(handler.getModel().id).toBe("anthropic--claude-3.5-sonnet");
    });

    it("maps sap config to provider create options and streams text", async () => {
      streamTextSpy.mockReturnValue({
        fullStream: makeStreamParts([
          { type: "text-delta", textDelta: "Hello" },
          {
            type: "finish",
            usage: { inputTokens: 10, outputTokens: 3 },
          },
        ]),
      });

      // useOrchestrationMode: false is expected to map to the
      // "foundation-models" api in the provider create options below.
      const handler = new SapAiCoreHandler({
        providerId: "sapaicore",
        modelId: "gpt-4o",
        sap: {
          resourceGroup: "default",
          deploymentId: "dep-123",
          useOrchestrationMode: false,
          defaultSettings: {
            modelParams: { temperature: 0 },
          },
        },
      });

      const chunks: ApiStreamChunk[] = [];
      for await (const chunk of handler.createMessage("System", [
        { role: "user", content: "Hi" },
      ])) {
        chunks.push(chunk);
      }

      expect(sapAiProviderSpy).toHaveBeenCalledWith("gpt-4o");
      expect(lastCreateSapAiProviderOptions).toEqual({
        resourceGroup: "default",
        deploymentId: "dep-123",
        api: "foundation-models",
        defaultSettings: {
          modelParams: { temperature: 0 },
        },
      });
      expect(chunks.map((chunk) => chunk.type)).toEqual([
        "text",
        "usage",
        "done",
      ]);
    });
  });
});
|