@clinebot/llms 0.0.20 → 0.0.22
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/config-browser.d.ts +1 -0
- package/dist/config-browser.d.ts.map +1 -0
- package/dist/config.d.ts +1 -0
- package/dist/config.d.ts.map +1 -0
- package/dist/index.browser.d.ts +1 -0
- package/dist/index.browser.d.ts.map +1 -0
- package/dist/index.browser.js +5 -5
- package/dist/index.d.ts +1 -0
- package/dist/index.d.ts.map +1 -0
- package/dist/index.js +12 -12
- package/dist/models/generated-access.d.ts +1 -0
- package/dist/models/generated-access.d.ts.map +1 -0
- package/dist/models/generated-provider-loaders.d.ts +1 -0
- package/dist/models/generated-provider-loaders.d.ts.map +1 -0
- package/dist/models/generated.d.ts +1 -0
- package/dist/models/generated.d.ts.map +1 -0
- package/dist/models/index.d.ts +1 -0
- package/dist/models/index.d.ts.map +1 -0
- package/dist/models/models-dev-catalog.d.ts +1 -0
- package/dist/models/models-dev-catalog.d.ts.map +1 -0
- package/dist/models/providers/aihubmix.d.ts +1 -0
- package/dist/models/providers/aihubmix.d.ts.map +1 -0
- package/dist/models/providers/anthropic.d.ts +1 -0
- package/dist/models/providers/anthropic.d.ts.map +1 -0
- package/dist/models/providers/asksage.d.ts +1 -0
- package/dist/models/providers/asksage.d.ts.map +1 -0
- package/dist/models/providers/baseten.d.ts +1 -0
- package/dist/models/providers/baseten.d.ts.map +1 -0
- package/dist/models/providers/bedrock.d.ts +1 -0
- package/dist/models/providers/bedrock.d.ts.map +1 -0
- package/dist/models/providers/cerebras.d.ts +1 -0
- package/dist/models/providers/cerebras.d.ts.map +1 -0
- package/dist/models/providers/claude-code.d.ts +1 -0
- package/dist/models/providers/claude-code.d.ts.map +1 -0
- package/dist/models/providers/cline.d.ts +1 -0
- package/dist/models/providers/cline.d.ts.map +1 -0
- package/dist/models/providers/deepseek.d.ts +1 -0
- package/dist/models/providers/deepseek.d.ts.map +1 -0
- package/dist/models/providers/dify.d.ts +1 -0
- package/dist/models/providers/dify.d.ts.map +1 -0
- package/dist/models/providers/doubao.d.ts +1 -0
- package/dist/models/providers/doubao.d.ts.map +1 -0
- package/dist/models/providers/fireworks.d.ts +1 -0
- package/dist/models/providers/fireworks.d.ts.map +1 -0
- package/dist/models/providers/gemini.d.ts +1 -0
- package/dist/models/providers/gemini.d.ts.map +1 -0
- package/dist/models/providers/groq.d.ts +1 -0
- package/dist/models/providers/groq.d.ts.map +1 -0
- package/dist/models/providers/hicap.d.ts +1 -0
- package/dist/models/providers/hicap.d.ts.map +1 -0
- package/dist/models/providers/huawei-cloud-maas.d.ts +1 -0
- package/dist/models/providers/huawei-cloud-maas.d.ts.map +1 -0
- package/dist/models/providers/huggingface.d.ts +1 -0
- package/dist/models/providers/huggingface.d.ts.map +1 -0
- package/dist/models/providers/index.d.ts +1 -0
- package/dist/models/providers/index.d.ts.map +1 -0
- package/dist/models/providers/litellm.d.ts +1 -0
- package/dist/models/providers/litellm.d.ts.map +1 -0
- package/dist/models/providers/lmstudio.d.ts +1 -0
- package/dist/models/providers/lmstudio.d.ts.map +1 -0
- package/dist/models/providers/minimax.d.ts +1 -0
- package/dist/models/providers/minimax.d.ts.map +1 -0
- package/dist/models/providers/mistral.d.ts +1 -0
- package/dist/models/providers/mistral.d.ts.map +1 -0
- package/dist/models/providers/moonshot.d.ts +1 -0
- package/dist/models/providers/moonshot.d.ts.map +1 -0
- package/dist/models/providers/nebius.d.ts +1 -0
- package/dist/models/providers/nebius.d.ts.map +1 -0
- package/dist/models/providers/nous-research.d.ts +1 -0
- package/dist/models/providers/nous-research.d.ts.map +1 -0
- package/dist/models/providers/oca.d.ts +1 -0
- package/dist/models/providers/oca.d.ts.map +1 -0
- package/dist/models/providers/ollama.d.ts +1 -0
- package/dist/models/providers/ollama.d.ts.map +1 -0
- package/dist/models/providers/openai-codex.d.ts +1 -0
- package/dist/models/providers/openai-codex.d.ts.map +1 -0
- package/dist/models/providers/openai.d.ts +1 -0
- package/dist/models/providers/openai.d.ts.map +1 -0
- package/dist/models/providers/opencode.d.ts +1 -0
- package/dist/models/providers/opencode.d.ts.map +1 -0
- package/dist/models/providers/openrouter.d.ts +1 -0
- package/dist/models/providers/openrouter.d.ts.map +1 -0
- package/dist/models/providers/qwen-code.d.ts +1 -0
- package/dist/models/providers/qwen-code.d.ts.map +1 -0
- package/dist/models/providers/qwen.d.ts +1 -0
- package/dist/models/providers/qwen.d.ts.map +1 -0
- package/dist/models/providers/requesty.d.ts +1 -0
- package/dist/models/providers/requesty.d.ts.map +1 -0
- package/dist/models/providers/sambanova.d.ts +1 -0
- package/dist/models/providers/sambanova.d.ts.map +1 -0
- package/dist/models/providers/sapaicore.d.ts +1 -0
- package/dist/models/providers/sapaicore.d.ts.map +1 -0
- package/dist/models/providers/together.d.ts +1 -0
- package/dist/models/providers/together.d.ts.map +1 -0
- package/dist/models/providers/vercel-ai-gateway.d.ts +1 -0
- package/dist/models/providers/vercel-ai-gateway.d.ts.map +1 -0
- package/dist/models/providers/vertex.d.ts +1 -0
- package/dist/models/providers/vertex.d.ts.map +1 -0
- package/dist/models/providers/xai.d.ts +1 -0
- package/dist/models/providers/xai.d.ts.map +1 -0
- package/dist/models/providers/zai.d.ts +1 -0
- package/dist/models/providers/zai.d.ts.map +1 -0
- package/dist/models/query.d.ts +1 -0
- package/dist/models/query.d.ts.map +1 -0
- package/dist/models/registry.d.ts +1 -0
- package/dist/models/registry.d.ts.map +1 -0
- package/dist/models/schemas/index.d.ts +1 -0
- package/dist/models/schemas/index.d.ts.map +1 -0
- package/dist/models/schemas/model.d.ts +1 -0
- package/dist/models/schemas/model.d.ts.map +1 -0
- package/dist/models/schemas/query.d.ts +1 -0
- package/dist/models/schemas/query.d.ts.map +1 -0
- package/dist/providers/handlers/ai-sdk-community.d.ts +1 -0
- package/dist/providers/handlers/ai-sdk-community.d.ts.map +1 -0
- package/dist/providers/handlers/ai-sdk-provider-base.d.ts +1 -0
- package/dist/providers/handlers/ai-sdk-provider-base.d.ts.map +1 -0
- package/dist/providers/handlers/anthropic-base.d.ts +1 -0
- package/dist/providers/handlers/anthropic-base.d.ts.map +1 -0
- package/dist/providers/handlers/asksage.d.ts +1 -0
- package/dist/providers/handlers/asksage.d.ts.map +1 -0
- package/dist/providers/handlers/auth.d.ts +1 -0
- package/dist/providers/handlers/auth.d.ts.map +1 -0
- package/dist/providers/handlers/base.d.ts +1 -0
- package/dist/providers/handlers/base.d.ts.map +1 -0
- package/dist/providers/handlers/bedrock-base.d.ts +1 -0
- package/dist/providers/handlers/bedrock-base.d.ts.map +1 -0
- package/dist/providers/handlers/bedrock-client.d.ts +1 -0
- package/dist/providers/handlers/bedrock-client.d.ts.map +1 -0
- package/dist/providers/handlers/community-sdk.d.ts +1 -0
- package/dist/providers/handlers/community-sdk.d.ts.map +1 -0
- package/dist/providers/handlers/fetch-base.d.ts +1 -0
- package/dist/providers/handlers/fetch-base.d.ts.map +1 -0
- package/dist/providers/handlers/gemini-base.d.ts +1 -0
- package/dist/providers/handlers/gemini-base.d.ts.map +1 -0
- package/dist/providers/handlers/index.d.ts +1 -0
- package/dist/providers/handlers/index.d.ts.map +1 -0
- package/dist/providers/handlers/openai-base.d.ts +1 -0
- package/dist/providers/handlers/openai-base.d.ts.map +1 -0
- package/dist/providers/handlers/openai-responses.d.ts +1 -0
- package/dist/providers/handlers/openai-responses.d.ts.map +1 -0
- package/dist/providers/handlers/providers.d.ts +1 -0
- package/dist/providers/handlers/providers.d.ts.map +1 -0
- package/dist/providers/handlers/r1-base.d.ts +1 -0
- package/dist/providers/handlers/r1-base.d.ts.map +1 -0
- package/dist/providers/handlers/registry.d.ts +1 -0
- package/dist/providers/handlers/registry.d.ts.map +1 -0
- package/dist/providers/handlers/vertex.d.ts +1 -0
- package/dist/providers/handlers/vertex.d.ts.map +1 -0
- package/dist/providers/index.d.ts +1 -0
- package/dist/providers/index.d.ts.map +1 -0
- package/dist/providers/public.browser.d.ts +1 -0
- package/dist/providers/public.browser.d.ts.map +1 -0
- package/dist/providers/public.d.ts +1 -0
- package/dist/providers/public.d.ts.map +1 -0
- package/dist/providers/shared/openai-compatible.d.ts +1 -0
- package/dist/providers/shared/openai-compatible.d.ts.map +1 -0
- package/dist/providers/transform/ai-sdk-community-format.d.ts +1 -0
- package/dist/providers/transform/ai-sdk-community-format.d.ts.map +1 -0
- package/dist/providers/transform/anthropic-format.d.ts +1 -0
- package/dist/providers/transform/anthropic-format.d.ts.map +1 -0
- package/dist/providers/transform/content-format.d.ts +1 -0
- package/dist/providers/transform/content-format.d.ts.map +1 -0
- package/dist/providers/transform/gemini-format.d.ts +1 -0
- package/dist/providers/transform/gemini-format.d.ts.map +1 -0
- package/dist/providers/transform/index.d.ts +1 -0
- package/dist/providers/transform/index.d.ts.map +1 -0
- package/dist/providers/transform/openai-format.d.ts +1 -0
- package/dist/providers/transform/openai-format.d.ts.map +1 -0
- package/dist/providers/transform/r1-format.d.ts +1 -0
- package/dist/providers/transform/r1-format.d.ts.map +1 -0
- package/dist/providers/types/config.d.ts +1 -0
- package/dist/providers/types/config.d.ts.map +1 -0
- package/dist/providers/types/handler.d.ts +1 -0
- package/dist/providers/types/handler.d.ts.map +1 -0
- package/dist/providers/types/index.d.ts +1 -0
- package/dist/providers/types/index.d.ts.map +1 -0
- package/dist/providers/types/messages.d.ts +1 -0
- package/dist/providers/types/messages.d.ts.map +1 -0
- package/dist/providers/types/model-info.d.ts +1 -0
- package/dist/providers/types/model-info.d.ts.map +1 -0
- package/dist/providers/types/provider-ids.d.ts +1 -1
- package/dist/providers/types/provider-ids.d.ts.map +1 -0
- package/dist/providers/types/settings.d.ts +1 -0
- package/dist/providers/types/settings.d.ts.map +1 -0
- package/dist/providers/types/stream.d.ts +1 -0
- package/dist/providers/types/stream.d.ts.map +1 -0
- package/dist/providers/utils/index.d.ts +1 -0
- package/dist/providers/utils/index.d.ts.map +1 -0
- package/dist/providers/utils/retry.d.ts +1 -0
- package/dist/providers/utils/retry.d.ts.map +1 -0
- package/dist/providers/utils/stream-processor.d.ts +1 -0
- package/dist/providers/utils/stream-processor.d.ts.map +1 -0
- package/dist/providers/utils/tool-processor.d.ts +1 -0
- package/dist/providers/utils/tool-processor.d.ts.map +1 -0
- package/dist/sdk.d.ts +1 -0
- package/dist/sdk.d.ts.map +1 -0
- package/dist/types.d.ts +1 -0
- package/dist/types.d.ts.map +1 -0
- package/package.json +3 -4
- package/src/catalog.ts +0 -20
- package/src/config-browser.ts +0 -11
- package/src/config.ts +0 -49
- package/src/index.browser.ts +0 -9
- package/src/index.ts +0 -10
- package/src/live-providers.test.ts +0 -138
- package/src/models/generated-access.ts +0 -41
- package/src/models/generated-provider-loaders.ts +0 -166
- package/src/models/generated.ts +0 -11785
- package/src/models/index.ts +0 -271
- package/src/models/models-dev-catalog.test.ts +0 -161
- package/src/models/models-dev-catalog.ts +0 -168
- package/src/models/providers/aihubmix.ts +0 -19
- package/src/models/providers/anthropic.ts +0 -60
- package/src/models/providers/asksage.ts +0 -19
- package/src/models/providers/baseten.ts +0 -21
- package/src/models/providers/bedrock.ts +0 -30
- package/src/models/providers/cerebras.ts +0 -24
- package/src/models/providers/claude-code.ts +0 -51
- package/src/models/providers/cline.ts +0 -25
- package/src/models/providers/deepseek.ts +0 -33
- package/src/models/providers/dify.ts +0 -17
- package/src/models/providers/doubao.ts +0 -33
- package/src/models/providers/fireworks.ts +0 -34
- package/src/models/providers/gemini.ts +0 -43
- package/src/models/providers/groq.ts +0 -33
- package/src/models/providers/hicap.ts +0 -18
- package/src/models/providers/huawei-cloud-maas.ts +0 -18
- package/src/models/providers/huggingface.ts +0 -22
- package/src/models/providers/index.ts +0 -162
- package/src/models/providers/litellm.ts +0 -19
- package/src/models/providers/lmstudio.ts +0 -22
- package/src/models/providers/minimax.ts +0 -34
- package/src/models/providers/mistral.ts +0 -19
- package/src/models/providers/moonshot.ts +0 -34
- package/src/models/providers/nebius.ts +0 -24
- package/src/models/providers/nous-research.ts +0 -21
- package/src/models/providers/oca.ts +0 -30
- package/src/models/providers/ollama.ts +0 -18
- package/src/models/providers/openai-codex.ts +0 -46
- package/src/models/providers/openai.ts +0 -43
- package/src/models/providers/opencode.ts +0 -28
- package/src/models/providers/openrouter.ts +0 -24
- package/src/models/providers/qwen-code.ts +0 -33
- package/src/models/providers/qwen.ts +0 -34
- package/src/models/providers/requesty.ts +0 -23
- package/src/models/providers/sambanova.ts +0 -23
- package/src/models/providers/sapaicore.ts +0 -34
- package/src/models/providers/together.ts +0 -35
- package/src/models/providers/vercel-ai-gateway.ts +0 -23
- package/src/models/providers/vertex.ts +0 -36
- package/src/models/providers/xai.ts +0 -34
- package/src/models/providers/zai.ts +0 -25
- package/src/models/query.ts +0 -407
- package/src/models/registry.ts +0 -511
- package/src/models/schemas/index.ts +0 -62
- package/src/models/schemas/model.ts +0 -308
- package/src/models/schemas/query.ts +0 -336
- package/src/providers/browser.ts +0 -4
- package/src/providers/handlers/ai-sdk-community.ts +0 -229
- package/src/providers/handlers/ai-sdk-provider-base.ts +0 -203
- package/src/providers/handlers/anthropic-base.test.ts +0 -30
- package/src/providers/handlers/anthropic-base.ts +0 -387
- package/src/providers/handlers/asksage.test.ts +0 -103
- package/src/providers/handlers/asksage.ts +0 -138
- package/src/providers/handlers/auth.test.ts +0 -19
- package/src/providers/handlers/auth.ts +0 -121
- package/src/providers/handlers/base.test.ts +0 -230
- package/src/providers/handlers/base.ts +0 -310
- package/src/providers/handlers/bedrock-base.ts +0 -390
- package/src/providers/handlers/bedrock-client.ts +0 -100
- package/src/providers/handlers/codex.test.ts +0 -160
- package/src/providers/handlers/community-sdk.test.ts +0 -321
- package/src/providers/handlers/community-sdk.ts +0 -391
- package/src/providers/handlers/fetch-base.ts +0 -68
- package/src/providers/handlers/gemini-base.test.ts +0 -261
- package/src/providers/handlers/gemini-base.ts +0 -307
- package/src/providers/handlers/index.ts +0 -67
- package/src/providers/handlers/openai-base.ts +0 -341
- package/src/providers/handlers/openai-responses.test.ts +0 -259
- package/src/providers/handlers/openai-responses.ts +0 -634
- package/src/providers/handlers/providers.test.ts +0 -120
- package/src/providers/handlers/providers.ts +0 -563
- package/src/providers/handlers/r1-base.ts +0 -283
- package/src/providers/handlers/registry.ts +0 -185
- package/src/providers/handlers/vertex.test.ts +0 -124
- package/src/providers/handlers/vertex.ts +0 -302
- package/src/providers/index.ts +0 -534
- package/src/providers/public.browser.ts +0 -20
- package/src/providers/public.ts +0 -51
- package/src/providers/shared/openai-compatible.ts +0 -63
- package/src/providers/transform/ai-sdk-community-format.test.ts +0 -73
- package/src/providers/transform/ai-sdk-community-format.ts +0 -115
- package/src/providers/transform/anthropic-format.ts +0 -230
- package/src/providers/transform/content-format.ts +0 -34
- package/src/providers/transform/format-conversion.test.ts +0 -413
- package/src/providers/transform/gemini-format.ts +0 -262
- package/src/providers/transform/index.ts +0 -22
- package/src/providers/transform/openai-format.ts +0 -290
- package/src/providers/transform/r1-format.ts +0 -287
- package/src/providers/types/config.ts +0 -396
- package/src/providers/types/handler.ts +0 -92
- package/src/providers/types/index.ts +0 -120
- package/src/providers/types/messages.ts +0 -162
- package/src/providers/types/model-info.test.ts +0 -57
- package/src/providers/types/model-info.ts +0 -65
- package/src/providers/types/provider-ids.test.ts +0 -12
- package/src/providers/types/provider-ids.ts +0 -89
- package/src/providers/types/settings.test.ts +0 -49
- package/src/providers/types/settings.ts +0 -533
- package/src/providers/types/stream.ts +0 -117
- package/src/providers/utils/index.ts +0 -27
- package/src/providers/utils/retry.test.ts +0 -140
- package/src/providers/utils/retry.ts +0 -188
- package/src/providers/utils/stream-processor.test.ts +0 -232
- package/src/providers/utils/stream-processor.ts +0 -472
- package/src/providers/utils/tool-processor.test.ts +0 -235
- package/src/providers/utils/tool-processor.ts +0 -146
- package/src/sdk.ts +0 -264
- package/src/types.ts +0 -79
|
@@ -1,390 +0,0 @@
|
|
|
1
|
-
import { convertToolsToAnthropic } from "../transform/anthropic-format";
|
|
2
|
-
import {
|
|
3
|
-
type ApiStream,
|
|
4
|
-
type HandlerModelInfo,
|
|
5
|
-
type ProviderConfig,
|
|
6
|
-
supportsModelThinking,
|
|
7
|
-
} from "../types";
|
|
8
|
-
import type { Message, ToolDefinition } from "../types/messages";
|
|
9
|
-
import { retryStream } from "../utils/retry";
|
|
10
|
-
import { BaseHandler } from "./base";
|
|
11
|
-
import { createBedrockClient } from "./bedrock-client";
|
|
12
|
-
|
|
13
|
-
// Model-ID suffix that opts Claude Sonnet into the 1M-token context window;
// stripped from the id before it is passed to the Bedrock client.
const CLAUDE_SONNET_1M_SUFFIX = ":1m";

// Minimal structural type for the lazily imported "ai" package (AI SDK).
// Only the pieces of streamText's result that this handler consumes are
// modeled; everything else stays behind index signatures.
type AiModule = {
  streamText: (input: Record<string, unknown>) => {
    fullStream?: AsyncIterable<{ type?: string; [key: string]: unknown }>;
    textStream?: AsyncIterable<string>;
    usage?: Promise<{
      inputTokens?: number;
      outputTokens?: number;
      reasoningTokens?: number;
      cachedInputTokens?: number;
      [key: string]: unknown;
    }>;
  };
};

// Module-level cache so the dynamic import of "ai" happens at most once.
let cachedAiModule: AiModule | undefined;
// Thinking budget used when thinking is enabled but no explicit budget is set.
const DEFAULT_THINKING_BUDGET_TOKENS = 1024;
// Reasoning effort used for Nova models when thinking is enabled without an
// explicit effort level.
const DEFAULT_REASONING_EFFORT = "medium" as const;
|
33
|
-
async function loadAiModule(): Promise<AiModule> {
|
|
34
|
-
if (cachedAiModule) {
|
|
35
|
-
return cachedAiModule;
|
|
36
|
-
}
|
|
37
|
-
|
|
38
|
-
const moduleName = "ai";
|
|
39
|
-
cachedAiModule = (await import(moduleName)) as AiModule;
|
|
40
|
-
return cachedAiModule;
|
|
41
|
-
}
|
|
42
|
-
|
|
43
|
-
// One content part inside a model message; kept loosely typed because the
// AI SDK's concrete part shapes are not imported here.
type ModelMessagePart = Record<string, unknown>;
// Message shape handed to the AI SDK's streamText: a role plus either plain
// text or a list of structured parts.
type ModelMessage = {
  role: "system" | "user" | "assistant" | "tool";
  content: string | ModelMessagePart[];
};
|
|
49
|
-
/**
 * Handler for AWS Bedrock using AI SDK's Amazon Bedrock provider.
 *
 * This handler is async-lazy loaded via createHandlerAsync.
 */
export class BedrockHandler extends BaseHandler {
  // Lazily built factory mapping a model id to an AI SDK model instance.
  private clientFactory: ((modelId: string) => unknown) | undefined;

  // Builds (once) and returns the Bedrock client factory.
  // NOTE(review): getRequestHeaders comes from BaseHandler (not visible here) —
  // presumably provider-level default headers; confirm in base.ts.
  private async ensureClientFactory(): Promise<(modelId: string) => unknown> {
    if (!this.clientFactory) {
      this.clientFactory = await createBedrockClient(
        this.config,
        this.getRequestHeaders(),
      );
    }
    return this.clientFactory;
  }

  /**
   * Resolves the configured model id and its metadata.
   * Model info falls back from explicit config.modelInfo, to the knownModels
   * table keyed by id, to an empty object.
   * @throws Error when config.modelId is missing.
   */
  getModel(): HandlerModelInfo {
    const modelId = this.config.modelId;
    if (!modelId) {
      throw new Error("Model ID is required. Set modelId in config.");
    }

    const modelInfo =
      this.config.modelInfo ?? this.config.knownModels?.[modelId] ?? {};
    return { id: modelId, info: { ...modelInfo, id: modelId } };
  }

  // Converts the provider-agnostic message list into AI SDK model messages.
  getMessages(systemPrompt: string, messages: Message[]): ModelMessage[] {
    return toModelMessages(systemPrompt, messages);
  }

  /**
   * Public streaming entry point. Wraps the internal stream in retryStream so
   * transient failures are retried (up to 4 retries).
   */
  async *createMessage(
    systemPrompt: string,
    messages: Message[],
    tools?: ToolDefinition[],
  ): ApiStream {
    yield* retryStream(
      () => this.createMessageInternal(systemPrompt, messages, tools),
      { maxRetries: 4 },
    );
  }

  /**
   * Builds the streamText request (model id rewriting, reasoning config,
   * provider options) and translates AI SDK stream parts into ApiStream
   * events: text, reasoning, tool_calls, usage, and a final done event.
   */
  private async *createMessageInternal(
    systemPrompt: string,
    messages: Message[],
    tools?: ToolDefinition[],
  ): ApiStream {
    const ai = await loadAiModule();
    const factory = await this.ensureClientFactory();
    // NOTE(review): createResponseId/getAbortSignal are BaseHandler members
    // not visible in this file; assumed to produce a per-response id and the
    // caller's cancellation signal.
    const responseId = this.createResponseId();
    const abortSignal = this.getAbortSignal();
    const model = this.getModel();

    let modelId = model.id;
    const providerOptions: Record<string, unknown> = {};
    const bedrockOptions: Record<string, unknown> = {};

    // ":1m" suffix selects the 1M context window: strip it from the id and
    // request the corresponding Anthropic beta instead.
    if (modelId.endsWith(CLAUDE_SONNET_1M_SUFFIX)) {
      modelId = modelId.slice(0, -CLAUDE_SONNET_1M_SUFFIX.length);
      bedrockOptions.anthropicBeta = ["context-1m-2025-08-07"];
    }

    // Thinking/reasoning configuration. Anthropic models take a token budget;
    // Nova models take an effort level instead.
    const thinkingSupported = supportsModelThinking(model.info);
    const budgetTokens =
      this.config.thinkingBudgetTokens ??
      (this.config.thinking ? DEFAULT_THINKING_BUDGET_TOKENS : 0);
    let reasoningEnabled = false;
    if (
      thinkingSupported &&
      budgetTokens > 0 &&
      modelId.includes("anthropic")
    ) {
      bedrockOptions.reasoningConfig = { type: "enabled", budgetTokens };
      reasoningEnabled = true;
    } else if (thinkingSupported && modelId.includes("amazon.nova")) {
      const reasoningEffort =
        this.config.reasoningEffort ??
        (this.config.thinking ? DEFAULT_REASONING_EFFORT : undefined);
      if (reasoningEffort) {
        bedrockOptions.reasoningConfig = {
          type: "enabled",
          maxReasoningEffort: reasoningEffort,
        };
        reasoningEnabled = true;
      }
    }

    if (Object.keys(bedrockOptions).length > 0) {
      providerOptions.bedrock = bedrockOptions;
    }

    const stream = ai.streamText({
      model: factory(modelId),
      messages: this.getMessages(systemPrompt, messages),
      tools: toAiSdkTools(tools),
      maxTokens: model.info.maxTokens ?? this.config.maxOutputTokens ?? 128_000,
      // Reasoning mode rejects explicit temperature, so it is omitted then.
      temperature: reasoningEnabled ? undefined : (model.info.temperature ?? 0),
      providerOptions:
        Object.keys(providerOptions).length > 0 ? providerOptions : undefined,
      abortSignal,
    });

    // Tracks whether a "finish" part already produced a usage event, so the
    // post-stream usage fallback below does not double-emit.
    let usageEmitted = false;

    if (stream.fullStream) {
      for await (const part of stream.fullStream) {
        const partType = part.type;

        if (partType === "text-delta") {
          // Field name differs across AI SDK versions: textDelta vs delta.
          const text =
            (part.textDelta as string | undefined) ??
            (part.delta as string | undefined);
          if (text) {
            yield { type: "text", text, id: responseId };
          }
          continue;
        }

        if (partType === "reasoning-delta" || partType === "reasoning") {
          const reasoning =
            (part.textDelta as string | undefined) ??
            (part.reasoning as string | undefined);
          if (reasoning) {
            yield { type: "reasoning", reasoning, id: responseId };
          }
          continue;
        }

        if (partType === "tool-call") {
          // Same version tolerance: toolCallId/id and toolName/name.
          const toolCallId =
            (part.toolCallId as string | undefined) ??
            (part.id as string | undefined);
          const toolName =
            (part.toolName as string | undefined) ??
            (part.name as string | undefined);
          const args = (part.args as Record<string, unknown> | undefined) ?? {};

          yield {
            type: "tool_calls",
            id: responseId,
            tool_call: {
              call_id: toolCallId,
              function: {
                name: toolName,
                arguments: args,
              },
            },
          };
          continue;
        }

        if (partType === "error") {
          const message =
            (part.error as Error | undefined)?.message ??
            "Bedrock stream failed";
          throw new Error(message);
        }

        if (partType === "finish") {
          const usage =
            (part.usage as Record<string, unknown> | undefined) ?? {};
          const inputTokens = numberOrZero(usage.inputTokens);
          const outputTokens = numberOrZero(usage.outputTokens);
          const thoughtsTokenCount = numberOrZero(usage.reasoningTokens);
          const cacheReadTokens = numberOrZero(usage.cachedInputTokens);

          yield {
            type: "usage",
            // Reported inputTokens includes cache reads; subtract them so the
            // event carries only non-cached input (clamped at 0).
            inputTokens: Math.max(0, inputTokens - cacheReadTokens),
            outputTokens,
            thoughtsTokenCount,
            cacheReadTokens,
            totalCost: this.calculateCostFromInclusiveInput(
              inputTokens,
              outputTokens,
              cacheReadTokens,
            ),
            id: responseId,
          };
          usageEmitted = true;
        }
      }
    } else if (stream.textStream) {
      // Fallback path when the SDK exposes only a plain text stream.
      for await (const text of stream.textStream) {
        yield { type: "text", text, id: responseId };
      }
    }

    // Fallback usage emission when no "finish" part was seen.
    if (!usageEmitted && stream.usage) {
      const usage = await stream.usage;
      const inputTokens = numberOrZero(usage.inputTokens);
      const outputTokens = numberOrZero(usage.outputTokens);
      const thoughtsTokenCount = numberOrZero(usage.reasoningTokens);
      const cacheReadTokens = numberOrZero(usage.cachedInputTokens);

      yield {
        type: "usage",
        inputTokens: Math.max(0, inputTokens - cacheReadTokens),
        outputTokens,
        thoughtsTokenCount,
        cacheReadTokens,
        totalCost: this.calculateCostFromInclusiveInput(
          inputTokens,
          outputTokens,
          cacheReadTokens,
        ),
        id: responseId,
      };
    }

    yield { type: "done", success: true, id: responseId };
  }
}
|
265
|
-
export function createBedrockHandler(config: ProviderConfig): BedrockHandler {
|
|
266
|
-
return new BedrockHandler(config);
|
|
267
|
-
}
|
|
268
|
-
|
|
269
|
-
function numberOrZero(value: unknown): number {
|
|
270
|
-
return typeof value === "number" && Number.isFinite(value) ? value : 0;
|
|
271
|
-
}
|
|
272
|
-
|
|
273
|
-
function toAiSdkTools(
|
|
274
|
-
tools: ToolDefinition[] | undefined,
|
|
275
|
-
): Record<string, unknown> | undefined {
|
|
276
|
-
if (!tools || tools.length === 0) {
|
|
277
|
-
return undefined;
|
|
278
|
-
}
|
|
279
|
-
|
|
280
|
-
// We keep the same schema shape used by Anthropic conversion.
|
|
281
|
-
const anthropicTools = convertToolsToAnthropic(tools);
|
|
282
|
-
return Object.fromEntries(
|
|
283
|
-
anthropicTools.map((tool) => [
|
|
284
|
-
tool.name,
|
|
285
|
-
{
|
|
286
|
-
description: tool.description,
|
|
287
|
-
inputSchema: tool.input_schema,
|
|
288
|
-
},
|
|
289
|
-
]),
|
|
290
|
-
);
|
|
291
|
-
}
|
|
292
|
-
|
|
293
|
-
function toModelMessages(
|
|
294
|
-
systemPrompt: string,
|
|
295
|
-
messages: Message[],
|
|
296
|
-
): ModelMessage[] {
|
|
297
|
-
const result: ModelMessage[] = [{ role: "system", content: systemPrompt }];
|
|
298
|
-
const toolNamesById = new Map<string, string>();
|
|
299
|
-
|
|
300
|
-
for (const message of messages) {
|
|
301
|
-
if (typeof message.content === "string") {
|
|
302
|
-
result.push({ role: message.role, content: message.content });
|
|
303
|
-
continue;
|
|
304
|
-
}
|
|
305
|
-
|
|
306
|
-
if (message.role === "assistant") {
|
|
307
|
-
const parts: ModelMessagePart[] = [];
|
|
308
|
-
for (const block of message.content) {
|
|
309
|
-
if (block.type === "text") {
|
|
310
|
-
parts.push({ type: "text", text: block.text });
|
|
311
|
-
continue;
|
|
312
|
-
}
|
|
313
|
-
|
|
314
|
-
if (block.type === "tool_use") {
|
|
315
|
-
toolNamesById.set(block.id, block.name);
|
|
316
|
-
parts.push({
|
|
317
|
-
type: "tool-call",
|
|
318
|
-
toolCallId: block.id,
|
|
319
|
-
toolName: block.name,
|
|
320
|
-
args: block.input,
|
|
321
|
-
});
|
|
322
|
-
}
|
|
323
|
-
}
|
|
324
|
-
|
|
325
|
-
if (parts.length > 0) {
|
|
326
|
-
result.push({ role: "assistant", content: parts });
|
|
327
|
-
}
|
|
328
|
-
continue;
|
|
329
|
-
}
|
|
330
|
-
|
|
331
|
-
// User message (can include text/image/tool_result blocks)
|
|
332
|
-
const userParts: ModelMessagePart[] = [];
|
|
333
|
-
|
|
334
|
-
for (const block of message.content) {
|
|
335
|
-
if (block.type === "text") {
|
|
336
|
-
userParts.push({ type: "text", text: block.text });
|
|
337
|
-
continue;
|
|
338
|
-
}
|
|
339
|
-
|
|
340
|
-
if (block.type === "image") {
|
|
341
|
-
userParts.push({
|
|
342
|
-
type: "image",
|
|
343
|
-
image: Buffer.from(block.data, "base64"),
|
|
344
|
-
mediaType: block.mediaType,
|
|
345
|
-
});
|
|
346
|
-
continue;
|
|
347
|
-
}
|
|
348
|
-
|
|
349
|
-
if (block.type === "tool_result") {
|
|
350
|
-
if (userParts.length > 0) {
|
|
351
|
-
result.push({
|
|
352
|
-
role: "user",
|
|
353
|
-
content: userParts.splice(0, userParts.length),
|
|
354
|
-
});
|
|
355
|
-
}
|
|
356
|
-
|
|
357
|
-
result.push({
|
|
358
|
-
role: "tool",
|
|
359
|
-
content: [
|
|
360
|
-
{
|
|
361
|
-
type: "tool-result",
|
|
362
|
-
toolCallId: block.tool_use_id,
|
|
363
|
-
toolName: toolNamesById.get(block.tool_use_id) ?? "tool",
|
|
364
|
-
output: serializeToolResult(block.content),
|
|
365
|
-
isError: block.is_error ?? false,
|
|
366
|
-
},
|
|
367
|
-
],
|
|
368
|
-
});
|
|
369
|
-
}
|
|
370
|
-
}
|
|
371
|
-
|
|
372
|
-
if (userParts.length > 0) {
|
|
373
|
-
result.push({ role: "user", content: userParts });
|
|
374
|
-
}
|
|
375
|
-
}
|
|
376
|
-
|
|
377
|
-
return result;
|
|
378
|
-
}
|
|
379
|
-
|
|
380
|
-
function serializeToolResult(content: Message["content"] | string): string {
|
|
381
|
-
if (typeof content === "string") {
|
|
382
|
-
return content;
|
|
383
|
-
}
|
|
384
|
-
|
|
385
|
-
try {
|
|
386
|
-
return JSON.stringify(content);
|
|
387
|
-
} catch {
|
|
388
|
-
return String(content);
|
|
389
|
-
}
|
|
390
|
-
}
|
|
@@ -1,100 +0,0 @@
|
|
|
1
|
-
import type { ProviderConfig } from "../types";
|
|
2
|
-
|
|
3
|
-
type BedrockFactory = (modelId: string) => unknown;
|
|
4
|
-
|
|
5
|
-
let cachedCreateAmazonBedrock:
|
|
6
|
-
| ((options: {
|
|
7
|
-
region?: string;
|
|
8
|
-
accessKeyId?: string;
|
|
9
|
-
secretAccessKey?: string;
|
|
10
|
-
sessionToken?: string;
|
|
11
|
-
apiKey?: string;
|
|
12
|
-
baseURL?: string;
|
|
13
|
-
headers?: Record<string, string>;
|
|
14
|
-
credentialProvider?: () => PromiseLike<{
|
|
15
|
-
accessKeyId: string;
|
|
16
|
-
secretAccessKey: string;
|
|
17
|
-
sessionToken?: string;
|
|
18
|
-
}>;
|
|
19
|
-
}) => BedrockFactory)
|
|
20
|
-
| undefined;
|
|
21
|
-
|
|
22
|
-
let cachedFromNodeProviderChain:
|
|
23
|
-
| ((options?: { profile?: string }) => () => PromiseLike<{
|
|
24
|
-
accessKeyId: string;
|
|
25
|
-
secretAccessKey: string;
|
|
26
|
-
sessionToken?: string;
|
|
27
|
-
}>)
|
|
28
|
-
| undefined;
|
|
29
|
-
|
|
30
|
-
async function loadCreateAmazonBedrock() {
|
|
31
|
-
if (cachedCreateAmazonBedrock) {
|
|
32
|
-
return cachedCreateAmazonBedrock;
|
|
33
|
-
}
|
|
34
|
-
|
|
35
|
-
const moduleName = "@ai-sdk/amazon-bedrock";
|
|
36
|
-
const mod = (await import(moduleName)) as {
|
|
37
|
-
createAmazonBedrock?: typeof cachedCreateAmazonBedrock;
|
|
38
|
-
};
|
|
39
|
-
if (!mod.createAmazonBedrock) {
|
|
40
|
-
throw new Error(`Failed to load createAmazonBedrock from ${moduleName}`);
|
|
41
|
-
}
|
|
42
|
-
|
|
43
|
-
cachedCreateAmazonBedrock = mod.createAmazonBedrock;
|
|
44
|
-
return cachedCreateAmazonBedrock;
|
|
45
|
-
}
|
|
46
|
-
|
|
47
|
-
async function loadFromNodeProviderChain() {
|
|
48
|
-
if (cachedFromNodeProviderChain) {
|
|
49
|
-
return cachedFromNodeProviderChain;
|
|
50
|
-
}
|
|
51
|
-
|
|
52
|
-
const moduleName = "@aws-sdk/credential-providers";
|
|
53
|
-
const mod = (await import(moduleName)) as {
|
|
54
|
-
fromNodeProviderChain?: typeof cachedFromNodeProviderChain;
|
|
55
|
-
};
|
|
56
|
-
if (!mod.fromNodeProviderChain) {
|
|
57
|
-
throw new Error(`Failed to load fromNodeProviderChain from ${moduleName}`);
|
|
58
|
-
}
|
|
59
|
-
|
|
60
|
-
cachedFromNodeProviderChain = mod.fromNodeProviderChain;
|
|
61
|
-
return cachedFromNodeProviderChain;
|
|
62
|
-
}
|
|
63
|
-
|
|
64
|
-
export async function createBedrockClient(
|
|
65
|
-
config: ProviderConfig,
|
|
66
|
-
defaultHeaders: Record<string, string>,
|
|
67
|
-
): Promise<BedrockFactory> {
|
|
68
|
-
const createAmazonBedrock = await loadCreateAmazonBedrock();
|
|
69
|
-
|
|
70
|
-
const region = config.region ?? "us-east-1";
|
|
71
|
-
const authentication = config.aws?.authentication;
|
|
72
|
-
const hasExplicitKeys = Boolean(
|
|
73
|
-
config.aws?.accessKey && config.aws?.secretKey,
|
|
74
|
-
);
|
|
75
|
-
const shouldUseCredentialChain =
|
|
76
|
-
authentication === "profile" ||
|
|
77
|
-
authentication === "iam" ||
|
|
78
|
-
(!authentication && !hasExplicitKeys && !config.apiKey);
|
|
79
|
-
|
|
80
|
-
let credentialProvider:
|
|
81
|
-
| ReturnType<Exclude<typeof cachedFromNodeProviderChain, undefined>>
|
|
82
|
-
| undefined;
|
|
83
|
-
if (shouldUseCredentialChain) {
|
|
84
|
-
const fromNodeProviderChain = await loadFromNodeProviderChain();
|
|
85
|
-
credentialProvider = fromNodeProviderChain({
|
|
86
|
-
profile: config.aws?.profile,
|
|
87
|
-
});
|
|
88
|
-
}
|
|
89
|
-
|
|
90
|
-
return createAmazonBedrock({
|
|
91
|
-
region,
|
|
92
|
-
accessKeyId: config.aws?.accessKey ?? undefined,
|
|
93
|
-
secretAccessKey: config.aws?.secretKey ?? undefined,
|
|
94
|
-
sessionToken: config.aws?.sessionToken ?? undefined,
|
|
95
|
-
apiKey: config.apiKey ?? undefined,
|
|
96
|
-
baseURL: config.aws?.endpoint ?? config.baseUrl ?? undefined,
|
|
97
|
-
headers: defaultHeaders,
|
|
98
|
-
credentialProvider,
|
|
99
|
-
});
|
|
100
|
-
}
|
|
@@ -1,160 +0,0 @@
|
|
|
1
|
-
import { beforeEach, describe, expect, it, vi } from "vitest";
|
|
2
|
-
import { OPENAI_CODEX_PROVIDER } from "../../models/providers/openai-codex";
|
|
3
|
-
import { CodexHandler } from "./community-sdk";
|
|
4
|
-
|
|
5
|
-
const streamTextSpy = vi.fn();
|
|
6
|
-
const codexCliSpy = vi.fn((modelId: string) => ({ modelId }));
|
|
7
|
-
let lastCreateCodexCliOptions: Record<string, unknown> | undefined;
|
|
8
|
-
|
|
9
|
-
vi.mock("ai", () => ({
|
|
10
|
-
streamText: (input: unknown) => streamTextSpy(input),
|
|
11
|
-
}));
|
|
12
|
-
|
|
13
|
-
vi.mock("ai-sdk-provider-codex-cli", () => ({
|
|
14
|
-
codexCli: (modelId: string) => codexCliSpy(modelId),
|
|
15
|
-
createCodexCli: (options?: Record<string, unknown>) => {
|
|
16
|
-
lastCreateCodexCliOptions = options;
|
|
17
|
-
return (modelId: string) => codexCliSpy(modelId);
|
|
18
|
-
},
|
|
19
|
-
}));
|
|
20
|
-
|
|
21
|
-
async function* makeStreamParts(parts: unknown[]) {
|
|
22
|
-
for (const part of parts) {
|
|
23
|
-
yield part;
|
|
24
|
-
}
|
|
25
|
-
}
|
|
26
|
-
|
|
27
|
-
describe("CodexHandler", () => {
|
|
28
|
-
beforeEach(() => {
|
|
29
|
-
vi.clearAllMocks();
|
|
30
|
-
lastCreateCodexCliOptions = undefined;
|
|
31
|
-
});
|
|
32
|
-
|
|
33
|
-
it("streams text and usage through AI SDK fullStream", async () => {
|
|
34
|
-
streamTextSpy.mockReturnValue({
|
|
35
|
-
fullStream: makeStreamParts([
|
|
36
|
-
{ type: "text-delta", textDelta: "Hello" },
|
|
37
|
-
{
|
|
38
|
-
type: "finish",
|
|
39
|
-
usage: { inputTokens: 12, outputTokens: 4 },
|
|
40
|
-
},
|
|
41
|
-
]),
|
|
42
|
-
});
|
|
43
|
-
|
|
44
|
-
const handler = new CodexHandler({
|
|
45
|
-
providerId: "openai-codex",
|
|
46
|
-
modelId: "gpt-5.2-codex",
|
|
47
|
-
});
|
|
48
|
-
|
|
49
|
-
const chunks: Array<Record<string, unknown>> = [];
|
|
50
|
-
for await (const chunk of handler.createMessage("System", [
|
|
51
|
-
{ role: "user", content: "Hi" },
|
|
52
|
-
])) {
|
|
53
|
-
chunks.push(chunk as unknown as Record<string, unknown>);
|
|
54
|
-
}
|
|
55
|
-
|
|
56
|
-
expect(codexCliSpy).toHaveBeenCalledWith("gpt-5.2-codex");
|
|
57
|
-
expect(chunks.map((chunk) => chunk.type)).toEqual([
|
|
58
|
-
"text",
|
|
59
|
-
"usage",
|
|
60
|
-
"done",
|
|
61
|
-
]);
|
|
62
|
-
expect(chunks[0]?.text).toBe("Hello");
|
|
63
|
-
expect(chunks[1]?.inputTokens).toBe(12);
|
|
64
|
-
expect(chunks[1]?.outputTokens).toBe(4);
|
|
65
|
-
});
|
|
66
|
-
|
|
67
|
-
it("uses a fallback model id when model is missing", () => {
|
|
68
|
-
const handler = new CodexHandler({
|
|
69
|
-
providerId: "openai-codex",
|
|
70
|
-
modelId: "",
|
|
71
|
-
});
|
|
72
|
-
|
|
73
|
-
expect(handler.getModel().id).toBe("gpt-5.3-codex");
|
|
74
|
-
});
|
|
75
|
-
|
|
76
|
-
it("does not map OAuth access tokens to OPENAI_API_KEY env", async () => {
|
|
77
|
-
streamTextSpy.mockReturnValue({
|
|
78
|
-
fullStream: makeStreamParts([{ type: "finish" }]),
|
|
79
|
-
});
|
|
80
|
-
|
|
81
|
-
const handler = new CodexHandler({
|
|
82
|
-
providerId: "openai-codex",
|
|
83
|
-
modelId: "gpt-5.3-codex",
|
|
84
|
-
apiKey: "oauth-token-shorthand",
|
|
85
|
-
accessToken: "oauth-access-token",
|
|
86
|
-
});
|
|
87
|
-
|
|
88
|
-
for await (const _chunk of handler.createMessage("System", [
|
|
89
|
-
{ role: "user", content: "Hi" },
|
|
90
|
-
])) {
|
|
91
|
-
// consume stream
|
|
92
|
-
}
|
|
93
|
-
|
|
94
|
-
const createOptions = lastCreateCodexCliOptions as
|
|
95
|
-
| { defaultSettings?: { env?: Record<string, string> } }
|
|
96
|
-
| undefined;
|
|
97
|
-
expect(createOptions?.defaultSettings?.env?.OPENAI_API_KEY).toBeUndefined();
|
|
98
|
-
});
|
|
99
|
-
|
|
100
|
-
it("maps explicit OpenAI API keys to OPENAI_API_KEY env", async () => {
|
|
101
|
-
streamTextSpy.mockReturnValue({
|
|
102
|
-
fullStream: makeStreamParts([{ type: "finish" }]),
|
|
103
|
-
});
|
|
104
|
-
|
|
105
|
-
const handler = new CodexHandler({
|
|
106
|
-
providerId: "openai-codex",
|
|
107
|
-
modelId: "gpt-5.3-codex",
|
|
108
|
-
apiKey: "sk-test-key",
|
|
109
|
-
});
|
|
110
|
-
|
|
111
|
-
for await (const _chunk of handler.createMessage("System", [
|
|
112
|
-
{ role: "user", content: "Hi" },
|
|
113
|
-
])) {
|
|
114
|
-
// consume stream
|
|
115
|
-
}
|
|
116
|
-
|
|
117
|
-
const createOptions = lastCreateCodexCliOptions as
|
|
118
|
-
| { defaultSettings?: { env?: Record<string, string> } }
|
|
119
|
-
| undefined;
|
|
120
|
-
expect(createOptions?.defaultSettings?.env?.OPENAI_API_KEY).toBe(
|
|
121
|
-
"sk-test-key",
|
|
122
|
-
);
|
|
123
|
-
});
|
|
124
|
-
|
|
125
|
-
it("does not surface Codex native tool calls as local tool calls", async () => {
|
|
126
|
-
streamTextSpy.mockReturnValue({
|
|
127
|
-
fullStream: makeStreamParts([
|
|
128
|
-
{
|
|
129
|
-
type: "tool-call",
|
|
130
|
-
toolCallId: "codex-call-1",
|
|
131
|
-
toolName: "read_file",
|
|
132
|
-
args: { path: "README.md" },
|
|
133
|
-
},
|
|
134
|
-
{
|
|
135
|
-
type: "finish",
|
|
136
|
-
usage: { inputTokens: 8, outputTokens: 3 },
|
|
137
|
-
},
|
|
138
|
-
]),
|
|
139
|
-
});
|
|
140
|
-
|
|
141
|
-
const handler = new CodexHandler({
|
|
142
|
-
providerId: "openai-codex",
|
|
143
|
-
modelId: "gpt-5.3-codex",
|
|
144
|
-
});
|
|
145
|
-
|
|
146
|
-
const chunks: Array<Record<string, unknown>> = [];
|
|
147
|
-
for await (const chunk of handler.createMessage("System", [
|
|
148
|
-
{ role: "user", content: "Hi" },
|
|
149
|
-
])) {
|
|
150
|
-
chunks.push(chunk as unknown as Record<string, unknown>);
|
|
151
|
-
}
|
|
152
|
-
|
|
153
|
-
expect(chunks.map((chunk) => chunk.type)).toEqual(["usage", "done"]);
|
|
154
|
-
});
|
|
155
|
-
|
|
156
|
-
it("does not advertise custom tool capability for Codex models", () => {
|
|
157
|
-
const model = OPENAI_CODEX_PROVIDER.models["gpt-5.3-codex"];
|
|
158
|
-
expect(model?.capabilities).not.toContain("tools");
|
|
159
|
-
});
|
|
160
|
-
});
|