thebird 1.2.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/index.d.ts ADDED
@@ -0,0 +1,98 @@
1
+ export interface TextBlock { type: 'text'; text: string }
2
+ export interface ImageBlockBase64 { type: 'image'; source: { type: 'base64'; media_type: string; data: string } }
3
+ export interface ImageBlockUrl { type: 'image'; source: { type: 'url'; url: string; media_type?: string } }
4
+ export interface ImageBlockInline { inlineData: { mimeType: string; data: string } }
5
+ export interface ImageBlockFile { fileData: { mimeType: string; fileUri: string } }
6
+ export type ImageBlock = ImageBlockBase64 | ImageBlockUrl | ImageBlockInline | ImageBlockFile;
7
+ export interface ToolUseBlock { type: 'tool_use'; name: string; input: Record<string, unknown> }
8
+ export interface ToolResultBlock { type: 'tool_result'; name: string; content: string | Record<string, unknown> }
9
+ export type ContentBlock = TextBlock | ImageBlock | ToolUseBlock | ToolResultBlock;
10
+ export interface Message { role: 'user' | 'assistant'; content: string | ContentBlock[] }
11
+ export interface ToolDefinition {
12
+ description?: string;
13
+ parameters?: Record<string, unknown>;
14
+ execute?: (args: Record<string, unknown>, ctx?: { toolCallId: string }) => Promise<unknown>;
15
+ }
16
+ export type Tools = Record<string, ToolDefinition>;
17
+ export interface SafetySetting { category: string; threshold: string }
18
+ export interface GenerationParams {
19
+ model?: string | { modelId?: string; id?: string };
20
+ system?: string;
21
+ messages: Message[];
22
+ tools?: Tools;
23
+ apiKey?: string;
24
+ temperature?: number;
25
+ maxOutputTokens?: number;
26
+ topP?: number;
27
+ topK?: number;
28
+ safetySettings?: SafetySetting[];
29
+ configPath?: string;
30
+ taskType?: 'background' | 'think' | 'webSearch' | 'image';
31
+ }
32
+ export interface StartStepEvent { type: 'start-step' }
33
+ export interface TextDeltaEvent { type: 'text-delta'; textDelta: string }
34
+ export interface ToolCallEvent { type: 'tool-call'; toolCallId: string; toolName: string; args: Record<string, unknown> }
35
+ export interface ToolResultEvent { type: 'tool-result'; toolCallId: string; toolName: string; args: Record<string, unknown>; result: unknown }
36
+ export interface FinishStepEvent { type: 'finish-step'; finishReason: 'stop' | 'tool-calls' | 'error' }
37
+ export interface ErrorEvent { type: 'error'; error: Error }
38
+ export type StreamEvent = StartStepEvent | TextDeltaEvent | ToolCallEvent | ToolResultEvent | FinishStepEvent | ErrorEvent;
39
+ export interface StreamResult { fullStream: AsyncIterable<StreamEvent>; warnings: Promise<unknown[]> }
40
+ export interface StreamParams extends GenerationParams { onStepFinish?: () => Promise<void> | void }
41
+ export function streamGemini(params: StreamParams): StreamResult;
42
+ export interface GenerateResult { text: string; parts: unknown[]; response: unknown }
43
+ export function generateGemini(params: GenerationParams): Promise<GenerateResult>;
44
+
45
+ export type TransformerEntry = string | [string, Record<string, unknown>];
46
+ export interface TransformerConfig {
47
+ use?: TransformerEntry[];
48
+ [modelName: string]: { use?: TransformerEntry[] } | TransformerEntry[] | undefined;
49
+ }
50
+ export interface ProviderConfig {
51
+ name: string;
52
+ api_base_url: string;
53
+ api_key: string;
54
+ models?: string[];
55
+ transformer?: TransformerConfig;
56
+ }
57
+ export interface RouterConfig {
58
+ default?: string;
59
+ background?: string;
60
+ think?: string;
61
+ longContext?: string;
62
+ longContextThreshold?: number;
63
+ webSearch?: string;
64
+ image?: string;
65
+ }
66
+ export interface RouterConfiguration {
67
+ Providers?: ProviderConfig[];
68
+ providers?: ProviderConfig[];
69
+ Router?: RouterConfig;
70
+ customRouter?: (params: GenerationParams, config: RouterConfig) => Promise<string | null>;
71
+ configPath?: string;
72
+ }
73
+ export interface RouterInstance {
74
+ stream(params: StreamParams): StreamResult;
75
+ generate(params: GenerationParams): Promise<GenerateResult | { text: string; response: unknown }>;
76
+ }
77
+ export function createRouter(config: RouterConfiguration): RouterInstance;
78
+ export function streamRouter(params: StreamParams & RouterConfiguration): StreamResult;
79
+ export function generateRouter(params: GenerationParams & RouterConfiguration): Promise<GenerateResult | { text: string; response: unknown }>;
80
+
81
+ export interface GeminiPart {
82
+ text?: string;
83
+ functionCall?: { name: string; args: Record<string, unknown> };
84
+ functionResponse?: { name: string; response: unknown };
85
+ inlineData?: { mimeType: string; data: string };
86
+ fileData?: { mimeType: string; fileUri: string };
87
+ }
88
+ export interface GeminiContent { role: 'user' | 'model'; parts: GeminiPart[] }
89
+ export function convertMessages(messages: Message[]): GeminiContent[];
90
+ export function convertTools(tools: Tools): Array<{ name: string; description: string; parameters: Record<string, unknown> }>;
91
+ export function cleanSchema(schema: unknown): unknown;
92
+ export class GeminiError extends Error {
93
+ name: 'GeminiError';
94
+ status?: number;
95
+ code?: string | number;
96
+ retryable: boolean;
97
+ constructor(message: string, options?: { status?: number; code?: string | number; retryable?: boolean });
98
+ }
package/index.js ADDED
@@ -0,0 +1,173 @@
1
+ const { getClient } = require('./lib/client');
2
+ const { GeminiError, withRetry } = require('./lib/errors');
3
+ const { convertMessages, convertTools, cleanSchema, extractModelId, buildConfig } = require('./lib/convert');
4
+ const { loadConfig } = require('./lib/config');
5
+ const { route } = require('./lib/router');
6
+ const { resolveTransformers, applyRequestTransformers } = require('./lib/transformers');
7
+ const openaiProv = require('./lib/providers/openai');
8
+
9
/**
 * Streaming entry point for the Gemini backend.
 * Returns { fullStream, warnings }: fullStream is an async iterable of
 * StreamEvents; warnings resolves to an empty list (kept for API parity).
 */
function streamGemini({ model, system, messages, tools, onStepFinish, apiKey,
  temperature, maxOutputTokens, topP, topK, safetySettings }) {
  const streamArgs = { model, system, messages, tools, onStepFinish, apiKey, temperature, maxOutputTokens, topP, topK, safetySettings };
  return {
    warnings: Promise.resolve([]),
    fullStream: createFullStream(streamArgs)
  };
}
16
+
17
/**
 * Core streaming loop for the Gemini backend.
 *
 * Event sequence per step: 'start-step', zero or more 'text-delta', then
 * either 'finish-step' ('stop') when the model emits no function calls, or a
 * tool round ('tool-call' + 'tool-result' per call, then 'finish-step'
 * 'tool-calls') after which the loop re-issues the request with the model
 * turn and tool results appended. Any thrown error yields 'error' followed
 * by 'finish-step' ('error') and ends the stream.
 */
async function* createFullStream({ model, system, messages, tools, onStepFinish, apiKey, temperature, maxOutputTokens, topP, topK, safetySettings }) {
  const client = getClient(apiKey);
  const modelId = extractModelId(model);
  let contents = convertMessages(messages);
  const { config } = buildConfig({ system, tools, temperature, maxOutputTokens, topP, topK, safetySettings });
  // Each iteration is one "step": a single model request plus any tool round.
  while (true) {
    yield { type: 'start-step' };
    try {
      const stream = await withRetry(() => client.models.generateContentStream({ model: modelId, contents, config }));
      const allParts = [];
      for await (const chunk of stream) {
        for (const candidate of (chunk.candidates || [])) {
          for (const part of (candidate.content?.parts || [])) {
            allParts.push(part);
            // Suppress "thought" parts; only surface user-visible text.
            if (part.text && !part.thought) yield { type: 'text-delta', textDelta: part.text };
          }
        }
      }
      const fcParts = allParts.filter(p => p.functionCall);
      if (fcParts.length === 0) {
        // No tool calls: the conversation is complete.
        yield { type: 'finish-step', finishReason: 'stop' };
        if (onStepFinish) await onStepFinish();
        return;
      }
      const toolResultParts = [];
      for (const part of fcParts) {
        const name = part.functionCall.name;
        const args = part.functionCall.args || {};
        // Gemini returns no tool-call ids, so mint one per call so that
        // tool-call / tool-result events can be correlated by consumers.
        const toolId = 'toolu_' + Math.random().toString(36).slice(2, 10);
        yield { type: 'tool-call', toolCallId: toolId, toolName: name, args };
        const toolDef = tools?.[name];
        // Unknown tool names are reported back to the model as an error
        // payload rather than thrown.
        let result = toolDef ? null : { error: true, message: 'Tool not found: ' + name };
        if (toolDef?.execute) {
          try { result = await toolDef.execute(args, { toolCallId: toolId }); }
          catch (e) { result = { error: true, message: e.message }; }
        }
        yield { type: 'tool-result', toolCallId: toolId, toolName: name, args, result };
        toolResultParts.push({ functionResponse: { name, response: result || {} } });
      }
      yield { type: 'finish-step', finishReason: 'tool-calls' };
      if (onStepFinish) await onStepFinish();
      // Extend the transcript with the model turn and the tool results, then
      // loop to request the follow-up step.
      contents.push({ role: 'model', parts: allParts });
      contents.push({ role: 'user', parts: toolResultParts });
    } catch (err) {
      yield { type: 'error', error: err };
      yield { type: 'finish-step', finishReason: 'error' };
      if (onStepFinish) await onStepFinish();
      return;
    }
  }
}
68
+
69
/**
 * Non-streaming Gemini generation with an automatic tool-execution loop.
 *
 * Repeats request -> execute tools -> append results until the model returns
 * a response without function calls, then resolves with the concatenated
 * visible text, the raw parts, and the final response object.
 *
 * @throws {GeminiError} when the API returns no candidates.
 */
async function generateGemini({ model, system, messages, tools, apiKey, temperature, maxOutputTokens, topP, topK, safetySettings }) {
  const client = getClient(apiKey);
  const modelId = extractModelId(model);
  const contents = convertMessages(messages); // mutated via push, never reassigned
  const { config } = buildConfig({ system, tools, temperature, maxOutputTokens, topP, topK, safetySettings });
  while (true) {
    const response = await withRetry(() => client.models.generateContent({ model: modelId, contents, config }));
    const candidate = response.candidates?.[0];
    if (!candidate) throw new GeminiError('No candidates returned', { retryable: false });
    const allParts = candidate.content?.parts || [];
    const fcParts = allParts.filter(p => p.functionCall);
    if (fcParts.length === 0) {
      // Done: concatenate visible text parts, skipping model "thought" parts.
      const text = allParts.filter(p => p.text && !p.thought).map(p => p.text).join('');
      return { text, parts: allParts, response };
    }
    const toolResultParts = [];
    for (const part of fcParts) {
      const name = part.functionCall.name;
      const args = part.functionCall.args || {};
      // Mint an id so execute() receives the same ctx shape as the streaming
      // path (which always supplies { toolCallId }).
      const toolId = 'toolu_' + Math.random().toString(36).slice(2, 10);
      const toolDef = tools?.[name];
      // Unknown tools are reported back to the model as an error payload.
      let result = toolDef ? null : { error: true, message: 'Tool not found: ' + name };
      if (toolDef?.execute) {
        try { result = await toolDef.execute(args, { toolCallId: toolId }); }
        catch (e) { result = { error: true, message: e.message }; }
      }
      toolResultParts.push({ functionResponse: { name, response: result || {} } });
    }
    // Append the model turn and the tool results, then ask again.
    contents.push({ role: 'model', parts: allParts });
    contents.push({ role: 'user', parts: toolResultParts });
  }
}
100
+
101
// A provider counts as "Gemini" when it is named so, or when its base URL
// points at Google's Generative Language API.
function isGeminiProvider(p) {
  if (p.name === 'gemini') return true;
  const base = p.api_base_url || '';
  return base.includes('generativelanguage.googleapis.com');
}
104
+
105
// Provider selection: explicit provider name wins; otherwise match by the
// advertised model list; otherwise fall back to the first configured provider.
function findProvider(providers, providerName, modelName) {
  if (providerName) return providers.find(p => p.name === providerName);
  if (!modelName) return providers[0];
  return providers.find(p => (p.models || []).includes(modelName));
}
110
+
111
/**
 * Normalize a provider base URL into a chat-completions endpoint.
 *
 * Strips ALL trailing slashes (the previous regex `/\/$/g` is anchored and
 * removed only one, yielding `...v1//chat/completions`) and appends
 * '/chat/completions' unless the URL already targets a completions route.
 */
function buildOpenAIUrl(base) {
  const clean = (base || '').replace(/\/+$/, '');
  return clean.includes('/completions') ? clean : clean + '/chat/completions';
}
115
+
116
// Transformer lookup order: the per-model "use" list overrides the
// provider-wide "use" list; absent both, no transformers apply.
function resolveForProvider(provider, model, customMap) {
  const transformerCfg = provider.transformer;
  const useList = transformerCfg?.[model]?.use || transformerCfg?.use || [];
  return resolveTransformers(useList, customMap);
}
120
+
121
// Unified streaming generator used by RouterInstance.stream: resolves the
// target provider via `resolver`, then delegates either to the native Gemini
// stream or to the OpenAI-compatible streaming path.
async function* routerStream(params, resolver) {
  const { provider, actualModel, transformers } = await resolver(params);
  if (isGeminiProvider(provider)) {
    // Native Gemini: reuse the core stream; the provider's api_key takes
    // precedence over any key passed in params.
    yield* createFullStream({ ...params, model: actualModel, apiKey: provider.api_key || params.apiKey });
  } else {
    // OpenAI-compatible provider: convert messages/tools, apply the request
    // transformers, then stream. NOTE(review): `_extraHeaders` is presumably
    // injected by a transformer — confirm against lib/transformers.
    const oaiMsgs = openaiProv.convertMessages(params.messages, params.system);
    const oaiTools = openaiProv.convertTools(params.tools);
    let req = { messages: oaiMsgs, model: actualModel, max_tokens: params.maxOutputTokens || 8192, temperature: params.temperature ?? 0.5 };
    if (oaiTools) req.tools = oaiTools;
    req = applyRequestTransformers(req, transformers);
    yield* openaiProv.streamOpenAI({ url: buildOpenAIUrl(provider.api_base_url), apiKey: provider.api_key, headers: req._extraHeaders, body: req, tools: params.tools, onStepFinish: params.onStepFinish });
  }
}
134
+
135
// Build a RouterInstance from a RouterConfiguration: chooses a provider and
// model per request (via lib/router's route()), then dispatches to the Gemini
// or OpenAI-compatible backend.
function createRouter(config) {
  // Both 'Providers' (claude-code-router style) and 'providers' are accepted.
  const providers = config.Providers || config.providers || [];
  const routerCfg = config.Router || {};
  // Resolve one request to { provider, actualModel, transformers }.
  async function resolve(params) {
    const { providerName, modelName } = await route(params, routerCfg, config.customRouter);
    const provider = findProvider(providers, providerName, modelName) || providers[0];
    if (!provider) throw new Error('[thebird] no provider configured');
    // Model precedence: routed model > provider's first listed model >
    // model from params > hard default.
    const actualModel = modelName || (provider.models || [])[0] || extractModelId(params.model) || 'gemini-2.0-flash';
    const transformers = resolveForProvider(provider, actualModel, config._transformers);
    return { provider, actualModel, transformers };
  }
  return {
    stream(params) { return { fullStream: routerStream(params, resolve), warnings: Promise.resolve([]) }; },
    async generate(params) {
      const { provider, actualModel, transformers } = await resolve(params);
      if (isGeminiProvider(provider)) return generateGemini({ ...params, model: actualModel, apiKey: provider.api_key || params.apiKey });
      // OpenAI-compatible path mirrors routerStream's request construction.
      const oaiMsgs = openaiProv.convertMessages(params.messages, params.system);
      const oaiTools = openaiProv.convertTools(params.tools);
      let req = { messages: oaiMsgs, model: actualModel, max_tokens: params.maxOutputTokens || 8192, temperature: params.temperature ?? 0.5 };
      if (oaiTools) req.tools = oaiTools;
      req = applyRequestTransformers(req, transformers);
      return openaiProv.generateOpenAI({ url: buildOpenAIUrl(provider.api_base_url), apiKey: provider.api_key, headers: req._extraHeaders, body: req, tools: params.tools });
    }
  };
}
160
+
161
// Stream through the router when a providers list is configured; otherwise
// fall straight through to the Gemini backend.
function streamRouter(params) {
  const config = loadConfig(params.configPath);
  const providers = config.Providers || config.providers;
  if (!providers?.length) return streamGemini(params);
  return createRouter(config).stream(params);
}
166
+
167
// Generate through the router when a providers list is configured; otherwise
// fall straight through to the Gemini backend.
async function generateRouter(params) {
  const config = loadConfig(params.configPath);
  const providers = config.Providers || config.providers;
  if (!providers?.length) return generateGemini(params);
  return createRouter(config).generate(params);
}
172
+
173
+ module.exports = { streamGemini, generateGemini, streamRouter, generateRouter, createRouter, convertMessages, convertTools, cleanSchema, GeminiError };
package/lib/client.js ADDED
@@ -0,0 +1,10 @@
1
+ const { GoogleGenAI } = require('@google/genai');
2
+
3
// One GoogleGenAI client cached per effective API key. The previous
// single-slot cache rebuilt a client on EVERY call that passed an apiKey,
// and kept serving a stale client if GEMINI_API_KEY changed between
// key-less calls.
const _clients = new Map();

/**
 * Get (or lazily create) a GoogleGenAI client.
 * @param {string} [apiKey] explicit key; falls back to GEMINI_API_KEY.
 */
function getClient(apiKey) {
  const key = apiKey || process.env.GEMINI_API_KEY;
  let client = _clients.get(key);
  if (!client) {
    client = new GoogleGenAI({ apiKey: key });
    _clients.set(key, client);
  }
  return client;
}
9
+
10
+ module.exports = { getClient };
package/lib/config.js ADDED
@@ -0,0 +1,24 @@
1
+ const fs = require('fs');
2
+ const path = require('path');
3
+ const os = require('os');
4
+
5
// Recursively substitute ${VAR} and $VAR references in strings with values
// from process.env; unknown variables become the empty string. Arrays and
// plain objects are rebuilt; every other value passes through untouched.
function interpolateEnv(val) {
  if (typeof val === 'string') {
    return val.replace(/\$\{([^}]+)\}|\$([A-Z_][A-Z0-9_]*)/g, (_, braced, bare) => {
      return process.env[braced || bare] || '';
    });
  }
  if (Array.isArray(val)) return val.map(interpolateEnv);
  if (val !== null && typeof val === 'object') {
    const result = {};
    for (const key of Object.keys(val)) result[key] = interpolateEnv(val[key]);
    return result;
  }
  return val;
}
15
+
16
// Load and env-interpolate the JSON config. Path precedence: explicit arg,
// THEBIRD_CONFIG env var, then ~/.thebird/config.json. Any read, parse, or
// interpolation failure yields an empty config rather than throwing.
function loadConfig(configPath) {
  const file = configPath
    || process.env.THEBIRD_CONFIG
    || path.join(os.homedir(), '.thebird', 'config.json');
  try {
    const parsed = JSON.parse(fs.readFileSync(file, 'utf8'));
    return interpolateEnv(parsed);
  } catch {
    return {};
  }
}
23
+
24
+ module.exports = { loadConfig, interpolateEnv };
package/lib/convert.js ADDED
@@ -0,0 +1,86 @@
1
// Return a deep copy of a JSON schema with the keywords Gemini rejects
// ('additionalProperties' and '$schema') stripped at every nesting level.
// Non-object values are returned unchanged.
function cleanSchema(schema) {
  if (!schema || typeof schema !== 'object') return schema;
  if (Array.isArray(schema)) return schema.map(cleanSchema);
  const cleaned = {};
  for (const key of Object.keys(schema)) {
    if (key === 'additionalProperties' || key === '$schema') continue;
    cleaned[key] = cleanSchema(schema[key]);
  }
  return cleaned;
}
11
+
12
// Map a { name: ToolDefinition } record into Gemini function declarations.
// Accepts parameters either directly or wrapped in a `jsonSchema` field;
// schemas are cleaned of Gemini-unsupported keywords.
function convertTools(tools) {
  if (!tools || typeof tools !== 'object') return [];
  const declarations = [];
  for (const name of Object.keys(tools)) {
    const def = tools[name];
    const rawParams = def.parameters?.jsonSchema || def.parameters || { type: 'object' };
    declarations.push({
      name,
      description: def.description || '',
      parameters: cleanSchema(rawParams)
    });
  }
  return declarations;
}
20
+
21
/**
 * Convert any supported image-block shape into a Gemini part.
 *
 * Fix: Anthropic-style blocks ({ type: 'image', source: {...} }) previously
 * matched the first branch, which read `src.mimeType` — but Anthropic puts
 * the MIME type in `source.media_type`, so it was silently dropped and
 * 'image/jpeg' substituted; the dedicated Anthropic branch below it was
 * unreachable. The Anthropic shape is now handled explicitly and its
 * media_type honored.
 *
 * Returns null for unrecognized shapes.
 */
function convertImageBlock(b) {
  // Gemini-native inline data: { inlineData: { mimeType, data | url } }.
  if (b.inlineData) {
    const src = b.inlineData;
    if (src.data) return { inlineData: { mimeType: src.mimeType || 'image/jpeg', data: src.data } };
    if (src.url) return { fileData: { mimeType: src.mimeType || 'image/jpeg', fileUri: src.url } };
  }
  // Anthropic-style: { type: 'image', source: { media_type, data | url } }.
  if (b.type === 'image' && b.source) {
    const src = b.source;
    const mime = src.media_type || src.mimeType || 'image/jpeg';
    if (src.data) return { inlineData: { mimeType: mime, data: src.data } };
    if (src.url) return { fileData: { mimeType: mime, fileUri: src.url } };
  }
  // Gemini-native file reference: { fileData: { mimeType, fileUri } }.
  if (b.fileData) return { fileData: { mimeType: b.fileData.mimeType, fileUri: b.fileData.fileUri } };
  return null;
}
37
+
38
// Translate provider-agnostic messages into Gemini "contents".
// 'assistant' maps to role 'model'; string content becomes a single text
// part (empty strings are dropped); content-block arrays map text, image,
// tool_use and tool_result blocks, skipping anything unrecognized. Messages
// that end up with no parts are omitted entirely.
function convertMessages(messages) {
  const contents = [];
  for (const message of messages) {
    const role = message.role === 'assistant' ? 'model' : 'user';
    if (typeof message.content === 'string') {
      if (message.content) contents.push({ role, parts: [{ text: message.content }] });
      continue;
    }
    if (!Array.isArray(message.content)) continue;
    const parts = [];
    for (const block of message.content) {
      let part = null;
      if (block.type === 'text' && block.text) {
        part = { text: block.text };
      } else if (block.type === 'image' || block.inlineData || block.fileData) {
        part = convertImageBlock(block);
      } else if (block.type === 'tool_use') {
        part = { functionCall: { name: block.name, args: block.input || {} } };
      } else if (block.type === 'tool_result') {
        // String results are parsed as JSON when possible; otherwise the raw
        // string is wrapped in { result }.
        let response;
        try {
          response = typeof block.content === 'string' ? JSON.parse(block.content) : (block.content || {});
        } catch {
          response = { result: block.content };
        }
        part = { functionResponse: { name: block.name || 'unknown', response } };
      }
      if (part) parts.push(part);
    }
    if (parts.length) contents.push({ role, parts });
  }
  return contents;
}
64
+
65
// Accept a plain model-id string or an object carrying modelId/id; fall back
// to 'gemini-2.0-flash' when nothing usable is provided.
function extractModelId(model) {
  if (typeof model === 'string') return model;
  return model?.modelId || model?.id || 'gemini-2.0-flash';
}
71
+
72
// Assemble the Gemini request config from generation options, applying the
// library defaults (8192 max tokens, temperature 0.5, topP 0.95). Optional
// fields (topK, systemInstruction, tools, safetySettings) are only set when
// provided; tool declarations come from convertTools.
function buildConfig({ system, tools, temperature, maxOutputTokens, topP, topK, safetySettings } = {}) {
  const geminiTools = convertTools(tools);
  const config = {
    maxOutputTokens: maxOutputTokens ?? 8192,
    temperature: temperature ?? 0.5,
    topP: topP ?? 0.95
  };
  if (topK != null) config.topK = topK;
  if (system) config.systemInstruction = system;
  if (geminiTools.length > 0) config.tools = [{ functionDeclarations: geminiTools }];
  if (safetySettings) config.safetySettings = safetySettings;
  return { config, geminiTools };
}
85
+
86
+ module.exports = { cleanSchema, convertTools, convertMessages, extractModelId, buildConfig, convertImageBlock };
package/lib/errors.js ADDED
@@ -0,0 +1,35 @@
1
// Error type carrying an HTTP status / provider code plus a retryability
// flag consumed by withRetry.
class GeminiError extends Error {
  constructor(message, options = {}) {
    super(message);
    const { status, code, retryable = false } = options;
    this.name = 'GeminiError';
    this.status = status;
    this.code = code;
    this.retryable = retryable;
  }
}
10
+
11
// Decide whether an error is worth retrying: an explicit GeminiError flag,
// an HTTP 429 or 5xx status/code, or a quota / rate-limit style message.
function isRetryable(err) {
  if (err instanceof GeminiError) return err.retryable;
  const status = err?.status ?? err?.code;
  if (status === 429) return true;
  if (typeof status === 'number' && status >= 500) return true;
  return /quota|rate.?limit|overloaded|unavailable/i.test(err?.message ?? '');
}
19
+
20
// Run fn with exponential backoff: base 1s, doubling per attempt, up to
// 200ms of jitter, capped at 16s. Performs at most maxRetries retries after
// the first attempt; non-retryable errors and the final failure are rethrown.
async function withRetry(fn, maxRetries = 3) {
  let attempt = 0;
  for (;;) {
    try {
      return await fn();
    } catch (err) {
      if (!isRetryable(err) || attempt === maxRetries) throw err;
      const backoff = Math.min(1000 * 2 ** attempt + Math.random() * 200, 16000);
      await new Promise(resolve => setTimeout(resolve, backoff));
      attempt++;
    }
  }
}
34
+
35
+ module.exports = { GeminiError, isRetryable, withRetry };
@@ -0,0 +1,127 @@
1
+ const { GeminiError } = require('../errors');
2
+
3
// Flatten provider-agnostic messages into OpenAI chat format. A system
// prompt becomes a leading 'system' message; tool_result blocks become
// 'tool' role messages (one per result — other blocks in that message are
// ignored); tool_use blocks become assistant tool_calls alongside any text.
function convertMessages(messages, system) {
  const out = [];
  if (system) {
    out.push({ role: 'system', content: typeof system === 'string' ? system : JSON.stringify(system) });
  }
  for (const msg of messages) {
    if (typeof msg.content === 'string') {
      out.push({ role: msg.role, content: msg.content });
      continue;
    }
    if (!Array.isArray(msg.content)) continue;
    const resultBlocks = msg.content.filter(block => block.type === 'tool_result');
    if (resultBlocks.length > 0) {
      for (const block of resultBlocks) {
        const content = typeof block.content === 'string' ? block.content : JSON.stringify(block.content || '');
        out.push({ role: 'tool', tool_call_id: block.tool_use_id || block.id || block.name, content });
      }
      continue;
    }
    const text = msg.content.filter(block => block.type === 'text').map(block => block.text).join('');
    const callBlocks = msg.content.filter(block => block.type === 'tool_use');
    if (callBlocks.length > 0) {
      out.push({
        role: 'assistant',
        content: text || null,
        tool_calls: callBlocks.map(block => ({
          // A random id is minted only when the block carries none.
          id: block.id || ('call_' + Math.random().toString(36).slice(2, 8)),
          type: 'function',
          function: { name: block.name, arguments: JSON.stringify(block.input || {}) }
        }))
      });
    } else {
      out.push({ role: msg.role, content: text });
    }
  }
  return out;
}
29
+
30
// Map a { name: ToolDefinition } record to OpenAI tool schemas. Returns
// undefined (so the field can be omitted) when no tools are given.
function convertTools(tools) {
  if (!tools || typeof tools !== 'object') return undefined;
  const converted = Object.keys(tools).map(name => {
    const def = tools[name];
    return {
      type: 'function',
      function: {
        name,
        description: def.description || '',
        parameters: def.parameters?.jsonSchema || def.parameters || { type: 'object' }
      }
    };
  });
  return converted.length > 0 ? converted : undefined;
}
38
+
39
// POST a JSON body to an OpenAI-compatible endpoint with bearer auth plus
// any extra headers. Non-2xx responses are surfaced as GeminiError with the
// response text as the message; 429 and 5xx are marked retryable.
async function callOpenAI({ url, apiKey, headers, body }) {
  const allHeaders = { 'Content-Type': 'application/json', 'Authorization': `Bearer ${apiKey}`, ...(headers || {}) };
  const res = await fetch(url, { method: 'POST', headers: allHeaders, body: JSON.stringify(body) });
  if (res.ok) return res;
  const detail = await res.text();
  throw new GeminiError(detail, { status: res.status, retryable: res.status === 429 || res.status >= 500 });
}
46
+
47
// SSE streaming loop for OpenAI-compatible providers. Emits the same
// StreamEvent sequence as the Gemini path and runs the tool loop: when the
// stream delivers tool_calls, they are executed and fed back as 'tool'
// messages, then the request is re-issued with stream: true.
async function* streamOpenAI({ url, apiKey, headers, body, tools, onStepFinish }) {
  while (true) {
    yield { type: 'start-step' };
    const res = await callOpenAI({ url, apiKey, headers, body: { ...body, stream: true } });
    const reader = res.body.getReader();
    const dec = new TextDecoder();
    // buf accumulates a partial SSE line across reads; toolCallsMap
    // reassembles tool calls keyed by their delta index.
    let buf = '', toolCallsMap = {};
    try {
      while (true) {
        const { done, value } = await reader.read();
        if (done) break;
        buf += dec.decode(value, { stream: true });
        const lines = buf.split('\n');
        // The last element may be an incomplete line; keep it for next read.
        buf = lines.pop();
        for (const line of lines) {
          if (!line.startsWith('data: ')) continue;
          const d = line.slice(6).trim();
          // '[DONE]' only breaks the per-line loop; the outer read loop ends
          // when the HTTP stream closes.
          if (d === '[DONE]') break;
          let chunk; try { chunk = JSON.parse(d); } catch { continue; }
          const delta = chunk.choices?.[0]?.delta;
          if (!delta) continue;
          if (delta.content) yield { type: 'text-delta', textDelta: delta.content };
          if (delta.tool_calls) {
            // Tool-call fragments arrive incrementally: id replaces, while
            // name and argument text are concatenated.
            for (const tc of delta.tool_calls) {
              const idx = tc.index ?? 0;
              if (!toolCallsMap[idx]) toolCallsMap[idx] = { id: tc.id || '', name: '', args: '' };
              if (tc.id) toolCallsMap[idx].id = tc.id;
              if (tc.function?.name) toolCallsMap[idx].name += tc.function.name;
              if (tc.function?.arguments) toolCallsMap[idx].args += tc.function.arguments;
            }
          }
        }
      }
    } finally { reader.releaseLock(); }

    const pending = Object.values(toolCallsMap);
    if (!pending.length) {
      // No tool calls in this step: the conversation is complete.
      yield { type: 'finish-step', finishReason: 'stop' };
      if (onStepFinish) await onStepFinish();
      return;
    }
    const toolResultMsgs = [];
    for (const tc of pending) {
      let args; try { args = JSON.parse(tc.args || '{}'); } catch { args = {}; }
      const toolDef = tools?.[tc.name];
      // Unknown tools are reported back to the model as an error payload.
      let result = toolDef ? null : { error: true, message: 'Tool not found: ' + tc.name };
      // NOTE(review): the tool is executed BEFORE the 'tool-call' event is
      // yielded, unlike the Gemini path which yields first — confirm that
      // consumers don't rely on call-before-execute ordering.
      if (toolDef?.execute) try { result = await toolDef.execute(args, { toolCallId: tc.id }); } catch(e) { result = { error: true, message: e.message }; }
      yield { type: 'tool-call', toolCallId: tc.id, toolName: tc.name, args };
      yield { type: 'tool-result', toolCallId: tc.id, toolName: tc.name, args, result };
      toolResultMsgs.push({ role: 'tool', tool_call_id: tc.id, content: JSON.stringify(result ?? '') });
    }
    yield { type: 'finish-step', finishReason: 'tool-calls' };
    if (onStepFinish) await onStepFinish();
    // Append the assistant tool-call turn and the tool results, then loop to
    // request the follow-up step.
    body = { ...body, messages: [...body.messages,
      { role: 'assistant', content: null, tool_calls: pending.map(tc => ({ id: tc.id, type: 'function', function: { name: tc.name, arguments: tc.args } })) },
      ...toolResultMsgs
    ]};
    toolCallsMap = {};
  }
}
107
+
108
/**
 * Non-streaming generation against an OpenAI-compatible endpoint, running
 * the tool loop to completion: while the response contains tool_calls, each
 * is executed and fed back as a 'tool' message before re-requesting.
 *
 * Fix: execute() now receives { toolCallId }, matching streamOpenAI and the
 * declared ToolDefinition contract (it was previously called with args only).
 *
 * @throws {GeminiError} when the response carries no message.
 */
async function generateOpenAI({ url, apiKey, headers, body, tools }) {
  while (true) {
    const res = await callOpenAI({ url, apiKey, headers, body: { ...body, stream: false } });
    const data = await res.json();
    const msg = data.choices?.[0]?.message;
    if (!msg) throw new GeminiError('No message in response', { retryable: false });
    if (!msg.tool_calls?.length) return { text: msg.content || '', response: data };
    const toolResultMsgs = [];
    for (const tc of msg.tool_calls) {
      let args;
      try { args = JSON.parse(tc.function?.arguments || '{}'); } catch { args = {}; }
      const toolDef = tools?.[tc.function?.name];
      // Unknown tools are reported back to the model as an error payload.
      let result = toolDef ? null : { error: true, message: 'Tool not found: ' + tc.function?.name };
      if (toolDef?.execute) {
        try { result = await toolDef.execute(args, { toolCallId: tc.id }); }
        catch (e) { result = { error: true, message: e.message }; }
      }
      toolResultMsgs.push({ role: 'tool', tool_call_id: tc.id, content: JSON.stringify(result ?? '') });
    }
    // Append the assistant turn and the tool results, then ask again.
    body = { ...body, messages: [...body.messages, msg, ...toolResultMsgs] };
  }
}
126
+
127
+ module.exports = { streamOpenAI, generateOpenAI, convertMessages, convertTools };
package/lib/router.js ADDED
@@ -0,0 +1,51 @@
1
+ const { loadConfig } = require('./config');
2
+
3
+ const SUBAGENT_RE = /<CCR-SUBAGENT-MODEL>([^<]+)<\/CCR-SUBAGENT-MODEL>/;
4
+
5
// Crude token estimate: total character count of the system prompt plus all
// message contents (non-strings JSON-stringified), divided by 4, rounded up.
function estimateTokens(messages, system) {
  let totalChars = 0;
  if (typeof system === 'string') totalChars += system.length;
  else if (system) totalChars += JSON.stringify(system).length;
  for (const msg of (messages || [])) {
    totalChars += typeof msg.content === 'string'
      ? msg.content.length
      : JSON.stringify(msg.content || '').length;
  }
  return Math.ceil(totalChars / 4);
}
12
+
13
// Scan the FIRST message for a <CCR-SUBAGENT-MODEL>provider,model</...> tag
// injected by subagent prompts; returns the trimmed inner value or null.
function extractSubagentModel(messages) {
  const first = messages?.[0];
  if (!first) return null;
  let text = '';
  if (typeof first.content === 'string') {
    text = first.content;
  } else if (Array.isArray(first.content)) {
    text = first.content.map(block => block.text || '').join('');
  }
  const match = /<CCR-SUBAGENT-MODEL>([^<]+)<\/CCR-SUBAGENT-MODEL>/.exec(text);
  return match ? match[1].trim() : null;
}
21
+
22
// Split a "provider,model" route string at the FIRST comma; a bare string is
// treated as a model name with no provider.
function parseProviderModel(str) {
  const commaAt = str.indexOf(',');
  return commaAt === -1
    ? { providerName: null, modelName: str }
    : { providerName: str.slice(0, commaAt), modelName: str.slice(commaAt + 1) };
}
27
+
28
// Pick a { providerName, modelName } for a request. Priority order:
// custom router > subagent tag in the first message > task-type route >
// long-context route (when the token estimate exceeds the threshold,
// default 60000) > configured default. Both fields are null when nothing
// matches.
async function route(params, routerCfg, customRouterFn) {
  const { messages, system, taskType } = params;

  if (customRouterFn) {
    const picked = await customRouterFn(params, routerCfg);
    if (picked) return parseProviderModel(picked);
  }

  const subagentModel = extractSubagentModel(messages);
  if (subagentModel) return parseProviderModel(subagentModel);

  const taskRoutes = {
    background: routerCfg.background,
    think: routerCfg.think,
    webSearch: routerCfg.webSearch,
    image: routerCfg.image
  };
  const taskRoute = taskType ? taskRoutes[taskType] : undefined;
  if (taskRoute) return parseProviderModel(taskRoute);

  const threshold = routerCfg.longContextThreshold || 60000;
  if (routerCfg.longContext && estimateTokens(messages, system) > threshold) {
    return parseProviderModel(routerCfg.longContext);
  }

  if (routerCfg.default) return parseProviderModel(routerCfg.default);
  return { providerName: null, modelName: null };
}
50
+
51
+ module.exports = { route, estimateTokens, parseProviderModel };