@agenticc/core 1.0.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/LICENSE +21 -0
- package/README.md +370 -0
- package/README.zh-CN.md +372 -0
- package/dist/audit/index.d.ts +9 -0
- package/dist/audit/index.d.ts.map +1 -0
- package/dist/audit/index.js +9 -0
- package/dist/audit/index.js.map +1 -0
- package/dist/audit/logger.d.ts +211 -0
- package/dist/audit/logger.d.ts.map +1 -0
- package/dist/audit/logger.js +268 -0
- package/dist/audit/logger.js.map +1 -0
- package/dist/audit/query.d.ts +164 -0
- package/dist/audit/query.d.ts.map +1 -0
- package/dist/audit/query.js +250 -0
- package/dist/audit/query.js.map +1 -0
- package/dist/conversation/context-builder.d.ts +119 -0
- package/dist/conversation/context-builder.d.ts.map +1 -0
- package/dist/conversation/context-builder.js +252 -0
- package/dist/conversation/context-builder.js.map +1 -0
- package/dist/conversation/index.d.ts +10 -0
- package/dist/conversation/index.d.ts.map +1 -0
- package/dist/conversation/index.js +10 -0
- package/dist/conversation/index.js.map +1 -0
- package/dist/conversation/message-store.d.ts +231 -0
- package/dist/conversation/message-store.d.ts.map +1 -0
- package/dist/conversation/message-store.js +404 -0
- package/dist/conversation/message-store.js.map +1 -0
- package/dist/conversation/session.d.ts +201 -0
- package/dist/conversation/session.d.ts.map +1 -0
- package/dist/conversation/session.js +285 -0
- package/dist/conversation/session.js.map +1 -0
- package/dist/core/agent.d.ts +277 -0
- package/dist/core/agent.d.ts.map +1 -0
- package/dist/core/agent.js +674 -0
- package/dist/core/agent.js.map +1 -0
- package/dist/core/agentic-loop.d.ts +98 -0
- package/dist/core/agentic-loop.d.ts.map +1 -0
- package/dist/core/agentic-loop.js +496 -0
- package/dist/core/agentic-loop.js.map +1 -0
- package/dist/core/index.d.ts +14 -0
- package/dist/core/index.d.ts.map +1 -0
- package/dist/core/index.js +14 -0
- package/dist/core/index.js.map +1 -0
- package/dist/core/intent-parser.d.ts +101 -0
- package/dist/core/intent-parser.d.ts.map +1 -0
- package/dist/core/intent-parser.js +221 -0
- package/dist/core/intent-parser.js.map +1 -0
- package/dist/core/plan-generator.d.ts +133 -0
- package/dist/core/plan-generator.d.ts.map +1 -0
- package/dist/core/plan-generator.js +294 -0
- package/dist/core/plan-generator.js.map +1 -0
- package/dist/core/plugin-manager.d.ts +120 -0
- package/dist/core/plugin-manager.d.ts.map +1 -0
- package/dist/core/plugin-manager.js +369 -0
- package/dist/core/plugin-manager.js.map +1 -0
- package/dist/core/response-handler.d.ts +141 -0
- package/dist/core/response-handler.d.ts.map +1 -0
- package/dist/core/response-handler.js +384 -0
- package/dist/core/response-handler.js.map +1 -0
- package/dist/core/tool-executor.d.ts +143 -0
- package/dist/core/tool-executor.d.ts.map +1 -0
- package/dist/core/tool-executor.js +354 -0
- package/dist/core/tool-executor.js.map +1 -0
- package/dist/core/tool-registry.d.ts +133 -0
- package/dist/core/tool-registry.d.ts.map +1 -0
- package/dist/core/tool-registry.js +252 -0
- package/dist/core/tool-registry.js.map +1 -0
- package/dist/index.d.ts +44 -0
- package/dist/index.d.ts.map +1 -0
- package/dist/index.js +47 -0
- package/dist/index.js.map +1 -0
- package/dist/knowledge/chunker.d.ts +78 -0
- package/dist/knowledge/chunker.d.ts.map +1 -0
- package/dist/knowledge/chunker.js +233 -0
- package/dist/knowledge/chunker.js.map +1 -0
- package/dist/knowledge/embedder.d.ts +93 -0
- package/dist/knowledge/embedder.d.ts.map +1 -0
- package/dist/knowledge/embedder.js +205 -0
- package/dist/knowledge/embedder.js.map +1 -0
- package/dist/knowledge/index.d.ts +10 -0
- package/dist/knowledge/index.d.ts.map +1 -0
- package/dist/knowledge/index.js +11 -0
- package/dist/knowledge/index.js.map +1 -0
- package/dist/knowledge/loaders/index.d.ts +10 -0
- package/dist/knowledge/loaders/index.d.ts.map +1 -0
- package/dist/knowledge/loaders/index.js +10 -0
- package/dist/knowledge/loaders/index.js.map +1 -0
- package/dist/knowledge/loaders/markdown.d.ts +88 -0
- package/dist/knowledge/loaders/markdown.d.ts.map +1 -0
- package/dist/knowledge/loaders/markdown.js +205 -0
- package/dist/knowledge/loaders/markdown.js.map +1 -0
- package/dist/knowledge/loaders/yaml.d.ts +112 -0
- package/dist/knowledge/loaders/yaml.d.ts.map +1 -0
- package/dist/knowledge/loaders/yaml.js +368 -0
- package/dist/knowledge/loaders/yaml.js.map +1 -0
- package/dist/knowledge/retriever.d.ts +144 -0
- package/dist/knowledge/retriever.d.ts.map +1 -0
- package/dist/knowledge/retriever.js +399 -0
- package/dist/knowledge/retriever.js.map +1 -0
- package/dist/knowledge/store.d.ts +146 -0
- package/dist/knowledge/store.d.ts.map +1 -0
- package/dist/knowledge/store.js +420 -0
- package/dist/knowledge/store.js.map +1 -0
- package/dist/llm/adapter.d.ts +194 -0
- package/dist/llm/adapter.d.ts.map +1 -0
- package/dist/llm/adapter.js +42 -0
- package/dist/llm/adapter.js.map +1 -0
- package/dist/llm/adapters/anyrouter.d.ts +84 -0
- package/dist/llm/adapters/anyrouter.d.ts.map +1 -0
- package/dist/llm/adapters/anyrouter.js +372 -0
- package/dist/llm/adapters/anyrouter.js.map +1 -0
- package/dist/llm/adapters/claude.d.ts +66 -0
- package/dist/llm/adapters/claude.d.ts.map +1 -0
- package/dist/llm/adapters/claude.js +323 -0
- package/dist/llm/adapters/claude.js.map +1 -0
- package/dist/llm/adapters/index.d.ts +12 -0
- package/dist/llm/adapters/index.d.ts.map +1 -0
- package/dist/llm/adapters/index.js +12 -0
- package/dist/llm/adapters/index.js.map +1 -0
- package/dist/llm/adapters/mimo.d.ts +85 -0
- package/dist/llm/adapters/mimo.d.ts.map +1 -0
- package/dist/llm/adapters/mimo.js +316 -0
- package/dist/llm/adapters/mimo.js.map +1 -0
- package/dist/llm/adapters/openai.d.ts +53 -0
- package/dist/llm/adapters/openai.d.ts.map +1 -0
- package/dist/llm/adapters/openai.js +293 -0
- package/dist/llm/adapters/openai.js.map +1 -0
- package/dist/llm/adapters/qwen.d.ts +53 -0
- package/dist/llm/adapters/qwen.d.ts.map +1 -0
- package/dist/llm/adapters/qwen.js +299 -0
- package/dist/llm/adapters/qwen.js.map +1 -0
- package/dist/llm/adapters/siliconflow.d.ts +69 -0
- package/dist/llm/adapters/siliconflow.d.ts.map +1 -0
- package/dist/llm/adapters/siliconflow.js +331 -0
- package/dist/llm/adapters/siliconflow.js.map +1 -0
- package/dist/llm/index.d.ts +9 -0
- package/dist/llm/index.d.ts.map +1 -0
- package/dist/llm/index.js +12 -0
- package/dist/llm/index.js.map +1 -0
- package/dist/llm/manager.d.ts +97 -0
- package/dist/llm/manager.d.ts.map +1 -0
- package/dist/llm/manager.js +337 -0
- package/dist/llm/manager.js.map +1 -0
- package/dist/test-utils/arbitraries.d.ts +230 -0
- package/dist/test-utils/arbitraries.d.ts.map +1 -0
- package/dist/test-utils/arbitraries.js +280 -0
- package/dist/test-utils/arbitraries.js.map +1 -0
- package/dist/test-utils/cleanup.d.ts +184 -0
- package/dist/test-utils/cleanup.d.ts.map +1 -0
- package/dist/test-utils/cleanup.js +282 -0
- package/dist/test-utils/cleanup.js.map +1 -0
- package/dist/test-utils/config.d.ts +80 -0
- package/dist/test-utils/config.d.ts.map +1 -0
- package/dist/test-utils/config.js +94 -0
- package/dist/test-utils/config.js.map +1 -0
- package/dist/test-utils/index.d.ts +10 -0
- package/dist/test-utils/index.d.ts.map +1 -0
- package/dist/test-utils/index.js +36 -0
- package/dist/test-utils/index.js.map +1 -0
- package/dist/test-utils/mocks.d.ts +170 -0
- package/dist/test-utils/mocks.d.ts.map +1 -0
- package/dist/test-utils/mocks.js +281 -0
- package/dist/test-utils/mocks.js.map +1 -0
- package/dist/types/config.d.ts +170 -0
- package/dist/types/config.d.ts.map +1 -0
- package/dist/types/config.js +120 -0
- package/dist/types/config.js.map +1 -0
- package/dist/types/knowledge.d.ts +95 -0
- package/dist/types/knowledge.d.ts.map +1 -0
- package/dist/types/knowledge.js +7 -0
- package/dist/types/knowledge.js.map +1 -0
- package/dist/types/loop.d.ts +148 -0
- package/dist/types/loop.d.ts.map +1 -0
- package/dist/types/loop.js +16 -0
- package/dist/types/loop.js.map +1 -0
- package/dist/types/plugin.d.ts +137 -0
- package/dist/types/plugin.d.ts.map +1 -0
- package/dist/types/plugin.js +15 -0
- package/dist/types/plugin.js.map +1 -0
- package/dist/types/response.d.ts +186 -0
- package/dist/types/response.d.ts.map +1 -0
- package/dist/types/response.js +99 -0
- package/dist/types/response.js.map +1 -0
- package/dist/types/streaming.d.ts +478 -0
- package/dist/types/streaming.d.ts.map +1 -0
- package/dist/types/streaming.js +483 -0
- package/dist/types/streaming.js.map +1 -0
- package/dist/types/tool.d.ts +118 -0
- package/dist/types/tool.d.ts.map +1 -0
- package/dist/types/tool.js +42 -0
- package/dist/types/tool.js.map +1 -0
- package/dist/utils/error.d.ts +22 -0
- package/dist/utils/error.d.ts.map +1 -0
- package/dist/utils/error.js +36 -0
- package/dist/utils/error.js.map +1 -0
- package/package.json +102 -0

package/dist/llm/adapters/mimo.js
@@ -0,0 +1,316 @@
/**
 * Xiaomi MiMo LLM Adapter
 *
 * Implements the LLMAdapter interface for Xiaomi MiMo API.
 * MiMo provides OpenAI-compatible API with additional features like
 * deep thinking mode (reasoning_content).
 *
 * @see https://platform.xiaomimimo.com/#/docs/api/text-generation/openai-api
 */
import OpenAI from 'openai';
import { LLMError, promptToMessages } from '../adapter.js';
/**
 * Default MiMo API base URL
 */
const DEFAULT_MIMO_BASE_URL = 'https://api.xiaomimimo.com/v1';
/**
 * Available MiMo models
 */
export const MIMO_MODELS = {
    // Main models
    V2_FLASH: 'mimo-v2-flash',
};
/**
 * Xiaomi MiMo LLM Adapter implementation
 *
 * Provides access to MiMo models with support for:
 * - OpenAI-compatible chat completions
 * - Function/tool calling
 * - Deep thinking mode (reasoning_content)
 * - Streaming responses
 */
export class MiMoAdapter {
    provider = 'mimo';
    model;
    client;
    defaultTemperature;
    defaultMaxTokens;
    defaultThinkingMode;
    constructor(config) {
        this.model = config.model;
        this.defaultTemperature = config.temperature ?? 0.3;
        this.defaultMaxTokens = config.maxTokens ?? 65536;
        this.defaultThinkingMode = config.thinkingMode ?? 'disabled';
        this.client = new OpenAI({
            apiKey: config.apiKey,
            baseURL: config.baseUrl ?? DEFAULT_MIMO_BASE_URL,
            timeout: config.timeoutMs ?? 60000,
        });
    }
    async generate(prompt, options) {
        const messages = typeof prompt === 'string' ? promptToMessages(prompt, options?.systemPrompt) : prompt;
        try {
            const response = await this.client.chat.completions.create({
                model: this.model,
                messages: this.convertMessages(messages),
                temperature: options?.temperature ?? this.defaultTemperature,
                max_completion_tokens: options?.maxTokens ?? this.defaultMaxTokens,
                stop: options?.stopSequences,
                thinking: {
                    type: options?.thinkingMode ?? this.defaultThinkingMode,
                },
            }, { signal: options?.abortSignal });
            return response.choices[0]?.message?.content ?? '';
        }
        catch (error) {
            throw this.handleError(error);
        }
    }
    async generateWithTools(prompt, tools, options) {
        const messages = typeof prompt === 'string' ? promptToMessages(prompt, options?.systemPrompt) : prompt;
        try {
            const response = await this.client.chat.completions.create({
                model: this.model,
                messages: this.convertMessages(messages),
                tools: tools.map((t) => ({
                    type: 'function',
                    function: t.function,
                })),
                temperature: options?.temperature ?? this.defaultTemperature,
                max_completion_tokens: options?.maxTokens ?? this.defaultMaxTokens,
                stop: options?.stopSequences,
                thinking: {
                    type: options?.thinkingMode ?? this.defaultThinkingMode,
                },
            }, { signal: options?.abortSignal });
            const choice = response.choices[0];
            const message = choice?.message;
            const toolCalls = message?.tool_calls?.map((tc) => ({
                id: tc.id,
                name: tc.function.name,
                arguments: JSON.parse(tc.function.arguments),
            }));
            return {
                content: message?.content ?? '',
                toolCalls,
                finishReason: this.mapFinishReason(choice?.finish_reason),
                reasoningContent: message?.reasoning_content,
                usage: response.usage
                    ? {
                        promptTokens: response.usage.prompt_tokens,
                        completionTokens: response.usage.completion_tokens,
                        totalTokens: response.usage.total_tokens,
                    }
                    : undefined,
            };
        }
        catch (error) {
            throw this.handleError(error);
        }
    }
    async embed(_text) {
        // MiMo does not support embeddings currently
        throw new LLMError('MiMo does not support embeddings. Use SiliconFlow or OpenAI for embeddings.', 'INVALID_REQUEST', this.provider);
    }
    supportsEmbeddings() {
        return false;
    }
    supportsToolCalling() {
        return true;
    }
    supportsStreaming() {
        return true;
    }
    async generateWithToolsStream(prompt, tools, onChunk, options) {
        const messages = typeof prompt === 'string' ? promptToMessages(prompt, options?.systemPrompt) : prompt;
        try {
            const stream = await this.client.chat.completions.create({
                model: this.model,
                messages: this.convertMessages(messages),
                tools: tools.map((t) => ({
                    type: 'function',
                    function: t.function,
                })),
                temperature: options?.temperature ?? this.defaultTemperature,
                max_completion_tokens: options?.maxTokens ?? this.defaultMaxTokens,
                stop: options?.stopSequences,
                stream: true,
                thinking: {
                    type: options?.thinkingMode ?? this.defaultThinkingMode,
                },
            }, { signal: options?.abortSignal });
            // Accumulate the response
            let accumulatedContent = '';
            let accumulatedReasoningContent = '';
            const toolCallsMap = new Map();
            let finishReason = 'stop';
            for await (const chunk of stream) {
                const delta = chunk.choices[0]?.delta;
                const chunkFinishReason = chunk.choices[0]?.finish_reason;
                // Handle reasoning content chunks (from thinking mode)
                if (delta?.reasoning_content) {
                    accumulatedReasoningContent += delta.reasoning_content;
                    // Optionally emit reasoning content as a special chunk
                    // For now, we accumulate it silently
                }
                // Handle content chunks
                if (delta?.content) {
                    accumulatedContent += delta.content;
                    const streamChunk = {
                        type: 'content',
                        content: delta.content,
                    };
                    onChunk(streamChunk);
                }
                // Handle tool call chunks
                if (delta?.tool_calls) {
                    for (const tc of delta.tool_calls) {
                        const index = tc.index;
                        // Get or create tool call entry
                        let toolCallEntry = toolCallsMap.get(index);
                        if (!toolCallEntry) {
                            toolCallEntry = { id: '', name: '', arguments: '' };
                            toolCallsMap.set(index, toolCallEntry);
                        }
                        // Accumulate tool call data
                        if (tc.id)
                            toolCallEntry.id = tc.id;
                        if (tc.function?.name)
                            toolCallEntry.name = tc.function.name;
                        if (tc.function?.arguments)
                            toolCallEntry.arguments += tc.function.arguments;
                        // Send tool call chunk
                        const streamChunk = {
                            type: 'tool_call',
                            toolCall: {
                                index,
                                id: tc.id,
                                name: tc.function?.name,
                                arguments: tc.function?.arguments,
                            },
                        };
                        onChunk(streamChunk);
                    }
                }
                // Update finish reason
                if (chunkFinishReason) {
                    finishReason = this.mapFinishReason(chunkFinishReason);
                }
            }
            // Build final tool calls array
            const toolCalls = toolCallsMap.size > 0
                ? Array.from(toolCallsMap.entries())
                    .sort(([a], [b]) => a - b)
                    .map(([, tc]) => ({
                    id: tc.id,
                    name: tc.name,
                    arguments: JSON.parse(tc.arguments || '{}'),
                }))
                : undefined;
            // Build final response
            const response = {
                content: accumulatedContent,
                toolCalls,
                finishReason,
                reasoningContent: accumulatedReasoningContent || undefined,
            };
            // Send done chunk
            onChunk({ type: 'done', response });
            return response;
        }
        catch (error) {
            throw this.handleError(error);
        }
    }
    /**
     * Convert internal message format to OpenAI format
     * Preserves reasoning_content for multi-turn tool calling
     */
    convertMessages(messages) {
        return messages.map((msg) => {
            if (msg.role === 'tool') {
                return {
                    role: 'tool',
                    content: msg.content,
                    tool_call_id: msg.toolCallId ?? '',
                };
            }
            if (msg.role === 'assistant' && msg.toolCalls && msg.toolCalls.length > 0) {
                // Assistant message with tool calls
                return {
                    role: 'assistant',
                    content: msg.content || null,
                    tool_calls: msg.toolCalls.map((tc) => ({
                        id: tc.id,
                        type: 'function',
                        function: {
                            name: tc.name,
                            arguments: JSON.stringify(tc.arguments),
                        },
                    })),
                };
            }
            return {
                role: msg.role,
                content: msg.content,
            };
        });
    }
    /**
     * Map finish reason to internal format
     */
    mapFinishReason(reason) {
        switch (reason) {
            case 'stop':
                return 'stop';
            case 'tool_calls':
                return 'tool_calls';
            case 'length':
                return 'length';
            case 'content_filter':
                return 'content_filter';
            default:
                return 'stop';
        }
    }
    /**
     * Handle API errors
     */
    handleError(error) {
        // Check for abort error first
        if (error instanceof Error && error.name === 'AbortError') {
            return new LLMError('Operation cancelled', 'CANCELLED', this.provider, error);
        }
        // Preserve existing LLMError instances
        if (error instanceof LLMError) {
            return error;
        }
        if (error instanceof OpenAI.APIError) {
            const code = this.mapErrorCode(error.status, error.code);
            return new LLMError(error.message, code, this.provider, error);
        }
        if (error instanceof Error) {
            return new LLMError(error.message, 'UNKNOWN_ERROR', this.provider, error);
        }
        return new LLMError('Unknown error occurred', 'UNKNOWN_ERROR', this.provider);
    }
    /**
     * Map error codes to internal error codes
     */
    mapErrorCode(status, code) {
        if (status === 401)
            return 'AUTHENTICATION_ERROR';
        if (status === 429)
            return 'RATE_LIMIT_ERROR';
        if (status === 400)
            return 'INVALID_REQUEST';
        if (status === 404)
            return 'MODEL_NOT_FOUND';
        if (code === 'context_length_exceeded')
            return 'CONTEXT_LENGTH_EXCEEDED';
        if (code === 'content_filter')
            return 'CONTENT_FILTER';
        return 'UNKNOWN_ERROR';
    }
}
//# sourceMappingURL=mimo.js.map

package/dist/llm/adapters/mimo.js.map
@@ -0,0 +1 @@
{"version":3,"file":"mimo.js","sourceRoot":"","sources":["../../../src/llm/adapters/mimo.ts"],"names":[],"mappings":"AAAA;;;;;;;;GAQG;AAEH,OAAO,MAAM,MAAM,QAAQ,CAAC;AAY5B,OAAO,EAAE,QAAQ,EAAE,gBAAgB,EAAE,MAAM,eAAe,CAAC;AAG3D;;GAEG;AACH,MAAM,qBAAqB,GAAG,+BAA+B,CAAC;AAE9D;;GAEG;AACH,MAAM,CAAC,MAAM,WAAW,GAAG;IACzB,cAAc;IACd,QAAQ,EAAE,eAAe;CACjB,CAAC;AA+BX;;;;;;;;GAQG;AACH,MAAM,OAAO,WAAW;IACb,QAAQ,GAAG,MAAM,CAAC;IAClB,KAAK,CAAS;IAEf,MAAM,CAAS;IACf,kBAAkB,CAAS;IAC3B,gBAAgB,CAAS;IACzB,mBAAmB,CAAe;IAE1C,YAAY,MAAyB;QACnC,IAAI,CAAC,KAAK,GAAG,MAAM,CAAC,KAAK,CAAC;QAC1B,IAAI,CAAC,kBAAkB,GAAG,MAAM,CAAC,WAAW,IAAI,GAAG,CAAC;QACpD,IAAI,CAAC,gBAAgB,GAAG,MAAM,CAAC,SAAS,IAAI,KAAK,CAAC;QAClD,IAAI,CAAC,mBAAmB,GAAG,MAAM,CAAC,YAAY,IAAI,UAAU,CAAC;QAE7D,IAAI,CAAC,MAAM,GAAG,IAAI,MAAM,CAAC;YACvB,MAAM,EAAE,MAAM,CAAC,MAAM;YACrB,OAAO,EAAE,MAAM,CAAC,OAAO,IAAI,qBAAqB;YAChD,OAAO,EAAE,MAAM,CAAC,SAAS,IAAI,KAAK;SACnC,CAAC,CAAC;IACL,CAAC;IAED,KAAK,CAAC,QAAQ,CAAC,MAA8B,EAAE,OAA6B;QAC1E,MAAM,QAAQ,GACZ,OAAO,MAAM,KAAK,QAAQ,CAAC,CAAC,CAAC,gBAAgB,CAAC,MAAM,EAAE,OAAO,EAAE,YAAY,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC;QAExF,IAAI,CAAC;YACH,MAAM,QAAQ,GAAG,MAAM,IAAI,CAAC,MAAM,CAAC,IAAI,CAAC,WAAW,CAAC,MAAM,CACxD;gBACE,KAAK,EAAE,IAAI,CAAC,KAAK;gBACjB,QAAQ,EAAE,IAAI,CAAC,eAAe,CAAC,QAAQ,CAAC;gBACxC,WAAW,EAAE,OAAO,EAAE,WAAW,IAAI,IAAI,CAAC,kBAAkB;gBAC5D,qBAAqB,EAAE,OAAO,EAAE,SAAS,IAAI,IAAI,CAAC,gBAAgB;gBAClE,IAAI,EAAE,OAAO,EAAE,aAAa;gBAC5B,QAAQ,EAAE;oBACR,IAAI,EAAE,OAAO,EAAE,YAAY,IAAI,IAAI,CAAC,mBAAmB;iBACxD;aAC+C,EAClD,EAAE,MAAM,EAAE,OAAO,EAAE,WAAW,EAAE,CACjC,CAAC;YAEF,OAAO,QAAQ,CAAC,OAAO,CAAC,CAAC,CAAC,EAAE,OAAO,EAAE,OAAO,IAAI,EAAE,CAAC;QACrD,CAAC;QAAC,OAAO,KAAK,EAAE,CAAC;YACf,MAAM,IAAI,CAAC,WAAW,CAAC,KAAK,CAAC,CAAC;QAChC,CAAC;IACH,CAAC;IAED,KAAK,CAAC,iBAAiB,CACrB,MAA8B,EAC9B,KAAuB,EACvB,OAA6B;QAE7B,MAAM,QAAQ,GACZ,OAAO,MAAM,KAAK,QAAQ,CAAC,CAAC,CAAC,gBAAgB,CAAC,MAAM,EAAE,OAAO,EAAE,YAAY,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC;QAExF,IAAI,CAAC;YACH,MAAM,QAAQ,GAAG,MAAM,IAAI,CAAC,MAAM,CAAC,IAAI,CAAC,WAAW,CAAC,MAAM,CACxD;gBACE,KAAK,EAAE,IAAI,CAAC,KAAK;gBACjB,QAAQ,EAAE,IAAI,CAAC,eAAe,CAAC,QAAQ,CAAC;gBACxC,KAAK,EAAE,KAAK,CAAC,GAAG,CAAC,CAAC,CAAiB,EAAE,EAAE,CAAC,CAAC;oBACvC,IAAI,EAAE,UAAmB;oBACzB,QAAQ,EAAE,CAAC,CAAC,QAAQ;iBACrB,CAAC,CAAC;gBACH,WAAW,EAAE,OAAO,EAAE,WAAW,IAAI,IAAI,CAAC,kBAAkB;gBAC5D,qBAAqB,EAAE,OAAO,EAAE,SAAS,IAAI,IAAI,CAAC,gBAAgB;gBAClE,IAAI,EAAE,OAAO,EAAE,aAAa;gBAC5B,QAAQ,EAAE;oBACR,IAAI,EAAE,OAAO,EAAE,YAAY,IAAI,IAAI,CAAC,mBAAmB;iBACxD;aAC+C,EAClD,EAAE,MAAM,EAAE,OAAO,EAAE,WAAW,EAAE,CACjC,CAAC;YAEF,MAAM,MAAM,GAAG,QAAQ,CAAC,OAAO,CAAC,CAAC,CAAC,CAAC;YACnC,MAAM,OAAO,GAAG,MAAM,EAAE,OAEvB,CAAC;YAEF,MAAM,SAAS,GAA2B,OAAO,EAAE,UAAU,EAAE,GAAG,CAChE,CAAC,EAAyD,EAAE,EAAE,CAAC,CAAC;gBAC9D,EAAE,EAAE,EAAE,CAAC,EAAE;gBACT,IAAI,EAAE,EAAE,CAAC,QAAQ,CAAC,IAAI;gBACtB,SAAS,EAAE,IAAI,CAAC,KAAK,CAAC,EAAE,CAAC,QAAQ,CAAC,SAAS,CAAC;aAC7C,CAAC,CACH,CAAC;YAEF,OAAO;gBACL,OAAO,EAAE,OAAO,EAAE,OAAO,IAAI,EAAE;gBAC/B,SAAS;gBACT,YAAY,EAAE,IAAI,CAAC,eAAe,CAAC,MAAM,EAAE,aAAa,CAAC;gBACzD,gBAAgB,EAAE,OAAO,EAAE,iBAAiB;gBAC5C,KAAK,EAAE,QAAQ,CAAC,KAAK;oBACnB,CAAC,CAAC;wBACE,YAAY,EAAE,QAAQ,CAAC,KAAK,CAAC,aAAa;wBAC1C,gBAAgB,EAAE,QAAQ,CAAC,KAAK,CAAC,iBAAiB;wBAClD,WAAW,EAAE,QAAQ,CAAC,KAAK,CAAC,YAAY;qBACzC;oBACH,CAAC,CAAC,SAAS;aACd,CAAC;QACJ,CAAC;QAAC,OAAO,KAAK,EAAE,CAAC;YACf,MAAM,IAAI,CAAC,WAAW,CAAC,KAAK,CAAC,CAAC;QAChC,CAAC;IACH,CAAC;IAED,KAAK,CAAC,KAAK,CAAC,KAAa;QACvB,6CAA6C;QAC7C,MAAM,IAAI,QAAQ,CAChB,6EAA6E,EAC7E,iBAAiB,EACjB,IAAI,CAAC,QAAQ,CACd,CAAC;IACJ,CAAC;IAED,kBAAkB;QAChB,OAAO,KAAK,CAAC;IACf,CAAC;IAED,mBAAmB;QACjB,OAAO,IAAI,CAAC;IACd,CAAC;IAED,iBAAiB;QACf,OAAO,IAAI,CAAC;IACd,CAAC;IAED,KAAK,CAAC
,uBAAuB,CAC3B,MAA8B,EAC9B,KAAuB,EACvB,OAAuB,EACvB,OAA6B;QAE7B,MAAM,QAAQ,GACZ,OAAO,MAAM,KAAK,QAAQ,CAAC,CAAC,CAAC,gBAAgB,CAAC,MAAM,EAAE,OAAO,EAAE,YAAY,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC;QAExF,IAAI,CAAC;YACH,MAAM,MAAM,GAAG,MAAM,IAAI,CAAC,MAAM,CAAC,IAAI,CAAC,WAAW,CAAC,MAAM,CACtD;gBACE,KAAK,EAAE,IAAI,CAAC,KAAK;gBACjB,QAAQ,EAAE,IAAI,CAAC,eAAe,CAAC,QAAQ,CAAC;gBACxC,KAAK,EAAE,KAAK,CAAC,GAAG,CAAC,CAAC,CAAiB,EAAE,EAAE,CAAC,CAAC;oBACvC,IAAI,EAAE,UAAmB;oBACzB,QAAQ,EAAE,CAAC,CAAC,QAAQ;iBACrB,CAAC,CAAC;gBACH,WAAW,EAAE,OAAO,EAAE,WAAW,IAAI,IAAI,CAAC,kBAAkB;gBAC5D,qBAAqB,EAAE,OAAO,EAAE,SAAS,IAAI,IAAI,CAAC,gBAAgB;gBAClE,IAAI,EAAE,OAAO,EAAE,aAAa;gBAC5B,MAAM,EAAE,IAAI;gBACZ,QAAQ,EAAE;oBACR,IAAI,EAAE,OAAO,EAAE,YAAY,IAAI,IAAI,CAAC,mBAAmB;iBACxD;aAC4C,EAC/C,EAAE,MAAM,EAAE,OAAO,EAAE,WAAW,EAAE,CACjC,CAAC;YAEF,0BAA0B;YAC1B,IAAI,kBAAkB,GAAG,EAAE,CAAC;YAC5B,IAAI,2BAA2B,GAAG,EAAE,CAAC;YACrC,MAAM,YAAY,GAAG,IAAI,GAAG,EAA2D,CAAC;YACxF,IAAI,YAAY,GAAgC,MAAM,CAAC;YAEvD,IAAI,KAAK,EAAE,MAAM,KAAK,IAAI,MAAM,EAAE,CAAC;gBACjC,MAAM,KAAK,GAAG,KAAK,CAAC,OAAO,CAAC,CAAC,CAAC,EAAE,KAE/B,CAAC;gBACF,MAAM,iBAAiB,GAAG,KAAK,CAAC,OAAO,CAAC,CAAC,CAAC,EAAE,aAAa,CAAC;gBAE1D,uDAAuD;gBACvD,IAAI,KAAK,EAAE,iBAAiB,EAAE,CAAC;oBAC7B,2BAA2B,IAAI,KAAK,CAAC,iBAAiB,CAAC;oBACvD,uDAAuD;oBACvD,qCAAqC;gBACvC,CAAC;gBAED,wBAAwB;gBACxB,IAAI,KAAK,EAAE,OAAO,EAAE,CAAC;oBACnB,kBAAkB,IAAI,KAAK,CAAC,OAAO,CAAC;oBACpC,MAAM,WAAW,GAAgB;wBAC/B,IAAI,EAAE,SAAS;wBACf,OAAO,EAAE,KAAK,CAAC,OAAO;qBACvB,CAAC;oBACF,OAAO,CAAC,WAAW,CAAC,CAAC;gBACvB,CAAC;gBAED,0BAA0B;gBAC1B,IAAI,KAAK,EAAE,UAAU,EAAE,CAAC;oBACtB,KAAK,MAAM,EAAE,IAAI,KAAK,CAAC,UAAmB,EAAE,CAAC;wBAC3C,MAAM,KAAK,GAAG,EAAE,CAAC,KAAK,CAAC;wBAEvB,gCAAgC;wBAChC,IAAI,aAAa,GAAG,YAAY,CAAC,GAAG,CAAC,KAAK,CAAC,CAAC;wBAC5C,IAAI,CAAC,aAAa,EAAE,CAAC;4BACnB,aAAa,GAAG,EAAE,EAAE,EAAE,EAAE,EAAE,IAAI,EAAE,EAAE,EAAE,SAAS,EAAE,EAAE,EAAE,CAAC;4BACpD,YAAY,CAAC,GAAG,CAAC,KAAK,EAAE,aAAa,CAAC,CAAC;wBACzC,CAAC;wBAED,4BAA4B;wBAC5B,IAAI,EAAE,CAAC,EAAE;4BAAE,aAAa,CAAC,EAAE,GAAG,EAAE,CAAC,EAAE,CAAC;wBACpC,IAAI,EAAE,CAAC,QAAQ,EAAE,IAAI;4BAAE,aAAa,CAAC,IAAI,GAAG,EAAE,CAAC,QAAQ,CAAC,IAAI,CAAC;wBAC7D,IAAI,EAAE,CAAC,QAAQ,EAAE,SAAS;4BAAE,aAAa,CAAC,SAAS,IAAI,EAAE,CAAC,QAAQ,CAAC,SAAS,CAAC;wBAE7E,uBAAuB;wBACvB,MAAM,WAAW,GAAgB;4BAC/B,IAAI,EAAE,WAAW;4BACjB,QAAQ,EAAE;gCACR,KAAK;gCACL,EAAE,EAAE,EAAE,CAAC,EAAE;gCACT,IAAI,EAAE,EAAE,CAAC,QAAQ,EAAE,IAAI;gCACvB,SAAS,EAAE,EAAE,CAAC,QAAQ,EAAE,SAAS;6BAClC;yBACF,CAAC;wBACF,OAAO,CAAC,WAAW,CAAC,CAAC;oBACvB,CAAC;gBACH,CAAC;gBAED,uBAAuB;gBACvB,IAAI,iBAAiB,EAAE,CAAC;oBACtB,YAAY,GAAG,IAAI,CAAC,eAAe,CAAC,iBAAiB,CAAC,CAAC;gBACzD,CAAC;YACH,CAAC;YAED,+BAA+B;YAC/B,MAAM,SAAS,GACb,YAAY,CAAC,IAAI,GAAG,CAAC;gBACnB,CAAC,CAAC,KAAK,CAAC,IAAI,CAAC,YAAY,CAAC,OAAO,EAAE,CAAC;qBAC/B,IAAI,CAAC,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,EAAE,EAAE,CAAC,CAAC,GAAG,CAAC,CAAC;qBACzB,GAAG,CAAC,CAAC,CAAC,EAAE,EAAE,CAAC,EAAE,EAAE,CAAC,CAAC;oBAChB,EAAE,EAAE,EAAE,CAAC,EAAE;oBACT,IAAI,EAAE,EAAE,CAAC,IAAI;oBACb,SAAS,EAAE,IAAI,CAAC,KAAK,CAAC,EAAE,CAAC,SAAS,IAAI,IAAI,CAAC;iBAC5C,CAAC,CAAC;gBACP,CAAC,CAAC,SAAS,CAAC;YAEhB,uBAAuB;YACvB,MAAM,QAAQ,GAAoB;gBAChC,OAAO,EAAE,kBAAkB;gBAC3B,SAAS;gBACT,YAAY;gBACZ,gBAAgB,EAAE,2BAA2B,IAAI,SAAS;aAC3D,CAAC;YAEF,kBAAkB;YAClB,OAAO,CAAC,EAAE,IAAI,EAAE,MAAM,EAAE,QAAQ,EAAE,CAAC,CAAC;YAEpC,OAAO,QAAQ,CAAC;QAClB,CAAC;QAAC,OAAO,KAAK,EAAE,CAAC;YACf,MAAM,IAAI,CAAC,WAAW,CAAC,KAAK,CAAC,CAAC;QAChC,CAAC;IACH,CAAC;IAED;;;OAGG;IACK,eAAe,CAAC,QAAuB;QAC7C,OAAO,QAAQ,CAAC,GAAG,CAAC,CAAC,GAAgB,EAAE,EAAE;YACvC,IAAI,GAAG,CAAC,IAAI,KAAK,MAAM,EAAE,CAAC;gBACxB,OAAO;oBACL,IAAI,EAAE,MAAe;oBACrB,OAAO,EAAE,GAAG,CAAC,OAAO;oBACpB,YAAY,EAAE,GAAG,CAAC,UAAU,IAAI,EAAE;iBACnC
,CAAC;YACJ,CAAC;YACD,IAAI,GAAG,CAAC,IAAI,KAAK,WAAW,IAAI,GAAG,CAAC,SAAS,IAAI,GAAG,CAAC,SAAS,CAAC,MAAM,GAAG,CAAC,EAAE,CAAC;gBAC1E,oCAAoC;gBACpC,OAAO;oBACL,IAAI,EAAE,WAAoB;oBAC1B,OAAO,EAAE,GAAG,CAAC,OAAO,IAAI,IAAI;oBAC5B,UAAU,EAAE,GAAG,CAAC,SAAS,CAAC,GAAG,CAAC,CAAC,EAAY,EAAE,EAAE,CAAC,CAAC;wBAC/C,EAAE,EAAE,EAAE,CAAC,EAAE;wBACT,IAAI,EAAE,UAAmB;wBACzB,QAAQ,EAAE;4BACR,IAAI,EAAE,EAAE,CAAC,IAAI;4BACb,SAAS,EAAE,IAAI,CAAC,SAAS,CAAC,EAAE,CAAC,SAAS,CAAC;yBACxC;qBACF,CAAC,CAAC;iBACJ,CAAC;YACJ,CAAC;YACD,OAAO;gBACL,IAAI,EAAE,GAAG,CAAC,IAAuC;gBACjD,OAAO,EAAE,GAAG,CAAC,OAAO;aACrB,CAAC;QACJ,CAAC,CAAC,CAAC;IACL,CAAC;IAED;;OAEG;IACK,eAAe,CAAC,MAAsB;QAC5C,QAAQ,MAAM,EAAE,CAAC;YACf,KAAK,MAAM;gBACT,OAAO,MAAM,CAAC;YAChB,KAAK,YAAY;gBACf,OAAO,YAAY,CAAC;YACtB,KAAK,QAAQ;gBACX,OAAO,QAAQ,CAAC;YAClB,KAAK,gBAAgB;gBACnB,OAAO,gBAAgB,CAAC;YAC1B;gBACE,OAAO,MAAM,CAAC;QAClB,CAAC;IACH,CAAC;IAED;;OAEG;IACK,WAAW,CAAC,KAAc;QAChC,8BAA8B;QAC9B,IAAI,KAAK,YAAY,KAAK,IAAI,KAAK,CAAC,IAAI,KAAK,YAAY,EAAE,CAAC;YAC1D,OAAO,IAAI,QAAQ,CAAC,qBAAqB,EAAE,WAAW,EAAE,IAAI,CAAC,QAAQ,EAAE,KAAK,CAAC,CAAC;QAChF,CAAC;QAED,uCAAuC;QACvC,IAAI,KAAK,YAAY,QAAQ,EAAE,CAAC;YAC9B,OAAO,KAAK,CAAC;QACf,CAAC;QAED,IAAI,KAAK,YAAY,MAAM,CAAC,QAAQ,EAAE,CAAC;YACrC,MAAM,IAAI,GAAG,IAAI,CAAC,YAAY,CAAC,KAAK,CAAC,MAAM,EAAE,KAAK,CAAC,IAAI,CAAC,CAAC;YACzD,OAAO,IAAI,QAAQ,CAAC,KAAK,CAAC,OAAO,EAAE,IAAI,EAAE,IAAI,CAAC,QAAQ,EAAE,KAAK,CAAC,CAAC;QACjE,CAAC;QAED,IAAI,KAAK,YAAY,KAAK,EAAE,CAAC;YAC3B,OAAO,IAAI,QAAQ,CAAC,KAAK,CAAC,OAAO,EAAE,eAAe,EAAE,IAAI,CAAC,QAAQ,EAAE,KAAK,CAAC,CAAC;QAC5E,CAAC;QAED,OAAO,IAAI,QAAQ,CAAC,wBAAwB,EAAE,eAAe,EAAE,IAAI,CAAC,QAAQ,CAAC,CAAC;IAChF,CAAC;IAED;;OAEG;IACK,YAAY,CAAC,MAAe,EAAE,IAAoB;QACxD,IAAI,MAAM,KAAK,GAAG;YAAE,OAAO,sBAAsB,CAAC;QAClD,IAAI,MAAM,KAAK,GAAG;YAAE,OAAO,kBAAkB,CAAC;QAC9C,IAAI,MAAM,KAAK,GAAG;YAAE,OAAO,iBAAiB,CAAC;QAC7C,IAAI,MAAM,KAAK,GAAG;YAAE,OAAO,iBAAiB,CAAC;QAC7C,IAAI,IAAI,KAAK,yBAAyB;YAAE,OAAO,yBAAyB,CAAC;QACzE,IAAI,IAAI,KAAK,gBAAgB;YAAE,OAAO,gBAAgB,CAAC;QACvD,OAAO,eAAe,CAAC;IACzB,CAAC;CACF"}
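
The MiMo adapter's doc comment above highlights its OpenAI-compatible chat completions, tool calling, and deep thinking mode (reasoning_content). The following is a minimal usage sketch, not taken from the package: it assumes MiMoAdapter and MIMO_MODELS are re-exported from the package root, assumes 'enabled' is an accepted thinkingMode value (the code above only shows the 'disabled' default), and the get_weather tool and its schema are purely hypothetical.

// Sketch only (TypeScript): export paths, the 'enabled' thinking value, and the tool are assumptions.
import { MiMoAdapter, MIMO_MODELS } from '@agenticc/core';

const mimo = new MiMoAdapter({
    apiKey: process.env.MIMO_API_KEY ?? '',
    model: MIMO_MODELS.V2_FLASH,
    thinkingMode: 'enabled', // assumed value; the adapter defaults to 'disabled'
});

// Hypothetical tool in the shape generateWithTools forwards: each tools[].function
// is passed straight through as an OpenAI-style function definition.
const tools = [
    {
        function: {
            name: 'get_weather',
            description: 'Look up the current weather for a city',
            parameters: {
                type: 'object',
                properties: { city: { type: 'string' } },
                required: ['city'],
            },
        },
    },
];

const result = await mimo.generateWithTools('What is the weather in Beijing?', tools);
if (result.toolCalls?.length) {
    // The adapter has already JSON.parse'd each tool call's arguments.
    console.log(result.toolCalls[0].name, result.toolCalls[0].arguments);
}
// Populated when thinking mode is on; otherwise undefined.
console.log(result.reasoningContent);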

package/dist/llm/adapters/openai.d.ts
@@ -0,0 +1,53 @@
/**
 * OpenAI LLM Adapter
 *
 * Implements the LLMAdapter interface for OpenAI's API.
 * Supports chat completions, function calling, and embeddings.
 */
import type { LLMAdapter, LLMAdapterConfig, GenerateOptions, ChatMessage, LLMResponse, EmbeddingResult, StreamCallback } from '../adapter.js';
import type { ToolDefinition } from '../../types/tool.js';
/**
 * OpenAI-specific configuration
 */
export interface OpenAIAdapterConfig extends LLMAdapterConfig {
    /** Organization ID (optional) */
    organization?: string;
    /** Embedding model (defaults to text-embedding-ada-002) */
    embeddingModel?: string;
}
/**
 * OpenAI LLM Adapter implementation
 */
export declare class OpenAIAdapter implements LLMAdapter {
    readonly provider = "openai";
    readonly model: string;
    private client;
    private embeddingModel;
    private defaultTemperature;
    private defaultMaxTokens;
    constructor(config: OpenAIAdapterConfig);
    generate(prompt: string | ChatMessage[], options?: GenerateOptions): Promise<string>;
    generateWithTools(prompt: string | ChatMessage[], tools: ToolDefinition[], options?: GenerateOptions): Promise<LLMResponse>;
    embed(text: string): Promise<EmbeddingResult>;
    supportsEmbeddings(): boolean;
    supportsToolCalling(): boolean;
    supportsStreaming(): boolean;
    generateWithToolsStream(prompt: string | ChatMessage[], tools: ToolDefinition[], onChunk: StreamCallback, options?: GenerateOptions): Promise<LLMResponse>;
    /**
     * Convert internal message format to OpenAI format
     */
    private convertMessages;
    /**
     * Map OpenAI finish reason to internal format
     */
    private mapFinishReason;
    /**
     * Handle OpenAI API errors
     */
    private handleError;
    /**
     * Map OpenAI error codes to internal error codes
     */
    private mapErrorCode;
}
//# sourceMappingURL=openai.d.ts.map

package/dist/llm/adapters/openai.d.ts.map
@@ -0,0 +1 @@
{"version":3,"file":"openai.d.ts","sourceRoot":"","sources":["../../../src/llm/adapters/openai.ts"],"names":[],"mappings":"AAAA;;;;;GAKG;AAGH,OAAO,KAAK,EACV,UAAU,EACV,gBAAgB,EAChB,eAAe,EACf,WAAW,EACX,WAAW,EAEX,eAAe,EACf,cAAc,EAEf,MAAM,eAAe,CAAC;AAEvB,OAAO,KAAK,EAAE,cAAc,EAAE,MAAM,qBAAqB,CAAC;AAE1D;;GAEG;AACH,MAAM,WAAW,mBAAoB,SAAQ,gBAAgB;IAC3D,iCAAiC;IACjC,YAAY,CAAC,EAAE,MAAM,CAAC;IACtB,2DAA2D;IAC3D,cAAc,CAAC,EAAE,MAAM,CAAC;CACzB;AAED;;GAEG;AACH,qBAAa,aAAc,YAAW,UAAU;IAC9C,QAAQ,CAAC,QAAQ,YAAY;IAC7B,QAAQ,CAAC,KAAK,EAAE,MAAM,CAAC;IAEvB,OAAO,CAAC,MAAM,CAAS;IACvB,OAAO,CAAC,cAAc,CAAS;IAC/B,OAAO,CAAC,kBAAkB,CAAS;IACnC,OAAO,CAAC,gBAAgB,CAAS;gBAErB,MAAM,EAAE,mBAAmB;IAcjC,QAAQ,CAAC,MAAM,EAAE,MAAM,GAAG,WAAW,EAAE,EAAE,OAAO,CAAC,EAAE,eAAe,GAAG,OAAO,CAAC,MAAM,CAAC;IAsBpF,iBAAiB,CACrB,MAAM,EAAE,MAAM,GAAG,WAAW,EAAE,EAC9B,KAAK,EAAE,cAAc,EAAE,EACvB,OAAO,CAAC,EAAE,eAAe,GACxB,OAAO,CAAC,WAAW,CAAC;IAgDjB,KAAK,CAAC,IAAI,EAAE,MAAM,GAAG,OAAO,CAAC,eAAe,CAAC;IAqBnD,kBAAkB,IAAI,OAAO;IAI7B,mBAAmB,IAAI,OAAO;IAI9B,iBAAiB,IAAI,OAAO;IAItB,uBAAuB,CAC3B,MAAM,EAAE,MAAM,GAAG,WAAW,EAAE,EAC9B,KAAK,EAAE,cAAc,EAAE,EACvB,OAAO,EAAE,cAAc,EACvB,OAAO,CAAC,EAAE,eAAe,GACxB,OAAO,CAAC,WAAW,CAAC;IAyGvB;;OAEG;IACH,OAAO,CAAC,eAAe;IA+BvB;;OAEG;IACH,OAAO,CAAC,eAAe;IAevB;;OAEG;IACH,OAAO,CAAC,WAAW;IAuBnB;;OAEG;IACH,OAAO,CAAC,YAAY;CASrB"}

package/dist/llm/adapters/openai.js
@@ -0,0 +1,293 @@
/**
 * OpenAI LLM Adapter
 *
 * Implements the LLMAdapter interface for OpenAI's API.
 * Supports chat completions, function calling, and embeddings.
 */
import OpenAI from 'openai';
import { LLMError, promptToMessages } from '../adapter.js';
/**
 * OpenAI LLM Adapter implementation
 */
export class OpenAIAdapter {
    provider = 'openai';
    model;
    client;
    embeddingModel;
    defaultTemperature;
    defaultMaxTokens;
    constructor(config) {
        this.model = config.model;
        this.embeddingModel = config.embeddingModel ?? 'text-embedding-ada-002';
        this.defaultTemperature = config.temperature ?? 0.7;
        this.defaultMaxTokens = config.maxTokens ?? 2048;
        this.client = new OpenAI({
            apiKey: config.apiKey,
            baseURL: config.baseUrl,
            organization: config.organization,
            timeout: config.timeoutMs ?? 30000,
        });
    }
    async generate(prompt, options) {
        const messages = typeof prompt === 'string' ? promptToMessages(prompt, options?.systemPrompt) : prompt;
        try {
            const response = await this.client.chat.completions.create({
                model: this.model,
                messages: this.convertMessages(messages),
                temperature: options?.temperature ?? this.defaultTemperature,
                max_tokens: options?.maxTokens ?? this.defaultMaxTokens,
                stop: options?.stopSequences,
            }, { signal: options?.abortSignal });
            return response.choices[0]?.message?.content ?? '';
        }
        catch (error) {
            throw this.handleError(error);
        }
    }
    async generateWithTools(prompt, tools, options) {
        const messages = typeof prompt === 'string' ? promptToMessages(prompt, options?.systemPrompt) : prompt;
        try {
            const response = await this.client.chat.completions.create({
                model: this.model,
                messages: this.convertMessages(messages),
                tools: tools.map((t) => ({
                    type: 'function',
                    function: t.function,
                })),
                temperature: options?.temperature ?? this.defaultTemperature,
                max_tokens: options?.maxTokens ?? this.defaultMaxTokens,
                stop: options?.stopSequences,
            }, { signal: options?.abortSignal });
            const choice = response.choices[0];
            const message = choice?.message;
            const toolCalls = message?.tool_calls?.map((tc) => ({
                id: tc.id,
                name: tc.function.name,
                arguments: JSON.parse(tc.function.arguments),
            }));
            return {
                content: message?.content ?? '',
                toolCalls,
                finishReason: this.mapFinishReason(choice?.finish_reason),
                usage: response.usage
                    ? {
                        promptTokens: response.usage.prompt_tokens,
                        completionTokens: response.usage.completion_tokens,
                        totalTokens: response.usage.total_tokens,
                    }
                    : undefined,
            };
        }
        catch (error) {
            throw this.handleError(error);
        }
    }
    async embed(text) {
        try {
            const response = await this.client.embeddings.create({
                model: this.embeddingModel,
                input: text,
            });
            const embeddingData = response.data[0];
            if (!embeddingData) {
                throw new LLMError('No embedding data returned', 'INVALID_REQUEST', this.provider);
            }
            return {
                embedding: embeddingData.embedding,
                tokenCount: response.usage?.total_tokens,
            };
        }
        catch (error) {
            throw this.handleError(error);
        }
    }
    supportsEmbeddings() {
        return true;
    }
    supportsToolCalling() {
        return true;
    }
    supportsStreaming() {
        return true;
    }
    async generateWithToolsStream(prompt, tools, onChunk, options) {
        const messages = typeof prompt === 'string' ? promptToMessages(prompt, options?.systemPrompt) : prompt;
        try {
            const stream = await this.client.chat.completions.create({
                model: this.model,
                messages: this.convertMessages(messages),
                tools: tools.map((t) => ({
                    type: 'function',
                    function: t.function,
                })),
                temperature: options?.temperature ?? this.defaultTemperature,
                max_tokens: options?.maxTokens ?? this.defaultMaxTokens,
                stop: options?.stopSequences,
                stream: true,
            }, { signal: options?.abortSignal });
            // Accumulate the response
            let accumulatedContent = '';
            const toolCallsMap = new Map();
            let finishReason = 'stop';
            for await (const chunk of stream) {
                const delta = chunk.choices[0]?.delta;
                const chunkFinishReason = chunk.choices[0]?.finish_reason;
                // Handle content chunks
                if (delta?.content) {
                    accumulatedContent += delta.content;
                    const streamChunk = {
                        type: 'content',
                        content: delta.content,
                    };
                    onChunk(streamChunk);
                }
                // Handle tool call chunks
                if (delta?.tool_calls) {
                    for (const tc of delta.tool_calls) {
                        const index = tc.index;
                        // Get or create tool call entry
                        let toolCallEntry = toolCallsMap.get(index);
                        if (!toolCallEntry) {
                            toolCallEntry = { id: '', name: '', arguments: '' };
                            toolCallsMap.set(index, toolCallEntry);
                        }
                        // Accumulate tool call data
                        if (tc.id)
                            toolCallEntry.id = tc.id;
                        if (tc.function?.name)
                            toolCallEntry.name = tc.function.name;
                        if (tc.function?.arguments)
                            toolCallEntry.arguments += tc.function.arguments;
                        // Send tool call chunk
                        const streamChunk = {
                            type: 'tool_call',
                            toolCall: {
                                index,
                                id: tc.id,
                                name: tc.function?.name,
                                arguments: tc.function?.arguments,
                            },
                        };
                        onChunk(streamChunk);
                    }
                }
                // Update finish reason
                if (chunkFinishReason) {
                    finishReason = this.mapFinishReason(chunkFinishReason);
                }
            }
            // Build final tool calls array
            const toolCalls = toolCallsMap.size > 0
                ? Array.from(toolCallsMap.entries())
                    .sort(([a], [b]) => a - b)
                    .map(([, tc]) => ({
                    id: tc.id,
                    name: tc.name,
                    arguments: JSON.parse(tc.arguments || '{}'),
                }))
                : undefined;
            // Build final response
            const response = {
                content: accumulatedContent,
                toolCalls,
                finishReason,
            };
            // Send done chunk
            onChunk({ type: 'done', response });
            return response;
        }
        catch (error) {
            throw this.handleError(error);
        }
    }
    /**
     * Convert internal message format to OpenAI format
     */
    convertMessages(messages) {
        return messages.map((msg) => {
            if (msg.role === 'tool') {
                return {
                    role: 'tool',
                    content: msg.content,
                    tool_call_id: msg.toolCallId ?? '',
                };
            }
            if (msg.role === 'assistant' && msg.toolCalls && msg.toolCalls.length > 0) {
                // Assistant message with tool calls
                return {
                    role: 'assistant',
                    content: msg.content || null,
                    tool_calls: msg.toolCalls.map((tc) => ({
                        id: tc.id,
                        type: 'function',
                        function: {
                            name: tc.name,
                            arguments: JSON.stringify(tc.arguments),
                        },
                    })),
                };
            }
            return {
                role: msg.role,
                content: msg.content,
            };
        });
    }
    /**
     * Map OpenAI finish reason to internal format
     */
    mapFinishReason(reason) {
        switch (reason) {
            case 'stop':
                return 'stop';
            case 'tool_calls':
                return 'tool_calls';
            case 'length':
                return 'length';
            case 'content_filter':
                return 'content_filter';
            default:
                return 'stop';
        }
    }
    /**
     * Handle OpenAI API errors
     */
    handleError(error) {
        // Check for abort error first
        if (error instanceof Error && error.name === 'AbortError') {
            return new LLMError('Operation cancelled', 'CANCELLED', this.provider, error);
        }
        // Preserve existing LLMError instances
        if (error instanceof LLMError) {
            return error;
        }
        if (error instanceof OpenAI.APIError) {
            const code = this.mapErrorCode(error.status, error.code);
            return new LLMError(error.message, code, this.provider, error);
        }
        if (error instanceof Error) {
            return new LLMError(error.message, 'UNKNOWN_ERROR', this.provider, error);
        }
        return new LLMError('Unknown error occurred', 'UNKNOWN_ERROR', this.provider);
    }
    /**
     * Map OpenAI error codes to internal error codes
     */
    mapErrorCode(status, code) {
        if (status === 401)
            return 'AUTHENTICATION_ERROR';
        if (status === 429)
            return 'RATE_LIMIT_ERROR';
        if (status === 400)
            return 'INVALID_REQUEST';
        if (status === 404)
            return 'MODEL_NOT_FOUND';
        if (code === 'context_length_exceeded')
            return 'CONTEXT_LENGTH_EXCEEDED';
        if (code === 'content_filter')
            return 'CONTENT_FILTER';
        return 'UNKNOWN_ERROR';
    }
}
//# sourceMappingURL=openai.js.map
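
To make the streaming and embedding paths above concrete, a final hedged consumer sketch: it assumes OpenAIAdapter is exported from the package root, the model name is only an example, and the search_docs tool is hypothetical; the chunk shapes ('content', 'tool_call', 'done') mirror what generateWithToolsStream emits above.

// Sketch only (TypeScript): root export, model name, and the search_docs tool are assumptions.
import { OpenAIAdapter } from '@agenticc/core';

const adapter = new OpenAIAdapter({
    apiKey: process.env.OPENAI_API_KEY ?? '',
    model: 'gpt-4o-mini', // example model name
});

// Stream a tool-calling turn: print text deltas as they arrive; the fully
// accumulated response (with parsed tool calls) is returned at the end.
const response = await adapter.generateWithToolsStream(
    'Search the docs for "retriever" and summarize what you find.',
    [
        {
            function: {
                name: 'search_docs', // hypothetical tool
                description: 'Full-text search over project docs',
                parameters: {
                    type: 'object',
                    properties: { query: { type: 'string' } },
                    required: ['query'],
                },
            },
        },
    ],
    (chunk) => {
        if (chunk.type === 'content' && chunk.content) {
            process.stdout.write(chunk.content);
        }
        // 'tool_call' chunks carry partial argument deltas; the final response
        // assembles and JSON-parses them, so most consumers can ignore them here.
    },
);
console.log('\nfinish reason:', response.finishReason);

// Embeddings go through the separate embed() path (text-embedding-ada-002 by default).
const { embedding, tokenCount } = await adapter.embed('hello world');
console.log(embedding.length, 'dimensions,', tokenCount, 'tokens');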