bopodev-api 0.1.31 → 0.1.33

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,375 @@
1
+ import { resolveDirectApiCredentials, type DirectApiProvider } from "bopodev-agent-sdk";
2
+
3
/**
 * Provider-agnostic description of a tool the assistant may call.
 *
 * `inputSchema` is a JSON-Schema-style object; it is forwarded as Anthropic's
 * `input_schema` or OpenAI's function `parameters` when the request is built.
 */
export type AssistantToolDefinition = {
  name: string;
  description: string;
  inputSchema: Record<string, unknown>;
};
8
+
9
/** A single prior turn of the user/assistant conversation, in generic form. */
export type AssistantChatMessage =
  | { role: "user"; content: string }
  | { role: "assistant"; content: string };
12
+
13
/**
 * Usage accounting for one assistant turn: token counts summed across every
 * API round of the turn (including tool-use rounds) and the model id that was
 * actually sent to the provider.
 */
export type AssistantApiTurnMetrics = {
  tokenInput: number;
  tokenOutput: number;
  runtimeModelId: string;
};
18
+
19
// Fallback model ids used when BOPO_ASSISTANT_MODEL is not set (consumed by
// resolveAssistantModel below).
const DEFAULT_ANTHROPIC_MODEL = "claude-sonnet-4-5-20250929";
const DEFAULT_OPENAI_MODEL = "gpt-4.1";
21
+
22
+ export function resolveAssistantProvider(): DirectApiProvider {
23
+ const raw = process.env.BOPO_ASSISTANT_PROVIDER?.trim().toLowerCase();
24
+ if (raw === "openai_api") {
25
+ return "openai_api";
26
+ }
27
+ return "anthropic_api";
28
+ }
29
+
30
+ export function resolveAssistantModel(provider: DirectApiProvider): string {
31
+ const override = process.env.BOPO_ASSISTANT_MODEL?.trim();
32
+ if (override) {
33
+ return override;
34
+ }
35
+ return provider === "openai_api" ? DEFAULT_OPENAI_MODEL : DEFAULT_ANTHROPIC_MODEL;
36
+ }
37
+
38
+ function fetchWithTimeout(url: string, init: RequestInit, timeoutMs: number): Promise<Response> {
39
+ const controller = new AbortController();
40
+ const timer = setTimeout(() => controller.abort(), timeoutMs);
41
+ return fetch(url, { ...init, signal: controller.signal }).finally(() => clearTimeout(timer));
42
+ }
43
+
44
+ function nonnegInt(value: unknown): number {
45
+ const n = typeof value === "number" ? value : Number(value);
46
+ return Number.isFinite(n) && n > 0 ? Math.floor(n) : 0;
47
+ }
48
+
49
+ function mergeAnthropicUsageFromResponse(
50
+ acc: { tokenInput: number; tokenOutput: number },
51
+ raw: Record<string, unknown>
52
+ ) {
53
+ const u = raw.usage;
54
+ if (!u || typeof u !== "object") {
55
+ return;
56
+ }
57
+ const usage = u as Record<string, unknown>;
58
+ acc.tokenInput += nonnegInt(usage.input_tokens);
59
+ acc.tokenOutput += nonnegInt(usage.output_tokens);
60
+ }
61
+
62
+ function mergeOpenAiUsageFromResponse(
63
+ acc: { tokenInput: number; tokenOutput: number },
64
+ raw: Record<string, unknown>
65
+ ) {
66
+ const u = raw.usage;
67
+ if (!u || typeof u !== "object") {
68
+ return;
69
+ }
70
+ const usage = u as Record<string, unknown>;
71
+ acc.tokenInput += nonnegInt(usage.prompt_tokens);
72
+ acc.tokenOutput += nonnegInt(usage.completion_tokens);
73
+ }
74
+
75
/** Subset of Anthropic message content blocks this module consumes. */
type AnthropicContentBlock =
  | { type: "text"; text: string }
  | { type: "tool_use"; id: string; name: string; input: Record<string, unknown> };

/** One message in the Anthropic `/v1/messages` conversation format. */
type AnthropicMessage = { role: "user" | "assistant"; content: AnthropicContentBlock[] | string };
80
+
81
+ export async function runAssistantWithToolsAnthropic(input: {
82
+ system: string;
83
+ messages: AnthropicMessage[];
84
+ tools: AssistantToolDefinition[];
85
+ executeTool: (name: string, args: Record<string, unknown>) => Promise<string>;
86
+ maxToolRounds: number;
87
+ timeoutMs: number;
88
+ }): Promise<{ text: string; toolRoundCount: number } & AssistantApiTurnMetrics> {
89
+ const provider: DirectApiProvider = "anthropic_api";
90
+ const { key, baseUrl } = resolveDirectApiCredentials(provider, undefined);
91
+ if (!key) {
92
+ throw new Error("Missing API key for anthropic_api (ANTHROPIC_API_KEY or BOPO_ANTHROPIC_API_KEY).");
93
+ }
94
+ const model = resolveAssistantModel(provider);
95
+ const endpoint = `${String(baseUrl).replace(/\/$/, "")}/v1/messages`;
96
+ let toolRoundCount = 0;
97
+ const conversation: AnthropicMessage[] = [...input.messages];
98
+ const usageAcc = { tokenInput: 0, tokenOutput: 0 };
99
+
100
+ for (let round = 0; round < input.maxToolRounds + 1; round += 1) {
101
+ const body = {
102
+ model,
103
+ max_tokens: 8192,
104
+ system: input.system,
105
+ tools: input.tools.map((t) => ({
106
+ name: t.name,
107
+ description: t.description,
108
+ input_schema: t.inputSchema
109
+ })),
110
+ messages: conversation
111
+ };
112
+
113
+ const response = await fetchWithTimeout(
114
+ endpoint,
115
+ {
116
+ method: "POST",
117
+ headers: {
118
+ "content-type": "application/json",
119
+ "x-api-key": key,
120
+ "anthropic-version": "2023-06-01"
121
+ },
122
+ body: JSON.stringify(body)
123
+ },
124
+ input.timeoutMs
125
+ );
126
+
127
+ const raw = (await response.json()) as Record<string, unknown>;
128
+ if (!response.ok) {
129
+ const err = typeof raw.error === "object" && raw.error && "message" in raw.error
130
+ ? String((raw.error as { message?: string }).message)
131
+ : JSON.stringify(raw);
132
+ throw new Error(`Anthropic API error (${response.status}): ${err}`);
133
+ }
134
+
135
+ mergeAnthropicUsageFromResponse(usageAcc, raw);
136
+
137
+ const stopReason = String(raw.stop_reason ?? "");
138
+ const contentBlocks = (Array.isArray(raw.content) ? raw.content : []) as AnthropicContentBlock[];
139
+
140
+ conversation.push({ role: "assistant", content: contentBlocks });
141
+
142
+ if (stopReason !== "tool_use") {
143
+ const textParts = contentBlocks.filter((b) => b.type === "text").map((b) => (b as { text: string }).text);
144
+ return {
145
+ text: textParts.join("\n").trim() || "(No text response.)",
146
+ toolRoundCount,
147
+ tokenInput: usageAcc.tokenInput,
148
+ tokenOutput: usageAcc.tokenOutput,
149
+ runtimeModelId: model
150
+ };
151
+ }
152
+
153
+ const toolUses = contentBlocks.filter((b) => b.type === "tool_use") as Array<{
154
+ type: "tool_use";
155
+ id: string;
156
+ name: string;
157
+ input: Record<string, unknown>;
158
+ }>;
159
+
160
+ if (toolUses.length === 0) {
161
+ const textParts = contentBlocks.filter((b) => b.type === "text").map((b) => (b as { text: string }).text);
162
+ return {
163
+ text: textParts.join("\n").trim() || "(No text response.)",
164
+ toolRoundCount,
165
+ tokenInput: usageAcc.tokenInput,
166
+ tokenOutput: usageAcc.tokenOutput,
167
+ runtimeModelId: model
168
+ };
169
+ }
170
+
171
+ toolRoundCount += 1;
172
+ if (toolRoundCount > input.maxToolRounds) {
173
+ return {
174
+ text: "I hit the tool-call limit for this question. Try a narrower question or break it into steps.",
175
+ toolRoundCount,
176
+ tokenInput: usageAcc.tokenInput,
177
+ tokenOutput: usageAcc.tokenOutput,
178
+ runtimeModelId: model
179
+ };
180
+ }
181
+
182
+ const toolResultBlocks: Array<{ type: "tool_result"; tool_use_id: string; content: string }> = [];
183
+ for (const tu of toolUses) {
184
+ let output: string;
185
+ try {
186
+ output = await input.executeTool(tu.name, tu.input ?? {});
187
+ } catch (e) {
188
+ output = JSON.stringify({ error: String(e) });
189
+ }
190
+ toolResultBlocks.push({
191
+ type: "tool_result",
192
+ tool_use_id: tu.id,
193
+ content: output
194
+ });
195
+ }
196
+
197
+ conversation.push({
198
+ role: "user",
199
+ content: toolResultBlocks as unknown as AnthropicContentBlock[]
200
+ });
201
+ }
202
+
203
+ return {
204
+ text: "Unable to complete the response.",
205
+ toolRoundCount,
206
+ tokenInput: usageAcc.tokenInput,
207
+ tokenOutput: usageAcc.tokenOutput,
208
+ runtimeModelId: model
209
+ };
210
+ }
211
+
212
/** Shape of one tool-call entry in an OpenAI chat-completions response; `function.arguments` is a JSON string. */
type OpenAIToolCall = { id: string; type: "function"; function: { name: string; arguments: string } };
213
+
214
+ export async function runAssistantWithToolsOpenAI(input: {
215
+ system: string;
216
+ messages: Array<{ role: "user" | "assistant" | "tool"; content: string; tool_call_id?: string }>;
217
+ tools: AssistantToolDefinition[];
218
+ executeTool: (name: string, args: Record<string, unknown>) => Promise<string>;
219
+ maxToolRounds: number;
220
+ timeoutMs: number;
221
+ }): Promise<{ text: string; toolRoundCount: number } & AssistantApiTurnMetrics> {
222
+ const provider: DirectApiProvider = "openai_api";
223
+ const { key, baseUrl } = resolveDirectApiCredentials(provider, undefined);
224
+ if (!key) {
225
+ throw new Error("Missing API key for openai_api (OPENAI_API_KEY or BOPO_OPENAI_API_KEY).");
226
+ }
227
+ const model = resolveAssistantModel(provider);
228
+ const endpoint = `${String(baseUrl).replace(/\/$/, "")}/v1/chat/completions`;
229
+ let toolRoundCount = 0;
230
+ const messages: Array<Record<string, unknown>> = [{ role: "system", content: input.system }, ...input.messages];
231
+ const usageAcc = { tokenInput: 0, tokenOutput: 0 };
232
+
233
+ const openaiTools = input.tools.map((t) => ({
234
+ type: "function" as const,
235
+ function: {
236
+ name: t.name,
237
+ description: t.description,
238
+ parameters: t.inputSchema
239
+ }
240
+ }));
241
+
242
+ for (let round = 0; round < input.maxToolRounds + 1; round += 1) {
243
+ const response = await fetchWithTimeout(
244
+ endpoint,
245
+ {
246
+ method: "POST",
247
+ headers: {
248
+ "content-type": "application/json",
249
+ authorization: `Bearer ${key}`
250
+ },
251
+ body: JSON.stringify({
252
+ model,
253
+ messages,
254
+ tools: openaiTools,
255
+ tool_choice: "auto"
256
+ })
257
+ },
258
+ input.timeoutMs
259
+ );
260
+
261
+ const raw = (await response.json()) as Record<string, unknown>;
262
+ if (!response.ok) {
263
+ const err =
264
+ typeof raw.error === "object" && raw.error && "message" in (raw.error as object)
265
+ ? String((raw.error as { message?: string }).message)
266
+ : JSON.stringify(raw);
267
+ throw new Error(`OpenAI API error (${response.status}): ${err}`);
268
+ }
269
+
270
+ mergeOpenAiUsageFromResponse(usageAcc, raw);
271
+
272
+ const choice = (Array.isArray(raw.choices) ? raw.choices[0] : null) as Record<string, unknown> | null;
273
+ const msg = choice?.message as Record<string, unknown> | undefined;
274
+ const toolCalls = (msg?.tool_calls as OpenAIToolCall[] | undefined) ?? [];
275
+ const content = typeof msg?.content === "string" ? msg.content : "";
276
+
277
+ messages.push({
278
+ role: "assistant",
279
+ content: content || null,
280
+ tool_calls: toolCalls.length > 0 ? toolCalls : undefined
281
+ });
282
+
283
+ if (toolCalls.length === 0) {
284
+ return {
285
+ text: content.trim() || "(No text response.)",
286
+ toolRoundCount,
287
+ tokenInput: usageAcc.tokenInput,
288
+ tokenOutput: usageAcc.tokenOutput,
289
+ runtimeModelId: model
290
+ };
291
+ }
292
+
293
+ toolRoundCount += 1;
294
+ if (toolRoundCount > input.maxToolRounds) {
295
+ return {
296
+ text: "I hit the tool-call limit for this question. Try a narrower question or break it into steps.",
297
+ toolRoundCount,
298
+ tokenInput: usageAcc.tokenInput,
299
+ tokenOutput: usageAcc.tokenOutput,
300
+ runtimeModelId: model
301
+ };
302
+ }
303
+
304
+ for (const call of toolCalls) {
305
+ let args: Record<string, unknown> = {};
306
+ try {
307
+ args = JSON.parse(call.function.arguments || "{}") as Record<string, unknown>;
308
+ } catch {
309
+ args = {};
310
+ }
311
+ let output: string;
312
+ try {
313
+ output = await input.executeTool(call.function.name, args);
314
+ } catch (e) {
315
+ output = JSON.stringify({ error: String(e) });
316
+ }
317
+ messages.push({
318
+ role: "tool",
319
+ tool_call_id: call.id,
320
+ content: output
321
+ });
322
+ }
323
+ }
324
+
325
+ return {
326
+ text: "Unable to complete the response.",
327
+ toolRoundCount,
328
+ tokenInput: usageAcc.tokenInput,
329
+ tokenOutput: usageAcc.tokenOutput,
330
+ runtimeModelId: model
331
+ };
332
+ }
333
+
334
+ export async function runAssistantWithTools(input: {
335
+ provider: DirectApiProvider;
336
+ system: string;
337
+ chatHistory: AssistantChatMessage[];
338
+ tools: AssistantToolDefinition[];
339
+ executeTool: (name: string, args: Record<string, unknown>) => Promise<string>;
340
+ maxToolRounds: number;
341
+ timeoutMs: number;
342
+ }): Promise<{ text: string; toolRoundCount: number } & AssistantApiTurnMetrics> {
343
+ if (input.provider === "openai_api") {
344
+ const openaiMessages: Array<{ role: "user" | "assistant" | "tool"; content: string; tool_call_id?: string }> = [];
345
+ for (const m of input.chatHistory) {
346
+ if (m.role === "user") {
347
+ openaiMessages.push({ role: "user", content: m.content });
348
+ } else {
349
+ openaiMessages.push({ role: "assistant", content: m.content });
350
+ }
351
+ }
352
+ return runAssistantWithToolsOpenAI({
353
+ system: input.system,
354
+ messages: openaiMessages,
355
+ tools: input.tools,
356
+ executeTool: input.executeTool,
357
+ maxToolRounds: input.maxToolRounds,
358
+ timeoutMs: input.timeoutMs
359
+ });
360
+ }
361
+
362
+ const anthropicMessages: AnthropicMessage[] = input.chatHistory.map((m) => ({
363
+ role: m.role,
364
+ content: m.content
365
+ }));
366
+
367
+ return runAssistantWithToolsAnthropic({
368
+ system: input.system,
369
+ messages: anthropicMessages,
370
+ tools: input.tools,
371
+ executeTool: input.executeTool,
372
+ maxToolRounds: input.maxToolRounds,
373
+ timeoutMs: input.timeoutMs
374
+ });
375
+ }