@polka-codes/core 0.1.5 → 0.1.6
This diff compares the contents of two publicly released versions of the package as they appear in the supported public registries. It is provided for informational purposes only.
- package/dist/Agent/AgentBase.d.ts +5 -5
- package/dist/Agent/AgentBase.js +28 -28
- package/dist/Agent/CoderAgent/index.d.ts +3 -3
- package/dist/Agent/CoderAgent/index.js +7 -7
- package/dist/Agent/CoderAgent/prompts.d.ts +1 -1
- package/dist/Agent/CoderAgent/prompts.js +10 -10
- package/dist/Agent/index.d.ts +2 -2
- package/dist/Agent/index.js +3 -3
- package/dist/Agent/parseAssistantMessage.d.ts +3 -3
- package/dist/Agent/prompts.d.ts +1 -1
- package/dist/AiService/AiServiceBase.d.ts +4 -4
- package/dist/AiService/AnthropicService.d.ts +2 -2
- package/dist/AiService/AnthropicService.js +43 -43
- package/dist/AiService/DeepSeekService.d.ts +2 -2
- package/dist/AiService/DeepSeekService.js +14 -14
- package/dist/AiService/OllamaService.d.ts +2 -2
- package/dist/AiService/OllamaService.js +15 -15
- package/dist/AiService/index.d.ts +5 -5
- package/dist/AiService/index.js +4 -4
- package/dist/AiService/utils.js +36 -36
- package/dist/index.d.ts +4 -4
- package/dist/index.js +5 -5
- package/dist/tools/index.d.ts +3 -3
- package/dist/tools/index.js +4 -4
- package/package.json +1 -1
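Two mechanical changes account for nearly all of the hunks below: every relative import in dist/ now carries an explicit file extension (Node's ESM resolver does not infer `.js` or `/index.js` the way bundlers and CommonJS do), and string literals switch from single to double quotes. A minimal before/after sketch of the import change, using the module named in the first hunk below:

    // 0.1.5: extensionless directory import; fails under plain Node ESM
    // import type { AiServiceBase } from '../AiService';
    // 0.1.6: explicit index.js, resolvable by Node without a bundler
    import type { AiServiceBase } from "../AiService/index.js";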
package/dist/Agent/AgentBase.d.ts
CHANGED
@@ -1,7 +1,7 @@
-import type Anthropic from '@anthropic-ai/sdk';
-import type { AiServiceBase, MessageParam } from '../AiService';
-import type { Logger } from '../logger';
-import { type ToolHandler, type ToolInfo } from '../tools';
+import type Anthropic from "@anthropic-ai/sdk";
+import type { AiServiceBase, MessageParam } from "../AiService/index.js";
+import type { Logger } from "../logger.js";
+import { type ToolHandler, type ToolInfo } from "../tools/index.js";
 export type TaskEvent = {
 kind: string;
 info: TaskInfo;
@@ -43,7 +43,7 @@ export declare abstract class AgentBase {
 }): Promise<TaskInfo>;
 get model(): {
 id: string;
-info: import("../AiService").ModelInfo;
+info: import("../AiService/index.js").ModelInfo;
 };
 }
 export {};
package/dist/Agent/AgentBase.js
CHANGED
@@ -1,6 +1,6 @@
-import { ToolResponseType } from '../tools';
-import { parseAssistantMessage } from './parseAssistantMessage';
-import { responsePrompts } from './prompts';
+import { ToolResponseType } from "../tools/index.js";
+import { parseAssistantMessage } from "./parseAssistantMessage.js";
+import { responsePrompts } from "./prompts.js";
 export class AgentBase {
 ai;
 config;
@@ -28,75 +28,75 @@ export class AgentBase {
 }
 let nextRequest = [
 {
-type: 'text',
+type: "text",
 text,
 },
 ];
 let iterations = 0;
 while (nextRequest) {
 if (iterations >= taskInfo.options.maxIterations) {
-callback({ kind: 'max_iterations_reached', info: taskInfo });
+callback({ kind: "max_iterations_reached", info: taskInfo });
 break;
 }
 const response = await this.#request(taskInfo, nextRequest, callback);
 nextRequest = await this.#handleResponse(taskInfo, response, callback);
 ++iterations;
 }
-callback({ kind: 'end_task', info: taskInfo });
+callback({ kind: "end_task", info: taskInfo });
 return taskInfo;
 }
 async #request(info, userContent, callback) {
-await callback({ kind: 'start_request', info, userContent });
+await callback({ kind: "start_request", info, userContent });
 info.messages.push({
-role: 'user',
+role: "user",
 content: userContent,
 });
-this.logger.trace(info.messages, 'Sending messages to AI');
+this.logger.trace(info.messages, "Sending messages to AI");
 // TODO: use a truncated messages if needed to avoid exceeding the token limit
 const stream = this.ai.send(this.config.systemPrompt, info.messages);
-let currentAssistantMessage = '';
+let currentAssistantMessage = "";
 for await (const chunk of stream) {
 switch (chunk.type) {
-case 'usage':
+case "usage":
 info.inputTokens = chunk.inputTokens;
 info.outputTokens = chunk.outputTokens;
 info.cacheWriteTokens = chunk.cacheWriteTokens ?? 0;
 info.cacheReadTokens = chunk.cacheReadTokens ?? 0;
 info.totalCost = chunk.totalCost;
-await callback({ kind: 'usage', info });
+await callback({ kind: "usage", info });
 break;
-case 'text':
+case "text":
 currentAssistantMessage += chunk.text;
-await callback({ kind: 'text', info, newText: chunk.text });
+await callback({ kind: "text", info, newText: chunk.text });
 break;
 }
 }
 // TODO: error handling
 if (!currentAssistantMessage) {
-throw new Error('No assistant message received');
+throw new Error("No assistant message received");
 }
 info.messages.push({
-role: 'assistant',
+role: "assistant",
 content: currentAssistantMessage,
 });
 const ret = parseAssistantMessage(currentAssistantMessage, this.config.tools, this.config.toolNamePrefix);
-await callback({ kind: 'end_request', info });
+await callback({ kind: "end_request", info });
 return ret;
 }
 async #handleResponse(info, response, callback) {
 const toolReponses = [];
 for (const content of response) {
 switch (content.type) {
-case 'text':
+case "text":
 // no need to handle text content
 break;
-case 'tool_use': {
-await callback({ kind: 'tool_use', info, tool: content.name });
+case "tool_use": {
+await callback({ kind: "tool_use", info, tool: content.name });
 const toolResp = await this.#invokeTool(content.name, content.params);
 switch (toolResp.type) {
 case ToolResponseType.Reply:
 // reply to the tool use
-await callback({ kind: 'tool_reply', info, tool: content.name });
+await callback({ kind: "tool_reply", info, tool: content.name });
 toolReponses.push({ tool: content.name, response: toolResp.message });
 break;
 case ToolResponseType.Exit:
@@ -104,17 +104,17 @@ export class AgentBase {
 return undefined;
 case ToolResponseType.Invalid:
 // tell AI about the invalid arguments
-await callback({ kind: 'tool_invalid', info, tool: content.name });
+await callback({ kind: "tool_invalid", info, tool: content.name });
 toolReponses.push({ tool: content.name, response: toolResp.message });
 break;
 case ToolResponseType.Error:
 // tell AI about the error
-await callback({ kind: 'tool_error', info, tool: content.name });
+await callback({ kind: "tool_error", info, tool: content.name });
 toolReponses.push({ tool: content.name, response: toolResp.message });
 break;
 case ToolResponseType.Interrupted:
 // the execution is killed
-await callback({ kind: 'tool_interrupted', info, tool: content.name });
+await callback({ kind: "tool_interrupted", info, tool: content.name });
 return undefined;
 }
 break;
@@ -126,15 +126,15 @@ export class AgentBase {
 // we need to convert the loop to a state machine
 return [
 {
-type: 'text',
+type: "text",
 text: responsePrompts.requireUseTool,
 },
 ];
 }
-const finalResp = toolReponses.map(({ tool, response }) => responsePrompts.toolResults(tool, response)).join('\n');
+const finalResp = toolReponses.map(({ tool, response }) => responsePrompts.toolResults(tool, response)).join("\n");
 return [
 {
-type: 'text',
+type: "text",
 text: finalResp,
 },
 ];
@@ -155,4 +155,4 @@ export class AgentBase {
 return this.ai.model;
 }
 }
-//# sourceMappingURL=AgentBase.js.map
+//# sourceMappingURL=AgentBase.js.map
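AgentBase.js above implements a bounded request/response loop: each iteration sends the pending user content, streams the reply, parses it for tool uses, and feeds tool results back in, until a tool response exits the task or maxIterations is hit; progress is surfaced through the callback. A minimal sketch of a consumer, keyed to the event kinds emitted above (payload fields are the ones visible in this diff; the task entry point itself is not shown in these hunks):

    const callback = async (event: { kind: string; [key: string]: any }) => {
      switch (event.kind) {
        case "text":
          process.stdout.write(event.newText); // incremental assistant output
          break;
        case "usage":
          console.log(`tokens: ${event.info.inputTokens} in / ${event.info.outputTokens} out`);
          break;
        case "tool_use":
          console.log(`tool requested: ${event.tool}`);
          break;
        case "max_iterations_reached":
          console.warn("task stopped: iteration budget exhausted");
          break;
      }
    };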
package/dist/Agent/CoderAgent/index.d.ts
CHANGED
@@ -1,6 +1,6 @@
-import type { AiServiceBase } from '../../AiService';
-import { type ToolHandler, type ToolInfo } from '../../tools';
-import { AgentBase } from '../AgentBase';
+import type { AiServiceBase } from "../../AiService/index.js";
+import { type ToolHandler, type ToolInfo } from "../../tools/index.js";
+import { AgentBase } from "../AgentBase.js";
 export type CoderAgentOptions = {
 ai: AiServiceBase;
 os: string;
package/dist/Agent/CoderAgent/index.js
CHANGED
@@ -1,7 +1,7 @@
-import { createServiceLogger } from '../../logger';
-import { allTools } from '../../tools';
-import { AgentBase } from '../AgentBase';
-import { fullSystemPrompt } from './prompts';
+import { createServiceLogger } from "../../logger.js";
+import { allTools } from "../../tools/index.js";
+import { AgentBase } from "../AgentBase.js";
+import { fullSystemPrompt } from "./prompts.js";
 const defaultTools = [
 allTools.executeCommand,
 allTools.readFile,
@@ -16,7 +16,7 @@ const defaultTools = [
 export class CoderAgent extends AgentBase {
 constructor(options) {
 const tools = options.tools || defaultTools;
-const toolNamePrefix = 'tool_';
+const toolNamePrefix = "tool_";
 const systemPrompt = fullSystemPrompt({
 os: options.os,
 }, tools, toolNamePrefix, options.customInstructions ?? [], options.commands ?? {});
@@ -26,7 +26,7 @@ export class CoderAgent extends AgentBase {
 toolNamePrefix,
 toolHandler: options.toolHandler,
 customInstructions: options.customInstructions,
-}, createServiceLogger('CoderAgent'));
+}, createServiceLogger("CoderAgent"));
 }
 }
-//# sourceMappingURL=index.js.map
+//# sourceMappingURL=index.js.map
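CoderAgent above is a thin configuration layer over AgentBase: it picks the default tool set unless one is supplied, prefixes tool names with "tool_", and assembles the system prompt from the OS string, custom instructions, and custom commands. An illustrative construction, assuming an apiKey option on AnthropicService and using a placeholder tool handler (the real ToolHandler signature is declared in tools/index.d.ts, which is not part of this diff):

    const agent = new CoderAgent({
      ai: new AnthropicService({ apiKey: process.env.ANTHROPIC_API_KEY ?? "" }), // apiKey assumed
      os: process.platform,
      customInstructions: ["Prefer small, focused diffs."],
      commands: { test: "bun test" }, // rendered by customCommands, shown below
      toolHandler: (async (..._args: unknown[]) => undefined) as any, // placeholder
    });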
package/dist/Agent/CoderAgent/prompts.d.ts
CHANGED
@@ -1,4 +1,4 @@
-import type { ToolInfo } from '../../tools';
+import type { ToolInfo } from "../../tools/index.js";
 export declare const basePrompt = "You are a highly skilled software engineer with extensive knowledge in many programming languages, frameworks, design patterns, and best practices.";
 export declare const editingFilesPrompt: (toolNamePrefix: string) => string;
 export declare const capabilities: (toolNamePrefix: string) => string;
package/dist/Agent/CoderAgent/prompts.js
CHANGED
@@ -1,7 +1,7 @@
 // source: https://github.com/cline/cline/blob/f6c19c29a64ca84e9360df7ab2c07d128dcebe64/src/core/prompts/system.ts#L1
-import { toolUsePrompt } from '../prompts';
+import { toolUsePrompt } from "../prompts.js";
 // TODO: restructure the prompts to avoid duplications
-export const basePrompt = 'You are a highly skilled software engineer with extensive knowledge in many programming languages, frameworks, design patterns, and best practices.';
+export const basePrompt = "You are a highly skilled software engineer with extensive knowledge in many programming languages, frameworks, design patterns, and best practices.";
 export const editingFilesPrompt = (toolNamePrefix) => `
 ====
 
@@ -117,9 +117,9 @@ SYSTEM INFORMATION
 
 Operating System: ${info.os}`;
 export const customInstructions = (customInstructions) => {
-const joined = customInstructions.join('\n');
-if (joined.trim() === '') {
-return '';
+const joined = customInstructions.join("\n");
+if (joined.trim() === "") {
+return "";
 }
 return `
 ====
@@ -133,14 +133,14 @@ ${joined}`;
 export const customCommands = (commands) => {
 const joined = Object.entries(commands)
 .map(([name, command]) => {
-if (typeof command === 'string') {
+if (typeof command === "string") {
 return `- ${name}\n - Command: \`${command}\``;
 }
 return `- ${name}\n - Command: \`${command.command}\`\n - Description: ${command.description}`;
 })
-.join('\n');
-if (joined.trim() === '') {
-return '';
+.join("\n");
+if (joined.trim() === "") {
+return "";
 }
 return `
 ====
@@ -162,4 +162,4 @@ ${systemInformation(info)}
 ${customInstructions(instructions)}
 ${customCommands(commands)}
 `;
-//# sourceMappingURL=prompts.js.map
+//# sourceMappingURL=prompts.js.map
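Per the typeof check in customCommands above, each command value may be a bare command string or an object carrying a command plus a description:

    const commands = {
      lint: "bun run lint",
      test: { command: "bun test", description: "Run the unit test suite" },
    };
    // Rendered into the system prompt as:
    // - lint
    //  - Command: `bun run lint`
    // - test
    //  - Command: `bun test`
    //  - Description: Run the unit test suite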
package/dist/Agent/index.d.ts
CHANGED
@@ -1,2 +1,2 @@
-export * from './AgentBase';
-export * from './CoderAgent';
+export * from "./AgentBase.js";
+export * from "./CoderAgent/index.js";
package/dist/Agent/index.js
CHANGED
@@ -1,3 +1,3 @@
-export * from './AgentBase';
-export * from './CoderAgent';
-//# sourceMappingURL=index.js.map
+export * from "./AgentBase.js";
+export * from "./CoderAgent/index.js";
+//# sourceMappingURL=index.js.map
package/dist/Agent/parseAssistantMessage.d.ts
CHANGED
@@ -1,10 +1,10 @@
-import type { ToolInfo } from '../tools';
+import type { ToolInfo } from "../tools/index.js";
 export interface TextContent {
-type: 'text';
+type: "text";
 content: string;
 }
 export interface ToolUse {
-type: 'tool_use';
+type: "tool_use";
 name: string;
 params: Record<string, string>;
 }
package/dist/Agent/prompts.d.ts
CHANGED
@@ -1,4 +1,4 @@
-import type { ToolInfo } from '../tools';
+import type { ToolInfo } from "../tools/index.js";
 export declare const toolUsePrompt: (tools: ToolInfo[], toolNamePrefix: string) => string;
 export declare const responsePrompts: {
 readonly errorInvokeTool: (tool: string, error: unknown) => string;
package/dist/AiService/AiServiceBase.d.ts
CHANGED
@@ -1,12 +1,12 @@
-import type { Anthropic } from '@anthropic-ai/sdk';
-import type { ModelInfo } from './ModelInfo';
+import type { Anthropic } from "@anthropic-ai/sdk";
+import type { ModelInfo } from "./ModelInfo.js";
 export type ApiStreamChunk = ApiStreamTextChunk | ApiStreamUsageChunk;
 export interface ApiStreamTextChunk {
-type: 'text';
+type: "text";
 text: string;
 }
 export interface ApiStreamUsageChunk {
-type: 'usage';
+type: "usage";
 inputTokens: number;
 outputTokens: number;
 cacheWriteTokens?: number;
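ApiStreamChunk above is a discriminated union, so consumers narrow on the `type` field. A minimal sketch, assuming a service instance and its inputs are already in scope:

    for await (const chunk of service.send(systemPrompt, messages)) {
      if (chunk.type === "text") {
        process.stdout.write(chunk.text);
      } else {
        // "usage": token accounting; the cache counters are optional
        console.log(chunk.inputTokens, chunk.outputTokens, chunk.cacheReadTokens ?? 0);
      }
    }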
package/dist/AiService/AnthropicService.d.ts
CHANGED
@@ -1,5 +1,5 @@
-import { AiServiceBase, type AiServiceOptions, type ApiStream, type MessageParam } from './AiServiceBase';
-import { type AnthropicModelId, type ModelInfo } from './ModelInfo';
+import { AiServiceBase, type AiServiceOptions, type ApiStream, type MessageParam } from "./AiServiceBase.js";
+import { type AnthropicModelId, type ModelInfo } from "./ModelInfo.js";
 export declare class AnthropicService extends AiServiceBase {
 #private;
 readonly model: {
package/dist/AiService/AnthropicService.js
CHANGED
@@ -1,9 +1,9 @@
 // source: https://github.com/cline/cline/blob/f6c19c29a64ca84e9360df7ab2c07d128dcebe64/src/api/providers/anthropic.ts
-import { Anthropic } from '@anthropic-ai/sdk';
-import { createServiceLogger } from '../logger';
-import { AiServiceBase } from './AiServiceBase';
-import { anthropicDefaultModelId, anthropicModels } from './ModelInfo';
-const logger = createServiceLogger('AnthropicService');
+import { Anthropic } from "@anthropic-ai/sdk";
+import { createServiceLogger } from "../logger.js";
+import { AiServiceBase } from "./AiServiceBase.js";
+import { anthropicDefaultModelId, anthropicModels } from "./ModelInfo.js";
+const logger = createServiceLogger("AnthropicService");
 export class AnthropicService extends AiServiceBase {
 #options;
 #client;
@@ -22,20 +22,20 @@ export class AnthropicService extends AiServiceBase {
 };
 }
 async *send(systemPrompt, messages) {
-logger.debug({ modelId: this.model.id, messagesCount: messages.length }, 'Starting message stream');
+logger.debug({ modelId: this.model.id, messagesCount: messages.length }, "Starting message stream");
 let stream;
 const modelId = this.model.id;
 switch (modelId) {
 // 'latest' alias does not support cache_control
-case 'claude-3-5-sonnet-20241022':
-case 'claude-3-5-haiku-20241022':
-case 'claude-3-opus-20240229':
-case 'claude-3-haiku-20240307': {
+case "claude-3-5-sonnet-20241022":
+case "claude-3-5-haiku-20241022":
+case "claude-3-opus-20240229":
+case "claude-3-haiku-20240307": {
 /*
 The latest message will be the new user message, one before will be the assistant message from a previous request, and the user message before that will be a previously cached user message. So we need to mark the latest user message as ephemeral to cache it for the next request, and mark the second to last user message as ephemeral to let the server know the last message to retrieve from the cache for the current request..
 */
 const userMsgIndices = messages.reduce((acc, msg, index) => {
-if (msg.role === 'user') {
+if (msg.role === "user") {
 acc.push(index);
 }
 return acc;
@@ -49,21 +49,21 @@ export class AnthropicService extends AiServiceBase {
 system: [
 {
 text: systemPrompt,
-type: 'text',
-cache_control: { type: 'ephemeral' },
+type: "text",
+cache_control: { type: "ephemeral" },
 },
 ], // setting cache breakpoint for system prompt so new tasks can reuse it
 messages: messages.map((message, index) => {
 if (index === lastUserMsgIndex || index === secondLastMsgUserIndex) {
 return {
 ...message,
-content: typeof message.content === 'string'
+content: typeof message.content === "string"
 ? [
 {
-type: 'text',
+type: "text",
 text: message.content,
 cache_control: {
-type: 'ephemeral',
+type: "ephemeral",
 },
 },
 ]
@@ -71,7 +71,7 @@ export class AnthropicService extends AiServiceBase {
 ? {
 ...content,
 cache_control: {
-type: 'ephemeral',
+type: "ephemeral",
 },
 }
 : content),
@@ -88,13 +88,13 @@ export class AnthropicService extends AiServiceBase {
 // https://github.com/anthropics/anthropic-sdk-typescript?tab=readme-ov-file#default-headers
 // https://github.com/anthropics/anthropic-sdk-typescript/commit/c920b77fc67bd839bfeb6716ceab9d7c9bbe7393
 switch (modelId) {
-case 'claude-3-5-sonnet-20241022':
-case 'claude-3-5-haiku-20241022':
-case 'claude-3-opus-20240229':
-case 'claude-3-haiku-20240307':
+case "claude-3-5-sonnet-20241022":
+case "claude-3-5-haiku-20241022":
+case "claude-3-opus-20240229":
+case "claude-3-haiku-20240307":
 return {
 headers: {
-'anthropic-beta': 'prompt-caching-2024-07-31',
+"anthropic-beta": "prompt-caching-2024-07-31",
 },
 };
 default:
@@ -108,7 +108,7 @@ export class AnthropicService extends AiServiceBase {
 model: modelId,
 max_tokens: this.model.info.maxTokens || 8192,
 temperature: 0,
-system: [{ text: systemPrompt, type: 'text' }],
+system: [{ text: systemPrompt, type: "text" }],
 messages,
 // tools,
 // tool_choice: { type: "auto" },
@@ -117,69 +117,69 @@ export class AnthropicService extends AiServiceBase {
 break;
 }
 }
-logger.debug('Stream created, processing chunks');
+logger.debug("Stream created, processing chunks");
 for await (const chunk of stream) {
 switch (chunk.type) {
-case 'message_start': {
+case "message_start": {
 // tells us cache reads/writes/input/output
 const usage = chunk.message.usage;
 const usageInfo = {
-type: 'usage',
+type: "usage",
 inputTokens: usage.input_tokens || 0,
 outputTokens: usage.output_tokens || 0,
 cacheWriteTokens: usage.cache_creation_input_tokens || undefined,
 cacheReadTokens: usage.cache_read_input_tokens || undefined,
 };
-logger.trace({ usage: usageInfo }, 'Message start usage');
+logger.trace({ usage: usageInfo }, "Message start usage");
 yield usageInfo;
 break;
 }
-case 'message_delta': {
+case "message_delta": {
 // tells us stop_reason, stop_sequence, and output tokens along the way and at the end of the message
 const deltaUsage = {
-type: 'usage',
+type: "usage",
 inputTokens: 0,
 outputTokens: chunk.usage.output_tokens || 0,
 };
-logger.trace({ usage: deltaUsage }, 'Message delta usage');
+logger.trace({ usage: deltaUsage }, "Message delta usage");
 yield deltaUsage;
 break;
 }
-case 'message_stop':
-logger.debug('Message stream completed');
+case "message_stop":
+logger.debug("Message stream completed");
 break;
-case 'content_block_start':
+case "content_block_start":
 switch (chunk.content_block.type) {
-case 'text':
+case "text":
 // we may receive multiple text blocks, in which case just insert a line break between them
 if (chunk.index > 0) {
 yield {
-type: 'text',
-text: '\n',
+type: "text",
+text: "\n",
 };
 }
 yield {
-type: 'text',
+type: "text",
 text: chunk.content_block.text,
 };
 break;
 }
 break;
-case 'content_block_delta':
+case "content_block_delta":
 switch (chunk.delta.type) {
-case 'text_delta':
+case "text_delta":
 yield {
-type: 'text',
+type: "text",
 text: chunk.delta.text,
 };
 break;
 }
 break;
-case 'content_block_stop':
+case "content_block_stop":
 break;
 }
 }
-logger.debug('Stream ended');
+logger.debug("Stream ended");
 }
 }
-//# sourceMappingURL=AnthropicService.js.map
+//# sourceMappingURL=AnthropicService.js.map
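The prompt-caching branch above places "ephemeral" cache_control breakpoints on the system prompt, on the latest user message (written to cache for the next turn), and on the second-most-recent user message (marking how much prefix the server can read back from cache). The index bookkeeping reduces to taking the last two entries of the user-message index list; a condensed equivalent of the reduce in the diff (the `?? -1` fallback for short histories is an assumption):

    const userMsgIndices = messages.reduce<number[]>((acc, msg, index) => {
      if (msg.role === "user") acc.push(index);
      return acc;
    }, []);
    const lastUserMsgIndex = userMsgIndices[userMsgIndices.length - 1] ?? -1;
    const secondLastMsgUserIndex = userMsgIndices[userMsgIndices.length - 2] ?? -1;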
package/dist/AiService/DeepSeekService.d.ts
CHANGED
@@ -1,5 +1,5 @@
-import { AiServiceBase, type AiServiceOptions, type ApiStream, type MessageParam } from './AiServiceBase';
-import { type ModelInfo } from './ModelInfo';
+import { AiServiceBase, type AiServiceOptions, type ApiStream, type MessageParam } from "./AiServiceBase.js";
+import { type ModelInfo } from "./ModelInfo.js";
 export declare class DeepSeekService extends AiServiceBase {
 #private;
 readonly model: {
package/dist/AiService/DeepSeekService.js
CHANGED
@@ -1,17 +1,17 @@
 // source: https://github.com/cline/cline/blob/ce2610a6eafd860305ba9b12533db19f2a5385ad/src/api/providers/deepseek.ts
-import OpenAI from 'openai';
-import { createServiceLogger } from '../logger';
-import { AiServiceBase } from './AiServiceBase';
-import { deepSeekDefaultModelId, deepSeekModels } from './ModelInfo';
-import { convertToOpenAiMessages } from './utils';
-const logger = createServiceLogger('DeepSeekService');
+import OpenAI from "openai";
+import { createServiceLogger } from "../logger.js";
+import { AiServiceBase } from "./AiServiceBase.js";
+import { deepSeekDefaultModelId, deepSeekModels } from "./ModelInfo.js";
+import { convertToOpenAiMessages } from "./utils.js";
+const logger = createServiceLogger("DeepSeekService");
 export class DeepSeekService extends AiServiceBase {
 #client;
 model;
 constructor(options) {
 super();
 this.#client = new OpenAI({
-baseURL: 'https://api.deepseek.com/v1',
+baseURL: "https://api.deepseek.com/v1",
 apiKey: options.apiKey,
 });
 const id = (options.modelId || deepSeekDefaultModelId);
@@ -21,12 +21,12 @@ export class DeepSeekService extends AiServiceBase {
 };
 }
 async *send(systemPrompt, messages) {
-logger.debug({ modelId: this.model.id, messagesCount: messages.length }, 'Starting message stream');
+logger.debug({ modelId: this.model.id, messagesCount: messages.length }, "Starting message stream");
 const openAiMessages = [
-{ role: 'system', content: systemPrompt },
+{ role: "system", content: systemPrompt },
 ...convertToOpenAiMessages(messages),
 ];
-logger.trace({ modelId: this.model.id, messagesCount: messages.length }, 'Sending messages to Ollama');
+logger.trace({ modelId: this.model.id, messagesCount: messages.length }, "Sending messages to Ollama");
 const stream = await this.#client.chat.completions.create({
 model: this.model.id,
 max_completion_tokens: this.model.info.maxTokens,
@@ -39,13 +39,13 @@ export class DeepSeekService extends AiServiceBase {
 const delta = chunk.choices[0]?.delta;
 if (delta?.content) {
 yield {
-type: 'text',
+type: "text",
 text: delta.content,
 };
 }
 if (chunk.usage) {
 yield {
-type: 'usage',
+type: "usage",
 // deepseek reports total input AND cache reads/writes, see context caching: https://api-docs.deepseek.com/guides/kv_cache
 // where the input tokens is the sum of the cache hits/misses, while anthropic reports them as separate tokens.
 // This is important to know for
@@ -58,7 +58,7 @@ export class DeepSeekService extends AiServiceBase {
 };
 }
 }
-logger.debug('Stream ended');
+logger.debug("Stream ended");
 }
 }
-//# sourceMappingURL=DeepSeekService.js.map
+//# sourceMappingURL=DeepSeekService.js.map
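DeepSeekService above is a thin OpenAI-compatible client pinned to https://api.deepseek.com/v1: only the API key must be supplied, and modelId falls back to deepSeekDefaultModelId. A minimal instantiation sketch:

    const deepseek = new DeepSeekService({
      apiKey: process.env.DEEPSEEK_API_KEY ?? "",
      // modelId omitted: deepSeekDefaultModelId is used, per the constructor above
    });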
package/dist/AiService/OllamaService.d.ts
CHANGED
@@ -1,5 +1,5 @@
-import { AiServiceBase, type AiServiceOptions, type ApiStream, type MessageParam } from './AiServiceBase';
-import { type ModelInfo } from './ModelInfo';
+import { AiServiceBase, type AiServiceOptions, type ApiStream, type MessageParam } from "./AiServiceBase.js";
+import { type ModelInfo } from "./ModelInfo.js";
 export declare class OllamaService extends AiServiceBase {
 #private;
 readonly model: {
package/dist/AiService/OllamaService.js
CHANGED
@@ -1,31 +1,31 @@
 // source: https://github.com/cline/cline/blob/f6c19c29a64ca84e9360df7ab2c07d128dcebe64/src/api/providers/ollama.ts
-import OpenAI from 'openai';
-import { createServiceLogger } from '../logger';
-import { AiServiceBase } from './AiServiceBase';
-import { openAiModelInfoSaneDefaults } from './ModelInfo';
-import { convertToOpenAiMessages } from './utils';
-const logger = createServiceLogger('OllamaService');
+import OpenAI from "openai";
+import { createServiceLogger } from "../logger.js";
+import { AiServiceBase } from "./AiServiceBase.js";
+import { openAiModelInfoSaneDefaults } from "./ModelInfo.js";
+import { convertToOpenAiMessages } from "./utils.js";
+const logger = createServiceLogger("OllamaService");
 export class OllamaService extends AiServiceBase {
 #client;
 model;
 constructor(options) {
 super();
 this.#client = new OpenAI({
-baseURL: `${options.baseUrl || 'http://localhost:11434'}/v1`,
-apiKey: 'ollama',
+baseURL: `${options.baseUrl || "http://localhost:11434"}/v1`,
+apiKey: "ollama",
 });
 this.model = {
-id: options.modelId || '',
+id: options.modelId || "",
 info: openAiModelInfoSaneDefaults,
 };
 }
 async *send(systemPrompt, messages) {
-logger.debug({ modelId: this.model.id, messagesCount: messages.length }, 'Starting message stream');
+logger.debug({ modelId: this.model.id, messagesCount: messages.length }, "Starting message stream");
 const openAiMessages = [
-{ role: 'system', content: systemPrompt },
+{ role: "system", content: systemPrompt },
 ...convertToOpenAiMessages(messages),
 ];
-logger.trace({ modelId: this.model.id, messagesCount: messages.length }, 'Sending messages to Ollama');
+logger.trace({ modelId: this.model.id, messagesCount: messages.length }, "Sending messages to Ollama");
 const stream = await this.#client.chat.completions.create({
 model: this.model.id,
 messages: openAiMessages,
@@ -36,12 +36,12 @@ export class OllamaService extends AiServiceBase {
 const delta = chunk.choices[0]?.delta;
 if (delta?.content) {
 yield {
-type: 'text',
+type: "text",
 text: delta.content,
 };
 }
 }
-logger.debug('Stream ended');
+logger.debug("Stream ended");
 }
 }
-//# sourceMappingURL=OllamaService.js.map
+//# sourceMappingURL=OllamaService.js.map
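OllamaService above talks to a local Ollama daemon through the same OpenAI-compatible surface; the apiKey is a hard-coded placeholder and baseUrl defaults to http://localhost:11434. A consumption sketch ("llama3.2" is an illustrative model name, not one the package prescribes):

    const ollama = new OllamaService({ modelId: "llama3.2" });
    for await (const chunk of ollama.send("Reply tersely.", [{ role: "user", content: "ping" }])) {
      if (chunk.type === "text") process.stdout.write(chunk.text);
    }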
package/dist/AiService/index.d.ts
CHANGED
@@ -1,8 +1,8 @@
-import type { AiServiceBase, AiServiceOptions, MessageParam } from './AiServiceBase';
-import { AnthropicService } from './AnthropicService';
-import { DeepSeekService } from './DeepSeekService';
-import type { ModelInfo } from './ModelInfo';
-import { OllamaService } from './OllamaService';
+import type { AiServiceBase, AiServiceOptions, MessageParam } from "./AiServiceBase.js";
+import { AnthropicService } from "./AnthropicService.js";
+import { DeepSeekService } from "./DeepSeekService.js";
+import type { ModelInfo } from "./ModelInfo.js";
+import { OllamaService } from "./OllamaService.js";
 export declare enum AiServiceProvider {
 Anthropic = "anthropic",
 Ollama = "ollama",
package/dist/AiService/index.js
CHANGED
@@ -1,6 +1,6 @@
-import { AnthropicService } from './AnthropicService';
-import { DeepSeekService } from './DeepSeekService';
-import { OllamaService } from './OllamaService';
+import { AnthropicService } from "./AnthropicService.js";
+import { DeepSeekService } from "./DeepSeekService.js";
+import { OllamaService } from "./OllamaService.js";
 export var AiServiceProvider;
 (function (AiServiceProvider) {
 AiServiceProvider["Anthropic"] = "anthropic";
@@ -17,4 +17,4 @@ export const createService = (provider, options) => {
 return new DeepSeekService(options);
 }
 };
-//# sourceMappingURL=index.js.map
+//# sourceMappingURL=index.js.map
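createService above is a plain provider switch that forwards its options unchanged to the chosen service constructor. A usage sketch, assuming the apiKey option:

    const ai = createService(AiServiceProvider.Anthropic, {
      apiKey: process.env.ANTHROPIC_API_KEY ?? "",
    });
    console.log(ai.model.id); // the resolved model id; ai.model.info carries its ModelInfo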
package/dist/AiService/utils.js
CHANGED
@@ -1,10 +1,10 @@
 // source: https://github.com/cline/cline/blob/f6c19c29a64ca84e9360df7ab2c07d128dcebe64/src/api/transform/openai-format.ts
-import { createServiceLogger } from '../logger';
-const logger = createServiceLogger('utils');
+import { createServiceLogger } from "../logger.js";
+const logger = createServiceLogger("utils");
 export function convertToOpenAiMessages(anthropicMessages) {
 const openAiMessages = [];
 for (const anthropicMessage of anthropicMessages) {
-if (typeof anthropicMessage.content === 'string') {
+if (typeof anthropicMessage.content === "string") {
 openAiMessages.push({
 role: anthropicMessage.role,
 content: anthropicMessage.content,
@@ -19,12 +19,12 @@ export function convertToOpenAiMessages(anthropicMessages) {
 { role: "assistant", content?: "" | null, tool_calls?: [{ id: "", function: { name: "", arguments: "" }, type: "function" }] },
 { role: "tool", tool_call_id: "", content: ""}
 */
-if (anthropicMessage.role === 'user') {
+if (anthropicMessage.role === "user") {
 const { nonToolMessages, toolMessages } = anthropicMessage.content.reduce((acc, part) => {
-if (part.type === 'tool_result') {
+if (part.type === "tool_result") {
 acc.toolMessages.push(part);
 }
-else if (part.type === 'text' || part.type === 'image') {
+else if (part.type === "text" || part.type === "image") {
 acc.nonToolMessages.push(part);
 } // user cannot send tool_use messages
 return acc;
@@ -34,23 +34,23 @@ export function convertToOpenAiMessages(anthropicMessages) {
 for (const toolMessage of toolMessages) {
 // The Anthropic SDK allows tool results to be a string or an array of text and image blocks, enabling rich and structured content. In contrast, the OpenAI SDK only supports tool results as a single string, so we map the Anthropic tool result parts into one concatenated string to maintain compatibility.
 let content;
-if (typeof toolMessage.content === 'string') {
+if (typeof toolMessage.content === "string") {
 content = toolMessage.content;
 }
 else {
 content =
 toolMessage.content
 ?.map((part) => {
-if (part.type === 'image') {
+if (part.type === "image") {
 toolResultImages.push(part);
-return '(see following user message for image)';
+return "(see following user message for image)";
 }
 return part.text;
 })
-.join('\n') ?? '';
+.join("\n") ?? "";
 }
 openAiMessages.push({
-role: 'tool',
+role: "tool",
 tool_call_id: toolMessage.tool_use_id,
 content: content,
 });
@@ -73,27 +73,27 @@ export function convertToOpenAiMessages(anthropicMessages) {
 // Process non-tool messages
 if (nonToolMessages.length > 0) {
 openAiMessages.push({
-role: 'user',
+role: "user",
 content: nonToolMessages.map((part) => {
-if (part.type === 'image') {
+if (part.type === "image") {
 return {
-type: 'image_url',
+type: "image_url",
 image_url: {
 url: `data:${part.source.media_type};base64,${part.source.data}`,
 },
 };
 }
-return { type: 'text', text: part.text };
+return { type: "text", text: part.text };
 }),
 });
 }
 }
-else if (anthropicMessage.role === 'assistant') {
+else if (anthropicMessage.role === "assistant") {
 const { nonToolMessages, toolMessages } = anthropicMessage.content.reduce((acc, part) => {
-if (part.type === 'tool_use') {
+if (part.type === "tool_use") {
 acc.toolMessages.push(part);
 }
-else if (part.type === 'text' || part.type === 'image') {
+else if (part.type === "text" || part.type === "image") {
 acc.nonToolMessages.push(part);
 } // assistant cannot send tool_result messages
 return acc;
@@ -103,17 +103,17 @@ export function convertToOpenAiMessages(anthropicMessages) {
 if (nonToolMessages.length > 0) {
 content = nonToolMessages
 .map((part) => {
-if (part.type === 'image') {
-return ''; // impossible as the assistant cannot send images
+if (part.type === "image") {
+return ""; // impossible as the assistant cannot send images
 }
 return part.text;
 })
-.join('\n');
+.join("\n");
 }
 // Process tool use messages
 const tool_calls = toolMessages.map((toolMessage) => ({
 id: toolMessage.id,
-type: 'function',
+type: "function",
 function: {
 name: toolMessage.name,
 // json string
@@ -121,7 +121,7 @@ export function convertToOpenAiMessages(anthropicMessages) {
 },
 }));
 openAiMessages.push({
-role: 'assistant',
+role: "assistant",
 content,
 // Cannot be an empty array. API expects an array with minimum length 1, and will respond with an error if it's empty
 tool_calls: tool_calls.length > 0 ? tool_calls : undefined,
@@ -136,23 +136,23 @@ export function convertToAnthropicMessage(completion) {
 const openAiMessage = completion.choices[0].message;
 const anthropicMessage = {
 id: completion.id,
-type: 'message',
+type: "message",
 role: openAiMessage.role, // always "assistant"
 content: [
 {
-type: 'text',
-text: openAiMessage.content || '',
+type: "text",
+text: openAiMessage.content || "",
 },
 ],
 model: completion.model,
 stop_reason: (() => {
 switch (completion.choices[0].finish_reason) {
-case 'stop':
-return 'end_turn';
-case 'length':
-return 'max_tokens';
-case 'tool_calls':
-return 'tool_use';
+case "stop":
+return "end_turn";
+case "length":
+return "max_tokens";
+case "tool_calls":
+return "tool_use";
 default:
 return null;
 }
@@ -169,13 +169,13 @@ export function convertToAnthropicMessage(completion) {
 anthropicMessage.content.push(...openAiMessage.tool_calls.map((toolCall) => {
 let parsedInput = {};
 try {
-parsedInput = JSON.parse(toolCall.function.arguments || '{}');
+parsedInput = JSON.parse(toolCall.function.arguments || "{}");
 }
 catch (error) {
-logger.warn('Failed to parse tool arguments:', error);
+logger.warn("Failed to parse tool arguments:", error);
 }
 return {
-type: 'tool_use',
+type: "tool_use",
 id: toolCall.id,
 name: toolCall.function.name,
 input: parsedInput,
@@ -184,4 +184,4 @@ export function convertToAnthropicMessage(completion) {
 }
 return anthropicMessage;
 }
-//# sourceMappingURL=utils.js.map
+//# sourceMappingURL=utils.js.map
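convertToOpenAiMessages above flattens Anthropic-style content blocks into OpenAI chat messages: tool_result blocks become role "tool" messages keyed by tool_call_id, assistant tool_use blocks become tool_calls entries (with arguments serialized to a JSON string, per the `// json string` comment), and image blocks are re-encoded as data-URL image_url parts. An illustrative round trip with made-up data:

    const openAiMessages = convertToOpenAiMessages([
      {
        role: "assistant",
        content: [{ type: "tool_use", id: "t1", name: "readFile", input: { path: "a.ts" } }],
      },
      {
        role: "user",
        content: [{ type: "tool_result", tool_use_id: "t1", content: "file contents" }],
      },
    ]);
    // Roughly:
    // [
    //   { role: "assistant", content: undefined,
    //     tool_calls: [{ id: "t1", type: "function",
    //                    function: { name: "readFile", arguments: '{"path":"a.ts"}' } }] },
    //   { role: "tool", tool_call_id: "t1", content: "file contents" },
    // ]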
package/dist/index.d.ts
CHANGED
@@ -1,4 +1,4 @@
-export * from './AiService';
-export * from './Agent';
-export * from './tools';
-export * from './logger';
+export * from "./AiService/index.js";
+export * from "./Agent/index.js";
+export * from "./tools/index.js";
+export * from "./logger.js";
package/dist/index.js
CHANGED
@@ -1,5 +1,5 @@
-export * from './AiService';
-export * from './Agent';
-export * from './tools';
-export * from './logger';
-//# sourceMappingURL=index.js.map
+export * from "./AiService/index.js";
+export * from "./Agent/index.js";
+export * from "./tools/index.js";
+export * from "./logger.js";
+//# sourceMappingURL=index.js.map
package/dist/tools/index.d.ts
CHANGED
@@ -1,3 +1,3 @@
-export * from './types';
-export * from './tools';
-export * as allTools from './tools';
+export * from "./types.js";
+export * from "./tools.js";
+export * as allTools from "./tools.js";
package/dist/tools/index.js
CHANGED
@@ -1,4 +1,4 @@
-export * from './types';
-export * from './tools';
-export * as allTools from './tools';
-//# sourceMappingURL=index.js.map
+export * from "./types.js";
+export * from "./tools.js";
+export * as allTools from "./tools.js";
+//# sourceMappingURL=index.js.map