@polka-codes/core 0.0.4 → 0.1.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (65)
  1. package/dist/Agent/AgentBase.d.ts +49 -0
  2. package/dist/Agent/AgentBase.js +158 -0
  3. package/dist/Agent/AgentBase.js.map +1 -0
  4. package/dist/Agent/CoderAgent/__snapshots__/prompts.test.js.snap +432 -0
  5. package/dist/Agent/CoderAgent/index.d.ts +17 -0
  6. package/dist/Agent/CoderAgent/index.js +32 -0
  7. package/dist/Agent/CoderAgent/index.js.map +1 -0
  8. package/dist/Agent/CoderAgent/prompts.d.ts +20 -0
  9. package/dist/Agent/CoderAgent/prompts.js +163 -0
  10. package/dist/Agent/CoderAgent/prompts.js.map +1 -0
  11. package/dist/Agent/CoderAgent/prompts.test.d.ts +1 -0
  12. package/dist/Agent/CoderAgent/prompts.test.js +20 -0
  13. package/dist/Agent/CoderAgent/prompts.test.js.map +1 -0
  14. package/dist/Agent/index.d.ts +2 -0
  15. package/dist/Agent/index.js +3 -0
  16. package/dist/Agent/index.js.map +1 -0
  17. package/dist/Agent/parseAssistantMessage.d.ts +45 -0
  18. package/dist/Agent/parseAssistantMessage.js +103 -0
  19. package/dist/Agent/parseAssistantMessage.js.map +1 -0
  20. package/dist/Agent/parseAssistantMessage.test.d.ts +1 -0
  21. package/dist/Agent/parseAssistantMessage.test.js +172 -0
  22. package/dist/Agent/parseAssistantMessage.test.js.map +1 -0
  23. package/dist/Agent/prompts.d.ts +7 -0
  24. package/dist/Agent/prompts.js +93 -0
  25. package/dist/Agent/prompts.js.map +1 -0
  26. package/dist/AiService/AiServiceBase.d.ts +29 -0
  27. package/dist/AiService/AiServiceBase.js +3 -0
  28. package/dist/AiService/AiServiceBase.js.map +1 -0
  29. package/dist/AiService/AnthropicService.d.ts +11 -0
  30. package/dist/AiService/AnthropicService.js +185 -0
  31. package/dist/AiService/AnthropicService.js.map +1 -0
  32. package/dist/AiService/DeepSeekService.d.ts +11 -0
  33. package/dist/AiService/DeepSeekService.js +64 -0
  34. package/dist/AiService/DeepSeekService.js.map +1 -0
  35. package/dist/AiService/ModelInfo.d.ts +79 -0
  36. package/dist/AiService/ModelInfo.js +67 -0
  37. package/dist/AiService/ModelInfo.js.map +1 -0
  38. package/dist/AiService/OllamaService.d.ts +11 -0
  39. package/dist/AiService/OllamaService.js +47 -0
  40. package/dist/AiService/OllamaService.js.map +1 -0
  41. package/dist/AiService/index.d.ts +12 -0
  42. package/dist/AiService/index.js +20 -0
  43. package/dist/AiService/index.js.map +1 -0
  44. package/dist/AiService/utils.d.ts +4 -0
  45. package/dist/AiService/utils.js +187 -0
  46. package/dist/AiService/utils.js.map +1 -0
  47. package/dist/AiService/utils.test.d.ts +1 -0
  48. package/dist/AiService/utils.test.js +275 -0
  49. package/dist/AiService/utils.test.js.map +1 -0
  50. package/dist/index.d.ts +4 -10
  51. package/dist/index.js +4 -10
  52. package/dist/index.js.map +1 -1
  53. package/dist/logger.d.ts +5 -0
  54. package/dist/logger.js +25 -0
  55. package/dist/logger.js.map +1 -0
  56. package/dist/tools/index.d.ts +3 -0
  57. package/dist/tools/index.js +4 -0
  58. package/dist/tools/index.js.map +1 -0
  59. package/dist/tools/tools.d.ts +200 -0
  60. package/dist/tools/tools.js +329 -0
  61. package/dist/tools/tools.js.map +1 -0
  62. package/dist/tools/types.d.ts +49 -0
  63. package/dist/tools/types.js +9 -0
  64. package/dist/tools/types.js.map +1 -0
  65. package/package.json +9 -2
@@ -0,0 +1,93 @@
1
// Renders the reference section for a single tool: a heading, its
// description, a parameter list, and an XML usage skeleton built from
// each parameter's placeholder usageValue.
const toolInfoPrompt = (tool, toolNamePrefix, parameterPrefix) => {
    const tagName = `${toolNamePrefix}${tool.name}`;
    const parameterDocs = tool.parameters
        .map((param) => `- ${parameterPrefix}${param.name}: (${param.required ? 'required' : 'optional'}) ${param.description}`)
        .join('\n');
    const usageBody = tool.parameters
        .map((param) => `<${parameterPrefix}${param.name}>${param.usageValue}</${parameterPrefix}${param.name}>`)
        .join('\n');
    return `
## ${tagName}

Description: ${tool.description}

Parameters:
${parameterDocs}

Usage:
<${tagName}>
${usageBody}
</${tagName}>`;
};
13
// Renders one numbered usage example for a tool, filled with the example's
// concrete parameter values. `idx` is zero-based; the heading shows idx + 1.
const toolInfoExamplesPrompt = (idx, tool, example, toolNamePrefix, parameterPrefix) => {
    const tagName = `${toolNamePrefix}${tool.name}`;
    const exampleBody = example.parameters
        .map((param) => `<${parameterPrefix}${param.name}>${param.value}</${parameterPrefix}${param.name}>`)
        .join('\n');
    return `
## Example ${idx + 1}: ${example.description}

<${tagName}>
${exampleBody}
</${tagName}>
`;
};
20
// Builds the complete "TOOL USE" section of the system prompt: formatting
// rules, per-tool reference docs, numbered usage examples and usage
// guidelines. Returns an empty string when no tools are available.
export const toolUsePrompt = (tools, toolNamePrefix) => {
    if (tools.length === 0) {
        return '';
    }
    const parameterPrefix = `${toolNamePrefix}parameter_`;
    // Reference docs for every tool, blank-line separated.
    const toolsSection = tools.map((tool) => toolInfoPrompt(tool, toolNamePrefix, parameterPrefix)).join('\n');
    // Examples are numbered consecutively across all tools.
    let exampleIndex = 0;
    let examplesSection = '';
    for (const tool of tools) {
        for (const example of tool.examples ?? []) {
            examplesSection += toolInfoExamplesPrompt(exampleIndex++, tool, example, toolNamePrefix, parameterPrefix);
        }
    }
    return `
====

TOOL USE

You have access to a set of tools that are executed upon the user's approval. You can use one tool per message, and will receive the result of that tool use in the user's response. You use tools step-by-step to accomplish a given task, with each tool use informed by the result of the previous tool use.

# Tool Use Formatting

Tool use is formatted using XML-style tags. The tool name is enclosed in opening and closing tags, and each parameter is similarly enclosed within its own set of tags. Here's the structure:

<${toolNamePrefix}tool_name>
<${parameterPrefix}name1>value1</${parameterPrefix}name1>
<${parameterPrefix}name2>value2</${parameterPrefix}name2>
...
</${toolNamePrefix}tool_name>

For example:

<${toolNamePrefix}read_file>
<${parameterPrefix}path>src/main.js</${parameterPrefix}path>
</${toolNamePrefix}read_file>

Always adhere to this format for the tool use to ensure proper parsing and execution.

# Tools
${toolsSection}

# Tool Use Examples
${examplesSection}
# Tool Use Guidelines

1. **In \`<thinking>\` tags**, assess what information you have and what you need to proceed.
2. **Choose one tool at a time per message** based on the task and its description. Do not assume a tool’s outcome without explicit confirmation.
3. **Formulate tool use only in the specified XML format** for each tool.
4. **Wait for the user’s response** after each tool use. Do not proceed until you have their confirmation.
5. The user’s response may include:
- Tool success or failure details
- Linter errors
- Terminal output or other relevant feedback
6. **Never repeat or quote the entire tool command** in your final user-facing message. Summarize outcomes clearly and avoid echoing commands verbatim.
7. **Respond concisely** and move the conversation forward. Do not re-issue the same command or re-trigger tool use without necessity.
8. Follow these steps **iteratively**, confirming success and addressing issues as you go.

By adhering to these guidelines:
- You maintain clarity without accidentally re-invoking tools.
- You confirm each step’s results before proceeding.
- You provide only the necessary information in user-facing replies to prevent re-interpretation as new commands.`;
};
83
// Canned response strings sent back to the model around tool execution.
export const responsePrompts = {
    // Reported when invoking `tool` threw; includes the error text verbatim.
    errorInvokeTool(tool, error) {
        return `An error occurred while invoking the tool "${tool}": ${error}`;
    },
    // Nudge used when the assistant replied without using any tool.
    requireUseTool: 'Error: You must use a tool before proceeding',
    // Wraps a tool's output in the <tool_response> envelope the agent parses.
    toolResults(tool, result) {
        return ['<tool_response>', `<tool_name>${tool}</tool_name>`, '<tool_result>', `${result}`, '</tool_result>', '</tool_response>'].join('\n');
    },
};
//# sourceMappingURL=prompts.js.map
@@ -0,0 +1 @@
1
+ {"version":3,"file":"prompts.js","sourceRoot":"","sources":["../../src/Agent/prompts.ts"],"names":[],"mappings":"AAEA,MAAM,cAAc,GAAG,CAAC,IAAc,EAAE,cAAsB,EAAE,eAAuB,EAAE,EAAE,CAAC;KACvF,cAAc,GAAG,IAAI,CAAC,IAAI;;eAEhB,IAAI,CAAC,WAAW;;;EAG7B,IAAI,CAAC,UAAU,CAAC,GAAG,CAAC,CAAC,KAAK,EAAE,EAAE,CAAC,KAAK,eAAe,GAAG,KAAK,CAAC,IAAI,MAAM,KAAK,CAAC,QAAQ,CAAC,CAAC,CAAC,UAAU,CAAC,CAAC,CAAC,UAAU,KAAK,KAAK,CAAC,WAAW,EAAE,CAAC,CAAC,IAAI,CAAC,IAAI,CAAC;;;GAGjJ,cAAc,GAAG,IAAI,CAAC,IAAI;EAC3B,IAAI,CAAC,UAAU,CAAC,GAAG,CAAC,CAAC,KAAK,EAAE,EAAE,CAAC,IAAI,eAAe,GAAG,KAAK,CAAC,IAAI,IAAI,KAAK,CAAC,UAAU,KAAK,eAAe,GAAG,KAAK,CAAC,IAAI,GAAG,CAAC,CAAC,IAAI,CAAC,IAAI,CAAC;IACjI,cAAc,GAAG,IAAI,CAAC,IAAI,GAAG,CAAA;AAEjC,MAAM,sBAAsB,GAAG,CAAC,GAAW,EAAE,IAAc,EAAE,OAAoB,EAAE,cAAsB,EAAE,eAAuB,EAAE,EAAE,CAAC;aAC1H,GAAG,GAAG,CAAC,KAAK,OAAO,CAAC,WAAW;;GAEzC,cAAc,GAAG,IAAI,CAAC,IAAI;EAC3B,OAAO,CAAC,UAAU,CAAC,GAAG,CAAC,CAAC,KAAK,EAAE,EAAE,CAAC,IAAI,eAAe,GAAG,KAAK,CAAC,IAAI,IAAI,KAAK,CAAC,KAAK,KAAK,eAAe,GAAG,KAAK,CAAC,IAAI,GAAG,CAAC,CAAC,IAAI,CAAC,IAAI,CAAC;IAC/H,cAAc,GAAG,IAAI,CAAC,IAAI;CAC7B,CAAA;AAED,MAAM,CAAC,MAAM,aAAa,GAAG,CAAC,KAAiB,EAAE,cAAsB,EAAE,EAAE;IACzE,IAAI,KAAK,CAAC,MAAM,KAAK,CAAC,EAAE,CAAC;QACvB,OAAO,EAAE,CAAA;IACX,CAAC;IAED,MAAM,eAAe,GAAG,GAAG,cAAc,YAAY,CAAA;IAErD,IAAI,YAAY,GAAG,CAAC,CAAA;IAEpB,OAAO;;;;;;;;;;;GAWN,cAAc;GACd,eAAe,iBAAiB,eAAe;GAC/C,eAAe,iBAAiB,eAAe;;IAE9C,cAAc;;;;GAIf,cAAc;GACd,eAAe,qBAAqB,eAAe;IAClD,cAAc;;;;;EAKhB,KAAK,CAAC,GAAG,CAAC,CAAC,IAAI,EAAE,EAAE,CAAC,cAAc,CAAC,IAAI,EAAE,cAAc,EAAE,eAAe,CAAC,CAAC,CAAC,IAAI,CAAC,IAAI,CAAC;;;EAGrF,KAAK;SACJ,GAAG,CAAC,CAAC,IAAI,EAAE,EAAE;QACZ,IAAI,KAAK,GAAG,EAAE,CAAA;QACd,KAAK,MAAM,OAAO,IAAI,IAAI,CAAC,QAAQ,IAAI,EAAE,EAAE,CAAC;YAC1C,KAAK,IAAI,sBAAsB,CAAC,YAAY,EAAE,EAAE,IAAI,EAAE,OAAO,EAAE,cAAc,EAAE,eAAe,CAAC,CAAA;QACjG,CAAC;QACD,OAAO,KAAK,CAAA;IACd,CAAC,CAAC;SACD,IAAI,CAAC,EAAE,CAAC;;;;;;;;;;;;;;;;;;kHAkBuG,CAAA;AAClH,CAAC,CAAA;AAED,MAAM,CAAC,MAAM,eAAe,GAAG;IAC7B,eAAe,EAAE,CAAC,IAAY,EAAE,KAAc,EAAE,EAAE,CAAC,8CAA8C,IAAI,MAAM,KAAK,EAAE;IAClH
,cAAc,EAAE,8CAA8C;IAC9D,WAAW,EAAE,CAAC,IAAY,EAAE,MAAc,EAAE,EAAE,CAAC;aACpC,IAAI;;EAEf,MAAM;;iBAES;CACP,CAAA"}
@@ -0,0 +1,29 @@
1
import type { Anthropic } from '@anthropic-ai/sdk';
import type { ModelInfo } from './ModelInfo';
/** A single item yielded by an AI service stream: streamed text or usage accounting. */
export type ApiStreamChunk = ApiStreamTextChunk | ApiStreamUsageChunk;
/** A fragment of assistant text produced while streaming. */
export interface ApiStreamTextChunk {
    type: 'text';
    text: string;
}
/** Token-usage (and optional cache/cost) accounting emitted during a stream. */
export interface ApiStreamUsageChunk {
    type: 'usage';
    inputTokens: number;
    outputTokens: number;
    cacheWriteTokens?: number;
    cacheReadTokens?: number;
    totalCost?: number;
}
/** The async stream of chunks returned by AiServiceBase.send. */
export type ApiStream = AsyncGenerator<ApiStreamChunk>;
/** Construction options shared by all service implementations. */
export interface AiServiceOptions {
    modelId?: string;
    apiKey?: string;
    baseUrl?: string;
}
/** Message shape reused from the Anthropic SDK for all providers. */
export type MessageParam = Anthropic.Messages.MessageParam;
/**
 * Abstract base every AI provider implements: exposes the resolved model
 * (id plus static metadata) and a streaming send over a system prompt and
 * message history.
 */
export declare abstract class AiServiceBase {
    abstract get model(): {
        id: string;
        info: ModelInfo;
    };
    abstract send(systemPrompt: string, messages: MessageParam[]): ApiStream;
}
@@ -0,0 +1,3 @@
1
// Runtime counterpart of the AiServiceBase declaration. The compiled class
// carries no members because `model` and `send` are abstract in the
// TypeScript source and are provided by concrete services
// (AnthropicService, DeepSeekService, OllamaService).
export class AiServiceBase {
}
//# sourceMappingURL=AiServiceBase.js.map
@@ -0,0 +1 @@
1
+ {"version":3,"file":"AiServiceBase.js","sourceRoot":"","sources":["../../src/AiService/AiServiceBase.ts"],"names":[],"mappings":"AA8BA,MAAM,OAAgB,aAAa;CAIlC"}
@@ -0,0 +1,11 @@
1
import { AiServiceBase, type AiServiceOptions, type ApiStream, type MessageParam } from './AiServiceBase';
import { type AnthropicModelId, type ModelInfo } from './ModelInfo';
/** Anthropic-backed implementation of AiServiceBase (see AnthropicService.js). */
export declare class AnthropicService extends AiServiceBase {
    #private;
    /** Resolved Anthropic model id plus its static metadata. */
    readonly model: {
        id: AnthropicModelId;
        info: ModelInfo;
    };
    constructor(options: AiServiceOptions);
    send(systemPrompt: string, messages: MessageParam[]): ApiStream;
}
@@ -0,0 +1,185 @@
1
// source: https://github.com/cline/cline/blob/f6c19c29a64ca84e9360df7ab2c07d128dcebe64/src/api/providers/anthropic.ts
import { Anthropic } from '@anthropic-ai/sdk';
import { createServiceLogger } from '../logger';
import { AiServiceBase } from './AiServiceBase';
import { anthropicDefaultModelId, anthropicModels } from './ModelInfo';
const logger = createServiceLogger('AnthropicService');
/**
 * AiServiceBase implementation backed by the Anthropic Messages API.
 * Streams text chunks and usage chunks; for model ids that support prompt
 * caching it sets cache_control breakpoints and the prompt-caching beta header.
 */
export class AnthropicService extends AiServiceBase {
    #options;
    #client;
    // Resolved model: { id, info }. Falls back to the default model's info
    // when the requested id is not present in anthropicModels.
    model;
    constructor(options) {
        super();
        this.#options = options;
        this.#client = new Anthropic({
            apiKey: options.apiKey,
            baseURL: options.baseUrl || undefined,
        });
        const id = (this.#options.modelId ?? anthropicDefaultModelId);
        this.model = {
            id,
            info: anthropicModels[id] ?? anthropicModels[anthropicDefaultModelId],
        };
    }
    /**
     * Streams a completion for `messages` under `systemPrompt`.
     * Yields { type: 'text', ... } chunks for content and { type: 'usage', ... }
     * chunks for token accounting (see ApiStream in AiServiceBase.d.ts).
     */
    async *send(systemPrompt, messages) {
        logger.debug({ modelId: this.model.id, messagesCount: messages.length }, 'Starting message stream');
        let stream;
        const modelId = this.model.id;
        switch (modelId) {
            // 'latest' alias does not support cache_control
            case 'claude-3-5-sonnet-20241022':
            case 'claude-3-5-haiku-20241022':
            case 'claude-3-opus-20240229':
            case 'claude-3-haiku-20240307': {
                /*
                The latest message will be the new user message, one before will be the assistant message from a previous request, and the user message before that will be a previously cached user message. So we need to mark the latest user message as ephemeral to cache it for the next request, and mark the second to last user message as ephemeral to let the server know the last message to retrieve from the cache for the current request..
                */
                // Collect the indices of all user messages so the last two can be
                // marked as cache breakpoints below.
                const userMsgIndices = messages.reduce((acc, msg, index) => {
                    if (msg.role === 'user') {
                        acc.push(index);
                    }
                    return acc;
                }, []);
                const lastUserMsgIndex = userMsgIndices[userMsgIndices.length - 1] ?? -1;
                const secondLastMsgUserIndex = userMsgIndices[userMsgIndices.length - 2] ?? -1;
                stream = await this.#client.messages.create({
                    model: modelId,
                    max_tokens: this.model.info.maxTokens || 8192,
                    temperature: 0,
                    system: [
                        {
                            text: systemPrompt,
                            type: 'text',
                            cache_control: { type: 'ephemeral' },
                        },
                    ], // setting cache breakpoint for system prompt so new tasks can reuse it
                    messages: messages.map((message, index) => {
                        if (index === lastUserMsgIndex || index === secondLastMsgUserIndex) {
                            // Attach an ephemeral cache_control to the (last block of
                            // the) selected message, normalizing string content to a
                            // text-block array first.
                            return {
                                ...message,
                                content: typeof message.content === 'string'
                                    ? [
                                        {
                                            type: 'text',
                                            text: message.content,
                                            cache_control: {
                                                type: 'ephemeral',
                                            },
                                        },
                                    ]
                                    : message.content.map((content, contentIndex) => contentIndex === message.content.length - 1
                                        ? {
                                            ...content,
                                            cache_control: {
                                                type: 'ephemeral',
                                            },
                                        }
                                        : content),
                            };
                        }
                        return message;
                    }),
                    // tools, // cache breakpoints go from tools > system > messages, and since tools dont change, we can just set the breakpoint at the end of system (this avoids having to set a breakpoint at the end of tools which by itself does not meet min requirements for haiku caching)
                    // tool_choice: { type: "auto" },
                    // tools: tools,
                    stream: true,
                }, (() => {
                    // prompt caching: https://x.com/alexalbert__/status/1823751995901272068
                    // https://github.com/anthropics/anthropic-sdk-typescript?tab=readme-ov-file#default-headers
                    // https://github.com/anthropics/anthropic-sdk-typescript/commit/c920b77fc67bd839bfeb6716ceab9d7c9bbe7393
                    switch (modelId) {
                        case 'claude-3-5-sonnet-20241022':
                        case 'claude-3-5-haiku-20241022':
                        case 'claude-3-opus-20240229':
                        case 'claude-3-haiku-20240307':
                            return {
                                headers: {
                                    'anthropic-beta': 'prompt-caching-2024-07-31',
                                },
                            };
                        default:
                            return undefined;
                    }
                })());
                break;
            }
            default: {
                // Models without cache_control support: plain streaming request.
                stream = await this.#client.messages.create({
                    model: modelId,
                    max_tokens: this.model.info.maxTokens || 8192,
                    temperature: 0,
                    system: [{ text: systemPrompt, type: 'text' }],
                    messages,
                    // tools,
                    // tool_choice: { type: "auto" },
                    stream: true,
                });
                break;
            }
        }
        logger.debug('Stream created, processing chunks');
        // Translate Anthropic SSE events into ApiStreamChunk values.
        for await (const chunk of stream) {
            switch (chunk.type) {
                case 'message_start': {
                    // tells us cache reads/writes/input/output
                    const usage = chunk.message.usage;
                    const usageInfo = {
                        type: 'usage',
                        inputTokens: usage.input_tokens || 0,
                        outputTokens: usage.output_tokens || 0,
                        cacheWriteTokens: usage.cache_creation_input_tokens || undefined,
                        cacheReadTokens: usage.cache_read_input_tokens || undefined,
                    };
                    logger.trace({ usage: usageInfo }, 'Message start usage');
                    yield usageInfo;
                    break;
                }
                case 'message_delta': {
                    // tells us stop_reason, stop_sequence, and output tokens along the way and at the end of the message
                    const deltaUsage = {
                        type: 'usage',
                        inputTokens: 0,
                        outputTokens: chunk.usage.output_tokens || 0,
                    };
                    logger.trace({ usage: deltaUsage }, 'Message delta usage');
                    yield deltaUsage;
                    break;
                }
                case 'message_stop':
                    logger.debug('Message stream completed');
                    break;
                case 'content_block_start':
                    switch (chunk.content_block.type) {
                        case 'text':
                            // we may receive multiple text blocks, in which case just insert a line break between them
                            if (chunk.index > 0) {
                                yield {
                                    type: 'text',
                                    text: '\n',
                                };
                            }
                            yield {
                                type: 'text',
                                text: chunk.content_block.text,
                            };
                            break;
                    }
                    break;
                case 'content_block_delta':
                    switch (chunk.delta.type) {
                        case 'text_delta':
                            yield {
                                type: 'text',
                                text: chunk.delta.text,
                            };
                            break;
                    }
                    break;
                case 'content_block_stop':
                    break;
            }
        }
        logger.debug('Stream ended');
    }
}
//# sourceMappingURL=AnthropicService.js.map
@@ -0,0 +1 @@
1
+ {"version":3,"file":"AnthropicService.js","sourceRoot":"","sources":["../../src/AiService/AnthropicService.ts"],"names":[],"mappings":"AAAA,sHAAsH;AAEtH,OAAO,EAAE,SAAS,EAAE,MAAM,mBAAmB,CAAA;AAG7C,OAAO,EAAE,mBAAmB,EAAE,MAAM,WAAW,CAAA;AAC/C,OAAO,EAAE,aAAa,EAA4D,MAAM,iBAAiB,CAAA;AACzG,OAAO,EAAyC,uBAAuB,EAAE,eAAe,EAAE,MAAM,aAAa,CAAA;AAE7G,MAAM,MAAM,GAAG,mBAAmB,CAAC,kBAAkB,CAAC,CAAA;AAEtD,MAAM,OAAO,gBAAiB,SAAQ,aAAa;IACjD,QAAQ,CAAkB;IAC1B,OAAO,CAAW;IAET,KAAK,CAA2C;IAEzD,YAAY,OAAyB;QACnC,KAAK,EAAE,CAAA;QAEP,IAAI,CAAC,QAAQ,GAAG,OAAO,CAAA;QACvB,IAAI,CAAC,OAAO,GAAG,IAAI,SAAS,CAAC;YAC3B,MAAM,EAAE,OAAO,CAAC,MAAM;YACtB,OAAO,EAAE,OAAO,CAAC,OAAO,IAAI,SAAS;SACtC,CAAC,CAAA;QAEF,MAAM,EAAE,GAAG,CAAC,IAAI,CAAC,QAAQ,CAAC,OAAO,IAAI,uBAAuB,CAAqB,CAAA;QACjF,IAAI,CAAC,KAAK,GAAG;YACX,EAAE;YACF,IAAI,EAAE,eAAe,CAAC,EAAE,CAAC,IAAI,eAAe,CAAC,uBAAuB,CAAC;SACtE,CAAA;IACH,CAAC;IAED,KAAK,CAAC,CAAC,IAAI,CAAC,YAAoB,EAAE,QAAwB;QACxD,MAAM,CAAC,KAAK,CAAC,EAAE,OAAO,EAAE,IAAI,CAAC,KAAK,CAAC,EAAE,EAAE,aAAa,EAAE,QAAQ,CAAC,MAAM,EAAE,EAAE,yBAAyB,CAAC,CAAA;QAEnG,IAAI,MAAiE,CAAA;QACrE,MAAM,OAAO,GAAG,IAAI,CAAC,KAAK,CAAC,EAAE,CAAA;QAC7B,QAAQ,OAAO,EAAE,CAAC;YAChB,gDAAgD;YAChD,KAAK,4BAA4B,CAAC;YAClC,KAAK,2BAA2B,CAAC;YACjC,KAAK,wBAAwB,CAAC;YAC9B,KAAK,yBAAyB,CAAC,CAAC,CAAC;gBAC/B;;0BAEF;gBACE,MAAM,cAAc,GAAG,QAAQ,CAAC,MAAM,CAAC,CAAC,GAAG,EAAE,GAAG,EAAE,KAAK,EAAE,EAAE;oBACzD,IAAI,GAAG,CAAC,IAAI,KAAK,MAAM,EAAE,CAAC;wBACxB,GAAG,CAAC,IAAI,CAAC,KAAK,CAAC,CAAA;oBACjB,CAAC;oBACD,OAAO,GAAG,CAAA;gBACZ,CAAC,EAAE,EAAc,CAAC,CAAA;gBAClB,MAAM,gBAAgB,GAAG,cAAc,CAAC,cAAc,CAAC,MAAM,GAAG,CAAC,CAAC,IAAI,CAAC,CAAC,CAAA;gBACxE,MAAM,sBAAsB,GAAG,cAAc,CAAC,cAAc,CAAC,MAAM,GAAG,CAAC,CAAC,IAAI,CAAC,CAAC,CAAA;gBAC9E,MAAM,GAAG,MAAM,IAAI,CAAC,OAAO,CAAC,QAAQ,CAAC,MAAM,CACzC;oBACE,KAAK,EAAE,OAAO;oBACd,UAAU,EAAE,IAAI,CAAC,KAAK,CAAC,IAAI,CAAC,SAAS,IAAI,IAAI;oBAC7C,WAAW,EAAE,CAAC;oBACd,MAAM,EAAE;wBACN;4BACE,IAAI,EAAE,YAAY;4BAClB,IAAI,EAAE,MAAM;4BACZ,aAAa,EAAE,EAAE,IAAI,EAAE,WAAW,EAAE;yBACrC;qBACF,EAAE,uEAAuE;oBAC1E,QAAQ,EAAE,QAAQ,CAAC,GAAG,CAAC,CAAC,O
AAO,EAAE,KAAK,EAAE,EAAE;wBACxC,IAAI,KAAK,KAAK,gBAAgB,IAAI,KAAK,KAAK,sBAAsB,EAAE,CAAC;4BACnE,OAAO;gCACL,GAAG,OAAO;gCACV,OAAO,EACL,OAAO,OAAO,CAAC,OAAO,KAAK,QAAQ;oCACjC,CAAC,CAAC;wCACE;4CACE,IAAI,EAAE,MAAM;4CACZ,IAAI,EAAE,OAAO,CAAC,OAAO;4CACrB,aAAa,EAAE;gDACb,IAAI,EAAE,WAAW;6CAClB;yCACF;qCACF;oCACH,CAAC,CAAC,OAAO,CAAC,OAAO,CAAC,GAAG,CAAC,CAAC,OAAO,EAAE,YAAY,EAAE,EAAE,CAC5C,YAAY,KAAK,OAAO,CAAC,OAAO,CAAC,MAAM,GAAG,CAAC;wCACzC,CAAC,CAAC;4CACE,GAAG,OAAO;4CACV,aAAa,EAAE;gDACb,IAAI,EAAE,WAAW;6CAClB;yCACF;wCACH,CAAC,CAAC,OAAO,CACZ;6BACR,CAAA;wBACH,CAAC;wBACD,OAAO,OAAO,CAAA;oBAChB,CAAC,CAAC;oBACF,gRAAgR;oBAChR,iCAAiC;oBACjC,gBAAgB;oBAChB,MAAM,EAAE,IAAI;iBACb,EACD,CAAC,GAAG,EAAE;oBACJ,wEAAwE;oBACxE,4FAA4F;oBAC5F,yGAAyG;oBACzG,QAAQ,OAAO,EAAE,CAAC;wBAChB,KAAK,4BAA4B,CAAC;wBAClC,KAAK,2BAA2B,CAAC;wBACjC,KAAK,wBAAwB,CAAC;wBAC9B,KAAK,yBAAyB;4BAC5B,OAAO;gCACL,OAAO,EAAE;oCACP,gBAAgB,EAAE,2BAA2B;iCAC9C;6BACF,CAAA;wBACH;4BACE,OAAO,SAAS,CAAA;oBACpB,CAAC;gBACH,CAAC,CAAC,EAAE,CACL,CAAA;gBACD,MAAK;YACP,CAAC;YACD,OAAO,CAAC,CAAC,CAAC;gBACR,MAAM,GAAG,MAAM,IAAI,CAAC,OAAO,CAAC,QAAQ,CAAC,MAAM,CAAC;oBAC1C,KAAK,EAAE,OAAO;oBACd,UAAU,EAAE,IAAI,CAAC,KAAK,CAAC,IAAI,CAAC,SAAS,IAAI,IAAI;oBAC7C,WAAW,EAAE,CAAC;oBACd,MAAM,EAAE,CAAC,EAAE,IAAI,EAAE,YAAY,EAAE,IAAI,EAAE,MAAM,EAAE,CAAC;oBAC9C,QAAQ;oBACR,SAAS;oBACT,iCAAiC;oBACjC,MAAM,EAAE,IAAI;iBACb,CAAC,CAAA;gBACF,MAAK;YACP,CAAC;QACH,CAAC;QAED,MAAM,CAAC,KAAK,CAAC,mCAAmC,CAAC,CAAA;QACjD,IAAI,KAAK,EAAE,MAAM,KAAK,IAAI,MAAM,EAAE,CAAC;YACjC,QAAQ,KAAK,CAAC,IAAI,EAAE,CAAC;gBACnB,KAAK,eAAe,CAAC,CAAC,CAAC;oBACrB,2CAA2C;oBAC3C,MAAM,KAAK,GAAG,KAAK,CAAC,OAAO,CAAC,KAAK,CAAA;oBACjC,MAAM,SAAS,GAAG;wBAChB,IAAI,EAAE,OAAO;wBACb,WAAW,EAAE,KAAK,CAAC,YAAY,IAAI,CAAC;wBACpC,YAAY,EAAE,KAAK,CAAC,aAAa,IAAI,CAAC;wBACtC,gBAAgB,EAAE,KAAK,CAAC,2BAA2B,IAAI,SAAS;wBAChE,eAAe,EAAE,KAAK,CAAC,uBAAuB,IAAI,SAAS;qBACnD,CAAA;oBACV,MAAM,CAAC,KAAK,CAAC,EAAE,KAAK,EAAE,SAAS,EAAE,EAAE,qBAAqB,CAAC,CAAA;oBACzD,MAAM,SAAS,CAAA;oBACf,MAAK;gBACP,CAAC;gBACD,KAAK,eAAe,CAAC,CAAC,CAAC;oBACrB,qGAAqG;oBAErG,MAAM,UAAU,G
AAG;wBACjB,IAAI,EAAE,OAAO;wBACb,WAAW,EAAE,CAAC;wBACd,YAAY,EAAE,KAAK,CAAC,KAAK,CAAC,aAAa,IAAI,CAAC;qBACpC,CAAA;oBACV,MAAM,CAAC,KAAK,CAAC,EAAE,KAAK,EAAE,UAAU,EAAE,EAAE,qBAAqB,CAAC,CAAA;oBAC1D,MAAM,UAAU,CAAA;oBAChB,MAAK;gBACP,CAAC;gBACD,KAAK,cAAc;oBACjB,MAAM,CAAC,KAAK,CAAC,0BAA0B,CAAC,CAAA;oBACxC,MAAK;gBACP,KAAK,qBAAqB;oBACxB,QAAQ,KAAK,CAAC,aAAa,CAAC,IAAI,EAAE,CAAC;wBACjC,KAAK,MAAM;4BACT,2FAA2F;4BAC3F,IAAI,KAAK,CAAC,KAAK,GAAG,CAAC,EAAE,CAAC;gCACpB,MAAM;oCACJ,IAAI,EAAE,MAAM;oCACZ,IAAI,EAAE,IAAI;iCACX,CAAA;4BACH,CAAC;4BACD,MAAM;gCACJ,IAAI,EAAE,MAAM;gCACZ,IAAI,EAAE,KAAK,CAAC,aAAa,CAAC,IAAI;6BAC/B,CAAA;4BACD,MAAK;oBACT,CAAC;oBACD,MAAK;gBACP,KAAK,qBAAqB;oBACxB,QAAQ,KAAK,CAAC,KAAK,CAAC,IAAI,EAAE,CAAC;wBACzB,KAAK,YAAY;4BACf,MAAM;gCACJ,IAAI,EAAE,MAAM;gCACZ,IAAI,EAAE,KAAK,CAAC,KAAK,CAAC,IAAI;6BACvB,CAAA;4BACD,MAAK;oBACT,CAAC;oBACD,MAAK;gBACP,KAAK,oBAAoB;oBACvB,MAAK;YACT,CAAC;QACH,CAAC;QAED,MAAM,CAAC,KAAK,CAAC,cAAc,CAAC,CAAA;IAC9B,CAAC;CACF"}
@@ -0,0 +1,11 @@
1
import { AiServiceBase, type AiServiceOptions, type ApiStream, type MessageParam } from './AiServiceBase';
import { type ModelInfo } from './ModelInfo';
/** DeepSeek-backed implementation of AiServiceBase via the OpenAI-compatible API (see DeepSeekService.js). */
export declare class DeepSeekService extends AiServiceBase {
    #private;
    /** Resolved model id plus its static metadata. */
    readonly model: {
        id: string;
        info: ModelInfo;
    };
    constructor(options: AiServiceOptions);
    send(systemPrompt: string, messages: MessageParam[]): ApiStream;
}
@@ -0,0 +1,64 @@
1
// source: https://github.com/cline/cline/blob/ce2610a6eafd860305ba9b12533db19f2a5385ad/src/api/providers/deepseek.ts
import OpenAI from 'openai';
import { createServiceLogger } from '../logger';
import { AiServiceBase } from './AiServiceBase';
import { deepSeekDefaultModelId, deepSeekModels } from './ModelInfo';
import { convertToOpenAiMessages } from './utils';
const logger = createServiceLogger('DeepSeekService');
/**
 * AiServiceBase implementation for DeepSeek's OpenAI-compatible chat API.
 * Streams text chunks followed by a usage chunk per request.
 */
export class DeepSeekService extends AiServiceBase {
    #client;
    // Resolved model: { id, info }. Falls back to the default model's info
    // when the requested id is not present in deepSeekModels.
    model;
    constructor(options) {
        super();
        this.#client = new OpenAI({
            baseURL: 'https://api.deepseek.com/v1',
            apiKey: options.apiKey,
        });
        const id = (options.modelId || deepSeekDefaultModelId);
        this.model = {
            id,
            info: deepSeekModels[id] ?? deepSeekModels[deepSeekDefaultModelId],
        };
    }
    /**
     * Streams a chat completion for `messages` under `systemPrompt`,
     * yielding { type: 'text' } chunks and a final { type: 'usage' } chunk.
     */
    async *send(systemPrompt, messages) {
        logger.debug({ modelId: this.model.id, messagesCount: messages.length }, 'Starting message stream');
        const openAiMessages = [
            { role: 'system', content: systemPrompt },
            ...convertToOpenAiMessages(messages),
        ];
        // Fixed: this trace message previously said 'Ollama' (copy-paste from OllamaService).
        logger.trace({ modelId: this.model.id, messagesCount: messages.length }, 'Sending messages to DeepSeek');
        const stream = await this.#client.chat.completions.create({
            model: this.model.id,
            max_completion_tokens: this.model.info.maxTokens,
            messages: openAiMessages,
            temperature: 0,
            stream: true,
            stream_options: { include_usage: true },
        });
        for await (const chunk of stream) {
            const delta = chunk.choices[0]?.delta;
            if (delta?.content) {
                yield {
                    type: 'text',
                    text: delta.content,
                };
            }
            if (chunk.usage) {
                yield {
                    type: 'usage',
                    // deepseek reports total input AND cache reads/writes, see context caching: https://api-docs.deepseek.com/guides/kv_cache
                    // where the input tokens is the sum of the cache hits/misses, while anthropic reports them as separate tokens.
                    // This is important to know for
                    // 1) context management truncation algorithm, and
                    // 2) cost calculation (NOTE: we report both input and cache stats but for now set input price to 0 since all the cost calculation will be done using cache hits/misses)
                    inputTokens: chunk.usage.prompt_tokens,
                    outputTokens: chunk.usage.completion_tokens,
                    // Fixed: hit/miss were previously swapped. Per DeepSeek's KV-cache docs,
                    // prompt_cache_hit_tokens are tokens served FROM the cache (reads) and
                    // prompt_cache_miss_tokens are tokens newly written TO the cache (writes).
                    cacheWriteTokens: chunk.usage.prompt_cache_miss_tokens || 0,
                    cacheReadTokens: chunk.usage.prompt_cache_hit_tokens || 0,
                };
            }
        }
        logger.debug('Stream ended');
    }
}
//# sourceMappingURL=DeepSeekService.js.map
@@ -0,0 +1 @@
1
+ {"version":3,"file":"DeepSeekService.js","sourceRoot":"","sources":["../../src/AiService/DeepSeekService.ts"],"names":[],"mappings":"AAAA,qHAAqH;AAErH,OAAO,MAAM,MAAM,QAAQ,CAAA;AAE3B,OAAO,EAAE,mBAAmB,EAAE,MAAM,WAAW,CAAA;AAC/C,OAAO,EAAE,aAAa,EAA4D,MAAM,iBAAiB,CAAA;AACzG,OAAO,EAAwC,sBAAsB,EAAE,cAAc,EAAE,MAAM,aAAa,CAAA;AAC1G,OAAO,EAAE,uBAAuB,EAAE,MAAM,SAAS,CAAA;AAEjD,MAAM,MAAM,GAAG,mBAAmB,CAAC,iBAAiB,CAAC,CAAA;AAErD,MAAM,OAAO,eAAgB,SAAQ,aAAa;IAChD,OAAO,CAAQ;IAEN,KAAK,CAAiC;IAE/C,YAAY,OAAyB;QACnC,KAAK,EAAE,CAAA;QAEP,IAAI,CAAC,OAAO,GAAG,IAAI,MAAM,CAAC;YACxB,OAAO,EAAE,6BAA6B;YACtC,MAAM,EAAE,OAAO,CAAC,MAAM;SACvB,CAAC,CAAA;QAEF,MAAM,EAAE,GAAG,CAAC,OAAO,CAAC,OAAO,IAAI,sBAAsB,CAAoB,CAAA;QACzE,IAAI,CAAC,KAAK,GAAG;YACX,EAAE;YACF,IAAI,EAAE,cAAc,CAAC,EAAE,CAAC,IAAI,cAAc,CAAC,sBAAsB,CAAC;SACnE,CAAA;IACH,CAAC;IAED,KAAK,CAAC,CAAC,IAAI,CAAC,YAAoB,EAAE,QAAwB;QACxD,MAAM,CAAC,KAAK,CAAC,EAAE,OAAO,EAAE,IAAI,CAAC,KAAK,CAAC,EAAE,EAAE,aAAa,EAAE,QAAQ,CAAC,MAAM,EAAE,EAAE,yBAAyB,CAAC,CAAA;QAEnG,MAAM,cAAc,GAA6C;YAC/D,EAAE,IAAI,EAAE,QAAQ,EAAE,OAAO,EAAE,YAAY,EAAE;YACzC,GAAG,uBAAuB,CAAC,QAAQ,CAAC;SACrC,CAAA;QAED,MAAM,CAAC,KAAK,CAAC,EAAE,OAAO,EAAE,IAAI,CAAC,KAAK,CAAC,EAAE,EAAE,aAAa,EAAE,QAAQ,CAAC,MAAM,EAAE,EAAE,4BAA4B,CAAC,CAAA;QAEtG,MAAM,MAAM,GAAG,MAAM,IAAI,CAAC,OAAO,CAAC,IAAI,CAAC,WAAW,CAAC,MAAM,CAAC;YACxD,KAAK,EAAE,IAAI,CAAC,KAAK,CAAC,EAAE;YACpB,qBAAqB,EAAE,IAAI,CAAC,KAAK,CAAC,IAAI,CAAC,SAAS;YAChD,QAAQ,EAAE,cAAc;YACxB,WAAW,EAAE,CAAC;YACd,MAAM,EAAE,IAAI;YACZ,cAAc,EAAE,EAAE,aAAa,EAAE,IAAI,EAAE;SACxC,CAAC,CAAA;QACF,IAAI,KAAK,EAAE,MAAM,KAAK,IAAI,MAAM,EAAE,CAAC;YACjC,MAAM,KAAK,GAAG,KAAK,CAAC,OAAO,CAAC,CAAC,CAAC,EAAE,KAAK,CAAA;YACrC,IAAI,KAAK,EAAE,OAAO,EAAE,CAAC;gBACnB,MAAM;oBACJ,IAAI,EAAE,MAAM;oBACZ,IAAI,EAAE,KAAK,CAAC,OAAO;iBACpB,CAAA;YACH,CAAC;YACD,IAAI,KAAK,CAAC,KAAK,EAAE,CAAC;gBAChB,MAAM;oBACJ,IAAI,EAAE,OAAO;oBACb,0HAA0H;oBAC1H,+GAA+G;oBAC/G,gCAAgC;oBAChC,kDAAkD;oBAClD,wKAAwK;oBACxK,WAAW,EAAE,KAAK,CAAC,KAAK,CAAC,aAAa;oBACtC,YAAY,EAAE,KAAK,CAAC,KAAK,CAAC,iBAAiB;oBAC3C,gBAAgB,EAAG,KAAK,CAAC,KAAa,CA
AC,uBAAuB,IAAI,CAAC;oBACnE,eAAe,EAAG,KAAK,CAAC,KAAa,CAAC,wBAAwB,IAAI,CAAC;iBACpE,CAAA;YACH,CAAC;QACH,CAAC;QAED,MAAM,CAAC,KAAK,CAAC,cAAc,CAAC,CAAA;IAC9B,CAAC;CACF"}
@@ -0,0 +1,79 @@
1
/**
 * Static metadata describing a model: token limits, capability flags and
 * pricing. Prices are USD per million tokens (see the inline comments in
 * ModelInfo.js).
 */
export interface ModelInfo {
    maxTokens?: number;
    contextWindow?: number;
    supportsImages?: boolean;
    supportsComputerUse?: boolean;
    supportsPromptCache: boolean;
    inputPrice?: number;
    outputPrice?: number;
    cacheWritesPrice?: number;
    cacheReadsPrice?: number;
    description?: string;
}
/** Union of the Anthropic model ids declared in anthropicModels. */
export type AnthropicModelId = keyof typeof anthropicModels;
export declare const anthropicDefaultModelId: AnthropicModelId;
/** Per-model metadata for the supported Anthropic models. */
export declare const anthropicModels: {
    readonly 'claude-3-5-sonnet-20241022': {
        readonly maxTokens: 8192;
        readonly contextWindow: 200000;
        readonly supportsImages: true;
        readonly supportsComputerUse: true;
        readonly supportsPromptCache: true;
        readonly inputPrice: 3;
        readonly outputPrice: 15;
        readonly cacheWritesPrice: 3.75;
        readonly cacheReadsPrice: 0.3;
    };
    readonly 'claude-3-5-haiku-20241022': {
        readonly maxTokens: 8192;
        readonly contextWindow: 200000;
        readonly supportsImages: false;
        readonly supportsPromptCache: true;
        readonly inputPrice: 0.8;
        readonly outputPrice: 4;
        readonly cacheWritesPrice: 1;
        readonly cacheReadsPrice: 0.08;
    };
    readonly 'claude-3-opus-20240229': {
        readonly maxTokens: 4096;
        readonly contextWindow: 200000;
        readonly supportsImages: true;
        readonly supportsPromptCache: true;
        readonly inputPrice: 15;
        readonly outputPrice: 75;
        readonly cacheWritesPrice: 18.75;
        readonly cacheReadsPrice: 1.5;
    };
    readonly 'claude-3-haiku-20240307': {
        readonly maxTokens: 4096;
        readonly contextWindow: 200000;
        readonly supportsImages: true;
        readonly supportsPromptCache: true;
        readonly inputPrice: 0.25;
        readonly outputPrice: 1.25;
        readonly cacheWritesPrice: 0.3;
        readonly cacheReadsPrice: 0.03;
    };
};
/** Fallback metadata for arbitrary OpenAI-compatible models whose details are unknown. */
export declare const openAiModelInfoSaneDefaults: {
    readonly maxTokens: -1;
    readonly contextWindow: 128000;
    readonly supportsImages: true;
    readonly supportsPromptCache: false;
    readonly inputPrice: 0;
    readonly outputPrice: 0;
};
/** Union of the DeepSeek model ids declared in deepSeekModels. */
export type DeepSeekModelId = keyof typeof deepSeekModels;
export declare const deepSeekDefaultModelId: DeepSeekModelId;
/** Per-model metadata for the supported DeepSeek models. */
export declare const deepSeekModels: {
    readonly 'deepseek-chat': {
        readonly maxTokens: 8000;
        readonly contextWindow: 64000;
        readonly supportsImages: false;
        readonly supportsPromptCache: true;
        readonly inputPrice: 0;
        readonly outputPrice: 0.28;
        readonly cacheWritesPrice: 0.14;
        readonly cacheReadsPrice: 0.014;
    };
};
@@ -0,0 +1,67 @@
1
// source: https://github.com/cline/cline/blob/1f2acc519bc71bd8f38f4df87af0e07876cba0f6/src/shared/api.ts
// Model id used when the caller does not pick an Anthropic model explicitly.
export const anthropicDefaultModelId = 'claude-3-5-sonnet-20241022';
// Capability and pricing table for the supported Anthropic models.
// All prices are USD per million tokens.
export const anthropicModels = {
  'claude-3-5-sonnet-20241022': {
    maxTokens: 8192,
    contextWindow: 200000,
    supportsImages: true,
    supportsComputerUse: true,
    supportsPromptCache: true,
    inputPrice: 3, // $3 / MTok input
    outputPrice: 15, // $15 / MTok output
    cacheWritesPrice: 3.75, // $3.75 / MTok cache write
    cacheReadsPrice: 0.3, // $0.30 / MTok cache read
  },
  'claude-3-5-haiku-20241022': {
    maxTokens: 8192,
    contextWindow: 200000,
    supportsImages: false,
    supportsPromptCache: true,
    inputPrice: 0.8,
    outputPrice: 4,
    cacheWritesPrice: 1,
    cacheReadsPrice: 0.08,
  },
  'claude-3-opus-20240229': {
    maxTokens: 4096,
    contextWindow: 200000,
    supportsImages: true,
    supportsPromptCache: true,
    inputPrice: 15,
    outputPrice: 75,
    cacheWritesPrice: 18.75,
    cacheReadsPrice: 1.5,
  },
  'claude-3-haiku-20240307': {
    maxTokens: 4096,
    contextWindow: 200000,
    supportsImages: true,
    supportsPromptCache: true,
    inputPrice: 0.25,
    outputPrice: 1.25,
    cacheWritesPrice: 0.3,
    cacheReadsPrice: 0.03,
  },
};
46
// Conservative fallback metadata for OpenAI-compatible endpoints whose
// capabilities are unknown (used by OllamaService as its `model.info`).
// NOTE(review): maxTokens of -1 presumably means "no explicit completion
// cap" — confirm against the consumers of this value. Prices default to 0
// because no pricing information is available.
export const openAiModelInfoSaneDefaults = {
  maxTokens: -1,
  contextWindow: 128000,
  supportsImages: true,
  supportsPromptCache: false,
  inputPrice: 0,
  outputPrice: 0,
};
54
// Model id used when the caller does not specify a DeepSeek model.
export const deepSeekDefaultModelId = 'deepseek-chat';
// Capability and pricing table for DeepSeek models (USD per million tokens).
export const deepSeekModels = {
  'deepseek-chat': {
    maxTokens: 8000,
    contextWindow: 64000,
    supportsImages: false,
    // DeepSeek supports context caching, but not the way Anthropic does it:
    // it reports input tokens and cache reads/writes in the same usage
    // report. FIXME: we need to show users cache stats how DeepSeek does it.
    supportsPromptCache: true,
    // Technically there is no separate input price — every input token is
    // either a cache hit or a cache miss (ApiOptions will not show this).
    inputPrice: 0,
    outputPrice: 0.28,
    cacheWritesPrice: 0.14,
    cacheReadsPrice: 0.014,
  },
};
67
+ //# sourceMappingURL=ModelInfo.js.map
@@ -0,0 +1 @@
1
+ {"version":3,"file":"ModelInfo.js","sourceRoot":"","sources":["../../src/AiService/ModelInfo.ts"],"names":[],"mappings":"AAAA,yGAAyG;AAkBzG,MAAM,CAAC,MAAM,uBAAuB,GAAqB,4BAA4B,CAAA;AACrF,MAAM,CAAC,MAAM,eAAe,GAAG;IAC7B,4BAA4B,EAAE;QAC5B,SAAS,EAAE,IAAI;QACf,aAAa,EAAE,OAAO;QACtB,cAAc,EAAE,IAAI;QACpB,mBAAmB,EAAE,IAAI;QACzB,mBAAmB,EAAE,IAAI;QACzB,UAAU,EAAE,GAAG,EAAE,8BAA8B;QAC/C,WAAW,EAAE,IAAI,EAAE,gCAAgC;QACnD,gBAAgB,EAAE,IAAI,EAAE,2BAA2B;QACnD,eAAe,EAAE,GAAG,EAAE,2BAA2B;KAClD;IACD,2BAA2B,EAAE;QAC3B,SAAS,EAAE,IAAI;QACf,aAAa,EAAE,OAAO;QACtB,cAAc,EAAE,KAAK;QACrB,mBAAmB,EAAE,IAAI;QACzB,UAAU,EAAE,GAAG;QACf,WAAW,EAAE,GAAG;QAChB,gBAAgB,EAAE,GAAG;QACrB,eAAe,EAAE,IAAI;KACtB;IACD,wBAAwB,EAAE;QACxB,SAAS,EAAE,IAAI;QACf,aAAa,EAAE,OAAO;QACtB,cAAc,EAAE,IAAI;QACpB,mBAAmB,EAAE,IAAI;QACzB,UAAU,EAAE,IAAI;QAChB,WAAW,EAAE,IAAI;QACjB,gBAAgB,EAAE,KAAK;QACvB,eAAe,EAAE,GAAG;KACrB;IACD,yBAAyB,EAAE;QACzB,SAAS,EAAE,IAAI;QACf,aAAa,EAAE,OAAO;QACtB,cAAc,EAAE,IAAI;QACpB,mBAAmB,EAAE,IAAI;QACzB,UAAU,EAAE,IAAI;QAChB,WAAW,EAAE,IAAI;QACjB,gBAAgB,EAAE,GAAG;QACrB,eAAe,EAAE,IAAI;KACtB;CAC2C,CAAA;AAE9C,MAAM,CAAC,MAAM,2BAA2B,GAAG;IACzC,SAAS,EAAE,CAAC,CAAC;IACb,aAAa,EAAE,OAAO;IACtB,cAAc,EAAE,IAAI;IACpB,mBAAmB,EAAE,KAAK;IAC1B,UAAU,EAAE,CAAC;IACb,WAAW,EAAE,CAAC;CACc,CAAA;AAK9B,MAAM,CAAC,MAAM,sBAAsB,GAAoB,eAAe,CAAA;AACtE,MAAM,CAAC,MAAM,cAAc,GAAG;IAC5B,eAAe,EAAE;QACf,SAAS,EAAE,KAAK;QAChB,aAAa,EAAE,MAAM;QACrB,cAAc,EAAE,KAAK;QACrB,mBAAmB,EAAE,IAAI,EAAE,yMAAyM;QACpO,UAAU,EAAE,CAAC,EAAE,2GAA2G;QAC1H,WAAW,EAAE,IAAI;QACjB,gBAAgB,EAAE,IAAI;QACtB,eAAe,EAAE,KAAK;KACvB;CAC2C,CAAA"}
@@ -0,0 +1,11 @@
1
+ import { AiServiceBase, type AiServiceOptions, type ApiStream, type MessageParam } from './AiServiceBase';
2
+ import { type ModelInfo } from './ModelInfo';
3
/**
 * AiService implementation backed by a local Ollama server, accessed through
 * Ollama's OpenAI-compatible API (see OllamaService.js: the client targets
 * `<baseUrl>/v1` with a dummy api key).
 */
export declare class OllamaService extends AiServiceBase {
    #private;
    /**
     * Model id taken from the constructor options (empty string if none was
     * given) plus fallback capability metadata (`openAiModelInfoSaneDefaults`).
     */
    readonly model: {
        id: string;
        info: ModelInfo;
    };
    constructor(options: AiServiceOptions);
    /** Streams the assistant's reply as incremental `text` chunks. */
    send(systemPrompt: string, messages: MessageParam[]): ApiStream;
}
@@ -0,0 +1,47 @@
1
+ // source: https://github.com/cline/cline/blob/f6c19c29a64ca84e9360df7ab2c07d128dcebe64/src/api/providers/ollama.ts
2
+ import OpenAI from 'openai';
3
+ import { createServiceLogger } from '../logger';
4
+ import { AiServiceBase } from './AiServiceBase';
5
+ import { openAiModelInfoSaneDefaults } from './ModelInfo';
6
+ import { convertToOpenAiMessages } from './utils';
7
const logger = createServiceLogger('OllamaService');
/**
 * AiService backed by a local Ollama server, reached through Ollama's
 * OpenAI-compatible endpoint via the OpenAI SDK.
 */
export class OllamaService extends AiServiceBase {
  // OpenAI SDK client pointed at the Ollama server's /v1 endpoint.
  #client;
  // { id, info }: model id from options plus fallback capability metadata.
  model;
  constructor(options) {
    super();
    const baseUrl = options.baseUrl || 'http://localhost:11434';
    this.#client = new OpenAI({
      baseURL: `${baseUrl}/v1`,
      // Ollama does not check the key, but the SDK requires a non-empty one.
      apiKey: 'ollama',
    });
    this.model = {
      id: options.modelId || '',
      // Capabilities of arbitrary local models are unknown; use sane defaults.
      info: openAiModelInfoSaneDefaults,
    };
  }
  /**
   * Sends the system prompt plus conversation to Ollama and yields the reply
   * incrementally as `{ type: 'text', text }` chunks.
   */
  async *send(systemPrompt, messages) {
    logger.debug({ modelId: this.model.id, messagesCount: messages.length }, 'Starting message stream');
    const payload = [
      { role: 'system', content: systemPrompt },
      ...convertToOpenAiMessages(messages),
    ];
    logger.trace({ modelId: this.model.id, messagesCount: messages.length }, 'Sending messages to Ollama');
    const completion = await this.#client.chat.completions.create({
      model: this.model.id,
      messages: payload,
      temperature: 0,
      stream: true,
    });
    for await (const part of completion) {
      const text = part.choices[0]?.delta?.content;
      // Skip empty deltas (role-only or finish chunks carry no content).
      if (text) {
        yield { type: 'text', text };
      }
    }
    logger.debug('Stream ended');
  }
}
47
+ //# sourceMappingURL=OllamaService.js.map
@@ -0,0 +1 @@
1
+ {"version":3,"file":"OllamaService.js","sourceRoot":"","sources":["../../src/AiService/OllamaService.ts"],"names":[],"mappings":"AAAA,mHAAmH;AAEnH,OAAO,MAAM,MAAM,QAAQ,CAAA;AAE3B,OAAO,EAAE,mBAAmB,EAAE,MAAM,WAAW,CAAA;AAC/C,OAAO,EAAE,aAAa,EAA4D,MAAM,iBAAiB,CAAA;AACzG,OAAO,EAAkB,2BAA2B,EAAE,MAAM,aAAa,CAAA;AACzE,OAAO,EAAE,uBAAuB,EAAE,MAAM,SAAS,CAAA;AAEjD,MAAM,MAAM,GAAG,mBAAmB,CAAC,eAAe,CAAC,CAAA;AAEnD,MAAM,OAAO,aAAc,SAAQ,aAAa;IAC9C,OAAO,CAAQ;IAEN,KAAK,CAAiC;IAE/C,YAAY,OAAyB;QACnC,KAAK,EAAE,CAAA;QAEP,IAAI,CAAC,OAAO,GAAG,IAAI,MAAM,CAAC;YACxB,OAAO,EAAE,GAAG,OAAO,CAAC,OAAO,IAAI,wBAAwB,KAAK;YAC5D,MAAM,EAAE,QAAQ;SACjB,CAAC,CAAA;QAEF,IAAI,CAAC,KAAK,GAAG;YACX,EAAE,EAAE,OAAO,CAAC,OAAO,IAAI,EAAE;YACzB,IAAI,EAAE,2BAA2B;SAClC,CAAA;IACH,CAAC;IAED,KAAK,CAAC,CAAC,IAAI,CAAC,YAAoB,EAAE,QAAwB;QACxD,MAAM,CAAC,KAAK,CAAC,EAAE,OAAO,EAAE,IAAI,CAAC,KAAK,CAAC,EAAE,EAAE,aAAa,EAAE,QAAQ,CAAC,MAAM,EAAE,EAAE,yBAAyB,CAAC,CAAA;QAEnG,MAAM,cAAc,GAA6C;YAC/D,EAAE,IAAI,EAAE,QAAQ,EAAE,OAAO,EAAE,YAAY,EAAE;YACzC,GAAG,uBAAuB,CAAC,QAAQ,CAAC;SACrC,CAAA;QAED,MAAM,CAAC,KAAK,CAAC,EAAE,OAAO,EAAE,IAAI,CAAC,KAAK,CAAC,EAAE,EAAE,aAAa,EAAE,QAAQ,CAAC,MAAM,EAAE,EAAE,4BAA4B,CAAC,CAAA;QAEtG,MAAM,MAAM,GAAG,MAAM,IAAI,CAAC,OAAO,CAAC,IAAI,CAAC,WAAW,CAAC,MAAM,CAAC;YACxD,KAAK,EAAE,IAAI,CAAC,KAAK,CAAC,EAAE;YACpB,QAAQ,EAAE,cAAc;YACxB,WAAW,EAAE,CAAC;YACd,MAAM,EAAE,IAAI;SACb,CAAC,CAAA;QACF,IAAI,KAAK,EAAE,MAAM,KAAK,IAAI,MAAM,EAAE,CAAC;YACjC,MAAM,KAAK,GAAG,KAAK,CAAC,OAAO,CAAC,CAAC,CAAC,EAAE,KAAK,CAAA;YACrC,IAAI,KAAK,EAAE,OAAO,EAAE,CAAC;gBACnB,MAAM;oBACJ,IAAI,EAAE,MAAM;oBACZ,IAAI,EAAE,KAAK,CAAC,OAAO;iBACpB,CAAA;YACH,CAAC;QACH,CAAC;QAED,MAAM,CAAC,KAAK,CAAC,cAAc,CAAC,CAAA;IAC9B,CAAC;CACF"}