@polka-codes/core 0.7.4 → 0.7.5

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -82,6 +82,7 @@ declare interface AiServiceOptions {
82
82
  baseUrl?: string;
83
83
  usageMeter: UsageMeter;
84
84
  enableCache?: boolean;
85
+ parameters: Record<string, any>;
85
86
  }
86
87
  export { AiServiceOptions }
87
88
  export { AiServiceOptions as AiServiceOptions_alias_1 }
@@ -182,6 +183,7 @@ declare const anthropicModels: {
182
183
  readonly outputPrice: 15;
183
184
  readonly cacheWritesPrice: 3.75;
184
185
  readonly cacheReadsPrice: 0.3;
186
+ readonly reasoning: true;
185
187
  };
186
188
  readonly 'claude-3-5-sonnet-20241022': {
187
189
  readonly maxTokens: 8192;
@@ -1541,6 +1543,7 @@ declare interface ModelInfo {
1541
1543
  outputPrice?: number;
1542
1544
  cacheWritesPrice?: number;
1543
1545
  cacheReadsPrice?: number;
1546
+ reasoning?: boolean;
1544
1547
  }
1545
1548
  export { ModelInfo }
1546
1549
  export { ModelInfo as ModelInfo_alias_1 }
@@ -1558,6 +1561,7 @@ declare const modelInfos: {
1558
1561
  readonly outputPrice: 15;
1559
1562
  readonly cacheWritesPrice: 3.75;
1560
1563
  readonly cacheReadsPrice: 0.3;
1564
+ readonly reasoning: true;
1561
1565
  };
1562
1566
  readonly 'claude-3-5-sonnet-20241022': {
1563
1567
  readonly maxTokens: 8192;
package/dist/index.js CHANGED
@@ -74,7 +74,8 @@ var anthropicModels = {
74
74
  inputPrice: 3,
75
75
  outputPrice: 15,
76
76
  cacheWritesPrice: 3.75,
77
- cacheReadsPrice: 0.3
77
+ cacheReadsPrice: 0.3,
78
+ reasoning: true
78
79
  },
79
80
  "claude-3-5-sonnet-20241022": {
80
81
  maxTokens: 8192,
@@ -183,6 +184,14 @@ var AnthropicService = class extends AiServiceBase {
183
184
  let stream;
184
185
  const modelId = this.model.id;
185
186
  const cacheControl = this.#options.enableCache ? { type: "ephemeral" } : void 0;
187
+ let temperature = 0;
188
+ let thinkingBudgetTokens = 0;
189
+ if (this.model.info.reasoning) {
190
+ thinkingBudgetTokens = this.#options.parameters.thinkingBudgetTokens ?? 0;
191
+ }
192
+ if (thinkingBudgetTokens > 0) {
193
+ temperature = void 0;
194
+ }
186
195
  switch (modelId) {
187
196
  // 'latest' alias does not support cache_control
188
197
  case "claude-3-7-sonnet-20250219":
@@ -201,7 +210,8 @@ var AnthropicService = class extends AiServiceBase {
201
210
  stream = await this.#client.messages.create({
202
211
  model: modelId,
203
212
  max_tokens: this.model.info.maxTokens || 8192,
204
- temperature: 0,
213
+ thinking: thinkingBudgetTokens ? { type: "enabled", budget_tokens: thinkingBudgetTokens } : void 0,
214
+ temperature,
205
215
  system: [
206
216
  {
207
217
  text: systemPrompt,
@@ -241,8 +251,6 @@ var AnthropicService = class extends AiServiceBase {
241
251
  temperature: 0,
242
252
  system: [{ text: systemPrompt, type: "text" }],
243
253
  messages,
244
- // tools,
245
- // tool_choice: { type: "auto" },
246
254
  stream: true
247
255
  });
248
256
  break;
@@ -285,6 +293,18 @@ var AnthropicService = class extends AiServiceBase {
285
293
  text: chunk.content_block.text
286
294
  };
287
295
  break;
296
+ case "thinking":
297
+ yield {
298
+ type: "reasoning",
299
+ text: chunk.content_block.thinking
300
+ };
301
+ break;
302
+ case "redacted_thinking":
303
+ yield {
304
+ type: "reasoning",
305
+ text: "[Redacted by provider]"
306
+ };
307
+ break;
288
308
  }
289
309
  break;
290
310
  case "content_block_delta":
@@ -295,6 +315,12 @@ var AnthropicService = class extends AiServiceBase {
295
315
  text: chunk.delta.text
296
316
  };
297
317
  break;
318
+ case "thinking_delta":
319
+ yield {
320
+ type: "reasoning",
321
+ text: chunk.delta.thinking
322
+ };
323
+ break;
298
324
  }
299
325
  break;
300
326
  case "content_block_stop":
@@ -354,10 +380,18 @@ function convertToOpenAiMessages(anthropicMessages) {
354
380
  role: "user",
355
381
  content: nonToolMessages.map((part) => {
356
382
  if (part.type === "image") {
383
+ if (part.source.type === "base64") {
384
+ return {
385
+ type: "image_url",
386
+ image_url: {
387
+ url: `data:${part.source.media_type};base64,${part.source.data}`
388
+ }
389
+ };
390
+ }
357
391
  return {
358
392
  type: "image_url",
359
393
  image_url: {
360
- url: `data:${part.source.media_type};base64,${part.source.data}`
394
+ url: part.source.url
361
395
  }
362
396
  };
363
397
  }
@@ -618,6 +652,20 @@ var OpenRouterService = class extends AiServiceBase {
618
652
  maxTokens = 8192;
619
653
  break;
620
654
  }
655
+ let reasoning = {};
656
+ switch (this.model.id) {
657
+ case "anthropic/claude-3.7-sonnet":
658
+ case "anthropic/claude-3.7-sonnet:beta":
659
+ case "anthropic/claude-3.7-sonnet:thinking":
660
+ case "anthropic/claude-3-7-sonnet":
661
+ case "anthropic/claude-3-7-sonnet:beta": {
662
+ const budget_tokens = this.#options.parameters.thinkingBudgetTokens || 0;
663
+ if (budget_tokens > 0) {
664
+ reasoning = { max_tokens: budget_tokens };
665
+ }
666
+ break;
667
+ }
668
+ }
621
669
  let shouldApplyMiddleOutTransform = !this.model.info.supportsPromptCache;
622
670
  if (this.model.id === "deepseek/deepseek-chat") {
623
671
  shouldApplyMiddleOutTransform = true;
@@ -629,7 +677,8 @@ var OpenRouterService = class extends AiServiceBase {
629
677
  temperature: 0,
630
678
  stream: true,
631
679
  transforms: shouldApplyMiddleOutTransform ? ["middle-out"] : void 0,
632
- include_reasoning: true
680
+ include_reasoning: true,
681
+ ...reasoning
633
682
  });
634
683
  let genId;
635
684
  for await (const chunk of stream) {
@@ -2202,19 +2251,23 @@ ${agents}`;
2202
2251
  for (let i = 0; i < retryCount; i++) {
2203
2252
  currentAssistantMessage = "";
2204
2253
  const stream = this.ai.send(this.config.systemPrompt, this.messages);
2205
- for await (const chunk of stream) {
2206
- switch (chunk.type) {
2207
- case "usage":
2208
- await this.#callback({ kind: "Usage" /* Usage */, agent: this });
2209
- break;
2210
- case "text":
2211
- currentAssistantMessage += chunk.text;
2212
- await this.#callback({ kind: "Text" /* Text */, agent: this, newText: chunk.text });
2213
- break;
2214
- case "reasoning":
2215
- await this.#callback({ kind: "Reasoning" /* Reasoning */, agent: this, newText: chunk.text });
2216
- break;
2254
+ try {
2255
+ for await (const chunk of stream) {
2256
+ switch (chunk.type) {
2257
+ case "usage":
2258
+ await this.#callback({ kind: "Usage" /* Usage */, agent: this });
2259
+ break;
2260
+ case "text":
2261
+ currentAssistantMessage += chunk.text;
2262
+ await this.#callback({ kind: "Text" /* Text */, agent: this, newText: chunk.text });
2263
+ break;
2264
+ case "reasoning":
2265
+ await this.#callback({ kind: "Reasoning" /* Reasoning */, agent: this, newText: chunk.text });
2266
+ break;
2267
+ }
2217
2268
  }
2269
+ } catch (error) {
2270
+ console.error("Error in stream:", error);
2218
2271
  }
2219
2272
  if (currentAssistantMessage) {
2220
2273
  break;
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "@polka-codes/core",
3
- "version": "0.7.4",
3
+ "version": "0.7.5",
4
4
  "license": "AGPL-3.0",
5
5
  "author": "github@polka.codes",
6
6
  "type": "module",
@@ -15,7 +15,7 @@
15
15
  "build": "tsup src/index.ts --experimental-dts --format esm --clean"
16
16
  },
17
17
  "dependencies": {
18
- "@anthropic-ai/sdk": "^0.36.2",
18
+ "@anthropic-ai/sdk": "0.39.0",
19
19
  "lodash": "^4.17.21",
20
20
  "openai": "^4.80.0"
21
21
  }