@polka-codes/core 0.8.15 → 0.8.16

@@ -210,8 +210,32 @@ export { AnthropicModelId }
 export { AnthropicModelId as AnthropicModelId_alias_1 }
 
 declare const anthropicModels: {
+    readonly 'claude-opus-4-20250514': {
+        readonly maxTokens: 32000;
+        readonly contextWindow: 200000;
+        readonly supportsImages: true;
+        readonly supportsComputerUse: true;
+        readonly supportsPromptCache: true;
+        readonly inputPrice: 15;
+        readonly outputPrice: 75;
+        readonly cacheWritesPrice: 18.75;
+        readonly cacheReadsPrice: 1.5;
+        readonly reasoning: true;
+    };
+    readonly 'claude-sonnet-4-20250514': {
+        readonly maxTokens: 64000;
+        readonly contextWindow: 200000;
+        readonly supportsImages: true;
+        readonly supportsComputerUse: true;
+        readonly supportsPromptCache: true;
+        readonly inputPrice: 3;
+        readonly outputPrice: 15;
+        readonly cacheWritesPrice: 3.75;
+        readonly cacheReadsPrice: 0.3;
+        readonly reasoning: true;
+    };
     readonly 'claude-3-7-sonnet-20250219': {
-        readonly maxTokens: 8192;
+        readonly maxTokens: 64000;
         readonly contextWindow: 200000;
         readonly supportsImages: true;
         readonly supportsComputerUse: true;
@@ -2406,8 +2430,32 @@ export { ModelInfo as ModelInfo_alias_2 }
 
 declare const modelInfos: {
     anthropic: {
+        readonly 'claude-opus-4-20250514': {
+            readonly maxTokens: 32000;
+            readonly contextWindow: 200000;
+            readonly supportsImages: true;
+            readonly supportsComputerUse: true;
+            readonly supportsPromptCache: true;
+            readonly inputPrice: 15;
+            readonly outputPrice: 75;
+            readonly cacheWritesPrice: 18.75;
+            readonly cacheReadsPrice: 1.5;
+            readonly reasoning: true;
+        };
+        readonly 'claude-sonnet-4-20250514': {
+            readonly maxTokens: 64000;
+            readonly contextWindow: 200000;
+            readonly supportsImages: true;
+            readonly supportsComputerUse: true;
+            readonly supportsPromptCache: true;
+            readonly inputPrice: 3;
+            readonly outputPrice: 15;
+            readonly cacheWritesPrice: 3.75;
+            readonly cacheReadsPrice: 0.3;
+            readonly reasoning: true;
+        };
         readonly 'claude-3-7-sonnet-20250219': {
-            readonly maxTokens: 8192;
+            readonly maxTokens: 64000;
             readonly contextWindow: 200000;
             readonly supportsImages: true;
             readonly supportsComputerUse: true;
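
The inputPrice / outputPrice / cacheWritesPrice / cacheReadsPrice values in these entries line up with Anthropic's published per-million-token rates, so they are most naturally read as USD per million tokens. Under that assumed reading, here is a minimal TypeScript sketch of turning such an entry into a per-request cost estimate; ModelPricing and estimateCostUsd are illustrative names, not exports of @polka-codes/core.

```ts
// Illustrative only: the pricing shape added above, read as USD per million
// tokens. estimateCostUsd is a hypothetical helper, not a package export.
type ModelPricing = {
  inputPrice: number;
  outputPrice: number;
  cacheWritesPrice?: number;
  cacheReadsPrice?: number;
};

const claudeSonnet4: ModelPricing = {
  inputPrice: 3,
  outputPrice: 15,
  cacheWritesPrice: 3.75,
  cacheReadsPrice: 0.3,
};

function estimateCostUsd(
  pricing: ModelPricing,
  usage: { input: number; output: number; cacheWrites?: number; cacheReads?: number },
): number {
  // Each token class is billed at its own per-million-token rate.
  const perMillion = (tokens = 0, price = 0) => (tokens / 1_000_000) * price;
  return (
    perMillion(usage.input, pricing.inputPrice) +
    perMillion(usage.output, pricing.outputPrice) +
    perMillion(usage.cacheWrites, pricing.cacheWritesPrice) +
    perMillion(usage.cacheReads, pricing.cacheReadsPrice)
  );
}

// Example: 12k input + 2k output tokens on claude-sonnet-4 ≈ $0.066.
console.log(estimateCostUsd(claudeSonnet4, { input: 12_000, output: 2_000 }));
```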
package/dist/index.js CHANGED
@@ -69,8 +69,32 @@ import { Anthropic } from "@anthropic-ai/sdk";
 // src/AiService/ModelInfo.ts
 var anthropicDefaultModelId = "claude-3-7-sonnet-20250219";
 var anthropicModels = {
+  "claude-opus-4-20250514": {
+    maxTokens: 32e3,
+    contextWindow: 2e5,
+    supportsImages: true,
+    supportsComputerUse: true,
+    supportsPromptCache: true,
+    inputPrice: 15,
+    outputPrice: 75,
+    cacheWritesPrice: 18.75,
+    cacheReadsPrice: 1.5,
+    reasoning: true
+  },
+  "claude-sonnet-4-20250514": {
+    maxTokens: 64e3,
+    contextWindow: 2e5,
+    supportsImages: true,
+    supportsComputerUse: true,
+    supportsPromptCache: true,
+    inputPrice: 3,
+    outputPrice: 15,
+    cacheWritesPrice: 3.75,
+    cacheReadsPrice: 0.3,
+    reasoning: true
+  },
   "claude-3-7-sonnet-20250219": {
-    maxTokens: 8192,
+    maxTokens: 64e3,
     contextWindow: 2e5,
     supportsImages: true,
     supportsComputerUse: true,
@@ -198,6 +222,7 @@ var AnthropicService = class extends AiServiceBase {
     }
     switch (modelId) {
       // 'latest' alias does not support cache_control
+      case "claude-sonnet-4-20250514":
       case "claude-3-7-sonnet-20250219":
       case "claude-3-5-sonnet-20241022":
       case "claude-3-5-haiku-20241022":
@@ -590,71 +615,32 @@ var OpenRouterService = class extends AiServiceBase {
       ...convertToOpenAiMessages(messages)
     ];
     const cacheControl = this.#options.enableCache ? { type: "ephemeral" } : void 0;
-    switch (this.model.id) {
-      case "anthropic/claude-3.7-sonnet:thinking":
-      case "anthropic/claude-3.7-sonnet":
-      case "anthropic/claude-3.7-sonnet:beta":
-      case "anthropic/claude-3-7-sonnet":
-      case "anthropic/claude-3-7-sonnet:beta":
-      case "anthropic/claude-3.5-sonnet":
-      case "anthropic/claude-3.5-sonnet:beta":
-      case "anthropic/claude-3.5-sonnet-20240620":
-      case "anthropic/claude-3.5-sonnet-20240620:beta":
-      case "anthropic/claude-3-5-haiku":
-      case "anthropic/claude-3-5-haiku:beta":
-      case "anthropic/claude-3-5-haiku-20241022":
-      case "anthropic/claude-3-5-haiku-20241022:beta":
-      case "anthropic/claude-3-haiku":
-      case "anthropic/claude-3-haiku:beta":
-      case "anthropic/claude-3-opus":
-      case "anthropic/claude-3-opus:beta": {
-        openAiMessages[0] = {
-          role: "system",
-          content: [
-            {
-              type: "text",
-              text: systemPrompt,
-              // @ts-ignore-next-line
-              cache_control: cacheControl
-            }
-          ]
-        };
-        const lastTwoUserMessages = openAiMessages.filter((msg) => msg.role === "user").slice(-2);
-        for (const msg of lastTwoUserMessages) {
-          if (typeof msg.content === "string") {
-            msg.content = [{ type: "text", text: msg.content }];
+    if (this.model.id.startsWith("anthropic/claude")) {
+      openAiMessages[0] = {
+        role: "system",
+        content: [
+          {
+            type: "text",
+            text: systemPrompt,
+            // @ts-ignore-next-line
+            cache_control: cacheControl
           }
-          if (Array.isArray(msg.content)) {
-            let lastTextPart = msg.content.filter((part) => part.type === "text").pop();
-            if (!lastTextPart) {
-              lastTextPart = { type: "text", text: "..." };
-              msg.content.push(lastTextPart);
+        ]
+      };
+      const lastTwoUserMessages = openAiMessages.filter((msg) => msg.role === "user").slice(-2);
+      for (const msg of lastTwoUserMessages) {
+        if (typeof msg.content === "string") {
+          msg.content = [{ type: "text", text: msg.content }];
+        }
+        if (Array.isArray(msg.content)) {
+          let lastTextPart = msg.content.filter((part) => part.type === "text").pop();
+          if (!lastTextPart) {
+            lastTextPart = { type: "text", text: "..." };
+            msg.content.push(lastTextPart);
           }
+          lastTextPart.cache_control = cacheControl;
         }
-        break;
       }
-      default:
-        break;
-    }
-    let maxTokens;
-    switch (this.model.id) {
-      case "anthropic/claude-3.7-sonnet:thinking":
-      case "anthropic/claude-3.7-sonnet":
-      case "anthropic/claude-3.7-sonnet:beta":
-      case "anthropic/claude-3-7-sonnet":
-      case "anthropic/claude-3-7-sonnet:beta":
-      case "anthropic/claude-3.5-sonnet":
-      case "anthropic/claude-3.5-sonnet:beta":
-      case "anthropic/claude-3.5-sonnet-20240620":
-      case "anthropic/claude-3.5-sonnet-20240620:beta":
-      case "anthropic/claude-3-5-haiku":
-      case "anthropic/claude-3-5-haiku:beta":
-      case "anthropic/claude-3-5-haiku-20241022":
-      case "anthropic/claude-3-5-haiku-20241022:beta":
-        maxTokens = 8192;
-        break;
     }
     let reasoning = {};
     switch (this.model.id) {
@@ -662,7 +648,9 @@ var OpenRouterService = class extends AiServiceBase {
      case "anthropic/claude-3.7-sonnet:beta":
      case "anthropic/claude-3.7-sonnet:thinking":
      case "anthropic/claude-3-7-sonnet":
-     case "anthropic/claude-3-7-sonnet:beta": {
+     case "anthropic/claude-3-7-sonnet:beta":
+     case "anthropic/claude-opus-4":
+     case "anthropic/claude-sonnet-4": {
        const budget_tokens = this.#options.parameters.thinkingBudgetTokens || 0;
        if (budget_tokens > 0) {
          reasoning = { max_tokens: budget_tokens };
@@ -676,7 +664,6 @@ var OpenRouterService = class extends AiServiceBase {
     }
     const stream = await this.#client.chat.completions.create({
       model: this.model.id,
-      max_completion_tokens: maxTokens,
       messages: openAiMessages,
       temperature: 0,
       stream: true,
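
Read together, the rewritten OpenRouter path above marks the system prompt and the last two user messages with an ephemeral cache_control whenever the model id starts with "anthropic/claude", instead of enumerating individual model ids. Below is a condensed, standalone TypeScript sketch of that pattern; the types are simplified and markEphemeralCache is an illustrative helper, not an export of @polka-codes/core.

```ts
// Illustrative restatement of the prompt-cache marking pattern above.
// markEphemeralCache and its types are hypothetical, not package exports.
type TextPart = { type: "text"; text: string; cache_control?: { type: "ephemeral" } };
type ChatMessage = { role: "system" | "user" | "assistant"; content: string | TextPart[] };

function markEphemeralCache(messages: ChatMessage[], modelId: string, enableCache: boolean): void {
  // Only Anthropic Claude models routed through OpenRouter get cache breakpoints.
  if (!modelId.startsWith("anthropic/claude")) return;
  const cacheControl = enableCache ? { type: "ephemeral" as const } : undefined;

  // The system prompt gets a cache breakpoint.
  const system = messages[0];
  if (system && typeof system.content === "string") {
    system.content = [{ type: "text", text: system.content, cache_control: cacheControl }];
  }

  // The last two user messages get a breakpoint on their final text part.
  for (const msg of messages.filter((m) => m.role === "user").slice(-2)) {
    if (typeof msg.content === "string") {
      msg.content = [{ type: "text", text: msg.content }];
    }
    let lastText = msg.content.filter((p) => p.type === "text").pop();
    if (!lastText) {
      lastText = { type: "text", text: "..." };
      msg.content.push(lastText);
    }
    lastText.cache_control = cacheControl;
  }
}
```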
package/package.json CHANGED
@@ -1,6 +1,6 @@
 {
   "name": "@polka-codes/core",
-  "version": "0.8.15",
+  "version": "0.8.16",
   "license": "AGPL-3.0",
   "author": "github@polka.codes",
   "type": "module",