@mariozechner/pi-ai 0.23.3 → 0.23.5
This diff shows the changes between publicly released versions of the package as they appear in their respective public registries. It is provided for informational purposes only.
- package/dist/agent/agent-loop.d.ts.map +1 -1
- package/dist/agent/agent-loop.js +3 -1
- package/dist/agent/agent-loop.js.map +1 -1
- package/dist/agent/types.d.ts +13 -0
- package/dist/agent/types.d.ts.map +1 -1
- package/dist/agent/types.js.map +1 -1
- package/dist/models.generated.d.ts +67 -10
- package/dist/models.generated.d.ts.map +1 -1
- package/dist/models.generated.js +86 -34
- package/dist/models.generated.js.map +1 -1
- package/dist/providers/google.d.ts.map +1 -1
- package/dist/providers/google.js +27 -15
- package/dist/providers/google.js.map +1 -1
- package/dist/providers/openai-completions.d.ts.map +1 -1
- package/dist/providers/openai-completions.js +18 -3
- package/dist/providers/openai-completions.js.map +1 -1
- package/dist/providers/openai-responses.d.ts.map +1 -1
- package/dist/providers/openai-responses.js +17 -2
- package/dist/providers/openai-responses.js.map +1 -1
- package/dist/stream.d.ts.map +1 -1
- package/dist/stream.js +22 -5
- package/dist/stream.js.map +1 -1
- package/package.json +2 -2
package/dist/models.generated.js
CHANGED
@@ -2175,6 +2175,23 @@ export const MODELS = {
            contextWindow: 128000,
            maxTokens: 16384,
        },
+        "devstral-2512": {
+            id: "devstral-2512",
+            name: "Devstral 2",
+            api: "openai-completions",
+            provider: "mistral",
+            baseUrl: "https://api.mistral.ai/v1",
+            reasoning: false,
+            input: ["text"],
+            cost: {
+                input: 0,
+                output: 0,
+                cacheRead: 0,
+                cacheWrite: 0,
+            },
+            contextWindow: 262144,
+            maxTokens: 262144,
+        },
        "ministral-3b-latest": {
            id: "ministral-3b-latest",
            name: "Ministral 3B",
@@ -2235,8 +2252,8 @@ export const MODELS = {
            reasoning: false,
            input: ["text", "image"],
            cost: {
-                input: 0
-                output: 0
+                input: 0,
+                output: 0,
                cacheRead: 0,
                cacheWrite: 0,
            },
@@ -2483,6 +2500,25 @@ export const MODELS = {
        },
    },
    "github-copilot": {
+        "gemini-3-flash-preview": {
+            id: "gemini-3-flash-preview",
+            name: "Gemini 3 Flash",
+            api: "openai-completions",
+            provider: "github-copilot",
+            baseUrl: "https://api.individual.githubcopilot.com",
+            headers: { "User-Agent": "GitHubCopilotChat/0.35.0", "Editor-Version": "vscode/1.107.0", "Editor-Plugin-Version": "copilot-chat/0.35.0", "Copilot-Integration-Id": "vscode-chat" },
+            compat: { "supportsStore": false, "supportsDeveloperRole": false, "supportsReasoningEffort": false },
+            reasoning: true,
+            input: ["text", "image"],
+            cost: {
+                input: 0,
+                output: 0,
+                cacheRead: 0,
+                cacheWrite: 0,
+            },
+            contextWindow: 128000,
+            maxTokens: 64000,
+        },
        "grok-code-fast-1": {
            id: "grok-code-fast-1",
            name: "Grok Code Fast 1",
@@ -2561,11 +2597,10 @@ export const MODELS = {
        "oswe-vscode-prime": {
            id: "oswe-vscode-prime",
            name: "Raptor Mini (Preview)",
-            api: "openai-
+            api: "openai-responses",
            provider: "github-copilot",
            baseUrl: "https://api.individual.githubcopilot.com",
            headers: { "User-Agent": "GitHubCopilotChat/0.35.0", "Editor-Version": "vscode/1.107.0", "Editor-Plugin-Version": "copilot-chat/0.35.0", "Copilot-Integration-Id": "vscode-chat" },
-            compat: { "supportsStore": false, "supportsDeveloperRole": false, "supportsReasoningEffort": false },
            reasoning: true,
            input: ["text", "image"],
            cost: {
@@ -2887,6 +2922,23 @@ export const MODELS = {
            contextWindow: 256000,
            maxTokens: 4096,
        },
+        "nvidia/nemotron-3-nano-30b-a3b": {
+            id: "nvidia/nemotron-3-nano-30b-a3b",
+            name: "NVIDIA: Nemotron 3 Nano 30B A3B",
+            api: "openai-completions",
+            provider: "openrouter",
+            baseUrl: "https://openrouter.ai/api/v1",
+            reasoning: true,
+            input: ["text"],
+            cost: {
+                input: 0.06,
+                output: 0.24,
+                cacheRead: 0,
+                cacheWrite: 0,
+            },
+            contextWindow: 262144,
+            maxTokens: 4096,
+        },
        "openai/gpt-5.2-chat": {
            id: "openai/gpt-5.2-chat",
            name: "OpenAI: GPT-5.2 Chat",
@@ -3168,13 +3220,13 @@ export const MODELS = {
            reasoning: true,
            input: ["text"],
            cost: {
-                input: 0.
+                input: 0.24,
                output: 0.38,
-                cacheRead: 0,
+                cacheRead: 0.11,
                cacheWrite: 0,
            },
            contextWindow: 163840,
-            maxTokens:
+            maxTokens: 163840,
        },
        "prime-intellect/intellect-3": {
            id: "prime-intellect/intellect-3",
@@ -5055,8 +5107,8 @@ export const MODELS = {
            reasoning: true,
            input: ["text"],
            cost: {
-                input: 0.
-                output: 0.
+                input: 0.02,
+                output: 0.09999999999999999,
                cacheRead: 0,
                cacheWrite: 0,
            },
@@ -5429,13 +5481,13 @@ export const MODELS = {
            reasoning: true,
            input: ["text"],
            cost: {
-                input: 0.
-                output: 0.
-                cacheRead: 0,
+                input: 0.19999999999999998,
+                output: 0.88,
+                cacheRead: 0.106,
                cacheWrite: 0,
            },
-            contextWindow:
-            maxTokens:
+            contextWindow: 163840,
+            maxTokens: 4096,
        },
        "mistralai/mistral-small-3.1-24b-instruct:free": {
            id: "mistralai/mistral-small-3.1-24b-instruct:free",
@@ -5981,9 +6033,9 @@ export const MODELS = {
            contextWindow: 32768,
            maxTokens: 4096,
        },
-        "anthropic/claude-3.5-haiku": {
-            id: "anthropic/claude-3.5-haiku",
-            name: "Anthropic: Claude 3.5 Haiku",
+        "anthropic/claude-3.5-haiku-20241022": {
+            id: "anthropic/claude-3.5-haiku-20241022",
+            name: "Anthropic: Claude 3.5 Haiku (2024-10-22)",
            api: "openai-completions",
            provider: "openrouter",
            baseUrl: "https://openrouter.ai/api/v1",
@@ -5998,9 +6050,9 @@ export const MODELS = {
            contextWindow: 200000,
            maxTokens: 8192,
        },
-        "anthropic/claude-3.5-haiku
-            id: "anthropic/claude-3.5-haiku
-            name: "Anthropic: Claude 3.5 Haiku
+        "anthropic/claude-3.5-haiku": {
+            id: "anthropic/claude-3.5-haiku",
+            name: "Anthropic: Claude 3.5 Haiku",
            api: "openai-completions",
            provider: "openrouter",
            baseUrl: "https://openrouter.ai/api/v1",
@@ -6236,39 +6288,39 @@ export const MODELS = {
            contextWindow: 128000,
            maxTokens: 16384,
        },
-        "meta-llama/llama-3.1-
-            id: "meta-llama/llama-3.1-
-            name: "Meta: Llama 3.1
+        "meta-llama/llama-3.1-8b-instruct": {
+            id: "meta-llama/llama-3.1-8b-instruct",
+            name: "Meta: Llama 3.1 8B Instruct",
            api: "openai-completions",
            provider: "openrouter",
            baseUrl: "https://openrouter.ai/api/v1",
            reasoning: false,
            input: ["text"],
            cost: {
-                input:
-                output:
+                input: 0.02,
+                output: 0.03,
                cacheRead: 0,
                cacheWrite: 0,
            },
-            contextWindow:
-            maxTokens:
+            contextWindow: 131072,
+            maxTokens: 16384,
        },
-        "meta-llama/llama-3.1-
-            id: "meta-llama/llama-3.1-
-            name: "Meta: Llama 3.1
+        "meta-llama/llama-3.1-405b-instruct": {
+            id: "meta-llama/llama-3.1-405b-instruct",
+            name: "Meta: Llama 3.1 405B Instruct",
            api: "openai-completions",
            provider: "openrouter",
            baseUrl: "https://openrouter.ai/api/v1",
            reasoning: false,
            input: ["text"],
            cost: {
-                input:
-                output:
+                input: 3.5,
+                output: 3.5,
                cacheRead: 0,
                cacheWrite: 0,
            },
-            contextWindow:
-            maxTokens:
+            contextWindow: 130815,
+            maxTokens: 4096,
        },
        "meta-llama/llama-3.1-70b-instruct": {
            id: "meta-llama/llama-3.1-70b-instruct",