@mariozechner/pi-ai 0.23.3 → 0.23.4
This diff compares the contents of two publicly released versions of the package as published to the public registry, and is provided for informational purposes only.
- package/dist/models.generated.d.ts +32 -4
- package/dist/models.generated.d.ts.map +1 -1
- package/dist/models.generated.js +41 -22
- package/dist/models.generated.js.map +1 -1
- package/dist/providers/google.d.ts.map +1 -1
- package/dist/providers/google.js +27 -15
- package/dist/providers/google.js.map +1 -1
- package/dist/stream.d.ts.map +1 -1
- package/dist/stream.js +22 -5
- package/dist/stream.js.map +1 -1
- package/package.json +2 -2
package/dist/models.generated.js
CHANGED
@@ -2483,6 +2483,25 @@ export const MODELS = {
         },
     },
     "github-copilot": {
+        "gemini-3-flash-preview": {
+            id: "gemini-3-flash-preview",
+            name: "Gemini 3 Flash",
+            api: "openai-completions",
+            provider: "github-copilot",
+            baseUrl: "https://api.individual.githubcopilot.com",
+            headers: { "User-Agent": "GitHubCopilotChat/0.35.0", "Editor-Version": "vscode/1.107.0", "Editor-Plugin-Version": "copilot-chat/0.35.0", "Copilot-Integration-Id": "vscode-chat" },
+            compat: { "supportsStore": false, "supportsDeveloperRole": false, "supportsReasoningEffort": false },
+            reasoning: true,
+            input: ["text", "image"],
+            cost: {
+                input: 0,
+                output: 0,
+                cacheRead: 0,
+                cacheWrite: 0,
+            },
+            contextWindow: 128000,
+            maxTokens: 64000,
+        },
         "grok-code-fast-1": {
             id: "grok-code-fast-1",
             name: "Grok Code Fast 1",
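The new entry sits under the "github-copilot" provider key of the generated MODELS map (the nesting is visible in the hunk above). A minimal TypeScript sketch of how a consumer might read it; the import path is an assumption based on the file layout in this diff (dist/models.generated.js), not confirmed package API:

// Sketch only: import path and export surface are assumptions.
import { MODELS } from "@mariozechner/pi-ai";

const gemini = MODELS["github-copilot"]["gemini-3-flash-preview"];
console.log(gemini.name);          // "Gemini 3 Flash"
console.log(gemini.contextWindow); // 128000
console.log(gemini.maxTokens);     // 64000
console.log(gemini.input);         // ["text", "image"]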
@@ -5055,8 +5074,8 @@ export const MODELS = {
             reasoning: true,
             input: ["text"],
             cost: {
-                input: 0.
-                output: 0.
+                input: 0.02,
+                output: 0.09999999999999999,
                 cacheRead: 0,
                 cacheWrite: 0,
             },
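The cost fields carry no unit in the generated file; a common convention, assumed here, is USD per million tokens. The 0.09999999999999999 output price differs from 0.1 by about 1e-17, so it is effectively 0.1 and most likely a floating-point artifact of the generator. Under that unit assumption, a rough cost helper over the cost shape shown in this diff could look like this hypothetical sketch (estimateCostUsd is not package API):

// Hedged sketch: estimate request cost from a MODELS cost block.
// Assumption (not stated in the diff): prices are USD per million tokens.
interface ModelCost {
    input: number;
    output: number;
    cacheRead: number;
    cacheWrite: number;
}

function estimateCostUsd(cost: ModelCost, inputTokens: number, outputTokens: number): number {
    return (inputTokens * cost.input + outputTokens * cost.output) / 1_000_000;
}

// With the updated prices above (input 0.02, output ≈ 0.1):
// estimateCostUsd({ input: 0.02, output: 0.09999999999999999, cacheRead: 0, cacheWrite: 0 }, 10_000, 2_000)
// ≈ 0.0004 USD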
@@ -5981,9 +6000,9 @@ export const MODELS = {
             contextWindow: 32768,
             maxTokens: 4096,
         },
-        "anthropic/claude-3.5-haiku": {
-            id: "anthropic/claude-3.5-haiku",
-            name: "Anthropic: Claude 3.5 Haiku",
+        "anthropic/claude-3.5-haiku-20241022": {
+            id: "anthropic/claude-3.5-haiku-20241022",
+            name: "Anthropic: Claude 3.5 Haiku (2024-10-22)",
             api: "openai-completions",
             provider: "openrouter",
             baseUrl: "https://openrouter.ai/api/v1",
@@ -5998,9 +6017,9 @@ export const MODELS = {
             contextWindow: 200000,
             maxTokens: 8192,
         },
-        "anthropic/claude-3.5-haiku
-            id: "anthropic/claude-3.5-haiku
-            name: "Anthropic: Claude 3.5 Haiku
+        "anthropic/claude-3.5-haiku": {
+            id: "anthropic/claude-3.5-haiku",
+            name: "Anthropic: Claude 3.5 Haiku",
             api: "openai-completions",
             provider: "openrouter",
             baseUrl: "https://openrouter.ai/api/v1",
@@ -6236,39 +6255,39 @@ export const MODELS = {
             contextWindow: 128000,
             maxTokens: 16384,
         },
-        "meta-llama/llama-3.1-
-            id: "meta-llama/llama-3.1-
-            name: "Meta: Llama 3.1
+        "meta-llama/llama-3.1-8b-instruct": {
+            id: "meta-llama/llama-3.1-8b-instruct",
+            name: "Meta: Llama 3.1 8B Instruct",
             api: "openai-completions",
             provider: "openrouter",
             baseUrl: "https://openrouter.ai/api/v1",
             reasoning: false,
             input: ["text"],
             cost: {
-                input:
-                output:
+                input: 0.02,
+                output: 0.03,
                 cacheRead: 0,
                 cacheWrite: 0,
             },
-            contextWindow:
-            maxTokens:
+            contextWindow: 131072,
+            maxTokens: 16384,
         },
-        "meta-llama/llama-3.1-
-            id: "meta-llama/llama-3.1-
-            name: "Meta: Llama 3.1
+        "meta-llama/llama-3.1-405b-instruct": {
+            id: "meta-llama/llama-3.1-405b-instruct",
+            name: "Meta: Llama 3.1 405B Instruct",
             api: "openai-completions",
             provider: "openrouter",
             baseUrl: "https://openrouter.ai/api/v1",
             reasoning: false,
             input: ["text"],
             cost: {
-                input:
-                output:
+                input: 3.5,
+                output: 3.5,
                 cacheRead: 0,
                 cacheWrite: 0,
             },
-            contextWindow:
-            maxTokens:
+            contextWindow: 130815,
+            maxTokens: 4096,
         },
         "meta-llama/llama-3.1-70b-instruct": {
             id: "meta-llama/llama-3.1-70b-instruct",