@mariozechner/pi-ai 0.23.0 → 0.23.2
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/models.generated.d.ts +45 -11
- package/dist/models.generated.d.ts.map +1 -1
- package/dist/models.generated.js +87 -53
- package/dist/models.generated.js.map +1 -1
- package/dist/providers/openai-completions.d.ts.map +1 -1
- package/dist/providers/openai-completions.js +14 -9
- package/dist/providers/openai-completions.js.map +1 -1
- package/dist/providers/openai-responses.d.ts.map +1 -1
- package/dist/providers/openai-responses.js +4 -6
- package/dist/providers/openai-responses.js.map +1 -1
- package/package.json +1 -1
package/dist/models.generated.d.ts

@@ -359,6 +359,23 @@ export declare const MODELS: {
         };
     };
     readonly google: {
+        readonly "gemini-3-flash-preview": {
+            id: string;
+            name: string;
+            api: "google-generative-ai";
+            provider: string;
+            baseUrl: string;
+            reasoning: true;
+            input: ("image" | "text")[];
+            cost: {
+                input: number;
+                output: number;
+                cacheRead: number;
+                cacheWrite: number;
+            };
+            contextWindow: number;
+            maxTokens: number;
+        };
         readonly "gemini-2.5-flash-preview-05-20": {
             id: string;
             name: string;
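The new `google` entry can be read straight off the generated `MODELS` constant. A minimal sketch, assuming `MODELS` is re-exported from the package entry point; the import path and the `estimateCostUsd` helper are illustrative, not part of the package:

```ts
import { MODELS } from "@mariozechner/pi-ai";

// The generated entry for the new Gemini preview model.
const gemini3Flash = MODELS.google["gemini-3-flash-preview"];

// Hypothetical helper: estimate request cost from the per-token prices
// carried in the generated metadata. Assumes cost.input / cost.output are
// USD per million tokens, which is a common convention but not confirmed here.
function estimateCostUsd(
    model: { cost: { input: number; output: number } },
    inputTokens: number,
    outputTokens: number,
): number {
    return (inputTokens * model.cost.input + outputTokens * model.cost.output) / 1_000_000;
}

console.log(gemini3Flash.contextWindow, gemini3Flash.maxTokens);
console.log(estimateCostUsd(gemini3Flash, 2_000, 500));
```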
@@ -2930,6 +2947,23 @@ export declare const MODELS: {
         };
     };
     readonly openrouter: {
+        readonly "google/gemini-3-flash-preview": {
+            id: string;
+            name: string;
+            api: "openai-completions";
+            provider: string;
+            baseUrl: string;
+            reasoning: true;
+            input: ("image" | "text")[];
+            cost: {
+                input: number;
+                output: number;
+                cacheRead: number;
+                cacheWrite: number;
+            };
+            contextWindow: number;
+            maxTokens: number;
+        };
         readonly "mistralai/mistral-small-creative": {
             id: string;
             name: string;
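The same preview model is also exposed under the `openrouter` provider, where it is declared with `api: "openai-completions"` rather than `google-generative-ai`. A short sketch contrasting the two entries; the field names come from the declarations above, while the loop itself is only illustrative:

```ts
import { MODELS } from "@mariozechner/pi-ai";

// Direct Google entry vs. the OpenRouter route for the same preview model.
const direct = MODELS.google["gemini-3-flash-preview"];
const routed = MODELS.openrouter["google/gemini-3-flash-preview"];

// Both carry the same metadata shape, so they can be inspected uniformly.
for (const m of [direct, routed]) {
    console.log(`${m.provider} ${m.id}: api=${m.api}, input price ${m.cost.input}`);
}
```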
@@ -6092,7 +6126,7 @@ export declare const MODELS: {
             contextWindow: number;
             maxTokens: number;
         };
-        readonly "anthropic/claude-3.5-haiku": {
+        readonly "anthropic/claude-3.5-haiku-20241022": {
             id: string;
             name: string;
             api: "openai-completions";
@@ -6109,7 +6143,7 @@ export declare const MODELS: {
             contextWindow: number;
             maxTokens: number;
         };
-        readonly "anthropic/claude-3.5-haiku
+        readonly "anthropic/claude-3.5-haiku": {
             id: string;
             name: string;
             api: "openai-completions";
@@ -6143,7 +6177,7 @@ export declare const MODELS: {
             contextWindow: number;
             maxTokens: number;
         };
-        readonly "mistralai/ministral-
+        readonly "mistralai/ministral-8b": {
             id: string;
             name: string;
             api: "openai-completions";
@@ -6160,7 +6194,7 @@ export declare const MODELS: {
             contextWindow: number;
             maxTokens: number;
         };
-        readonly "mistralai/ministral-
+        readonly "mistralai/ministral-3b": {
             id: string;
             name: string;
             api: "openai-completions";
@@ -6262,7 +6296,7 @@ export declare const MODELS: {
             contextWindow: number;
             maxTokens: number;
         };
-        readonly "cohere/command-r-
+        readonly "cohere/command-r-08-2024": {
             id: string;
             name: string;
             api: "openai-completions";
@@ -6279,7 +6313,7 @@ export declare const MODELS: {
             contextWindow: number;
             maxTokens: number;
         };
-        readonly "cohere/command-r-08-2024": {
+        readonly "cohere/command-r-plus-08-2024": {
             id: string;
             name: string;
             api: "openai-completions";
@@ -6347,7 +6381,7 @@ export declare const MODELS: {
             contextWindow: number;
             maxTokens: number;
         };
-        readonly "meta-llama/llama-3.1-
+        readonly "meta-llama/llama-3.1-8b-instruct": {
             id: string;
             name: string;
             api: "openai-completions";
@@ -6364,7 +6398,7 @@ export declare const MODELS: {
             contextWindow: number;
             maxTokens: number;
         };
-        readonly "meta-llama/llama-3.1-
+        readonly "meta-llama/llama-3.1-405b-instruct": {
             id: string;
             name: string;
             api: "openai-completions";
@@ -6381,7 +6415,7 @@ export declare const MODELS: {
             contextWindow: number;
             maxTokens: number;
         };
-        readonly "meta-llama/llama-3.1-
+        readonly "meta-llama/llama-3.1-70b-instruct": {
             id: string;
             name: string;
             api: "openai-completions";
@@ -6585,7 +6619,7 @@ export declare const MODELS: {
             contextWindow: number;
             maxTokens: number;
         };
-        readonly "meta-llama/llama-3-
+        readonly "meta-llama/llama-3-70b-instruct": {
             id: string;
             name: string;
             api: "openai-completions";
@@ -6602,7 +6636,7 @@ export declare const MODELS: {
             contextWindow: number;
             maxTokens: number;
         };
-        readonly "meta-llama/llama-3-
+        readonly "meta-llama/llama-3-8b-instruct": {
             id: string;
             name: string;
             api: "openai-completions";
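The remaining hunks rename OpenRouter model keys rather than change their shape, so any code that indexes `MODELS.openrouter` with a string literal should be checked against the new names. A hedged sketch of a lookup that stays tied to the generated keys, so a renamed entry is caught by the compiler instead of at runtime; `getOpenRouterModel` is an illustrative helper, not part of the package:

```ts
import { MODELS } from "@mariozechner/pi-ai";

// keyof typeof keeps the accepted ids in sync with the generated registry,
// so an outdated literal fails type-checking under tsc.
type OpenRouterModelId = keyof typeof MODELS.openrouter;

function getOpenRouterModel(id: OpenRouterModelId) {
    return MODELS.openrouter[id];
}

const haiku = getOpenRouterModel("anthropic/claude-3.5-haiku-20241022");
console.log(haiku.contextWindow, haiku.maxTokens);
```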