@mariozechner/pi-ai 0.50.3 → 0.50.4
- package/dist/models.generated.d.ts +51 -51
- package/dist/models.generated.d.ts.map +1 -1
- package/dist/models.generated.js +67 -67
- package/dist/models.generated.js.map +1 -1
- package/dist/providers/anthropic.d.ts.map +1 -1
- package/dist/providers/anthropic.js +2 -2
- package/dist/providers/anthropic.js.map +1 -1
- package/dist/providers/google-gemini-cli.d.ts.map +1 -1
- package/dist/providers/google-gemini-cli.js +1 -1
- package/dist/providers/google-gemini-cli.js.map +1 -1
- package/dist/providers/google-shared.d.ts.map +1 -1
- package/dist/providers/google-shared.js +2 -2
- package/dist/providers/google-shared.js.map +1 -1
- package/dist/providers/openai-completions.d.ts.map +1 -1
- package/dist/providers/openai-completions.js +14 -0
- package/dist/providers/openai-completions.js.map +1 -1
- package/dist/types.d.ts +13 -0
- package/dist/types.d.ts.map +1 -1
- package/dist/types.js.map +1 -1
- package/package.json +1 -1
package/dist/models.generated.d.ts

@@ -5079,6 +5079,23 @@ export declare const MODELS: {
         contextWindow: number;
         maxTokens: number;
     };
+    readonly "glm-4.7-free": {
+        id: string;
+        name: string;
+        api: "openai-completions";
+        provider: string;
+        baseUrl: string;
+        reasoning: true;
+        input: "text"[];
+        cost: {
+            input: number;
+            output: number;
+            cacheRead: number;
+            cacheWrite: number;
+        };
+        contextWindow: number;
+        maxTokens: number;
+    };
     readonly "gpt-5": {
         id: string;
         name: string;
@@ -5283,6 +5300,23 @@ export declare const MODELS: {
         contextWindow: number;
         maxTokens: number;
     };
+    readonly "kimi-k2.5-free": {
+        id: string;
+        name: string;
+        api: "openai-completions";
+        provider: string;
+        baseUrl: string;
+        reasoning: true;
+        input: ("image" | "text")[];
+        cost: {
+            input: number;
+            output: number;
+            cacheRead: number;
+            cacheWrite: number;
+        };
+        contextWindow: number;
+        maxTokens: number;
+    };
     readonly "minimax-m2.1": {
         id: string;
         name: string;
@@ -5300,6 +5334,23 @@ export declare const MODELS: {
         contextWindow: number;
         maxTokens: number;
     };
+    readonly "minimax-m2.1-free": {
+        id: string;
+        name: string;
+        api: "anthropic-messages";
+        provider: string;
+        baseUrl: string;
+        reasoning: true;
+        input: "text"[];
+        cost: {
+            input: number;
+            output: number;
+            cacheRead: number;
+            cacheWrite: number;
+        };
+        contextWindow: number;
+        maxTokens: number;
+    };
     readonly "qwen3-coder": {
         id: string;
         name: string;
@@ -6067,23 +6118,6 @@ export declare const MODELS: {
         contextWindow: number;
         maxTokens: number;
     };
-    readonly "google/gemini-2.0-flash-exp:free": {
-        id: string;
-        name: string;
-        api: "openai-completions";
-        provider: string;
-        baseUrl: string;
-        reasoning: false;
-        input: ("image" | "text")[];
-        cost: {
-            input: number;
-            output: number;
-            cacheRead: number;
-            cacheWrite: number;
-        };
-        contextWindow: number;
-        maxTokens: number;
-    };
     readonly "google/gemini-2.0-flash-lite-001": {
         id: string;
         name: string;
@@ -10948,40 +10982,6 @@ export declare const MODELS: {
         contextWindow: number;
         maxTokens: number;
     };
-    readonly "stealth/sonoma-dusk-alpha": {
-        id: string;
-        name: string;
-        api: "anthropic-messages";
-        provider: string;
-        baseUrl: string;
-        reasoning: false;
-        input: ("image" | "text")[];
-        cost: {
-            input: number;
-            output: number;
-            cacheRead: number;
-            cacheWrite: number;
-        };
-        contextWindow: number;
-        maxTokens: number;
-    };
-    readonly "stealth/sonoma-sky-alpha": {
-        id: string;
-        name: string;
-        api: "anthropic-messages";
-        provider: string;
-        baseUrl: string;
-        reasoning: false;
-        input: ("image" | "text")[];
-        cost: {
-            input: number;
-            output: number;
-            cacheRead: number;
-            cacheWrite: number;
-        };
-        contextWindow: number;
-        maxTokens: number;
-    };
     readonly "vercel/v0-1.0-md": {
         id: string;
         name: string;