@mariozechner/pi-ai 0.26.1 → 0.27.0

@@ -1967,6 +1967,23 @@ export declare const MODELS: {
         };
     };
     readonly zai: {
+        readonly "glm-4.7": {
+            id: string;
+            name: string;
+            api: "anthropic-messages";
+            provider: string;
+            baseUrl: string;
+            reasoning: true;
+            input: "text"[];
+            cost: {
+                input: number;
+                output: number;
+                cacheRead: number;
+                cacheWrite: number;
+            };
+            contextWindow: number;
+            maxTokens: number;
+        };
         readonly "glm-4.5-flash": {
             id: string;
             name: string;
@@ -6149,7 +6166,7 @@ export declare const MODELS: {
             contextWindow: number;
             maxTokens: number;
         };
-        readonly "anthropic/claude-3.5-haiku-20241022": {
+        readonly "anthropic/claude-3.5-haiku": {
             id: string;
             name: string;
             api: "openai-completions";
@@ -6166,7 +6183,7 @@ export declare const MODELS: {
             contextWindow: number;
             maxTokens: number;
         };
-        readonly "anthropic/claude-3.5-haiku": {
+        readonly "anthropic/claude-3.5-haiku-20241022": {
             id: string;
             name: string;
             api: "openai-completions";
@@ -6404,7 +6421,7 @@ export declare const MODELS: {
             contextWindow: number;
             maxTokens: number;
         };
-        readonly "meta-llama/llama-3.1-8b-instruct": {
+        readonly "meta-llama/llama-3.1-405b-instruct": {
             id: string;
             name: string;
             api: "openai-completions";
@@ -6421,7 +6438,7 @@ export declare const MODELS: {
             contextWindow: number;
             maxTokens: number;
         };
-        readonly "meta-llama/llama-3.1-405b-instruct": {
+        readonly "meta-llama/llama-3.1-8b-instruct": {
             id: string;
             name: string;
             api: "openai-completions";