@mariozechner/pi-ai 0.19.0 → 0.19.2

@@ -3603,23 +3603,6 @@ export declare const MODELS: {
  contextWindow: number;
  maxTokens: number;
  };
- readonly "meituan/longcat-flash-chat:free": {
- id: string;
- name: string;
- api: "openai-completions";
- provider: string;
- baseUrl: string;
- reasoning: false;
- input: "text"[];
- cost: {
- input: number;
- output: number;
- cacheRead: number;
- cacheWrite: number;
- };
- contextWindow: number;
- maxTokens: number;
- };
  readonly "qwen/qwen-plus-2025-07-28": {
  id: string;
  name: string;
@@ -6068,7 +6051,7 @@ export declare const MODELS: {
  contextWindow: number;
  maxTokens: number;
  };
- readonly "meta-llama/llama-3-70b-instruct": {
+ readonly "meta-llama/llama-3-8b-instruct": {
  id: string;
  name: string;
  api: "openai-completions";
@@ -6085,7 +6068,7 @@ export declare const MODELS: {
  contextWindow: number;
  maxTokens: number;
  };
- readonly "meta-llama/llama-3-8b-instruct": {
+ readonly "meta-llama/llama-3-70b-instruct": {
  id: string;
  name: string;
  api: "openai-completions";
@@ -6187,7 +6170,7 @@ export declare const MODELS: {
  contextWindow: number;
  maxTokens: number;
  };
- readonly "openai/gpt-3.5-turbo-0613": {
+ readonly "openai/gpt-4-turbo-preview": {
  id: string;
  name: string;
  api: "openai-completions";
@@ -6204,7 +6187,7 @@ export declare const MODELS: {
  contextWindow: number;
  maxTokens: number;
  };
- readonly "openai/gpt-4-turbo-preview": {
+ readonly "openai/gpt-3.5-turbo-0613": {
  id: string;
  name: string;
  api: "openai-completions";
@@ -6306,7 +6289,7 @@ export declare const MODELS: {
  contextWindow: number;
  maxTokens: number;
  };
- readonly "openai/gpt-3.5-turbo": {
+ readonly "openai/gpt-4-0314": {
  id: string;
  name: string;
  api: "openai-completions";
@@ -6323,7 +6306,7 @@ export declare const MODELS: {
  contextWindow: number;
  maxTokens: number;
  };
- readonly "openai/gpt-4-0314": {
+ readonly "openai/gpt-3.5-turbo": {
  id: string;
  name: string;
  api: "openai-completions";