@mariozechner/pi-ai 0.9.0 → 0.9.2

This diff shows the content of publicly available package versions released to one of the supported registries. It is provided for informational purposes only and reflects the changes between package versions as they appear in their respective public registries.
@@ -1453,23 +1453,6 @@ export declare const MODELS: {
  contextWindow: number;
  maxTokens: number;
  };
- readonly "qwen-3-coder-480b": {
- id: string;
- name: string;
- api: "openai-completions";
- provider: string;
- baseUrl: string;
- reasoning: false;
- input: "text"[];
- cost: {
- input: number;
- output: number;
- cacheRead: number;
- cacheWrite: number;
- };
- contextWindow: number;
- maxTokens: number;
- };
  readonly "gpt-oss-120b": {
  id: string;
  name: string;
@@ -1952,6 +1935,23 @@ export declare const MODELS: {
  };
  };
  readonly openrouter: {
+ readonly "allenai/olmo-3-7b-instruct": {
+ id: string;
+ name: string;
+ api: "openai-completions";
+ provider: string;
+ baseUrl: string;
+ reasoning: false;
+ input: "text"[];
+ cost: {
+ input: number;
+ output: number;
+ cacheRead: number;
+ cacheWrite: number;
+ };
+ contextWindow: number;
+ maxTokens: number;
+ };
  readonly "x-ai/grok-4.1-fast": {
  id: string;
  name: string;
@@ -2581,7 +2581,7 @@ export declare const MODELS: {
  contextWindow: number;
  maxTokens: number;
  };
- readonly "deepseek/deepseek-v3.1-terminus": {
+ readonly "deepseek/deepseek-v3.1-terminus:exacto": {
  id: string;
  name: string;
  api: "openai-completions";
@@ -2598,7 +2598,7 @@ export declare const MODELS: {
  contextWindow: number;
  maxTokens: number;
  };
- readonly "deepseek/deepseek-v3.1-terminus:exacto": {
+ readonly "deepseek/deepseek-v3.1-terminus": {
  id: string;
  name: string;
  api: "openai-completions";
@@ -3142,7 +3142,7 @@ export declare const MODELS: {
  contextWindow: number;
  maxTokens: number;
  };
- readonly "openai/gpt-oss-120b": {
+ readonly "openai/gpt-oss-120b:exacto": {
  id: string;
  name: string;
  api: "openai-completions";
@@ -3159,7 +3159,7 @@ export declare const MODELS: {
  contextWindow: number;
  maxTokens: number;
  };
- readonly "openai/gpt-oss-120b:exacto": {
+ readonly "openai/gpt-oss-120b": {
  id: string;
  name: string;
  api: "openai-completions";
@@ -5080,7 +5080,7 @@ export declare const MODELS: {
  contextWindow: number;
  maxTokens: number;
  };
- readonly "meta-llama/llama-3.1-8b-instruct": {
+ readonly "meta-llama/llama-3.1-70b-instruct": {
  id: string;
  name: string;
  api: "openai-completions";
@@ -5097,7 +5097,7 @@ export declare const MODELS: {
  contextWindow: number;
  maxTokens: number;
  };
- readonly "meta-llama/llama-3.1-405b-instruct": {
+ readonly "meta-llama/llama-3.1-8b-instruct": {
  id: string;
  name: string;
  api: "openai-completions";
@@ -5114,7 +5114,7 @@ export declare const MODELS: {
  contextWindow: number;
  maxTokens: number;
  };
- readonly "meta-llama/llama-3.1-70b-instruct": {
+ readonly "meta-llama/llama-3.1-405b-instruct": {
  id: string;
  name: string;
  api: "openai-completions";
@@ -5437,7 +5437,7 @@ export declare const MODELS: {
  contextWindow: number;
  maxTokens: number;
  };
- readonly "openai/gpt-3.5-turbo-0613": {
+ readonly "openai/gpt-4-turbo-preview": {
  id: string;
  name: string;
  api: "openai-completions";
@@ -5454,7 +5454,7 @@ export declare const MODELS: {
  contextWindow: number;
  maxTokens: number;
  };
- readonly "openai/gpt-4-turbo-preview": {
+ readonly "openai/gpt-3.5-turbo-0613": {
  id: string;
  name: string;
  api: "openai-completions";