@mariozechner/pi-ai 0.48.0 → 0.49.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -4983,13 +4983,13 @@ export const MODELS = {
  reasoning: true,
  input: ["text"],
  cost: {
- input: 0.44999999999999996,
- output: 2.1500000000000004,
+ input: 0.39999999999999997,
+ output: 1.75,
  cacheRead: 0,
  cacheWrite: 0,
  },
- contextWindow: 131072,
- maxTokens: 32768,
+ contextWindow: 163840,
+ maxTokens: 65536,
  },
  "deepseek/deepseek-r1-distill-llama-70b": {
  id: "deepseek/deepseek-r1-distill-llama-70b",
@@ -5088,7 +5088,7 @@ export const MODELS = {
  input: 0.09999999999999999,
  output: 0.39999999999999997,
  cacheRead: 0.024999999999999998,
- cacheWrite: 0.18330000000000002,
+ cacheWrite: 0.0833,
  },
  contextWindow: 1048576,
  maxTokens: 8192,
@@ -5139,7 +5139,7 @@ export const MODELS = {
  input: 0.3,
  output: 2.5,
  cacheRead: 0.03,
- cacheWrite: 0.3833,
+ cacheWrite: 0.08333333333333334,
  },
  contextWindow: 1048576,
  maxTokens: 65535,
@@ -5156,7 +5156,7 @@ export const MODELS = {
  input: 0.09999999999999999,
  output: 0.39999999999999997,
  cacheRead: 0.01,
- cacheWrite: 0.18330000000000002,
+ cacheWrite: 0.0833,
  },
  contextWindow: 1048576,
  maxTokens: 65535,
@@ -5173,7 +5173,7 @@ export const MODELS = {
  input: 0.09999999999999999,
  output: 0.39999999999999997,
  cacheRead: 0.01,
- cacheWrite: 1,
+ cacheWrite: 0.0833,
  },
  contextWindow: 1048576,
  maxTokens: 65536,
@@ -5190,7 +5190,7 @@ export const MODELS = {
  input: 0.3,
  output: 2.5,
  cacheRead: 0.075,
- cacheWrite: 0.3833,
+ cacheWrite: 0.0833,
  },
  contextWindow: 1048576,
  maxTokens: 65535,
@@ -5207,7 +5207,7 @@ export const MODELS = {
  input: 1.25,
  output: 10,
  cacheRead: 0.125,
- cacheWrite: 1.625,
+ cacheWrite: 0.375,
  },
  contextWindow: 1048576,
  maxTokens: 65536,
@@ -5224,7 +5224,7 @@ export const MODELS = {
  input: 1.25,
  output: 10,
  cacheRead: 0.31,
- cacheWrite: 1.625,
+ cacheWrite: 0.375,
  },
  contextWindow: 1048576,
  maxTokens: 65536,
@@ -5241,7 +5241,7 @@ export const MODELS = {
  input: 1.25,
  output: 10,
  cacheRead: 0.31,
- cacheWrite: 1.625,
+ cacheWrite: 0.375,
  },
  contextWindow: 1048576,
  maxTokens: 65535,
@@ -5275,7 +5275,7 @@ export const MODELS = {
  input: 2,
  output: 12,
  cacheRead: 0.19999999999999998,
- cacheWrite: 2.375,
+ cacheWrite: 0.375,
  },
  contextWindow: 1048576,
  maxTokens: 65536,
@@ -7640,8 +7640,8 @@ export const MODELS = {
  cacheRead: 0,
  cacheWrite: 0,
  },
- contextWindow: 262144,
- maxTokens: 262144,
+ contextWindow: 128000,
+ maxTokens: 4096,
  },
  "qwen/qwen3-vl-235b-a22b-instruct": {
  id: "qwen/qwen3-vl-235b-a22b-instruct",