pricetoken 0.1.1 → 0.3.0

This diff shows the changes between publicly released versions of this package as published to one of the supported registries. The information is provided for informational purposes only and reflects the package contents as they appear in the respective public registry.
package/README.md CHANGED
@@ -115,6 +115,16 @@ import type {
115
115
  } from 'pricetoken';
116
116
  ```
117
117
 
118
+ ## Disclaimer
119
+
120
+ Pricing data is provided on a best-effort basis and may be inaccurate, incomplete, or outdated. LLM providers change prices without notice, and our scraping pipeline may not capture every change immediately.
121
+
122
+ **This data is for informational purposes only. Do not use it as the sole basis for financial decisions.** Always verify pricing directly with the provider before committing to spend.
123
+
124
+ If you get a bill you weren't expecting, that's between you and your provider — not us. See the [MIT License](LICENSE) under which this project is distributed (specifically the "AS IS" and "NO WARRANTY" clauses).
125
+
126
+ Found incorrect pricing? [Open an issue](https://github.com/affromero/pricetoken/issues).
127
+
118
128
  ## License
119
129
 
120
130
  MIT
package/dist/index.d.mts CHANGED
@@ -1,3 +1,5 @@
1
+ type ModelStatus = 'active' | 'deprecated' | 'preview';
2
+ type DataConfidence = 'high' | 'low';
1
3
  interface ModelPricing {
2
4
  modelId: string;
3
5
  provider: string;
@@ -6,7 +8,9 @@ interface ModelPricing {
6
8
  outputPerMTok: number;
7
9
  contextWindow: number | null;
8
10
  maxOutputTokens: number | null;
9
- source: 'fetched' | 'seed' | 'admin';
11
+ source: 'fetched' | 'seed' | 'admin' | 'verified';
12
+ status: ModelStatus | null;
13
+ confidence: DataConfidence;
10
14
  lastUpdated: string | null;
11
15
  }
12
16
  interface PriceHistoryPoint {
@@ -85,4 +89,4 @@ declare function calculateModelCost(modelId: string, inputTokens: number, output
85
89
 
86
90
  declare const STATIC_PRICING: ModelPricing[];
87
91
 
88
- export { type CostEstimate, type ModelHistory, type ModelPricing, type PriceHistoryPoint, PriceTokenClient, type PriceTokenError, type PriceTokenResponse, type ProviderSummary, STATIC_PRICING, calculateCost, calculateModelCost };
92
+ export { type CostEstimate, type DataConfidence, type ModelHistory, type ModelPricing, type ModelStatus, type PriceHistoryPoint, PriceTokenClient, type PriceTokenError, type PriceTokenResponse, type ProviderSummary, STATIC_PRICING, calculateCost, calculateModelCost };
package/dist/index.d.ts CHANGED
@@ -1,3 +1,5 @@
1
+ type ModelStatus = 'active' | 'deprecated' | 'preview';
2
+ type DataConfidence = 'high' | 'low';
1
3
  interface ModelPricing {
2
4
  modelId: string;
3
5
  provider: string;
@@ -6,7 +8,9 @@ interface ModelPricing {
6
8
  outputPerMTok: number;
7
9
  contextWindow: number | null;
8
10
  maxOutputTokens: number | null;
9
- source: 'fetched' | 'seed' | 'admin';
11
+ source: 'fetched' | 'seed' | 'admin' | 'verified';
12
+ status: ModelStatus | null;
13
+ confidence: DataConfidence;
10
14
  lastUpdated: string | null;
11
15
  }
12
16
  interface PriceHistoryPoint {
@@ -85,4 +89,4 @@ declare function calculateModelCost(modelId: string, inputTokens: number, output
85
89
 
86
90
  declare const STATIC_PRICING: ModelPricing[];
87
91
 
88
- export { type CostEstimate, type ModelHistory, type ModelPricing, type PriceHistoryPoint, PriceTokenClient, type PriceTokenError, type PriceTokenResponse, type ProviderSummary, STATIC_PRICING, calculateCost, calculateModelCost };
92
+ export { type CostEstimate, type DataConfidence, type ModelHistory, type ModelPricing, type ModelStatus, type PriceHistoryPoint, PriceTokenClient, type PriceTokenError, type PriceTokenResponse, type ProviderSummary, STATIC_PRICING, calculateCost, calculateModelCost };
package/dist/index.js CHANGED
@@ -102,6 +102,8 @@ var STATIC_PRICING = [
102
102
  contextWindow: 2e5,
103
103
  maxOutputTokens: 128e3,
104
104
  source: "seed",
105
+ status: "active",
106
+ confidence: "high",
105
107
  lastUpdated: null
106
108
  },
107
109
  {
@@ -113,6 +115,8 @@ var STATIC_PRICING = [
113
115
  contextWindow: 2e5,
114
116
  maxOutputTokens: 64e3,
115
117
  source: "seed",
118
+ status: "active",
119
+ confidence: "high",
116
120
  lastUpdated: null
117
121
  },
118
122
  {
@@ -124,6 +128,8 @@ var STATIC_PRICING = [
124
128
  contextWindow: 2e5,
125
129
  maxOutputTokens: 64e3,
126
130
  source: "seed",
131
+ status: "active",
132
+ confidence: "high",
127
133
  lastUpdated: null
128
134
  },
129
135
  // OpenAI
@@ -133,9 +139,24 @@ var STATIC_PRICING = [
133
139
  displayName: "GPT-5.2",
134
140
  inputPerMTok: 1.75,
135
141
  outputPerMTok: 14,
136
- contextWindow: 1047576,
137
- maxOutputTokens: 32768,
142
+ contextWindow: 4e5,
143
+ maxOutputTokens: 128e3,
144
+ source: "seed",
145
+ status: "active",
146
+ confidence: "high",
147
+ lastUpdated: null
148
+ },
149
+ {
150
+ modelId: "gpt-5",
151
+ provider: "openai",
152
+ displayName: "GPT-5",
153
+ inputPerMTok: 1.25,
154
+ outputPerMTok: 10,
155
+ contextWindow: 4e5,
156
+ maxOutputTokens: 128e3,
138
157
  source: "seed",
158
+ status: "active",
159
+ confidence: "high",
139
160
  lastUpdated: null
140
161
  },
141
162
  {
@@ -144,9 +165,63 @@ var STATIC_PRICING = [
144
165
  displayName: "GPT-5 Mini",
145
166
  inputPerMTok: 0.25,
146
167
  outputPerMTok: 2,
147
- contextWindow: 1047576,
168
+ contextWindow: 4e5,
169
+ maxOutputTokens: 128e3,
170
+ source: "seed",
171
+ status: "active",
172
+ confidence: "high",
173
+ lastUpdated: null
174
+ },
175
+ {
176
+ modelId: "gpt-5-nano",
177
+ provider: "openai",
178
+ displayName: "GPT-5 Nano",
179
+ inputPerMTok: 0.05,
180
+ outputPerMTok: 0.4,
181
+ contextWindow: 4e5,
182
+ maxOutputTokens: 128e3,
183
+ source: "seed",
184
+ status: "active",
185
+ confidence: "high",
186
+ lastUpdated: null
187
+ },
188
+ {
189
+ modelId: "gpt-4.1",
190
+ provider: "openai",
191
+ displayName: "GPT-4.1",
192
+ inputPerMTok: 2,
193
+ outputPerMTok: 8,
194
+ contextWindow: 1e6,
148
195
  maxOutputTokens: 32768,
149
196
  source: "seed",
197
+ status: "active",
198
+ confidence: "high",
199
+ lastUpdated: null
200
+ },
201
+ {
202
+ modelId: "gpt-4.1-mini",
203
+ provider: "openai",
204
+ displayName: "GPT-4.1 Mini",
205
+ inputPerMTok: 0.4,
206
+ outputPerMTok: 1.6,
207
+ contextWindow: 1e6,
208
+ maxOutputTokens: 32768,
209
+ source: "seed",
210
+ status: "active",
211
+ confidence: "high",
212
+ lastUpdated: null
213
+ },
214
+ {
215
+ modelId: "gpt-4.1-nano",
216
+ provider: "openai",
217
+ displayName: "GPT-4.1 Nano",
218
+ inputPerMTok: 0.1,
219
+ outputPerMTok: 0.4,
220
+ contextWindow: 1e6,
221
+ maxOutputTokens: 32768,
222
+ source: "seed",
223
+ status: "active",
224
+ confidence: "high",
150
225
  lastUpdated: null
151
226
  },
152
227
  {
@@ -158,6 +233,8 @@ var STATIC_PRICING = [
158
233
  contextWindow: 128e3,
159
234
  maxOutputTokens: 16384,
160
235
  source: "seed",
236
+ status: "active",
237
+ confidence: "high",
161
238
  lastUpdated: null
162
239
  },
163
240
  {
@@ -169,6 +246,8 @@ var STATIC_PRICING = [
169
246
  contextWindow: 128e3,
170
247
  maxOutputTokens: 16384,
171
248
  source: "seed",
249
+ status: "active",
250
+ confidence: "high",
172
251
  lastUpdated: null
173
252
  },
174
253
  {
@@ -180,6 +259,8 @@ var STATIC_PRICING = [
180
259
  contextWindow: 2e5,
181
260
  maxOutputTokens: 1e5,
182
261
  source: "seed",
262
+ status: "active",
263
+ confidence: "high",
183
264
  lastUpdated: null
184
265
  },
185
266
  {
@@ -191,20 +272,11 @@ var STATIC_PRICING = [
191
272
  contextWindow: 2e5,
192
273
  maxOutputTokens: 1e5,
193
274
  source: "seed",
275
+ status: "active",
276
+ confidence: "high",
194
277
  lastUpdated: null
195
278
  },
196
279
  // Google
197
- {
198
- modelId: "gemini-3.1-pro-preview",
199
- provider: "google",
200
- displayName: "Gemini 3.1 Pro Preview",
201
- inputPerMTok: 2,
202
- outputPerMTok: 12,
203
- contextWindow: 1e6,
204
- maxOutputTokens: 65536,
205
- source: "seed",
206
- lastUpdated: null
207
- },
208
280
  {
209
281
  modelId: "gemini-2.5-pro",
210
282
  provider: "google",
@@ -214,6 +286,8 @@ var STATIC_PRICING = [
214
286
  contextWindow: 1048576,
215
287
  maxOutputTokens: 65536,
216
288
  source: "seed",
289
+ status: "active",
290
+ confidence: "high",
217
291
  lastUpdated: null
218
292
  },
219
293
  {
@@ -225,6 +299,8 @@ var STATIC_PRICING = [
225
299
  contextWindow: 1048576,
226
300
  maxOutputTokens: 65536,
227
301
  source: "seed",
302
+ status: "active",
303
+ confidence: "high",
228
304
  lastUpdated: null
229
305
  },
230
306
  {
@@ -236,6 +312,8 @@ var STATIC_PRICING = [
236
312
  contextWindow: 1048576,
237
313
  maxOutputTokens: 8192,
238
314
  source: "seed",
315
+ status: "active",
316
+ confidence: "high",
239
317
  lastUpdated: null
240
318
  },
241
319
  {
@@ -247,6 +325,21 @@ var STATIC_PRICING = [
247
325
  contextWindow: 1048576,
248
326
  maxOutputTokens: 8192,
249
327
  source: "seed",
328
+ status: "active",
329
+ confidence: "high",
330
+ lastUpdated: null
331
+ },
332
+ {
333
+ modelId: "gemini-2.5-flash-lite",
334
+ provider: "google",
335
+ displayName: "Gemini 2.5 Flash-Lite",
336
+ inputPerMTok: 0.1,
337
+ outputPerMTok: 0.4,
338
+ contextWindow: 1048576,
339
+ maxOutputTokens: 65536,
340
+ source: "seed",
341
+ status: "active",
342
+ confidence: "high",
250
343
  lastUpdated: null
251
344
  },
252
345
  // DeepSeek
@@ -254,22 +347,26 @@ var STATIC_PRICING = [
254
347
  modelId: "deepseek-chat",
255
348
  provider: "deepseek",
256
349
  displayName: "DeepSeek V3",
257
- inputPerMTok: 0.27,
258
- outputPerMTok: 1.1,
259
- contextWindow: 65536,
350
+ inputPerMTok: 0.28,
351
+ outputPerMTok: 0.42,
352
+ contextWindow: 131072,
260
353
  maxOutputTokens: 8192,
261
354
  source: "seed",
355
+ status: "active",
356
+ confidence: "high",
262
357
  lastUpdated: null
263
358
  },
264
359
  {
265
360
  modelId: "deepseek-reasoner",
266
361
  provider: "deepseek",
267
362
  displayName: "DeepSeek R1",
268
- inputPerMTok: 0.55,
269
- outputPerMTok: 2.19,
270
- contextWindow: 65536,
271
- maxOutputTokens: 8192,
363
+ inputPerMTok: 0.28,
364
+ outputPerMTok: 0.42,
365
+ contextWindow: 131072,
366
+ maxOutputTokens: 65536,
272
367
  source: "seed",
368
+ status: "active",
369
+ confidence: "high",
273
370
  lastUpdated: null
274
371
  }
275
372
  ];
package/dist/index.mjs CHANGED
@@ -73,6 +73,8 @@ var STATIC_PRICING = [
73
73
  contextWindow: 2e5,
74
74
  maxOutputTokens: 128e3,
75
75
  source: "seed",
76
+ status: "active",
77
+ confidence: "high",
76
78
  lastUpdated: null
77
79
  },
78
80
  {
@@ -84,6 +86,8 @@ var STATIC_PRICING = [
84
86
  contextWindow: 2e5,
85
87
  maxOutputTokens: 64e3,
86
88
  source: "seed",
89
+ status: "active",
90
+ confidence: "high",
87
91
  lastUpdated: null
88
92
  },
89
93
  {
@@ -95,6 +99,8 @@ var STATIC_PRICING = [
95
99
  contextWindow: 2e5,
96
100
  maxOutputTokens: 64e3,
97
101
  source: "seed",
102
+ status: "active",
103
+ confidence: "high",
98
104
  lastUpdated: null
99
105
  },
100
106
  // OpenAI
@@ -104,9 +110,24 @@ var STATIC_PRICING = [
104
110
  displayName: "GPT-5.2",
105
111
  inputPerMTok: 1.75,
106
112
  outputPerMTok: 14,
107
- contextWindow: 1047576,
108
- maxOutputTokens: 32768,
113
+ contextWindow: 4e5,
114
+ maxOutputTokens: 128e3,
115
+ source: "seed",
116
+ status: "active",
117
+ confidence: "high",
118
+ lastUpdated: null
119
+ },
120
+ {
121
+ modelId: "gpt-5",
122
+ provider: "openai",
123
+ displayName: "GPT-5",
124
+ inputPerMTok: 1.25,
125
+ outputPerMTok: 10,
126
+ contextWindow: 4e5,
127
+ maxOutputTokens: 128e3,
109
128
  source: "seed",
129
+ status: "active",
130
+ confidence: "high",
110
131
  lastUpdated: null
111
132
  },
112
133
  {
@@ -115,9 +136,63 @@ var STATIC_PRICING = [
115
136
  displayName: "GPT-5 Mini",
116
137
  inputPerMTok: 0.25,
117
138
  outputPerMTok: 2,
118
- contextWindow: 1047576,
139
+ contextWindow: 4e5,
140
+ maxOutputTokens: 128e3,
141
+ source: "seed",
142
+ status: "active",
143
+ confidence: "high",
144
+ lastUpdated: null
145
+ },
146
+ {
147
+ modelId: "gpt-5-nano",
148
+ provider: "openai",
149
+ displayName: "GPT-5 Nano",
150
+ inputPerMTok: 0.05,
151
+ outputPerMTok: 0.4,
152
+ contextWindow: 4e5,
153
+ maxOutputTokens: 128e3,
154
+ source: "seed",
155
+ status: "active",
156
+ confidence: "high",
157
+ lastUpdated: null
158
+ },
159
+ {
160
+ modelId: "gpt-4.1",
161
+ provider: "openai",
162
+ displayName: "GPT-4.1",
163
+ inputPerMTok: 2,
164
+ outputPerMTok: 8,
165
+ contextWindow: 1e6,
119
166
  maxOutputTokens: 32768,
120
167
  source: "seed",
168
+ status: "active",
169
+ confidence: "high",
170
+ lastUpdated: null
171
+ },
172
+ {
173
+ modelId: "gpt-4.1-mini",
174
+ provider: "openai",
175
+ displayName: "GPT-4.1 Mini",
176
+ inputPerMTok: 0.4,
177
+ outputPerMTok: 1.6,
178
+ contextWindow: 1e6,
179
+ maxOutputTokens: 32768,
180
+ source: "seed",
181
+ status: "active",
182
+ confidence: "high",
183
+ lastUpdated: null
184
+ },
185
+ {
186
+ modelId: "gpt-4.1-nano",
187
+ provider: "openai",
188
+ displayName: "GPT-4.1 Nano",
189
+ inputPerMTok: 0.1,
190
+ outputPerMTok: 0.4,
191
+ contextWindow: 1e6,
192
+ maxOutputTokens: 32768,
193
+ source: "seed",
194
+ status: "active",
195
+ confidence: "high",
121
196
  lastUpdated: null
122
197
  },
123
198
  {
@@ -129,6 +204,8 @@ var STATIC_PRICING = [
129
204
  contextWindow: 128e3,
130
205
  maxOutputTokens: 16384,
131
206
  source: "seed",
207
+ status: "active",
208
+ confidence: "high",
132
209
  lastUpdated: null
133
210
  },
134
211
  {
@@ -140,6 +217,8 @@ var STATIC_PRICING = [
140
217
  contextWindow: 128e3,
141
218
  maxOutputTokens: 16384,
142
219
  source: "seed",
220
+ status: "active",
221
+ confidence: "high",
143
222
  lastUpdated: null
144
223
  },
145
224
  {
@@ -151,6 +230,8 @@ var STATIC_PRICING = [
151
230
  contextWindow: 2e5,
152
231
  maxOutputTokens: 1e5,
153
232
  source: "seed",
233
+ status: "active",
234
+ confidence: "high",
154
235
  lastUpdated: null
155
236
  },
156
237
  {
@@ -162,20 +243,11 @@ var STATIC_PRICING = [
162
243
  contextWindow: 2e5,
163
244
  maxOutputTokens: 1e5,
164
245
  source: "seed",
246
+ status: "active",
247
+ confidence: "high",
165
248
  lastUpdated: null
166
249
  },
167
250
  // Google
168
- {
169
- modelId: "gemini-3.1-pro-preview",
170
- provider: "google",
171
- displayName: "Gemini 3.1 Pro Preview",
172
- inputPerMTok: 2,
173
- outputPerMTok: 12,
174
- contextWindow: 1e6,
175
- maxOutputTokens: 65536,
176
- source: "seed",
177
- lastUpdated: null
178
- },
179
251
  {
180
252
  modelId: "gemini-2.5-pro",
181
253
  provider: "google",
@@ -185,6 +257,8 @@ var STATIC_PRICING = [
185
257
  contextWindow: 1048576,
186
258
  maxOutputTokens: 65536,
187
259
  source: "seed",
260
+ status: "active",
261
+ confidence: "high",
188
262
  lastUpdated: null
189
263
  },
190
264
  {
@@ -196,6 +270,8 @@ var STATIC_PRICING = [
196
270
  contextWindow: 1048576,
197
271
  maxOutputTokens: 65536,
198
272
  source: "seed",
273
+ status: "active",
274
+ confidence: "high",
199
275
  lastUpdated: null
200
276
  },
201
277
  {
@@ -207,6 +283,8 @@ var STATIC_PRICING = [
207
283
  contextWindow: 1048576,
208
284
  maxOutputTokens: 8192,
209
285
  source: "seed",
286
+ status: "active",
287
+ confidence: "high",
210
288
  lastUpdated: null
211
289
  },
212
290
  {
@@ -218,6 +296,21 @@ var STATIC_PRICING = [
218
296
  contextWindow: 1048576,
219
297
  maxOutputTokens: 8192,
220
298
  source: "seed",
299
+ status: "active",
300
+ confidence: "high",
301
+ lastUpdated: null
302
+ },
303
+ {
304
+ modelId: "gemini-2.5-flash-lite",
305
+ provider: "google",
306
+ displayName: "Gemini 2.5 Flash-Lite",
307
+ inputPerMTok: 0.1,
308
+ outputPerMTok: 0.4,
309
+ contextWindow: 1048576,
310
+ maxOutputTokens: 65536,
311
+ source: "seed",
312
+ status: "active",
313
+ confidence: "high",
221
314
  lastUpdated: null
222
315
  },
223
316
  // DeepSeek
@@ -225,22 +318,26 @@ var STATIC_PRICING = [
225
318
  modelId: "deepseek-chat",
226
319
  provider: "deepseek",
227
320
  displayName: "DeepSeek V3",
228
- inputPerMTok: 0.27,
229
- outputPerMTok: 1.1,
230
- contextWindow: 65536,
321
+ inputPerMTok: 0.28,
322
+ outputPerMTok: 0.42,
323
+ contextWindow: 131072,
231
324
  maxOutputTokens: 8192,
232
325
  source: "seed",
326
+ status: "active",
327
+ confidence: "high",
233
328
  lastUpdated: null
234
329
  },
235
330
  {
236
331
  modelId: "deepseek-reasoner",
237
332
  provider: "deepseek",
238
333
  displayName: "DeepSeek R1",
239
- inputPerMTok: 0.55,
240
- outputPerMTok: 2.19,
241
- contextWindow: 65536,
242
- maxOutputTokens: 8192,
334
+ inputPerMTok: 0.28,
335
+ outputPerMTok: 0.42,
336
+ contextWindow: 131072,
337
+ maxOutputTokens: 65536,
243
338
  source: "seed",
339
+ status: "active",
340
+ confidence: "high",
244
341
  lastUpdated: null
245
342
  }
246
343
  ];
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "pricetoken",
3
- "version": "0.1.1",
3
+ "version": "0.3.0",
4
4
  "description": "Real-time LLM pricing data — typed client, cost calculator, and static pricing",
5
5
  "keywords": [
6
6
  "llm",
@@ -28,9 +28,9 @@
28
28
  "types": "./dist/index.d.ts",
29
29
  "exports": {
30
30
  ".": {
31
+ "types": "./dist/index.d.ts",
31
32
  "import": "./dist/index.mjs",
32
- "require": "./dist/index.js",
33
- "types": "./dist/index.d.ts"
33
+ "require": "./dist/index.js"
34
34
  }
35
35
  },
36
36
  "files": [