@tokscale/cli 1.0.17 → 1.0.18
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/cli.js +214 -91
- package/dist/cli.js.map +1 -1
- package/dist/graph-types.d.ts +1 -1
- package/dist/graph-types.d.ts.map +1 -1
- package/dist/native-runner.js +5 -5
- package/dist/native-runner.js.map +1 -1
- package/dist/native.d.ts +9 -30
- package/dist/native.d.ts.map +1 -1
- package/dist/native.js +18 -134
- package/dist/native.js.map +1 -1
- package/dist/sessions/types.d.ts +1 -1
- package/dist/sessions/types.d.ts.map +1 -1
- package/dist/submit.d.ts +2 -0
- package/dist/submit.d.ts.map +1 -1
- package/dist/submit.js +32 -16
- package/dist/submit.js.map +1 -1
- package/dist/tui/App.d.ts.map +1 -1
- package/dist/tui/App.js +13 -6
- package/dist/tui/App.js.map +1 -1
- package/dist/tui/components/DailyView.d.ts.map +1 -1
- package/dist/tui/components/DailyView.js +25 -8
- package/dist/tui/components/DailyView.js.map +1 -1
- package/dist/tui/components/DateBreakdownPanel.js +2 -2
- package/dist/tui/components/DateBreakdownPanel.js.map +1 -1
- package/dist/tui/components/Footer.d.ts.map +1 -1
- package/dist/tui/components/Footer.js +2 -3
- package/dist/tui/components/Footer.js.map +1 -1
- package/dist/tui/components/LoadingSpinner.d.ts.map +1 -1
- package/dist/tui/components/LoadingSpinner.js +1 -2
- package/dist/tui/components/LoadingSpinner.js.map +1 -1
- package/dist/tui/components/ModelView.js +2 -2
- package/dist/tui/components/ModelView.js.map +1 -1
- package/dist/tui/config/settings.d.ts +4 -4
- package/dist/tui/config/settings.d.ts.map +1 -1
- package/dist/tui/config/settings.js +11 -4
- package/dist/tui/config/settings.js.map +1 -1
- package/dist/tui/hooks/useData.d.ts.map +1 -1
- package/dist/tui/hooks/useData.js +29 -42
- package/dist/tui/hooks/useData.js.map +1 -1
- package/dist/tui/types/index.d.ts +2 -2
- package/dist/tui/types/index.d.ts.map +1 -1
- package/dist/tui/types/index.js +3 -1
- package/dist/tui/types/index.js.map +1 -1
- package/dist/tui/utils/colors.d.ts +1 -0
- package/dist/tui/utils/colors.d.ts.map +1 -1
- package/dist/tui/utils/colors.js +7 -0
- package/dist/tui/utils/colors.js.map +1 -1
- package/dist/wrapped.d.ts.map +1 -1
- package/dist/wrapped.js +20 -48
- package/dist/wrapped.js.map +1 -1
- package/package.json +2 -2
- package/src/cli.ts +232 -97
- package/src/graph-types.ts +1 -1
- package/src/native-runner.ts +5 -5
- package/src/native.ts +35 -200
- package/src/sessions/types.ts +1 -1
- package/src/submit.ts +36 -22
- package/src/tui/App.tsx +9 -6
- package/src/tui/components/DailyView.tsx +29 -11
- package/src/tui/components/DateBreakdownPanel.tsx +2 -2
- package/src/tui/components/Footer.tsx +7 -2
- package/src/tui/components/LoadingSpinner.tsx +1 -2
- package/src/tui/components/ModelView.tsx +2 -2
- package/src/tui/config/settings.ts +18 -9
- package/src/tui/hooks/useData.ts +36 -47
- package/src/tui/types/index.ts +5 -4
- package/src/tui/utils/colors.ts +7 -0
- package/src/wrapped.ts +21 -54
- package/dist/graph.d.ts +0 -29
- package/dist/graph.d.ts.map +0 -1
- package/dist/graph.js +0 -383
- package/dist/graph.js.map +0 -1
- package/dist/pricing.d.ts +0 -58
- package/dist/pricing.d.ts.map +0 -1
- package/dist/pricing.js +0 -232
- package/dist/pricing.js.map +0 -1
- package/dist/sessions/claudecode.d.ts +0 -8
- package/dist/sessions/claudecode.d.ts.map +0 -1
- package/dist/sessions/claudecode.js +0 -84
- package/dist/sessions/claudecode.js.map +0 -1
- package/dist/sessions/codex.d.ts +0 -8
- package/dist/sessions/codex.d.ts.map +0 -1
- package/dist/sessions/codex.js +0 -158
- package/dist/sessions/codex.js.map +0 -1
- package/dist/sessions/gemini.d.ts +0 -8
- package/dist/sessions/gemini.d.ts.map +0 -1
- package/dist/sessions/gemini.js +0 -66
- package/dist/sessions/gemini.js.map +0 -1
- package/dist/sessions/index.d.ts +0 -32
- package/dist/sessions/index.d.ts.map +0 -1
- package/dist/sessions/index.js +0 -96
- package/dist/sessions/index.js.map +0 -1
- package/dist/sessions/opencode.d.ts +0 -9
- package/dist/sessions/opencode.d.ts.map +0 -1
- package/dist/sessions/opencode.js +0 -69
- package/dist/sessions/opencode.js.map +0 -1
- package/dist/sessions/reports.d.ts +0 -58
- package/dist/sessions/reports.d.ts.map +0 -1
- package/dist/sessions/reports.js +0 -337
- package/dist/sessions/reports.js.map +0 -1
- package/src/graph.ts +0 -485
- package/src/pricing.ts +0 -309
- package/src/sessions/claudecode.ts +0 -119
- package/src/sessions/codex.ts +0 -227
- package/src/sessions/gemini.ts +0 -108
- package/src/sessions/index.ts +0 -126
- package/src/sessions/opencode.ts +0 -117
- package/src/sessions/reports.ts +0 -475
package/src/pricing.ts
DELETED
@@ -1,309 +0,0 @@
-/**
- * Pricing data fetcher using LiteLLM as source
- * Features disk caching with 1-hour TTL
- */
-
-import * as fs from "node:fs";
-import * as path from "node:path";
-import * as os from "node:os";
-
-function escapeRegex(str: string): string {
-  return str.replace(/[.*+?^${}()|[\]\\]/g, "\\$&");
-}
-
-export function normalizeModelName(modelId: string): string | null {
-  const lower = modelId.toLowerCase();
-
-  if (lower.includes("opus")) {
-    if (lower.includes("4.5") || lower.includes("4-5")) {
-      return "opus-4-5";
-    } else if (lower.includes("4")) {
-      return "opus-4";
-    }
-  }
-  if (lower.includes("sonnet")) {
-    if (lower.includes("4.5") || lower.includes("4-5")) {
-      return "sonnet-4-5";
-    } else if (lower.includes("4")) {
-      return "sonnet-4";
-    } else if (lower.includes("3.7") || lower.includes("3-7")) {
-      return "sonnet-3-7";
-    } else if (lower.includes("3.5") || lower.includes("3-5")) {
-      return "sonnet-3-5";
-    }
-  }
-  if (lower.includes("haiku") && (lower.includes("4.5") || lower.includes("4-5"))) {
-    return "haiku-4-5";
-  }
-
-  if (lower === "o3") {
-    return "o3";
-  }
-  if (lower.startsWith("gpt-4o") || lower === "gpt-4o") {
-    return "gpt-4o";
-  }
-  if (lower.startsWith("gpt-4.1") || lower.includes("gpt-4.1")) {
-    return "gpt-4.1";
-  }
-
-  if (lower.includes("gemini-2.5-pro")) {
-    return "gemini-2.5-pro";
-  }
-  if (lower.includes("gemini-2.5-flash")) {
-    return "gemini-2.5-flash";
-  }
-
-  return null;
-}
-
-export function isWordBoundaryMatch(haystack: string, needle: string): boolean {
-  const pos = haystack.indexOf(needle);
-  if (pos === -1) return false;
-
-  const beforeOk = pos === 0 || !/[a-zA-Z0-9]/.test(haystack[pos - 1]);
-  const afterOk =
-    pos + needle.length === haystack.length ||
-    !/[a-zA-Z0-9]/.test(haystack[pos + needle.length]);
-
-  return beforeOk && afterOk;
-}
-
-const LITELLM_PRICING_URL =
-  "https://raw.githubusercontent.com/BerriAI/litellm/main/model_prices_and_context_window.json";
-
-const CACHE_TTL_MS = 60 * 60 * 1000; // 1 hour
-
-export interface LiteLLMModelPricing {
-  input_cost_per_token?: number;
-  output_cost_per_token?: number;
-  cache_creation_input_token_cost?: number;
-  cache_read_input_token_cost?: number;
-  input_cost_per_token_above_200k_tokens?: number;
-  output_cost_per_token_above_200k_tokens?: number;
-  cache_creation_input_token_cost_above_200k_tokens?: number;
-  cache_read_input_token_cost_above_200k_tokens?: number;
-}
-
-export type PricingDataset = Record<string, LiteLLMModelPricing>;
-
-interface CachedPricing {
-  timestamp: number;
-  data: PricingDataset;
-}
-
-/**
- * Format for passing pricing to Rust native module
- * Note: napi-rs expects undefined (not null) for Rust Option<T> fields
- */
-export interface PricingEntry {
-  modelId: string;
-  pricing: {
-    inputCostPerToken: number;
-    outputCostPerToken: number;
-    cacheReadInputTokenCost?: number;
-    cacheCreationInputTokenCost?: number;
-  };
-}
-
-function getCacheDir(): string {
-  const cacheHome = process.env.XDG_CACHE_HOME || path.join(os.homedir(), ".cache");
-  return path.join(cacheHome, "tokscale");
-}
-
-function getCachePath(): string {
-  return path.join(getCacheDir(), "pricing.json");
-}
-
-function loadCachedPricing(): CachedPricing | null {
-  try {
-    const cachePath = getCachePath();
-    if (!fs.existsSync(cachePath)) {
-      return null;
-    }
-
-    const content = fs.readFileSync(cachePath, "utf-8");
-    const cached = JSON.parse(content) as CachedPricing;
-
-    // Check TTL
-    const age = Date.now() - cached.timestamp;
-    if (age > CACHE_TTL_MS) {
-      return null; // Cache expired
-    }
-
-    return cached;
-  } catch {
-    return null;
-  }
-}
-
-function saveCachedPricing(data: PricingDataset): void {
-  try {
-    const cacheDir = getCacheDir();
-    if (!fs.existsSync(cacheDir)) {
-      fs.mkdirSync(cacheDir, { recursive: true });
-    }
-
-    const cached: CachedPricing = {
-      timestamp: Date.now(),
-      data,
-    };
-
-    fs.writeFileSync(getCachePath(), JSON.stringify(cached), "utf-8");
-  } catch {
-    // Ignore cache write errors
-  }
-}
-
-export class PricingFetcher {
-  private pricingData: PricingDataset | null = null;
-
-  /**
-   * Fetch pricing data (with disk cache, 1-hour TTL)
-   */
-  async fetchPricing(): Promise<PricingDataset> {
-    if (this.pricingData) return this.pricingData;
-
-    // Try to load from cache first
-    const cached = loadCachedPricing();
-    if (cached) {
-      this.pricingData = cached.data;
-      return this.pricingData;
-    }
-
-    const controller = new AbortController();
-    const timeoutId = setTimeout(() => controller.abort(), 15000);
-
-    let response: Response;
-    try {
-      response = await fetch(LITELLM_PRICING_URL, { signal: controller.signal });
-    } finally {
-      clearTimeout(timeoutId);
-    }
-
-    if (!response.ok) {
-      throw new Error(`Failed to fetch pricing: ${response.status}`);
-    }
-
-    this.pricingData = (await response.json()) as PricingDataset;
-
-    // Save to cache
-    saveCachedPricing(this.pricingData);
-
-    return this.pricingData;
-  }
-
-  /**
-   * Get raw pricing dataset
-   */
-  getPricingData(): PricingDataset | null {
-    return this.pricingData;
-  }
-
-  /**
-   * Convert pricing data to format expected by Rust native module
-   */
-  toPricingEntries(): PricingEntry[] {
-    if (!this.pricingData) return [];
-
-    return Object.entries(this.pricingData).map(([modelId, pricing]) => ({
-      modelId,
-      pricing: {
-        inputCostPerToken: pricing.input_cost_per_token ?? 0,
-        outputCostPerToken: pricing.output_cost_per_token ?? 0,
-        // napi-rs expects undefined (not null) for Option<T> fields
-        cacheReadInputTokenCost: pricing.cache_read_input_token_cost,
-        cacheCreationInputTokenCost: pricing.cache_creation_input_token_cost,
-      },
-    }));
-  }
-
-  getModelPricing(modelID: string): LiteLLMModelPricing | null {
-    if (!this.pricingData) return null;
-
-    // Direct lookup
-    if (this.pricingData[modelID]) {
-      return this.pricingData[modelID];
-    }
-
-    // Try with provider prefix
-    const prefixes = ["anthropic/", "openai/", "google/", "bedrock/"];
-    for (const prefix of prefixes) {
-      if (this.pricingData[prefix + modelID]) {
-        return this.pricingData[prefix + modelID];
-      }
-    }
-
-    const normalized = normalizeModelName(modelID);
-    if (normalized) {
-      if (this.pricingData[normalized]) {
-        return this.pricingData[normalized];
-      }
-      for (const prefix of prefixes) {
-        if (this.pricingData[prefix + normalized]) {
-          return this.pricingData[prefix + normalized];
-        }
-      }
-    }
-
-    const lowerModelID = modelID.toLowerCase();
-    const lowerNormalized = normalized?.toLowerCase();
-    const sortedKeys = Object.keys(this.pricingData).sort();
-
-    for (const key of sortedKeys) {
-      const lowerKey = key.toLowerCase();
-      if (isWordBoundaryMatch(lowerKey, lowerModelID)) {
-        return this.pricingData[key];
-      }
-      if (lowerNormalized && isWordBoundaryMatch(lowerKey, lowerNormalized)) {
-        return this.pricingData[key];
-      }
-    }
-
-    for (const key of sortedKeys) {
-      const lowerKey = key.toLowerCase();
-      if (isWordBoundaryMatch(lowerModelID, lowerKey)) {
-        return this.pricingData[key];
-      }
-      if (lowerNormalized && isWordBoundaryMatch(lowerNormalized, lowerKey)) {
-        return this.pricingData[key];
-      }
-    }
-
-    return null;
-  }
-
-  calculateCost(
-    tokens: {
-      input: number;
-      output: number;
-      reasoning?: number;
-      cacheRead: number;
-      cacheWrite: number;
-    },
-    pricing: LiteLLMModelPricing
-  ): number {
-    const inputCost = tokens.input * (pricing.input_cost_per_token ?? 0);
-    const outputCost =
-      (tokens.output + (tokens.reasoning ?? 0)) * (pricing.output_cost_per_token ?? 0);
-    const cacheWriteCost =
-      tokens.cacheWrite * (pricing.cache_creation_input_token_cost ?? 0);
-    const cacheReadCost =
-      tokens.cacheRead * (pricing.cache_read_input_token_cost ?? 0);
-
-    return inputCost + outputCost + cacheWriteCost + cacheReadCost;
-  }
-}
-
-/**
- * Clear pricing cache (for testing or forced refresh)
- */
-export function clearPricingCache(): void {
-  try {
-    const cachePath = getCachePath();
-    if (fs.existsSync(cachePath)) {
-      fs.unlinkSync(cachePath);
-    }
-  } catch {
-    // Ignore errors
-  }
-}
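For context, a minimal usage sketch of the PricingFetcher API deleted above. This is illustration only, not code from the package; the relative import path assumes the file's 1.0.17 location under src/, and the model ID and token counts are invented.

// Sketch (not from the package): fetch LiteLLM pricing, resolve a model, estimate a cost.
import { PricingFetcher } from "./pricing.js"; // assumed 1.0.17 path

async function estimateCost(): Promise<void> {
  const fetcher = new PricingFetcher();
  await fetcher.fetchPricing(); // LiteLLM fetch, or the 1-hour disk cache under $XDG_CACHE_HOME/tokscale

  const pricing = fetcher.getModelPricing("claude-sonnet-4-5"); // model ID is a made-up example
  if (pricing) {
    const cost = fetcher.calculateCost(
      { input: 1_200, output: 350, cacheRead: 20_000, cacheWrite: 4_800 },
      pricing
    );
    console.log(`estimated cost: $${cost.toFixed(4)}`);
  }
}

estimateCost().catch(console.error);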
package/src/sessions/claudecode.ts
DELETED
@@ -1,119 +0,0 @@
-/**
- * Claude Code (Anthropic official) session parser
- * Reads from ~/.claude/projects/
- */
-
-import * as fs from "node:fs";
-import * as path from "node:path";
-import * as os from "node:os";
-import { createUnifiedMessage, type UnifiedMessage, type TokenBreakdown } from "./types.js";
-
-interface ClaudeCodeEntry {
-  type: string;
-  timestamp?: string;
-  message?: {
-    model?: string;
-    usage?: {
-      input_tokens?: number;
-      output_tokens?: number;
-      cache_read_input_tokens?: number;
-      cache_creation_input_tokens?: number;
-    };
-  };
-}
-
-export function getClaudeCodeProjectsPath(): string {
-  return path.join(os.homedir(), ".claude", "projects");
-}
-
-function findJsonlFiles(dir: string): string[] {
-  const files: string[] = [];
-
-  function walk(currentDir: string) {
-    try {
-      const entries = fs.readdirSync(currentDir, { withFileTypes: true });
-      for (const entry of entries) {
-        const fullPath = path.join(currentDir, entry.name);
-        if (entry.isDirectory()) {
-          walk(fullPath);
-        } else if (entry.isFile() && entry.name.endsWith(".jsonl")) {
-          files.push(fullPath);
-        }
-      }
-    } catch {
-      // Skip inaccessible directories
-    }
-  }
-
-  walk(dir);
-  return files;
-}
-
-export function parseClaudeCodeMessages(): UnifiedMessage[] {
-  const projectsPath = getClaudeCodeProjectsPath();
-
-  if (!fs.existsSync(projectsPath)) {
-    return [];
-  }
-
-  const messages: UnifiedMessage[] = [];
-  const files = findJsonlFiles(projectsPath);
-
-  for (const file of files) {
-    // Use file path as session ID
-    const sessionId = path.relative(projectsPath, file).replace(/\.jsonl$/, "");
-
-    try {
-      const content = fs.readFileSync(file, "utf-8");
-      const lines = content.split(/\r?\n/);
-
-      for (const line of lines) {
-        const trimmed = line.trim();
-        if (!trimmed) continue;
-
-        try {
-          const entry = JSON.parse(trimmed) as ClaudeCodeEntry;
-
-          // Process assistant messages with usage data and timestamp
-          if (
-            entry.type === "assistant" &&
-            entry.message?.usage &&
-            entry.timestamp
-          ) {
-            const model = entry.message.model || "unknown";
-            const usage = entry.message.usage;
-            const timestamp = new Date(entry.timestamp).getTime();
-
-            // Skip invalid timestamps
-            if (isNaN(timestamp)) continue;
-
-            const tokens: TokenBreakdown = {
-              input: usage.input_tokens || 0,
-              output: usage.output_tokens || 0,
-              cacheRead: usage.cache_read_input_tokens || 0,
-              cacheWrite: usage.cache_creation_input_tokens || 0,
-              reasoning: 0,
-            };
-
-            messages.push(
-              createUnifiedMessage(
-                "claude",
-                model,
-                "anthropic",
-                sessionId,
-                timestamp,
-                tokens
-              )
-            );
-          }
-        } catch {
-          // Skip malformed lines
-        }
-      }
-    } catch {
-      // Skip unreadable files
-    }
-  }
-
-  return messages;
-}
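As an illustration of the input the deleted parser consumed, the following self-contained sketch (not from the package; all values invented) shows how one accepted assistant entry from a ~/.claude/projects JSONL file maps onto the TokenBreakdown fields above.

// Sketch: a single JSONL "assistant" entry and the token breakdown derived from it.
const entry = {
  type: "assistant",
  timestamp: "2025-01-15T12:00:00.000Z",
  message: {
    model: "claude-sonnet-4-5",
    usage: {
      input_tokens: 1200,
      output_tokens: 350,
      cache_read_input_tokens: 20000,
      cache_creation_input_tokens: 4800,
    },
  },
};

// Entries are only counted when type === "assistant", a usage object is present,
// and the timestamp parses to a valid date.
const timestamp = new Date(entry.timestamp).getTime();
const tokens = {
  input: entry.message.usage.input_tokens || 0,
  output: entry.message.usage.output_tokens || 0,
  cacheRead: entry.message.usage.cache_read_input_tokens || 0,
  cacheWrite: entry.message.usage.cache_creation_input_tokens || 0,
  reasoning: 0, // Claude Code usage data carries no separate reasoning count
};

console.log(Number.isNaN(timestamp) ? "skipped" : tokens);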
package/src/sessions/codex.ts
DELETED
@@ -1,227 +0,0 @@
-/**
- * Codex CLI (OpenAI) session parser
- * Reads from ~/.codex/sessions/
- */
-
-import * as fs from "node:fs";
-import * as path from "node:path";
-import * as os from "node:os";
-import { createUnifiedMessage, type UnifiedMessage, type TokenBreakdown } from "./types.js";
-
-interface CodexTokenCountInfo {
-  last_token_usage?: {
-    input_tokens?: number;
-    cached_input_tokens?: number;
-    cache_read_input_tokens?: number;
-    output_tokens?: number;
-    reasoning_output_tokens?: number;
-    total_tokens?: number;
-  };
-  total_token_usage?: {
-    input_tokens?: number;
-    cached_input_tokens?: number;
-    cache_read_input_tokens?: number;
-    output_tokens?: number;
-    reasoning_output_tokens?: number;
-    total_tokens?: number;
-  };
-  model?: string;
-  model_name?: string;
-}
-
-interface CodexEntry {
-  type: string;
-  timestamp?: string;
-  payload?: {
-    type?: string;
-    timestamp?: string;
-    model?: string;
-    model_name?: string;
-    info?: CodexTokenCountInfo;
-  };
-}
-
-export function getCodexSessionsPath(): string {
-  const codexHome = process.env.CODEX_HOME || path.join(os.homedir(), ".codex");
-  return path.join(codexHome, "sessions");
-}
-
-function findJsonlFiles(dir: string): string[] {
-  const files: string[] = [];
-
-  function walk(currentDir: string) {
-    try {
-      const entries = fs.readdirSync(currentDir, { withFileTypes: true });
-      for (const entry of entries) {
-        const fullPath = path.join(currentDir, entry.name);
-        if (entry.isDirectory()) {
-          walk(fullPath);
-        } else if (entry.isFile() && entry.name.endsWith(".jsonl")) {
-          files.push(fullPath);
-        }
-      }
-    } catch {
-      // Skip inaccessible directories
-    }
-  }
-
-  walk(dir);
-  return files;
-}
-
-function extractModel(payload: Record<string, unknown>): string | undefined {
-  // Direct model field
-  if (typeof payload.model === "string" && payload.model.trim()) {
-    return payload.model.trim();
-  }
-
-  // model_name field
-  if (typeof payload.model_name === "string" && payload.model_name.trim()) {
-    return payload.model_name.trim();
-  }
-
-  // Nested in info
-  const info = payload.info as Record<string, unknown> | undefined;
-  if (info) {
-    if (typeof info.model === "string" && info.model.trim()) {
-      return info.model.trim();
-    }
-    if (typeof info.model_name === "string" && info.model_name.trim()) {
-      return info.model_name.trim();
-    }
-  }
-
-  return undefined;
-}
-
-export function parseCodexMessages(): UnifiedMessage[] {
-  const sessionsPath = getCodexSessionsPath();
-
-  if (!fs.existsSync(sessionsPath)) {
-    return [];
-  }
-
-  const messages: UnifiedMessage[] = [];
-  const files = findJsonlFiles(sessionsPath);
-
-  for (const file of files) {
-    // Use file path as session ID
-    const sessionId = path.relative(sessionsPath, file).replace(/\.jsonl$/, "");
-
-    try {
-      const content = fs.readFileSync(file, "utf-8");
-      const lines = content.split(/\r?\n/);
-
-      let currentModel: string | undefined;
-      let previousTotals: {
-        input: number;
-        cached: number;
-        output: number;
-      } | null = null;
-
-      for (const line of lines) {
-        const trimmed = line.trim();
-        if (!trimmed) continue;
-
-        try {
-          const entry = JSON.parse(trimmed) as CodexEntry;
-
-          // Extract model from turn_context
-          if (entry.type === "turn_context" && entry.payload) {
-            const model = extractModel(entry.payload as Record<string, unknown>);
-            if (model) currentModel = model;
-            continue;
-          }
-
-          // Process token_count events with timestamp
-          if (entry.type === "event_msg" && entry.payload?.type === "token_count") {
-            const info = entry.payload.info;
-            if (!info) continue;
-
-            // Extract model from payload
-            const payloadModel = extractModel(entry.payload as Record<string, unknown>);
-            if (payloadModel) currentModel = payloadModel;
-
-            const model = currentModel || "unknown";
-
-            // Extract timestamp
-            const timestamp = entry.timestamp
-              ? new Date(entry.timestamp).getTime()
-              : entry.payload?.timestamp
-                ? new Date(entry.payload.timestamp).getTime()
-                : Date.now();
-
-            // Skip invalid timestamps
-            if (isNaN(timestamp)) continue;
-
-            // Get usage data
-            const lastUsage = info.last_token_usage;
-            const totalUsage = info.total_token_usage;
-
-            let delta = {
-              input: 0,
-              cached: 0,
-              output: 0,
-            };
-
-            if (lastUsage) {
-              delta = {
-                input: lastUsage.input_tokens || 0,
-                cached: lastUsage.cached_input_tokens || lastUsage.cache_read_input_tokens || 0,
-                output: lastUsage.output_tokens || 0,
-              };
-            } else if (totalUsage && previousTotals) {
-              delta = {
-                input: Math.max((totalUsage.input_tokens || 0) - previousTotals.input, 0),
-                cached: Math.max(
-                  (totalUsage.cached_input_tokens || totalUsage.cache_read_input_tokens || 0) -
-                    previousTotals.cached,
-                  0
-                ),
-                output: Math.max((totalUsage.output_tokens || 0) - previousTotals.output, 0),
-              };
-            }
-
-            if (totalUsage) {
-              previousTotals = {
-                input: totalUsage.input_tokens || 0,
-                cached: totalUsage.cached_input_tokens || totalUsage.cache_read_input_tokens || 0,
-                output: totalUsage.output_tokens || 0,
-              };
-            }
-
-            // Skip empty deltas
-            if (delta.input === 0 && delta.cached === 0 && delta.output === 0) {
-              continue;
-            }
-
-            const tokens: TokenBreakdown = {
-              input: delta.input,
-              output: delta.output,
-              cacheRead: delta.cached,
-              cacheWrite: 0, // Codex doesn't track cache write
-              reasoning: 0,
-            };
-
-            messages.push(
-              createUnifiedMessage(
-                "codex",
-                model,
-                "openai",
-                sessionId,
-                timestamp,
-                tokens
-              )
-            );
-          }
-        } catch {
-          // Skip malformed lines
-        }
-      }
-    } catch {
-      // Skip unreadable files
-    }
-  }

-  return messages;
-}
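For reference, a self-contained sketch (not from the package; values invented) of the fallback the deleted Codex parser used when an event carried only cumulative total_token_usage: it subtracts the previous totals and clamps each field at zero.

// Sketch: per-event token delta from cumulative totals, clamped at zero.
type Totals = { input: number; cached: number; output: number };

function deltaFromTotals(total: Totals, previous: Totals | null): Totals {
  if (!previous) return { input: 0, cached: 0, output: 0 };
  return {
    input: Math.max(total.input - previous.input, 0),
    cached: Math.max(total.cached - previous.cached, 0),
    output: Math.max(total.output - previous.output, 0),
  };
}

// Example: two consecutive token_count events from one session file.
const first: Totals = { input: 5_000, cached: 1_000, output: 700 };
const second: Totals = { input: 7_500, cached: 1_400, output: 1_200 };

console.log(deltaFromTotals(second, first)); // { input: 2500, cached: 400, output: 500 }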