gcusage 0.1.0 → 0.1.2
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +22 -15
- package/dist/core/aggregate.js +170 -0
- package/dist/core/commands/trim.js +63 -0
- package/dist/core/logs/index.js +43 -0
- package/dist/core/logs/split.js +50 -0
- package/dist/core/metrics/index.js +263 -0
- package/dist/core/range.js +57 -0
- package/dist/core/render/table.js +192 -0
- package/dist/core/run.js +65 -0
- package/dist/core/utils/format.js +45 -0
- package/dist/core/utils/period.js +8 -0
- package/dist/core/utils/time.js +69 -0
- package/dist/index.js +25 -926
- package/dist/messages.js +36 -0
- package/dist/types.js +2 -0
- package/package.json +2 -2
package/README.md
CHANGED
|
@@ -19,25 +19,32 @@ Gemini CLI 用量统计工具 / Usage report for Gemini CLI
|
|
|
19
19
|
"enabled": true,
|
|
20
20
|
"target": "local",
|
|
21
21
|
"otlpEndpoint": "",
|
|
22
|
-
|
|
22
|
+
"outfile": "/Users/<yourname>/.gemini/telemetry.log",
|
|
23
23
|
"logPrompts": false
|
|
24
24
|
}
|
|
25
25
|
}
|
|
26
26
|
```
|
|
27
27
|
|
|
28
|
-
|
|
28
|
+
说明:
|
|
29
|
+
- 必须开启 telemetry 并指定 `outfile`,否则不会生成 `telemetry.log`
|
|
30
|
+
- 使用绝对路径可避免 `~` 或相对路径导致写入失败
|
|
31
|
+
- `outfile` 路径必须与 `gcusage` 读取路径一致(默认读取 `~/.gemini/telemetry.log`)
|
|
32
|
+
- 日志包含完整响应内容,建议使用 `trim` 进行瘦身
|
|
33
|
+
- Windows 示例路径:`C:\\Users\\<yourname>\\.gemini\\telemetry.log`
|
|
29
34
|
|
|
30
|
-
|
|
31
|
-
|
|
32
|
-
|
|
33
|
-
|
|
35
|
+
## 使用方式 / Usage
|
|
36
|
+
|
|
37
|
+
已发布版本:`gcusage@0.1.0`
|
|
38
|
+
npm 包地址: https://www.npmjs.com/package/gcusage
|
|
39
|
+
|
|
40
|
+
## 系统支持 / Platform
|
|
34
41
|
|
|
35
|
-
|
|
42
|
+
- 支持:macOS / Linux / Windows
|
|
36
43
|
|
|
37
44
|
默认输出最近 6 天(含今天)的日统计:
|
|
38
45
|
|
|
39
46
|
```bash
|
|
40
|
-
|
|
47
|
+
npx gcusage
|
|
41
48
|
```
|
|
42
49
|
|
|
43
50
|
输出:每天一行,展示 Models 列(多模型换行)与各类型 token 总量。
|
|
@@ -45,7 +52,7 @@ node dist/index.js
|
|
|
45
52
|
按 session 输出(当天每个 session 一行):
|
|
46
53
|
|
|
47
54
|
```bash
|
|
48
|
-
|
|
55
|
+
npx gcusage --period session
|
|
49
56
|
```
|
|
50
57
|
|
|
51
58
|
输出:当天每个 session 的最终累计值。
|
|
@@ -53,7 +60,7 @@ node dist/index.js --period session
|
|
|
53
60
|
按周(显示该周内每天数据):
|
|
54
61
|
|
|
55
62
|
```bash
|
|
56
|
-
|
|
63
|
+
npx gcusage --period week
|
|
57
64
|
```
|
|
58
65
|
|
|
59
66
|
输出:当前周(周一开始)内每日数据。
|
|
@@ -61,7 +68,7 @@ node dist/index.js --period week
|
|
|
61
68
|
按月(显示该月内每天数据):
|
|
62
69
|
|
|
63
70
|
```bash
|
|
64
|
-
|
|
71
|
+
npx gcusage --period month
|
|
65
72
|
```
|
|
66
73
|
|
|
67
74
|
输出:当前月内每日数据。
|
|
@@ -69,7 +76,7 @@ node dist/index.js --period month
|
|
|
69
76
|
从指定日期开始统计一周:
|
|
70
77
|
|
|
71
78
|
```bash
|
|
72
|
-
|
|
79
|
+
npx gcusage --period week --since 2026-01-01
|
|
73
80
|
```
|
|
74
81
|
|
|
75
82
|
输出:从 2026-01-01 开始的 7 天数据。
|
|
@@ -77,7 +84,7 @@ node dist/index.js --period week --since 2026-01-01
|
|
|
77
84
|
指定范围(覆盖 week/month 计算范围):
|
|
78
85
|
|
|
79
86
|
```bash
|
|
80
|
-
|
|
87
|
+
npx gcusage --period month --since 2026-01-01 --until 2026-01-15
|
|
81
88
|
```
|
|
82
89
|
|
|
83
90
|
输出:2026-01-01 到 2026-01-15 的每日数据。
|
|
@@ -85,7 +92,7 @@ node dist/index.js --period month --since 2026-01-01 --until 2026-01-15
|
|
|
85
92
|
过滤模型或类型:
|
|
86
93
|
|
|
87
94
|
```bash
|
|
88
|
-
|
|
95
|
+
npx gcusage --model gemini-2.5-flash-lite --type input
|
|
89
96
|
```
|
|
90
97
|
|
|
91
98
|
输出:只统计指定模型与类型的数据。
|
|
@@ -111,7 +118,7 @@ Date | Models | Input | Output | Thought | Cache | Tool | Total Tokens
|
|
|
111
118
|
仅保留 token 相关数据(覆盖原文件):
|
|
112
119
|
|
|
113
120
|
```bash
|
|
114
|
-
|
|
121
|
+
npx gcusage trim
|
|
115
122
|
```
|
|
116
123
|
|
|
117
124
|
输出:`telemetry.log` 体积显著减小,统计不受影响。
|
|
@@ -0,0 +1,170 @@
|
|
|
1
|
+
"use strict";
|
|
2
|
+
Object.defineProperty(exports, "__esModule", { value: true });
|
|
3
|
+
exports.buildSessionSummaries = buildSessionSummaries;
|
|
4
|
+
exports.toSessionTotals = toSessionTotals;
|
|
5
|
+
exports.aggregateSessionsToDay = aggregateSessionsToDay;
|
|
6
|
+
exports.aggregateByDay = aggregateByDay;
|
|
7
|
+
exports.aggregateBySession = aggregateBySession;
|
|
8
|
+
exports.addTypeValue = addTypeValue;
|
|
9
|
+
exports.sumAll = sumAll;
|
|
10
|
+
exports.sumTotals = sumTotals;
|
|
11
|
+
const time_1 = require("./utils/time");
|
|
12
|
+
/**
 * Builds one summary per session from raw metric points.
 *
 * Token counters in the telemetry log are cumulative per session, so for
 * every (model, type) pair only the most recent data point is kept; those
 * last values are then folded into per-type totals via addTypeValue.
 *
 * @param {Array} points - metric points ({ sessionId, model, type, value, timestampMs }).
 * @returns {Array} session summaries sorted by start time, then session id.
 */
function buildSessionSummaries(points) {
    const bySession = new Map();
    for (const point of points) {
        if (!point.sessionId)
            continue;
        const comboKey = `${point.model}||${point.type}`;
        let session = bySession.get(point.sessionId);
        if (!session) {
            session = {
                sessionStartMs: point.timestampMs,
                models: new Set(),
                lastByModelType: new Map()
            };
            bySession.set(point.sessionId, session);
        }
        else if (point.timestampMs < session.sessionStartMs) {
            // Points may arrive out of order; keep the earliest timestamp.
            session.sessionStartMs = point.timestampMs;
        }
        session.models.add(point.model);
        const previous = session.lastByModelType.get(comboKey);
        if (!previous || point.timestampMs >= previous.timestampMs) {
            session.lastByModelType.set(comboKey, {
                timestampMs: point.timestampMs,
                value: point.value,
                type: point.type
            });
        }
    }
    const summaries = [];
    for (const [sessionId, session] of bySession) {
        const totals = { input: 0, output: 0, thought: 0, cache: 0, tool: 0 };
        for (const lastPoint of session.lastByModelType.values()) {
            addTypeValue(totals, lastPoint.type, lastPoint.value);
        }
        summaries.push({
            sessionId,
            sessionStartMs: session.sessionStartMs,
            models: session.models,
            ...totals
        });
    }
    summaries.sort((a, b) => a.sessionStartMs - b.sessionStartMs ||
        a.sessionId.localeCompare(b.sessionId));
    return summaries;
}
|
|
56
|
+
/**
 * Converts a session summary into a per-session totals row, keyed by the
 * calendar date on which the session started.
 *
 * @param {Object} summary - a summary produced by buildSessionSummaries.
 * @returns {Object} row with date, sessionId, models and per-type totals.
 */
function toSessionTotals(summary) {
    const { sessionId, models, input, output, thought, cache, tool } = summary;
    return {
        date: (0, time_1.toDateKey)(summary.sessionStartMs),
        sessionId,
        models,
        input,
        output,
        thought,
        cache,
        tool
    };
}
|
|
68
|
+
/**
 * Rolls session summaries up into one row per calendar day.
 *
 * Each session is attributed to the day it started; model sets are
 * unioned and per-type token totals are summed.
 *
 * @param {Array} summaries - output of buildSessionSummaries.
 * @returns {Array} day rows sorted ascending by date key.
 */
function aggregateSessionsToDay(summaries) {
    const byDate = new Map();
    for (const summary of summaries) {
        const dateKey = (0, time_1.toDateKey)(summary.sessionStartMs);
        let dayRow = byDate.get(dateKey);
        if (!dayRow) {
            dayRow = {
                date: dateKey,
                models: new Set(),
                input: 0,
                output: 0,
                thought: 0,
                cache: 0,
                tool: 0
            };
            byDate.set(dateKey, dayRow);
        }
        summary.models.forEach((model) => dayRow.models.add(model));
        dayRow.input += summary.input;
        dayRow.output += summary.output;
        dayRow.thought += summary.thought;
        dayRow.cache += summary.cache;
        dayRow.tool += summary.tool;
    }
    return Array.from(byDate.values()).sort((a, b) => a.date.localeCompare(b.date));
}
|
|
97
|
+
/**
 * Sums raw metric points into one row per calendar day.
 *
 * NOTE(review): values are added directly, unlike the session path which
 * keeps only the latest point of each cumulative counter — confirm
 * callers pass delta (non-cumulative) points here.
 *
 * @param {Array} points - metric points with timestampMs/model/type/value.
 * @returns {Array} day rows sorted ascending by date key.
 */
function aggregateByDay(points) {
    const byDate = new Map();
    for (const point of points) {
        const dateKey = (0, time_1.toDateKey)(point.timestampMs);
        let dayRow = byDate.get(dateKey);
        if (!dayRow) {
            dayRow = {
                date: dateKey,
                models: new Set(),
                input: 0,
                output: 0,
                thought: 0,
                cache: 0,
                tool: 0
            };
            byDate.set(dateKey, dayRow);
        }
        dayRow.models.add(point.model);
        addTypeValue(dayRow, point.type, point.value);
    }
    return Array.from(byDate.values()).sort((a, b) => a.date.localeCompare(b.date));
}
|
|
121
|
+
/**
 * Sums raw metric points into one row per (day, session) pair; points
 * without a session id are skipped.
 *
 * @param {Array} points - metric points with timestampMs/sessionId/model/type/value.
 * @returns {Array} rows sorted by date key, then session id.
 */
function aggregateBySession(points) {
    const byDaySession = new Map();
    for (const point of points) {
        if (!point.sessionId)
            continue;
        const dateKey = (0, time_1.toDateKey)(point.timestampMs);
        const rowKey = `${dateKey}||${point.sessionId}`;
        let row = byDaySession.get(rowKey);
        if (!row) {
            row = {
                date: dateKey,
                sessionId: point.sessionId,
                models: new Set(),
                input: 0,
                output: 0,
                thought: 0,
                cache: 0,
                tool: 0
            };
            byDaySession.set(rowKey, row);
        }
        row.models.add(point.model);
        addTypeValue(row, point.type, point.value);
    }
    return Array.from(byDaySession.values()).sort((a, b) => a.date.localeCompare(b.date) ||
        a.sessionId.localeCompare(b.sessionId));
}
|
|
153
|
+
/**
 * Adds `value` onto the counter named by `type` ("input", "output",
 * "thought", "cache" or "tool") on `target`; any other type is ignored.
 *
 * @param {Object} target - object carrying the five numeric counters.
 * @param {string} type - token type attribute from the telemetry point.
 * @param {number} value - amount to add.
 */
function addTypeValue(target, type, value) {
    switch (type) {
        case "input":
        case "output":
        case "thought":
        case "cache":
        case "tool":
            target[type] += value;
            break;
        default:
            // Unknown token types are deliberately dropped.
            break;
    }
}
|
|
165
|
+
/**
 * Totals all five token counters of a row.
 *
 * @param {Object} row - object with input/output/thought/cache/tool numbers.
 * @returns {number} the grand total.
 */
function sumAll(row) {
    return ["input", "output", "thought", "cache", "tool"]
        .reduce((total, key) => total + row[key], 0);
}
|
|
168
|
+
/**
 * Totals all five token counters of a totals object.
 *
 * @param {Object} totals - object with input/output/thought/cache/tool numbers.
 * @returns {number} the grand total.
 */
function sumTotals(totals) {
    const { input, output, thought, cache, tool } = totals;
    return input + output + thought + cache + tool;
}
|
|
@@ -0,0 +1,63 @@
|
|
|
1
|
+
"use strict";
|
|
2
|
+
var __importDefault = (this && this.__importDefault) || function (mod) {
|
|
3
|
+
return (mod && mod.__esModule) ? mod : { "default": mod };
|
|
4
|
+
};
|
|
5
|
+
Object.defineProperty(exports, "__esModule", { value: true });
|
|
6
|
+
exports.runTrim = runTrim;
|
|
7
|
+
const fs_1 = __importDefault(require("fs"));
|
|
8
|
+
const os_1 = __importDefault(require("os"));
|
|
9
|
+
const path_1 = __importDefault(require("path"));
|
|
10
|
+
const messages_1 = require("../../messages");
|
|
11
|
+
const split_1 = require("../logs/split");
|
|
12
|
+
const metrics_1 = require("../metrics");
|
|
13
|
+
/**
 * Rewrites ~/.gemini/telemetry.log so it only contains token-usage
 * metric data, shrinking the file while keeping statistics intact.
 *
 * Flow: back up the log, keep the newest data point per
 * (session, model, type) triple (counters are cumulative, so only the
 * latest point matters), write one minimal metric block back, and drop
 * the backup only after the write has succeeded.
 *
 * Exits the process with code 1 when the log file is missing.
 */
async function runTrim() {
    const geminiDir = path_1.default.join(os_1.default.homedir(), ".gemini");
    const logPath = path_1.default.join(geminiDir, "telemetry.log");
    const backupPath = path_1.default.join(geminiDir, "telemetry.log.bak");
    let logExists = false;
    try {
        logExists = (await fs_1.default.promises.stat(logPath)).isFile();
    }
    catch {
        logExists = false;
    }
    if (!logExists) {
        console.error(messages_1.MESSAGES.NO_TELEMETRY);
        process.exit(1);
    }
    // Backup survives until the trimmed file has been written successfully.
    await fs_1.default.promises.copyFile(logPath, backupPath);
    const content = await fs_1.default.promises.readFile(logPath, "utf8");
    const lastPoints = new Map();
    for (const objText of (0, split_1.splitJsonObjects)(content)) {
        let obj;
        try {
            obj = JSON.parse(objText);
        }
        catch {
            continue; // skip partially written / corrupt records
        }
        for (const dp of (0, metrics_1.extractTokenUsageDataPoints)(obj)) {
            const time = (0, metrics_1.readTimestampMs)(dp);
            if (time === null)
                continue;
            const sessionId = (0, metrics_1.readSessionId)(dp) || "no-session";
            const model = (0, metrics_1.readAttributeValueFromDataPoint)(dp, "model") || "unknown";
            const type = (0, metrics_1.readAttributeValueFromDataPoint)(dp, "type") || "unknown";
            const key = `${sessionId}||${model}||${type}`;
            const prev = lastPoints.get(key);
            if (!prev || time >= prev.timestampMs) {
                lastPoints.set(key, { timestampMs: time, dataPoint: dp });
            }
        }
    }
    const minimalBlock = {
        descriptor: { name: "gemini_cli.token.usage" },
        dataPoints: Array.from(lastPoints.values()).map((entry) => entry.dataPoint)
    };
    await fs_1.default.promises.writeFile(logPath, JSON.stringify(minimalBlock), "utf8");
    await fs_1.default.promises.unlink(backupPath);
    console.log(messages_1.MESSAGES.TRIM_DONE);
}
|
|
@@ -0,0 +1,43 @@
|
|
|
1
|
+
"use strict";
|
|
2
|
+
var __importDefault = (this && this.__importDefault) || function (mod) {
|
|
3
|
+
return (mod && mod.__esModule) ? mod : { "default": mod };
|
|
4
|
+
};
|
|
5
|
+
Object.defineProperty(exports, "__esModule", { value: true });
|
|
6
|
+
exports.findLogFiles = findLogFiles;
|
|
7
|
+
exports.parseLogFile = parseLogFile;
|
|
8
|
+
const fs_1 = __importDefault(require("fs"));
|
|
9
|
+
const os_1 = __importDefault(require("os"));
|
|
10
|
+
const path_1 = __importDefault(require("path"));
|
|
11
|
+
const metrics_1 = require("../metrics");
|
|
12
|
+
const split_1 = require("./split");
|
|
13
|
+
/**
 * Locates the Gemini CLI telemetry log.
 *
 * Only the default location (~/.gemini/telemetry.log) is checked; the
 * result is an array so more locations can be added later.
 *
 * @returns {Promise<string[]>} [logPath] when the file exists, else [].
 */
async function findLogFiles() {
    const logPath = path_1.default.join(os_1.default.homedir(), ".gemini", "telemetry.log");
    try {
        const stat = await fs_1.default.promises.stat(logPath);
        if (stat.isFile()) {
            return [logPath];
        }
    }
    catch {
        // Missing or unreadable file simply means "no logs".
    }
    return [];
}
|
|
25
|
+
/**
 * Parses a telemetry log file into normalized metric points.
 *
 * The file is a stream of concatenated JSON objects (not a JSON array),
 * so it is split structurally first; objects that fail to parse (e.g. a
 * truncated tail record) are skipped.
 *
 * @param {string} filePath - path to the telemetry log.
 * @returns {Promise<Array>} every token-usage metric point found.
 */
async function parseLogFile(filePath) {
    const content = await fs_1.default.promises.readFile(filePath, "utf8");
    const points = [];
    for (const objText of (0, split_1.splitJsonObjects)(content)) {
        let parsed;
        try {
            parsed = JSON.parse(objText);
        }
        catch {
            continue;
        }
        points.push(...(0, metrics_1.extractMetricPoints)(parsed));
    }
    return points;
}
|
|
@@ -0,0 +1,50 @@
|
|
|
1
|
+
"use strict";
|
|
2
|
+
Object.defineProperty(exports, "__esModule", { value: true });
|
|
3
|
+
exports.splitJsonObjects = splitJsonObjects;
|
|
4
|
+
/**
 * Splits a stream of concatenated top-level JSON objects into the
 * individual object source strings.
 *
 * A small structural scanner tracks brace depth while respecting string
 * literals and backslash escapes, so braces inside string values never
 * affect the nesting count. Text outside objects, stray close braces and
 * an unterminated trailing object are silently dropped.
 *
 * @param {string} input - raw text containing zero or more JSON objects.
 * @returns {string[]} one trimmed source string per complete object.
 */
function splitJsonObjects(input) {
    const objects = [];
    let braceDepth = 0;
    let withinString = false;
    let skipNext = false;
    let objectStart = -1;
    for (let index = 0; index < input.length; index += 1) {
        const char = input[index];
        if (withinString) {
            if (skipNext) {
                skipNext = false;
            }
            else if (char === "\\") {
                skipNext = true;
            }
            else if (char === "\"") {
                withinString = false;
            }
            continue;
        }
        switch (char) {
            case "\"":
                withinString = true;
                break;
            case "{":
                if (braceDepth === 0)
                    objectStart = index;
                braceDepth += 1;
                break;
            case "}":
                if (braceDepth === 0)
                    break; // stray close brace outside any object
                braceDepth -= 1;
                if (braceDepth === 0 && objectStart >= 0) {
                    const text = input.slice(objectStart, index + 1).trim();
                    if (text)
                        objects.push(text);
                    objectStart = -1;
                }
                break;
            default:
                break;
        }
    }
    return objects;
}
|
|
@@ -0,0 +1,263 @@
|
|
|
1
|
+
"use strict";
|
|
2
|
+
Object.defineProperty(exports, "__esModule", { value: true });
|
|
3
|
+
exports.extractMetricPoints = extractMetricPoints;
|
|
4
|
+
exports.extractTokenMetricBlocks = extractTokenMetricBlocks;
|
|
5
|
+
exports.extractTokenUsageDataPoints = extractTokenUsageDataPoints;
|
|
6
|
+
exports.readAttributeValueFromDataPoint = readAttributeValueFromDataPoint;
|
|
7
|
+
exports.readSessionId = readSessionId;
|
|
8
|
+
exports.buildPointFromDataPoint = buildPointFromDataPoint;
|
|
9
|
+
exports.readNumberValue = readNumberValue;
|
|
10
|
+
exports.readTimestampMs = readTimestampMs;
|
|
11
|
+
exports.readAttributes = readAttributes;
|
|
12
|
+
exports.readAttributeValue = readAttributeValue;
|
|
13
|
+
const METRIC_NAME = "gemini_cli.token.usage";
|
|
14
|
+
/**
 * Walks an arbitrarily nested parsed telemetry object and collects every
 * token-usage data point as a normalized metric point.
 *
 * The traversal is an explicit-stack DFS over all arrays and objects, so
 * the exact nesting shape does not matter; any node whose `name` or
 * `descriptor.name` equals the token-usage metric name contributes its
 * `dataPoints` array.
 *
 * @param {*} root - parsed JSON of one telemetry record.
 * @returns {Array} normalized points (see buildPointFromDataPoint).
 */
function extractMetricPoints(root) {
    // True when the node carries the token-usage metric, whether the name
    // sits on the node itself or on its descriptor.
    const matchesMetric = (node) => node.name === METRIC_NAME ||
        (node.descriptor && typeof node.descriptor === "object" &&
            node.descriptor.name === METRIC_NAME);
    const points = [];
    const pending = [root];
    while (pending.length > 0) {
        const node = pending.pop();
        if (!node)
            continue;
        if (Array.isArray(node)) {
            pending.push(...node);
            continue;
        }
        if (typeof node !== "object")
            continue;
        if (matchesMetric(node) && Array.isArray(node.dataPoints)) {
            for (const dp of node.dataPoints) {
                const point = buildPointFromDataPoint(dp);
                if (point)
                    points.push(point);
            }
        }
        // Keep descending: metric blocks can appear at any depth.
        pending.push(...Object.values(node));
    }
    return points;
}
|
|
51
|
+
/**
 * Walks an arbitrarily nested parsed telemetry object and collects each
 * token-usage metric block, re-shaped as a minimal
 * { descriptor, dataPoints } pair (dataPoints are kept by reference).
 *
 * @param {*} root - parsed JSON of one telemetry record.
 * @returns {Array} minimal metric blocks for the token-usage metric.
 */
function extractTokenMetricBlocks(root) {
    // True when the node carries the token-usage metric, whether the name
    // sits on the node itself or on its descriptor.
    const matchesMetric = (node) => node.name === METRIC_NAME ||
        (node.descriptor && typeof node.descriptor === "object" &&
            node.descriptor.name === METRIC_NAME);
    const blocks = [];
    const pending = [root];
    while (pending.length > 0) {
        const node = pending.pop();
        if (!node)
            continue;
        if (Array.isArray(node)) {
            pending.push(...node);
            continue;
        }
        if (typeof node !== "object")
            continue;
        if (matchesMetric(node) && Array.isArray(node.dataPoints)) {
            blocks.push({
                descriptor: { name: METRIC_NAME },
                dataPoints: node.dataPoints
            });
        }
        // Keep descending: metric blocks can appear at any depth.
        pending.push(...Object.values(node));
    }
    return blocks;
}
|
|
87
|
+
/**
 * Walks an arbitrarily nested parsed telemetry object and collects the
 * raw token-usage data point objects, unmodified.
 *
 * @param {*} root - parsed JSON of one telemetry record.
 * @returns {Array} raw data point objects for the token-usage metric.
 */
function extractTokenUsageDataPoints(root) {
    // True when the node carries the token-usage metric, whether the name
    // sits on the node itself or on its descriptor.
    const matchesMetric = (node) => node.name === METRIC_NAME ||
        (node.descriptor && typeof node.descriptor === "object" &&
            node.descriptor.name === METRIC_NAME);
    const points = [];
    const pending = [root];
    while (pending.length > 0) {
        const node = pending.pop();
        if (!node)
            continue;
        if (Array.isArray(node)) {
            pending.push(...node);
            continue;
        }
        if (typeof node !== "object")
            continue;
        if (matchesMetric(node) && Array.isArray(node.dataPoints)) {
            for (const dp of node.dataPoints) {
                if (dp && typeof dp === "object")
                    points.push(dp);
            }
        }
        // Keep descending: metric blocks can appear at any depth.
        pending.push(...Object.values(node));
    }
    return points;
}
|
|
123
|
+
/**
 * Reads a single string attribute (e.g. "model" or "type") from a raw
 * data point; returns null when the attribute is absent or empty.
 */
function readAttributeValueFromDataPoint(dp, key) {
    return readAttributes(dp.attributes)[key] || null;
}
|
|
127
|
+
/**
 * Extracts the session identifier from a data point, accepting both the
 * dotted ("session.id") and underscored ("session_id") attribute keys.
 */
function readSessionId(dp) {
    const attrs = readAttributes(dp.attributes);
    for (const key of ["session.id", "session_id"]) {
        if (attrs[key])
            return attrs[key];
    }
    return null;
}
|
|
131
|
+
/**
 * Normalizes one raw OTLP data point into a flat metric point.
 *
 * Returns null when the numeric value or the timestamp cannot be read;
 * model and type default to "unknown" and sessionId to null when the
 * corresponding attributes are missing.
 *
 * @param {*} dataPoint - raw data point object.
 * @returns {Object|null} { timestampMs, model, type, sessionId, value }.
 */
function buildPointFromDataPoint(dataPoint) {
    if (!dataPoint || typeof dataPoint !== "object")
        return null;
    const value = readNumberValue(dataPoint);
    const timestampMs = readTimestampMs(dataPoint);
    if (value === null || timestampMs === null)
        return null;
    const attrs = readAttributes(dataPoint.attributes);
    return {
        timestampMs,
        model: attrs.model || "unknown",
        type: attrs.type || "unknown",
        sessionId: attrs["session.id"] || attrs["session_id"] || null,
        value
    };
}
|
|
153
|
+
/**
 * Reads the numeric value of an OTLP data point.
 *
 * Checks `asInt`, `asDouble` and `value` in that order. Plain numbers
 * win; failing that, string-encoded numbers are parsed (OTLP/JSON
 * serializes 64-bit counters as decimal strings). Previously only a
 * string `asInt` was parsed, so string-encoded `asDouble`/`value`
 * fields were silently dropped, and an empty string coerced to 0.
 *
 * @param {Object} dp - raw data point.
 * @returns {number|null} the numeric value, or null when unreadable.
 */
function readNumberValue(dp) {
    const candidates = [dp.asInt, dp.asDouble, dp.value];
    for (const candidate of candidates) {
        if (typeof candidate === "number")
            return candidate;
    }
    for (const candidate of candidates) {
        // Guard against "" which Number() would coerce to 0.
        if (typeof candidate === "string" && candidate.trim() !== "") {
            const parsed = Number(candidate);
            if (!Number.isNaN(parsed))
                return parsed;
        }
    }
    return null;
}
|
|
170
|
+
/**
 * Reads a data point's timestamp and normalizes it to Unix epoch
 * milliseconds.
 *
 * Supported inputs, in priority order:
 *  - `endTime` / `startTime` as hrtime-style `[seconds, nanoseconds]` pairs;
 *  - `timeUnixNano`, `endTimeUnixNano`, `timeUnix` or `time` as a number
 *    or numeric string, whose unit (ns / us / ms / s) is inferred from
 *    magnitude.
 *
 * Unit inference: a present-day epoch is ~1.7e18 ns, ~1.7e15 us,
 * ~1.7e12 ms and ~1.7e9 s, so cutoffs at 1e17 / 1e14 / 1e11 separate the
 * scales unambiguously for any date from 1973 onward. (The previous
 * cutoffs of 1e15 / 1e12 routed microsecond and millisecond inputs into
 * the wrong branch, shifting results by a factor of 1000, and left
 * second-scale inputs unscaled.)
 *
 * @param {Object} dp - raw data point.
 * @returns {number|null} epoch milliseconds, or null when unreadable.
 */
function readTimestampMs(dp) {
    // hrtime-style [sec, nsec] pairs take precedence over scalar fields.
    for (const pair of [dp.endTime, dp.startTime]) {
        if (Array.isArray(pair) && pair.length >= 2) {
            const sec = Number(pair[0]);
            const nsec = Number(pair[1]);
            if (!Number.isNaN(sec) && !Number.isNaN(nsec)) {
                return sec * 1000 + Math.floor(nsec / 1e6);
            }
        }
    }
    let candidate = null;
    for (const field of [dp.timeUnixNano, dp.endTimeUnixNano, dp.timeUnix, dp.time]) {
        if (typeof field === "string" || typeof field === "number") {
            candidate = field;
            break;
        }
    }
    if (candidate === null)
        return null;
    const num = Number(candidate);
    if (Number.isNaN(num))
        return null;
    if (num > 1e17)
        return Math.floor(num / 1e6); // nanoseconds
    if (num > 1e14)
        return Math.floor(num / 1e3); // microseconds
    if (num > 1e11)
        return Math.floor(num); // already milliseconds
    return Math.floor(num * 1000); // seconds
}
|
|
213
|
+
/**
 * Normalizes OTLP attributes into a flat { key: string } record.
 *
 * Accepts both representations found in telemetry logs: an array of
 * { key, value } pairs (OTLP/JSON) and a plain key -> value object.
 * Entries without a non-empty string key, and values that cannot be
 * converted to a string, are dropped.
 *
 * @param {*} attrs - raw attributes in either shape, or null/undefined.
 * @returns {Object} string-valued attribute record (possibly empty).
 */
function readAttributes(attrs) {
    const result = {};
    if (!attrs)
        return result;
    if (Array.isArray(attrs)) {
        for (const item of attrs) {
            if (!item || typeof item !== "object")
                continue;
            const key = item.key;
            if (typeof key !== "string" || !key)
                continue;
            const converted = readAttributeValue(item.value);
            if (converted !== null)
                result[key] = converted;
        }
        return result;
    }
    if (typeof attrs !== "object")
        return result;
    for (const [key, value] of Object.entries(attrs)) {
        const converted = readAttributeValue(value);
        if (converted !== null)
            result[key] = converted;
    }
    return result;
}
|
|
243
|
+
/**
 * Coerces one OTLP attribute value into a string.
 *
 * Handles bare strings and numbers as well as the wrapped OTLP forms
 * ({ stringValue }, { intValue }, { doubleValue }); any other shape
 * yields null.
 *
 * @param {*} value - raw attribute value.
 * @returns {string|null} string form of the value, or null.
 */
function readAttributeValue(value) {
    if (typeof value === "string")
        return value;
    if (typeof value === "number")
        return String(value);
    if (!value || typeof value !== "object")
        return null;
    const { stringValue, intValue, doubleValue } = value;
    if (typeof stringValue === "string")
        return stringValue;
    for (const wrapped of [intValue, doubleValue]) {
        if (typeof wrapped === "string" || typeof wrapped === "number")
            return String(wrapped);
    }
    return null;
}
|