token-tracker 0.2.2__tar.gz → 0.2.4__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- {token_tracker-0.2.2 → token_tracker-0.2.4}/PKG-INFO +1 -1
- {token_tracker-0.2.2 → token_tracker-0.2.4}/pyproject.toml +1 -1
- token_tracker-0.2.4/src/analyzer/aggregator.py +133 -0
- {token_tracker-0.2.2 → token_tracker-0.2.4}/src/cli.py +80 -15
- {token_tracker-0.2.2 → token_tracker-0.2.4}/src/ui/tables.py +46 -24
- {token_tracker-0.2.2 → token_tracker-0.2.4}/token_tracker.egg-info/PKG-INFO +1 -1
- token_tracker-0.2.2/src/analyzer/aggregator.py +0 -108
- {token_tracker-0.2.2 → token_tracker-0.2.4}/README.md +0 -0
- {token_tracker-0.2.2 → token_tracker-0.2.4}/setup.cfg +0 -0
- {token_tracker-0.2.2 → token_tracker-0.2.4}/src/__init__.py +0 -0
- {token_tracker-0.2.2 → token_tracker-0.2.4}/src/adapters/__init__.py +0 -0
- {token_tracker-0.2.2 → token_tracker-0.2.4}/src/adapters/claude.py +0 -0
- {token_tracker-0.2.2 → token_tracker-0.2.4}/src/adapters/codex.py +0 -0
- {token_tracker-0.2.2 → token_tracker-0.2.4}/src/adapters/rate_limits.py +0 -0
- {token_tracker-0.2.2 → token_tracker-0.2.4}/src/adapters/registry.py +0 -0
- {token_tracker-0.2.2 → token_tracker-0.2.4}/src/adapters/types.py +0 -0
- {token_tracker-0.2.2 → token_tracker-0.2.4}/src/analyzer/__init__.py +0 -0
- {token_tracker-0.2.2 → token_tracker-0.2.4}/src/analyzer/blocks.py +0 -0
- {token_tracker-0.2.2 → token_tracker-0.2.4}/src/analyzer/cost.py +0 -0
- {token_tracker-0.2.2 → token_tracker-0.2.4}/src/hooks.py +0 -0
- {token_tracker-0.2.2 → token_tracker-0.2.4}/src/ui/__init__.py +0 -0
- {token_tracker-0.2.2 → token_tracker-0.2.4}/token_tracker.egg-info/SOURCES.txt +0 -0
- {token_tracker-0.2.2 → token_tracker-0.2.4}/token_tracker.egg-info/dependency_links.txt +0 -0
- {token_tracker-0.2.2 → token_tracker-0.2.4}/token_tracker.egg-info/entry_points.txt +0 -0
- {token_tracker-0.2.2 → token_tracker-0.2.4}/token_tracker.egg-info/requires.txt +0 -0
- {token_tracker-0.2.2 → token_tracker-0.2.4}/token_tracker.egg-info/top_level.txt +0 -0
|
@@ -0,0 +1,133 @@
|
|
|
1
|
+
from collections import defaultdict
|
|
2
|
+
from datetime import datetime
|
|
3
|
+
|
|
4
|
+
from ..adapters.types import DailyStats, MonthlyStats, SessionStats, UsageEntry, WeeklyStats
|
|
5
|
+
from .cost import calculate_cost
|
|
6
|
+
|
|
7
|
+
|
|
8
|
+
def aggregate_daily(entries: list[UsageEntry]) -> list[DailyStats]:
    """Group usage entries by calendar day and total their token/cost fields.

    Returns one DailyStats per day, sorted ascending by date. Each day's
    session_count is the number of distinct session ids observed that day.
    """
    daily: dict[str, DailyStats] = {}
    seen_sessions: dict[str, set[str]] = defaultdict(set)

    for entry in entries:
        day = entry.timestamp.strftime("%Y-%m-%d")
        stats = daily.setdefault(day, DailyStats(date=day))
        stats.input_tokens += entry.input_tokens
        stats.output_tokens += entry.output_tokens
        stats.cache_creation_tokens += entry.cache_creation_tokens
        stats.cache_read_tokens += entry.cache_read_tokens
        stats.total_tokens += entry.total_tokens
        stats.cost_usd += calculate_cost(entry)
        stats.message_count += entry.message_count
        # Per-model token breakdown for the day.
        stats.models[entry.model] = stats.models.get(entry.model, 0) + entry.total_tokens
        seen_sessions[day].add(entry.session_id)

    # Distinct sessions can only be counted after the full pass.
    for day, session_ids in seen_sessions.items():
        daily[day].session_count = len(session_ids)

    return sorted(daily.values(), key=lambda d: d.date)
|
|
32
|
+
|
|
33
|
+
|
|
34
|
+
def aggregate_monthly(entries: list[UsageEntry]) -> list[MonthlyStats]:
    """Group usage entries by calendar month and total their token/cost fields.

    Returns one MonthlyStats per month, sorted ascending by month key
    ("%Y-%m"). Each month's session_count is the number of distinct
    session ids observed in that month.
    """
    monthly: dict[str, MonthlyStats] = {}
    seen_sessions: dict[str, set[str]] = defaultdict(set)

    for entry in entries:
        month = entry.timestamp.strftime("%Y-%m")
        stats = monthly.setdefault(month, MonthlyStats(month=month))
        stats.input_tokens += entry.input_tokens
        stats.output_tokens += entry.output_tokens
        stats.cache_creation_tokens += entry.cache_creation_tokens
        stats.cache_read_tokens += entry.cache_read_tokens
        stats.total_tokens += entry.total_tokens
        stats.cost_usd += calculate_cost(entry)
        stats.message_count += entry.message_count
        # Per-model token breakdown for the month.
        stats.models[entry.model] = stats.models.get(entry.model, 0) + entry.total_tokens
        seen_sessions[month].add(entry.session_id)

    # Distinct sessions can only be counted after the full pass.
    for month, session_ids in seen_sessions.items():
        monthly[month].session_count = len(session_ids)

    return sorted(monthly.values(), key=lambda m: m.month)
|
|
58
|
+
|
|
59
|
+
|
|
60
|
+
def aggregate_weekly(entries: list[UsageEntry]) -> list[WeeklyStats]:
    """Group usage entries by calendar week (Monday-Sunday) and total them.

    The week key is the ISO date of the week's Monday; week_start/week_end
    are "%m-%d" display strings. Returns WeeklyStats sorted ascending by
    week key, with session_count = distinct session ids in that week.
    """
    from datetime import timedelta

    weekly: dict[str, WeeklyStats] = {}
    seen_sessions: dict[str, set[str]] = defaultdict(set)

    for entry in entries:
        # weekday() is 0 for Monday, so subtracting it lands on the week start.
        week_start = entry.timestamp.date() - timedelta(days=entry.timestamp.weekday())
        week_end = week_start + timedelta(days=6)
        key = week_start.isoformat()
        if key not in weekly:
            weekly[key] = WeeklyStats(
                week=key,
                week_start=week_start.strftime("%m-%d"),
                week_end=week_end.strftime("%m-%d"),
            )
        stats = weekly[key]
        stats.input_tokens += entry.input_tokens
        stats.output_tokens += entry.output_tokens
        stats.cache_creation_tokens += entry.cache_creation_tokens
        stats.cache_read_tokens += entry.cache_read_tokens
        stats.total_tokens += entry.total_tokens
        stats.cost_usd += calculate_cost(entry)
        stats.message_count += entry.message_count
        # Per-model token breakdown for the week.
        stats.models[entry.model] = stats.models.get(entry.model, 0) + entry.total_tokens
        seen_sessions[key].add(entry.session_id)

    # Distinct sessions can only be counted after the full pass.
    for key, session_ids in seen_sessions.items():
        weekly[key].session_count = len(session_ids)

    return sorted(weekly.values(), key=lambda w: w.week)
|
|
92
|
+
|
|
93
|
+
|
|
94
|
+
def aggregate_sessions(entries: list[UsageEntry]) -> list[SessionStats]:
    """Build one SessionStats per session id, newest session first.

    Each session's model is the model that consumed the most tokens in it
    ("unknown" when no model totals exist); duration_minutes spans the
    first to last entry timestamp, rounded to one decimal place.
    """
    grouped: dict[str, list[UsageEntry]] = defaultdict(list)
    for entry in entries:
        grouped[entry.session_id].append(entry)

    result: list[SessionStats] = []
    for session_id, items in grouped.items():
        items.sort(key=lambda e: e.timestamp)
        first, last = items[0], items[-1]
        minutes = (last.timestamp - first.timestamp).total_seconds() / 60

        # Pick the dominant model by total tokens consumed.
        model_tokens: dict[str, int] = defaultdict(int)
        for entry in items:
            model_tokens[entry.model] += entry.total_tokens
        primary_model = max(model_tokens, key=model_tokens.get) if model_tokens else "unknown"

        stats = SessionStats(
            session_id=session_id,
            project=first.project,
            model=primary_model,
            start_time=first.timestamp,
            end_time=last.timestamp,
            duration_minutes=round(minutes, 1),
        )
        for entry in items:
            stats.input_tokens += entry.input_tokens
            stats.output_tokens += entry.output_tokens
            stats.cache_creation_tokens += entry.cache_creation_tokens
            stats.cache_read_tokens += entry.cache_read_tokens
            stats.total_tokens += entry.total_tokens
            stats.cost_usd += calculate_cost(entry)
            stats.message_count += entry.message_count

        result.append(stats)

    result.sort(key=lambda s: s.start_time, reverse=True)
    return result
|
|
@@ -1,5 +1,4 @@
|
|
|
1
1
|
import sys
|
|
2
|
-
from datetime import datetime, timedelta, timezone
|
|
3
2
|
|
|
4
3
|
from .adapters import claude, codex
|
|
5
4
|
from .adapters.rate_limits import load_rate_limits as load_claude_rate_limits
|
|
@@ -49,6 +48,7 @@ def _build_agent_data(agent_id: str, agent_name: str) -> dict | None:
|
|
|
49
48
|
weekly = aggregate_weekly(entries)
|
|
50
49
|
monthly = aggregate_monthly(entries)
|
|
51
50
|
sessions = aggregate_sessions(entries)
|
|
51
|
+
from datetime import datetime, timezone, timedelta
|
|
52
52
|
cutoff = datetime.now(timezone.utc) - timedelta(hours=48)
|
|
53
53
|
recent = [e for e in entries if e.timestamp >= cutoff]
|
|
54
54
|
blocks = analyze_blocks(recent)
|
|
@@ -64,48 +64,96 @@ def _build_agent_data(agent_id: str, agent_name: str) -> dict | None:
|
|
|
64
64
|
)
|
|
65
65
|
|
|
66
66
|
|
|
67
|
+
def _initial_agent_index(agents) -> int:
|
|
68
|
+
import os
|
|
69
|
+
|
|
70
|
+
preferred = None
|
|
71
|
+
if os.environ.get("CODEX_THREAD_ID") or os.environ.get("CODEX_SANDBOX"):
|
|
72
|
+
preferred = "codex"
|
|
73
|
+
elif os.environ.get("CLAUDE_CONFIG_DIR") or os.environ.get("CLAUDECODE"):
|
|
74
|
+
preferred = "claude-code"
|
|
75
|
+
|
|
76
|
+
if preferred:
|
|
77
|
+
for i, agent in enumerate(agents):
|
|
78
|
+
if agent.id == preferred:
|
|
79
|
+
return i
|
|
80
|
+
return 0
|
|
81
|
+
|
|
82
|
+
|
|
83
|
+
def _fit_screen(text: str, height: int, scroll_offset: int) -> tuple[str, int]:
|
|
84
|
+
lines = text.splitlines()
|
|
85
|
+
if not lines:
|
|
86
|
+
return "", 0
|
|
87
|
+
max_body = max(1, height - 1)
|
|
88
|
+
max_scroll = max(0, len(lines) - max_body)
|
|
89
|
+
scroll_offset = max(0, min(scroll_offset, max_scroll))
|
|
90
|
+
visible = lines[:1] + lines[1 + scroll_offset:1 + scroll_offset + max_body - 1]
|
|
91
|
+
return "\n".join(visible), max_scroll
|
|
92
|
+
|
|
93
|
+
|
|
67
94
|
def _show_interactive_dashboard(agents):
|
|
68
95
|
import tty
|
|
69
96
|
import termios
|
|
97
|
+
import shutil
|
|
70
98
|
from io import StringIO
|
|
71
99
|
from rich.console import Console as RichConsole
|
|
72
100
|
import src.ui.tables as _tables
|
|
73
101
|
|
|
74
102
|
agent_names = [a.name for a in agents]
|
|
75
|
-
|
|
76
|
-
|
|
77
|
-
|
|
78
|
-
current = 0
|
|
103
|
+
current = _initial_agent_index(agents)
|
|
104
|
+
scroll_offset = 0
|
|
79
105
|
orig = _tables.console
|
|
80
106
|
|
|
81
|
-
sys.stdout.write("\033[?1049h\033[?25l")
|
|
82
|
-
|
|
107
|
+
sys.stdout.write("\033[?1049h\033[?7l\033[2J\033[3J\033[H\033[?25l")
|
|
108
|
+
cache = {}
|
|
109
|
+
|
|
83
110
|
try:
|
|
84
111
|
while True:
|
|
112
|
+
agent = agents[current]
|
|
113
|
+
if agent.id not in cache:
|
|
114
|
+
sys.stdout.write("\033[2J\033[3J\033[H\033[2m加载数据...\033[0m")
|
|
115
|
+
sys.stdout.flush()
|
|
116
|
+
cache[agent.id] = _build_agent_data(agent.id, agent.name)
|
|
117
|
+
|
|
118
|
+
size = shutil.get_terminal_size((80, 24))
|
|
119
|
+
width = size.columns
|
|
120
|
+
height = size.lines
|
|
121
|
+
|
|
85
122
|
buf = StringIO()
|
|
86
123
|
_tables.console = RichConsole(
|
|
87
|
-
file=buf, width=
|
|
124
|
+
file=buf, width=width, force_terminal=True,
|
|
88
125
|
)
|
|
89
126
|
render_tab_bar(agent_names, current)
|
|
90
|
-
data = cache[
|
|
127
|
+
data = cache[agent.id]
|
|
91
128
|
if data:
|
|
92
|
-
render_dashboard(**data)
|
|
129
|
+
render_dashboard(**data, session_limit=10, top_margin=False)
|
|
93
130
|
else:
|
|
94
131
|
_tables.console.print(f"[yellow]暂无数据[/yellow]")
|
|
95
132
|
_tables.console = orig
|
|
96
133
|
|
|
97
|
-
|
|
134
|
+
screen, max_scroll = _fit_screen(buf.getvalue(), height, scroll_offset)
|
|
135
|
+
sys.stdout.write("\033[2J\033[3J\033[H" + screen)
|
|
98
136
|
sys.stdout.flush()
|
|
99
137
|
|
|
100
138
|
key = _read_key(tty, termios)
|
|
101
139
|
if key == "left":
|
|
102
140
|
current = (current - 1) % len(agents)
|
|
141
|
+
scroll_offset = 0
|
|
103
142
|
elif key == "right":
|
|
104
143
|
current = (current + 1) % len(agents)
|
|
144
|
+
scroll_offset = 0
|
|
145
|
+
elif key == "up":
|
|
146
|
+
scroll_offset = max(0, scroll_offset - 1)
|
|
147
|
+
elif key == "down":
|
|
148
|
+
scroll_offset = min(max_scroll, scroll_offset + 1)
|
|
149
|
+
elif key == "page_up":
|
|
150
|
+
scroll_offset = max(0, scroll_offset - max(1, height - 3))
|
|
151
|
+
elif key == "page_down":
|
|
152
|
+
scroll_offset = min(max_scroll, scroll_offset + max(1, height - 3))
|
|
105
153
|
elif key == "quit":
|
|
106
154
|
break
|
|
107
155
|
finally:
|
|
108
|
-
sys.stdout.write("\033[?25h\033[?1049l")
|
|
156
|
+
sys.stdout.write("\033[?7h\033[?25h\033[?1049l")
|
|
109
157
|
sys.stdout.flush()
|
|
110
158
|
_tables.console = orig
|
|
111
159
|
|
|
@@ -128,11 +176,27 @@ def _read_key(tty, termios):
|
|
|
128
176
|
return "left"
|
|
129
177
|
if ch3 == b"C":
|
|
130
178
|
return "right"
|
|
179
|
+
if ch3 == b"A":
|
|
180
|
+
return "up"
|
|
181
|
+
if ch3 == b"B":
|
|
182
|
+
return "down"
|
|
183
|
+
if ch3 in (b"5", b"6"):
|
|
184
|
+
if select.select([fd], [], [], 0.05)[0]:
|
|
185
|
+
_os.read(fd, 1)
|
|
186
|
+
return "page_up" if ch3 == b"5" else "page_down"
|
|
131
187
|
return "other"
|
|
132
|
-
if ch
|
|
188
|
+
if ch == b"h":
|
|
133
189
|
return "left"
|
|
134
|
-
if ch
|
|
190
|
+
if ch == b"l":
|
|
135
191
|
return "right"
|
|
192
|
+
if ch == b"k":
|
|
193
|
+
return "up"
|
|
194
|
+
if ch == b"j":
|
|
195
|
+
return "down"
|
|
196
|
+
if ch == b"b":
|
|
197
|
+
return "page_up"
|
|
198
|
+
if ch == b"f":
|
|
199
|
+
return "page_down"
|
|
136
200
|
if ch in (b"q", b"Q", b"\x03"):
|
|
137
201
|
return "quit"
|
|
138
202
|
return "other"
|
|
@@ -166,7 +230,8 @@ def main():
|
|
|
166
230
|
|
|
167
231
|
agent_ids = {a.id for a in agents}
|
|
168
232
|
|
|
169
|
-
|
|
233
|
+
if command != "dashboard":
|
|
234
|
+
console.print(f"[dim]检测到: {', '.join(a.name + ' ✓' for a in agents)}[/dim]")
|
|
170
235
|
|
|
171
236
|
if not is_setup():
|
|
172
237
|
setup(auto=True)
|
|
@@ -117,14 +117,17 @@ def _fmt_duration(minutes: float) -> str:
|
|
|
117
117
|
return f"{int(minutes)}min"
|
|
118
118
|
|
|
119
119
|
|
|
120
|
-
def
|
|
121
|
-
|
|
120
|
+
def _display_width(s: str) -> int:
|
|
121
|
+
w = 0
|
|
122
|
+
for ch in s:
|
|
123
|
+
w += 2 if ord(ch) > 0x7F else 1
|
|
124
|
+
return w
|
|
122
125
|
|
|
123
126
|
|
|
124
127
|
def _append_bar(lines: Text, label: str, pct: float,
|
|
125
128
|
bar_width: int, suffix: str = "") -> None:
|
|
126
129
|
filled = int(pct / 100 * bar_width)
|
|
127
|
-
bar = "
|
|
130
|
+
bar = "█" * filled + "░" * (bar_width - filled)
|
|
128
131
|
bar_style = _S.bar_high if pct > 80 else _S.bar_mid if pct > 50 else _S.bar_low
|
|
129
132
|
lines.append(label, style=_S.dim)
|
|
130
133
|
lines.append(bar, style=bar_style)
|
|
@@ -135,7 +138,7 @@ def _append_bar(lines: Text, label: str, pct: float,
|
|
|
135
138
|
|
|
136
139
|
|
|
137
140
|
def _append_trend(lines: Text, current: float, previous: float) -> None:
|
|
138
|
-
arrow = "
|
|
141
|
+
arrow = "↑" if current >= previous else "↓"
|
|
139
142
|
style = _S.bad if current >= previous else _S.good
|
|
140
143
|
lines.append(f"{arrow}", style=style)
|
|
141
144
|
|
|
@@ -181,14 +184,21 @@ def _render_week_section(lines: Text, week: WeeklyStats,
|
|
|
181
184
|
def render_tab_bar(agent_names: list[str], current: int) -> None:
|
|
182
185
|
line = Text()
|
|
183
186
|
line.append(" ")
|
|
187
|
+
compact = console.width < 72
|
|
184
188
|
for i, name in enumerate(agent_names):
|
|
185
189
|
if i > 0:
|
|
186
|
-
line.append("
|
|
190
|
+
line.append(" │ ", style=_S.dim)
|
|
191
|
+
label = AGENT_SHORT.get("claude-code" if name == "Claude Code" else name.lower(), name)
|
|
192
|
+
if compact and name == "Claude Code":
|
|
193
|
+
label = "CC"
|
|
194
|
+
elif compact:
|
|
195
|
+
label = name[:8]
|
|
187
196
|
if i == current:
|
|
188
|
-
line.append(f" {
|
|
197
|
+
line.append(f" {label} ", style="bold reverse")
|
|
189
198
|
else:
|
|
190
|
-
line.append(f" {
|
|
191
|
-
|
|
199
|
+
line.append(f" {label} ", style=_S.dim)
|
|
200
|
+
help_text = " ←→ jk q/ESC退出" if compact else " ← → 切换 ↑ ↓ 滚动 q / ESC 退出"
|
|
201
|
+
line.append(help_text, style=_S.dim)
|
|
192
202
|
console.print(line)
|
|
193
203
|
|
|
194
204
|
|
|
@@ -197,9 +207,11 @@ def _project_short(project: str) -> str:
|
|
|
197
207
|
|
|
198
208
|
|
|
199
209
|
def _render_header(agents: list[str], total_tokens: int, total_cost: float,
|
|
200
|
-
total_sessions: int, total_messages: int, days: int
|
|
201
|
-
|
|
202
|
-
|
|
210
|
+
total_sessions: int, total_messages: int, days: int,
|
|
211
|
+
top_margin: bool = True) -> None:
|
|
212
|
+
agent_text = " ".join(f"[{_S.good}]●[/{_S.good}] {a}" for a in agents)
|
|
213
|
+
if top_margin:
|
|
214
|
+
console.print()
|
|
203
215
|
console.print(Panel(
|
|
204
216
|
f"[bold]Token Tracker[/bold] {agent_text}",
|
|
205
217
|
border_style="blue",
|
|
@@ -259,6 +271,8 @@ def render_dashboard(
|
|
|
259
271
|
rate_limits: RateLimits | None = None,
|
|
260
272
|
p90: P90Limits | None = None,
|
|
261
273
|
agents: list[str] | None = None,
|
|
274
|
+
session_limit: int = 10,
|
|
275
|
+
top_margin: bool = True,
|
|
262
276
|
) -> None:
|
|
263
277
|
if not daily_stats:
|
|
264
278
|
console.print(f"[{_S.warn}]暂无数据[/{_S.warn}]")
|
|
@@ -269,7 +283,15 @@ def render_dashboard(
|
|
|
269
283
|
total_msgs = sum(s.message_count for s in daily_stats)
|
|
270
284
|
total_sessions = sum(s.session_count for s in daily_stats)
|
|
271
285
|
|
|
272
|
-
_render_header(
|
|
286
|
+
_render_header(
|
|
287
|
+
agents or ["Claude Code"],
|
|
288
|
+
total_tokens,
|
|
289
|
+
total_cost,
|
|
290
|
+
total_sessions,
|
|
291
|
+
total_msgs,
|
|
292
|
+
len(daily_stats),
|
|
293
|
+
top_margin=top_margin,
|
|
294
|
+
)
|
|
273
295
|
|
|
274
296
|
# --- 本月概览 ---
|
|
275
297
|
if monthly_stats:
|
|
@@ -297,8 +319,8 @@ def render_dashboard(
|
|
|
297
319
|
_render_idle_panel(rate_limits, cur_week, last_week)
|
|
298
320
|
|
|
299
321
|
# --- 最近十条会话 ---
|
|
300
|
-
if sessions:
|
|
301
|
-
_render_recent_sessions(sessions[:
|
|
322
|
+
if sessions and session_limit > 0:
|
|
323
|
+
_render_recent_sessions(sessions[:session_limit])
|
|
302
324
|
|
|
303
325
|
console.print()
|
|
304
326
|
|
|
@@ -611,7 +633,7 @@ def _render_model_breakdown(stats: list[MonthlyStats]) -> None:
|
|
|
611
633
|
for model, tokens in sorted_models[:8]:
|
|
612
634
|
pct = tokens / total * 100 if total > 0 else 0
|
|
613
635
|
bar_width = int(pct / 100 * 20)
|
|
614
|
-
bar_text = "
|
|
636
|
+
bar_text = "█" * bar_width + "░" * (20 - bar_width)
|
|
615
637
|
|
|
616
638
|
if pct > 50:
|
|
617
639
|
bar_style = _S.token_bold
|
|
@@ -694,7 +716,7 @@ def _render_daily_panel(
|
|
|
694
716
|
week: WeeklyStats | None = None,
|
|
695
717
|
last_week: WeeklyStats | None = None,
|
|
696
718
|
) -> None:
|
|
697
|
-
|
|
719
|
+
bar_width = 20 if _width_mode() == "compact" else 30
|
|
698
720
|
lines = Text()
|
|
699
721
|
lines.append("当日数据面板 (P90)\n\n", style="bold")
|
|
700
722
|
|
|
@@ -707,9 +729,9 @@ def _render_daily_panel(
|
|
|
707
729
|
for label, current, limit, unit_fmt in p90_items:
|
|
708
730
|
pct = min(current / limit * 100, 100) if limit > 0 else 0
|
|
709
731
|
max_pct = max(max_pct, pct)
|
|
710
|
-
display_label = f" {label}" + " " * (14 -
|
|
732
|
+
display_label = f" {label}" + " " * (14 - _display_width(label))
|
|
711
733
|
suffix = f" {unit_fmt(current)} / {unit_fmt(limit)}"
|
|
712
|
-
_append_bar(lines, display_label, pct,
|
|
734
|
+
_append_bar(lines, display_label, pct, bar_width, suffix)
|
|
713
735
|
lines.append("\n")
|
|
714
736
|
|
|
715
737
|
lines.append(f" Token {_fmt_tokens(today.total_tokens)}", style=_S.token)
|
|
@@ -762,14 +784,14 @@ def _render_active_block(
|
|
|
762
784
|
remaining_h = remaining_min // 60
|
|
763
785
|
remaining_m = remaining_min % 60
|
|
764
786
|
|
|
765
|
-
|
|
787
|
+
bar_width = 20 if _width_mode() == "compact" else 30
|
|
766
788
|
|
|
767
789
|
lines = Text()
|
|
768
790
|
lines.append("当前 5h&7d 数据面板\n\n", style="bold")
|
|
769
791
|
|
|
770
792
|
if rate_limits and rate_limits.five_hour_pct is not None:
|
|
771
793
|
_render_rate_bar(lines, "5h 限额", rate_limits.five_hour_pct,
|
|
772
|
-
rate_limits.five_hour_resets_at,
|
|
794
|
+
rate_limits.five_hour_resets_at, bar_width)
|
|
773
795
|
|
|
774
796
|
lines.append(f" 时间 ", style=_S.dim)
|
|
775
797
|
lines.append(f"已用 {elapsed_min}min / 剩余 {remaining_h}h{remaining_m:02d}m\n", style=_S.dim)
|
|
@@ -788,7 +810,7 @@ def _render_active_block(
|
|
|
788
810
|
if rate_limits and rate_limits.seven_day_pct is not None:
|
|
789
811
|
lines.append("\n\n")
|
|
790
812
|
_render_rate_bar(lines, "7d 限额", rate_limits.seven_day_pct,
|
|
791
|
-
rate_limits.seven_day_resets_at,
|
|
813
|
+
rate_limits.seven_day_resets_at, bar_width, "%m-%d %H:%M")
|
|
792
814
|
if week:
|
|
793
815
|
_render_week_section(lines, week, last_week)
|
|
794
816
|
|
|
@@ -803,19 +825,19 @@ def _render_idle_panel(
|
|
|
803
825
|
week: WeeklyStats | None = None,
|
|
804
826
|
last_week: WeeklyStats | None = None,
|
|
805
827
|
) -> None:
|
|
806
|
-
|
|
828
|
+
bar_width = 20 if _width_mode() == "compact" else 30
|
|
807
829
|
lines = Text()
|
|
808
830
|
lines.append("限额数据面板\n\n", style="bold")
|
|
809
831
|
|
|
810
832
|
if rate_limits.five_hour_pct is not None:
|
|
811
833
|
_render_rate_bar(lines, "5h 限额", rate_limits.five_hour_pct,
|
|
812
|
-
rate_limits.five_hour_resets_at,
|
|
834
|
+
rate_limits.five_hour_resets_at, bar_width)
|
|
813
835
|
|
|
814
836
|
if rate_limits.seven_day_pct is not None:
|
|
815
837
|
if rate_limits.five_hour_pct is not None:
|
|
816
838
|
lines.append("\n")
|
|
817
839
|
_render_rate_bar(lines, "7d 限额", rate_limits.seven_day_pct,
|
|
818
|
-
rate_limits.seven_day_resets_at,
|
|
840
|
+
rate_limits.seven_day_resets_at, bar_width, "%m-%d %H:%M")
|
|
819
841
|
if week:
|
|
820
842
|
_render_week_section(lines, week, last_week)
|
|
821
843
|
|
|
@@ -1,108 +0,0 @@
|
|
|
1
|
-
from collections import defaultdict
|
|
2
|
-
from datetime import timedelta
|
|
3
|
-
|
|
4
|
-
from ..adapters.types import DailyStats, MonthlyStats, SessionStats, UsageEntry, WeeklyStats
|
|
5
|
-
from .cost import calculate_cost
|
|
6
|
-
|
|
7
|
-
|
|
8
|
-
def _accumulate(stat, entry: UsageEntry) -> None:
|
|
9
|
-
cost = calculate_cost(entry)
|
|
10
|
-
stat.input_tokens += entry.input_tokens
|
|
11
|
-
stat.output_tokens += entry.output_tokens
|
|
12
|
-
stat.cache_creation_tokens += entry.cache_creation_tokens
|
|
13
|
-
stat.cache_read_tokens += entry.cache_read_tokens
|
|
14
|
-
stat.total_tokens += entry.total_tokens
|
|
15
|
-
stat.cost_usd += cost
|
|
16
|
-
stat.message_count += entry.message_count
|
|
17
|
-
if hasattr(stat, "models"):
|
|
18
|
-
stat.models[entry.model] = stat.models.get(entry.model, 0) + entry.total_tokens
|
|
19
|
-
|
|
20
|
-
|
|
21
|
-
def aggregate_daily(entries: list[UsageEntry]) -> list[DailyStats]:
|
|
22
|
-
by_date: dict[str, DailyStats] = {}
|
|
23
|
-
sessions: dict[str, set[str]] = defaultdict(set)
|
|
24
|
-
|
|
25
|
-
for e in entries:
|
|
26
|
-
key = e.timestamp.strftime("%Y-%m-%d")
|
|
27
|
-
if key not in by_date:
|
|
28
|
-
by_date[key] = DailyStats(date=key)
|
|
29
|
-
_accumulate(by_date[key], e)
|
|
30
|
-
sessions[key].add(e.session_id)
|
|
31
|
-
|
|
32
|
-
for key, sids in sessions.items():
|
|
33
|
-
by_date[key].session_count = len(sids)
|
|
34
|
-
|
|
35
|
-
return sorted(by_date.values(), key=lambda s: s.date)
|
|
36
|
-
|
|
37
|
-
|
|
38
|
-
def aggregate_monthly(entries: list[UsageEntry]) -> list[MonthlyStats]:
|
|
39
|
-
by_month: dict[str, MonthlyStats] = {}
|
|
40
|
-
sessions: dict[str, set[str]] = defaultdict(set)
|
|
41
|
-
|
|
42
|
-
for e in entries:
|
|
43
|
-
key = e.timestamp.strftime("%Y-%m")
|
|
44
|
-
if key not in by_month:
|
|
45
|
-
by_month[key] = MonthlyStats(month=key)
|
|
46
|
-
_accumulate(by_month[key], e)
|
|
47
|
-
sessions[key].add(e.session_id)
|
|
48
|
-
|
|
49
|
-
for key, sids in sessions.items():
|
|
50
|
-
by_month[key].session_count = len(sids)
|
|
51
|
-
|
|
52
|
-
return sorted(by_month.values(), key=lambda s: s.month)
|
|
53
|
-
|
|
54
|
-
|
|
55
|
-
def aggregate_weekly(entries: list[UsageEntry]) -> list[WeeklyStats]:
|
|
56
|
-
by_week: dict[str, WeeklyStats] = {}
|
|
57
|
-
sessions: dict[str, set[str]] = defaultdict(set)
|
|
58
|
-
|
|
59
|
-
for e in entries:
|
|
60
|
-
monday = e.timestamp.date() - timedelta(days=e.timestamp.weekday())
|
|
61
|
-
sunday = monday + timedelta(days=6)
|
|
62
|
-
key = monday.isoformat()
|
|
63
|
-
if key not in by_week:
|
|
64
|
-
by_week[key] = WeeklyStats(
|
|
65
|
-
week=key,
|
|
66
|
-
week_start=monday.strftime("%m-%d"),
|
|
67
|
-
week_end=sunday.strftime("%m-%d"),
|
|
68
|
-
)
|
|
69
|
-
_accumulate(by_week[key], e)
|
|
70
|
-
sessions[key].add(e.session_id)
|
|
71
|
-
|
|
72
|
-
for key, sids in sessions.items():
|
|
73
|
-
by_week[key].session_count = len(sids)
|
|
74
|
-
|
|
75
|
-
return sorted(by_week.values(), key=lambda s: s.week)
|
|
76
|
-
|
|
77
|
-
|
|
78
|
-
def aggregate_sessions(entries: list[UsageEntry]) -> list[SessionStats]:
|
|
79
|
-
by_session: dict[str, list[UsageEntry]] = defaultdict(list)
|
|
80
|
-
for e in entries:
|
|
81
|
-
by_session[e.session_id].append(e)
|
|
82
|
-
|
|
83
|
-
sessions: list[SessionStats] = []
|
|
84
|
-
for session_id, session_entries in by_session.items():
|
|
85
|
-
session_entries.sort(key=lambda e: e.timestamp)
|
|
86
|
-
first = session_entries[0]
|
|
87
|
-
last = session_entries[-1]
|
|
88
|
-
duration = (last.timestamp - first.timestamp).total_seconds() / 60
|
|
89
|
-
|
|
90
|
-
models: dict[str, int] = defaultdict(int)
|
|
91
|
-
for e in session_entries:
|
|
92
|
-
models[e.model] += e.total_tokens
|
|
93
|
-
primary_model = max(models, key=models.get) if models else "unknown"
|
|
94
|
-
|
|
95
|
-
s = SessionStats(
|
|
96
|
-
session_id=session_id,
|
|
97
|
-
project=first.project,
|
|
98
|
-
model=primary_model,
|
|
99
|
-
start_time=first.timestamp,
|
|
100
|
-
end_time=last.timestamp,
|
|
101
|
-
duration_minutes=round(duration, 1),
|
|
102
|
-
)
|
|
103
|
-
for e in session_entries:
|
|
104
|
-
_accumulate(s, e)
|
|
105
|
-
sessions.append(s)
|
|
106
|
-
|
|
107
|
-
sessions.sort(key=lambda s: s.start_time, reverse=True)
|
|
108
|
-
return sessions
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|