token-tracker 0.2.1__tar.gz → 0.2.2__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (26)
  1. {token_tracker-0.2.1 → token_tracker-0.2.2}/PKG-INFO +1 -1
  2. {token_tracker-0.2.1 → token_tracker-0.2.2}/README.md +1 -0
  3. {token_tracker-0.2.1 → token_tracker-0.2.2}/pyproject.toml +1 -1
  4. token_tracker-0.2.2/src/analyzer/aggregator.py +108 -0
  5. {token_tracker-0.2.1 → token_tracker-0.2.2}/src/cli.py +10 -2
  6. {token_tracker-0.2.1 → token_tracker-0.2.2}/src/ui/tables.py +17 -20
  7. {token_tracker-0.2.1 → token_tracker-0.2.2}/token_tracker.egg-info/PKG-INFO +1 -1
  8. token_tracker-0.2.1/src/analyzer/aggregator.py +0 -133
  9. {token_tracker-0.2.1 → token_tracker-0.2.2}/setup.cfg +0 -0
  10. {token_tracker-0.2.1 → token_tracker-0.2.2}/src/__init__.py +0 -0
  11. {token_tracker-0.2.1 → token_tracker-0.2.2}/src/adapters/__init__.py +0 -0
  12. {token_tracker-0.2.1 → token_tracker-0.2.2}/src/adapters/claude.py +0 -0
  13. {token_tracker-0.2.1 → token_tracker-0.2.2}/src/adapters/codex.py +0 -0
  14. {token_tracker-0.2.1 → token_tracker-0.2.2}/src/adapters/rate_limits.py +0 -0
  15. {token_tracker-0.2.1 → token_tracker-0.2.2}/src/adapters/registry.py +0 -0
  16. {token_tracker-0.2.1 → token_tracker-0.2.2}/src/adapters/types.py +0 -0
  17. {token_tracker-0.2.1 → token_tracker-0.2.2}/src/analyzer/__init__.py +0 -0
  18. {token_tracker-0.2.1 → token_tracker-0.2.2}/src/analyzer/blocks.py +0 -0
  19. {token_tracker-0.2.1 → token_tracker-0.2.2}/src/analyzer/cost.py +0 -0
  20. {token_tracker-0.2.1 → token_tracker-0.2.2}/src/hooks.py +0 -0
  21. {token_tracker-0.2.1 → token_tracker-0.2.2}/src/ui/__init__.py +0 -0
  22. {token_tracker-0.2.1 → token_tracker-0.2.2}/token_tracker.egg-info/SOURCES.txt +0 -0
  23. {token_tracker-0.2.1 → token_tracker-0.2.2}/token_tracker.egg-info/dependency_links.txt +0 -0
  24. {token_tracker-0.2.1 → token_tracker-0.2.2}/token_tracker.egg-info/entry_points.txt +0 -0
  25. {token_tracker-0.2.1 → token_tracker-0.2.2}/token_tracker.egg-info/requires.txt +0 -0
  26. {token_tracker-0.2.1 → token_tracker-0.2.2}/token_tracker.egg-info/top_level.txt +0 -0
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.4
2
2
  Name: token-tracker
3
- Version: 0.2.1
3
+ Version: 0.2.2
4
4
  Summary: Track token usage across local AI agents (Claude Code, Codex)
5
5
  Requires-Python: >=3.11
6
6
  Requires-Dist: rich>=13.7
@@ -49,6 +49,7 @@ curl -sSL https://raw.githubusercontent.com/stormzhang/token-tracker/master/inst
49
49
 
50
50
  ```bash
51
51
  pip install token-tracker
52
+ tt setup
52
53
  ```
53
54
 
54
55
  ## 使用
@@ -4,7 +4,7 @@ build-backend = "setuptools.build_meta"
4
4
 
5
5
  [project]
6
6
  name = "token-tracker"
7
- version = "0.2.1"
7
+ version = "0.2.2"
8
8
  description = "Track token usage across local AI agents (Claude Code, Codex)"
9
9
  requires-python = ">=3.11"
10
10
  dependencies = [
@@ -0,0 +1,108 @@
1
+ from collections import defaultdict
2
+ from datetime import timedelta
3
+
4
+ from ..adapters.types import DailyStats, MonthlyStats, SessionStats, UsageEntry, WeeklyStats
5
+ from .cost import calculate_cost
6
+
7
+
8
+ def _accumulate(stat, entry: UsageEntry) -> None:
9
+ cost = calculate_cost(entry)
10
+ stat.input_tokens += entry.input_tokens
11
+ stat.output_tokens += entry.output_tokens
12
+ stat.cache_creation_tokens += entry.cache_creation_tokens
13
+ stat.cache_read_tokens += entry.cache_read_tokens
14
+ stat.total_tokens += entry.total_tokens
15
+ stat.cost_usd += cost
16
+ stat.message_count += entry.message_count
17
+ if hasattr(stat, "models"):
18
+ stat.models[entry.model] = stat.models.get(entry.model, 0) + entry.total_tokens
19
+
20
+
21
+ def aggregate_daily(entries: list[UsageEntry]) -> list[DailyStats]:
22
+ by_date: dict[str, DailyStats] = {}
23
+ sessions: dict[str, set[str]] = defaultdict(set)
24
+
25
+ for e in entries:
26
+ key = e.timestamp.strftime("%Y-%m-%d")
27
+ if key not in by_date:
28
+ by_date[key] = DailyStats(date=key)
29
+ _accumulate(by_date[key], e)
30
+ sessions[key].add(e.session_id)
31
+
32
+ for key, sids in sessions.items():
33
+ by_date[key].session_count = len(sids)
34
+
35
+ return sorted(by_date.values(), key=lambda s: s.date)
36
+
37
+
38
+ def aggregate_monthly(entries: list[UsageEntry]) -> list[MonthlyStats]:
39
+ by_month: dict[str, MonthlyStats] = {}
40
+ sessions: dict[str, set[str]] = defaultdict(set)
41
+
42
+ for e in entries:
43
+ key = e.timestamp.strftime("%Y-%m")
44
+ if key not in by_month:
45
+ by_month[key] = MonthlyStats(month=key)
46
+ _accumulate(by_month[key], e)
47
+ sessions[key].add(e.session_id)
48
+
49
+ for key, sids in sessions.items():
50
+ by_month[key].session_count = len(sids)
51
+
52
+ return sorted(by_month.values(), key=lambda s: s.month)
53
+
54
+
55
+ def aggregate_weekly(entries: list[UsageEntry]) -> list[WeeklyStats]:
56
+ by_week: dict[str, WeeklyStats] = {}
57
+ sessions: dict[str, set[str]] = defaultdict(set)
58
+
59
+ for e in entries:
60
+ monday = e.timestamp.date() - timedelta(days=e.timestamp.weekday())
61
+ sunday = monday + timedelta(days=6)
62
+ key = monday.isoformat()
63
+ if key not in by_week:
64
+ by_week[key] = WeeklyStats(
65
+ week=key,
66
+ week_start=monday.strftime("%m-%d"),
67
+ week_end=sunday.strftime("%m-%d"),
68
+ )
69
+ _accumulate(by_week[key], e)
70
+ sessions[key].add(e.session_id)
71
+
72
+ for key, sids in sessions.items():
73
+ by_week[key].session_count = len(sids)
74
+
75
+ return sorted(by_week.values(), key=lambda s: s.week)
76
+
77
+
78
+ def aggregate_sessions(entries: list[UsageEntry]) -> list[SessionStats]:
79
+ by_session: dict[str, list[UsageEntry]] = defaultdict(list)
80
+ for e in entries:
81
+ by_session[e.session_id].append(e)
82
+
83
+ sessions: list[SessionStats] = []
84
+ for session_id, session_entries in by_session.items():
85
+ session_entries.sort(key=lambda e: e.timestamp)
86
+ first = session_entries[0]
87
+ last = session_entries[-1]
88
+ duration = (last.timestamp - first.timestamp).total_seconds() / 60
89
+
90
+ models: dict[str, int] = defaultdict(int)
91
+ for e in session_entries:
92
+ models[e.model] += e.total_tokens
93
+ primary_model = max(models, key=models.get) if models else "unknown"
94
+
95
+ s = SessionStats(
96
+ session_id=session_id,
97
+ project=first.project,
98
+ model=primary_model,
99
+ start_time=first.timestamp,
100
+ end_time=last.timestamp,
101
+ duration_minutes=round(duration, 1),
102
+ )
103
+ for e in session_entries:
104
+ _accumulate(s, e)
105
+ sessions.append(s)
106
+
107
+ sessions.sort(key=lambda s: s.start_time, reverse=True)
108
+ return sessions
@@ -1,4 +1,5 @@
1
1
  import sys
2
+ from datetime import datetime, timedelta, timezone
2
3
 
3
4
  from .adapters import claude, codex
4
5
  from .adapters.rate_limits import load_rate_limits as load_claude_rate_limits
@@ -48,7 +49,6 @@ def _build_agent_data(agent_id: str, agent_name: str) -> dict | None:
48
49
  weekly = aggregate_weekly(entries)
49
50
  monthly = aggregate_monthly(entries)
50
51
  sessions = aggregate_sessions(entries)
51
- from datetime import datetime, timezone, timedelta
52
52
  cutoff = datetime.now(timezone.utc) - timedelta(hours=48)
53
53
  recent = [e for e in entries if e.timestamp >= cutoff]
54
54
  blocks = analyze_blocks(recent)
@@ -140,10 +140,18 @@ def _read_key(tty, termios):
140
140
  termios.tcsetattr(fd, termios.TCSADRAIN, old)
141
141
 
142
142
 
143
+ def _get_version() -> str:
144
+ from importlib.metadata import version
145
+ return version("token-tracker")
146
+
147
+
143
148
  def main():
144
149
  args = sys.argv[1:]
145
150
  command = args[0] if args else "dashboard"
146
151
 
152
+ if command in ("--version", "-v", "-V"):
153
+ print(f"tt {_get_version()}")
154
+ return
147
155
  if command == "setup":
148
156
  setup()
149
157
  return
@@ -215,7 +223,7 @@ def main():
215
223
  render_sessions(stats, limit)
216
224
  else:
217
225
  console.print(f"[red]未知命令: {command}[/red]")
218
- console.print("[dim]可用命令: dashboard, daily, weekly, monthly, sessions, claude, codex, setup, unsetup[/dim]")
226
+ console.print("[dim]可用命令: dashboard, daily, weekly, monthly, sessions, claude, codex, setup, unsetup, --version[/dim]")
219
227
  sys.exit(1)
220
228
 
221
229
 
@@ -117,17 +117,14 @@ def _fmt_duration(minutes: float) -> str:
117
117
  return f"{int(minutes)}min"
118
118
 
119
119
 
120
- def _display_width(s: str) -> int:
121
- w = 0
122
- for ch in s:
123
- w += 2 if ord(ch) > 0x7F else 1
124
- return w
120
+ def _bar_width() -> int:
121
+ return 20 if _width_mode() == "compact" else 30
125
122
 
126
123
 
127
124
  def _append_bar(lines: Text, label: str, pct: float,
128
125
  bar_width: int, suffix: str = "") -> None:
129
126
  filled = int(pct / 100 * bar_width)
130
- bar = "█" * filled + "░" * (bar_width - filled)
127
+ bar = "=" * filled + "-" * (bar_width - filled)
131
128
  bar_style = _S.bar_high if pct > 80 else _S.bar_mid if pct > 50 else _S.bar_low
132
129
  lines.append(label, style=_S.dim)
133
130
  lines.append(bar, style=bar_style)
@@ -138,7 +135,7 @@ def _append_bar(lines: Text, label: str, pct: float,
138
135
 
139
136
 
140
137
  def _append_trend(lines: Text, current: float, previous: float) -> None:
141
- arrow = "↑" if current >= previous else "↓"
138
+ arrow = "^" if current >= previous else "v"
142
139
  style = _S.bad if current >= previous else _S.good
143
140
  lines.append(f"{arrow}", style=style)
144
141
 
@@ -186,12 +183,12 @@ def render_tab_bar(agent_names: list[str], current: int) -> None:
186
183
  line.append(" ")
187
184
  for i, name in enumerate(agent_names):
188
185
  if i > 0:
189
- line.append(" │ ", style=_S.dim)
186
+ line.append(" | ", style=_S.dim)
190
187
  if i == current:
191
188
  line.append(f" {name} ", style="bold reverse")
192
189
  else:
193
190
  line.append(f" {name} ", style=_S.dim)
194
- line.append(" ←→ 切换 q / ESC 退出", style=_S.dim)
191
+ line.append(" < > 切换 q / ESC 退出", style=_S.dim)
195
192
  console.print(line)
196
193
 
197
194
 
@@ -201,7 +198,7 @@ def _project_short(project: str) -> str:
201
198
 
202
199
  def _render_header(agents: list[str], total_tokens: int, total_cost: float,
203
200
  total_sessions: int, total_messages: int, days: int) -> None:
204
- agent_text = " ".join(f"[{_S.good}]●[/{_S.good}] {a}" for a in agents)
201
+ agent_text = " ".join(f"[{_S.good}]{a}[/{_S.good}]" for a in agents)
205
202
  console.print()
206
203
  console.print(Panel(
207
204
  f"[bold]Token Tracker[/bold] {agent_text}",
@@ -614,7 +611,7 @@ def _render_model_breakdown(stats: list[MonthlyStats]) -> None:
614
611
  for model, tokens in sorted_models[:8]:
615
612
  pct = tokens / total * 100 if total > 0 else 0
616
613
  bar_width = int(pct / 100 * 20)
617
- bar_text = "█" * bar_width + "░" * (20 - bar_width)
614
+ bar_text = "=" * bar_width + "-" * (20 - bar_width)
618
615
 
619
616
  if pct > 50:
620
617
  bar_style = _S.token_bold
@@ -697,7 +694,7 @@ def _render_daily_panel(
697
694
  week: WeeklyStats | None = None,
698
695
  last_week: WeeklyStats | None = None,
699
696
  ) -> None:
700
- bar_width = 20 if _width_mode() == "compact" else 30
697
+ bw = _bar_width()
701
698
  lines = Text()
702
699
  lines.append("当日数据面板 (P90)\n\n", style="bold")
703
700
 
@@ -710,9 +707,9 @@ def _render_daily_panel(
710
707
  for label, current, limit, unit_fmt in p90_items:
711
708
  pct = min(current / limit * 100, 100) if limit > 0 else 0
712
709
  max_pct = max(max_pct, pct)
713
- display_label = f" {label}" + " " * (14 - _display_width(label))
710
+ display_label = f" {label}" + " " * (14 - len(label))
714
711
  suffix = f" {unit_fmt(current)} / {unit_fmt(limit)}"
715
- _append_bar(lines, display_label, pct, bar_width, suffix)
712
+ _append_bar(lines, display_label, pct, bw, suffix)
716
713
  lines.append("\n")
717
714
 
718
715
  lines.append(f" Token {_fmt_tokens(today.total_tokens)}", style=_S.token)
@@ -765,14 +762,14 @@ def _render_active_block(
765
762
  remaining_h = remaining_min // 60
766
763
  remaining_m = remaining_min % 60
767
764
 
768
- bar_width = 20 if _width_mode() == "compact" else 30
765
+ bw = _bar_width()
769
766
 
770
767
  lines = Text()
771
768
  lines.append("当前 5h&7d 数据面板\n\n", style="bold")
772
769
 
773
770
  if rate_limits and rate_limits.five_hour_pct is not None:
774
771
  _render_rate_bar(lines, "5h 限额", rate_limits.five_hour_pct,
775
- rate_limits.five_hour_resets_at, bar_width)
772
+ rate_limits.five_hour_resets_at, bw)
776
773
 
777
774
  lines.append(f" 时间 ", style=_S.dim)
778
775
  lines.append(f"已用 {elapsed_min}min / 剩余 {remaining_h}h{remaining_m:02d}m\n", style=_S.dim)
@@ -791,7 +788,7 @@ def _render_active_block(
791
788
  if rate_limits and rate_limits.seven_day_pct is not None:
792
789
  lines.append("\n\n")
793
790
  _render_rate_bar(lines, "7d 限额", rate_limits.seven_day_pct,
794
- rate_limits.seven_day_resets_at, bar_width, "%m-%d %H:%M")
791
+ rate_limits.seven_day_resets_at, bw, "%m-%d %H:%M")
795
792
  if week:
796
793
  _render_week_section(lines, week, last_week)
797
794
 
@@ -806,19 +803,19 @@ def _render_idle_panel(
806
803
  week: WeeklyStats | None = None,
807
804
  last_week: WeeklyStats | None = None,
808
805
  ) -> None:
809
- bar_width = 20 if _width_mode() == "compact" else 30
806
+ bw = _bar_width()
810
807
  lines = Text()
811
808
  lines.append("限额数据面板\n\n", style="bold")
812
809
 
813
810
  if rate_limits.five_hour_pct is not None:
814
811
  _render_rate_bar(lines, "5h 限额", rate_limits.five_hour_pct,
815
- rate_limits.five_hour_resets_at, bar_width)
812
+ rate_limits.five_hour_resets_at, bw)
816
813
 
817
814
  if rate_limits.seven_day_pct is not None:
818
815
  if rate_limits.five_hour_pct is not None:
819
816
  lines.append("\n")
820
817
  _render_rate_bar(lines, "7d 限额", rate_limits.seven_day_pct,
821
- rate_limits.seven_day_resets_at, bar_width, "%m-%d %H:%M")
818
+ rate_limits.seven_day_resets_at, bw, "%m-%d %H:%M")
822
819
  if week:
823
820
  _render_week_section(lines, week, last_week)
824
821
 
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.4
2
2
  Name: token-tracker
3
- Version: 0.2.1
3
+ Version: 0.2.2
4
4
  Summary: Track token usage across local AI agents (Claude Code, Codex)
5
5
  Requires-Python: >=3.11
6
6
  Requires-Dist: rich>=13.7
@@ -1,133 +0,0 @@
1
- from collections import defaultdict
2
- from datetime import datetime
3
-
4
- from ..adapters.types import DailyStats, MonthlyStats, SessionStats, UsageEntry, WeeklyStats
5
- from .cost import calculate_cost
6
-
7
-
8
- def aggregate_daily(entries: list[UsageEntry]) -> list[DailyStats]:
9
- by_date: dict[str, DailyStats] = {}
10
- sessions_by_date: dict[str, set[str]] = defaultdict(set)
11
-
12
- for e in entries:
13
- date_str = e.timestamp.strftime("%Y-%m-%d")
14
- if date_str not in by_date:
15
- by_date[date_str] = DailyStats(date=date_str)
16
- s = by_date[date_str]
17
- cost = calculate_cost(e)
18
- s.input_tokens += e.input_tokens
19
- s.output_tokens += e.output_tokens
20
- s.cache_creation_tokens += e.cache_creation_tokens
21
- s.cache_read_tokens += e.cache_read_tokens
22
- s.total_tokens += e.total_tokens
23
- s.cost_usd += cost
24
- s.message_count += e.message_count
25
- s.models[e.model] = s.models.get(e.model, 0) + e.total_tokens
26
- sessions_by_date[date_str].add(e.session_id)
27
-
28
- for date_str, sessions in sessions_by_date.items():
29
- by_date[date_str].session_count = len(sessions)
30
-
31
- return sorted(by_date.values(), key=lambda s: s.date)
32
-
33
-
34
- def aggregate_monthly(entries: list[UsageEntry]) -> list[MonthlyStats]:
35
- by_month: dict[str, MonthlyStats] = {}
36
- sessions_by_month: dict[str, set[str]] = defaultdict(set)
37
-
38
- for e in entries:
39
- month_str = e.timestamp.strftime("%Y-%m")
40
- if month_str not in by_month:
41
- by_month[month_str] = MonthlyStats(month=month_str)
42
- s = by_month[month_str]
43
- cost = calculate_cost(e)
44
- s.input_tokens += e.input_tokens
45
- s.output_tokens += e.output_tokens
46
- s.cache_creation_tokens += e.cache_creation_tokens
47
- s.cache_read_tokens += e.cache_read_tokens
48
- s.total_tokens += e.total_tokens
49
- s.cost_usd += cost
50
- s.message_count += e.message_count
51
- s.models[e.model] = s.models.get(e.model, 0) + e.total_tokens
52
- sessions_by_month[month_str].add(e.session_id)
53
-
54
- for month_str, sessions in sessions_by_month.items():
55
- by_month[month_str].session_count = len(sessions)
56
-
57
- return sorted(by_month.values(), key=lambda s: s.month)
58
-
59
-
60
- def aggregate_weekly(entries: list[UsageEntry]) -> list[WeeklyStats]:
61
- from datetime import timedelta
62
-
63
- by_week: dict[str, WeeklyStats] = {}
64
- sessions_by_week: dict[str, set[str]] = defaultdict(set)
65
-
66
- for e in entries:
67
- monday = e.timestamp.date() - timedelta(days=e.timestamp.weekday())
68
- sunday = monday + timedelta(days=6)
69
- week_key = monday.isoformat()
70
- if week_key not in by_week:
71
- by_week[week_key] = WeeklyStats(
72
- week=week_key,
73
- week_start=monday.strftime("%m-%d"),
74
- week_end=sunday.strftime("%m-%d"),
75
- )
76
- s = by_week[week_key]
77
- cost = calculate_cost(e)
78
- s.input_tokens += e.input_tokens
79
- s.output_tokens += e.output_tokens
80
- s.cache_creation_tokens += e.cache_creation_tokens
81
- s.cache_read_tokens += e.cache_read_tokens
82
- s.total_tokens += e.total_tokens
83
- s.cost_usd += cost
84
- s.message_count += e.message_count
85
- s.models[e.model] = s.models.get(e.model, 0) + e.total_tokens
86
- sessions_by_week[week_key].add(e.session_id)
87
-
88
- for week_key, sessions in sessions_by_week.items():
89
- by_week[week_key].session_count = len(sessions)
90
-
91
- return sorted(by_week.values(), key=lambda s: s.week)
92
-
93
-
94
- def aggregate_sessions(entries: list[UsageEntry]) -> list[SessionStats]:
95
- by_session: dict[str, list[UsageEntry]] = defaultdict(list)
96
-
97
- for e in entries:
98
- by_session[e.session_id].append(e)
99
-
100
- sessions: list[SessionStats] = []
101
- for session_id, session_entries in by_session.items():
102
- session_entries.sort(key=lambda e: e.timestamp)
103
- first = session_entries[0]
104
- last = session_entries[-1]
105
- duration = (last.timestamp - first.timestamp).total_seconds() / 60
106
-
107
- models: dict[str, int] = defaultdict(int)
108
- for e in session_entries:
109
- models[e.model] += e.total_tokens
110
- primary_model = max(models, key=models.get) if models else "unknown"
111
-
112
- s = SessionStats(
113
- session_id=session_id,
114
- project=first.project,
115
- model=primary_model,
116
- start_time=first.timestamp,
117
- end_time=last.timestamp,
118
- duration_minutes=round(duration, 1),
119
- )
120
- for e in session_entries:
121
- cost = calculate_cost(e)
122
- s.input_tokens += e.input_tokens
123
- s.output_tokens += e.output_tokens
124
- s.cache_creation_tokens += e.cache_creation_tokens
125
- s.cache_read_tokens += e.cache_read_tokens
126
- s.total_tokens += e.total_tokens
127
- s.cost_usd += cost
128
- s.message_count += e.message_count
129
-
130
- sessions.append(s)
131
-
132
- sessions.sort(key=lambda s: s.start_time, reverse=True)
133
- return sessions
File without changes