token-tracker 0.2.2__tar.gz → 0.2.3__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (26) hide show
  1. {token_tracker-0.2.2 → token_tracker-0.2.3}/PKG-INFO +1 -1
  2. {token_tracker-0.2.2 → token_tracker-0.2.3}/pyproject.toml +1 -1
  3. token_tracker-0.2.3/src/analyzer/aggregator.py +133 -0
  4. {token_tracker-0.2.2 → token_tracker-0.2.3}/src/cli.py +5 -5
  5. {token_tracker-0.2.2 → token_tracker-0.2.3}/src/ui/tables.py +20 -17
  6. {token_tracker-0.2.2 → token_tracker-0.2.3}/token_tracker.egg-info/PKG-INFO +1 -1
  7. token_tracker-0.2.2/src/analyzer/aggregator.py +0 -108
  8. {token_tracker-0.2.2 → token_tracker-0.2.3}/README.md +0 -0
  9. {token_tracker-0.2.2 → token_tracker-0.2.3}/setup.cfg +0 -0
  10. {token_tracker-0.2.2 → token_tracker-0.2.3}/src/__init__.py +0 -0
  11. {token_tracker-0.2.2 → token_tracker-0.2.3}/src/adapters/__init__.py +0 -0
  12. {token_tracker-0.2.2 → token_tracker-0.2.3}/src/adapters/claude.py +0 -0
  13. {token_tracker-0.2.2 → token_tracker-0.2.3}/src/adapters/codex.py +0 -0
  14. {token_tracker-0.2.2 → token_tracker-0.2.3}/src/adapters/rate_limits.py +0 -0
  15. {token_tracker-0.2.2 → token_tracker-0.2.3}/src/adapters/registry.py +0 -0
  16. {token_tracker-0.2.2 → token_tracker-0.2.3}/src/adapters/types.py +0 -0
  17. {token_tracker-0.2.2 → token_tracker-0.2.3}/src/analyzer/__init__.py +0 -0
  18. {token_tracker-0.2.2 → token_tracker-0.2.3}/src/analyzer/blocks.py +0 -0
  19. {token_tracker-0.2.2 → token_tracker-0.2.3}/src/analyzer/cost.py +0 -0
  20. {token_tracker-0.2.2 → token_tracker-0.2.3}/src/hooks.py +0 -0
  21. {token_tracker-0.2.2 → token_tracker-0.2.3}/src/ui/__init__.py +0 -0
  22. {token_tracker-0.2.2 → token_tracker-0.2.3}/token_tracker.egg-info/SOURCES.txt +0 -0
  23. {token_tracker-0.2.2 → token_tracker-0.2.3}/token_tracker.egg-info/dependency_links.txt +0 -0
  24. {token_tracker-0.2.2 → token_tracker-0.2.3}/token_tracker.egg-info/entry_points.txt +0 -0
  25. {token_tracker-0.2.2 → token_tracker-0.2.3}/token_tracker.egg-info/requires.txt +0 -0
  26. {token_tracker-0.2.2 → token_tracker-0.2.3}/token_tracker.egg-info/top_level.txt +0 -0
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.4
2
2
  Name: token-tracker
3
- Version: 0.2.2
3
+ Version: 0.2.3
4
4
  Summary: Track token usage across local AI agents (Claude Code, Codex)
5
5
  Requires-Python: >=3.11
6
6
  Requires-Dist: rich>=13.7
@@ -4,7 +4,7 @@ build-backend = "setuptools.build_meta"
4
4
 
5
5
  [project]
6
6
  name = "token-tracker"
7
- version = "0.2.2"
7
+ version = "0.2.3"
8
8
  description = "Track token usage across local AI agents (Claude Code, Codex)"
9
9
  requires-python = ">=3.11"
10
10
  dependencies = [
@@ -0,0 +1,133 @@
1
from collections import defaultdict
from datetime import datetime, timedelta

from ..adapters.types import DailyStats, MonthlyStats, SessionStats, UsageEntry, WeeklyStats
from .cost import calculate_cost
6
+
7
+
8
def aggregate_daily(entries: list[UsageEntry]) -> list[DailyStats]:
    """Group usage entries by calendar day and sum their totals.

    Returns one DailyStats per day (key ``YYYY-MM-DD``), sorted
    chronologically.  ``session_count`` is the number of distinct
    session ids observed on that day; ``models`` maps model name to
    the total tokens attributed to it.
    """
    daily: dict[str, DailyStats] = {}
    day_sessions: dict[str, set[str]] = defaultdict(set)

    for entry in entries:
        key = entry.timestamp.strftime("%Y-%m-%d")
        stat = daily.setdefault(key, DailyStats(date=key))
        stat.input_tokens += entry.input_tokens
        stat.output_tokens += entry.output_tokens
        stat.cache_creation_tokens += entry.cache_creation_tokens
        stat.cache_read_tokens += entry.cache_read_tokens
        stat.total_tokens += entry.total_tokens
        stat.cost_usd += calculate_cost(entry)
        stat.message_count += entry.message_count
        stat.models[entry.model] = stat.models.get(entry.model, 0) + entry.total_tokens
        day_sessions[key].add(entry.session_id)

    # Distinct-session counts are resolved after the pass so duplicates
    # within a day collapse to one.
    for key, ids in day_sessions.items():
        daily[key].session_count = len(ids)

    return sorted(daily.values(), key=lambda d: d.date)
32
+
33
+
34
def aggregate_monthly(entries: list[UsageEntry]) -> list[MonthlyStats]:
    """Group usage entries by calendar month and sum their totals.

    Returns one MonthlyStats per month (key ``YYYY-MM``), sorted
    chronologically.  ``session_count`` is the number of distinct
    session ids observed in that month; ``models`` maps model name
    to the total tokens attributed to it.
    """
    monthly: dict[str, MonthlyStats] = {}
    month_sessions: dict[str, set[str]] = defaultdict(set)

    for entry in entries:
        key = entry.timestamp.strftime("%Y-%m")
        stat = monthly.setdefault(key, MonthlyStats(month=key))
        stat.input_tokens += entry.input_tokens
        stat.output_tokens += entry.output_tokens
        stat.cache_creation_tokens += entry.cache_creation_tokens
        stat.cache_read_tokens += entry.cache_read_tokens
        stat.total_tokens += entry.total_tokens
        stat.cost_usd += calculate_cost(entry)
        stat.message_count += entry.message_count
        stat.models[entry.model] = stat.models.get(entry.model, 0) + entry.total_tokens
        month_sessions[key].add(entry.session_id)

    # Distinct-session counts are resolved after the pass so duplicates
    # within a month collapse to one.
    for key, ids in month_sessions.items():
        monthly[key].session_count = len(ids)

    return sorted(monthly.values(), key=lambda m: m.month)
58
+
59
+
60
def aggregate_weekly(entries: list[UsageEntry]) -> list[WeeklyStats]:
    """Group usage entries by week (Monday through Sunday) and sum totals.

    Each week is keyed by its Monday's ISO date; ``week_start``/``week_end``
    carry ``MM-DD`` display strings.  ``session_count`` is the number of
    distinct session ids seen in that week.  Result is sorted by week key.
    """
    # NOTE: the previous version imported timedelta inside the function;
    # it is now taken from the module-level datetime import.
    by_week: dict[str, WeeklyStats] = {}
    sessions_by_week: dict[str, set[str]] = defaultdict(set)

    for e in entries:
        # Normalize to the Monday of the entry's week; weekday() is 0 for Monday.
        monday = e.timestamp.date() - timedelta(days=e.timestamp.weekday())
        sunday = monday + timedelta(days=6)
        week_key = monday.isoformat()
        if week_key not in by_week:
            by_week[week_key] = WeeklyStats(
                week=week_key,
                week_start=monday.strftime("%m-%d"),
                week_end=sunday.strftime("%m-%d"),
            )
        s = by_week[week_key]
        s.input_tokens += e.input_tokens
        s.output_tokens += e.output_tokens
        s.cache_creation_tokens += e.cache_creation_tokens
        s.cache_read_tokens += e.cache_read_tokens
        s.total_tokens += e.total_tokens
        s.cost_usd += calculate_cost(e)
        s.message_count += e.message_count
        s.models[e.model] = s.models.get(e.model, 0) + e.total_tokens
        sessions_by_week[week_key].add(e.session_id)

    # Resolve distinct-session counts once all entries are bucketed.
    for week_key, sessions in sessions_by_week.items():
        by_week[week_key].session_count = len(sessions)

    return sorted(by_week.values(), key=lambda s: s.week)
92
+
93
+
94
def aggregate_sessions(entries: list[UsageEntry]) -> list[SessionStats]:
    """Build one SessionStats per session id, most recent session first.

    A session's ``model`` is the model with the most total tokens in that
    session ("unknown" if none), its time span comes from the earliest and
    latest entry timestamps, and ``duration_minutes`` is rounded to 0.1 min.
    """
    grouped: dict[str, list[UsageEntry]] = defaultdict(list)
    for entry in entries:
        grouped[entry.session_id].append(entry)

    result: list[SessionStats] = []
    for sid, group in grouped.items():
        group.sort(key=lambda e: e.timestamp)
        first, last = group[0], group[-1]
        minutes = (last.timestamp - first.timestamp).total_seconds() / 60

        # Pick the dominant model by token volume for the session label.
        model_tokens: dict[str, int] = defaultdict(int)
        for entry in group:
            model_tokens[entry.model] += entry.total_tokens
        top_model = max(model_tokens, key=model_tokens.get) if model_tokens else "unknown"

        stat = SessionStats(
            session_id=sid,
            project=first.project,
            model=top_model,
            start_time=first.timestamp,
            end_time=last.timestamp,
            duration_minutes=round(minutes, 1),
        )
        for entry in group:
            stat.input_tokens += entry.input_tokens
            stat.output_tokens += entry.output_tokens
            stat.cache_creation_tokens += entry.cache_creation_tokens
            stat.cache_read_tokens += entry.cache_read_tokens
            stat.total_tokens += entry.total_tokens
            stat.cost_usd += calculate_cost(entry)
            stat.message_count += entry.message_count
        result.append(stat)

    result.sort(key=lambda s: s.start_time, reverse=True)
    return result
@@ -1,5 +1,4 @@
1
1
  import sys
2
- from datetime import datetime, timedelta, timezone
3
2
 
4
3
  from .adapters import claude, codex
5
4
  from .adapters.rate_limits import load_rate_limits as load_claude_rate_limits
@@ -49,6 +48,7 @@ def _build_agent_data(agent_id: str, agent_name: str) -> dict | None:
49
48
  weekly = aggregate_weekly(entries)
50
49
  monthly = aggregate_monthly(entries)
51
50
  sessions = aggregate_sessions(entries)
51
+ from datetime import datetime, timezone, timedelta
52
52
  cutoff = datetime.now(timezone.utc) - timedelta(hours=48)
53
53
  recent = [e for e in entries if e.timestamp >= cutoff]
54
54
  blocks = analyze_blocks(recent)
@@ -72,14 +72,14 @@ def _show_interactive_dashboard(agents):
72
72
  import src.ui.tables as _tables
73
73
 
74
74
  agent_names = [a.name for a in agents]
75
- console.print(f"[dim]加载数据...[/dim]")
76
- cache = {a.id: _build_agent_data(a.id, a.name) for a in agents}
77
-
78
75
  current = 0
79
76
  orig = _tables.console
80
77
 
81
78
  sys.stdout.write("\033[?1049h\033[?25l")
79
+ sys.stdout.write("\033[H\033[J\033[2m加载数据...\033[0m")
82
80
  sys.stdout.flush()
81
+ cache = {a.id: _build_agent_data(a.id, a.name) for a in agents}
82
+
83
83
  try:
84
84
  while True:
85
85
  buf = StringIO()
@@ -94,7 +94,7 @@ def _show_interactive_dashboard(agents):
94
94
  _tables.console.print(f"[yellow]暂无数据[/yellow]")
95
95
  _tables.console = orig
96
96
 
97
- sys.stdout.write("\033[2J\033[H" + buf.getvalue())
97
+ sys.stdout.write("\033[H\033[J" + buf.getvalue())
98
98
  sys.stdout.flush()
99
99
 
100
100
  key = _read_key(tty, termios)
@@ -117,14 +117,17 @@ def _fmt_duration(minutes: float) -> str:
117
117
  return f"{int(minutes)}min"
118
118
 
119
119
 
120
- def _bar_width() -> int:
121
- return 20 if _width_mode() == "compact" else 30
120
+ def _display_width(s: str) -> int:
121
+ w = 0
122
+ for ch in s:
123
+ w += 2 if ord(ch) > 0x7F else 1
124
+ return w
122
125
 
123
126
 
124
127
  def _append_bar(lines: Text, label: str, pct: float,
125
128
  bar_width: int, suffix: str = "") -> None:
126
129
  filled = int(pct / 100 * bar_width)
127
- bar = "=" * filled + "-" * (bar_width - filled)
130
+ bar = "" * filled + "" * (bar_width - filled)
128
131
  bar_style = _S.bar_high if pct > 80 else _S.bar_mid if pct > 50 else _S.bar_low
129
132
  lines.append(label, style=_S.dim)
130
133
  lines.append(bar, style=bar_style)
@@ -135,7 +138,7 @@ def _append_bar(lines: Text, label: str, pct: float,
135
138
 
136
139
 
137
140
  def _append_trend(lines: Text, current: float, previous: float) -> None:
138
- arrow = "^" if current >= previous else "v"
141
+ arrow = "" if current >= previous else ""
139
142
  style = _S.bad if current >= previous else _S.good
140
143
  lines.append(f"{arrow}", style=style)
141
144
 
@@ -183,12 +186,12 @@ def render_tab_bar(agent_names: list[str], current: int) -> None:
183
186
  line.append(" ")
184
187
  for i, name in enumerate(agent_names):
185
188
  if i > 0:
186
- line.append(" | ", style=_S.dim)
189
+ line.append(" ", style=_S.dim)
187
190
  if i == current:
188
191
  line.append(f" {name} ", style="bold reverse")
189
192
  else:
190
193
  line.append(f" {name} ", style=_S.dim)
191
- line.append(" < > 切换 q / ESC 退出", style=_S.dim)
194
+ line.append(" 切换 q / ESC 退出", style=_S.dim)
192
195
  console.print(line)
193
196
 
194
197
 
@@ -198,7 +201,7 @@ def _project_short(project: str) -> str:
198
201
 
199
202
  def _render_header(agents: list[str], total_tokens: int, total_cost: float,
200
203
  total_sessions: int, total_messages: int, days: int) -> None:
201
- agent_text = " ".join(f"[{_S.good}]{a}[/{_S.good}]" for a in agents)
204
+ agent_text = " ".join(f"[{_S.good}][/{_S.good}] {a}" for a in agents)
202
205
  console.print()
203
206
  console.print(Panel(
204
207
  f"[bold]Token Tracker[/bold] {agent_text}",
@@ -611,7 +614,7 @@ def _render_model_breakdown(stats: list[MonthlyStats]) -> None:
611
614
  for model, tokens in sorted_models[:8]:
612
615
  pct = tokens / total * 100 if total > 0 else 0
613
616
  bar_width = int(pct / 100 * 20)
614
- bar_text = "=" * bar_width + "-" * (20 - bar_width)
617
+ bar_text = "" * bar_width + "" * (20 - bar_width)
615
618
 
616
619
  if pct > 50:
617
620
  bar_style = _S.token_bold
@@ -694,7 +697,7 @@ def _render_daily_panel(
694
697
  week: WeeklyStats | None = None,
695
698
  last_week: WeeklyStats | None = None,
696
699
  ) -> None:
697
- bw = _bar_width()
700
+ bar_width = 20 if _width_mode() == "compact" else 30
698
701
  lines = Text()
699
702
  lines.append("当日数据面板 (P90)\n\n", style="bold")
700
703
 
@@ -707,9 +710,9 @@ def _render_daily_panel(
707
710
  for label, current, limit, unit_fmt in p90_items:
708
711
  pct = min(current / limit * 100, 100) if limit > 0 else 0
709
712
  max_pct = max(max_pct, pct)
710
- display_label = f" {label}" + " " * (14 - len(label))
713
+ display_label = f" {label}" + " " * (14 - _display_width(label))
711
714
  suffix = f" {unit_fmt(current)} / {unit_fmt(limit)}"
712
- _append_bar(lines, display_label, pct, bw, suffix)
715
+ _append_bar(lines, display_label, pct, bar_width, suffix)
713
716
  lines.append("\n")
714
717
 
715
718
  lines.append(f" Token {_fmt_tokens(today.total_tokens)}", style=_S.token)
@@ -762,14 +765,14 @@ def _render_active_block(
762
765
  remaining_h = remaining_min // 60
763
766
  remaining_m = remaining_min % 60
764
767
 
765
- bw = _bar_width()
768
+ bar_width = 20 if _width_mode() == "compact" else 30
766
769
 
767
770
  lines = Text()
768
771
  lines.append("当前 5h&7d 数据面板\n\n", style="bold")
769
772
 
770
773
  if rate_limits and rate_limits.five_hour_pct is not None:
771
774
  _render_rate_bar(lines, "5h 限额", rate_limits.five_hour_pct,
772
- rate_limits.five_hour_resets_at, bw)
775
+ rate_limits.five_hour_resets_at, bar_width)
773
776
 
774
777
  lines.append(f" 时间 ", style=_S.dim)
775
778
  lines.append(f"已用 {elapsed_min}min / 剩余 {remaining_h}h{remaining_m:02d}m\n", style=_S.dim)
@@ -788,7 +791,7 @@ def _render_active_block(
788
791
  if rate_limits and rate_limits.seven_day_pct is not None:
789
792
  lines.append("\n\n")
790
793
  _render_rate_bar(lines, "7d 限额", rate_limits.seven_day_pct,
791
- rate_limits.seven_day_resets_at, bw, "%m-%d %H:%M")
794
+ rate_limits.seven_day_resets_at, bar_width, "%m-%d %H:%M")
792
795
  if week:
793
796
  _render_week_section(lines, week, last_week)
794
797
 
@@ -803,19 +806,19 @@ def _render_idle_panel(
803
806
  week: WeeklyStats | None = None,
804
807
  last_week: WeeklyStats | None = None,
805
808
  ) -> None:
806
- bw = _bar_width()
809
+ bar_width = 20 if _width_mode() == "compact" else 30
807
810
  lines = Text()
808
811
  lines.append("限额数据面板\n\n", style="bold")
809
812
 
810
813
  if rate_limits.five_hour_pct is not None:
811
814
  _render_rate_bar(lines, "5h 限额", rate_limits.five_hour_pct,
812
- rate_limits.five_hour_resets_at, bw)
815
+ rate_limits.five_hour_resets_at, bar_width)
813
816
 
814
817
  if rate_limits.seven_day_pct is not None:
815
818
  if rate_limits.five_hour_pct is not None:
816
819
  lines.append("\n")
817
820
  _render_rate_bar(lines, "7d 限额", rate_limits.seven_day_pct,
818
- rate_limits.seven_day_resets_at, bw, "%m-%d %H:%M")
821
+ rate_limits.seven_day_resets_at, bar_width, "%m-%d %H:%M")
819
822
  if week:
820
823
  _render_week_section(lines, week, last_week)
821
824
 
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.4
2
2
  Name: token-tracker
3
- Version: 0.2.2
3
+ Version: 0.2.3
4
4
  Summary: Track token usage across local AI agents (Claude Code, Codex)
5
5
  Requires-Python: >=3.11
6
6
  Requires-Dist: rich>=13.7
@@ -1,108 +0,0 @@
1
- from collections import defaultdict
2
- from datetime import timedelta
3
-
4
- from ..adapters.types import DailyStats, MonthlyStats, SessionStats, UsageEntry, WeeklyStats
5
- from .cost import calculate_cost
6
-
7
-
8
- def _accumulate(stat, entry: UsageEntry) -> None:
9
- cost = calculate_cost(entry)
10
- stat.input_tokens += entry.input_tokens
11
- stat.output_tokens += entry.output_tokens
12
- stat.cache_creation_tokens += entry.cache_creation_tokens
13
- stat.cache_read_tokens += entry.cache_read_tokens
14
- stat.total_tokens += entry.total_tokens
15
- stat.cost_usd += cost
16
- stat.message_count += entry.message_count
17
- if hasattr(stat, "models"):
18
- stat.models[entry.model] = stat.models.get(entry.model, 0) + entry.total_tokens
19
-
20
-
21
- def aggregate_daily(entries: list[UsageEntry]) -> list[DailyStats]:
22
- by_date: dict[str, DailyStats] = {}
23
- sessions: dict[str, set[str]] = defaultdict(set)
24
-
25
- for e in entries:
26
- key = e.timestamp.strftime("%Y-%m-%d")
27
- if key not in by_date:
28
- by_date[key] = DailyStats(date=key)
29
- _accumulate(by_date[key], e)
30
- sessions[key].add(e.session_id)
31
-
32
- for key, sids in sessions.items():
33
- by_date[key].session_count = len(sids)
34
-
35
- return sorted(by_date.values(), key=lambda s: s.date)
36
-
37
-
38
- def aggregate_monthly(entries: list[UsageEntry]) -> list[MonthlyStats]:
39
- by_month: dict[str, MonthlyStats] = {}
40
- sessions: dict[str, set[str]] = defaultdict(set)
41
-
42
- for e in entries:
43
- key = e.timestamp.strftime("%Y-%m")
44
- if key not in by_month:
45
- by_month[key] = MonthlyStats(month=key)
46
- _accumulate(by_month[key], e)
47
- sessions[key].add(e.session_id)
48
-
49
- for key, sids in sessions.items():
50
- by_month[key].session_count = len(sids)
51
-
52
- return sorted(by_month.values(), key=lambda s: s.month)
53
-
54
-
55
- def aggregate_weekly(entries: list[UsageEntry]) -> list[WeeklyStats]:
56
- by_week: dict[str, WeeklyStats] = {}
57
- sessions: dict[str, set[str]] = defaultdict(set)
58
-
59
- for e in entries:
60
- monday = e.timestamp.date() - timedelta(days=e.timestamp.weekday())
61
- sunday = monday + timedelta(days=6)
62
- key = monday.isoformat()
63
- if key not in by_week:
64
- by_week[key] = WeeklyStats(
65
- week=key,
66
- week_start=monday.strftime("%m-%d"),
67
- week_end=sunday.strftime("%m-%d"),
68
- )
69
- _accumulate(by_week[key], e)
70
- sessions[key].add(e.session_id)
71
-
72
- for key, sids in sessions.items():
73
- by_week[key].session_count = len(sids)
74
-
75
- return sorted(by_week.values(), key=lambda s: s.week)
76
-
77
-
78
- def aggregate_sessions(entries: list[UsageEntry]) -> list[SessionStats]:
79
- by_session: dict[str, list[UsageEntry]] = defaultdict(list)
80
- for e in entries:
81
- by_session[e.session_id].append(e)
82
-
83
- sessions: list[SessionStats] = []
84
- for session_id, session_entries in by_session.items():
85
- session_entries.sort(key=lambda e: e.timestamp)
86
- first = session_entries[0]
87
- last = session_entries[-1]
88
- duration = (last.timestamp - first.timestamp).total_seconds() / 60
89
-
90
- models: dict[str, int] = defaultdict(int)
91
- for e in session_entries:
92
- models[e.model] += e.total_tokens
93
- primary_model = max(models, key=models.get) if models else "unknown"
94
-
95
- s = SessionStats(
96
- session_id=session_id,
97
- project=first.project,
98
- model=primary_model,
99
- start_time=first.timestamp,
100
- end_time=last.timestamp,
101
- duration_minutes=round(duration, 1),
102
- )
103
- for e in session_entries:
104
- _accumulate(s, e)
105
- sessions.append(s)
106
-
107
- sessions.sort(key=lambda s: s.start_time, reverse=True)
108
- return sessions
File without changes
File without changes