opencode-usage 0.1.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,3 @@
1
+ """OpenCode usage statistics CLI."""
2
+
3
+ __version__ = "0.1.0"
@@ -0,0 +1,5 @@
1
"""Allow running as `python -m opencode_usage`."""

from .cli import main

# Guard the call so that merely importing opencode_usage.__main__ (e.g. by
# tooling or docs generators) does not launch the CLI; `python -m` still
# executes it because it sets __name__ to "__main__".
if __name__ == "__main__":
    main()
opencode_usage/cli.py ADDED
@@ -0,0 +1,236 @@
1
+ """CLI entry point for opencode-usage."""
2
+
3
+ from __future__ import annotations
4
+
5
+ import argparse
6
+ import json
7
+ import re
8
+ import sys
9
+ from datetime import datetime, timedelta
10
+ from typing import Any
11
+
12
+ from . import render
13
+ from .db import OpenCodeDB, UsageRow
14
+ from .render import configure_console, render_daily, render_grouped, render_summary
15
+
16
+
17
+ def _parse_since(value: str) -> datetime:
18
+ """Parse a relative duration like '7d', '2w', '30d', '3h' or an ISO date."""
19
+ m = re.fullmatch(r"(\d+)([dhwm])", value.strip().lower())
20
+ if m:
21
+ n, unit = int(m.group(1)), m.group(2)
22
+ delta = {
23
+ "h": timedelta(hours=n),
24
+ "d": timedelta(days=n),
25
+ "w": timedelta(weeks=n),
26
+ "m": timedelta(days=n * 30),
27
+ }[unit]
28
+ return datetime.now().astimezone() - delta
29
+
30
+ # Try ISO date
31
+ try:
32
+ dt = datetime.fromisoformat(value)
33
+ if dt.tzinfo is None:
34
+ dt = dt.astimezone()
35
+ return dt
36
+ except ValueError:
37
+ pass
38
+
39
+ raise argparse.ArgumentTypeError(
40
+ f"Invalid time spec: '{value}'. Use '7d', '2w', '30d', '3h', or ISO date."
41
+ )
42
+
43
+
44
def _build_parser() -> argparse.ArgumentParser:
    """Build the CLI argument parser (flags mirror the README examples)."""
    parser = argparse.ArgumentParser(
        prog="opencode-usage",
        description="Track and display OpenCode token usage statistics.",
    )

    # Optional positional shortcut for the two most common time windows.
    parser.add_argument(
        "command",
        nargs="?",
        default=None,
        choices=["today", "yesterday"],
        help="Quick shortcut: 'today' or 'yesterday'",
    )

    # Time filters — mutually informal; precedence is resolved later.
    parser.add_argument(
        "--days", type=int, default=None, metavar="N",
        help="Show last N days (default: 7)",
    )
    parser.add_argument(
        "--since", type=_parse_since, default=None, metavar="SPEC",
        help="Time filter: '7d', '2w', '30d', '3h', or ISO date",
    )

    # Presentation options.
    parser.add_argument(
        "--by",
        choices=["model", "agent", "provider", "session", "day"],
        default=None,
        help="Group results by dimension",
    )
    parser.add_argument(
        "--limit", type=int, default=None, metavar="N",
        help="Max rows to display",
    )
    parser.add_argument(
        "--json", action="store_true", dest="json_output",
        help="Output as JSON",
    )
    parser.add_argument(
        "--compare", action="store_true",
        help="Compare with previous period of same length",
    )
    parser.add_argument(
        "--no-color", action="store_true", dest="no_color",
        help="Disable colored output",
    )
    parser.add_argument(
        "--db", default=None, metavar="PATH",
        help="Path to OpenCode database (default: auto-detect)",
    )
    return parser
108
+
109
+
110
+ def _resolve_since(args: argparse.Namespace) -> tuple[datetime | None, str]:
111
+ """Resolve the effective 'since' datetime and a human-readable period label."""
112
+ now = datetime.now().astimezone()
113
+
114
+ if args.command == "today":
115
+ since = now.replace(hour=0, minute=0, second=0, microsecond=0)
116
+ return since, "Today"
117
+
118
+ if args.command == "yesterday":
119
+ yesterday = now - timedelta(days=1)
120
+ since = yesterday.replace(hour=0, minute=0, second=0, microsecond=0)
121
+ return since, "Yesterday & Today"
122
+
123
+ if args.since is not None:
124
+ return args.since, f"Since {args.since.strftime('%Y-%m-%d')}"
125
+
126
+ if args.days is not None:
127
+ since = now - timedelta(days=args.days)
128
+ return since, f"Last {args.days} days"
129
+
130
+ # Default: last 7 days
131
+ since = now - timedelta(days=7)
132
+ return since, "Last 7 days"
133
+
134
+
135
+ def _fetch_rows(
136
+ db: OpenCodeDB,
137
+ group_by: str,
138
+ *,
139
+ since: datetime | None = None,
140
+ until: datetime | None = None,
141
+ limit: int | None = None,
142
+ ) -> list[UsageRow]:
143
+ """Fetch rows based on group_by dimension."""
144
+ if group_by == "day":
145
+ return db.daily(since=since, until=until, limit=limit)
146
+ if group_by == "model":
147
+ return db.by_model(since=since, until=until, limit=limit)
148
+ if group_by == "agent":
149
+ return db.by_agent(since=since, until=until, limit=limit)
150
+ if group_by == "provider":
151
+ return db.by_provider(since=since, until=until, limit=limit)
152
+ if group_by == "session":
153
+ return db.by_session(since=since, until=until, limit=limit)
154
+ return []
155
+
156
+
157
+ def _compute_deltas(
158
+ current: list[UsageRow],
159
+ previous: list[UsageRow],
160
+ ) -> list[float | None]:
161
+ """Compute token delta percentages between current and previous rows."""
162
+ prev_map: dict[str, int] = {}
163
+ for r in previous:
164
+ key = f"{r.label}:{r.detail}" if r.detail else r.label
165
+ prev_map[key] = prev_map.get(key, 0) + r.tokens.total
166
+
167
+ deltas: list[float | None] = []
168
+ for r in current:
169
+ key = f"{r.label}:{r.detail}" if r.detail else r.label
170
+ prev_val = prev_map.get(key)
171
+ if prev_val and prev_val > 0:
172
+ deltas.append((r.tokens.total - prev_val) / prev_val * 100)
173
+ else:
174
+ deltas.append(None)
175
+ return deltas
176
+
177
+
178
def main(argv: list[str] | None = None) -> None:
    """CLI entry point: parse args, query the DB, render tables or dump JSON.

    Exits with status 1 when the OpenCode database cannot be located.
    """
    parser = _build_parser()
    args = parser.parse_args(argv)

    if args.no_color:
        configure_console(no_color=True)

    try:
        db = OpenCodeDB(db_path=args.db)
    except FileNotFoundError as e:
        # Access the console via the render module so a reconfigured
        # (no-color) console is picked up.
        render.console.print(f"[red]Error:[/red] {e}")
        sys.exit(1)

    since, period = _resolve_since(args)
    group_by = args.by or "day"

    # Compute previous period for --compare: a window of the same length
    # ending where the current one starts.
    now = datetime.now().astimezone()
    prev_since = None
    if args.compare and since is not None:
        period_length = now - since
        prev_since = since - period_length

    # Fetch current data
    rows = _fetch_rows(db, group_by, since=since, limit=args.limit)
    total = db.totals(since=since)

    # Fetch previous period data for --compare. Per-row deltas are only
    # gathered for grouped views; the daily table shows a sparkline instead.
    prev_total = None
    prev_rows: list[UsageRow] = []
    if prev_since is not None:
        prev_total = db.totals(since=prev_since, until=since)
        if group_by != "day":
            prev_rows = _fetch_rows(db, group_by, since=prev_since, until=since, limit=args.limit)

    # JSON output
    if args.json_output:
        output: dict[str, Any] = {
            "period": period,
            "total": db.to_dicts([total])[0],
            "rows": db.to_dicts(rows),
        }
        if prev_total is not None:
            output["previous_total"] = db.to_dicts([prev_total])[0]
        if prev_rows:
            output["previous_rows"] = db.to_dicts(prev_rows)
        print(json.dumps(output, indent=2, ensure_ascii=False))
        return

    # Rich output
    render_summary(total, period, prev_total=prev_total)
    render.console.print()

    deltas = _compute_deltas(rows, prev_rows) if prev_rows else None

    if group_by == "day":
        render_daily(rows, period)
    else:
        render_grouped(rows, group_by, period, deltas=deltas)
opencode_usage/db.py ADDED
@@ -0,0 +1,332 @@
1
+ """SQLite query layer for OpenCode's database."""
2
+
3
+ from __future__ import annotations
4
+
5
+ import os
6
+ import platform
7
+ import sqlite3
8
+ from dataclasses import dataclass, field
9
+ from datetime import datetime
10
+ from pathlib import Path
11
+ from typing import Any
12
+
13
+
14
+ def _default_db_path() -> Path:
15
+ """Resolve the OpenCode database path per platform."""
16
+ if custom := os.environ.get("OPENCODE_DB"):
17
+ return Path(custom)
18
+
19
+ system = platform.system()
20
+ if system == "Darwin":
21
+ base = Path.home() / ".local" / "share"
22
+ elif system == "Linux":
23
+ base = Path(os.environ.get("XDG_DATA_HOME", Path.home() / ".local" / "share"))
24
+ elif system == "Windows":
25
+ base = Path(os.environ.get("LOCALAPPDATA", Path.home() / "AppData" / "Local"))
26
+ else:
27
+ base = Path.home() / ".local" / "share"
28
+
29
+ return base / "opencode" / "opencode.db"
30
+
31
+
32
@dataclass
class TokenStats:
    """Token counters for one aggregated slice of usage.

    All fields are sums taken from OpenCode's per-message JSON; `total`
    is summed from the stored `$.tokens.total` value, not recomputed from
    the other fields.
    """

    input: int = 0
    output: int = 0
    reasoning: int = 0
    cache_read: int = 0
    cache_write: int = 0
    total: int = 0
40
+
41
+
42
@dataclass
class UsageRow:
    """A single aggregated usage row."""

    # Group label: a date, model id, agent name, provider id, or session title.
    label: str
    # Number of assistant messages aggregated into this row.
    calls: int = 0
    # Summed token counters for the row.
    tokens: TokenStats = field(default_factory=TokenStats)
    # Summed cost (rendered with a "$" prefix by the CLI; 0.0 when absent).
    detail: str | None = None  # placeholder comment removed below
    cost: float = 0.0
    # Secondary dimension, e.g. the model id in the agent x model view.
    detail: str | None = None
51
+
52
+
53
class OpenCodeDB:
    """Read-only access to the OpenCode SQLite database.

    Usage data lives in the `message` table's JSON `data` column;
    timestamps at `$.time.created` are epoch milliseconds. Every query
    aggregates assistant messages that carry a token total.
    """

    # Aggregate columns shared by every query; `{c}` is the JSON column
    # reference ("data", or "m.data" when the message table is aliased).
    _AGG_COLUMNS = """
        COUNT(*) AS calls,
        COALESCE(SUM(json_extract({c}, '$.tokens.input')), 0) AS input_tokens,
        COALESCE(SUM(json_extract({c}, '$.tokens.output')), 0) AS output_tokens,
        COALESCE(SUM(json_extract({c}, '$.tokens.reasoning')), 0) AS reasoning_tokens,
        COALESCE(SUM(json_extract({c}, '$.tokens.cache.read')), 0) AS cache_read,
        COALESCE(SUM(json_extract({c}, '$.tokens.cache.write')), 0) AS cache_write,
        COALESCE(SUM(json_extract({c}, '$.tokens.total')), 0) AS total_tokens,
        COALESCE(SUM(json_extract({c}, '$.cost')), 0) AS cost
    """

    def __init__(self, db_path: Path | str | None = None) -> None:
        """Locate the database file.

        Args:
            db_path: explicit path; when None, auto-detect per platform.

        Raises:
            FileNotFoundError: if no database exists at the resolved path.
        """
        self.path = Path(db_path) if db_path else _default_db_path()
        if not self.path.exists():
            raise FileNotFoundError(
                f"OpenCode database not found at {self.path}\nSet OPENCODE_DB env var to override."
            )

    def _connect(self) -> sqlite3.Connection:
        """Open a fresh connection; mode=ro guarantees we never mutate
        OpenCode's live database."""
        conn = sqlite3.connect(f"file:{self.path}?mode=ro", uri=True)
        conn.row_factory = sqlite3.Row
        return conn

    def _fetch(self, sql: str, params: list[Any]) -> list[sqlite3.Row]:
        """Run one query on a fresh connection and always close it."""
        conn = self._connect()
        try:
            return conn.execute(sql, params).fetchall()
        finally:
            conn.close()

    @staticmethod
    def _tokens_of(r: sqlite3.Row) -> TokenStats:
        """Build a TokenStats from the shared aggregate columns of a row."""
        return TokenStats(
            input=r["input_tokens"],
            output=r["output_tokens"],
            reasoning=r["reasoning_tokens"],
            cache_read=r["cache_read"],
            cache_write=r["cache_write"],
            total=r["total_tokens"],
        )

    # ── query helpers ─────────────────────────────────────────────

    def _time_filter(
        self,
        since: datetime | None,
        until: datetime | None = None,
        *,
        col: str = "data",
    ) -> tuple[str, list[Any]]:
        """Return WHERE clause fragments and params for time filtering.

        Datetimes are converted to epoch milliseconds to match the stored
        `$.time.created` values; the window is half-open [since, until).
        """
        clauses: list[str] = []
        params: list[Any] = []
        if since is not None:
            clauses.append(f"AND json_extract({col}, '$.time.created') >= ?")
            params.append(int(since.timestamp() * 1000))
        if until is not None:
            clauses.append(f"AND json_extract({col}, '$.time.created') < ?")
            params.append(int(until.timestamp() * 1000))
        return " ".join(clauses), params

    def _base_query(
        self,
        group_expr: str,
        since: datetime | None = None,
        until: datetime | None = None,
        order: str = "total_tokens DESC",
        limit: int | None = None,
    ) -> list[UsageRow]:
        """Aggregate assistant messages grouped by *group_expr*.

        *group_expr* and *order* are trusted SQL fragments supplied only by
        this module — never caller input. All values are bound parameters.
        """
        time_clause, params = self._time_filter(since, until)
        columns = self._AGG_COLUMNS.format(c="data")

        sql = f"""
            SELECT
                {group_expr} AS label,
                {columns}
            FROM message
            WHERE json_extract(data, '$.role') = 'assistant'
              AND json_extract(data, '$.tokens.total') IS NOT NULL
              {time_clause}
            GROUP BY label
            ORDER BY {order}
        """
        if limit:
            # Bind LIMIT as a parameter rather than interpolating it.
            sql += " LIMIT ?"
            params.append(limit)

        return [
            UsageRow(
                label=r["label"] or "(unknown)",
                calls=r["calls"],
                tokens=self._tokens_of(r),
                cost=r["cost"],
            )
            for r in self._fetch(sql, params)
        ]

    # ── public API ────────────────────────────────────────────────

    def daily(
        self,
        since: datetime | None = None,
        until: datetime | None = None,
        limit: int | None = None,
    ) -> list[UsageRow]:
        """Aggregate per local calendar day, newest first."""
        return self._base_query(
            group_expr=(
                "date(json_extract(data, '$.time.created') / 1000, 'unixepoch', 'localtime')"
            ),
            since=since,
            until=until,
            order="label DESC",
            limit=limit,
        )

    def by_model(
        self,
        since: datetime | None = None,
        until: datetime | None = None,
        limit: int | None = None,
    ) -> list[UsageRow]:
        """Aggregate per model id, largest token total first."""
        return self._base_query(
            group_expr="json_extract(data, '$.modelID')",
            since=since,
            until=until,
            limit=limit,
        )

    def by_agent(
        self,
        since: datetime | None = None,
        until: datetime | None = None,
        limit: int | None = None,
    ) -> list[UsageRow]:
        """Group by agent x model, showing which model each agent uses."""
        time_clause, params = self._time_filter(since, until)
        columns = self._AGG_COLUMNS.format(c="data")

        sql = f"""
            SELECT
                json_extract(data, '$.agent') AS agent,
                json_extract(data, '$.modelID') AS model,
                {columns}
            FROM message
            WHERE json_extract(data, '$.role') = 'assistant'
              AND json_extract(data, '$.tokens.total') IS NOT NULL
              {time_clause}
            GROUP BY agent, model
            ORDER BY agent, total_tokens DESC
        """
        if limit:
            sql += " LIMIT ?"
            params.append(limit)

        return [
            UsageRow(
                label=r["agent"] or "(unknown)",
                calls=r["calls"],
                tokens=self._tokens_of(r),
                cost=r["cost"],
                detail=r["model"],
            )
            for r in self._fetch(sql, params)
        ]

    def by_provider(
        self,
        since: datetime | None = None,
        until: datetime | None = None,
        limit: int | None = None,
    ) -> list[UsageRow]:
        """Aggregate per provider id, largest token total first."""
        return self._base_query(
            group_expr="json_extract(data, '$.providerID')",
            since=since,
            until=until,
            limit=limit,
        )

    def by_session(
        self,
        since: datetime | None = None,
        until: datetime | None = None,
        limit: int | None = None,
    ) -> list[UsageRow]:
        """Group by session, using session title as label."""
        time_clause, params = self._time_filter(since, until, col="m.data")
        columns = self._AGG_COLUMNS.format(c="m.data")

        sql = f"""
            SELECT
                COALESCE(s.title, m.session_id) AS label,
                {columns}
            FROM message m
            LEFT JOIN session s ON m.session_id = s.id
            WHERE json_extract(m.data, '$.role') = 'assistant'
              AND json_extract(m.data, '$.tokens.total') IS NOT NULL
              {time_clause}
            GROUP BY m.session_id
            ORDER BY total_tokens DESC
        """
        if limit:
            sql += " LIMIT ?"
            params.append(limit)

        return [
            UsageRow(
                label=r["label"] or "(untitled)",
                calls=r["calls"],
                tokens=self._tokens_of(r),
                cost=r["cost"],
            )
            for r in self._fetch(sql, params)
        ]

    def totals(
        self,
        since: datetime | None = None,
        until: datetime | None = None,
    ) -> UsageRow:
        """Return a single aggregated row for the period (zeros when empty)."""
        rows = self._base_query(
            group_expr="'total'",
            since=since,
            until=until,
        )
        if rows:
            return rows[0]
        return UsageRow(label="total")

    def to_dicts(self, rows: list[UsageRow]) -> list[dict[str, Any]]:
        """Serialize rows for JSON output (detail is emitted as "model")."""
        result = []
        for r in rows:
            d: dict[str, Any] = {
                "label": r.label,
                "calls": r.calls,
                "tokens": {
                    "input": r.tokens.input,
                    "output": r.tokens.output,
                    "reasoning": r.tokens.reasoning,
                    "cache_read": r.tokens.cache_read,
                    "cache_write": r.tokens.cache_write,
                    "total": r.tokens.total,
                },
                "cost": round(r.cost, 4),
            }
            if r.detail is not None:
                d["model"] = r.detail
            result.append(d)
        return result
@@ -0,0 +1,222 @@
1
+ """Rich table rendering for usage stats."""
2
+
3
+ from __future__ import annotations
4
+
5
+ from typing import TYPE_CHECKING
6
+
7
+ from rich.console import Console
8
+ from rich.panel import Panel
9
+ from rich.table import Table
10
+ from rich.text import Text
11
+
12
+ if TYPE_CHECKING:
13
+ from .db import UsageRow
14
+
15
# Module-level Console shared by all render helpers; rebound by
# configure_console() when colored output must be disabled.
console = Console()
16
+
17
+
18
def configure_console(*, no_color: bool = False) -> None:
    """Reconfigure the module-level console (e.g. for --no-color).

    Rebinds the module-global `console`; render functions look the global
    up at call time, so this affects all subsequent output.
    """
    global console
    console = Console(no_color=no_color)
22
+
23
+
24
+ def _fmt_tokens(n: int) -> str:
25
+ """Human-readable token count."""
26
+ if n >= 1_000_000_000:
27
+ return f"{n / 1_000_000_000:.1f}B"
28
+ if n >= 1_000_000:
29
+ return f"{n / 1_000_000:.1f}M"
30
+ if n >= 1_000:
31
+ return f"{n / 1_000:.1f}K"
32
+ return str(n)
33
+
34
+
35
+ def _fmt_cost(c: float) -> str:
36
+ if c == 0:
37
+ return "-"
38
+ if c < 0.01:
39
+ return f"${c:.4f}"
40
+ return f"${c:.2f}"
41
+
42
+
43
+ _SPARK_CHARS = "▁▂▃▄▅▆▇█"
44
+
45
+
46
+ def _spark_bar(value: int, max_value: int) -> str:
47
+ """Single-character bar proportional to value/max."""
48
+ if max_value <= 0 or value <= 0:
49
+ return "▁"
50
+ level = min(int(value / max_value * 7), 7)
51
+ return _SPARK_CHARS[level]
52
+
53
+
54
+ def _fmt_delta(pct: float) -> str:
55
+ """Format a percentage delta with color and arrow."""
56
+ if pct > 0:
57
+ return f"[red]↑{pct:.0f}%[/]"
58
+ if pct < 0:
59
+ return f"[green]↓{abs(pct):.0f}%[/]"
60
+ return "[dim]→0%[/]"
61
+
62
+
63
+ def _short_model(name: str) -> str:
64
+ """Abbreviate common model names to save table width."""
65
+ import re
66
+
67
+ # Strip vendor prefix: "vendor-variant-1-2-20251016" → "variant-1-2"
68
+ m = re.match(r"\w+-([a-z]\w+)-(\d+-\d+)(?:-\d+)?$", name)
69
+ if m:
70
+ return f"{m.group(1)}-{m.group(2)}"
71
+
72
+ # gemini-3-pro-preview → gemini-3-pro
73
+ name = re.sub(r"-preview$", "", name)
74
+ # grok-code-fast-1 → grok-fast-1
75
+ name = name.replace("grok-code-", "grok-")
76
+ # minimax-m2.5-free → minimax-m2.5
77
+ name = re.sub(r"-free$", "", name)
78
+ return name
79
+
80
+
81
def _make_table(
    title: str,
    label_header: str,
    rows: list[UsageRow],
    show_breakdown: bool = True,
    show_detail: str | None = None,
    trend_values: list[int] | None = None,
    deltas: list[float | None] | None = None,
) -> Table:
    """Build a Rich Table for *rows*.

    Args:
        title: table title.
        label_header: header for the first (label) column.
        rows: aggregated usage rows, one table row each.
        show_breakdown: include Input/Output/Cache token columns.
        show_detail: header for an optional second column fed from
            row.detail (used by the agent x model view); also enables
            label deduplication and blank separators between groups.
        trend_values: per-row totals rendered as a sparkline column.
        deltas: per-row percentage deltas; None entries render as '-'.
    """
    table = Table(
        title=title,
        show_header=True,
        header_style="bold cyan",
        border_style="dim",
        title_style="bold white",
        pad_edge=True,
    )

    # Narrower label column when a detail column competes for width.
    label_max = 24 if show_detail else 30
    detail_max = 18 if show_detail else 0
    table.add_column(label_header, style="bold", no_wrap=True, max_width=label_max)
    if show_detail:
        table.add_column(show_detail, style="dim cyan", no_wrap=True, max_width=detail_max)
    table.add_column("Calls", justify="right", style="magenta", min_width=5)
    if show_breakdown:
        table.add_column("Input", justify="right", style="green", min_width=6)
        table.add_column("Output", justify="right", style="yellow", min_width=6)
        table.add_column("Cache R", justify="right", style="dim", min_width=6)
        table.add_column("Cache W", justify="right", style="dim", min_width=6)
    table.add_column("Total", justify="right", style="bold white", min_width=7)
    table.add_column("Cost", justify="right", style="bold red", min_width=7)
    if trend_values is not None:
        table.add_column("Trend", justify="center", style="cyan", no_wrap=True)
    if deltas is not None:
        table.add_column("Δ", justify="right", min_width=6)

    # Precompute max for sparkline scaling.
    trend_max = max(trend_values) if trend_values else 0

    # Track previous label for deduplication + group separators.
    prev_label = None
    for _i, r in enumerate(rows):
        # Insert blank separator between agent groups.
        if show_detail and prev_label is not None and r.label != prev_label:
            table.add_section()

        # Repeat labels are blanked so each group reads as one visual unit.
        display_label = r.label if r.label != prev_label else ""
        prev_label = r.label

        cols: list[str] = [display_label]
        if show_detail:
            cols.append(_short_model(r.detail) if r.detail else "")
        cols.append(str(r.calls))
        if show_breakdown:
            cols.extend(
                [
                    _fmt_tokens(r.tokens.input),
                    _fmt_tokens(r.tokens.output),
                    _fmt_tokens(r.tokens.cache_read),
                    _fmt_tokens(r.tokens.cache_write),
                ]
            )
        cols.extend([_fmt_tokens(r.tokens.total), _fmt_cost(r.cost)])
        if trend_values is not None:
            # Out-of-range indices fall back to 0 rather than raising.
            tv = trend_values[_i] if _i < len(trend_values) else 0
            cols.append(_spark_bar(tv, trend_max))
        if deltas is not None:
            d = deltas[_i] if _i < len(deltas) else None
            cols.append(_fmt_delta(d) if d is not None else "[dim]-[/]")
        table.add_row(*cols)

    return table
153
+
154
+
155
def render_summary(
    total: UsageRow,
    period: str,
    prev_total: UsageRow | None = None,
) -> None:
    """Print a one-line summary panel (calls / tokens / cost).

    When *prev_total* is given, each figure gets a colored delta vs. the
    previous period; zero-valued previous figures are skipped to avoid
    division by zero.
    """
    text = Text()
    text.append(" Calls: ", style="dim")
    text.append(f"{total.calls:,}", style="bold magenta")
    if prev_total is not None and prev_total.calls > 0:
        pct = (total.calls - prev_total.calls) / prev_total.calls * 100
        # from_markup so the [red]/[green] tags from _fmt_delta render.
        text.append(Text.from_markup(f" {_fmt_delta(pct)}"))
    text.append(" │ Tokens: ", style="dim")
    text.append(_fmt_tokens(total.tokens.total), style="bold white")
    if prev_total is not None and prev_total.tokens.total > 0:
        pct = (total.tokens.total - prev_total.tokens.total) / prev_total.tokens.total * 100
        text.append(Text.from_markup(f" {_fmt_delta(pct)}"))
    text.append(" │ Cost: ", style="dim")
    text.append(_fmt_cost(total.cost), style="bold red")
    if prev_total is not None and prev_total.cost > 0:
        pct = (total.cost - prev_total.cost) / prev_total.cost * 100
        text.append(Text.from_markup(f" {_fmt_delta(pct)}"))
    console.print(Panel(text, title=f"[bold]OpenCode Usage — {period}[/bold]", border_style="blue"))
178
+
179
+
180
def render_daily(rows: list[UsageRow], period: str) -> None:
    """Render the daily breakdown table with a per-day sparkline column."""
    sparkline_totals = [row.tokens.total for row in rows]
    console.print(
        _make_table(
            title=f"Daily Usage ({period})",
            label_header="Date",
            rows=rows,
            show_breakdown=True,
            trend_values=sparkline_totals,
        )
    )
191
+
192
+
193
def render_grouped(
    rows: list[UsageRow],
    group_by: str,
    period: str,
    deltas: list[float | None] | None = None,
) -> None:
    """Render a grouped breakdown table."""
    headers = {
        "model": "Model",
        "agent": "Agent",
        "provider": "Provider",
        "session": "Session",
    }
    header = headers.get(group_by, group_by.title())

    # Session labels are long, and the agent view carries an extra Model
    # column — both drop the per-token-type breakdown to save width.
    wants_breakdown = group_by not in ("session", "agent")
    detail_header = "Model" if group_by == "agent" else None

    console.print(
        _make_table(
            title=f"Usage by {header} ({period})",
            label_header=header,
            rows=rows,
            show_breakdown=wants_breakdown,
            show_detail=detail_header,
            deltas=deltas,
        )
    )
@@ -0,0 +1,112 @@
1
+ Metadata-Version: 2.4
2
+ Name: opencode-usage
3
+ Version: 0.1.0
4
+ Summary: CLI tool to track and display OpenCode token usage statistics
5
+ License-Expression: MIT
6
+ License-File: LICENSE
7
+ Requires-Python: >=3.10
8
+ Requires-Dist: rich>=13.0
9
+ Description-Content-Type: text/markdown
10
+
11
+ # opencode-usage
12
+
13
+ CLI tool to track and display [OpenCode](https://github.com/opencodeco/opencode) token usage statistics. Reads directly from OpenCode's local SQLite database — no API keys or external services needed.
14
+
15
+ ## Features
16
+
17
+ - **Daily breakdown** — token usage and cost per day
18
+ - **Group by dimension** — model, agent, provider, or session
19
+ - **Agent × Model view** — see which model each agent uses
20
+ - **Time filtering** — last N days, relative durations (`7d`, `2w`), or ISO dates
21
+ - **JSON output** — pipe to `jq` or other tools
22
+ - **Cross-platform** — macOS, Linux, Windows
23
+
24
+ ## Installation
25
+
26
+ ```bash
27
+ git clone https://github.com/rchardx/opencode-usage.git
28
+ cd opencode-usage
29
+ uv sync
30
+ uv tool install -e .
31
+ ```
32
+
33
+ After installation, `opencode-usage` is available globally.
34
+
35
+ ## Usage
36
+
37
+ ```bash
38
+ # Default: last 7 days, daily breakdown
39
+ opencode-usage
40
+
41
+ # Quick shortcuts
42
+ opencode-usage today
43
+ opencode-usage yesterday
44
+
45
+ # Time filtering
46
+ opencode-usage --days 30
47
+ opencode-usage --since 7d
48
+ opencode-usage --since 2025-01-01
49
+
50
+ # Group by dimension
51
+ opencode-usage --by model
52
+ opencode-usage --by agent # shows model per agent
53
+ opencode-usage --by provider
54
+ opencode-usage --by session --limit 10
55
+
56
+ # JSON output
57
+ opencode-usage --json
58
+ opencode-usage --by model --json | jq '.rows[].label'
59
+ ```
60
+
61
+ ### Example output
62
+
63
+ ```
64
+ ╭──────────────── OpenCode Usage — Last 7 days ────────────────╮
65
+ │ Calls: 1,280 │ Tokens: 52.3M │ Cost: $0.00 │
66
+ ╰──────────────────────────────────────────────────────────────╯
67
+
68
+ Usage by Agent (Last 7 days)
69
+ ┏━━━━━━━━━━━━━━━━━━━━━━━━┳━━━━━━━━━━━━━━┳━━━━━━━┳━━━━━━━━━┳━━━━━━━━━┓
70
+ ┃ Agent ┃ Model ┃ Calls ┃ Total ┃ Cost ┃
71
+ ┡━━━━━━━━━━━━━━━━━━━━━━━━╇━━━━━━━━━━━━━━╇━━━━━━━╇━━━━━━━━━╇━━━━━━━━━┩
72
+ │ build │ deepseek-r1 │ 420 │ 18.2M │ - │
73
+ │ │ qwen-3-coder │ 105 │ 4.7M │ - │
74
+ ├────────────────────────┼──────────────┼───────┼─────────┼─────────┤
75
+ │ explore │ gemma-3 │ 310 │ 12.5M │ - │
76
+ │ │ minimax-m2.5 │ 198 │ 8.1M │ - │
77
+ ├────────────────────────┼──────────────┼───────┼─────────┼─────────┤
78
+ │ librarian │ llama-4 │ 156 │ 5.8M │ - │
79
+ ├────────────────────────┼──────────────┼───────┼─────────┼─────────┤
80
+ │ oracle │ qwen-3-coder │ 91 │ 3.0M │ - │
81
+ └────────────────────────┴──────────────┴───────┴─────────┴─────────┘
82
+ ```
83
+
84
+ ## Configuration
85
+
86
+ | Environment Variable | Description |
87
+ |---|---|
88
+ | `OPENCODE_DB` | Override database path (default: auto-detected per platform) |
89
+
90
+ Default database locations:
91
+
92
+ - **macOS / Linux**: `~/.local/share/opencode/opencode.db`
93
+ - **Windows**: `%LOCALAPPDATA%\opencode\opencode.db`
94
+
95
+ ## Development
96
+
97
+ ```bash
98
+ git clone https://github.com/rchardx/opencode-usage.git
99
+ cd opencode-usage
100
+ uv sync
101
+
102
+ # Lint & format
103
+ uvx ruff check .
104
+ uvx ruff format .
105
+
106
+ # Install pre-commit hooks
107
+ uvx pre-commit install
108
+ ```
109
+
110
+ ## License
111
+
112
+ [MIT](LICENSE)
@@ -0,0 +1,10 @@
1
+ opencode_usage/__init__.py,sha256=xwn10ySPJ9WJFDrUZpDwUviEIFIeEmi9YE5_uk8UPAs,60
2
+ opencode_usage/__main__.py,sha256=EW4jcBJRCwzJV0o9_q2DKMiMYJH94GOzHsUvV3jJJq4,82
3
+ opencode_usage/cli.py,sha256=WpwB-HFwOUk8wNUL2Cy-E5pQL1a5YdOe6BsVZ32p2Vo,6888
4
+ opencode_usage/db.py,sha256=sKzDw3SadjLddzlcvxopRoCXHdkXmfgMfm-OeEfrQUQ,11759
5
+ opencode_usage/render.py,sha256=Oh36CyTqxPzvS0jOgrZjJKnbD7UhlEfTx0q8P5jv82o,7319
6
+ opencode_usage-0.1.0.dist-info/METADATA,sha256=AkU39goYSRllKLxbLTu_Gz6UxCKEtAU9pMloH_D-awI,4399
7
+ opencode_usage-0.1.0.dist-info/WHEEL,sha256=QccIxa26bgl1E6uMy58deGWi-0aeIkkangHcxk2kWfw,87
8
+ opencode_usage-0.1.0.dist-info/entry_points.txt,sha256=r8iyaidL9xB_JpEuouRw7F7S13feStWeO2KUTqdwsxU,59
9
+ opencode_usage-0.1.0.dist-info/licenses/LICENSE,sha256=7qsQMYbQXlISlrH_paIJIyNT2l7ODIFZzTudcgu-nkI,1064
10
+ opencode_usage-0.1.0.dist-info/RECORD,,
@@ -0,0 +1,4 @@
1
+ Wheel-Version: 1.0
2
+ Generator: hatchling 1.29.0
3
+ Root-Is-Purelib: true
4
+ Tag: py3-none-any
@@ -0,0 +1,2 @@
1
+ [console_scripts]
2
+ opencode-usage = opencode_usage.cli:main
@@ -0,0 +1,21 @@
1
+ MIT License
2
+
3
+ Copyright (c) 2025 rchardx
4
+
5
+ Permission is hereby granted, free of charge, to any person obtaining a copy
6
+ of this software and associated documentation files (the "Software"), to deal
7
+ in the Software without restriction, including without limitation the rights
8
+ to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
9
+ copies of the Software, and to permit persons to whom the Software is
10
+ furnished to do so, subject to the following conditions:
11
+
12
+ The above copyright notice and this permission notice shall be included in all
13
+ copies or substantial portions of the Software.
14
+
15
+ THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16
+ IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17
+ FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
18
+ AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19
+ LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
20
+ OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
21
+ SOFTWARE.