codex-stats 0.1.0__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,21 @@
1
+ MIT License
2
+
3
+ Copyright (c) 2026 Vivek
4
+
5
+ Permission is hereby granted, free of charge, to any person obtaining a copy
6
+ of this software and associated documentation files (the "Software"), to deal
7
+ in the Software without restriction, including without limitation the rights
8
+ to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
9
+ copies of the Software, and to permit persons to whom the Software is
10
+ furnished to do so, subject to the following conditions:
11
+
12
+ The above copyright notice and this permission notice shall be included in all
13
+ copies or substantial portions of the Software.
14
+
15
+ THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16
+ IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17
+ FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
18
+ AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19
+ LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
20
+ OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
21
+ SOFTWARE.
@@ -0,0 +1,92 @@
1
+ Metadata-Version: 2.4
2
+ Name: codex-stats
3
+ Version: 0.1.0
4
+ Summary: Local AI usage observability CLI for Codex sessions.
5
+ Author: Vivek
6
+ License: MIT
7
+ Project-URL: Homepage, https://github.com/vivek378521/codex-stats
8
+ Project-URL: Repository, https://github.com/vivek378521/codex-stats
9
+ Project-URL: Issues, https://github.com/vivek378521/codex-stats/issues
10
+ Keywords: codex,cli,analytics,sqlite,usage
11
+ Classifier: Development Status :: 3 - Alpha
12
+ Classifier: Environment :: Console
13
+ Classifier: Intended Audience :: Developers
14
+ Classifier: License :: OSI Approved :: MIT License
15
+ Classifier: Programming Language :: Python :: 3
16
+ Classifier: Programming Language :: Python :: 3.11
17
+ Classifier: Programming Language :: Python :: 3.12
18
+ Classifier: Programming Language :: Python :: 3.13
19
+ Classifier: Topic :: Software Development :: Libraries :: Python Modules
20
+ Classifier: Topic :: Utilities
21
+ Requires-Python: >=3.11
22
+ Description-Content-Type: text/markdown
23
+ License-File: LICENSE
24
+ Provides-Extra: dev
25
+ Requires-Dist: build>=1.2.2; extra == "dev"
26
+ Dynamic: license-file
27
+
28
+ # codex-stats
29
+
30
+ Local AI usage observability CLI for Codex sessions.
31
+
32
+ ## What it does
33
+
34
+ `codex-stats` reads local Codex state from `~/.codex` and shows:
35
+
36
+ - session totals from `state_5.sqlite`
37
+ - request counts from rollout JSONL files
38
+ - model and project breakdown from local session metadata
39
+ - estimated cost from a local pricing table
40
+
41
+ ## Install
42
+
43
+ ```bash
44
+ pip install .
45
+ ```
46
+
47
+ After the package is published to PyPI:
48
+
49
+ ```bash
50
+ pipx install codex-stats
51
+ ```
52
+
53
+ For local development:
54
+
55
+ ```bash
56
+ python3 -m venv .venv
57
+ source .venv/bin/activate
58
+ python -m pip install -U pip setuptools
59
+ python -m pip install -e .
60
+ ```
61
+
62
+ ## Usage
63
+
64
+ ```bash
65
+ codex-stats
66
+ codex-stats today
67
+ codex-stats session
68
+ codex-stats session --id <session_id>
69
+ codex-stats --json
70
+ ```
71
+
72
+ From the repo without installing:
73
+
74
+ ```bash
75
+ PYTHONPATH=src python3 -m codex_stats
76
+ ```
77
+
78
+ ## Notes
79
+
80
+ - This tool does not intercept Codex API traffic.
81
+ - Costs are estimates, not authoritative billing values.
82
+ - The current MVP relies on local file formats that may evolve with Codex CLI versions.
83
+
84
+ ## Release
85
+
86
+ Recommended publish flow:
87
+
88
+ 1. Create the GitHub repository `vivek378521/codex-stats`.
89
+ 2. Create a PyPI project named `codex-stats`.
90
+ 3. Configure PyPI Trusted Publishing for the GitHub repository.
91
+ 4. Push a version tag and publish a GitHub release.
92
+ 5. The release workflow will build and upload the package to PyPI.
@@ -0,0 +1,65 @@
1
+ # codex-stats
2
+
3
+ Local AI usage observability CLI for Codex sessions.
4
+
5
+ ## What it does
6
+
7
+ `codex-stats` reads local Codex state from `~/.codex` and shows:
8
+
9
+ - session totals from `state_5.sqlite`
10
+ - request counts from rollout JSONL files
11
+ - model and project breakdown from local session metadata
12
+ - estimated cost from a local pricing table
13
+
14
+ ## Install
15
+
16
+ ```bash
17
+ pip install .
18
+ ```
19
+
20
+ After publish:
21
+
22
+ ```bash
23
+ pipx install codex-stats
24
+ ```
25
+
26
+ For local development:
27
+
28
+ ```bash
29
+ python3 -m venv .venv
30
+ source .venv/bin/activate
31
+ python -m pip install -U pip setuptools
32
+ python -m pip install -e .
33
+ ```
34
+
35
+ ## Usage
36
+
37
+ ```bash
38
+ codex-stats
39
+ codex-stats today
40
+ codex-stats session
41
+ codex-stats session --id <session_id>
42
+ codex-stats --json
43
+ ```
44
+
45
+ From the repo without installing:
46
+
47
+ ```bash
48
+ PYTHONPATH=src python3 -m codex_stats
49
+ ```
50
+
51
+ ## Notes
52
+
53
+ - This tool does not intercept Codex API traffic.
54
+ - Costs are estimates, not authoritative billing values.
55
+ - The current MVP relies on local file formats that may evolve with Codex CLI versions.
56
+
57
+ ## Release
58
+
59
+ Recommended publish flow:
60
+
61
+ 1. Create the GitHub repository `vivek378521/codex-stats`.
62
+ 2. Create a PyPI project named `codex-stats`.
63
+ 3. Configure PyPI Trusted Publishing for the GitHub repository.
64
+ 4. Push a version tag and publish a GitHub release.
65
+ 5. The release workflow will build and upload the package to PyPI.
@@ -0,0 +1,46 @@
1
+ [build-system]
2
+ requires = ["setuptools>=68"]
3
+ build-backend = "setuptools.build_meta"
4
+
5
+ [project]
6
+ name = "codex-stats"
7
+ version = "0.1.0"
8
+ description = "Local AI usage observability CLI for Codex sessions."
9
+ readme = "README.md"
10
+ requires-python = ">=3.11"
11
+ license = { text = "MIT" }
12
+ authors = [
13
+ { name = "Vivek" }
14
+ ]
15
+ keywords = ["codex", "cli", "analytics", "sqlite", "usage"]
16
+ classifiers = [
17
+ "Development Status :: 3 - Alpha",
18
+ "Environment :: Console",
19
+ "Intended Audience :: Developers",
20
+ "License :: OSI Approved :: MIT License",
21
+ "Programming Language :: Python :: 3",
22
+ "Programming Language :: Python :: 3.11",
23
+ "Programming Language :: Python :: 3.12",
24
+ "Programming Language :: Python :: 3.13",
25
+ "Topic :: Software Development :: Libraries :: Python Modules",
26
+ "Topic :: Utilities",
27
+ ]
28
+
29
+ [project.optional-dependencies]
30
+ dev = [
31
+ "build>=1.2.2",
32
+ ]
33
+
34
+ [project.scripts]
35
+ codex-stats = "codex_stats.cli:main"
36
+
37
+ [project.urls]
38
+ Homepage = "https://github.com/vivek378521/codex-stats"
39
+ Repository = "https://github.com/vivek378521/codex-stats"
40
+ Issues = "https://github.com/vivek378521/codex-stats/issues"
41
+
42
+ [tool.setuptools]
43
+ package-dir = { "" = "src" }
44
+
45
+ [tool.setuptools.packages.find]
46
+ where = ["src"]
@@ -0,0 +1,4 @@
1
+ [egg_info]
2
+ tag_build =
3
+ tag_date = 0
4
+
@@ -0,0 +1,3 @@
1
+ __all__ = ["__version__"]
2
+
3
+ __version__ = "0.1.0"
@@ -0,0 +1,5 @@
1
from .cli import main

# Support running the package directly: ``python -m codex_stats``.
if __name__ == "__main__":
    raise SystemExit(main())
@@ -0,0 +1,57 @@
1
+ from __future__ import annotations
2
+
3
+ import argparse
4
+ import sys
5
+
6
+ from .config import Paths
7
+ from .display import as_json, format_session, format_summary
8
+ from .ingest import get_session, get_session_details
9
+ from .metrics import summarize_today
10
+
11
+
12
+ def build_parser() -> argparse.ArgumentParser:
13
+ parser = argparse.ArgumentParser(prog="codex-stats", description="Local usage analytics for Codex.")
14
+ parser.add_argument("--json", action="store_true", dest="json_output", help="Output JSON.")
15
+ subparsers = parser.add_subparsers(dest="command")
16
+
17
+ today_parser = subparsers.add_parser("today", help="Show today's usage summary.")
18
+ today_parser.add_argument("--json", action="store_true", dest="json_output", help="Output JSON.")
19
+
20
+ session_parser = subparsers.add_parser("session", help="Show a session summary.")
21
+ session_parser.add_argument("--id", dest="session_id", help="Specific session ID.")
22
+ session_parser.add_argument("--json", action="store_true", dest="json_output", help="Output JSON.")
23
+
24
+ return parser
25
+
26
+
27
def main(argv: list[str] | None = None) -> int:
    """CLI entry point for ``codex-stats``.

    Args:
        argv: Argument list to parse; ``None`` means ``sys.argv[1:]``.

    Returns:
        Process exit code: 0 on success, 1 when no session is found or the
        command is not recognized.
    """
    parser = build_parser()
    args = parser.parse_args(argv)
    paths = Paths.discover()

    # A bare ``codex-stats`` behaves like ``codex-stats today``.
    if args.command in (None, "today"):
        summary = summarize_today(paths)
        if args.json_output:
            print(as_json(summary.to_dict()))
        else:
            print(format_summary(summary))
        return 0

    if args.command == "session":
        # Without --id, get_session resolves to the most recently updated session.
        session = get_session(paths, session_id=args.session_id)
        if session is None:
            print("No Codex session found.", file=sys.stderr)
            return 1
        details = get_session_details(paths, session)
        if args.json_output:
            print(as_json(details.to_dict()))
        else:
            print(format_session(details))
        return 0

    # Unreachable with the current subparser choices; defensive fallback.
    parser.print_help()
    return 1
54
+
55
+
56
+ if __name__ == "__main__":
57
+ raise SystemExit(main())
@@ -0,0 +1,23 @@
1
+ from __future__ import annotations
2
+
3
+ import os
4
+ from dataclasses import dataclass
5
+ from pathlib import Path
6
+
7
+
8
@dataclass(frozen=True)
class Paths:
    """Filesystem locations of the local Codex state this tool reads."""

    # Root of the Codex home directory ($CODEX_HOME or ~/.codex).
    codex_home: Path
    # SQLite database holding per-thread session rows.
    state_db: Path
    # SQLite database logs_1.sqlite (not read by the code shown here).
    logs_db: Path
    # Directory containing rollout JSONL files.
    sessions_dir: Path

    @classmethod
    def discover(cls) -> "Paths":
        """Locate the Codex home directory and derive all data paths from it."""
        home = Path(os.environ.get("CODEX_HOME", "~/.codex")).expanduser()
        return cls(
            codex_home=home,
            state_db=home / "state_5.sqlite",
            logs_db=home / "logs_1.sqlite",
            sessions_dir=home / "sessions",
        )
@@ -0,0 +1,59 @@
1
+ from __future__ import annotations
2
+
3
+ import json
4
+ from datetime import UTC
5
+
6
+ from .models import SessionDetails, TimeSummary
7
+
8
+
9
def format_summary(summary: TimeSummary) -> str:
    """Render a ``TimeSummary`` as a human-readable multi-line report."""
    rows = [
        ("Window", str(summary.label)),
        ("Sessions", str(summary.sessions)),
        ("Requests", str(summary.requests)),
        ("Input tokens", f"{summary.input_tokens:,}"),
        ("Output tokens", f"{summary.output_tokens:,}"),
        ("Cached input tokens", f"{summary.cached_input_tokens:,}"),
        ("Reasoning tokens", f"{summary.reasoning_output_tokens:,}"),
        ("Total tokens", f"{summary.total_tokens:,}"),
        ("Estimated cost", f"${summary.estimated_cost_usd:.2f}"),
    ]
    # The model line is omitted entirely when no model name was recorded.
    if summary.top_model:
        rows.append(("Top model", summary.top_model))
    body = "\n".join(f"{name}: {value}" for name, value in rows)
    return "Codex Usage Summary\n\n" + body
26
+
27
+
28
def format_session(details: SessionDetails) -> str:
    """Render a single ``SessionDetails`` as a human-readable multi-line report."""
    session = details.session
    rows = [
        ("Session ID", session.session_id),
        ("Project", session.project_name),
        ("Model", session.model or "unknown"),
        # Prefer the rollout start time; fall back to the DB creation time.
        ("Started", _fmt_dt(details.started_at or session.created_at)),
        ("Updated", _fmt_dt(session.updated_at)),
        ("Requests", details.request_count),
        ("Input tokens", _fmt_optional_int(details.input_tokens)),
        ("Output tokens", _fmt_optional_int(details.output_tokens)),
        ("Cached input tokens", _fmt_optional_int(details.cached_input_tokens)),
        ("Reasoning tokens", _fmt_optional_int(details.reasoning_output_tokens)),
        ("Total tokens", f"{details.effective_total_tokens():,}"),
    ]
    body = "\n".join(f"{name}: {value}" for name, value in rows)
    return "Session Summary\n\n" + body
46
+
47
+
48
def as_json(payload: dict) -> str:
    """Serialize *payload* as pretty-printed JSON with deterministic key order."""
    encoded = json.dumps(payload, sort_keys=True, indent=2)
    return encoded
50
+
51
+
52
def _fmt_dt(value) -> str:
    """Format a datetime as a UTC ISO-8601 string, or ``"unknown"`` when falsy."""
    # NOTE(review): assumes *value* is a timezone-aware datetime or None —
    # astimezone() on a naive datetime would interpret it as local time; confirm.
    return value.astimezone(UTC).isoformat() if value else "unknown"
54
+
55
+
56
+ def _fmt_optional_int(value: int | None) -> str:
57
+ if value is None:
58
+ return "unknown"
59
+ return f"{value:,}"
@@ -0,0 +1,156 @@
1
+ from __future__ import annotations
2
+
3
+ import json
4
+ import sqlite3
5
+ from collections.abc import Iterable
6
+ from datetime import UTC, date, datetime, tzinfo
7
+ from pathlib import Path
8
+
9
+ from .config import Paths
10
+ from .models import SessionDetails, SessionRecord
11
+
12
+
13
+ def _dt_from_unix(timestamp: int) -> datetime:
14
+ return datetime.fromtimestamp(timestamp, tz=UTC)
15
+
16
+
17
+ def _connect_sqlite(path: Path) -> sqlite3.Connection:
18
+ connection = sqlite3.connect(path)
19
+ connection.row_factory = sqlite3.Row
20
+ return connection
21
+
22
+
23
def iter_sessions(paths: Paths) -> Iterable[SessionRecord]:
    """Load all session rows from the local Codex state database.

    Rows are ordered newest-first (``updated_at``, then ``created_at``).
    Returns an empty list when the database does not exist yet.
    """
    if not paths.state_db.exists():
        return []

    query = """
    SELECT
        id,
        created_at,
        updated_at,
        cwd,
        model,
        model_provider,
        tokens_used,
        rollout_path,
        git_branch,
        git_origin_url
    FROM threads
    ORDER BY updated_at DESC, created_at DESC
    """
    connection = _connect_sqlite(paths.state_db)
    try:
        rows = connection.execute(query).fetchall()
    finally:
        # Bug fix: ``with sqlite3.Connection`` only manages the transaction —
        # it never closes the handle, so the previous code leaked a connection
        # on every call. Close explicitly instead.
        connection.close()

    return [
        SessionRecord(
            session_id=row["id"],
            created_at=_dt_from_unix(row["created_at"]),
            updated_at=_dt_from_unix(row["updated_at"]),
            cwd=row["cwd"],
            model=row["model"],
            model_provider=row["model_provider"],
            tokens_used=row["tokens_used"],
            rollout_path=Path(row["rollout_path"]),
            git_branch=row["git_branch"],
            git_origin_url=row["git_origin_url"],
        )
        for row in rows
    ]
62
+
63
+
64
def get_session(paths: Paths, session_id: str | None = None) -> SessionRecord | None:
    """Return the session matching *session_id*, the most recent one, or None."""
    sessions = list(iter_sessions(paths))
    if not sessions:
        return None
    if session_id is None:
        # iter_sessions orders newest-first, so index 0 is the latest session.
        return sessions[0]
    return next((session for session in sessions if session.session_id == session_id), None)
74
+
75
+
76
def _read_rollout_details(path: Path) -> dict[str, int | str | None]:
    """Parse a rollout JSONL file into aggregate usage details.

    Returns a dict with:
        request_count: number of ``user_message`` events seen.
        input/output/cached/reasoning token fields and
        total_tokens_from_rollout: values from the *last* ``token_count``
            event's ``total_token_usage`` block (later events overwrite
            earlier ones, so they are treated as running totals).
        started_at: raw ISO timestamp string from the ``session_meta``
            event, or None when absent.

    A missing file or malformed lines are tolerated: defaults are returned
    unchanged for anything that cannot be read.
    """
    details: dict[str, int | str | None] = {
        "request_count": 0,
        "input_tokens": None,
        "output_tokens": None,
        "cached_input_tokens": None,
        "reasoning_output_tokens": None,
        "total_tokens_from_rollout": None,
        "started_at": None,
    }
    if not path.exists():
        return details

    with path.open("r", encoding="utf-8") as handle:
        for raw_line in handle:
            line = raw_line.strip()
            if not line:
                continue
            try:
                event = json.loads(line)
            except json.JSONDecodeError:
                # Skip partial/corrupt lines (e.g. from an interrupted write).
                continue

            event_type = event.get("type")
            payload = event.get("payload", {})
            if event_type == "session_meta":
                timestamp = payload.get("timestamp")
                if isinstance(timestamp, str):
                    details["started_at"] = timestamp
            # Everything below only applies to event_msg records.
            if event_type != "event_msg":
                continue

            payload_type = payload.get("type")
            if payload_type == "user_message":
                # int(... or 0) keeps the type checker happy with the mixed-value dict.
                details["request_count"] = int(details["request_count"] or 0) + 1
            elif payload_type == "token_count":
                info = payload.get("info") or {}
                total_usage = info.get("total_token_usage") or {}
                if total_usage:
                    details["input_tokens"] = total_usage.get("input_tokens")
                    details["output_tokens"] = total_usage.get("output_tokens")
                    details["cached_input_tokens"] = total_usage.get("cached_input_tokens")
                    details["reasoning_output_tokens"] = total_usage.get("reasoning_output_tokens")
                    details["total_tokens_from_rollout"] = total_usage.get("total_tokens")

    return details
122
+
123
+
124
def get_session_details(paths: Paths, session: SessionRecord) -> SessionDetails:
    """Combine a database session row with usage parsed from its rollout file."""
    rollout_details = _read_rollout_details(session.rollout_path)
    started_at_raw = rollout_details["started_at"]
    started_at = None
    if isinstance(started_at_raw, str):
        # Normalize a trailing "Z" to an explicit UTC offset; fromisoformat()
        # on some Python versions rejects the bare "Z" suffix.
        started_at = datetime.fromisoformat(started_at_raw.replace("Z", "+00:00"))

    return SessionDetails(
        session=session,
        request_count=int(rollout_details["request_count"] or 0),
        input_tokens=_as_optional_int(rollout_details["input_tokens"]),
        output_tokens=_as_optional_int(rollout_details["output_tokens"]),
        cached_input_tokens=_as_optional_int(rollout_details["cached_input_tokens"]),
        reasoning_output_tokens=_as_optional_int(rollout_details["reasoning_output_tokens"]),
        total_tokens_from_rollout=_as_optional_int(rollout_details["total_tokens_from_rollout"]),
        started_at=started_at,
    )
+ )
141
+
142
+
143
def sessions_for_day(paths: Paths, target_day: date, timezone: tzinfo | None = None) -> list[SessionRecord]:
    """Return sessions created on *target_day* when viewed in *timezone* (UTC default)."""
    zone = timezone or UTC
    matching: list[SessionRecord] = []
    for session in iter_sessions(paths):
        if session.created_at.astimezone(zone).date() == target_day:
            matching.append(session)
    return matching
149
+
150
+
151
+ def _as_optional_int(value: object) -> int | None:
152
+ if value is None:
153
+ return None
154
+ if isinstance(value, int):
155
+ return value
156
+ return int(value)
@@ -0,0 +1,52 @@
1
+ from __future__ import annotations
2
+
3
+ from collections import Counter
4
from datetime import date, datetime, tzinfo
5
+
6
+ from .config import Paths
7
+ from .ingest import get_session_details, sessions_for_day
8
+ from .models import SessionDetails, TimeSummary
9
+
10
# Conservative placeholder pricing. Replace with model-specific pricing later.
DEFAULT_USD_PER_1K_TOKENS = 0.01


def estimate_cost_usd(total_tokens: int, usd_per_1k_tokens: float = DEFAULT_USD_PER_1K_TOKENS) -> float:
    """Estimate USD cost for *total_tokens* at a flat per-1k-token rate.

    Rounded to 4 decimal places; this is an estimate, not a billing value.
    """
    thousands = total_tokens / 1000.0
    return round(thousands * usd_per_1k_tokens, 4)
16
+
17
+
18
def summarize_today(paths: Paths, now: datetime | None = None) -> TimeSummary:
    """Summarize all sessions created today (in the local timezone).

    Args:
        paths: Locations of the local Codex state.
        now: Override for the current time (mainly for tests); defaults to
            the system clock, made timezone-aware via astimezone().
    """
    current_time = now or datetime.now().astimezone()
    timezone = current_time.tzinfo
    sessions = sessions_for_day(paths, current_time.date(), timezone)
    details = [get_session_details(paths, session) for session in sessions]
    return summarize_details("today", details)
24
+
25
+
26
def summarize_details(label: str, details: list[SessionDetails]) -> TimeSummary:
    """Aggregate per-session details into a single ``TimeSummary``.

    Unknown (None) token counts contribute zero.  ``top_model`` is the most
    frequent non-empty model name, or ``None`` when no session reports one.
    """
    sessions_count = len(details)
    requests = sum(detail.request_count for detail in details)
    input_tokens = sum(detail.input_tokens or 0 for detail in details)
    output_tokens = sum(detail.output_tokens or 0 for detail in details)
    cached_input_tokens = sum(detail.cached_input_tokens or 0 for detail in details)
    reasoning_output_tokens = sum(detail.reasoning_output_tokens or 0 for detail in details)
    # Each session contributes its best-known total (rollout first, DB fallback).
    total_tokens = sum(detail.effective_total_tokens() for detail in details)
    model_counter = Counter(detail.session.model for detail in details if detail.session.model)
    top_model = model_counter.most_common(1)[0][0] if model_counter else None
    return TimeSummary(
        label=label,
        sessions=sessions_count,
        requests=requests,
        input_tokens=input_tokens,
        output_tokens=output_tokens,
        cached_input_tokens=cached_input_tokens,
        reasoning_output_tokens=reasoning_output_tokens,
        total_tokens=total_tokens,
        estimated_cost_usd=estimate_cost_usd(total_tokens),
        top_model=top_model,
    )
48
+
49
+
50
+ def local_date(value: datetime, timezone: tzinfo | None) -> datetime.date:
51
+ target_timezone = timezone or value.astimezone().tzinfo
52
+ return value.astimezone(target_timezone).date()
@@ -0,0 +1,81 @@
1
+ from __future__ import annotations
2
+
3
+ from dataclasses import asdict, dataclass
4
+ from datetime import datetime
5
+ from pathlib import Path
6
+ from typing import Any
7
+
8
+
9
+ @dataclass(frozen=True)
10
+ class SessionRecord:
11
+ session_id: str
12
+ created_at: datetime
13
+ updated_at: datetime
14
+ cwd: str
15
+ model: str | None
16
+ model_provider: str
17
+ tokens_used: int
18
+ rollout_path: Path
19
+ git_branch: str | None
20
+ git_origin_url: str | None
21
+
22
+ @property
23
+ def project_name(self) -> str:
24
+ cwd_path = Path(self.cwd)
25
+ return cwd_path.name or self.cwd
26
+
27
+ def to_dict(self) -> dict[str, Any]:
28
+ payload = asdict(self)
29
+ payload["created_at"] = self.created_at.isoformat()
30
+ payload["updated_at"] = self.updated_at.isoformat()
31
+ payload["rollout_path"] = str(self.rollout_path)
32
+ payload["project_name"] = self.project_name
33
+ return payload
34
+
35
+
36
+ @dataclass(frozen=True)
37
+ class SessionDetails:
38
+ session: SessionRecord
39
+ request_count: int
40
+ input_tokens: int | None
41
+ output_tokens: int | None
42
+ cached_input_tokens: int | None
43
+ reasoning_output_tokens: int | None
44
+ total_tokens_from_rollout: int | None
45
+ started_at: datetime | None
46
+
47
+ def effective_total_tokens(self) -> int:
48
+ if self.total_tokens_from_rollout is not None:
49
+ return self.total_tokens_from_rollout
50
+ return self.session.tokens_used
51
+
52
+ def to_dict(self) -> dict[str, Any]:
53
+ payload = {
54
+ "session": self.session.to_dict(),
55
+ "request_count": self.request_count,
56
+ "input_tokens": self.input_tokens,
57
+ "output_tokens": self.output_tokens,
58
+ "cached_input_tokens": self.cached_input_tokens,
59
+ "reasoning_output_tokens": self.reasoning_output_tokens,
60
+ "total_tokens_from_rollout": self.total_tokens_from_rollout,
61
+ "effective_total_tokens": self.effective_total_tokens(),
62
+ "started_at": self.started_at.isoformat() if self.started_at else None,
63
+ }
64
+ return payload
65
+
66
+
67
@dataclass(frozen=True)
class TimeSummary:
    """Aggregated usage over a time window (e.g. "today")."""

    # Human-readable window name shown in reports.
    label: str
    # Number of sessions in the window.
    sessions: int
    # Number of user messages across those sessions.
    requests: int
    input_tokens: int
    output_tokens: int
    cached_input_tokens: int
    reasoning_output_tokens: int
    total_tokens: int
    # Estimated via a flat per-token rate; not an authoritative billing value.
    estimated_cost_usd: float
    # Most frequent model name, or None when no session reported one.
    top_model: str | None

    def to_dict(self) -> dict[str, Any]:
        """Return a JSON-serializable mapping of all fields."""
        return asdict(self)
@@ -0,0 +1,92 @@
1
+ Metadata-Version: 2.4
2
+ Name: codex-stats
3
+ Version: 0.1.0
4
+ Summary: Local AI usage observability CLI for Codex sessions.
5
+ Author: Vivek
6
+ License: MIT
7
+ Project-URL: Homepage, https://github.com/vivek378521/codex-stats
8
+ Project-URL: Repository, https://github.com/vivek378521/codex-stats
9
+ Project-URL: Issues, https://github.com/vivek378521/codex-stats/issues
10
+ Keywords: codex,cli,analytics,sqlite,usage
11
+ Classifier: Development Status :: 3 - Alpha
12
+ Classifier: Environment :: Console
13
+ Classifier: Intended Audience :: Developers
14
+ Classifier: License :: OSI Approved :: MIT License
15
+ Classifier: Programming Language :: Python :: 3
16
+ Classifier: Programming Language :: Python :: 3.11
17
+ Classifier: Programming Language :: Python :: 3.12
18
+ Classifier: Programming Language :: Python :: 3.13
19
+ Classifier: Topic :: Software Development :: Libraries :: Python Modules
20
+ Classifier: Topic :: Utilities
21
+ Requires-Python: >=3.11
22
+ Description-Content-Type: text/markdown
23
+ License-File: LICENSE
24
+ Provides-Extra: dev
25
+ Requires-Dist: build>=1.2.2; extra == "dev"
26
+ Dynamic: license-file
27
+
28
+ # codex-stats
29
+
30
+ Local AI usage observability CLI for Codex sessions.
31
+
32
+ ## What it does
33
+
34
+ `codex-stats` reads local Codex state from `~/.codex` and shows:
35
+
36
+ - session totals from `state_5.sqlite`
37
+ - request counts from rollout JSONL files
38
+ - model and project breakdown from local session metadata
39
+ - estimated cost from a local pricing table
40
+
41
+ ## Install
42
+
43
+ ```bash
44
+ pip install .
45
+ ```
46
+
47
+ After publish:
48
+
49
+ ```bash
50
+ pipx install codex-stats
51
+ ```
52
+
53
+ For local development:
54
+
55
+ ```bash
56
+ python3 -m venv .venv
57
+ source .venv/bin/activate
58
+ python -m pip install -U pip setuptools
59
+ python -m pip install -e .
60
+ ```
61
+
62
+ ## Usage
63
+
64
+ ```bash
65
+ codex-stats
66
+ codex-stats today
67
+ codex-stats session
68
+ codex-stats session --id <session_id>
69
+ codex-stats --json
70
+ ```
71
+
72
+ From the repo without installing:
73
+
74
+ ```bash
75
+ PYTHONPATH=src python3 -m codex_stats
76
+ ```
77
+
78
+ ## Notes
79
+
80
+ - This tool does not intercept Codex API traffic.
81
+ - Costs are estimates, not authoritative billing values.
82
+ - The current MVP relies on local file formats that may evolve with Codex CLI versions.
83
+
84
+ ## Release
85
+
86
+ Recommended publish flow:
87
+
88
+ 1. Create the GitHub repository `vivek378521/codex-stats`.
89
+ 2. Create a PyPI project named `codex-stats`.
90
+ 3. Configure PyPI Trusted Publishing for the GitHub repository.
91
+ 4. Push a version tag and publish a GitHub release.
92
+ 5. The release workflow will build and upload the package to PyPI.
@@ -0,0 +1,19 @@
1
+ LICENSE
2
+ README.md
3
+ pyproject.toml
4
+ src/codex_stats/__init__.py
5
+ src/codex_stats/__main__.py
6
+ src/codex_stats/cli.py
7
+ src/codex_stats/config.py
8
+ src/codex_stats/display.py
9
+ src/codex_stats/ingest.py
10
+ src/codex_stats/metrics.py
11
+ src/codex_stats/models.py
12
+ src/codex_stats.egg-info/PKG-INFO
13
+ src/codex_stats.egg-info/SOURCES.txt
14
+ src/codex_stats.egg-info/dependency_links.txt
15
+ src/codex_stats.egg-info/entry_points.txt
16
+ src/codex_stats.egg-info/requires.txt
17
+ src/codex_stats.egg-info/top_level.txt
18
+ tests/test_cli.py
19
+ tests/test_metrics.py
@@ -0,0 +1,2 @@
1
+ [console_scripts]
2
+ codex-stats = codex_stats.cli:main
@@ -0,0 +1,3 @@
1
+
2
+ [dev]
3
+ build>=1.2.2
@@ -0,0 +1 @@
1
+ codex_stats
@@ -0,0 +1,27 @@
1
+ from __future__ import annotations
2
+
3
+ import sys
4
+ import unittest
5
+ from pathlib import Path
6
+
7
+ sys.path.insert(0, str(Path(__file__).resolve().parents[1] / "src"))
8
+
9
+ from codex_stats.cli import build_parser
10
+
11
+
12
class CliTestCase(unittest.TestCase):
    """Argument-parser construction tests for the codex-stats CLI."""

    def test_default_parser(self) -> None:
        """With no argv, no subcommand is selected and --json defaults off."""
        parser = build_parser()
        args = parser.parse_args([])
        self.assertIsNone(args.command)
        self.assertFalse(args.json_output)

    def test_session_parser(self) -> None:
        """``session --id abc`` selects the subcommand and captures the ID."""
        parser = build_parser()
        args = parser.parse_args(["session", "--id", "abc"])
        self.assertEqual(args.command, "session")
        self.assertEqual(args.session_id, "abc")
24
+
25
+
26
+ if __name__ == "__main__":
27
+ unittest.main()
@@ -0,0 +1,162 @@
1
+ from __future__ import annotations
2
+
3
+ import json
4
+ import sqlite3
5
+ import sys
6
+ import tempfile
7
+ import unittest
8
+ from pathlib import Path
9
+
10
+ sys.path.insert(0, str(Path(__file__).resolve().parents[1] / "src"))
11
+
12
+ from codex_stats.config import Paths
13
+ from codex_stats.ingest import get_session, get_session_details
14
+ from codex_stats.metrics import summarize_today
15
+
16
+
17
+ class MetricsTestCase(unittest.TestCase):
18
+ def setUp(self) -> None:
19
+ self.tmpdir = tempfile.TemporaryDirectory()
20
+ root = Path(self.tmpdir.name)
21
+ codex_home = root / ".codex"
22
+ sessions_dir = codex_home / "sessions" / "2026" / "04" / "03"
23
+ sessions_dir.mkdir(parents=True)
24
+ self.state_db = codex_home / "state_5.sqlite"
25
+ rollout_path = sessions_dir / "rollout-test.jsonl"
26
+
27
+ connection = sqlite3.connect(self.state_db)
28
+ connection.execute(
29
+ """
30
+ CREATE TABLE threads (
31
+ id TEXT PRIMARY KEY,
32
+ rollout_path TEXT NOT NULL,
33
+ created_at INTEGER NOT NULL,
34
+ updated_at INTEGER NOT NULL,
35
+ source TEXT NOT NULL,
36
+ model_provider TEXT NOT NULL,
37
+ cwd TEXT NOT NULL,
38
+ title TEXT NOT NULL,
39
+ sandbox_policy TEXT NOT NULL,
40
+ approval_mode TEXT NOT NULL,
41
+ tokens_used INTEGER NOT NULL DEFAULT 0,
42
+ has_user_event INTEGER NOT NULL DEFAULT 0,
43
+ archived INTEGER NOT NULL DEFAULT 0,
44
+ archived_at INTEGER,
45
+ git_sha TEXT,
46
+ git_branch TEXT,
47
+ git_origin_url TEXT,
48
+ cli_version TEXT NOT NULL DEFAULT '',
49
+ first_user_message TEXT NOT NULL DEFAULT '',
50
+ agent_nickname TEXT,
51
+ agent_role TEXT,
52
+ memory_mode TEXT NOT NULL DEFAULT 'enabled',
53
+ model TEXT,
54
+ reasoning_effort TEXT,
55
+ agent_path TEXT
56
+ )
57
+ """
58
+ )
59
+ connection.execute(
60
+ """
61
+ INSERT INTO threads (
62
+ id, rollout_path, created_at, updated_at, source, model_provider, cwd,
63
+ title, sandbox_policy, approval_mode, tokens_used, model
64
+ ) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
65
+ """,
66
+ (
67
+ "session-1",
68
+ str(rollout_path),
69
+ 1775222209,
70
+ 1775222447,
71
+ "cli",
72
+ "openai",
73
+ "/tmp/project",
74
+ "Test Thread",
75
+ "workspace-write",
76
+ "default",
77
+ 223342,
78
+ "gpt-5.4",
79
+ ),
80
+ )
81
+ connection.commit()
82
+ connection.close()
83
+
84
+ lines = [
85
+ {
86
+ "timestamp": "2026-04-03T13:17:23.324Z",
87
+ "type": "session_meta",
88
+ "payload": {"timestamp": "2026-04-03T13:16:49.765Z"},
89
+ },
90
+ {
91
+ "timestamp": "2026-04-03T13:17:23.325Z",
92
+ "type": "event_msg",
93
+ "payload": {"type": "user_message", "message": "first"},
94
+ },
95
+ {
96
+ "timestamp": "2026-04-03T13:17:23.740Z",
97
+ "type": "event_msg",
98
+ "payload": {
99
+ "type": "token_count",
100
+ "info": {
101
+ "total_token_usage": {
102
+ "input_tokens": 100,
103
+ "cached_input_tokens": 20,
104
+ "output_tokens": 10,
105
+ "reasoning_output_tokens": 3,
106
+ "total_tokens": 110,
107
+ }
108
+ },
109
+ },
110
+ },
111
+ {
112
+ "timestamp": "2026-04-03T13:18:23.325Z",
113
+ "type": "event_msg",
114
+ "payload": {"type": "user_message", "message": "second"},
115
+ },
116
+ {
117
+ "timestamp": "2026-04-03T13:18:23.740Z",
118
+ "type": "event_msg",
119
+ "payload": {
120
+ "type": "token_count",
121
+ "info": {
122
+ "total_token_usage": {
123
+ "input_tokens": 250,
124
+ "cached_input_tokens": 50,
125
+ "output_tokens": 30,
126
+ "reasoning_output_tokens": 7,
127
+ "total_tokens": 280,
128
+ }
129
+ },
130
+ },
131
+ },
132
+ ]
133
+ rollout_path.write_text("\n".join(json.dumps(line) for line in lines), encoding="utf-8")
134
+ self.paths = Paths(
135
+ codex_home=codex_home,
136
+ state_db=self.state_db,
137
+ logs_db=codex_home / "logs_1.sqlite",
138
+ sessions_dir=codex_home / "sessions",
139
+ )
140
+
141
+ def tearDown(self) -> None:
142
+ self.tmpdir.cleanup()
143
+
144
+ def test_session_details_are_read_from_local_state(self) -> None:
145
+ session = get_session(self.paths, "session-1")
146
+ assert session is not None
147
+ details = get_session_details(self.paths, session)
148
+ self.assertEqual(details.request_count, 2)
149
+ self.assertEqual(details.input_tokens, 250)
150
+ self.assertEqual(details.output_tokens, 30)
151
+ self.assertEqual(details.effective_total_tokens(), 280)
152
+
153
+ def test_today_summary_aggregates_sessions(self) -> None:
154
+ summary = summarize_today(self.paths)
155
+ self.assertEqual(summary.sessions, 1)
156
+ self.assertEqual(summary.requests, 2)
157
+ self.assertEqual(summary.total_tokens, 280)
158
+ self.assertEqual(summary.top_model, "gpt-5.4")
159
+
160
+
161
+ if __name__ == "__main__":
162
+ unittest.main()