ccburn 0.1.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
ccburn/data/history.py ADDED
@@ -0,0 +1,397 @@
1
+ """SQLite history storage for usage snapshots."""
2
+
3
+ import logging
4
+ import sqlite3
5
+ from datetime import datetime, timedelta, timezone
6
+ from pathlib import Path
7
+
8
+ try:
9
+ from .models import LimitData, LimitType, UsageSnapshot
10
+ except ImportError:
11
+ from ccburn.data.models import LimitData, LimitType, UsageSnapshot
12
+
13
+
14
+ logger = logging.getLogger(__name__)
15
+
16
+
17
class HistoryDB:
    """SQLite database for storing usage snapshots.

    One row is written per :class:`UsageSnapshot`; rows older than
    ``RETENTION_DAYS`` are removed by :meth:`prune_old_data`.  Timestamps are
    stored as ISO 8601 UTC text, so lexicographic comparison in SQL matches
    chronological order.
    """

    SCHEMA = """
    -- Usage snapshots from API polling
    CREATE TABLE IF NOT EXISTS usage_snapshots (
        id INTEGER PRIMARY KEY AUTOINCREMENT,
        timestamp TEXT NOT NULL,  -- ISO 8601 UTC

        -- 5-hour rolling
        five_hour_utilization REAL,
        five_hour_resets_at TEXT,

        -- 7-day all models
        seven_day_all_utilization REAL,
        seven_day_all_resets_at TEXT,

        -- 7-day sonnet
        seven_day_sonnet_utilization REAL,
        seven_day_sonnet_resets_at TEXT,

        -- 7-day opus
        seven_day_opus_utilization REAL,
        seven_day_opus_resets_at TEXT,

        -- Raw API response for debugging
        raw_response TEXT
    );

    CREATE INDEX IF NOT EXISTS idx_snapshots_timestamp ON usage_snapshots(timestamp);

    -- Metadata
    CREATE TABLE IF NOT EXISTS metadata (
        key TEXT PRIMARY KEY,
        value TEXT
    );
    """

    # Snapshots older than this are deleted by prune_old_data().
    RETENTION_DAYS = 7

    def __init__(self, db_path: Path | str | None = None, in_memory: bool = False):
        """Initialize the history database.

        Args:
            db_path: Path to SQLite database file (default: ~/.ccburn/history.db)
            in_memory: If True, use in-memory database (for testing/fallback)
        """
        if in_memory:
            self.db_path: Path | str = ":memory:"
        elif db_path is None:
            self.db_path = Path.home() / ".ccburn" / "history.db"
        else:
            self.db_path = Path(db_path)

        # Connection is created lazily on first use.
        self._conn: sqlite3.Connection | None = None
        self._initialized = False

    def _ensure_dir(self) -> None:
        """Create the database's parent directory if needed (no-op for ':memory:')."""
        if isinstance(self.db_path, Path):
            self.db_path.parent.mkdir(parents=True, exist_ok=True)

    def _get_connection(self) -> sqlite3.Connection:
        """Return the open connection, creating it (and the schema) on first use."""
        if self._conn is None:
            self._ensure_dir()
            self._conn = sqlite3.connect(
                str(self.db_path) if isinstance(self.db_path, Path) else self.db_path,
                detect_types=sqlite3.PARSE_DECLTYPES,
                timeout=10.0,  # Wait up to 10 seconds for locks
            )
            self._conn.row_factory = sqlite3.Row
            # Enable WAL mode for better concurrent read/write performance
            self._conn.execute("PRAGMA journal_mode=WAL")

        if not self._initialized:
            self._initialize_schema()
            self._initialized = True

        return self._conn

    def _initialize_schema(self) -> None:
        """Create tables and indexes if they do not already exist."""
        conn = self._conn
        if conn is None:
            return

        conn.executescript(self.SCHEMA)
        conn.commit()

    def save_snapshot(self, snapshot: UsageSnapshot) -> None:
        """Save a usage snapshot to the database.

        Deduplicates by skipping the insert when a snapshot with the same
        5-hour utilization already exists within the last 5 seconds.

        Args:
            snapshot: The snapshot to save
        """
        conn = self._get_connection()

        five_hour_util = snapshot.session.utilization if snapshot.session else None

        # Check for a recent duplicate (within 5 seconds with same utilization).
        # NOTE: use IS rather than = so two NULL utilizations compare equal;
        # "col = NULL" is never true in SQL and would defeat deduplication
        # for snapshots that carry no session data.
        recent_cutoff = (snapshot.timestamp - timedelta(seconds=5)).isoformat()
        cursor = conn.execute(
            """
            SELECT 1 FROM usage_snapshots
            WHERE timestamp > ?
            AND five_hour_utilization IS ?
            LIMIT 1
            """,
            (recent_cutoff, five_hour_util),
        )
        if cursor.fetchone():
            # Skip duplicate
            return

        five_hour_resets = (
            snapshot.session.resets_at.isoformat() if snapshot.session else None
        )

        seven_day_util = snapshot.weekly.utilization if snapshot.weekly else None
        seven_day_resets = (
            snapshot.weekly.resets_at.isoformat() if snapshot.weekly else None
        )

        sonnet_util = (
            snapshot.weekly_sonnet.utilization if snapshot.weekly_sonnet else None
        )
        sonnet_resets = (
            snapshot.weekly_sonnet.resets_at.isoformat() if snapshot.weekly_sonnet else None
        )

        opus_util = snapshot.weekly_opus.utilization if snapshot.weekly_opus else None
        opus_resets = (
            snapshot.weekly_opus.resets_at.isoformat() if snapshot.weekly_opus else None
        )

        conn.execute(
            """
            INSERT INTO usage_snapshots (
                timestamp,
                five_hour_utilization, five_hour_resets_at,
                seven_day_all_utilization, seven_day_all_resets_at,
                seven_day_sonnet_utilization, seven_day_sonnet_resets_at,
                seven_day_opus_utilization, seven_day_opus_resets_at,
                raw_response
            ) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
            """,
            (
                snapshot.timestamp.isoformat(),
                five_hour_util,
                five_hour_resets,
                seven_day_util,
                seven_day_resets,
                sonnet_util,
                sonnet_resets,
                opus_util,
                opus_resets,
                snapshot.raw_response,
            ),
        )
        conn.commit()

    def get_latest_snapshot(self) -> UsageSnapshot | None:
        """Get the most recent snapshot from the database.

        Returns:
            Most recent UsageSnapshot, or None if no snapshots exist
        """
        conn = self._get_connection()
        cursor = conn.execute(
            "SELECT * FROM usage_snapshots ORDER BY timestamp DESC LIMIT 1"
        )
        row = cursor.fetchone()
        if row:
            return self._row_to_snapshot(row)
        return None

    def get_latest_snapshot_age_seconds(self) -> float | None:
        """Get the age of the most recent snapshot in seconds.

        Returns:
            Age in seconds, or None if no snapshots exist
        """
        latest = self.get_latest_snapshot()
        if latest:
            now = datetime.now(timezone.utc)
            return (now - latest.timestamp).total_seconds()
        return None

    def get_snapshots(
        self,
        since: datetime | None = None,
        limit: int | None = None,
    ) -> list[UsageSnapshot]:
        """Get historical snapshots.

        Args:
            since: Only return snapshots after this time
            limit: Maximum number of snapshots to return

        Returns:
            List of UsageSnapshot objects, sorted by timestamp ascending
        """
        conn = self._get_connection()

        query = "SELECT * FROM usage_snapshots"
        params: list = []

        if since:
            query += " WHERE timestamp >= ?"
            params.append(since.isoformat())

        query += " ORDER BY timestamp ASC"

        if limit:
            query += " LIMIT ?"
            params.append(limit)

        cursor = conn.execute(query, params)

        # Rows that fail to parse come back as None and are dropped.
        snapshots = []
        for row in cursor.fetchall():
            snapshot = self._row_to_snapshot(row)
            if snapshot:
                snapshots.append(snapshot)

        return snapshots

    def get_snapshots_for_limit(
        self,
        limit_type: LimitType,
        since: datetime | None = None,
    ) -> list[UsageSnapshot]:
        """Get snapshots that have data for a specific limit type.

        Args:
            limit_type: Which limit to filter by
            since: Only return snapshots after this time

        Returns:
            List of snapshots with non-null data for the limit type
        """
        snapshots = self.get_snapshots(since=since)
        return [s for s in snapshots if s.get_limit(limit_type) is not None]

    def _row_to_snapshot(self, row: sqlite3.Row) -> UsageSnapshot | None:
        """Convert a database row to a UsageSnapshot.

        Returns None (with a warning logged) when the row cannot be parsed.
        """

        def parse_ts(value: str) -> datetime:
            # Stored timestamps are UTC; tag naive values explicitly.
            ts = datetime.fromisoformat(value)
            return ts if ts.tzinfo is not None else ts.replace(tzinfo=timezone.utc)

        def parse_limit(prefix: str, limit_type: LimitType) -> LimitData | None:
            # Each limit occupies a <prefix>_utilization / <prefix>_resets_at
            # column pair; a NULL utilization means the limit was absent.
            util = row[f"{prefix}_utilization"]
            if util is None:
                return None
            return LimitData(
                utilization=util,
                resets_at=parse_ts(row[f"{prefix}_resets_at"]),
                limit_type=limit_type,
            )

        try:
            return UsageSnapshot(
                timestamp=parse_ts(row["timestamp"]),
                session=parse_limit("five_hour", LimitType.SESSION),
                weekly=parse_limit("seven_day_all", LimitType.WEEKLY),
                weekly_sonnet=parse_limit("seven_day_sonnet", LimitType.WEEKLY_SONNET),
                # Opus shares the weekly window; there is no dedicated LimitType.
                weekly_opus=parse_limit("seven_day_opus", LimitType.WEEKLY),
                raw_response=row["raw_response"],
            )
        except (ValueError, KeyError, TypeError) as e:
            logger.warning(f"Failed to parse snapshot row: {e}")
            return None

    def prune_old_data(self) -> int:
        """Remove data older than retention period.

        Returns:
            Number of rows deleted
        """
        conn = self._get_connection()
        cutoff = datetime.now(timezone.utc) - timedelta(days=self.RETENTION_DAYS)

        cursor = conn.execute(
            "DELETE FROM usage_snapshots WHERE timestamp < ?",
            (cutoff.isoformat(),),
        )
        conn.commit()

        deleted = cursor.rowcount
        if deleted > 0:
            logger.info(f"Pruned {deleted} old snapshots")

        return deleted

    def clear_history(self) -> int:
        """Clear all history data.

        Returns:
            Number of rows deleted
        """
        conn = self._get_connection()

        cursor = conn.execute("DELETE FROM usage_snapshots")
        conn.commit()

        deleted = cursor.rowcount
        logger.info(f"Cleared {deleted} snapshots from history")

        return deleted

    def get_snapshot_count(self) -> int:
        """Get the total number of snapshots in the database.

        Returns:
            Number of snapshots
        """
        conn = self._get_connection()
        cursor = conn.execute("SELECT COUNT(*) FROM usage_snapshots")
        return cursor.fetchone()[0]

    def close(self) -> None:
        """Close the database connection; a later call reopens it lazily."""
        if self._conn:
            self._conn.close()
            self._conn = None
            self._initialized = False

    def __enter__(self) -> "HistoryDB":
        return self

    def __exit__(self, exc_type, exc_val, exc_tb) -> None:
        self.close()
ccburn/data/models.py ADDED
@@ -0,0 +1,148 @@
1
+ """Data models for ccburn usage tracking."""
2
+
3
+ from dataclasses import dataclass
4
+ from datetime import datetime, timedelta, timezone
5
+ from enum import Enum
6
+
7
+
8
class LimitType(str, Enum):
    """The three usage limits tracked by ccburn."""

    SESSION = "session"  # 5-hour rolling
    WEEKLY = "weekly"  # 7-day all models
    WEEKLY_SONNET = "weekly-sonnet"  # 7-day sonnet only

    @property
    def window_hours(self) -> int:
        """Duration of this limit's rolling window, in hours."""
        if self is LimitType.SESSION:
            return 5
        return 168  # 7 days * 24 hours

    @property
    def display_name(self) -> str:
        """Human-readable label for this limit."""
        names = {
            LimitType.SESSION: "Session (5h)",
            LimitType.WEEKLY: "Weekly",
            LimitType.WEEKLY_SONNET: "Weekly Sonnet",
        }
        return names[self]

    @property
    def api_field(self) -> str:
        """Name of this limit's field in the API response."""
        fields = {
            LimitType.SESSION: "five_hour",
            LimitType.WEEKLY: "seven_day",
            LimitType.WEEKLY_SONNET: "seven_day_sonnet",
        }
        return fields[self]
37
+
38
+
39
@dataclass
class LimitData:
    """Usage data for a single limit window."""

    utilization: float  # 0.0 to 1.0
    resets_at: datetime
    limit_type: LimitType

    @property
    def window_hours(self) -> int:
        """Length of the window in hours (delegates to the limit type)."""
        return self.limit_type.window_hours

    @property
    def window_start(self) -> datetime:
        """Start of the current window, derived from the reset time."""
        window = timedelta(hours=self.window_hours)
        return self.resets_at - window

    @property
    def utilization_percent(self) -> float:
        """Utilization expressed on a 0-100 scale."""
        return 100 * self.utilization
61
+
62
+
63
@dataclass
class UsageSnapshot:
    """A point-in-time snapshot of all usage limits."""

    timestamp: datetime
    session: LimitData | None  # five_hour from API
    weekly: LimitData | None  # seven_day from API
    weekly_sonnet: LimitData | None  # seven_day_sonnet from API
    weekly_opus: LimitData | None  # seven_day_opus from API (tracked but not displayed)
    raw_response: str | None = None  # Original JSON for debugging

    def get_limit(self, limit_type: LimitType) -> LimitData | None:
        """Get limit data by type.

        Note: opus has no LimitType of its own, so ``weekly_opus`` is not
        reachable through this accessor.
        """
        return {
            LimitType.SESSION: self.session,
            LimitType.WEEKLY: self.weekly,
            LimitType.WEEKLY_SONNET: self.weekly_sonnet,
        }.get(limit_type)

    @classmethod
    def from_api_response(cls, data: dict, timestamp: datetime | None = None) -> "UsageSnapshot":
        """Create a UsageSnapshot from API response data.

        Args:
            data: Decoded JSON body from the usage endpoint.
            timestamp: Snapshot time; defaults to now (UTC).

        Limits whose fields are missing or unparsable come back as None.
        """
        import json

        if timestamp is None:
            timestamp = datetime.now(timezone.utc)

        def parse_limit(api_data: dict | None, limit_type: LimitType) -> LimitData | None:
            """Parse one limit object; return None if absent or malformed."""
            if not api_data or not isinstance(api_data, dict):
                return None
            utilization = api_data.get("utilization")
            resets_at_str = api_data.get("resets_at")
            if utilization is None or resets_at_str is None:
                return None
            try:
                # Older fromisoformat() cannot parse a trailing "Z" suffix.
                resets_at = datetime.fromisoformat(resets_at_str.replace("Z", "+00:00"))
                # API returns 0-100 scale, normalize to 0-1
                return LimitData(
                    utilization=float(utilization) / 100.0,
                    resets_at=resets_at,
                    limit_type=limit_type,
                )
            except (ValueError, TypeError):
                return None

        return cls(
            timestamp=timestamp,
            session=parse_limit(data.get("five_hour"), LimitType.SESSION),
            weekly=parse_limit(data.get("seven_day"), LimitType.WEEKLY),
            weekly_sonnet=parse_limit(data.get("seven_day_sonnet"), LimitType.WEEKLY_SONNET),
            # Opus shares the weekly window, so it reuses LimitType.WEEKLY.
            # (Previously this duplicated parse_limit's logic inline.)
            weekly_opus=parse_limit(data.get("seven_day_opus"), LimitType.WEEKLY),
            raw_response=json.dumps(data),
        )
136
+
137
+
138
@dataclass
class BurnMetrics:
    """Calculated burn rate metrics for a specific limit.

    Produced by a burn-rate calculator elsewhere in the package; this class
    only carries the results.  The string fields below use fixed vocabularies
    (listed in the per-field comments).
    """

    limit_type: LimitType  # which limit these metrics describe
    percent_per_hour: float  # burn rate; presumably utilization percentage points per hour — confirm against the calculator
    trend: str  # "low", "moderate", "high", "critical"
    estimated_minutes_to_100: int | None  # projected minutes until 100% utilization; None when not estimable
    budget_pace: float  # 0.0 to 1.0 - what percentage of window has elapsed
    status: str  # "ahead_of_pace", "on_pace", "behind_pace"
    recommendation: str  # "plenty_available", "on_track", "moderate_pace", "conserve", "critical"
@@ -0,0 +1,141 @@
1
+ """Anthropic Usage API client."""
2
+
3
+ import json
4
+ import time
5
+ import urllib.error
6
+ import urllib.request
7
+
8
+ try:
9
+ from .credentials import get_access_token
10
+ from .models import UsageSnapshot
11
+ except ImportError:
12
+ from ccburn.data.credentials import get_access_token
13
+ from ccburn.data.models import UsageSnapshot
14
+
15
+
16
class UsageClientError(Exception):
    """Base exception for all usage-client failures."""
20
+
21
+
22
class APIError(UsageClientError):
    """Raised when the API answers with an error response."""

    def __init__(self, message: str, status_code: int | None = None):
        # Keep the HTTP status (if any) so callers can branch on it.
        self.status_code = status_code
        super().__init__(message)
28
+
29
+
30
class NetworkError(UsageClientError):
    """Raised when the network request itself fails."""
34
+
35
+
36
class UsageClient:
    """Client for the Anthropic Usage API.

    Fetches the OAuth usage endpoint with the caller's access token and
    converts the JSON body into a :class:`UsageSnapshot`.  Transient failures
    (5xx responses, network errors, timeouts) are retried with exponential
    backoff; authentication and other client errors are raised immediately.
    """

    API_URL = "https://api.anthropic.com/api/oauth/usage"
    API_BETA_HEADER = "oauth-2025-04-20"
    DEFAULT_TIMEOUT = 10  # seconds
    MAX_RETRIES = 3  # total attempts for transient failures
    RETRY_DELAYS = [1, 2, 4]  # Exponential backoff between attempts (seconds)

    def __init__(self, timeout: int = DEFAULT_TIMEOUT):
        """Initialize the usage client.

        Args:
            timeout: Request timeout in seconds
        """
        self.timeout = timeout
        self._last_response: dict | None = None  # last successful JSON body
        self._last_error: str | None = None  # message from the last failure

    def fetch_usage(self) -> UsageSnapshot:
        """Fetch current usage data from the API.

        Returns:
            UsageSnapshot with current usage data

        Raises:
            CredentialsError: If credentials are missing or invalid
            APIError: If API returns an error
            NetworkError: If network request fails after retries
        """
        token = get_access_token()

        request = urllib.request.Request(
            self.API_URL,
            headers={
                "Accept": "application/json",
                "Authorization": f"Bearer {token}",
                "anthropic-beta": self.API_BETA_HEADER,
            },
        )

        last_error: NetworkError | None = None

        # MAX_RETRIES is the total number of attempts; RETRY_DELAYS supplies
        # the sleep between consecutive attempts.  (Previously the attempt
        # count was implicitly len(RETRY_DELAYS) and MAX_RETRIES was unused,
        # so the two constants could silently diverge.)
        for attempt in range(self.MAX_RETRIES):
            try:
                with urllib.request.urlopen(request, timeout=self.timeout) as response:
                    data = json.loads(response.read())
                    self._last_response = data
                    self._last_error = None
                    return UsageSnapshot.from_api_response(data)

            except urllib.error.HTTPError as e:
                self._last_error = f"HTTP {e.code}: {e.reason}"
                if e.code == 401:
                    raise APIError(
                        "Authentication failed. Your token may be invalid.\n"
                        "Please restart Claude Code to refresh authentication.",
                        status_code=e.code,
                    ) from e
                elif e.code == 403:
                    raise APIError(
                        "Access forbidden. You may not have permission to access usage data.",
                        status_code=e.code,
                    ) from e
                elif e.code >= 500:
                    # Server error - transient, retry
                    last_error = NetworkError(f"Server error: {e.code} {e.reason}")
                else:
                    # Remaining 4xx errors are not retryable
                    raise APIError(f"API error: {e.code} {e.reason}", status_code=e.code) from e

            except urllib.error.URLError as e:
                self._last_error = str(e.reason)
                last_error = NetworkError(f"Network error: {e.reason}")

            except json.JSONDecodeError as e:
                self._last_error = f"Invalid JSON: {e}"
                raise APIError(f"Invalid JSON response from API: {e}") from e

            except TimeoutError:
                self._last_error = "Request timed out"
                last_error = NetworkError("Request timed out")

            # Back off before the next attempt (no sleep after the last one);
            # clamp the index in case MAX_RETRIES exceeds len(RETRY_DELAYS).
            if attempt < self.MAX_RETRIES - 1:
                delay = self.RETRY_DELAYS[min(attempt, len(self.RETRY_DELAYS) - 1)]
                time.sleep(delay)

        # All retries exhausted
        if last_error:
            raise last_error
        raise NetworkError("Failed to fetch usage data after retries")

    def get_last_response(self) -> dict | None:
        """Get the last raw API response (for debugging).

        Returns:
            Last API response dictionary, or None if no successful call
        """
        return self._last_response

    def get_last_error(self) -> str | None:
        """Get the last error message.

        Returns:
            Last error message, or None if last call succeeded
        """
        return self._last_error
@@ -0,0 +1,17 @@
1
+ """Display layer for ccburn - Rich-based TUI components."""
2
+
3
+ try:
4
+ from .chart import BurnupChart
5
+ from .gauges import create_gauge_section, create_header
6
+ from .layout import BurnupLayout
7
+ except ImportError:
8
+ from ccburn.display.chart import BurnupChart
9
+ from ccburn.display.gauges import create_gauge_section, create_header
10
+ from ccburn.display.layout import BurnupLayout
11
+
12
+ __all__ = [
13
+ "create_gauge_section",
14
+ "create_header",
15
+ "BurnupChart",
16
+ "BurnupLayout",
17
+ ]