nullabot 1.0.1__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- nullabot/__init__.py +3 -0
- nullabot/agents/__init__.py +7 -0
- nullabot/agents/claude_agent.py +785 -0
- nullabot/bot/__init__.py +5 -0
- nullabot/bot/telegram.py +1729 -0
- nullabot/cli.py +740 -0
- nullabot/core/__init__.py +13 -0
- nullabot/core/claude_code.py +303 -0
- nullabot/core/memory.py +864 -0
- nullabot/core/project.py +194 -0
- nullabot/core/rate_limiter.py +484 -0
- nullabot/core/reliability.py +420 -0
- nullabot/core/sandbox.py +143 -0
- nullabot/core/state.py +214 -0
- nullabot-1.0.1.dist-info/METADATA +130 -0
- nullabot-1.0.1.dist-info/RECORD +19 -0
- nullabot-1.0.1.dist-info/WHEEL +4 -0
- nullabot-1.0.1.dist-info/entry_points.txt +2 -0
- nullabot-1.0.1.dist-info/licenses/LICENSE +21 -0
nullabot/core/project.py
ADDED
@@ -0,0 +1,194 @@
"""
Project Management - Create and manage project groups.

Each project is an isolated workspace with its own agent and state.
"""

import json
from datetime import datetime
from pathlib import Path
from typing import Optional

from pydantic import BaseModel, Field

from nullabot.core.sandbox import Sandbox
from nullabot.core.state import AgentState, AgentStatus, AgentType, Checkpoint


class ProjectConfig(BaseModel):
    """Project configuration stored in .nullabot/config.json"""

    name: str
    description: str = ""
    created_at: datetime = Field(default_factory=datetime.now)

    # Active agent (only one at a time)
    active_agent: Optional[AgentType] = None

    # Project-specific settings
    allowed_paths: list[str] = Field(default_factory=list)  # Extra paths agent can access
    telegram_chat_id: Optional[int] = None  # For notifications


class Project:
    """
    A project workspace with isolated sandbox and agent state.
    """

    def __init__(self, root: Path):
        """
        Initialize project from existing folder.

        Args:
            root: Path to project folder
        """
        self.root = root.resolve()
        self.sandbox = Sandbox(self.root)
        self._config_path = self.sandbox._aurora_dir / "config.json"
        self._config: Optional[ProjectConfig] = None

    @property
    def name(self) -> str:
        return self.config.name

    @property
    def config(self) -> ProjectConfig:
        """Load or create project config."""
        if self._config is None:
            if self._config_path.exists():
                data = json.loads(self._config_path.read_text())
                self._config = ProjectConfig.model_validate(data)
            else:
                # Shouldn't happen - use create() for new projects
                self._config = ProjectConfig(name=self.root.name)
                self._save_config()
        return self._config

    def _save_config(self) -> None:
        """Save config to disk."""
        if self._config:
            self._config_path.write_text(self._config.model_dump_json(indent=2))

    @classmethod
    def create(cls, root: Path, name: str, description: str = "") -> "Project":
        """
        Create a new project.

        Args:
            root: Parent directory for project
            name: Project name (will be folder name)
            description: Optional description

        Returns:
            New Project instance
        """
        project_path = root / name
        if project_path.exists():
            raise ValueError(f"Project already exists: {project_path}")

        # Create directory structure
        project_path.mkdir(parents=True)
        (project_path / ".nullabot").mkdir()
        (project_path / "workspace").mkdir()

        # Create config
        config = ProjectConfig(name=name, description=description)
        config_path = project_path / ".nullabot" / "config.json"
        config_path.write_text(config.model_dump_json(indent=2))

        return cls(project_path)

    def get_agent_state(self, agent_type: AgentType) -> AgentState:
        """Get or create agent state."""
        state = AgentState.load(self.sandbox.state_file)
        if state is None or state.agent_type != agent_type:
            state = AgentState(
                agent_type=agent_type,
                project_name=self.name,
            )
        return state

    def save_agent_state(self, state: AgentState) -> None:
        """Save agent state."""
        state.save(self.sandbox.state_file)

    def get_checkpoint(self) -> Optional[Checkpoint]:
        """Load last checkpoint if exists."""
        return Checkpoint.load(self.sandbox.checkpoint_file)

    def save_checkpoint(self, checkpoint: Checkpoint) -> None:
        """Save checkpoint."""
        checkpoint.save(self.sandbox.checkpoint_file)

    def set_active_agent(self, agent_type: AgentType) -> None:
        """Set the active agent for this project."""
        self._config = self.config  # Ensure loaded
        self._config.active_agent = agent_type
        self._save_config()

    def clear_active_agent(self) -> None:
        """Clear active agent (when stopping)."""
        self._config = self.config
        self._config.active_agent = None
        self._save_config()

    def __repr__(self) -> str:
        return f"Project(name={self.name}, root={self.root})"


class ProjectManager:
    """
    Manages all projects.
    """

    def __init__(self, projects_dir: Path):
        """
        Initialize project manager.

        Args:
            projects_dir: Root directory containing all projects
        """
        self.projects_dir = projects_dir.resolve()
        self.projects_dir.mkdir(parents=True, exist_ok=True)

    def list_projects(self) -> list[Project]:
        """List all projects."""
        projects = []
        for path in self.projects_dir.iterdir():
            if path.is_dir() and (path / ".nullabot").exists():
                projects.append(Project(path))
        return sorted(projects, key=lambda p: p.name)

    def get_project(self, name: str) -> Optional[Project]:
        """Get project by name."""
        project_path = self.projects_dir / name
        if project_path.exists() and (project_path / ".nullabot").exists():
            return Project(project_path)
        return None

    def create_project(self, name: str, description: str = "") -> Project:
        """Create new project."""
        return Project.create(self.projects_dir, name, description)

    def delete_project(self, name: str) -> bool:
        """
        Delete a project.

        Warning: This deletes all project files!
        """
        import shutil

        project_path = self.projects_dir / name
        if project_path.exists():
            shutil.rmtree(project_path)
            return True
        return False

    def get_running_projects(self) -> list[Project]:
        """Get all projects with running agents."""
        running = []
        for project in self.list_projects():
            state = AgentState.load(project.sandbox.state_file)
            if state and state.status == AgentStatus.RUNNING:
                running.append(project)
        return running
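Taken together, `ProjectConfig`, `Project`, and `ProjectManager` implement a directory-per-project layout: each workspace gets a `.nullabot/` metadata folder (with `config.json` persisted via Pydantic) plus a `workspace/` folder, and projects are later rediscovered by the presence of `.nullabot/`. A rough usage sketch, not part of the packaged code; the import path is inferred from the file location above and the projects directory is hypothetical:

```python
from pathlib import Path

from nullabot.core.project import ProjectManager  # import path inferred from the diff above

# Hypothetical root directory holding all project workspaces.
manager = ProjectManager(Path.home() / "nullabot-projects")

# Creates <projects_dir>/demo/.nullabot/config.json and <projects_dir>/demo/workspace/ on disk.
project = manager.create_project("demo", description="Scratch project")

# Existing folders with a .nullabot/ subdirectory are picked up as projects.
for p in manager.list_projects():
    print(p.name, p.root)

# Config round-trips through .nullabot/config.json.
print(project.config.description)
```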
nullabot/core/rate_limiter.py
ADDED
@@ -0,0 +1,484 @@
"""
Rate Limiter - Don't burn API credits.

Handles Claude Code subscription limits:
- 5-hour rolling window
- Weekly limits
- Automatic wait when limit reached

For $200/month Max plan:
- 240-480 hours of Sonnet 4 per week
- 24-40 hours of Opus 4 per week
"""

import asyncio
import json
import re
from datetime import datetime, timedelta
from pathlib import Path
from typing import Optional, Callable

from pydantic import BaseModel, Field


class FiveHourWindow(BaseModel):
    """Track the 5-hour rolling window for Claude Code."""

    window_start: Optional[datetime] = None
    requests_in_window: int = 0
    tokens_in_window: int = 0
    is_limit_reached: bool = False
    limit_reached_at: Optional[datetime] = None
    estimated_reset_at: Optional[datetime] = None

    def start_window(self) -> None:
        """Start a new 5-hour window."""
        self.window_start = datetime.now()
        self.requests_in_window = 0
        self.tokens_in_window = 0
        self.is_limit_reached = False
        self.limit_reached_at = None

    def check_window_expired(self) -> bool:
        """Check if current 5-hour window has expired."""
        if not self.window_start:
            return True
        elapsed = datetime.now() - self.window_start
        return elapsed.total_seconds() >= 5 * 3600  # 5 hours

    def record_request(self, tokens: int = 0) -> None:
        """Record a request in current window."""
        if self.check_window_expired():
            self.start_window()
        self.requests_in_window += 1
        self.tokens_in_window += tokens

    def mark_limit_reached(self) -> datetime:
        """Mark that limit was reached, return estimated reset time."""
        self.is_limit_reached = True
        self.limit_reached_at = datetime.now()
        # Reset happens 5 hours after window start
        if self.window_start:
            self.estimated_reset_at = self.window_start + timedelta(hours=5)
        else:
            # Fallback: 5 hours from now
            self.estimated_reset_at = datetime.now() + timedelta(hours=5)
        return self.estimated_reset_at

    def get_wait_time(self) -> float:
        """Get seconds to wait until window resets."""
        if not self.is_limit_reached:
            return 0.0
        if not self.estimated_reset_at:
            return 300.0  # 5 min fallback

        wait = (self.estimated_reset_at - datetime.now()).total_seconds()
        return max(0.0, wait)


class WeeklyUsage(BaseModel):
    """Track weekly usage limits."""

    week_start: Optional[datetime] = None
    total_hours_used: float = 0.0
    sonnet_hours: float = 0.0
    opus_hours: float = 0.0

    # Limits for $200 Max plan (conservative estimates)
    max_sonnet_hours: float = 240.0  # 240-480, use lower bound
    max_opus_hours: float = 24.0  # 24-40, use lower bound

    def check_week_reset(self) -> None:
        """Reset if new week."""
        now = datetime.now()
        if not self.week_start:
            # Start of current week (Monday)
            self.week_start = now - timedelta(days=now.weekday())
            self.week_start = self.week_start.replace(hour=0, minute=0, second=0, microsecond=0)
            return

        # Check if it's been a week
        if (now - self.week_start).days >= 7:
            self.week_start = now - timedelta(days=now.weekday())
            self.week_start = self.week_start.replace(hour=0, minute=0, second=0, microsecond=0)
            self.total_hours_used = 0.0
            self.sonnet_hours = 0.0
            self.opus_hours = 0.0

    def record_usage(self, seconds: float, model: str = "opus") -> None:
        """Record usage time."""
        self.check_week_reset()
        hours = seconds / 3600.0
        self.total_hours_used += hours

        if "opus" in model.lower():
            self.opus_hours += hours
        else:
            self.sonnet_hours += hours

    def is_limit_reached(self, model: str = "opus") -> bool:
        """Check if weekly limit reached."""
        self.check_week_reset()
        if "opus" in model.lower():
            return self.opus_hours >= self.max_opus_hours
        return self.sonnet_hours >= self.max_sonnet_hours

    def get_remaining_hours(self, model: str = "opus") -> float:
        """Get remaining hours for the week."""
        self.check_week_reset()
        if "opus" in model.lower():
            return max(0, self.max_opus_hours - self.opus_hours)
        return max(0, self.max_sonnet_hours - self.sonnet_hours)


class RateLimitStats(BaseModel):
    """Statistics for rate limiting."""

    total_requests: int = 0
    total_tokens_used: int = 0
    total_wait_time_seconds: float = 0.0
    five_hour_limit_hits: int = 0
    weekly_limit_hits: int = 0
    last_request_time: Optional[datetime] = None
    session_start: Optional[datetime] = None


class ClaudeCodeRateLimiter:
    """
    Rate limiter specifically for Claude Code subscriptions.

    Handles:
    - 5-hour rolling window limits
    - Weekly usage limits
    - Automatic detection and waiting for limit reset
    - Graceful backoff on errors
    """

    # Error patterns that indicate rate limit
    LIMIT_PATTERNS = [
        r"5.?hour.*limit",
        r"limit.*reached",
        r"usage.*limit",
        r"rate.*limit",
        r"too.*many.*requests",
        r"quota.*exceeded",
        r"try.*again.*later",
        r"429",
        r"overloaded",
        r"capacity",
        r"exceeded.*quota",
        r"usage.*cap",
        r"rolling.*window",
        r"subscription.*limit",
        r"daily.*limit",
        r"hourly.*limit",
        r"wait.*before",
        r"cool.*down",
        r"throttl",
    ]

    def __init__(
        self,
        plan: str = "max_200",  # "pro", "max_100", "max_200"
        on_limit_reached: Optional[Callable[[str, float], None]] = None,
        auto_wait: bool = True,
    ):
        """
        Initialize rate limiter.

        Args:
            plan: Subscription plan ("pro", "max_100", "max_200")
            on_limit_reached: Callback when limit hit (reason, wait_seconds)
            auto_wait: Automatically wait when limit reached
        """
        self.plan = plan
        self.on_limit_reached = on_limit_reached
        self.auto_wait = auto_wait

        # 5-hour window tracking
        self.five_hour = FiveHourWindow()

        # Weekly tracking
        self.weekly = WeeklyUsage()
        self._configure_plan_limits()

        # Stats
        self.stats = RateLimitStats(session_start=datetime.now())

        # Backoff
        self.backoff_multiplier = 1.0
        self.consecutive_errors = 0

        # Lock for async safety
        self._lock = asyncio.Lock()

        # Cycle timing (for weekly hour calculation)
        self._cycle_start: Optional[datetime] = None

    def _configure_plan_limits(self) -> None:
        """Configure limits based on plan."""
        if self.plan == "pro":
            self.weekly.max_sonnet_hours = 40.0
            self.weekly.max_opus_hours = 0.0  # Pro doesn't get Opus
        elif self.plan == "max_100":
            self.weekly.max_sonnet_hours = 140.0
            self.weekly.max_opus_hours = 15.0
        else:  # max_200
            self.weekly.max_sonnet_hours = 240.0
            self.weekly.max_opus_hours = 24.0

    def is_limit_error(self, error_message: str) -> bool:
        """Check if error message indicates rate limit."""
        error_lower = error_message.lower()
        for pattern in self.LIMIT_PATTERNS:
            if re.search(pattern, error_lower):
                return True
        return False

    def parse_wait_time_from_error(self, error_message: str) -> Optional[float]:
        """Try to parse wait time from error message."""
        # Look for patterns like "try again in X minutes/hours"
        patterns = [
            r"(\d+)\s*hour",
            r"(\d+)\s*minute",
            r"(\d+)\s*second",
        ]
        multipliers = [3600, 60, 1]

        for pattern, mult in zip(patterns, multipliers):
            match = re.search(pattern, error_message.lower())
            if match:
                return int(match.group(1)) * mult

        return None

    async def acquire(
        self,
        model: str = "opus",
        estimated_tokens: int = 4000,
    ) -> tuple[bool, str, float]:
        """
        Acquire permission to make a request.

        Args:
            model: Model being used ("sonnet" or "opus")
            estimated_tokens: Estimated tokens for request

        Returns:
            Tuple of (can_proceed, status_message, wait_time)
        """
        async with self._lock:
            # Check weekly limit
            if self.weekly.is_limit_reached(model):
                remaining = self.weekly.get_remaining_hours(model)
                msg = f"Weekly {model} limit reached. {remaining:.1f}h remaining."
                self.stats.weekly_limit_hits += 1

                if self.on_limit_reached:
                    # Wait until next week
                    wait = 7 * 24 * 3600  # Simplified
                    self.on_limit_reached("weekly_limit", wait)

                return False, msg, 0.0

            # Check 5-hour window
            if self.five_hour.is_limit_reached:
                wait_time = self.five_hour.get_wait_time()

                if wait_time > 0:
                    msg = f"5-hour limit reached. Resets in {wait_time/60:.0f} minutes."

                    if self.auto_wait and wait_time <= 18000:  # Max 5 hours
                        # Wait with progress updates
                        await self._wait_with_countdown(wait_time, "5-hour limit")
                        self.five_hour.is_limit_reached = False
                        self.five_hour.start_window()
                    else:
                        if self.on_limit_reached:
                            self.on_limit_reached("five_hour_limit", wait_time)
                        return False, msg, wait_time
                else:
                    # Window has reset
                    self.five_hour.is_limit_reached = False
                    self.five_hour.start_window()

            # Apply backoff if we've had errors
            if self.backoff_multiplier > 1.0:
                backoff_wait = (self.backoff_multiplier - 1) * 10  # 10s base
                await asyncio.sleep(backoff_wait)

            # Record the request
            self.five_hour.record_request(estimated_tokens)
            self.stats.total_requests += 1
            self.stats.total_tokens_used += estimated_tokens
            self.stats.last_request_time = datetime.now()

            # Start cycle timer for weekly tracking
            self._cycle_start = datetime.now()

            return True, "ok", 0.0

    async def _wait_with_countdown(self, wait_seconds: float, reason: str) -> None:
        """Wait with periodic status updates."""
        from rich.console import Console
        console = Console()

        start = datetime.now()
        total_wait = wait_seconds

        while wait_seconds > 0:
            hours = int(wait_seconds // 3600)
            minutes = int((wait_seconds % 3600) // 60)
            seconds = int(wait_seconds % 60)

            console.print(
                f"\r[yellow]{reason}: waiting {hours:02d}:{minutes:02d}:{seconds:02d}[/yellow]",
                end=""
            )

            # Sleep in chunks to allow interruption
            sleep_time = min(60, wait_seconds)
            await asyncio.sleep(sleep_time)
            wait_seconds -= sleep_time

        elapsed = (datetime.now() - start).total_seconds()
        self.stats.total_wait_time_seconds += elapsed
        console.print(f"\n[green]{reason}: wait complete, resuming...[/green]")

    def record_success(self, model: str = "opus") -> None:
        """Record successful API call."""
        self.consecutive_errors = 0
        self.backoff_multiplier = max(1.0, self.backoff_multiplier * 0.8)

        # Record time for weekly tracking
        if self._cycle_start:
            elapsed = (datetime.now() - self._cycle_start).total_seconds()
            self.weekly.record_usage(elapsed, model)
            self._cycle_start = None

    def record_error(self, error_message: str) -> tuple[bool, float]:
        """
        Record an API error.

        Returns:
            Tuple of (is_rate_limit, wait_time_seconds)
        """
        self.consecutive_errors += 1

        # Check if it's a rate limit error
        if self.is_limit_error(error_message):
            self.stats.five_hour_limit_hits += 1

            # Try to parse wait time
            parsed_wait = self.parse_wait_time_from_error(error_message)

            if parsed_wait:
                wait_time = parsed_wait
            else:
                # Default: assume 5-hour window needs to reset
                wait_time = self.five_hour.get_wait_time()
                if wait_time <= 0:
                    # Start fresh window tracking
                    self.five_hour.mark_limit_reached()
                    wait_time = self.five_hour.get_wait_time()

            self.five_hour.is_limit_reached = True

            if self.on_limit_reached:
                self.on_limit_reached("api_rate_limit", wait_time)

            return True, wait_time

        # Regular error - increase backoff
        self.backoff_multiplier = min(8.0, self.backoff_multiplier * 1.5)
        return False, 0.0

    async def handle_limit_error(self, error_message: str) -> bool:
        """
        Handle a rate limit error with automatic waiting.

        Returns:
            True if waited and ready to retry, False if should stop
        """
        is_limit, wait_time = self.record_error(error_message)

        if not is_limit:
            return False

        if self.auto_wait and wait_time > 0:
            # Cap at 5 hours
            wait_time = min(wait_time, 5 * 3600)
            await self._wait_with_countdown(wait_time, "Rate limit")
            self.five_hour.is_limit_reached = False
            return True

        return False

    def get_status(self) -> dict:
        """Get current status."""
        return {
            "plan": self.plan,
            "five_hour": {
                "window_start": self.five_hour.window_start.isoformat() if self.five_hour.window_start else None,
                "requests": self.five_hour.requests_in_window,
                "is_limited": self.five_hour.is_limit_reached,
                "reset_in_minutes": self.five_hour.get_wait_time() / 60 if self.five_hour.is_limit_reached else 0,
            },
            "weekly": {
                "sonnet_hours_used": round(self.weekly.sonnet_hours, 2),
                "sonnet_hours_remaining": round(self.weekly.get_remaining_hours("sonnet"), 2),
                "opus_hours_used": round(self.weekly.opus_hours, 2),
                "opus_hours_remaining": round(self.weekly.get_remaining_hours("opus"), 2),
            },
            "stats": {
                "total_requests": self.stats.total_requests,
                "total_wait_seconds": round(self.stats.total_wait_time_seconds, 0),
                "limit_hits": self.stats.five_hour_limit_hits,
            },
        }

    def save(self, path: Path) -> None:
        """Save state to disk."""
        state = {
            "plan": self.plan,
            "five_hour": self.five_hour.model_dump(),
            "weekly": self.weekly.model_dump(),
            "stats": self.stats.model_dump(),
            "backoff_multiplier": self.backoff_multiplier,
            "saved_at": datetime.now().isoformat(),
        }
        path.write_text(json.dumps(state, indent=2, default=str), encoding="utf-8")

    @classmethod
    def load(cls, path: Path, **kwargs) -> Optional["ClaudeCodeRateLimiter"]:
        """Load state from disk."""
        if not path.exists():
            return None

        try:
            data = json.loads(path.read_text(encoding="utf-8"))

            limiter = cls(plan=data.get("plan", "max_200"), **kwargs)

            # Restore 5-hour window
            if "five_hour" in data:
                limiter.five_hour = FiveHourWindow.model_validate(data["five_hour"])

            # Restore weekly
            if "weekly" in data:
                limiter.weekly = WeeklyUsage.model_validate(data["weekly"])

            # Restore stats
            if "stats" in data:
                limiter.stats = RateLimitStats.model_validate(data["stats"])

            limiter.backoff_multiplier = data.get("backoff_multiplier", 1.0)

            return limiter
        except Exception:
            return None


# Alias for backward compatibility
RateLimiter = ClaudeCodeRateLimiter
AdaptiveRateLimiter = ClaudeCodeRateLimiter
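The limiter is built to wrap each Claude Code call: `acquire()` before the request (optionally sleeping through the 5-hour window when `auto_wait` is on), `record_success()` or `handle_limit_error()` afterwards, and `save()`/`load()` to persist counters across restarts. A rough usage sketch, not part of the packaged code; the import path is inferred from the file location above, and `call_model()` plus the state-file path are hypothetical stand-ins:

```python
import asyncio
from pathlib import Path

from nullabot.core.rate_limiter import ClaudeCodeRateLimiter  # import path inferred from the diff

STATE_FILE = Path("rate_limiter_state.json")  # hypothetical location

async def call_model(prompt: str) -> str:
    # Placeholder for the real Claude Code invocation.
    return f"echo: {prompt}"

async def main() -> None:
    # Restore persisted counters if present, otherwise start fresh.
    limiter = ClaudeCodeRateLimiter.load(STATE_FILE) or ClaudeCodeRateLimiter(plan="max_200")

    ok, status, wait_time = await limiter.acquire(model="opus", estimated_tokens=4000)
    if not ok:
        print(f"Blocked by limiter: {status} (retry in {wait_time:.0f}s)")
        return

    try:
        print(await call_model("hello"))
        limiter.record_success(model="opus")
    except Exception as exc:
        # If the error text matches a rate-limit pattern, this waits and returns True.
        if await limiter.handle_limit_error(str(exc)):
            print("Limit cleared, safe to retry.")

    limiter.save(STATE_FILE)
    print(limiter.get_status())

asyncio.run(main())
```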