anvil-dev-framework 0.1.6 → 0.1.8
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +33 -13
- package/VERSION +1 -1
- package/docs/ANV-263-hook-logging-investigation.md +116 -0
- package/docs/INSTALLATION.md +18 -0
- package/docs/command-reference.md +302 -2
- package/docs/session-workflow.md +62 -9
- package/docs/system-architecture.md +569 -0
- package/global/commands/anvil-settings.md +3 -1
- package/global/commands/audit.md +163 -0
- package/global/commands/checklist.md +180 -0
- package/global/commands/efficiency.md +356 -0
- package/global/commands/evidence.md +99 -32
- package/global/commands/insights.md +101 -3
- package/global/commands/orient.md +29 -0
- package/global/commands/patterns.md +115 -0
- package/global/commands/ralph.md +47 -1
- package/global/commands/token-budget.md +214 -0
- package/global/lib/__pycache__/agent_registry.cpython-314.pyc +0 -0
- package/global/lib/__pycache__/claim_service.cpython-314.pyc +0 -0
- package/global/lib/__pycache__/coderabbit_service.cpython-314.pyc +0 -0
- package/global/lib/__pycache__/context_optimizer.cpython-314.pyc +0 -0
- package/global/lib/__pycache__/coordination_service.cpython-314.pyc +0 -0
- package/global/lib/__pycache__/doc_coverage_service.cpython-314.pyc +0 -0
- package/global/lib/__pycache__/gate_logger.cpython-314.pyc +0 -0
- package/global/lib/__pycache__/git_utils.cpython-314.pyc +0 -0
- package/global/lib/__pycache__/github_service.cpython-314.pyc +0 -0
- package/global/lib/__pycache__/handoff_generator.cpython-314.pyc +0 -0
- package/global/lib/__pycache__/hygiene_service.cpython-314.pyc +0 -0
- package/global/lib/__pycache__/issue_models.cpython-314.pyc +0 -0
- package/global/lib/__pycache__/issue_provider.cpython-314.pyc +0 -0
- package/global/lib/__pycache__/linear_data_service.cpython-314.pyc +0 -0
- package/global/lib/__pycache__/linear_provider.cpython-314.pyc +0 -0
- package/global/lib/__pycache__/local_provider.cpython-314.pyc +0 -0
- package/global/lib/__pycache__/optimization_applier.cpython-314.pyc +0 -0
- package/global/lib/__pycache__/orient_fast.cpython-314.pyc +0 -0
- package/global/lib/__pycache__/quality_service.cpython-314.pyc +0 -0
- package/global/lib/__pycache__/ralph_prompt_generator.cpython-314.pyc +0 -0
- package/global/lib/__pycache__/ralph_state.cpython-314.pyc +0 -0
- package/global/lib/__pycache__/state_manager.cpython-314.pyc +0 -0
- package/global/lib/__pycache__/token_analyzer.cpython-314.pyc +0 -0
- package/global/lib/__pycache__/token_metrics.cpython-314.pyc +0 -0
- package/global/lib/__pycache__/transcript_parser.cpython-314.pyc +0 -0
- package/global/lib/__pycache__/verification_runner.cpython-314.pyc +0 -0
- package/global/lib/__pycache__/verify_iteration.cpython-314.pyc +0 -0
- package/global/lib/__pycache__/verify_subagent.cpython-314.pyc +0 -0
- package/global/lib/context_optimizer.py +323 -0
- package/global/lib/git_utils.py +267 -0
- package/global/lib/issue_models.py +28 -0
- package/global/lib/linear_provider.py +217 -16
- package/global/lib/optimization_applier.py +582 -0
- package/global/lib/orient_fast.py +24 -1
- package/global/lib/ralph_state.py +264 -24
- package/global/lib/token_analyzer.py +1357 -0
- package/global/lib/token_metrics.py +873 -0
- package/global/tests/__pycache__/test_context_optimizer.cpython-314-pytest-9.0.2.pyc +0 -0
- package/global/tests/__pycache__/test_doc_coverage.cpython-314-pytest-9.0.2.pyc +0 -0
- package/global/tests/__pycache__/test_git_utils.cpython-314-pytest-9.0.2.pyc +0 -0
- package/global/tests/__pycache__/test_issue_models.cpython-314-pytest-9.0.2.pyc +0 -0
- package/global/tests/__pycache__/test_linear_filtering.cpython-314-pytest-9.0.2.pyc +0 -0
- package/global/tests/__pycache__/test_linear_provider.cpython-314-pytest-9.0.2.pyc +0 -0
- package/global/tests/__pycache__/test_local_provider.cpython-314-pytest-9.0.2.pyc +0 -0
- package/global/tests/__pycache__/test_optimization_applier.cpython-314-pytest-9.0.2.pyc +0 -0
- package/global/tests/__pycache__/test_token_analyzer.cpython-314-pytest-9.0.2.pyc +0 -0
- package/global/tests/__pycache__/test_token_analyzer_phase6.cpython-314-pytest-9.0.2.pyc +0 -0
- package/global/tests/__pycache__/test_token_metrics.cpython-314-pytest-9.0.2.pyc +0 -0
- package/global/tests/test_context_optimizer.py +321 -0
- package/global/tests/test_git_utils.py +160 -0
- package/global/tests/test_issue_models.py +40 -0
- package/global/tests/test_linear_filtering.py +319 -0
- package/global/tests/test_linear_provider.py +125 -0
- package/global/tests/test_optimization_applier.py +508 -0
- package/global/tests/test_token_analyzer.py +735 -0
- package/global/tests/test_token_analyzer_phase6.py +537 -0
- package/global/tests/test_token_metrics.py +791 -0
- package/global/tools/anvil-memory/src/__tests__/commands.test.ts +238 -1
- package/global/tools/anvil-memory/src/commands/ralph-iteration.ts +249 -0
- package/global/tools/anvil-memory/src/index.ts +2 -8
- package/package.json +1 -1
- package/scripts/anvil +7 -2
- package/global/tools/anvil-memory/src/__tests__/ccs/context-monitor.test.ts +0 -535
- package/global/tools/anvil-memory/src/__tests__/ccs/edge-cases.test.ts +0 -645
- package/global/tools/anvil-memory/src/__tests__/ccs/fixtures.ts +0 -363
- package/global/tools/anvil-memory/src/__tests__/ccs/index.ts +0 -8
- package/global/tools/anvil-memory/src/__tests__/ccs/integration.test.ts +0 -417
- package/global/tools/anvil-memory/src/__tests__/ccs/prompt-generator.test.ts +0 -571
- package/global/tools/anvil-memory/src/__tests__/ccs/ralph-stop.test.ts +0 -440
- package/global/tools/anvil-memory/src/__tests__/ccs/test-utils.ts +0 -252
|
@@ -0,0 +1,582 @@
|
|
|
1
|
+
"""
|
|
2
|
+
Optimization Applier Service for Anvil Framework.
|
|
3
|
+
|
|
4
|
+
Provides safe application of token efficiency optimizations with:
|
|
5
|
+
- Automatic backup creation before changes
|
|
6
|
+
- Before/after comparison
|
|
7
|
+
- Commit generation with appropriate messages
|
|
8
|
+
- Tracking of applied optimizations
|
|
9
|
+
- Rollback capability
|
|
10
|
+
|
|
11
|
+
Usage:
|
|
12
|
+
from optimization_applier import OptimizationApplier
|
|
13
|
+
|
|
14
|
+
applier = OptimizationApplier()
|
|
15
|
+
result = applier.apply_recommendation(recommendation_id=123)
|
|
16
|
+
"""
|
|
17
|
+
|
|
18
|
+
import os
|
|
19
|
+
import shutil
|
|
20
|
+
import subprocess
|
|
21
|
+
from dataclasses import dataclass, field
|
|
22
|
+
from datetime import datetime, timezone
|
|
23
|
+
from pathlib import Path
|
|
24
|
+
from typing import Optional, Dict, List, Any
|
|
25
|
+
from enum import Enum
|
|
26
|
+
|
|
27
|
+
|
|
28
|
+
class OptimizationType(Enum):
    """Closed set of optimization kinds the applier can handle.

    Each member's string value matches the ``recommendation_type`` string
    produced by the efficiency analysis, so incoming recommendations are
    parsed with ``OptimizationType(raw_string)``.
    """

    REMOVE_UNUSED_PATTERN = "remove_unused_pattern"  # strip a dead text pattern
    DEFER_LOADING = "defer_loading"                  # eager -> trigger-based loading
    CONSOLIDATE_COMMANDS = "consolidate_commands"    # merge overlapping commands
    REDUCE_CONTEXT = "reduce_context"                # drop/condense doc sections
    PRUNE_RARELY_USED = "prune_rarely_used"          # remove rarely-used components
|
|
35
|
+
|
|
36
|
+
|
|
37
|
+
@dataclass
class OptimizationResult:
    """Outcome of one attempt to apply an optimization.

    ``success`` is the primary flag; on failure ``error_message`` explains
    why and the measurement/id fields keep their defaults.
    """

    # Did the optimization apply end-to-end without error?
    success: bool
    # Tracking id assigned by the applier once the optimization is recorded.
    optimization_id: Optional[int] = None
    # Id of the source recommendation this result corresponds to.
    recommendation_id: Optional[int] = None
    # Parsed optimization kind; None if the raw type string was unknown.
    optimization_type: Optional[OptimizationType] = None
    # Files actually changed on disk (relative paths).
    files_modified: List[str] = field(default_factory=list)
    # Backup copies created before modification (absolute path strings).
    backup_paths: List[str] = field(default_factory=list)
    # Estimated token counts before/after, and their difference.
    tokens_before: int = 0
    tokens_after: int = 0
    savings: int = 0
    # Populated only when success is False.
    error_message: Optional[str] = None
    # Short git hash when auto-commit produced a commit.
    commit_hash: Optional[str] = None
    # UTC timestamp of when this result was created.
    applied_at: datetime = field(default_factory=lambda: datetime.now(timezone.utc))
|
|
52
|
+
|
|
53
|
+
|
|
54
|
+
@dataclass
class AppliedOptimization:
    """Persistent (in-memory) record of an optimization that was applied.

    Unlike OptimizationResult, which describes a single apply attempt, this
    record lives on for tracking, reporting, and rollback.
    """

    # Tracking id assigned by the applier.
    id: int
    # Id of the recommendation that produced this optimization.
    recommendation_id: int
    # Raw recommendation type string (kept as-is for reporting).
    optimization_type: str
    # Human-readable description of the change.
    description: str
    # Relative paths of the files that were modified.
    files_modified: List[str]
    # Tokens saved (measured when possible, otherwise estimated).
    tokens_saved: int
    # When the optimization was applied (UTC).
    applied_at: datetime
    # Short git hash if the change was committed, else None.
    commit_hash: Optional[str]
    # Backup copies available for restore-based rollback.
    backup_paths: List[str] = field(default_factory=list)
    # Rollback bookkeeping.
    reverted: bool = False
    reverted_at: Optional[datetime] = None
|
|
68
|
+
|
|
69
|
+
|
|
70
|
+
class OptimizationApplier:
    """
    Service for safely applying token efficiency optimizations.

    Features:
    - Creates timestamped backups before modifying files
    - Tracks all applied optimizations (in memory) for reporting
    - Supports rollback via ``git revert`` or backup restore
    - Generates conventional-commit messages when auto-commit is enabled
    """

    # Backups live under the project's .claude directory so they travel with
    # the project but stay out of the normal source tree.
    BACKUP_DIR = ".claude/backups/optimizations"

    def __init__(
        self,
        project_root: Optional[Path] = None,
        auto_commit: bool = False
    ):
        """
        Initialize the optimization applier.

        Args:
            project_root: Root directory of the project (defaults to the
                current working directory)
            auto_commit: Whether to automatically commit changes after a
                successful apply
        """
        self.project_root = project_root or Path.cwd()
        self.auto_commit = auto_commit
        self._applied_optimizations: List[AppliedOptimization] = []
        self._next_id = 1

        # Create the backup directory up front so individual backup attempts
        # never fail on a missing parent.
        self.backup_dir = self.project_root / self.BACKUP_DIR
        self.backup_dir.mkdir(parents=True, exist_ok=True)

        # token_metrics is an optional sibling module; when absent we simply
        # skip before/after token measurements.
        try:
            from token_metrics import TokenMetrics
            self._metrics = TokenMetrics()
        except ImportError:
            self._metrics = None

    def apply_recommendation(
        self,
        recommendation_id: int,
        recommendation_type: str,
        description: str,
        target_files: List[str],
        changes: Dict[str, Any],
        estimated_savings: int
    ) -> OptimizationResult:
        """
        Apply a recommendation from the efficiency analysis.

        Args:
            recommendation_id: ID of the recommendation to apply
            recommendation_type: Type of optimization (must be a valid
                OptimizationType value)
            description: Human-readable description
            target_files: Files to be modified (relative to project root)
            changes: Dict describing the changes to make
            estimated_savings: Estimated token savings, used when actual
                savings cannot be measured

        Returns:
            OptimizationResult with details of the application
        """
        result = OptimizationResult(
            success=False,
            recommendation_id=recommendation_id
        )

        try:
            # Parse optimization type
            opt_type = OptimizationType(recommendation_type)
            result.optimization_type = opt_type
        except ValueError:
            result.error_message = f"Unknown optimization type: {recommendation_type}"
            return result

        # Step 1: Create backups. Track which original file each backup came
        # from so a failed apply can be restored precisely (fix: previously
        # failure recovery fell back to a filename-guessing heuristic).
        backup_paths: List[str] = []
        backed_up_files: List[str] = []
        for file_path in target_files:
            full_path = self.project_root / file_path
            if full_path.exists():
                backup_path = self._create_backup(full_path)
                if backup_path:
                    backup_paths.append(str(backup_path))
                    backed_up_files.append(file_path)

        result.backup_paths = backup_paths

        # Step 2: Measure tokens before (if metrics available)
        if self._metrics:
            result.tokens_before = self._estimate_file_tokens(target_files)

        # Step 3: Apply the optimization; on any failure restore the exact
        # files we backed up and report the error.
        try:
            modified_files = self._apply_changes(opt_type, target_files, changes)
            result.files_modified = modified_files
        except Exception as e:
            result.error_message = f"Failed to apply changes: {str(e)}"
            self._restore_backups(backup_paths, backed_up_files)
            return result

        # Step 4: Measure tokens after
        if self._metrics:
            result.tokens_after = self._estimate_file_tokens(target_files)
            result.savings = result.tokens_before - result.tokens_after

        # Step 5: Record the optimization. Fall back to the caller's estimate
        # when no measured savings are available.
        optimization_record = AppliedOptimization(
            id=self._next_id,
            recommendation_id=recommendation_id,
            optimization_type=recommendation_type,
            description=description,
            files_modified=modified_files,
            tokens_saved=result.savings or estimated_savings,
            applied_at=datetime.now(timezone.utc),
            commit_hash=None,
            backup_paths=backup_paths
        )
        self._applied_optimizations.append(optimization_record)
        result.optimization_id = self._next_id
        self._next_id += 1

        # Step 6: Auto-commit if enabled
        if self.auto_commit and modified_files:
            commit_hash = self._create_commit(
                modified_files,
                f"refactor: apply token optimization - {description}"
            )
            result.commit_hash = commit_hash
            optimization_record.commit_hash = commit_hash

        result.success = True
        return result

    def _create_backup(self, file_path: Path) -> Optional[Path]:
        """
        Create a backup of a file before modification.

        Args:
            file_path: Path to the file to backup

        Returns:
            Path to the backup file, or None if backup failed
        """
        if not file_path.exists():
            return None

        # UTC, microsecond-resolution timestamp: two backups of the same file
        # taken within one second can no longer overwrite each other (the old
        # naive, second-resolution stamp could collide).
        timestamp = datetime.now(timezone.utc).strftime("%Y%m%d_%H%M%S_%f")
        backup_name = f"{file_path.name}.{timestamp}.bak"
        backup_path = self.backup_dir / backup_name

        try:
            shutil.copy2(file_path, backup_path)
            return backup_path
        except OSError:
            # Best-effort: a failed backup is reported as None, not raised.
            return None

    def _restore_backups(self, backup_paths: List[str], original_paths: Optional[List[str]] = None) -> bool:
        """
        Restore files from backups.

        Args:
            backup_paths: List of backup file paths
            original_paths: Optional list of original file paths (parallel to
                backup_paths). When provided, restoration is exact; otherwise
                a best-effort filename heuristic is used.

        Returns:
            True if all restorations succeeded
        """
        success = True
        for i, backup_path in enumerate(backup_paths):
            backup = Path(backup_path)
            if not backup.exists():
                continue

            # Exact restore when the caller supplied the original location.
            if original_paths and i < len(original_paths):
                try:
                    shutil.copy2(backup, self.project_root / original_paths[i])
                except OSError:
                    success = False
                continue

            # Fallback heuristic: recover the original filename from the
            # backup name (format: name.ext.<timestamp>.bak) and probe known
            # locations. This cannot handle files in arbitrary subdirectories
            # -- callers should prefer passing original_paths.
            parts = backup.name.rsplit(".", 2)  # [name.ext, timestamp, "bak"]
            if len(parts) >= 3:
                original_name = parts[0]
                try:
                    for possible_location in (
                        self.project_root / original_name,
                        self.project_root / ".claude" / original_name,
                    ):
                        if possible_location.exists():
                            shutil.copy2(backup, possible_location)
                            break
                except OSError:
                    success = False

        return success

    def _apply_changes(
        self,
        opt_type: OptimizationType,
        target_files: List[str],
        changes: Dict[str, Any]
    ) -> List[str]:
        """
        Apply changes based on optimization type.

        Args:
            opt_type: Type of optimization
            target_files: Files to modify
            changes: Dict describing the changes

        Returns:
            List of actually modified files
        """
        if opt_type == OptimizationType.REMOVE_UNUSED_PATTERN:
            return self._remove_pattern(target_files, changes)
        if opt_type == OptimizationType.DEFER_LOADING:
            return self._defer_loading(target_files, changes)
        if opt_type == OptimizationType.REDUCE_CONTEXT:
            return self._reduce_context(target_files, changes)
        if opt_type == OptimizationType.PRUNE_RARELY_USED:
            return self._prune_rarely_used(target_files, changes)
        # CONSOLIDATE_COMMANDS has no implementation yet; report no files
        # modified (previously it fell through silently with no indication).
        return []

    def _remove_pattern(
        self,
        target_files: List[str],
        changes: Dict[str, Any]
    ) -> List[str]:
        """Remove an unused pattern from files.

        Bug fix: an empty or missing ``changes["pattern"]`` previously
        rewrote every existing target file (and reported it modified)
        without changing anything, because ``"" in content`` is always
        True. It is now treated as a no-op.
        """
        modified: List[str] = []
        pattern_to_remove = changes.get("pattern", "")
        if not pattern_to_remove:
            return modified

        for file_path in target_files:
            full_path = self.project_root / file_path
            if not full_path.exists():
                continue

            content = full_path.read_text(encoding="utf-8")
            if pattern_to_remove in content:
                full_path.write_text(content.replace(pattern_to_remove, ""), encoding="utf-8")
                modified.append(file_path)

        return modified

    def _defer_loading(
        self,
        target_files: List[str],
        changes: Dict[str, Any]
    ) -> List[str]:
        """Convert eager loading to deferred loading.

        NOT IMPLEMENTED YET: this would rewrite CLAUDE.md sections into
        trigger tables (changes keys "section" and "triggers"); the exact
        rewrite depends on the target file format. Always returns [].
        """
        return []

    def _reduce_context(
        self,
        target_files: List[str],
        changes: Dict[str, Any]
    ) -> List[str]:
        """Reduce context size by removing whole markdown sections.

        A section spans from its exact ``## <name>`` header line up to (but
        not including) the next ``## `` header. Bug fix: headers are now
        matched exactly instead of by prefix, so removing section "Setup" no
        longer also deletes a distinct "Setup Notes" section.
        """
        modified: List[str] = []
        sections_to_remove = changes.get("sections", [])

        for file_path in target_files:
            full_path = self.project_root / file_path
            if not full_path.exists():
                continue

            content = full_path.read_text(encoding="utf-8")
            original_content = content

            for section in sections_to_remove:
                header = f"## {section}"
                if header not in content:
                    continue

                new_lines = []
                skipping = False
                for line in content.split("\n"):
                    if line.rstrip() == header:
                        skipping = True
                        continue
                    if skipping and line.startswith("## "):
                        skipping = False
                    if not skipping:
                        new_lines.append(line)

                content = "\n".join(new_lines)

            if content != original_content:
                full_path.write_text(content, encoding="utf-8")
                modified.append(file_path)

        return modified

    def _prune_rarely_used(
        self,
        target_files: List[str],
        changes: Dict[str, Any]
    ) -> List[str]:
        """Prune rarely-used components.

        NOT IMPLEMENTED YET: this would remove the component references
        listed in changes["components"] from configuration files. Always
        returns [].
        """
        return []

    def _estimate_file_tokens(self, file_paths: List[str]) -> int:
        """
        Estimate tokens in a list of files.

        Args:
            file_paths: List of file paths (relative to project root)

        Returns:
            Estimated total tokens (rough heuristic: 4 characters per token)
        """
        total = 0
        for file_path in file_paths:
            full_path = self.project_root / file_path
            if full_path.exists():
                # errors="replace" keeps the estimate robust against stray
                # non-UTF-8 bytes instead of raising mid-measurement.
                content = full_path.read_text(encoding="utf-8", errors="replace")
                total += len(content) // 4

        return total

    def _create_commit(self, files: List[str], message: str) -> Optional[str]:
        """
        Create a git commit for the changes.

        Args:
            files: Files to include in the commit
            message: Commit message

        Returns:
            Short (8-char) commit hash if successful, None otherwise
        """
        try:
            # Stage all files in one call; "--" guards against file names
            # that could be parsed as git options.
            subprocess.run(
                ["git", "add", "--", *files],
                cwd=self.project_root,
                check=True,
                capture_output=True
            )

            # Commit (fix: the old code bound this result to an unused var)
            subprocess.run(
                ["git", "commit", "-m", message],
                cwd=self.project_root,
                check=True,
                capture_output=True
            )

            # Get commit hash
            hash_result = subprocess.run(
                ["git", "rev-parse", "HEAD"],
                cwd=self.project_root,
                check=True,
                capture_output=True,
                text=True
            )

            return hash_result.stdout.strip()[:8]

        except subprocess.CalledProcessError:
            return None

    def rollback_optimization(self, optimization_id: int) -> bool:
        """
        Rollback a previously applied optimization.

        Prefers ``git revert`` when the optimization was committed;
        otherwise restores the backed-up files directly.

        Args:
            optimization_id: ID of the optimization to rollback

        Returns:
            True if rollback was successful
        """
        opt = next(
            (o for o in self._applied_optimizations if o.id == optimization_id),
            None
        )

        if not opt:
            return False

        if opt.reverted:
            return False  # Already reverted

        if opt.commit_hash:
            # Revert the recorded commit (git accepts the short hash), then
            # commit the revert explicitly.
            try:
                subprocess.run(
                    ["git", "revert", "--no-commit", opt.commit_hash],
                    cwd=self.project_root,
                    check=True,
                    capture_output=True
                )
                subprocess.run(
                    ["git", "commit", "-m", f"revert: rollback optimization {optimization_id}"],
                    cwd=self.project_root,
                    check=True,
                    capture_output=True
                )
            except subprocess.CalledProcessError:
                return False
        elif opt.backup_paths:
            # No commit hash, restore from backups using the recorded
            # original file locations.
            if not self._restore_backups(opt.backup_paths, opt.files_modified):
                return False

        opt.reverted = True
        opt.reverted_at = datetime.now(timezone.utc)
        return True

    def get_applied_optimizations(
        self,
        include_reverted: bool = False
    ) -> List[AppliedOptimization]:
        """
        Get list of applied optimizations.

        Args:
            include_reverted: Whether to include reverted optimizations

        Returns:
            List of AppliedOptimization records (a copy; safe to mutate)
        """
        if include_reverted:
            return self._applied_optimizations.copy()

        return [o for o in self._applied_optimizations if not o.reverted]

    def get_total_savings(self) -> Dict[str, int]:
        """
        Get total token savings from all active optimizations.

        Returns:
            Dict with keys "total_tokens_saved", "optimizations_count",
            and "reverted_count"
        """
        active = [o for o in self._applied_optimizations if not o.reverted]

        return {
            "total_tokens_saved": sum(o.tokens_saved for o in active),
            "optimizations_count": len(active),
            "reverted_count": len(self._applied_optimizations) - len(active)
        }

    def generate_impact_report(self) -> str:
        """
        Generate a report of optimization impact.

        Returns:
            Formatted markdown report
        """
        active = [o for o in self._applied_optimizations if not o.reverted]
        reverted = [o for o in self._applied_optimizations if o.reverted]

        lines = [
            "## Optimization Impact Report",
            "",
            f"**Active Optimizations**: {len(active)}",
            f"**Reverted Optimizations**: {len(reverted)}",
            f"**Total Tokens Saved**: {sum(o.tokens_saved for o in active):,}",
            "",
            "### Active Optimizations",
            ""
        ]

        if active:
            lines.append("| ID | Type | Description | Tokens Saved | Applied |")
            lines.append("|-----|------|-------------|--------------|---------|")
            for opt in active:
                applied = opt.applied_at.strftime("%Y-%m-%d")
                # Fix: only append an ellipsis when the description was
                # actually truncated (previously "..." was always added).
                desc = opt.description
                if len(desc) > 30:
                    desc = desc[:30] + "..."
                lines.append(
                    f"| {opt.id} | {opt.optimization_type} | "
                    f"{desc} | {opt.tokens_saved:,} | {applied} |"
                )
        else:
            lines.append("*No active optimizations*")

        if reverted:
            lines.extend([
                "",
                "### Reverted Optimizations",
                ""
            ])
            for opt in reverted:
                reverted_date = opt.reverted_at.strftime("%Y-%m-%d") if opt.reverted_at else "?"
                lines.append(f"- [{opt.id}] {opt.description} (reverted {reverted_date})")

        return "\n".join(lines)
|
|
@@ -97,7 +97,12 @@ def check_linear_yaml() -> Optional[Dict[str, str]]:
|
|
|
97
97
|
|
|
98
98
|
def check_git_state() -> Dict[str, Any]:
|
|
99
99
|
"""Get git status, branch, and recent commits."""
|
|
100
|
-
result
|
|
100
|
+
result: Dict[str, Any] = {
|
|
101
|
+
"branch": "unknown",
|
|
102
|
+
"clean": False,
|
|
103
|
+
"recent_commits": [],
|
|
104
|
+
"uncommitted_files": [],
|
|
105
|
+
}
|
|
101
106
|
|
|
102
107
|
# Get branch
|
|
103
108
|
branch_out, ok = run_command("git branch --show-current")
|
|
@@ -108,12 +113,21 @@ def check_git_state() -> Dict[str, Any]:
|
|
|
108
113
|
status_out, ok = run_command("git status --porcelain")
|
|
109
114
|
if ok:
|
|
110
115
|
result["clean"] = len(status_out.strip()) == 0
|
|
116
|
+
if not result["clean"]:
|
|
117
|
+
# Capture uncommitted file list for warning
|
|
118
|
+
result["uncommitted_files"] = [
|
|
119
|
+
line.strip() for line in status_out.split("\n") if line.strip()
|
|
120
|
+
][:5] # Limit to 5 files
|
|
111
121
|
|
|
112
122
|
# Get recent commits
|
|
113
123
|
log_out, ok = run_command("git log --oneline -5")
|
|
114
124
|
if ok and log_out:
|
|
115
125
|
result["recent_commits"] = log_out.split("\n")[:5]
|
|
116
126
|
|
|
127
|
+
# Add compaction warning if uncommitted changes detected
|
|
128
|
+
if not result["clean"]:
|
|
129
|
+
result["warning"] = "Uncommitted changes detected - verify before proceeding"
|
|
130
|
+
|
|
117
131
|
return result
|
|
118
132
|
|
|
119
133
|
|
|
@@ -398,6 +412,15 @@ def format_output(result: OrientResult, as_json: bool = False) -> str:
|
|
|
398
412
|
clean_str = "clean" if git.get("clean") else "uncommitted changes"
|
|
399
413
|
lines.append(f"**Git**: {git.get('branch', '?')} ({clean_str})")
|
|
400
414
|
|
|
415
|
+
# Post-compaction verification warning
|
|
416
|
+
if not git.get("clean"):
|
|
417
|
+
lines.append("\n**⚠️ Uncommitted Changes Detected**")
|
|
418
|
+
lines.append("Verify these are expected before proceeding with new work:")
|
|
419
|
+
for f in git.get("uncommitted_files", [])[:5]:
|
|
420
|
+
lines.append(f" - {f}")
|
|
421
|
+
if git.get("warning"):
|
|
422
|
+
lines.append(f"_Note: {git.get('warning')}_\n")
|
|
423
|
+
|
|
401
424
|
# Active agents
|
|
402
425
|
if result.active_agents:
|
|
403
426
|
lines.append(f"**Active Agents**: {len(result.active_agents)}")
|