crackerjack 0.27.9__py3-none-any.whl → 0.29.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release: this version of crackerjack might be problematic.
- crackerjack/.pre-commit-config-ai.yaml +1 -5
- crackerjack/__main__.py +48 -0
- crackerjack/crackerjack.py +1326 -45
- crackerjack/interactive.py +7 -16
- crackerjack/pyproject.toml +3 -1
- {crackerjack-0.27.9.dist-info → crackerjack-0.29.0.dist-info}/METADATA +636 -52
- {crackerjack-0.27.9.dist-info → crackerjack-0.29.0.dist-info}/RECORD +9 -9
- {crackerjack-0.27.9.dist-info → crackerjack-0.29.0.dist-info}/WHEEL +0 -0
- {crackerjack-0.27.9.dist-info → crackerjack-0.29.0.dist-info}/licenses/LICENSE +0 -0
crackerjack/crackerjack.py
CHANGED
@@ -37,6 +37,463 @@ class HookResult:
         self.issues_found = []


+@dataclass
+class TaskStatus:
+    id: str
+    name: str
+    status: str
+    start_time: float | None = None
+    end_time: float | None = None
+    duration: float | None = None
+    details: str | None = None
+    error_message: str | None = None
+    files_changed: list[str] | None = None
+
+    def __post_init__(self) -> None:
+        if self.files_changed is None:
+            self.files_changed = []
+        if self.start_time is not None and self.end_time is not None:
+            self.duration = self.end_time - self.start_time
+
+
+class SessionTracker(BaseModel, arbitrary_types_allowed=True):
+    console: Console
+    session_id: str
+    start_time: float
+    progress_file: Path
+    tasks: dict[str, TaskStatus] = {}
+    current_task: str | None = None
+    metadata: dict[str, t.Any] = {}
+
+    def __init__(self, **data: t.Any) -> None:
+        super().__init__(**data)
+        if not self.tasks:
+            self.tasks = {}
+        if not self.metadata:
+            self.metadata = {}
+
+    def start_task(
+        self, task_id: str, task_name: str, details: str | None = None
+    ) -> None:
+        task = TaskStatus(
+            id=task_id,
+            name=task_name,
+            status="in_progress",
+            start_time=time.time(),
+            details=details,
+        )
+        self.tasks[task_id] = task
+        self.current_task = task_id
+        self._update_progress_file()
+        self.console.print(f"[yellow]⏳[/yellow] Started: {task_name}")
+
+    def complete_task(
+        self,
+        task_id: str,
+        details: str | None = None,
+        files_changed: list[str] | None = None,
+    ) -> None:
+        if task_id in self.tasks:
+            task = self.tasks[task_id]
+            task.status = "completed"
+            task.end_time = time.time()
+            task.duration = task.end_time - (task.start_time or task.end_time)
+            if details:
+                task.details = details
+            if files_changed:
+                task.files_changed = files_changed
+            self._update_progress_file()
+            self.console.print(f"[green]✅[/green] Completed: {task.name}")
+            if self.current_task == task_id:
+                self.current_task = None
+
+    def fail_task(
+        self,
+        task_id: str,
+        error_message: str,
+        details: str | None = None,
+    ) -> None:
+        if task_id in self.tasks:
+            task = self.tasks[task_id]
+            task.status = "failed"
+            task.end_time = time.time()
+            task.duration = task.end_time - (task.start_time or task.end_time)
+            task.error_message = error_message
+            if details:
+                task.details = details
+            self._update_progress_file()
+            self.console.print(f"[red]❌[/red] Failed: {task.name} - {error_message}")
+            if self.current_task == task_id:
+                self.current_task = None
+
+    def skip_task(self, task_id: str, reason: str) -> None:
+        if task_id in self.tasks:
+            task = self.tasks[task_id]
+            task.status = "skipped"
+            task.end_time = time.time()
+            task.details = f"Skipped: {reason}"
+            self._update_progress_file()
+            self.console.print(f"[blue]⏩[/blue] Skipped: {task.name} - {reason}")
+            if self.current_task == task_id:
+                self.current_task = None
+
+    def _update_progress_file(self) -> None:
+        try:
+            content = self._generate_markdown_content()
+            self.progress_file.write_text(content, encoding="utf-8")
+        except OSError as e:
+            self.console.print(
+                f"[yellow]Warning: Failed to update progress file: {e}[/yellow]"
+            )
+
+    def _generate_header_section(self) -> str:
+        from datetime import datetime
+
+        completed_tasks = sum(
+            1 for task in self.tasks.values() if task.status == "completed"
+        )
+        total_tasks = len(self.tasks)
+        overall_status = "In Progress"
+        if completed_tasks == total_tasks and total_tasks > 0:
+            overall_status = "Completed"
+        elif any(task.status == "failed" for task in self.tasks.values()):
+            overall_status = "Failed"
+        start_datetime = datetime.fromtimestamp(self.start_time)
+
+        return f"""# Crackerjack Session Progress: {self.session_id}
+**Session ID**: {self.session_id}
+**Started**: {start_datetime.strftime("%Y-%m-%d %H:%M:%S")}
+**Status**: {overall_status}
+**Progress**: {completed_tasks}/{total_tasks} tasks completed
+
+- **Working Directory**: {self.metadata.get("working_dir", Path.cwd())}
+- **Python Version**: {self.metadata.get("python_version", "Unknown")}
+- **Crackerjack Version**: {self.metadata.get("crackerjack_version", "Unknown")}
+- **CLI Options**: {self.metadata.get("cli_options", "Unknown")}
+
+"""
+
+    def _generate_task_overview_section(self) -> str:
+        content = """## Task Progress Overview
+| Task | Status | Duration | Details |
+|------|--------|----------|---------|
+"""
+
+        for task in self.tasks.values():
+            status_emoji = {
+                "pending": "⏸️",
+                "in_progress": "⏳",
+                "completed": "✅",
+                "failed": "❌",
+                "skipped": "⏩",
+            }.get(task.status, "❓")
+
+            duration_str = f"{task.duration:.2f}s" if task.duration else "N/A"
+            details_str = (
+                task.details[:50] + "..."
+                if task.details and len(task.details) > 50
+                else (task.details or "")
+            )
+
+            content += f"| {task.name} | {status_emoji} {task.status} | {duration_str} | {details_str} |\n"
+
+        return content + "\n"
+
+    def _generate_task_details_section(self) -> str:
+        content = "## Detailed Task Log\n\n"
+        for task in self.tasks.values():
+            content += self._format_task_detail(task)
+        return content
+
+    def _format_task_detail(self, task: TaskStatus) -> str:
+        from datetime import datetime
+
+        if task.status == "completed":
+            return self._format_completed_task(task, datetime)
+        elif task.status == "in_progress":
+            return self._format_in_progress_task(task, datetime)
+        elif task.status == "failed":
+            return self._format_failed_task(task, datetime)
+        elif task.status == "skipped":
+            return self._format_skipped_task(task)
+        return ""
+
+    def _format_completed_task(self, task: TaskStatus, datetime: t.Any) -> str:
+        start_time = (
+            datetime.fromtimestamp(task.start_time) if task.start_time else "Unknown"
+        )
+        end_time = datetime.fromtimestamp(task.end_time) if task.end_time else "Unknown"
+        files_list = ", ".join(task.files_changed) if task.files_changed else "None"
+        return f"""### ✅ {task.name} - COMPLETED
+- **Started**: {start_time}
+- **Completed**: {end_time}
+- **Duration**: {task.duration:.2f}s
+- **Files Changed**: {files_list}
+- **Details**: {task.details or "N/A"}
+
+"""
+
+    def _format_in_progress_task(self, task: TaskStatus, datetime: t.Any) -> str:
+        start_time = (
+            datetime.fromtimestamp(task.start_time) if task.start_time else "Unknown"
+        )
+        return f"""### ⏳ {task.name} - IN PROGRESS
+- **Started**: {start_time}
+- **Current Status**: {task.details or "Processing..."}
+
+"""
+
+    def _format_failed_task(self, task: TaskStatus, datetime: t.Any) -> str:
+        start_time = (
+            datetime.fromtimestamp(task.start_time) if task.start_time else "Unknown"
+        )
+        fail_time = (
+            datetime.fromtimestamp(task.end_time) if task.end_time else "Unknown"
+        )
+        return f"""### ❌ {task.name} - FAILED
+- **Started**: {start_time}
+- **Failed**: {fail_time}
+- **Error**: {task.error_message or "Unknown error"}
+- **Recovery Suggestions**: Check error details and retry the failed operation
+
+"""
+
+    def _format_skipped_task(self, task: TaskStatus) -> str:
+        return f"""### ⏩ {task.name} - SKIPPED
+- **Reason**: {task.details or "No reason provided"}
+
+"""
+
+    def _generate_footer_section(self) -> str:
+        content = f"""## Session Recovery Information
+If this session was interrupted, you can resume from where you left off:
+
+```bash
+python -m crackerjack --resume-from {self.progress_file.name}
+```
+
+"""
+
+        all_files: set[str] = set()
+        for task in self.tasks.values():
+            if task.files_changed:
+                all_files.update(task.files_changed)
+
+        if all_files:
+            for file_path in sorted(all_files):
+                content += f"- {file_path}\n"
+        else:
+            content += "- No files modified yet\n"
+
+        content += "\n## Next Steps\n\n"
+
+        pending_tasks = [
+            task for task in self.tasks.values() if task.status == "pending"
+        ]
+        in_progress_tasks = [
+            task for task in self.tasks.values() if task.status == "in_progress"
+        ]
+        failed_tasks = [task for task in self.tasks.values() if task.status == "failed"]
+
+        if failed_tasks:
+            content += "⚠️ Address failed tasks:\n"
+            for task in failed_tasks:
+                content += f"- Fix {task.name}: {task.error_message}\n"
+        elif in_progress_tasks:
+            content += "🔄 Currently working on:\n"
+            for task in in_progress_tasks:
+                content += f"- {task.name}\n"
+        elif pending_tasks:
+            content += "📋 Next tasks to complete:\n"
+            for task in pending_tasks:
+                content += f"- {task.name}\n"
+        else:
+            content += "🎉 All tasks completed successfully!\n"
+
+        return content
+
+    def _generate_markdown_content(self) -> str:
+        return (
+            self._generate_header_section()
+            + self._generate_task_overview_section()
+            + self._generate_task_details_section()
+            + self._generate_footer_section()
+        )
+
+    @classmethod
+    def create_session(
+        cls,
+        console: Console,
+        session_id: str | None = None,
+        progress_file: Path | None = None,
+        metadata: dict[str, t.Any] | None = None,
+    ) -> "SessionTracker":
+        import uuid
+
+        if session_id is None:
+            session_id = str(uuid.uuid4())[:8]
+
+        if progress_file is None:
+            timestamp = time.strftime("%Y%m%d-%H%M%S")
+            progress_file = Path(f"SESSION-PROGRESS-{timestamp}.md")
+
+        tracker = cls(
+            console=console,
+            session_id=session_id,
+            start_time=time.time(),
+            progress_file=progress_file,
+            metadata=metadata or {},
+        )
+
+        tracker._update_progress_file()
+        console.print(f"[green]📋[/green] Session tracking started: {progress_file}")
+        return tracker
+
+    @classmethod
+    def find_recent_progress_files(cls, directory: Path = Path.cwd()) -> list[Path]:
+        progress_files: list[Path] = []
+        for file_path in directory.glob("SESSION-PROGRESS-*.md"):
+            try:
+                if file_path.is_file():
+                    progress_files.append(file_path)
+            except (OSError, PermissionError):
+                continue
+
+        return sorted(progress_files, key=lambda p: p.stat().st_mtime, reverse=True)
+
+    @classmethod
+    def is_session_incomplete(cls, progress_file: Path) -> bool:
+        if not progress_file.exists():
+            return False
+        try:
+            content = progress_file.read_text(encoding="utf-8")
+            has_in_progress = "⏳" in content or "in_progress" in content
+            has_failed = "❌" in content or "failed" in content
+            has_pending = "⏸️" in content or "pending" in content
+            stat = progress_file.stat()
+            age_hours = (time.time() - stat.st_mtime) / 3600
+            is_recent = age_hours < 24
+
+            return (has_in_progress or has_failed or has_pending) and is_recent
+        except (OSError, UnicodeDecodeError):
+            return False
+
+    @classmethod
+    def find_incomplete_session(cls, directory: Path = Path.cwd()) -> Path | None:
+        recent_files = cls.find_recent_progress_files(directory)
+        for progress_file in recent_files:
+            if cls.is_session_incomplete(progress_file):
+                return progress_file
+
+        return None
+
+    @classmethod
+    def auto_detect_session(
+        cls, console: Console, directory: Path = Path.cwd()
+    ) -> "SessionTracker | None":
+        incomplete_session = cls.find_incomplete_session(directory)
+        if incomplete_session:
+            return cls._handle_incomplete_session(console, incomplete_session)
+        return None
+
+    @classmethod
+    def _handle_incomplete_session(
+        cls, console: Console, incomplete_session: Path
+    ) -> "SessionTracker | None":
+        console.print(
+            f"[yellow]📋[/yellow] Found incomplete session: {incomplete_session.name}"
+        )
+        try:
+            content = incomplete_session.read_text(encoding="utf-8")
+            session_info = cls._parse_session_info(content)
+            cls._display_session_info(console, session_info)
+            return cls._prompt_resume_session(console, incomplete_session)
+        except Exception as e:
+            console.print(f"[yellow]⚠️[/yellow] Could not parse session file: {e}")
+            return None
+
+    @classmethod
+    def _parse_session_info(cls, content: str) -> dict[str, str | list[str] | None]:
+        import re
+
+        session_match = re.search(r"Session ID\*\*:\s*(.+)", content)
+        session_id: str = session_match.group(1).strip() if session_match else "unknown"
+        progress_match = re.search(r"Progress\*\*:\s*(\d+)/(\d+)", content)
+        progress_info: str | None = None
+        if progress_match:
+            completed = progress_match.group(1)
+            total = progress_match.group(2)
+            progress_info = f"{completed}/{total} tasks completed"
+        failed_tasks: list[str] = []
+        for line in content.split("\n"):
+            if "❌" in line and "- FAILED" in line:
+                task_match = re.search(r"### ❌ (.+?) - FAILED", line)
+                if task_match:
+                    task_name: str = task_match.group(1)
+                    failed_tasks.append(task_name)
+
+        return {
+            "session_id": session_id,
+            "progress_info": progress_info,
+            "failed_tasks": failed_tasks,
+        }
+
+    @classmethod
+    def _display_session_info(
+        cls, console: Console, session_info: dict[str, str | list[str] | None]
+    ) -> None:
+        console.print(f"[cyan] Session ID:[/cyan] {session_info['session_id']}")
+        if session_info["progress_info"]:
+            console.print(f"[cyan] Progress:[/cyan] {session_info['progress_info']}")
+        if session_info["failed_tasks"]:
+            console.print(
+                f"[red] Failed tasks:[/red] {', '.join(session_info['failed_tasks'])}"
+            )
+
+    @classmethod
+    def _prompt_resume_session(
+        cls, console: Console, incomplete_session: Path
+    ) -> "SessionTracker | None":
+        try:
+            import sys
+
+            console.print("[yellow]❓[/yellow] Resume this session? [y/N]: ", end="")
+            sys.stdout.flush()
+            response = input().strip().lower()
+            if response in ("y", "yes"):
+                return cls.resume_session(console, incomplete_session)
+            else:
+                console.print("[blue]ℹ️[/blue] Starting new session instead")
+                return None
+        except (KeyboardInterrupt, EOFError):
+            console.print("\n[blue]ℹ️[/blue] Starting new session instead")
+            return None
+
+    @classmethod
+    def resume_session(cls, console: Console, progress_file: Path) -> "SessionTracker":
+        if not progress_file.exists():
+            raise FileNotFoundError(f"Progress file not found: {progress_file}")
+        try:
+            content = progress_file.read_text(encoding="utf-8")
+            session_id = "resumed"
+            import re
+
+            session_match = re.search(r"Session ID\*\*:\s*(.+)", content)
+            if session_match:
+                session_id = session_match.group(1).strip()
+            tracker = cls(
+                console=console,
+                session_id=session_id,
+                start_time=time.time(),
+                progress_file=progress_file,
+                metadata={},
+            )
+            console.print(f"[green]🔄[/green] Resumed session from: {progress_file}")
+            return tracker
+        except Exception as e:
+            raise RuntimeError(f"Failed to resume session: {e}") from e
+
+
 config_files = (
     ".gitignore",
     ".pre-commit-config.yaml",
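For orientation, the block below is a minimal usage sketch of the session-tracking API added above (`create_session`, `start_task`, `complete_task`, `fail_task`, and the resume helpers). The import path and the `"hooks"` task ID are illustrative assumptions; only the method signatures come from the diff.

```python
from pathlib import Path

from rich.console import Console

# Assumed import path, based on the crackerjack/crackerjack.py module shown in this diff.
from crackerjack.crackerjack import SessionTracker


def run_with_tracking(console: Console) -> None:
    # Offer to pick up a recent unfinished session before starting a new one.
    previous = SessionTracker.find_incomplete_session()
    if previous is not None:
        tracker = SessionTracker.resume_session(console, previous)
    else:
        tracker = SessionTracker.create_session(
            console=console,
            metadata={"working_dir": str(Path.cwd()), "cli_options": "--track-progress"},
        )

    tracker.start_task("hooks", "Run pre-commit hooks")
    try:
        ...  # run the actual hooks here
        tracker.complete_task("hooks", details="All hooks passed")
    except Exception as exc:
        tracker.fail_task("hooks", str(exc))
        raise
```

Each call rewrites the `SESSION-PROGRESS-*.md` file, so the markdown report always reflects the latest task states.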
@@ -44,6 +501,11 @@ config_files = (
     ".pre-commit-config-fast.yaml",
     ".libcst.codemod.yaml",
 )
+
+documentation_files = (
+    "CLAUDE.md",
+    "RULES.md",
+)
 default_python_version = "3.13"


@@ -61,6 +523,9 @@ class OptionsProtocol(t.Protocol):
     no_config_updates: bool
     verbose: bool
     update_precommit: bool
+    update_docs: bool
+    force_update_docs: bool
+    compress_docs: bool
     clean: bool
     test: bool
     benchmark: bool
@@ -76,6 +541,9 @@ class OptionsProtocol(t.Protocol):
     skip_hooks: bool = False
     comprehensive: bool = False
     async_mode: bool = False
+    track_progress: bool = False
+    resume_from: str | None = None
+    progress_file: str | None = None


 class CodeCleaner(BaseModel, arbitrary_types_allowed=True):
@@ -1216,6 +1684,363 @@ class ConfigManager(BaseModel, arbitrary_types_allowed=True):
         if configs_to_add:
             self.execute_command(["git", "add"] + configs_to_add)

+    def copy_documentation_templates(
+        self, force_update: bool = False, compress_docs: bool = False
+    ) -> None:
+        docs_to_add: list[str] = []
+        for doc_file in documentation_files:
+            if self._should_process_doc_file(doc_file):
+                self._process_single_doc_file(
+                    doc_file, force_update, compress_docs, docs_to_add
+                )
+
+        if docs_to_add:
+            self.execute_command(["git", "add"] + docs_to_add)
+
+    def _should_process_doc_file(self, doc_file: str) -> bool:
+        doc_path = self.our_path / doc_file
+        if not doc_path.exists():
+            return False
+        if self.pkg_path.stem == "crackerjack":
+            return False
+        return True
+
+    def _process_single_doc_file(
+        self,
+        doc_file: str,
+        force_update: bool,
+        compress_docs: bool,
+        docs_to_add: list[str],
+    ) -> None:
+        doc_path = self.our_path / doc_file
+        pkg_doc_path = self.pkg_path / doc_file
+        should_update = force_update or not pkg_doc_path.exists()
+
+        if should_update:
+            pkg_doc_path.touch()
+            content = doc_path.read_text(encoding="utf-8")
+
+            auto_compress = self._should_compress_doc(doc_file, compress_docs)
+            updated_content = self._customize_documentation_content(
+                content, doc_file, auto_compress
+            )
+            pkg_doc_path.write_text(updated_content, encoding="utf-8")
+            docs_to_add.append(doc_file)
+
+            self._print_doc_update_message(doc_file, auto_compress)
+
+    def _should_compress_doc(self, doc_file: str, compress_docs: bool) -> bool:
+        return compress_docs or (
+            self.pkg_path.stem != "crackerjack" and doc_file == "CLAUDE.md"
+        )
+
+    def _print_doc_update_message(self, doc_file: str, auto_compress: bool) -> None:
+        compression_note = (
+            " (compressed for Claude Code)"
+            if auto_compress and doc_file == "CLAUDE.md"
+            else ""
+        )
+        self.console.print(
+            f"[green]📋[/green] Updated {doc_file} with latest Crackerjack quality standards{compression_note}"
+        )
+
+    def _customize_documentation_content(
+        self, content: str, filename: str, compress: bool = False
+    ) -> str:
+        if filename == "CLAUDE.md":
+            return self._customize_claude_md(content, compress)
+        elif filename == "RULES.md":
+            return self._customize_rules_md(content)
+        return content
+
+    def _compress_claude_md(self, content: str, target_size: int = 30000) -> str:
+        content.split("\n")
+        current_size = len(content)
+        if current_size <= target_size:
+            return content
+        essential_sections = [
+            "# ",
+            "## Project Overview",
+            "## Key Commands",
+            "## Development Guidelines",
+            "## Code Quality Compliance",
+            "### Refurb Standards",
+            "### Bandit Security Standards",
+            "### Pyright Type Safety Standards",
+            "## AI Code Generation Best Practices",
+            "## Task Completion Requirements",
+        ]
+        compression_strategies = [
+            self._remove_redundant_examples,
+            self._compress_command_examples,
+            self._remove_verbose_sections,
+            self._compress_repeated_patterns,
+            self._summarize_long_sections,
+        ]
+        compressed_content = content
+        for strategy in compression_strategies:
+            compressed_content = strategy(compressed_content)
+            if len(compressed_content) <= target_size:
+                break
+        if len(compressed_content) > target_size:
+            compressed_content = self._extract_essential_sections(
+                compressed_content, essential_sections, target_size
+            )
+
+        return self._add_compression_notice(compressed_content)
+
+    def _remove_redundant_examples(self, content: str) -> str:
+        lines = content.split("\n")
+        result = []
+        in_example_block = False
+        example_count = 0
+        max_examples_per_section = 2
+        for line in lines:
+            if line.strip().startswith("```"):
+                if not in_example_block:
+                    example_count += 1
+                    if example_count <= max_examples_per_section:
+                        result.append(line)
+                        in_example_block = True
+                    else:
+                        in_example_block = "skip"
+                else:
+                    if in_example_block != "skip":
+                        result.append(line)
+                    in_example_block = False
+            elif in_example_block == "skip":
+                continue
+            elif line.startswith(("## ", "### ")):
+                example_count = 0
+                result.append(line)
+            else:
+                result.append(line)
+
+        return "\n".join(result)
+
+    def _compress_command_examples(self, content: str) -> str:
+        import re
+
+        content = re.sub(
+            r"```bash\n((?:[^`]+\n){3,})```",
+            lambda m: "```bash\n"
+            + "\n".join(m.group(1).split("\n")[:3])
+            + "\n# ... (additional commands available)\n```",
+            content,
+            flags=re.MULTILINE,
+        )
+
+        return content
+
+    def _remove_verbose_sections(self, content: str) -> str:
+        sections_to_compress = [
+            "## Recent Bug Fixes and Improvements",
+            "## Development Memories",
+            "## Self-Maintenance Protocol for AI Assistants",
+            "## Pre-commit Hook Maintenance",
+        ]
+        lines = content.split("\n")
+        result = []
+        skip_section = False
+        for line in lines:
+            if any(line.startswith(section) for section in sections_to_compress):
+                skip_section = True
+                result.extend(
+                    (line, "*[Detailed information available in full CLAUDE.md]*")
+                )
+                result.append("")
+            elif line.startswith("## ") and skip_section:
+                skip_section = False
+                result.append(line)
+            elif not skip_section:
+                result.append(line)
+
+        return "\n".join(result)
+
+    def _compress_repeated_patterns(self, content: str) -> str:
+        import re
+
+        content = re.sub(r"\n{3,}", "\n\n", content)
+        content = re.sub(
+            r"(\*\*[A-Z][^*]+:\*\*[^\n]+\n){3,}",
+            lambda m: m.group(0)[:200]
+            + "...\n*[Additional patterns available in full documentation]*\n",
+            content,
+        )
+
+        return content
+
+    def _summarize_long_sections(self, content: str) -> str:
+        lines = content.split("\n")
+        result = []
+        current_section = []
+        section_header = ""
+        for line in lines:
+            if line.startswith(("### ", "## ")):
+                if current_section and len("\n".join(current_section)) > 1000:
+                    summary = self._create_section_summary(
+                        section_header, current_section
+                    )
+                    result.extend(summary)
+                else:
+                    result.extend(current_section)
+                current_section = [line]
+                section_header = line
+            else:
+                current_section.append(line)
+        if current_section:
+            if len("\n".join(current_section)) > 1000:
+                summary = self._create_section_summary(section_header, current_section)
+                result.extend(summary)
+            else:
+                result.extend(current_section)
+
+        return "\n".join(result)
+
+    def _create_section_summary(
+        self, header: str, section_lines: list[str]
+    ) -> list[str]:
+        summary = [header, ""]
+
+        key_points = []
+        for line in section_lines[2:]:
+            if line.strip().startswith(("- ", "* ", "1. ", "2. ")):
+                key_points.append(line)
+            elif line.strip().startswith("**") and ":" in line:
+                key_points.append(line)
+
+            if len(key_points) >= 5:
+                break
+
+        if key_points:
+            summary.extend(key_points[:5])
+            summary.append("*[Complete details available in full CLAUDE.md]*")
+        else:
+            content_preview = " ".join(
+                line.strip()
+                for line in section_lines[2:10]
+                if line.strip() and not line.startswith("#")
+            )[:200]
+            summary.extend(
+                (
+                    f"{content_preview}...",
+                    "*[Full section available in complete documentation]*",
+                )
+            )
+
+        summary.append("")
+        return summary
+
+    def _extract_essential_sections(
+        self, content: str, essential_sections: list[str], target_size: int
+    ) -> str:
+        lines = content.split("\n")
+        result = []
+        current_section = []
+        keep_section = False
+
+        for line in lines:
+            new_section_started = self._process_line_for_section(
+                line, essential_sections, current_section, keep_section, result
+            )
+            if new_section_started is not None:
+                current_section, keep_section = new_section_started
+            else:
+                current_section.append(line)
+
+            if self._should_stop_extraction(result, target_size):
+                break
+
+        self._finalize_extraction(current_section, keep_section, result, target_size)
+        return "\n".join(result)
+
+    def _process_line_for_section(
+        self,
+        line: str,
+        essential_sections: list[str],
+        current_section: list[str],
+        keep_section: bool,
+        result: list[str],
+    ) -> tuple[list[str], bool] | None:
+        if any(line.startswith(section) for section in essential_sections):
+            if current_section and keep_section:
+                result.extend(current_section)
+            return ([line], True)
+        elif line.startswith(("## ", "### ")):
+            if current_section and keep_section:
+                result.extend(current_section)
+            return ([line], False)
+        return None
+
+    def _should_stop_extraction(self, result: list[str], target_size: int) -> bool:
+        return len("\n".join(result)) > target_size
+
+    def _finalize_extraction(
+        self,
+        current_section: list[str],
+        keep_section: bool,
+        result: list[str],
+        target_size: int,
+    ) -> None:
+        if current_section and keep_section and len("\n".join(result)) < target_size:
+            result.extend(current_section)
+
+    def _add_compression_notice(self, content: str) -> str:
+        notice = """
+*Note: This CLAUDE.md has been automatically compressed by Crackerjack to optimize for Claude Code usage.
+Complete documentation is available in the source repository.*
+
+"""
+
+        lines = content.split("\n")
+        if len(lines) > 5:
+            lines.insert(5, notice)
+
+        return "\n".join(lines)
+
+    def _customize_claude_md(self, content: str, compress: bool = False) -> str:
+        project_name = self.pkg_name
+        content = content.replace("crackerjack", project_name).replace(
+            "Crackerjack", project_name.title()
+        )
+        header = f"""# {project_name.upper()}.md
+This file provides guidance to Claude Code (claude.ai/code) when working with code in this repository.
+
+*This file was automatically generated by Crackerjack and contains the latest Python quality standards.*
+
+{project_name.title()} is a Python project that follows modern development practices and maintains high code quality standards using automated tools and best practices.
+
+"""
+
+        lines = content.split("\n")
+        start_idx = 0
+        for i, line in enumerate(lines):
+            if line.startswith(("## Development Guidelines", "## Code Quality")):
+                start_idx = i
+                break
+
+        if start_idx > 0:
+            relevant_content = "\n".join(lines[start_idx:])
+            full_content = header + relevant_content
+        else:
+            full_content = header + content
+
+        if compress:
+            return self._compress_claude_md(full_content)
+        return full_content
+
+    def _customize_rules_md(self, content: str) -> str:
+        project_name = self.pkg_name
+        content = content.replace("crackerjack", project_name).replace(
+            "Crackerjack", project_name.title()
+        )
+        header = f"""# {project_name.title()} Style Rules
+*This file was automatically generated by Crackerjack and contains the latest Python quality standards.*
+
+"""
+
+        return header + content
+
     def execute_command(
         self, cmd: list[str], **kwargs: t.Any
     ) -> subprocess.CompletedProcess[str]:
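The CLAUDE.md compression above is organized as an ordered pipeline: each strategy runs in turn, the loop exits early once the document fits the target size, and essential-section extraction is the fallback when no strategy gets it small enough. A stand-alone sketch of that control flow, with deliberately trivial placeholder strategies rather than the real ones from the diff:

```python
from typing import Callable


def compress(text: str, strategies: list[Callable[[str], str]],
             fallback: Callable[[str], str], target_size: int) -> str:
    # Apply each strategy in order and stop as soon as the text is small enough.
    result = text
    for strategy in strategies:
        result = strategy(result)
        if len(result) <= target_size:
            return result
    # Last resort, analogous in spirit to _extract_essential_sections.
    return fallback(result)


def drop_blank_runs(text: str) -> str:
    return "\n".join(line for line in text.splitlines() if line.strip())


def keep_headings_only(text: str) -> str:
    return "\n".join(line for line in text.splitlines() if line.startswith("#"))


sample = "# Title\n\nSome prose.\n\n" * 200
print(len(compress(sample, [drop_blank_runs], keep_headings_only, target_size=500)))
```

Ordering the strategies from least to most destructive means the fallback only fires for documents that are genuinely oversized.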
@@ -1307,13 +2132,22 @@ class ProjectManager(BaseModel, arbitrary_types_allowed=True):
             "[bold bright_blue]⚡ INIT[/bold bright_blue] [bold bright_white]First-time project setup[/bold bright_white]"
         )
         self.console.print("─" * 80 + "\n")
-        self.
+        if self.options and getattr(self.options, "ai_agent", False):
+            import subprocess
+
+            self.execute_command(
+                ["uv", "tool", "install", "keyring"],
+                capture_output=True,
+                stderr=subprocess.DEVNULL,
+            )
+        else:
+            self.execute_command(["uv", "tool", "install", "keyring"])
         self.execute_command(["git", "init"])
         self.execute_command(["git", "branch", "-m", "main"])
         self.execute_command(["git", "add", "pyproject.toml", "uv.lock"])
         self.execute_command(["git", "config", "advice.addIgnoredFile", "false"])
         install_cmd = ["uv", "run", "pre-commit", "install"]
-        if
+        if self.options and getattr(self.options, "ai_agent", False):
             install_cmd.extend(["-c", ".pre-commit-config-ai.yaml"])
         else:
             install_cmd.extend(["-c", ".pre-commit-config-fast.yaml"])
@@ -1390,7 +2224,7 @@ class ProjectManager(BaseModel, arbitrary_types_allowed=True):
         result = self.execute_command(cmd, capture_output=True, text=True)
         total_duration = time.time() - start_time
         hook_results = self._parse_hook_output(result.stdout, result.stderr)
-        if
+        if self.options and getattr(self.options, "ai_agent", False):
             self._generate_hooks_analysis(hook_results, total_duration)
             self._generate_quality_metrics()
             self._generate_project_structure_analysis()
@@ -1512,7 +2346,7 @@ class ProjectManager(BaseModel, arbitrary_types_allowed=True):
         return suggestions

     def _generate_quality_metrics(self) -> None:
-        if not (
+        if not (self.options and getattr(self.options, "ai_agent", False)):
             return
         metrics = {
             "project_info": {
@@ -1687,7 +2521,7 @@ class ProjectManager(BaseModel, arbitrary_types_allowed=True):
         return recommendations

     def _generate_project_structure_analysis(self) -> None:
-        if not (
+        if not (self.options and getattr(self.options, "ai_agent", False)):
            return
         structure = {
             "project_overview": {
@@ -1711,7 +2545,7 @@ class ProjectManager(BaseModel, arbitrary_types_allowed=True):
         )

     def _generate_error_context_analysis(self) -> None:
-        if not (
+        if not (self.options and getattr(self.options, "ai_agent", False)):
             return
         context = {
             "analysis_info": {
@@ -1732,7 +2566,7 @@ class ProjectManager(BaseModel, arbitrary_types_allowed=True):
         )

     def _generate_ai_agent_summary(self) -> None:
-        if not (
+        if not (self.options and getattr(self.options, "ai_agent", False)):
             return
         summary = {
             "analysis_summary": {
@@ -2012,7 +2846,7 @@ class ProjectManager(BaseModel, arbitrary_types_allowed=True):
             raise SystemExit(1)
         else:
             self.console.print(
-                "\n[bold bright_green]
+                "\n[bold bright_green]🏆 Pre-commit passed all checks![/bold bright_green]"
             )

     async def run_pre_commit_with_analysis_async(self) -> list[HookResult]:
@@ -2058,7 +2892,7 @@ class ProjectManager(BaseModel, arbitrary_types_allowed=True):
             raise SystemExit(1)
         else:
             self.console.print(
-                "\n[bold bright_green]
+                "\n[bold bright_green]🏆 Pre-commit passed all checks![/bold bright_green]"
             )
         self._generate_analysis_files(hook_results)

@@ -2116,6 +2950,26 @@ class ProjectManager(BaseModel, arbitrary_types_allowed=True):
                 f"[yellow]Warning: Failed to generate AI summary: {e}[/yellow]"
             )

+    def update_precommit_hooks(self) -> None:
+        try:
+            result = self.execute_command(
+                ["uv", "run", "pre-commit", "autoupdate"],
+                capture_output=True,
+                text=True,
+            )
+            if result.returncode == 0:
+                self.console.print(
+                    "[green]✅ Pre-commit hooks updated successfully[/green]"
+                )
+                if result.stdout.strip():
+                    self.console.print(f"[dim]{result.stdout}[/dim]")
+            else:
+                self.console.print(
+                    f"[red]❌ Failed to update pre-commit hooks: {result.stderr}[/red]"
+                )
+        except Exception as e:
+            self.console.print(f"[red]❌ Error updating pre-commit hooks: {e}[/red]")
+

 class Crackerjack(BaseModel, arbitrary_types_allowed=True):
     our_path: Path = Path(__file__).parent
@@ -2128,6 +2982,8 @@ class Crackerjack(BaseModel, arbitrary_types_allowed=True):
     code_cleaner: CodeCleaner | None = None
     config_manager: ConfigManager | None = None
     project_manager: ProjectManager | None = None
+    session_tracker: SessionTracker | None = None
+    options: t.Any = None
     _file_cache: dict[str, list[Path]] = {}
     _file_cache_with_mtime: dict[str, tuple[float, list[Path]]] = {}
     _state_file: Path = Path(".crackerjack-state")
@@ -2255,13 +3111,6 @@ class Crackerjack(BaseModel, arbitrary_types_allowed=True):
             "\n\n[bold red]❌ UV sync failed. Is UV installed? Run `pipx install uv` and try again.[/bold red]\n\n"
         )

-    def _update_precommit(self, options: t.Any) -> None:
-        if self.pkg_path.stem == "crackerjack" and options.update_precommit:
-            update_cmd = ["uv", "run", "pre-commit", "autoupdate"]
-            if options.ai_agent:
-                update_cmd.extend(["-c", ".pre-commit-config-ai.yaml"])
-            self.execute_command(update_cmd)
-
     def _clean_project(self, options: t.Any) -> None:
         assert self.code_cleaner is not None
         if options.clean:
@@ -2529,7 +3378,7 @@ class Crackerjack(BaseModel, arbitrary_types_allowed=True):

     def _handle_test_success(self, options: t.Any) -> None:
         self.console.print(
-            "\n\n[bold bright_green]
+            "\n\n[bold bright_green]🏆 Tests passed successfully![/bold bright_green]\n"
         )
         self._print_ai_agent_files(options)

@@ -2598,6 +3447,121 @@ class Crackerjack(BaseModel, arbitrary_types_allowed=True):
             self._mark_version_bumped(version_type)
             break

+    def _validate_authentication_setup(self) -> None:
+        import os
+        import shutil
+
+        keyring_provider = self._get_keyring_provider()
+        has_publish_token = bool(os.environ.get("UV_PUBLISH_TOKEN"))
+        has_keyring = shutil.which("keyring") is not None
+        self.console.print("[dim]🔐 Validating authentication setup...[/dim]")
+        if has_publish_token:
+            self._handle_publish_token_found()
+            return
+        if keyring_provider == "subprocess" and has_keyring:
+            self._handle_keyring_validation()
+            return
+        if keyring_provider == "subprocess" and not has_keyring:
+            self._handle_missing_keyring()
+        if not keyring_provider:
+            self._handle_no_keyring_provider()
+
+    def _handle_publish_token_found(self) -> None:
+        self.console.print(
+            "[dim] ✅ UV_PUBLISH_TOKEN environment variable found[/dim]"
+        )
+
+    def _handle_keyring_validation(self) -> None:
+        self.console.print(
+            "[dim] ✅ Keyring provider configured and keyring executable found[/dim]"
+        )
+        try:
+            result = self.execute_command(
+                ["keyring", "get", "https://upload.pypi.org/legacy/", "__token__"],
+                capture_output=True,
+                text=True,
+            )
+            if result.returncode == 0:
+                self.console.print("[dim] ✅ PyPI token found in keyring[/dim]")
+            else:
+                self.console.print(
+                    "[yellow] ⚠️ No PyPI token found in keyring - will prompt during publish[/yellow]"
+                )
+        except Exception:
+            self.console.print(
+                "[yellow] ⚠️ Could not check keyring - will attempt publish anyway[/yellow]"
+            )
+
+    def _handle_missing_keyring(self) -> None:
+        if not (self.options and getattr(self.options, "ai_agent", False)):
+            self.console.print(
+                "[yellow] ⚠️ Keyring provider set to 'subprocess' but keyring executable not found[/yellow]"
+            )
+            self.console.print(
+                "[yellow] Install keyring: uv tool install keyring[/yellow]"
+            )
+
+    def _handle_no_keyring_provider(self) -> None:
+        if not (self.options and getattr(self.options, "ai_agent", False)):
+            self.console.print(
+                "[yellow] ⚠️ No keyring provider configured and no UV_PUBLISH_TOKEN set[/yellow]"
+            )
+
+    def _get_keyring_provider(self) -> str | None:
+        import os
+        import tomllib
+        from pathlib import Path
+
+        env_provider = os.environ.get("UV_KEYRING_PROVIDER")
+        if env_provider:
+            return env_provider
+        for config_file in ("pyproject.toml", "uv.toml"):
+            config_path = Path(config_file)
+            if config_path.exists():
+                try:
+                    with config_path.open("rb") as f:
+                        config = tomllib.load(f)
+                    return config.get("tool", {}).get("uv", {}).get("keyring-provider")
+                except Exception:
+                    continue
+
+        return None
+
+    def _build_publish_command(self) -> list[str]:
+        import os
+
+        cmd = ["uv", "publish"]
+        publish_token = os.environ.get("UV_PUBLISH_TOKEN")
+        if publish_token:
+            cmd.extend(["--token", publish_token])
+        keyring_provider = self._get_keyring_provider()
+        if keyring_provider:
+            cmd.extend(["--keyring-provider", keyring_provider])
+
+        return cmd
+
+    def _display_authentication_help(self) -> None:
+        self.console.print(
+            "\n[bold bright_red]❌ Publish failed. Run crackerjack again to retry publishing without re-bumping version.[/bold bright_red]"
+        )
+        if not (self.options and getattr(self.options, "ai_agent", False)):
+            self.console.print("\n[bold yellow]🔐 Authentication Help:[/bold yellow]")
+            self.console.print(" [dim]To fix authentication issues, you can:[/dim]")
+            self.console.print(
+                " [dim]1. Set PyPI token: export UV_PUBLISH_TOKEN=pypi-your-token-here[/dim]"
+            )
+            self.console.print(
+                " [dim]2. Install keyring: uv tool install keyring[/dim]"
+            )
+            self.console.print(
+                " [dim]3. Store token in keyring: keyring set https://upload.pypi.org/legacy/ __token__[/dim]"
+            )
+            self.console.print(
+                " [dim]4. Ensure keyring-provider is set in pyproject.toml:[/dim]"
+            )
+            self.console.print(" [dim] [tool.uv][/dim]")
+            self.console.print(' [dim] keyring-provider = "subprocess"[/dim]')
+
     def _publish_project(self, options: OptionsProtocol) -> None:
         if options.publish:
             self.console.print("\n" + "-" * 80)
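To make the publish flow concrete: `_build_publish_command` starts from a plain `uv publish` call and layers on an explicit `--token` from `UV_PUBLISH_TOKEN` and a `--keyring-provider` value resolved from the environment or from `[tool.uv]` in pyproject.toml. A minimal stand-alone sketch of that same decision, assuming only the environment variables and flags named in the diff:

```python
import os


def build_publish_command(keyring_provider: str | None) -> list[str]:
    # Mirrors the logic added in _build_publish_command: append credentials
    # only when they are actually configured.
    cmd = ["uv", "publish"]
    token = os.environ.get("UV_PUBLISH_TOKEN")
    if token:
        cmd.extend(["--token", token])
    if keyring_provider:
        cmd.extend(["--keyring-provider", keyring_provider])
    return cmd


# Example: with no token in the environment and keyring configured via
# `[tool.uv] keyring-provider = "subprocess"`, the command becomes
# ['uv', 'publish', '--keyring-provider', 'subprocess'].
print(build_publish_command("subprocess"))
```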
@@ -2616,18 +3580,169 @@ class Crackerjack(BaseModel, arbitrary_types_allowed=True):
                )
                raise SystemExit(1)
            try:
-                self.
+                self._validate_authentication_setup()
+                publish_cmd = self._build_publish_command()
+                self.execute_command(publish_cmd)
                self._mark_publish_completed()
                self._clear_state()
                self.console.print(
-                    "\n[bold bright_green]
+                    "\n[bold bright_green]🏆 Package published successfully![/bold bright_green]"
                )
            except SystemExit:
-                self.
-                    "\n[bold bright_red]❌ Publish failed. Run crackerjack again to retry publishing without re-bumping version.[/bold bright_red]"
-                )
+                self._display_authentication_help()
                raise

+    def _analyze_git_changes(self) -> dict[str, t.Any]:
+        diff_result = self._get_git_diff_output()
+        changes = self._parse_git_diff_output(diff_result)
+        changes["stats"] = self._get_git_stats()
+        return changes
+
+    def _get_git_diff_output(self) -> t.Any:
+        diff_cmd = ["git", "diff", "--cached", "--name-status"]
+        diff_result = self.execute_command(diff_cmd, capture_output=True, text=True)
+        if not diff_result.stdout and diff_result.returncode == 0:
+            diff_cmd = ["git", "diff", "--name-status"]
+            diff_result = self.execute_command(diff_cmd, capture_output=True, text=True)
+        return diff_result
+
+    def _parse_git_diff_output(self, diff_result: t.Any) -> dict[str, t.Any]:
+        changes = {
+            "added": [],
+            "modified": [],
+            "deleted": [],
+            "renamed": [],
+            "total_changes": 0,
+        }
+        if diff_result.returncode == 0 and diff_result.stdout:
+            self._process_diff_lines(diff_result.stdout, changes)
+        return changes
+
+    def _process_diff_lines(self, stdout: str, changes: dict[str, t.Any]) -> None:
+        for line in stdout.strip().split("\n"):
+            if not line:
+                continue
+            self._process_single_diff_line(line, changes)
+
+    def _process_single_diff_line(self, line: str, changes: dict[str, t.Any]) -> None:
+        parts = line.split("\t")
+        if len(parts) >= 2:
+            status, filename = parts[0], parts[1]
+            self._categorize_file_change(status, filename, parts, changes)
+            changes["total_changes"] += 1
+
+    def _categorize_file_change(
+        self, status: str, filename: str, parts: list[str], changes: dict[str, t.Any]
+    ) -> None:
+        if status == "A":
+            changes["added"].append(filename)
+        elif status == "M":
+            changes["modified"].append(filename)
+        elif status == "D":
+            changes["deleted"].append(filename)
+        elif status.startswith("R"):
+            if len(parts) >= 3:
+                changes["renamed"].append((parts[1], parts[2]))
+            else:
+                changes["renamed"].append((filename, "unknown"))
+
+    def _get_git_stats(self) -> str:
+        stat_cmd = ["git", "diff", "--cached", "--stat"]
+        stat_result = self.execute_command(stat_cmd, capture_output=True, text=True)
+        if not stat_result.stdout and stat_result.returncode == 0:
+            stat_cmd = ["git", "diff", "--stat"]
+            stat_result = self.execute_command(stat_cmd, capture_output=True, text=True)
+        return stat_result.stdout if stat_result.returncode == 0 else ""
+
+    def _categorize_changes(self, changes: dict[str, t.Any]) -> dict[str, list[str]]:
+        categories = {
+            "docs": [],
+            "tests": [],
+            "config": [],
+            "core": [],
+            "ci": [],
+            "deps": [],
+        }
+        file_patterns = {
+            "docs": ["README.md", "CLAUDE.md", "RULES.md", "docs/", ".md"],
+            "tests": ["test_", "_test.py", "tests/", "conftest.py"],
+            "config": ["pyproject.toml", ".yaml", ".yml", ".json", ".gitignore"],
+            "ci": [".github/", "ci/", ".pre-commit"],
+            "deps": ["requirements", "pyproject.toml", "uv.lock"],
+        }
+        for file_list in ("added", "modified", "deleted"):
+            for filename in changes.get(file_list, []):
+                categorized = False
+                for category, patterns in file_patterns.items():
+                    if any(pattern in filename for pattern in patterns):
+                        categories[category].append(filename)
+                        categorized = True
+                        break
+                if not categorized:
+                    categories["core"].append(filename)
+
+        return categories
+
+    def _get_primary_changes(self, categories: dict[str, list[str]]) -> list[str]:
+        primary_changes = []
+        category_mapping = [
+            ("core", "core functionality"),
+            ("tests", "tests"),
+            ("docs", "documentation"),
+            ("config", "configuration"),
+            ("deps", "dependencies"),
+        ]
+        for key, label in category_mapping:
+            if categories[key]:
+                primary_changes.append(label)
+
+        return primary_changes or ["project files"]
+
+    def _determine_primary_action(self, changes: dict[str, t.Any]) -> str:
+        added_count = len(changes["added"])
+        modified_count = len(changes["modified"])
+        deleted_count = len(changes["deleted"])
+        if added_count > modified_count + deleted_count:
+            return "Add"
+        elif deleted_count > modified_count + added_count:
+            return "Remove"
+        elif changes["renamed"]:
+            return "Refactor"
+        return "Update"
+
+    def _generate_body_lines(self, changes: dict[str, t.Any]) -> list[str]:
+        body_lines = []
+        change_types = [
+            ("added", "Added"),
+            ("modified", "Modified"),
+            ("deleted", "Deleted"),
+            ("renamed", "Renamed"),
+        ]
+        for change_type, label in change_types:
+            items = changes.get(change_type, [])
+            if items:
+                count = len(items)
+                body_lines.append(f"- {label} {count} file(s)")
+                if change_type not in ("deleted", "renamed"):
+                    for file in items[:3]:
+                        body_lines.append(f" * {file}")
+                    if count > 3:
+                        body_lines.append(f" * ... and {count - 3} more")
+
+        return body_lines
+
+    def _generate_commit_message(self, changes: dict[str, t.Any]) -> str:
+        if changes["total_changes"] == 0:
+            return "Update project files"
+        categories = self._categorize_changes(changes)
+        primary_changes = self._get_primary_changes(categories)
+        primary_action = self._determine_primary_action(changes)
+        commit_subject = f"{primary_action} {' and '.join(primary_changes[:2])}"
+        body_lines = self._generate_body_lines(changes)
+        if body_lines:
+            return f"{commit_subject}\n\n" + "\n".join(body_lines)
+        return commit_subject
+
     def _commit_and_push(self, options: OptionsProtocol) -> None:
         if options.commit:
             self.console.print("\n" + "-" * 80)
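As a worked example of the commit-message heuristic above: with two new test files and one modified module staged, `_determine_primary_action` picks "Add" (additions outnumber modifications plus deletions) and `_categorize_changes` labels the files as tests and core functionality, so the subject reads "Add core functionality and tests". The dict below is a hypothetical change set in the shape produced by `_parse_git_diff_output`; the expected message is traced from the added code, not taken verbatim from the diff.

```python
# Hypothetical change set in the shape produced by _parse_git_diff_output.
changes = {
    "added": ["tests/test_session.py", "tests/test_publish.py"],
    "modified": ["crackerjack/crackerjack.py"],
    "deleted": [],
    "renamed": [],
    "total_changes": 3,
}

# Tracing _generate_commit_message over this input gives roughly:
#
#   Add core functionality and tests
#
#   - Added 2 file(s)
#    * tests/test_session.py
#    * tests/test_publish.py
#   - Modified 1 file(s)
#    * crackerjack/crackerjack.py
```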
@@ -2635,12 +3750,71 @@ class Crackerjack(BaseModel, arbitrary_types_allowed=True):
                 "[bold bright_white]📝 COMMIT[/bold bright_white] [bold bright_white]Saving changes to git[/bold bright_white]"
             )
             self.console.print("-" * 80 + "\n")
-
+            changes = self._analyze_git_changes()
+            if changes["total_changes"] > 0:
+                self.console.print("[dim]🔍 Analyzing changes...[/dim]\n")
+                if changes["stats"]:
+                    self.console.print(changes["stats"])
+                suggested_msg = self._generate_commit_message(changes)
+                self.console.print(
+                    "\n[bold cyan]📋 Suggested commit message:[/bold cyan]"
+                )
+                self.console.print(f"[cyan]{suggested_msg}[/cyan]\n")
+                user_choice = (
+                    input("Use suggested message? [Y/n/e to edit]: ").strip().lower()
+                )
+                if user_choice in ("", "y"):
+                    commit_msg = suggested_msg
+                elif user_choice == "e":
+                    import os
+                    import tempfile
+
+                    with tempfile.NamedTemporaryFile(
+                        mode="w", suffix=".txt", delete=False
+                    ) as f:
+                        f.write(suggested_msg)
+                        temp_path = f.name
+                    editor = os.environ.get("EDITOR", "vi")
+                    self.execute_command([editor, temp_path])
+                    with open(temp_path) as f:
+                        commit_msg = f.read().strip()
+                    Path(temp_path).unlink()
+                else:
+                    commit_msg = input("\nEnter custom commit message: ")
+            else:
+                commit_msg = input("\nCommit message: ")
             self.execute_command(
                 ["git", "commit", "-m", commit_msg, "--no-verify", "--", "."]
             )
             self.execute_command(["git", "push", "origin", "main", "--no-verify"])

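When the user chooses `e`, the suggested message is written to a temporary file, opened in `$EDITOR`, and read back once the editor exits. Below is a self-contained sketch of that pattern using only the standard library; the `suggested` text is a placeholder and `subprocess.run` stands in for the class's `execute_command` helper.

# Standalone sketch of the edit-in-$EDITOR flow used above; illustrative only.
import os
import subprocess
import tempfile
from pathlib import Path

suggested = "Update documentation\n\n- Modified 1 file(s)"  # placeholder text

with tempfile.NamedTemporaryFile(mode="w", suffix=".txt", delete=False) as f:
    f.write(suggested)      # pre-fill the editor buffer with the suggestion
    temp_path = f.name

editor = os.environ.get("EDITOR", "vi")
subprocess.run([editor, temp_path])  # blocks until the editor exits

commit_msg = Path(temp_path).read_text().strip()
Path(temp_path).unlink()             # remove the temporary file
print(commit_msg)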
+    def _update_precommit(self, options: OptionsProtocol) -> None:
+        if options.update_precommit:
+            self.console.print("\n" + "-" * 80)
+            self.console.print(
+                "[bold bright_blue]🔄 UPDATE[/bold bright_blue] [bold bright_white]Updating pre-commit hooks[/bold bright_white]"
+            )
+            self.console.print("-" * 80 + "\n")
+            if self.pkg_path.stem == "crackerjack":
+                update_cmd = ["uv", "run", "pre-commit", "autoupdate"]
+                if getattr(options, "ai_agent", False):
+                    update_cmd.extend(["-c", ".pre-commit-config-ai.yaml"])
+                self.execute_command(update_cmd)
+            else:
+                self.project_manager.update_precommit_hooks()
+
+    def _update_docs(self, options: OptionsProtocol) -> None:
+        if options.update_docs or options.force_update_docs:
+            self.console.print("\n" + "-" * 80)
+            self.console.print(
+                "[bold bright_blue]📋 DOCS UPDATE[/bold bright_blue] [bold bright_white]Updating documentation with quality standards[/bold bright_white]"
+            )
+            self.console.print("-" * 80 + "\n")
+            self.config_manager.copy_documentation_templates(
+                force_update=options.force_update_docs,
+                compress_docs=options.compress_docs,
+            )
+
     def execute_command(
         self, cmd: list[str], **kwargs: t.Any
     ) -> subprocess.CompletedProcess[str]:
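`_update_precommit` only varies the command it executes: inside the crackerjack repository itself it runs `uv run pre-commit autoupdate`, appending the AI-specific config when the `ai_agent` option is set; in any other project it delegates to the project manager. A small illustrative snippet of the two resulting command shapes (the flag value is hypothetical):

# Illustrative only - shows the two autoupdate command shapes built above.
ai_agent = True  # hypothetical flag value

update_cmd = ["uv", "run", "pre-commit", "autoupdate"]
if ai_agent:
    update_cmd.extend(["-c", ".pre-commit-config-ai.yaml"])

print(" ".join(update_cmd))
# ai_agent=False -> uv run pre-commit autoupdate
# ai_agent=True  -> uv run pre-commit autoupdate -c .pre-commit-config-ai.yaml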
@@ -2712,7 +3886,7 @@ class Crackerjack(BaseModel, arbitrary_types_allowed=True):
             raise SystemExit(1)
         else:
             self.console.print(
-                "\n[bold bright_green]
+                "\n[bold bright_green]🏆 All comprehensive quality checks passed![/bold bright_green]"
             )

     async def _run_comprehensive_quality_checks_async(
@@ -2762,11 +3936,86 @@ class Crackerjack(BaseModel, arbitrary_types_allowed=True):
             raise SystemExit(1)
         else:
             self.console.print(
-                "[bold bright_green]
+                "[bold bright_green]🏆 All comprehensive quality checks passed![/bold bright_green]"
+            )
+
+    def _run_tracked_task(
+        self, task_id: str, task_name: str, task_func: t.Callable[[], None]
+    ) -> None:
+        if self.session_tracker:
+            self.session_tracker.start_task(task_id, task_name)
+        try:
+            task_func()
+            if self.session_tracker:
+                self.session_tracker.complete_task(task_id, f"{task_name} completed")
+        except Exception as e:
+            if self.session_tracker:
+                self.session_tracker.fail_task(task_id, str(e))
+            raise
+
+    def _run_pre_commit_task(self, options: OptionsProtocol) -> None:
+        if not options.skip_hooks:
+            if getattr(options, "ai_agent", False):
+                self.project_manager.run_pre_commit_with_analysis()
+            else:
+                self.project_manager.run_pre_commit()
+        else:
+            self.console.print(
+                "\n[bold bright_yellow]⏭️ Skipping pre-commit hooks...[/bold bright_yellow]\n"
             )
+            if self.session_tracker:
+                self.session_tracker.skip_task("pre_commit", "Skipped by user request")
+
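`_run_tracked_task` wraps each workflow step so the session tracker records its start, completion, or failure while the step itself stays tracking-agnostic; failures are reported and then re-raised. A minimal standalone sketch of that wrapper shape follows; the `StubTracker` is hypothetical and only prints, unlike the real `SessionTracker`.

# Sketch of the start/complete/fail wrapper pattern; illustrative only.
import typing as t


class StubTracker:
    def start_task(self, task_id: str, name: str) -> None:
        print(f"start  {task_id}: {name}")

    def complete_task(self, task_id: str, details: str) -> None:
        print(f"done   {task_id}: {details}")

    def fail_task(self, task_id: str, error: str) -> None:
        print(f"failed {task_id}: {error}")


def run_tracked(
    tracker: StubTracker | None,
    task_id: str,
    name: str,
    task_func: t.Callable[[], None],
) -> None:
    if tracker:
        tracker.start_task(task_id, name)
    try:
        task_func()
        if tracker:
            tracker.complete_task(task_id, f"{name} completed")
    except Exception as e:
        if tracker:
            tracker.fail_task(task_id, str(e))
        raise  # the failure still propagates to the caller


run_tracked(StubTracker(), "run_tests", "Execute test suite", lambda: None)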
+    def _initialize_session_tracking(self, options: OptionsProtocol) -> None:
+        if options.resume_from:
+            try:
+                progress_file = Path(options.resume_from)
+                self.session_tracker = SessionTracker.resume_session(
+                    console=self.console,
+                    progress_file=progress_file,
+                )
+                return
+            except Exception as e:
+                self.console.print(
+                    f"[yellow]Warning: Failed to resume from {options.resume_from}: {e}[/yellow]"
+                )
+                self.session_tracker = None
+                return
+        if options.track_progress:
+            try:
+                auto_tracker = SessionTracker.auto_detect_session(self.console)
+                if auto_tracker:
+                    self.session_tracker = auto_tracker
+                    return
+                progress_file = (
+                    Path(options.progress_file) if options.progress_file else None
+                )
+                try:
+                    from importlib.metadata import version
+
+                    crackerjack_version = version("crackerjack")
+                except (ImportError, ModuleNotFoundError):
+                    crackerjack_version = "unknown"
+                metadata = {
+                    "working_dir": str(self.pkg_path),
+                    "python_version": self.python_version,
+                    "crackerjack_version": crackerjack_version,
+                    "cli_options": str(options),
+                }
+                self.session_tracker = SessionTracker.create_session(
+                    console=self.console,
+                    progress_file=progress_file,
+                    metadata=metadata,
+                )
+            except Exception as e:
+                self.console.print(
+                    f"[yellow]Warning: Failed to initialize session tracking: {e}[/yellow]"
+                )
+                self.session_tracker = None
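`_initialize_session_tracking` applies a clear precedence: an explicit `resume_from` file wins, otherwise `track_progress` first tries to reuse an auto-detected session and only then creates a new one; any failure degrades to running without tracking instead of aborting. A compact sketch of that decision order follows; the three helper functions are hypothetical stand-ins for the `SessionTracker` classmethods used above.

# Decision-order sketch only; resume/auto_detect/create are made-up stand-ins
# for SessionTracker.resume_session / auto_detect_session / create_session.
def resume(path: str) -> str:
    return f"resumed:{path}"


def auto_detect() -> str | None:
    return None  # pretend no existing session was found


def create() -> str:
    return "new-session"


def init_tracking(resume_from: str | None, track_progress: bool) -> str | None:
    if resume_from:
        try:
            return resume(resume_from)  # 1. explicit resume wins
        except Exception:
            return None  # failures degrade to "no tracking"
    if track_progress:
        try:
            return auto_detect() or create()  # 2. reuse a session, else 3. create one
        except Exception:
            return None
    return None  # tracking was not requested


print(init_tracking(None, True))  # -> "new-session"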

     def process(self, options: OptionsProtocol) -> None:
         assert self.project_manager is not None
+        self._initialize_session_tracking(options)
         self.console.print("\n" + "-" * 80)
         self.console.print(
             "[bold bright_cyan]⚒️ CRACKERJACKING[/bold bright_cyan] [bold bright_white]Starting workflow execution[/bold bright_white]"
@@ -2777,28 +4026,59 @@ class Crackerjack(BaseModel, arbitrary_types_allowed=True):
         options.test = True
         options.publish = options.all
         options.commit = True
-        self.
-
-
-        self.
-
+        self._run_tracked_task(
+            "setup", "Initialize project structure", self._setup_package
+        )
+        self._run_tracked_task(
+            "update_project",
+            "Update project configuration",
+            lambda: self._update_project(options),
+        )
+        self._run_tracked_task(
+            "update_precommit",
+            "Update pre-commit hooks",
+            lambda: self._update_precommit(options),
+        )
+        self._run_tracked_task(
+            "update_docs",
+            "Update documentation templates",
+            lambda: self._update_docs(options),
+        )
+        self._run_tracked_task(
+            "clean_project", "Clean project code", lambda: self._clean_project(options)
+        )
+        if self.project_manager is not None:
+            self.project_manager.options = options
         if not options.skip_hooks:
-
-
-
-            self.
-        else:
-            self.console.print(
-                "\n[bold bright_yellow]⏭️ Skipping pre-commit hooks...[/bold bright_yellow]\n"
+            self._run_tracked_task(
+                "pre_commit",
+                "Run pre-commit hooks",
+                lambda: self._run_pre_commit_task(options),
             )
-
-
-            self.
-
-
+        else:
+            self._run_pre_commit_task(options)
+        self._run_tracked_task(
+            "run_tests", "Execute test suite", lambda: self._run_tests(options)
+        )
+        self._run_tracked_task(
+            "quality_checks",
+            "Run comprehensive quality checks",
+            lambda: self._run_comprehensive_quality_checks(options),
+        )
+        self._run_tracked_task(
+            "bump_version", "Bump version numbers", lambda: self._bump_version(options)
+        )
+        self._run_tracked_task(
+            "commit_push",
+            "Commit and push changes",
+            lambda: self._commit_and_push(options),
+        )
+        self._run_tracked_task(
+            "publish", "Publish project", lambda: self._publish_project(options)
+        )
         self.console.print("\n" + "-" * 80)
         self.console.print(
-            "[bold bright_green]
+            "[bold bright_green]🏆 CRACKERJACK COMPLETE[/bold bright_green] [bold bright_white]Workflow completed successfully![/bold bright_white]"
         )
         self.console.print("-" * 80 + "\n")

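After this hunk, `process` executes the whole workflow as a fixed sequence of tracked steps, each identified by a task id and a human-readable name. Purely as an illustration of that shape (this is not how the diff implements it), the same chain can be viewed as data driven by a loop:

# Illustrative alternative shape for the step chain above; the task ids and
# names come from the diff, the callables are stand-in no-ops.
steps = [
    ("setup", "Initialize project structure", lambda: None),
    ("update_project", "Update project configuration", lambda: None),
    ("update_precommit", "Update pre-commit hooks", lambda: None),
    ("update_docs", "Update documentation templates", lambda: None),
    ("clean_project", "Clean project code", lambda: None),
    ("pre_commit", "Run pre-commit hooks", lambda: None),
    ("run_tests", "Execute test suite", lambda: None),
    ("quality_checks", "Run comprehensive quality checks", lambda: None),
    ("bump_version", "Bump version numbers", lambda: None),
    ("commit_push", "Commit and push changes", lambda: None),
    ("publish", "Publish project", lambda: None),
]

for task_id, name, func in steps:
    print(f"running {task_id}: {name}")
    func()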
@@ -2818,7 +4098,8 @@ class Crackerjack(BaseModel, arbitrary_types_allowed=True):
         self._update_project(options)
         self._update_precommit(options)
         await self._clean_project_async(options)
-        self.project_manager
+        if self.project_manager is not None:
+            self.project_manager.options = options
         if not options.skip_hooks:
             if getattr(options, "ai_agent", False):
                 await self.project_manager.run_pre_commit_with_analysis_async()
@@ -2835,7 +4116,7 @@ class Crackerjack(BaseModel, arbitrary_types_allowed=True):
         self._publish_project(options)
         self.console.print("\n" + "-" * 80)
         self.console.print(
-            "[bold bright_green]
+            "[bold bright_green]🏆 CRACKERJACK COMPLETE[/bold bright_green] [bold bright_white]Workflow completed successfully![/bold bright_white]"
         )
         self.console.print("-" * 80 + "\n")
