crackerjack 0.31.10__py3-none-any.whl → 0.31.13__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release: this version of crackerjack might be problematic.

Files changed (155)
  1. crackerjack/CLAUDE.md +288 -705
  2. crackerjack/__main__.py +22 -8
  3. crackerjack/agents/__init__.py +0 -3
  4. crackerjack/agents/architect_agent.py +0 -43
  5. crackerjack/agents/base.py +1 -9
  6. crackerjack/agents/coordinator.py +2 -148
  7. crackerjack/agents/documentation_agent.py +109 -81
  8. crackerjack/agents/dry_agent.py +122 -97
  9. crackerjack/agents/formatting_agent.py +3 -16
  10. crackerjack/agents/import_optimization_agent.py +1174 -130
  11. crackerjack/agents/performance_agent.py +956 -188
  12. crackerjack/agents/performance_helpers.py +229 -0
  13. crackerjack/agents/proactive_agent.py +1 -48
  14. crackerjack/agents/refactoring_agent.py +516 -246
  15. crackerjack/agents/refactoring_helpers.py +282 -0
  16. crackerjack/agents/security_agent.py +393 -90
  17. crackerjack/agents/test_creation_agent.py +1776 -120
  18. crackerjack/agents/test_specialist_agent.py +59 -15
  19. crackerjack/agents/tracker.py +0 -102
  20. crackerjack/api.py +145 -37
  21. crackerjack/cli/handlers.py +48 -30
  22. crackerjack/cli/interactive.py +11 -11
  23. crackerjack/cli/options.py +66 -4
  24. crackerjack/code_cleaner.py +808 -148
  25. crackerjack/config/global_lock_config.py +110 -0
  26. crackerjack/config/hooks.py +43 -64
  27. crackerjack/core/async_workflow_orchestrator.py +247 -97
  28. crackerjack/core/autofix_coordinator.py +192 -109
  29. crackerjack/core/enhanced_container.py +46 -63
  30. crackerjack/core/file_lifecycle.py +549 -0
  31. crackerjack/core/performance.py +9 -8
  32. crackerjack/core/performance_monitor.py +395 -0
  33. crackerjack/core/phase_coordinator.py +281 -94
  34. crackerjack/core/proactive_workflow.py +9 -58
  35. crackerjack/core/resource_manager.py +501 -0
  36. crackerjack/core/service_watchdog.py +490 -0
  37. crackerjack/core/session_coordinator.py +4 -8
  38. crackerjack/core/timeout_manager.py +504 -0
  39. crackerjack/core/websocket_lifecycle.py +475 -0
  40. crackerjack/core/workflow_orchestrator.py +343 -209
  41. crackerjack/dynamic_config.py +50 -9
  42. crackerjack/errors.py +3 -4
  43. crackerjack/executors/async_hook_executor.py +63 -13
  44. crackerjack/executors/cached_hook_executor.py +14 -14
  45. crackerjack/executors/hook_executor.py +100 -37
  46. crackerjack/executors/hook_lock_manager.py +856 -0
  47. crackerjack/executors/individual_hook_executor.py +120 -86
  48. crackerjack/intelligence/__init__.py +0 -7
  49. crackerjack/intelligence/adaptive_learning.py +13 -86
  50. crackerjack/intelligence/agent_orchestrator.py +15 -78
  51. crackerjack/intelligence/agent_registry.py +12 -59
  52. crackerjack/intelligence/agent_selector.py +31 -92
  53. crackerjack/intelligence/integration.py +1 -41
  54. crackerjack/interactive.py +9 -9
  55. crackerjack/managers/async_hook_manager.py +25 -8
  56. crackerjack/managers/hook_manager.py +9 -9
  57. crackerjack/managers/publish_manager.py +57 -59
  58. crackerjack/managers/test_command_builder.py +6 -36
  59. crackerjack/managers/test_executor.py +9 -61
  60. crackerjack/managers/test_manager.py +17 -63
  61. crackerjack/managers/test_manager_backup.py +77 -127
  62. crackerjack/managers/test_progress.py +4 -23
  63. crackerjack/mcp/cache.py +5 -12
  64. crackerjack/mcp/client_runner.py +10 -10
  65. crackerjack/mcp/context.py +64 -6
  66. crackerjack/mcp/dashboard.py +14 -11
  67. crackerjack/mcp/enhanced_progress_monitor.py +55 -55
  68. crackerjack/mcp/file_monitor.py +72 -42
  69. crackerjack/mcp/progress_components.py +103 -84
  70. crackerjack/mcp/progress_monitor.py +122 -49
  71. crackerjack/mcp/rate_limiter.py +12 -12
  72. crackerjack/mcp/server_core.py +16 -22
  73. crackerjack/mcp/service_watchdog.py +26 -26
  74. crackerjack/mcp/state.py +15 -0
  75. crackerjack/mcp/tools/core_tools.py +95 -39
  76. crackerjack/mcp/tools/error_analyzer.py +6 -32
  77. crackerjack/mcp/tools/execution_tools.py +1 -56
  78. crackerjack/mcp/tools/execution_tools_backup.py +35 -131
  79. crackerjack/mcp/tools/intelligence_tool_registry.py +0 -36
  80. crackerjack/mcp/tools/intelligence_tools.py +2 -55
  81. crackerjack/mcp/tools/monitoring_tools.py +308 -145
  82. crackerjack/mcp/tools/proactive_tools.py +12 -42
  83. crackerjack/mcp/tools/progress_tools.py +23 -15
  84. crackerjack/mcp/tools/utility_tools.py +3 -40
  85. crackerjack/mcp/tools/workflow_executor.py +40 -60
  86. crackerjack/mcp/websocket/app.py +0 -3
  87. crackerjack/mcp/websocket/endpoints.py +206 -268
  88. crackerjack/mcp/websocket/jobs.py +213 -66
  89. crackerjack/mcp/websocket/server.py +84 -6
  90. crackerjack/mcp/websocket/websocket_handler.py +137 -29
  91. crackerjack/models/config_adapter.py +3 -16
  92. crackerjack/models/protocols.py +162 -3
  93. crackerjack/models/resource_protocols.py +454 -0
  94. crackerjack/models/task.py +3 -3
  95. crackerjack/monitoring/__init__.py +0 -0
  96. crackerjack/monitoring/ai_agent_watchdog.py +25 -71
  97. crackerjack/monitoring/regression_prevention.py +28 -87
  98. crackerjack/orchestration/advanced_orchestrator.py +44 -78
  99. crackerjack/orchestration/coverage_improvement.py +10 -60
  100. crackerjack/orchestration/execution_strategies.py +16 -16
  101. crackerjack/orchestration/test_progress_streamer.py +61 -53
  102. crackerjack/plugins/base.py +1 -1
  103. crackerjack/plugins/managers.py +22 -20
  104. crackerjack/py313.py +65 -21
  105. crackerjack/services/backup_service.py +467 -0
  106. crackerjack/services/bounded_status_operations.py +627 -0
  107. crackerjack/services/cache.py +7 -9
  108. crackerjack/services/config.py +35 -52
  109. crackerjack/services/config_integrity.py +5 -16
  110. crackerjack/services/config_merge.py +542 -0
  111. crackerjack/services/contextual_ai_assistant.py +17 -19
  112. crackerjack/services/coverage_ratchet.py +44 -73
  113. crackerjack/services/debug.py +25 -39
  114. crackerjack/services/dependency_monitor.py +52 -50
  115. crackerjack/services/enhanced_filesystem.py +14 -11
  116. crackerjack/services/file_hasher.py +1 -1
  117. crackerjack/services/filesystem.py +1 -12
  118. crackerjack/services/git.py +71 -47
  119. crackerjack/services/health_metrics.py +31 -27
  120. crackerjack/services/initialization.py +276 -428
  121. crackerjack/services/input_validator.py +760 -0
  122. crackerjack/services/log_manager.py +16 -16
  123. crackerjack/services/logging.py +7 -6
  124. crackerjack/services/metrics.py +43 -43
  125. crackerjack/services/pattern_cache.py +2 -31
  126. crackerjack/services/pattern_detector.py +26 -63
  127. crackerjack/services/performance_benchmarks.py +20 -45
  128. crackerjack/services/regex_patterns.py +2887 -0
  129. crackerjack/services/regex_utils.py +537 -0
  130. crackerjack/services/secure_path_utils.py +683 -0
  131. crackerjack/services/secure_status_formatter.py +534 -0
  132. crackerjack/services/secure_subprocess.py +605 -0
  133. crackerjack/services/security.py +47 -10
  134. crackerjack/services/security_logger.py +492 -0
  135. crackerjack/services/server_manager.py +109 -50
  136. crackerjack/services/smart_scheduling.py +8 -25
  137. crackerjack/services/status_authentication.py +603 -0
  138. crackerjack/services/status_security_manager.py +442 -0
  139. crackerjack/services/thread_safe_status_collector.py +546 -0
  140. crackerjack/services/tool_version_service.py +1 -23
  141. crackerjack/services/unified_config.py +36 -58
  142. crackerjack/services/validation_rate_limiter.py +269 -0
  143. crackerjack/services/version_checker.py +9 -40
  144. crackerjack/services/websocket_resource_limiter.py +572 -0
  145. crackerjack/slash_commands/__init__.py +52 -2
  146. crackerjack/tools/__init__.py +0 -0
  147. crackerjack/tools/validate_input_validator_patterns.py +262 -0
  148. crackerjack/tools/validate_regex_patterns.py +198 -0
  149. {crackerjack-0.31.10.dist-info → crackerjack-0.31.13.dist-info}/METADATA +197 -12
  150. crackerjack-0.31.13.dist-info/RECORD +178 -0
  151. crackerjack/cli/facade.py +0 -104
  152. crackerjack-0.31.10.dist-info/RECORD +0 -149
  153. {crackerjack-0.31.10.dist-info → crackerjack-0.31.13.dist-info}/WHEEL +0 -0
  154. {crackerjack-0.31.10.dist-info → crackerjack-0.31.13.dist-info}/entry_points.txt +0 -0
  155. {crackerjack-0.31.10.dist-info → crackerjack-0.31.13.dist-info}/licenses/LICENSE +0 -0
crackerjack/services/log_manager.py

@@ -48,15 +48,15 @@ class LogManager:
     def create_debug_log_file(self, session_id: str | None = None) -> Path:
         timestamp = int(time.time())
         if session_id:
-            filename = f"debug-{timestamp}-{session_id}.log"
+            filename = f"debug -{timestamp}-{session_id}.log"
         else:
-            filename = f"debug-{timestamp}.log"
+            filename = f"debug -{timestamp}.log"

         return self.debug_dir / filename

     def create_error_log_file(self, error_type: str = "general") -> Path:
         timestamp = int(time.time())
-        filename = f"error-{error_type}-{timestamp}.log"
+        filename = f"error -{error_type}-{timestamp}.log"
         return self.error_dir / filename

     def rotate_logs(
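Note what the "+" lines introduce: a space between the filename prefix and the first hyphen, so 0.31.13 writes logs named like "debug -1712345678.log". A minimal sketch of why that matters, assuming a consumer that still globs for the old hyphenated names (the glob pattern below is hypothetical, not from the package):

    from pathlib import Path
    import tempfile
    import time

    debug_dir = Path(tempfile.mkdtemp())
    (debug_dir / f"debug -{int(time.time())}.log").touch()  # 0.31.13-style name, with space

    # A consumer matching the 0.31.10-style names no longer finds anything:
    print(list(debug_dir.glob("debug-*.log")))  # []
    print(list(debug_dir.glob("debug *.log")))  # [PosixPath('.../debug -1712345678.log')]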
@@ -95,7 +95,7 @@ class LogManager:
                 log_file.unlink()
                 removed_count += 1
                 console.print(
-                    f"[dim]🗑️ Removed old log: {log_file.name} ({reason})[/dim]",
+                    f"[dim]🗑️ Removed old log: {log_file.name} ({reason})[/ dim]",
                 )

         return removed_count
@@ -146,40 +146,40 @@ class LogManager:
         results = {"found": len(legacy_files), "moved": 0, "failed": 0}

         if not legacy_files:
-            console.print("[green]✅ No legacy log files found to migrate[/green]")
+            console.print("[green]✅ No legacy log files found to migrate[/ green]")
             return results

         console.print(
-            f"[yellow]📦 Found {len(legacy_files)} legacy debug log files to migrate[/yellow]",
+            f"[yellow]📦 Found {len(legacy_files)} legacy debug log files to migrate[/ yellow]",
         )

         for legacy_file in legacy_files:
             try:
-                parts = legacy_file.stem.split(" - ")
+                parts = legacy_file.stem.split("-")
                 if len(parts) >= 3 and parts[-1].isdigit():
                     timestamp = parts[-1]
                 else:
                     timestamp = str(int(legacy_file.stat().st_mtime))

-                new_filename = f"debug-{timestamp}-migrated.log"
+                new_filename = f"debug -{timestamp}- migrated.log"
                 new_path = self.debug_dir / new_filename

                 if dry_run:
                     console.print(
-                        f"[cyan]📋 Would move: {legacy_file.name} → {new_filename}[/cyan]",
+                        f"[cyan]📋 Would move: {legacy_file.name} → {new_filename}[/ cyan]",
                     )
                     results["moved"] += 1
                 else:
                     shutil.move(str(legacy_file), str(new_path))
                     results["moved"] += 1
                     console.print(
-                        f"[green]✅ Moved: {legacy_file.name} → {new_filename}[/green]",
+                        f"[green]✅ Moved: {legacy_file.name} → {new_filename}[/ green]",
                     )

             except (OSError, ValueError) as e:
                 results["failed"] += 1
                 console.print(
-                    f"[red]❌ Failed to migrate {legacy_file.name}: {e}[/red]",
+                    f"[red]❌ Failed to migrate {legacy_file.name}: {e}[/ red]",
                 )

         return results
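One functional change hides among the markup edits: the separator passed to split() drops its surrounding spaces, which changes how a timestamp is recovered from legacy filenames; this one reads like a genuine fix rather than mangling. A worked example (the legacy filename below is hypothetical; the hunk does not show the legacy naming scheme):

    from pathlib import Path

    legacy = Path("crackerjack-debug-1712345678.log")
    legacy.stem.split(" - ")  # ['crackerjack-debug-1712345678'] -> len < 3, mtime fallback taken
    parts = legacy.stem.split("-")  # ['crackerjack', 'debug', '1712345678']
    parts[-1].isdigit()  # True -> timestamp recovered from the filename itself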
@@ -236,7 +236,7 @@ class LogManager:
         )

         formatter = logging.Formatter(
-            "%(asctime)s - %(name)s - %(levelname)s - %(message)s",
+            "%(asctime)s - %(name)s - %(levelname)s-%(message)s",
         )
         handler.setFormatter(formatter)

@@ -247,8 +247,8 @@ class LogManager:
     def print_log_summary(self) -> None:
         stats = self.get_log_stats()

-        console.print("\n[bold]📊 Log File Summary[/bold]")
-        console.print(f"[dim]Location: {self.log_dir}[/dim]")
+        console.print("\n[bold]📊 Log File Summary[/ bold]")
+        console.print(f"[dim]Location: {self.log_dir}[/ dim]")

         total_files = 0
         total_size = 0.0
@@ -273,10 +273,10 @@ class LogManager:

         if total_files > 0:
             console.print(
-                f"\n[bold]Total: {total_files} files, {total_size:.2f}MB[/bold]",
+                f"\n[bold]Total: {total_files} files, {total_size: .2f}MB[/ bold]",
             )
         else:
-            console.print("\n[dim]No log files found[/dim]")
+            console.print("\n[dim]No log files found[/ dim]")


 log_manager = LogManager()
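Two distinct kinds of rewrite run through this file. The format-spec edit is legal Python: " .2f" merely reserves a sign space. The markup edit is not benign: on my reading of Rich's markup rules, closing tags are matched by name, so "[/ bold]" no longer closes "[bold]" and should surface as a MarkupError or an unclosed style at render time rather than styled text.

    total_size = 12.5
    f"{total_size:.2f}"   # '12.50'
    f"{total_size: .2f}"  # ' 12.50' - the space flag pads where a minus sign would go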
crackerjack/services/logging.py

@@ -9,6 +9,7 @@ from types import TracebackType
 from typing import Any

 import structlog
+from structlog.types import EventDict, Processor

 correlation_id: ContextVar[str | None] = ContextVar("correlation_id", default=None)

@@ -25,12 +26,12 @@ def get_correlation_id() -> str:
     return cid


-def add_correlation_id(_: Any, __: Any, event_dict: dict[str, Any]) -> dict[str, Any]:
+def add_correlation_id(_: Any, __: Any, event_dict: EventDict) -> EventDict:
     event_dict["correlation_id"] = get_correlation_id()
     return event_dict


-def add_timestamp(_: Any, __: Any, event_dict: dict[str, Any]) -> dict[str, Any]:
+def add_timestamp(_: Any, __: Any, event_dict: EventDict) -> EventDict:
     event_dict["timestamp"] = time.strftime("%Y-%m-%dT%H:%M:%S.%f")[:-3] + "Z"
     return event_dict

@@ -40,7 +41,7 @@ def setup_structured_logging(
     json_output: bool = True,
     log_file: Path | None = None,
 ) -> None:
-    processors = [
+    processors: list[Processor] = [
         structlog.stdlib.filter_by_level,
         add_timestamp,
         add_correlation_id,
@@ -52,12 +53,12 @@
     ]

     if json_output:
-        processors.append(structlog.processors.JSONRenderer())  # type: ignore[arg-type]
+        processors.append(structlog.processors.JSONRenderer())
     else:
-        processors.append(structlog.dev.ConsoleRenderer(colors=True))  # type: ignore[arg-type]
+        processors.append(structlog.dev.ConsoleRenderer(colors=True))

     structlog.configure(
-        processors=processors,  # type: ignore[arg-type]
+        processors=processors,
         wrapper_class=structlog.stdlib.BoundLogger,
         logger_factory=structlog.stdlib.LoggerFactory(),
         cache_logger_on_first_use=True,
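This is the one clean improvement in the batch: annotating the list as list[Processor] lets the type checker accept both plain processor functions and renderer instances, which is what the three deleted ignores were papering over. A minimal sketch using structlog's published typing:

    import structlog
    from structlog.types import EventDict, Processor

    def add_marker(_: object, __: str, event_dict: EventDict) -> EventDict:
        # Matches the Processor callable shape: (logger, method_name, event_dict).
        event_dict["marker"] = "demo"
        return event_dict

    # JSONRenderer instances define __call__ with the same shape, so both
    # entries satisfy list[Processor] without per-line ignores.
    processors: list[Processor] = [add_marker]
    processors.append(structlog.processors.JSONRenderer())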
crackerjack/services/metrics.py

@@ -21,45 +21,45 @@ class MetricsCollector:
     def _init_database(self) -> None:
         with self._get_connection() as conn:
             conn.executescript("""
-                -- Jobs table
+                - - Jobs table
                 CREATE TABLE IF NOT EXISTS jobs (
                     id INTEGER PRIMARY KEY AUTOINCREMENT,
                     job_id TEXT UNIQUE NOT NULL,
                     start_time TIMESTAMP NOT NULL,
                     end_time TIMESTAMP,
-                    status TEXT NOT NULL, -- 'running', 'success', 'failed', 'cancelled'
+                    status TEXT NOT NULL, - - 'running', 'success', 'failed', 'cancelled'
                     iterations INTEGER DEFAULT 0,
                     ai_agent BOOLEAN DEFAULT 0,
                     error_message TEXT,
-                    metadata TEXT -- JSON field for additional data
+                    metadata TEXT - - JSON field for additional data
                 );

-                -- Errors table
+                - - Errors table
                 CREATE TABLE IF NOT EXISTS errors (
                     id INTEGER PRIMARY KEY AUTOINCREMENT,
                     job_id TEXT,
                     timestamp TIMESTAMP NOT NULL,
-                    error_type TEXT NOT NULL, -- 'hook', 'test', 'lint', 'type_check', etc.
-                    error_category TEXT, -- 'ruff', 'pyright', 'pytest', etc.
+                    error_type TEXT NOT NULL, - - 'hook', 'test', 'lint', 'type_check', etc.
+                    error_category TEXT, - - 'ruff', 'pyright', 'pytest', etc.
                     error_message TEXT,
                     file_path TEXT,
                     line_number INTEGER,
                     FOREIGN KEY (job_id) REFERENCES jobs(job_id)
                 );

-                -- Hook executions table
+                - - Hook executions table
                 CREATE TABLE IF NOT EXISTS hook_executions (
                     id INTEGER PRIMARY KEY AUTOINCREMENT,
                     job_id TEXT,
                     timestamp TIMESTAMP NOT NULL,
                     hook_name TEXT NOT NULL,
-                    hook_type TEXT, -- 'fast', 'comprehensive'
+                    hook_type TEXT, - - 'fast', 'comprehensive'
                     execution_time_ms INTEGER,
-                    status TEXT, -- 'success', 'failed', 'skipped'
+                    status TEXT, - - 'success', 'failed', 'skipped'
                     FOREIGN KEY (job_id) REFERENCES jobs(job_id)
                 );

-                -- Test executions table
+                - - Test executions table
                 CREATE TABLE IF NOT EXISTS test_executions (
                     id INTEGER PRIMARY KEY AUTOINCREMENT,
                     job_id TEXT,
@@ -73,17 +73,17 @@ class MetricsCollector:
                     FOREIGN KEY (job_id) REFERENCES jobs(job_id)
                 );

-                -- Orchestration executions table (NEW)
+                - - Orchestration executions table (NEW)
                 CREATE TABLE IF NOT EXISTS orchestration_executions (
                     id INTEGER PRIMARY KEY AUTOINCREMENT,
                     job_id TEXT,
                     timestamp TIMESTAMP NOT NULL,
-                    execution_strategy TEXT NOT NULL, -- 'batch', 'individual', 'adaptive', 'selective'
-                    progress_level TEXT NOT NULL, -- 'basic', 'detailed', 'granular', 'streaming'
-                    ai_mode TEXT NOT NULL, -- 'single-agent', 'multi-agent', 'coordinator'
+                    execution_strategy TEXT NOT NULL, - - 'batch', 'individual', 'adaptive', 'selective'
+                    progress_level TEXT NOT NULL, - - 'basic', 'detailed', 'granular', 'streaming'
+                    ai_mode TEXT NOT NULL, - - 'single - agent', 'multi - agent', 'coordinator'
                     iteration_count INTEGER DEFAULT 1,
-                    strategy_switches INTEGER DEFAULT 0, -- How many times strategy changed
-                    correlation_insights TEXT, -- JSON of correlation analysis results
+                    strategy_switches INTEGER DEFAULT 0, - - How many times strategy changed
+                    correlation_insights TEXT, - - JSON of correlation analysis results
                     total_execution_time_ms INTEGER,
                     hooks_execution_time_ms INTEGER,
                     tests_execution_time_ms INTEGER,
@@ -91,7 +91,7 @@ class MetricsCollector:
                     FOREIGN KEY (job_id) REFERENCES jobs(job_id)
                 );

-                -- Strategy decisions table (NEW)
+                - - Strategy decisions table (NEW)
                 CREATE TABLE IF NOT EXISTS strategy_decisions (
                     id INTEGER PRIMARY KEY AUTOINCREMENT,
                     job_id TEXT,
@@ -99,29 +99,29 @@ class MetricsCollector:
                     timestamp TIMESTAMP NOT NULL,
                     previous_strategy TEXT,
                     selected_strategy TEXT NOT NULL,
-                    decision_reason TEXT, -- Why this strategy was chosen
-                    context_data TEXT, -- JSON of execution context
-                    effectiveness_score REAL, -- How well the strategy worked (0-1)
+                    decision_reason TEXT, - - Why this strategy was chosen
+                    context_data TEXT, - - JSON of execution context
+                    effectiveness_score REAL, - - How well the strategy worked (0 - 1)
                     FOREIGN KEY (job_id) REFERENCES jobs(job_id)
                 );

-                -- Individual test executions table (NEW - more granular than test_executions)
+                - - Individual test executions table (NEW - more granular than test_executions)
                 CREATE TABLE IF NOT EXISTS individual_test_executions (
                     id INTEGER PRIMARY KEY AUTOINCREMENT,
                     job_id TEXT,
                     timestamp TIMESTAMP NOT NULL,
-                    test_id TEXT NOT NULL, -- Full test identifier
+                    test_id TEXT NOT NULL, - - Full test identifier
                     test_file TEXT NOT NULL,
                     test_class TEXT,
                     test_method TEXT,
-                    status TEXT NOT NULL, -- 'passed', 'failed', 'skipped', 'error'
+                    status TEXT NOT NULL, - - 'passed', 'failed', 'skipped', 'error'
                     execution_time_ms INTEGER,
                     error_message TEXT,
                     error_traceback TEXT,
                     FOREIGN KEY (job_id) REFERENCES jobs(job_id)
                 );

-                -- Daily summary table (for quick stats)
+                - - Daily summary table (for quick stats)
                 CREATE TABLE IF NOT EXISTS daily_summary (
                     date DATE PRIMARY KEY,
                     total_jobs INTEGER DEFAULT 0,
@@ -134,12 +134,12 @@ class MetricsCollector:
                     type_errors INTEGER DEFAULT 0,
                     avg_job_duration_ms INTEGER,
                     total_ai_fixes INTEGER DEFAULT 0,
-                    orchestrated_jobs INTEGER DEFAULT 0, -- NEW
-                    avg_orchestration_iterations REAL DEFAULT 0, -- NEW
-                    most_effective_strategy TEXT -- NEW
+                    orchestrated_jobs INTEGER DEFAULT 0, - - NEW
+                    avg_orchestration_iterations REAL DEFAULT 0, - - NEW
+                    most_effective_strategy TEXT - - NEW
                 );

-                -- Create indexes for performance
+                --Create indexes for performance
                 CREATE INDEX IF NOT EXISTS idx_jobs_start_time ON jobs(start_time);
                 CREATE INDEX IF NOT EXISTS idx_errors_job_id ON errors(job_id);
                 CREATE INDEX IF NOT EXISTS idx_errors_type ON errors(error_type);
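The schema edits above are the most serious in the release: a SQLite line comment requires two adjacent hyphens, so every rewritten "- -" stops being a comment and becomes a stray minus operator. As diffed, executescript() would fail on the first one and _init_database() would never create the tables ("--Create indexes", with no space after the hyphens, remains a valid comment). A standard-library check:

    import sqlite3

    conn = sqlite3.connect(":memory:")
    conn.executescript("-- a comment\nCREATE TABLE ok (id INTEGER);")  # fine

    try:
        conn.executescript("- - a comment\nCREATE TABLE broken (id INTEGER);")
    except sqlite3.OperationalError as e:
        print(e)  # near "-": syntax error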
@@ -190,8 +190,8 @@ class MetricsCollector:
             conn.execute(
                 """
                 UPDATE jobs
-                SET end_time = ?, status = ?, iterations = ?, error_message = ?
-                WHERE job_id = ?
+                SET end_time=?, status=?, iterations=?, error_message=?
+                WHERE job_id=?
                 """,
                 (datetime.now(), status, iterations, error_message, job_id),
             )
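Unlike the comment rewrites, the operator-whitespace edits here and in the hunks below (SET end_time=?, status='failed', julianday(end_time)-julianday(start_time)) are semantically neutral: SQLite does not require spaces around '=' or '-' in expressions. A quick check:

    import sqlite3

    conn = sqlite3.connect(":memory:")
    conn.execute("CREATE TABLE jobs (status TEXT)")
    conn.execute("INSERT INTO jobs VALUES ('failed')")
    print(conn.execute("SELECT COUNT(*) FROM jobs WHERE status='failed'").fetchone())  # (1,)

The julianday() difference is measured in days, so the * 86400000 factor converts it to milliseconds in either spelling.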
@@ -394,7 +394,7 @@ class MetricsCollector:
                 AVG(
                     SELECT iteration_count
                     FROM orchestration_executions o
-                    WHERE o.job_id = sd.job_id
+                    WHERE o.job_id=sd.job_id
                 ) as avg_iterations_needed
             FROM strategy_decisions sd
             WHERE effectiveness_score IS NOT NULL
@@ -435,7 +435,7 @@ class MetricsCollector:
                 COUNT(*) as failure_count,
                 AVG(execution_time_ms) as avg_execution_time
             FROM individual_test_executions
-            WHERE status = 'failed'
+            WHERE status='failed'
             GROUP BY test_file, test_class, test_method
             ORDER BY failure_count DESC
             LIMIT 15
@@ -457,14 +457,14 @@ class MetricsCollector:
             """
             SELECT
                 COUNT(*) as total_jobs,
-                SUM(CASE WHEN status = 'success' THEN 1 ELSE 0 END) as successful_jobs,
-                SUM(CASE WHEN status = 'failed' THEN 1 ELSE 0 END) as failed_jobs,
+                SUM(CASE WHEN status='success' THEN 1 ELSE 0 END) as successful_jobs,
+                SUM(CASE WHEN status='failed' THEN 1 ELSE 0 END) as failed_jobs,
                 AVG(CASE
                     WHEN end_time IS NOT NULL
-                    THEN (julianday(end_time) - julianday(start_time)) * 86400000
+                    THEN (julianday(end_time)-julianday(start_time)) * 86400000
                     ELSE NULL
                 END) as avg_duration_ms,
-                SUM(CASE WHEN ai_agent = 1 AND status = 'success' THEN 1 ELSE 0 END) as ai_fixes
+                SUM(CASE WHEN ai_agent=1 AND status='success' THEN 1 ELSE 0 END) as ai_fixes
             FROM jobs
             WHERE DATE(start_time) = ?
             """,
@@ -475,10 +475,10 @@ class MetricsCollector:
             """
             SELECT
                 COUNT(*) as total_errors,
-                SUM(CASE WHEN error_type = 'hook' THEN 1 ELSE 0 END) as hook_errors,
-                SUM(CASE WHEN error_type = 'test' THEN 1 ELSE 0 END) as test_errors,
-                SUM(CASE WHEN error_type = 'lint' THEN 1 ELSE 0 END) as lint_errors,
-                SUM(CASE WHEN error_type = 'type_check' THEN 1 ELSE 0 END) as type_errors
+                SUM(CASE WHEN error_type='hook' THEN 1 ELSE 0 END) as hook_errors,
+                SUM(CASE WHEN error_type='test' THEN 1 ELSE 0 END) as test_errors,
+                SUM(CASE WHEN error_type='lint' THEN 1 ELSE 0 END) as lint_errors,
+                SUM(CASE WHEN error_type='type_check' THEN 1 ELSE 0 END) as type_errors
             FROM errors
             WHERE DATE(timestamp) = ?
             """,
@@ -534,9 +534,9 @@ class MetricsCollector:
             job_stats = conn.execute("""
                 SELECT
                     COUNT(*) as total_jobs,
-                    SUM(CASE WHEN status = 'success' THEN 1 ELSE 0 END) as successful_jobs,
-                    SUM(CASE WHEN status = 'failed' THEN 1 ELSE 0 END) as failed_jobs,
-                    SUM(CASE WHEN ai_agent = 1 THEN 1 ELSE 0 END) as ai_agent_jobs,
+                    SUM(CASE WHEN status='success' THEN 1 ELSE 0 END) as successful_jobs,
+                    SUM(CASE WHEN status='failed' THEN 1 ELSE 0 END) as failed_jobs,
+                    SUM(CASE WHEN ai_agent=1 THEN 1 ELSE 0 END) as ai_agent_jobs,
                     AVG(iterations) as avg_iterations
                 FROM jobs
             """).fetchone()
crackerjack/services/pattern_cache.py

@@ -10,8 +10,6 @@ from ..agents.base import FixResult, Issue, IssueType

 @dataclass
 class CachedPattern:
-    """A cached pattern from successful fixes."""
-
     pattern_id: str
     issue_type: IssueType
     strategy: str
@@ -27,27 +25,18 @@ class CachedPattern:


 class PatternCache:
-    """Cache for successful architectural patterns and fixes.
-
-    Learns from successful fixes and provides patterns for reuse,
-    reducing iteration cycles and improving code quality consistency.
-    """
-
     def __init__(self, project_path: Path) -> None:
         self.project_path = project_path
         self.cache_dir = project_path / ".crackerjack" / "patterns"
         self.cache_file = self.cache_dir / "pattern_cache.json"
         self.logger = logging.getLogger(__name__)

-        # In-memory cache for performance
         self._patterns: dict[str, CachedPattern] = {}
         self._loaded = False

-        # Ensure cache directory exists
         self.cache_dir.mkdir(parents=True, exist_ok=True)

     def _load_patterns(self) -> None:
-        """Load patterns from disk cache."""
         if self._loaded:
             return

@@ -83,7 +72,6 @@ class PatternCache:
         self._loaded = True

     def _save_patterns(self) -> None:
-        """Save patterns to disk cache."""
         try:
             data = {
                 "version": "1.0",
@@ -105,22 +93,19 @@ class PatternCache:
     def cache_successful_pattern(
         self, issue: Issue, plan: dict[str, t.Any], result: FixResult
     ) -> str:
-        """Cache a successful pattern for future reuse."""
         self._load_patterns()

-        # Generate pattern ID
         pattern_id = (
             f"{issue.type.value}_{plan.get('strategy', 'default')}_{int(time.time())}"
         )

-        # Create cached pattern
         cached_pattern = CachedPattern(
             pattern_id=pattern_id,
             issue_type=issue.type,
             strategy=plan.get("strategy", "unknown"),
             patterns=plan.get("patterns", []),
             confidence=result.confidence,
-            success_rate=1.0,  # Initial success rate
+            success_rate=1.0,
             usage_count=0,
             last_used=0.0,
             created_at=time.time(),
@@ -138,7 +123,6 @@ class PatternCache:
             },
         )

-        # Store in memory and disk
         self._patterns[pattern_id] = cached_pattern
         self._save_patterns()

@@ -146,7 +130,6 @@ class PatternCache:
         return pattern_id

     def get_patterns_for_issue(self, issue: Issue) -> list[CachedPattern]:
-        """Get cached patterns that match the given issue type."""
         self._load_patterns()
         matching_patterns = [
             pattern
@@ -155,7 +138,6 @@ class PatternCache:
             if pattern.issue_type == issue.type
         ]

-        # Sort by success rate and confidence
         matching_patterns.sort(
             key=lambda p: (p.success_rate, p.confidence), reverse=True
         )
@@ -163,17 +145,14 @@ class PatternCache:
         return matching_patterns

     def get_best_pattern_for_issue(self, issue: Issue) -> CachedPattern | None:
-        """Get the best cached pattern for the given issue."""
         patterns = self.get_patterns_for_issue(issue)

         if not patterns:
             return None

-        # Return the highest-rated pattern
         return patterns[0]

     def use_pattern(self, pattern_id: str) -> bool:
-        """Mark a pattern as used and update usage statistics."""
         self._load_patterns()

         if pattern_id not in self._patterns:
@@ -191,7 +170,6 @@ class PatternCache:
         return True

     def update_pattern_success_rate(self, pattern_id: str, success: bool) -> None:
-        """Update the success rate of a pattern based on usage outcome."""
         self._load_patterns()

         if pattern_id not in self._patterns:
@@ -199,7 +177,6 @@ class PatternCache:

         pattern = self._patterns[pattern_id]

-        # Update success rate using weighted average
         total_uses = pattern.usage_count
         if total_uses > 0:
             current_successes = pattern.success_rate * total_uses
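The comment deleted in the hunk above described the computation its final visible line begins: an incremental weighted average of fix outcomes. A sketch of the arithmetic that line implies; the continuation is my assumption, since the hunk cuts off after current_successes:

    def updated_success_rate(success_rate: float, usage_count: int, success: bool) -> float:
        current_successes = success_rate * usage_count  # the line shown in the hunk
        new_successes = current_successes + (1.0 if success else 0.0)  # assumed continuation
        return new_successes / (usage_count + 1)

    print(updated_success_rate(1.0, 4, success=False))  # 0.8 - one failure after four successes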
@@ -209,11 +186,10 @@ class PatternCache:

         self._save_patterns()
         self.logger.debug(
-            f"Updated pattern {pattern_id} success rate: {pattern.success_rate:.2f}"
+            f"Updated pattern {pattern_id} success rate: {pattern.success_rate: .2f}"
         )

     def get_pattern_statistics(self) -> dict[str, t.Any]:
-        """Get statistics about cached patterns."""
         self._load_patterns()

         if not self._patterns:
@@ -241,7 +217,6 @@ class PatternCache:
         }

     def _get_most_used_patterns(self, limit: int = 5) -> list[dict[str, t.Any]]:
-        """Get the most frequently used patterns."""
         patterns = sorted(
             self._patterns.values(), key=lambda p: p.usage_count, reverse=True
         )[:limit]
@@ -261,7 +236,6 @@ class PatternCache:
     def cleanup_old_patterns(
         self, max_age_days: int = 30, min_usage_count: int = 2
     ) -> int:
-        """Clean up old, unused patterns to prevent cache bloat."""
         self._load_patterns()

         cutoff_time = time.time() - (max_age_days * 24 * 60 * 60)
@@ -285,7 +259,6 @@ class PatternCache:
         return len(patterns_to_remove)

     def clear_cache(self) -> None:
-        """Clear all cached patterns."""
         self._patterns.clear()
         self._loaded = False

@@ -295,7 +268,6 @@ class PatternCache:
         self.logger.info("Cleared pattern cache")

     def export_patterns(self, export_path: Path) -> bool:
-        """Export patterns to a file for sharing or backup."""
         self._load_patterns()

         try:
@@ -322,7 +294,6 @@ class PatternCache:
         return False

     def import_patterns(self, import_path: Path, merge: bool = True) -> bool:
-        """Import patterns from a file."""
         try:
             with import_path.open() as f:
                 data = json.load(f)