crackerjack 0.30.3__py3-none-any.whl → 0.31.4__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of crackerjack might be problematic. Click here for more details.
- crackerjack/CLAUDE.md +1005 -0
- crackerjack/RULES.md +380 -0
- crackerjack/__init__.py +42 -13
- crackerjack/__main__.py +225 -299
- crackerjack/agents/__init__.py +41 -0
- crackerjack/agents/architect_agent.py +281 -0
- crackerjack/agents/base.py +169 -0
- crackerjack/agents/coordinator.py +512 -0
- crackerjack/agents/documentation_agent.py +498 -0
- crackerjack/agents/dry_agent.py +388 -0
- crackerjack/agents/formatting_agent.py +245 -0
- crackerjack/agents/import_optimization_agent.py +281 -0
- crackerjack/agents/performance_agent.py +669 -0
- crackerjack/agents/proactive_agent.py +104 -0
- crackerjack/agents/refactoring_agent.py +788 -0
- crackerjack/agents/security_agent.py +529 -0
- crackerjack/agents/test_creation_agent.py +652 -0
- crackerjack/agents/test_specialist_agent.py +486 -0
- crackerjack/agents/tracker.py +212 -0
- crackerjack/api.py +560 -0
- crackerjack/cli/__init__.py +24 -0
- crackerjack/cli/facade.py +104 -0
- crackerjack/cli/handlers.py +267 -0
- crackerjack/cli/interactive.py +471 -0
- crackerjack/cli/options.py +401 -0
- crackerjack/cli/utils.py +18 -0
- crackerjack/code_cleaner.py +618 -928
- crackerjack/config/__init__.py +19 -0
- crackerjack/config/hooks.py +218 -0
- crackerjack/core/__init__.py +0 -0
- crackerjack/core/async_workflow_orchestrator.py +406 -0
- crackerjack/core/autofix_coordinator.py +200 -0
- crackerjack/core/container.py +104 -0
- crackerjack/core/enhanced_container.py +542 -0
- crackerjack/core/performance.py +243 -0
- crackerjack/core/phase_coordinator.py +561 -0
- crackerjack/core/proactive_workflow.py +316 -0
- crackerjack/core/session_coordinator.py +289 -0
- crackerjack/core/workflow_orchestrator.py +640 -0
- crackerjack/dynamic_config.py +94 -103
- crackerjack/errors.py +263 -41
- crackerjack/executors/__init__.py +11 -0
- crackerjack/executors/async_hook_executor.py +431 -0
- crackerjack/executors/cached_hook_executor.py +242 -0
- crackerjack/executors/hook_executor.py +345 -0
- crackerjack/executors/individual_hook_executor.py +669 -0
- crackerjack/intelligence/__init__.py +44 -0
- crackerjack/intelligence/adaptive_learning.py +751 -0
- crackerjack/intelligence/agent_orchestrator.py +551 -0
- crackerjack/intelligence/agent_registry.py +414 -0
- crackerjack/intelligence/agent_selector.py +502 -0
- crackerjack/intelligence/integration.py +290 -0
- crackerjack/interactive.py +576 -315
- crackerjack/managers/__init__.py +11 -0
- crackerjack/managers/async_hook_manager.py +135 -0
- crackerjack/managers/hook_manager.py +137 -0
- crackerjack/managers/publish_manager.py +411 -0
- crackerjack/managers/test_command_builder.py +151 -0
- crackerjack/managers/test_executor.py +435 -0
- crackerjack/managers/test_manager.py +258 -0
- crackerjack/managers/test_manager_backup.py +1124 -0
- crackerjack/managers/test_progress.py +144 -0
- crackerjack/mcp/__init__.py +0 -0
- crackerjack/mcp/cache.py +336 -0
- crackerjack/mcp/client_runner.py +104 -0
- crackerjack/mcp/context.py +615 -0
- crackerjack/mcp/dashboard.py +636 -0
- crackerjack/mcp/enhanced_progress_monitor.py +479 -0
- crackerjack/mcp/file_monitor.py +336 -0
- crackerjack/mcp/progress_components.py +569 -0
- crackerjack/mcp/progress_monitor.py +949 -0
- crackerjack/mcp/rate_limiter.py +332 -0
- crackerjack/mcp/server.py +22 -0
- crackerjack/mcp/server_core.py +244 -0
- crackerjack/mcp/service_watchdog.py +501 -0
- crackerjack/mcp/state.py +395 -0
- crackerjack/mcp/task_manager.py +257 -0
- crackerjack/mcp/tools/__init__.py +17 -0
- crackerjack/mcp/tools/core_tools.py +249 -0
- crackerjack/mcp/tools/error_analyzer.py +308 -0
- crackerjack/mcp/tools/execution_tools.py +370 -0
- crackerjack/mcp/tools/execution_tools_backup.py +1097 -0
- crackerjack/mcp/tools/intelligence_tool_registry.py +80 -0
- crackerjack/mcp/tools/intelligence_tools.py +314 -0
- crackerjack/mcp/tools/monitoring_tools.py +502 -0
- crackerjack/mcp/tools/proactive_tools.py +384 -0
- crackerjack/mcp/tools/progress_tools.py +141 -0
- crackerjack/mcp/tools/utility_tools.py +341 -0
- crackerjack/mcp/tools/workflow_executor.py +360 -0
- crackerjack/mcp/websocket/__init__.py +14 -0
- crackerjack/mcp/websocket/app.py +39 -0
- crackerjack/mcp/websocket/endpoints.py +559 -0
- crackerjack/mcp/websocket/jobs.py +253 -0
- crackerjack/mcp/websocket/server.py +116 -0
- crackerjack/mcp/websocket/websocket_handler.py +78 -0
- crackerjack/mcp/websocket_server.py +10 -0
- crackerjack/models/__init__.py +31 -0
- crackerjack/models/config.py +93 -0
- crackerjack/models/config_adapter.py +230 -0
- crackerjack/models/protocols.py +118 -0
- crackerjack/models/task.py +154 -0
- crackerjack/monitoring/ai_agent_watchdog.py +450 -0
- crackerjack/monitoring/regression_prevention.py +638 -0
- crackerjack/orchestration/__init__.py +0 -0
- crackerjack/orchestration/advanced_orchestrator.py +970 -0
- crackerjack/orchestration/execution_strategies.py +341 -0
- crackerjack/orchestration/test_progress_streamer.py +636 -0
- crackerjack/plugins/__init__.py +15 -0
- crackerjack/plugins/base.py +200 -0
- crackerjack/plugins/hooks.py +246 -0
- crackerjack/plugins/loader.py +335 -0
- crackerjack/plugins/managers.py +259 -0
- crackerjack/py313.py +8 -3
- crackerjack/services/__init__.py +22 -0
- crackerjack/services/cache.py +314 -0
- crackerjack/services/config.py +347 -0
- crackerjack/services/config_integrity.py +99 -0
- crackerjack/services/contextual_ai_assistant.py +516 -0
- crackerjack/services/coverage_ratchet.py +347 -0
- crackerjack/services/debug.py +736 -0
- crackerjack/services/dependency_monitor.py +617 -0
- crackerjack/services/enhanced_filesystem.py +439 -0
- crackerjack/services/file_hasher.py +151 -0
- crackerjack/services/filesystem.py +395 -0
- crackerjack/services/git.py +165 -0
- crackerjack/services/health_metrics.py +611 -0
- crackerjack/services/initialization.py +847 -0
- crackerjack/services/log_manager.py +286 -0
- crackerjack/services/logging.py +174 -0
- crackerjack/services/metrics.py +578 -0
- crackerjack/services/pattern_cache.py +362 -0
- crackerjack/services/pattern_detector.py +515 -0
- crackerjack/services/performance_benchmarks.py +653 -0
- crackerjack/services/security.py +163 -0
- crackerjack/services/server_manager.py +234 -0
- crackerjack/services/smart_scheduling.py +144 -0
- crackerjack/services/tool_version_service.py +61 -0
- crackerjack/services/unified_config.py +437 -0
- crackerjack/services/version_checker.py +248 -0
- crackerjack/slash_commands/__init__.py +14 -0
- crackerjack/slash_commands/init.md +122 -0
- crackerjack/slash_commands/run.md +163 -0
- crackerjack/slash_commands/status.md +127 -0
- crackerjack-0.31.4.dist-info/METADATA +742 -0
- crackerjack-0.31.4.dist-info/RECORD +148 -0
- crackerjack-0.31.4.dist-info/entry_points.txt +2 -0
- crackerjack/.gitignore +0 -34
- crackerjack/.libcst.codemod.yaml +0 -18
- crackerjack/.pdm.toml +0 -1
- crackerjack/crackerjack.py +0 -3805
- crackerjack/pyproject.toml +0 -286
- crackerjack-0.30.3.dist-info/METADATA +0 -1290
- crackerjack-0.30.3.dist-info/RECORD +0 -16
- {crackerjack-0.30.3.dist-info → crackerjack-0.31.4.dist-info}/WHEEL +0 -0
- {crackerjack-0.30.3.dist-info → crackerjack-0.31.4.dist-info}/licenses/LICENSE +0 -0
|
@@ -0,0 +1,362 @@
|
|
|
1
|
+
import json
|
|
2
|
+
import logging
|
|
3
|
+
import time
|
|
4
|
+
import typing as t
|
|
5
|
+
from dataclasses import asdict, dataclass
|
|
6
|
+
from pathlib import Path
|
|
7
|
+
|
|
8
|
+
from ..agents.base import FixResult, Issue, IssueType
|
|
9
|
+
|
|
10
|
+
|
|
11
|
+
@dataclass
class CachedPattern:
    """A cached pattern from successful fixes."""

    pattern_id: str  # unique id, generated as "<issue_type>_<strategy>_<unix timestamp>"
    issue_type: IssueType  # category of issue this pattern fixed
    strategy: str  # fix strategy name taken from the plan ("unknown" if absent)
    patterns: list[str]  # pattern descriptions taken from the plan
    confidence: float  # confidence of the originating FixResult
    success_rate: float  # rolling success rate across reuses (starts at 1.0)
    usage_count: int  # number of times this pattern has been reused
    last_used: float  # epoch seconds of last reuse (0.0 = never reused)
    created_at: float  # epoch seconds when the pattern was cached
    files_modified: list[str]  # files touched by the originating fix
    fixes_applied: list[str]  # individual fixes applied by the originating fix
    metadata: dict[str, t.Any]  # original issue/plan/result details for context
|
|
27
|
+
|
|
28
|
+
|
|
29
|
+
class PatternCache:
    """Cache for successful architectural patterns and fixes.

    Learns from successful fixes and provides patterns for reuse,
    reducing iteration cycles and improving code quality consistency.

    Patterns are persisted as JSON under
    ``<project>/.crackerjack/patterns/pattern_cache.json`` and loaded
    lazily on first access.
    """

    def __init__(self, project_path: Path) -> None:
        """Initialize the cache rooted at *project_path*."""
        self.project_path = project_path
        self.cache_dir = project_path / ".crackerjack" / "patterns"
        self.cache_file = self.cache_dir / "pattern_cache.json"
        self.logger = logging.getLogger(__name__)

        # In-memory cache for performance; populated lazily by _load_patterns()
        self._patterns: dict[str, CachedPattern] = {}
        self._loaded = False

        # Ensure cache directory exists
        self.cache_dir.mkdir(parents=True, exist_ok=True)

    @staticmethod
    def _pattern_from_dict(pattern_data: dict[str, t.Any]) -> CachedPattern:
        """Deserialize one pattern record (shared by load and import)."""
        return CachedPattern(
            pattern_id=pattern_data["pattern_id"],
            issue_type=IssueType(pattern_data["issue_type"]),
            strategy=pattern_data["strategy"],
            patterns=pattern_data["patterns"],
            confidence=pattern_data["confidence"],
            success_rate=pattern_data["success_rate"],
            usage_count=pattern_data["usage_count"],
            last_used=pattern_data["last_used"],
            created_at=pattern_data["created_at"],
            files_modified=pattern_data["files_modified"],
            fixes_applied=pattern_data["fixes_applied"],
            metadata=pattern_data.get("metadata", {}),
        )

    def _patterns_as_dicts(self) -> list[dict[str, t.Any]]:
        """Serialize all cached patterns into JSON-compatible dicts."""
        # IssueType is not JSON-serializable, so flatten it to its value.
        return [
            {**asdict(pattern), "issue_type": pattern.issue_type.value}
            for pattern in self._patterns.values()
        ]

    def _load_patterns(self) -> None:
        """Load patterns from disk cache (no-op after the first call)."""
        if self._loaded:
            return

        try:
            if self.cache_file.exists():
                with self.cache_file.open() as f:
                    data = json.load(f)

                for pattern_data in data.get("patterns", []):
                    pattern = self._pattern_from_dict(pattern_data)
                    self._patterns[pattern.pattern_id] = pattern

                self.logger.info(f"Loaded {len(self._patterns)} cached patterns")
            else:
                self.logger.info("No existing pattern cache found")

        except Exception as e:
            # Best-effort: a corrupt or unreadable cache must not break the run.
            self.logger.warning(f"Failed to load pattern cache: {e}")

        self._loaded = True

    def _save_patterns(self) -> None:
        """Save patterns to disk cache (errors are logged, not raised)."""
        try:
            data = {
                "version": "1.0",
                "created": time.time(),
                "patterns": self._patterns_as_dicts(),
            }

            with self.cache_file.open("w") as f:
                json.dump(data, f, indent=2)

            self.logger.debug(f"Saved {len(self._patterns)} patterns to cache")

        except Exception as e:
            self.logger.error(f"Failed to save pattern cache: {e}")

    def cache_successful_pattern(
        self, issue: Issue, plan: dict[str, t.Any], result: FixResult
    ) -> str:
        """Cache a successful pattern for future reuse.

        Returns the generated pattern id.
        """
        self._load_patterns()

        # Generate pattern ID
        pattern_id = (
            f"{issue.type.value}_{plan.get('strategy', 'default')}_{int(time.time())}"
        )

        # Create cached pattern
        cached_pattern = CachedPattern(
            pattern_id=pattern_id,
            issue_type=issue.type,
            strategy=plan.get("strategy", "unknown"),
            patterns=plan.get("patterns", []),
            confidence=result.confidence,
            success_rate=1.0,  # Initial success rate
            usage_count=0,
            last_used=0.0,
            created_at=time.time(),
            files_modified=result.files_modified,
            fixes_applied=result.fixes_applied,
            metadata={
                "issue_id": issue.id,
                "issue_message": issue.message,
                "file_path": issue.file_path,
                "line_number": issue.line_number,
                "severity": issue.severity.value,
                "plan_details": plan,
                "remaining_issues": result.remaining_issues,
                "recommendations": result.recommendations,
            },
        )

        # Store in memory and disk
        self._patterns[pattern_id] = cached_pattern
        self._save_patterns()

        self.logger.info(f"Cached successful pattern: {pattern_id}")
        return pattern_id

    def get_patterns_for_issue(self, issue: Issue) -> list[CachedPattern]:
        """Get cached patterns that match the given issue type.

        Results are ordered best-first by (success_rate, confidence).
        """
        self._load_patterns()

        matching_patterns = [
            pattern
            for pattern in self._patterns.values()
            if pattern.issue_type == issue.type
        ]

        # Sort by success rate and confidence
        matching_patterns.sort(
            key=lambda p: (p.success_rate, p.confidence), reverse=True
        )

        return matching_patterns

    def get_best_pattern_for_issue(self, issue: Issue) -> CachedPattern | None:
        """Get the best cached pattern for the given issue, or None."""
        patterns = self.get_patterns_for_issue(issue)

        if not patterns:
            return None

        # Return the highest-rated pattern
        return patterns[0]

    def use_pattern(self, pattern_id: str) -> bool:
        """Mark a pattern as used and update usage statistics.

        Returns False when the pattern id is unknown.
        """
        self._load_patterns()

        if pattern_id not in self._patterns:
            return False

        pattern = self._patterns[pattern_id]
        pattern.usage_count += 1
        pattern.last_used = time.time()

        self._save_patterns()
        self.logger.debug(
            f"Used pattern {pattern_id} (usage count: {pattern.usage_count})"
        )

        return True

    def update_pattern_success_rate(self, pattern_id: str, success: bool) -> None:
        """Update the success rate of a pattern based on usage outcome.

        Expected call order: ``use_pattern()`` first (which counts the use),
        then this method with that use's outcome.
        """
        self._load_patterns()

        if pattern_id not in self._patterns:
            return

        pattern = self._patterns[pattern_id]

        # Update success rate using a weighted average: the prior uses
        # contribute the old rate, the latest use contributes the new
        # outcome. (The previous implementation scaled the old rate by the
        # *new* total and then added 1, which could push the rate above 1.0.)
        total_uses = pattern.usage_count
        if total_uses > 0:
            successes = pattern.success_rate * (total_uses - 1)
            if success:
                successes += 1
            pattern.success_rate = successes / total_uses

        self._save_patterns()
        self.logger.debug(
            f"Updated pattern {pattern_id} success rate: {pattern.success_rate:.2f}"
        )

    def get_pattern_statistics(self) -> dict[str, t.Any]:
        """Get statistics about cached patterns."""
        self._load_patterns()

        if not self._patterns:
            return {"total_patterns": 0}

        patterns_by_type: dict[str, int] = {}
        total_usage = 0
        avg_success_rate = 0.0

        for pattern in self._patterns.values():
            issue_type = pattern.issue_type.value
            patterns_by_type[issue_type] = patterns_by_type.get(issue_type, 0) + 1
            total_usage += pattern.usage_count
            avg_success_rate += pattern.success_rate

        avg_success_rate = avg_success_rate / len(self._patterns)

        return {
            "total_patterns": len(self._patterns),
            "patterns_by_type": patterns_by_type,
            "total_usage": total_usage,
            "average_success_rate": avg_success_rate,
            "cache_file": str(self.cache_file),
            "most_used_patterns": self._get_most_used_patterns(),
        }

    def _get_most_used_patterns(self, limit: int = 5) -> list[dict[str, t.Any]]:
        """Get summaries of the *limit* most frequently used patterns."""
        patterns = sorted(
            self._patterns.values(), key=lambda p: p.usage_count, reverse=True
        )[:limit]

        return [
            {
                "pattern_id": p.pattern_id,
                "issue_type": p.issue_type.value,
                "strategy": p.strategy,
                "usage_count": p.usage_count,
                "success_rate": p.success_rate,
                "confidence": p.confidence,
            }
            for p in patterns
        ]

    def cleanup_old_patterns(
        self, max_age_days: int = 30, min_usage_count: int = 2
    ) -> int:
        """Clean up old, unused patterns to prevent cache bloat.

        Removes patterns older than *max_age_days* that were used fewer than
        *min_usage_count* times, plus well-used patterns with a poor success
        rate. Returns the number of patterns removed.
        """
        self._load_patterns()

        cutoff_time = time.time() - (max_age_days * 24 * 60 * 60)
        patterns_to_remove = [
            pattern_id
            for pattern_id, pattern in self._patterns.items()
            if (
                pattern.created_at < cutoff_time
                and pattern.usage_count < min_usage_count
            )
            or (pattern.success_rate < 0.2 and pattern.usage_count > 5)
        ]

        for pattern_id in patterns_to_remove:
            del self._patterns[pattern_id]

        if patterns_to_remove:
            self._save_patterns()
            self.logger.info(f"Cleaned up {len(patterns_to_remove)} old patterns")

        return len(patterns_to_remove)

    def clear_cache(self) -> None:
        """Clear all cached patterns from memory and disk."""
        self._patterns.clear()
        self._loaded = False

        if self.cache_file.exists():
            self.cache_file.unlink()

        self.logger.info("Cleared pattern cache")

    def export_patterns(self, export_path: Path) -> bool:
        """Export patterns to a file for sharing or backup.

        Returns True on success, False on error (logged).
        """
        self._load_patterns()

        try:
            data = {
                "version": "1.0",
                "exported_at": time.time(),
                "project_path": str(self.project_path),
                "patterns": self._patterns_as_dicts(),
            }

            with export_path.open("w") as f:
                json.dump(data, f, indent=2)

            self.logger.info(
                f"Exported {len(self._patterns)} patterns to {export_path}"
            )
            return True

        except Exception as e:
            self.logger.error(f"Failed to export patterns: {e}")
            return False

    def import_patterns(self, import_path: Path, merge: bool = True) -> bool:
        """Import patterns from a file.

        When *merge* is True (default) existing pattern ids are kept and only
        new ids are added; when False, imported patterns overwrite entries
        with the same id. Returns True on success, False on error (logged).
        """
        # Load existing patterns first so merging compares against (and the
        # subsequent save preserves) what is already cached; previously an
        # import on a cold cache silently overwrote the on-disk patterns.
        self._load_patterns()

        try:
            with import_path.open() as f:
                data = json.load(f)

            imported_count = 0

            for pattern_data in data.get("patterns", []):
                pattern = self._pattern_from_dict(pattern_data)

                if not merge or pattern.pattern_id not in self._patterns:
                    self._patterns[pattern.pattern_id] = pattern
                    imported_count += 1

            if imported_count > 0:
                self._save_patterns()
                self.logger.info(
                    f"Imported {imported_count} patterns from {import_path}"
                )

            return True

        except Exception as e:
            self.logger.error(f"Failed to import patterns: {e}")
            return False
|