claude_mpm-4.4.0-py3-none-any.whl → claude_mpm-4.4.4-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (129)
  1. claude_mpm/VERSION +1 -1
  2. claude_mpm/agents/WORKFLOW.md +2 -14
  3. claude_mpm/agents/agent_loader.py +3 -2
  4. claude_mpm/agents/agent_loader_integration.py +2 -1
  5. claude_mpm/agents/async_agent_loader.py +2 -2
  6. claude_mpm/agents/base_agent_loader.py +2 -2
  7. claude_mpm/agents/frontmatter_validator.py +1 -0
  8. claude_mpm/agents/system_agent_config.py +2 -1
  9. claude_mpm/cli/commands/configure.py +2 -29
  10. claude_mpm/cli/commands/doctor.py +44 -5
  11. claude_mpm/cli/commands/mpm_init.py +117 -63
  12. claude_mpm/cli/parsers/configure_parser.py +6 -15
  13. claude_mpm/cli/startup_logging.py +1 -3
  14. claude_mpm/config/agent_config.py +1 -1
  15. claude_mpm/config/paths.py +2 -1
  16. claude_mpm/core/agent_name_normalizer.py +1 -0
  17. claude_mpm/core/config.py +2 -1
  18. claude_mpm/core/config_aliases.py +2 -1
  19. claude_mpm/core/file_utils.py +0 -1
  20. claude_mpm/core/framework/__init__.py +38 -0
  21. claude_mpm/core/framework/formatters/__init__.py +11 -0
  22. claude_mpm/core/framework/formatters/capability_generator.py +367 -0
  23. claude_mpm/core/framework/formatters/content_formatter.py +288 -0
  24. claude_mpm/core/framework/formatters/context_generator.py +184 -0
  25. claude_mpm/core/framework/loaders/__init__.py +13 -0
  26. claude_mpm/core/framework/loaders/agent_loader.py +206 -0
  27. claude_mpm/core/framework/loaders/file_loader.py +223 -0
  28. claude_mpm/core/framework/loaders/instruction_loader.py +161 -0
  29. claude_mpm/core/framework/loaders/packaged_loader.py +232 -0
  30. claude_mpm/core/framework/processors/__init__.py +11 -0
  31. claude_mpm/core/framework/processors/memory_processor.py +230 -0
  32. claude_mpm/core/framework/processors/metadata_processor.py +146 -0
  33. claude_mpm/core/framework/processors/template_processor.py +244 -0
  34. claude_mpm/core/framework_loader.py +298 -1795
  35. claude_mpm/core/log_manager.py +2 -1
  36. claude_mpm/core/tool_access_control.py +1 -0
  37. claude_mpm/core/unified_agent_registry.py +2 -1
  38. claude_mpm/core/unified_paths.py +1 -0
  39. claude_mpm/experimental/cli_enhancements.py +1 -0
  40. claude_mpm/hooks/__init__.py +9 -1
  41. claude_mpm/hooks/base_hook.py +1 -0
  42. claude_mpm/hooks/instruction_reinforcement.py +1 -0
  43. claude_mpm/hooks/kuzu_memory_hook.py +359 -0
  44. claude_mpm/hooks/validation_hooks.py +1 -1
  45. claude_mpm/scripts/mpm_doctor.py +1 -0
  46. claude_mpm/services/agents/loading/agent_profile_loader.py +1 -1
  47. claude_mpm/services/agents/loading/base_agent_manager.py +1 -1
  48. claude_mpm/services/agents/loading/framework_agent_loader.py +1 -1
  49. claude_mpm/services/agents/management/agent_capabilities_generator.py +1 -0
  50. claude_mpm/services/agents/management/agent_management_service.py +1 -1
  51. claude_mpm/services/agents/memory/memory_categorization_service.py +0 -1
  52. claude_mpm/services/agents/memory/memory_file_service.py +6 -2
  53. claude_mpm/services/agents/memory/memory_format_service.py +0 -1
  54. claude_mpm/services/agents/registry/deployed_agent_discovery.py +1 -1
  55. claude_mpm/services/async_session_logger.py +1 -1
  56. claude_mpm/services/claude_session_logger.py +1 -0
  57. claude_mpm/services/core/path_resolver.py +2 -0
  58. claude_mpm/services/diagnostics/checks/__init__.py +2 -0
  59. claude_mpm/services/diagnostics/checks/installation_check.py +126 -25
  60. claude_mpm/services/diagnostics/checks/mcp_services_check.py +399 -0
  61. claude_mpm/services/diagnostics/diagnostic_runner.py +4 -0
  62. claude_mpm/services/diagnostics/doctor_reporter.py +259 -32
  63. claude_mpm/services/event_bus/direct_relay.py +2 -1
  64. claude_mpm/services/event_bus/event_bus.py +1 -0
  65. claude_mpm/services/event_bus/relay.py +3 -2
  66. claude_mpm/services/framework_claude_md_generator/content_assembler.py +1 -1
  67. claude_mpm/services/infrastructure/daemon_manager.py +1 -1
  68. claude_mpm/services/mcp_config_manager.py +67 -4
  69. claude_mpm/services/mcp_gateway/core/process_pool.py +320 -0
  70. claude_mpm/services/mcp_gateway/core/startup_verification.py +2 -2
  71. claude_mpm/services/mcp_gateway/main.py +3 -13
  72. claude_mpm/services/mcp_gateway/server/stdio_server.py +4 -10
  73. claude_mpm/services/mcp_gateway/tools/__init__.py +14 -2
  74. claude_mpm/services/mcp_gateway/tools/external_mcp_services.py +38 -6
  75. claude_mpm/services/mcp_gateway/tools/kuzu_memory_service.py +527 -0
  76. claude_mpm/services/memory/cache/simple_cache.py +1 -1
  77. claude_mpm/services/project/archive_manager.py +159 -96
  78. claude_mpm/services/project/documentation_manager.py +64 -45
  79. claude_mpm/services/project/enhanced_analyzer.py +132 -89
  80. claude_mpm/services/project/project_organizer.py +225 -131
  81. claude_mpm/services/response_tracker.py +1 -1
  82. claude_mpm/services/shared/__init__.py +2 -1
  83. claude_mpm/services/shared/service_factory.py +8 -5
  84. claude_mpm/services/socketio/server/eventbus_integration.py +1 -1
  85. claude_mpm/services/unified/__init__.py +1 -1
  86. claude_mpm/services/unified/analyzer_strategies/__init__.py +3 -3
  87. claude_mpm/services/unified/analyzer_strategies/code_analyzer.py +97 -53
  88. claude_mpm/services/unified/analyzer_strategies/dependency_analyzer.py +81 -40
  89. claude_mpm/services/unified/analyzer_strategies/performance_analyzer.py +277 -178
  90. claude_mpm/services/unified/analyzer_strategies/security_analyzer.py +196 -112
  91. claude_mpm/services/unified/analyzer_strategies/structure_analyzer.py +83 -49
  92. claude_mpm/services/unified/config_strategies/__init__.py +175 -0
  93. claude_mpm/services/unified/config_strategies/config_schema.py +735 -0
  94. claude_mpm/services/unified/config_strategies/context_strategy.py +750 -0
  95. claude_mpm/services/unified/config_strategies/error_handling_strategy.py +1009 -0
  96. claude_mpm/services/unified/config_strategies/file_loader_strategy.py +879 -0
  97. claude_mpm/services/unified/config_strategies/unified_config_service.py +814 -0
  98. claude_mpm/services/unified/config_strategies/validation_strategy.py +1144 -0
  99. claude_mpm/services/unified/deployment_strategies/__init__.py +7 -7
  100. claude_mpm/services/unified/deployment_strategies/base.py +24 -28
  101. claude_mpm/services/unified/deployment_strategies/cloud_strategies.py +168 -88
  102. claude_mpm/services/unified/deployment_strategies/local.py +49 -34
  103. claude_mpm/services/unified/deployment_strategies/utils.py +39 -43
  104. claude_mpm/services/unified/deployment_strategies/vercel.py +30 -24
  105. claude_mpm/services/unified/interfaces.py +0 -26
  106. claude_mpm/services/unified/migration.py +17 -40
  107. claude_mpm/services/unified/strategies.py +9 -26
  108. claude_mpm/services/unified/unified_analyzer.py +48 -44
  109. claude_mpm/services/unified/unified_config.py +21 -19
  110. claude_mpm/services/unified/unified_deployment.py +21 -26
  111. claude_mpm/storage/state_storage.py +1 -0
  112. claude_mpm/utils/agent_dependency_loader.py +18 -6
  113. claude_mpm/utils/common.py +14 -12
  114. claude_mpm/utils/database_connector.py +15 -12
  115. claude_mpm/utils/error_handler.py +1 -0
  116. claude_mpm/utils/log_cleanup.py +1 -0
  117. claude_mpm/utils/path_operations.py +1 -0
  118. claude_mpm/utils/session_logging.py +1 -1
  119. claude_mpm/utils/subprocess_utils.py +1 -0
  120. claude_mpm/validation/agent_validator.py +1 -1
  121. {claude_mpm-4.4.0.dist-info → claude_mpm-4.4.4.dist-info}/METADATA +23 -17
  122. {claude_mpm-4.4.0.dist-info → claude_mpm-4.4.4.dist-info}/RECORD +126 -105
  123. claude_mpm/cli/commands/configure_tui.py +0 -1927
  124. claude_mpm/services/mcp_gateway/tools/ticket_tools.py +0 -645
  125. claude_mpm/services/mcp_gateway/tools/unified_ticket_tool.py +0 -602
  126. {claude_mpm-4.4.0.dist-info → claude_mpm-4.4.4.dist-info}/WHEEL +0 -0
  127. {claude_mpm-4.4.0.dist-info → claude_mpm-4.4.4.dist-info}/entry_points.txt +0 -0
  128. {claude_mpm-4.4.0.dist-info → claude_mpm-4.4.4.dist-info}/licenses/LICENSE +0 -0
  129. {claude_mpm-4.4.0.dist-info → claude_mpm-4.4.4.dist-info}/top_level.txt +0 -0
claude_mpm/services/unified/config_strategies/error_handling_strategy.py (new file)
@@ -0,0 +1,1009 @@
1
+ """
2
+ Error Handling Strategy - Unifies 99 error handling patterns into composable handlers
3
+ Part of Phase 3 Configuration Consolidation
4
+ """
5
+
6
+ import json
7
+ import traceback
8
+ from abc import ABC, abstractmethod
9
+ from dataclasses import dataclass, field
10
+ from datetime import datetime
11
+ from enum import Enum
12
+ from pathlib import Path
13
+ from typing import Any, Callable, Dict, List, Optional, Type, Union
14
+
15
+ from claude_mpm.core.logging_utils import get_logger
16
+
17
+ from .unified_config_service import IConfigStrategy
18
+
19
+
20
+ class ErrorSeverity(Enum):
21
+ """Error severity levels"""
22
+
23
+ CRITICAL = "critical" # System failure
24
+ ERROR = "error" # Operation failure
25
+ WARNING = "warning" # Recoverable issue
26
+ INFO = "info" # Informational
27
+ DEBUG = "debug" # Debug information
28
+
29
+
30
+ class ErrorCategory(Enum):
31
+ """Categories of errors for handling strategy"""
32
+
33
+ FILE_IO = "file_io"
34
+ PARSING = "parsing"
35
+ VALIDATION = "validation"
36
+ NETWORK = "network"
37
+ PERMISSION = "permission"
38
+ TYPE_CONVERSION = "type_conversion"
39
+ MISSING_DEPENDENCY = "missing_dependency"
40
+ CONFIGURATION = "configuration"
41
+ RUNTIME = "runtime"
42
+ UNKNOWN = "unknown"
43
+
44
+
45
+ @dataclass
46
+ class ErrorContext:
47
+ """Context information for error handling"""
48
+
49
+ error: Exception
50
+ category: ErrorCategory
51
+ severity: ErrorSeverity
52
+ source: Optional[str] = None
53
+ operation: Optional[str] = None
54
+ timestamp: datetime = field(default_factory=datetime.now)
55
+ traceback: Optional[str] = None
56
+ metadata: Dict[str, Any] = field(default_factory=dict)
57
+ recovery_attempted: bool = False
58
+ recovery_successful: bool = False
59
+
60
+
61
+ @dataclass
62
+ class ErrorHandlingResult:
63
+ """Result of error handling operation"""
64
+
65
+ handled: bool
66
+ recovered: bool = False
67
+ fallback_value: Any = None
68
+ should_retry: bool = False
69
+ retry_after: Optional[int] = None # seconds
70
+ should_escalate: bool = False
71
+ message: Optional[str] = None
72
+ actions_taken: List[str] = field(default_factory=list)
73
+
74
+
75
+ class BaseErrorHandler(ABC):
76
+ """Base class for all error handlers"""
77
+
78
+ def __init__(self):
79
+ self.logger = get_logger(self.__class__.__name__)
80
+
81
+ @abstractmethod
82
+ def can_handle(self, context: ErrorContext) -> bool:
83
+ """Check if this handler can handle the error"""
84
+
85
+ @abstractmethod
86
+ def handle(self, context: ErrorContext) -> ErrorHandlingResult:
87
+ """Handle the error"""
88
+
89
+ def log_error(self, context: ErrorContext, message: str = None):
90
+ """Log error with appropriate level"""
91
+ log_message = message or str(context.error)
92
+
93
+ if context.severity == ErrorSeverity.CRITICAL:
94
+ self.logger.critical(log_message)
95
+ elif context.severity == ErrorSeverity.ERROR:
96
+ self.logger.error(log_message)
97
+ elif context.severity == ErrorSeverity.WARNING:
98
+ self.logger.warning(log_message)
99
+ elif context.severity == ErrorSeverity.INFO:
100
+ self.logger.info(log_message)
101
+ else:
102
+ self.logger.debug(log_message)
103
+
104
+
105
+ class FileIOErrorHandler(BaseErrorHandler):
106
+ """Handles file I/O errors - consolidates 18 file error patterns"""
107
+
108
+ ERROR_MAPPING = {
109
+ FileNotFoundError: "File not found",
110
+ PermissionError: "Permission denied",
111
+ IsADirectoryError: "Path is a directory",
112
+ NotADirectoryError: "Path is not a directory",
113
+ IOError: "I/O operation failed",
114
+ OSError: "Operating system error",
115
+ }
116
+
117
+ def can_handle(self, context: ErrorContext) -> bool:
118
+ """Check if error is file I/O related"""
119
+ return context.category == ErrorCategory.FILE_IO or isinstance(
120
+ context.error, (FileNotFoundError, PermissionError, IOError, OSError)
121
+ )
122
+
123
+ def handle(self, context: ErrorContext) -> ErrorHandlingResult:
124
+ """Handle file I/O errors with recovery strategies"""
125
+ result = ErrorHandlingResult(handled=True)
126
+
127
+ error_type = type(context.error)
128
+ error_message = self.ERROR_MAPPING.get(error_type, "Unknown file error")
129
+
130
+ # Log the error
131
+ self.log_error(context, f"{error_message}: {context.source}")
132
+
133
+ # Try recovery strategies
134
+ if isinstance(context.error, FileNotFoundError):
135
+ result = self._handle_file_not_found(context)
136
+ elif isinstance(context.error, PermissionError):
137
+ result = self._handle_permission_error(context)
138
+ else:
139
+ result = self._handle_generic_io_error(context)
140
+
141
+ result.actions_taken.append(f"Handled {error_type.__name__}")
142
+ return result
143
+
144
+ def _handle_file_not_found(self, context: ErrorContext) -> ErrorHandlingResult:
145
+ """Handle file not found errors"""
146
+ result = ErrorHandlingResult(handled=True)
147
+
148
+ # Check for fallback locations
149
+ if context.metadata.get("fallback_paths"):
150
+ for fallback in context.metadata["fallback_paths"]:
151
+ fallback_path = Path(fallback)
152
+ if fallback_path.exists():
153
+ result.recovered = True
154
+ result.fallback_value = str(fallback_path)
155
+ result.actions_taken.append(f"Used fallback path: {fallback_path}")
156
+ self.logger.info(f"Using fallback configuration: {fallback_path}")
157
+ return result
158
+
159
+ # Check for default values
160
+ if context.metadata.get("default_config"):
161
+ result.recovered = True
162
+ result.fallback_value = context.metadata["default_config"]
163
+ result.actions_taken.append("Used default configuration")
164
+ return result
165
+
166
+ # Create file if requested
167
+ if context.metadata.get("create_if_missing"):
168
+ path = Path(context.source)
169
+ try:
170
+ path.parent.mkdir(parents=True, exist_ok=True)
171
+
172
+ # Create with default content
173
+ default_content = context.metadata.get("default_content", {})
174
+
175
+ if path.suffix == ".json":
176
+ path.write_text(json.dumps(default_content, indent=2))
177
+ else:
178
+ path.write_text(str(default_content))
179
+
180
+ result.recovered = True
181
+ result.should_retry = True
182
+ result.actions_taken.append(f"Created missing file: {path}")
183
+ self.logger.info(f"Created missing configuration file: {path}")
184
+
185
+ except Exception as e:
186
+ self.logger.error(f"Failed to create file: {e}")
187
+ result.should_escalate = True
188
+
189
+ return result
190
+
191
+ def _handle_permission_error(self, context: ErrorContext) -> ErrorHandlingResult:
192
+ """Handle permission errors"""
193
+ result = ErrorHandlingResult(handled=True)
194
+
195
+ # Try alternative location
196
+ if context.metadata.get("alt_location"):
197
+ alt_path = Path(context.metadata["alt_location"])
198
+ try:
199
+ # Test write permission
200
+ alt_path.parent.mkdir(parents=True, exist_ok=True)
201
+ test_file = alt_path.parent / ".test_write"
202
+ test_file.touch()
203
+ test_file.unlink()
204
+
205
+ result.recovered = True
206
+ result.fallback_value = str(alt_path)
207
+ result.actions_taken.append(f"Using alternative location: {alt_path}")
208
+
209
+ except:
210
+ result.should_escalate = True
211
+
212
+ # Use read-only mode if applicable
213
+ elif context.metadata.get("allow_readonly"):
214
+ result.recovered = True
215
+ result.fallback_value = {"readonly": True}
216
+ result.actions_taken.append("Switched to read-only mode")
217
+
218
+ return result
219
+
220
+ def _handle_generic_io_error(self, context: ErrorContext) -> ErrorHandlingResult:
221
+ """Handle generic I/O errors"""
222
+ result = ErrorHandlingResult(handled=True)
223
+
224
+ # Retry with exponential backoff
225
+ retry_count = context.metadata.get("retry_count", 0)
226
+ max_retries = context.metadata.get("max_retries", 3)
227
+
228
+ if retry_count < max_retries:
229
+ result.should_retry = True
230
+ result.retry_after = 2**retry_count # Exponential backoff
231
+ result.actions_taken.append(
232
+ f"Retry {retry_count + 1}/{max_retries} after {result.retry_after}s"
233
+ )
234
+ else:
235
+ result.should_escalate = True
236
+ result.message = f"Failed after {max_retries} retries"
237
+
238
+ return result
239
+
240
+
241
+ class ParsingErrorHandler(BaseErrorHandler):
242
+ """Handles parsing errors - consolidates 22 parsing error patterns"""
243
+
244
+ PARSER_ERRORS = {
245
+ json.JSONDecodeError: ErrorCategory.PARSING,
246
+ ValueError: ErrorCategory.PARSING, # Common for parsing
247
+ SyntaxError: ErrorCategory.PARSING,
248
+ }
249
+
250
+ def can_handle(self, context: ErrorContext) -> bool:
251
+ """Check if error is parsing related"""
252
+ return (
253
+ context.category == ErrorCategory.PARSING
254
+ or type(context.error) in self.PARSER_ERRORS
255
+ or "parse" in str(context.error).lower()
256
+ or "decode" in str(context.error).lower()
257
+ )
258
+
259
+ def handle(self, context: ErrorContext) -> ErrorHandlingResult:
260
+ """Handle parsing errors with recovery strategies"""
261
+ result = ErrorHandlingResult(handled=True)
262
+
263
+ # Try recovery strategies based on error type
264
+ if isinstance(context.error, json.JSONDecodeError):
265
+ result = self._handle_json_error(context)
266
+ elif "yaml" in str(context.error).lower():
267
+ result = self._handle_yaml_error(context)
268
+ else:
269
+ result = self._handle_generic_parse_error(context)
270
+
271
+ return result
272
+
273
+ def _handle_json_error(self, context: ErrorContext) -> ErrorHandlingResult:
274
+ """Handle JSON parsing errors"""
275
+ result = ErrorHandlingResult(handled=True)
276
+
277
+ content = context.metadata.get("content", "")
278
+
279
+ # Try to fix common JSON issues
280
+ fixes = [
281
+ self._fix_json_comments,
282
+ self._fix_json_quotes,
283
+ self._fix_json_trailing_commas,
284
+ self._fix_json_unquoted_keys,
285
+ ]
286
+
287
+ for fix_func in fixes:
288
+ try:
289
+ fixed_content = fix_func(content)
290
+ parsed = json.loads(fixed_content)
291
+ result.recovered = True
292
+ result.fallback_value = parsed
293
+ result.actions_taken.append(f"Fixed JSON with {fix_func.__name__}")
294
+ self.logger.info(f"Recovered from JSON error using {fix_func.__name__}")
295
+ return result
296
+ except:
297
+ continue
298
+
299
+ # Use lenient parser if available
300
+ if context.metadata.get("allow_lenient"):
301
+ result = self._parse_lenient_json(content, result)
302
+
303
+ return result
304
+
305
+ def _fix_json_comments(self, content: str) -> str:
306
+ """Remove comments from JSON"""
307
+ import re
308
+
309
+ # Remove single-line comments
310
+ content = re.sub(r"//.*?$", "", content, flags=re.MULTILINE)
311
+ # Remove multi-line comments
312
+ content = re.sub(r"/\*.*?\*/", "", content, flags=re.DOTALL)
313
+ return content
314
+
315
+ def _fix_json_quotes(self, content: str) -> str:
316
+ """Fix quote issues in JSON"""
317
+ import re
318
+
319
+ # Replace single quotes with double quotes (careful with values)
320
+ # This is a simple approach - more sophisticated parsing might be needed
321
+ content = re.sub(r"'([^']*)':", r'"\1":', content) # Keys
322
+ content = re.sub(r":\s*'([^']*)'", r': "\1"', content) # Values
323
+ return content
324
+
325
+ def _fix_json_trailing_commas(self, content: str) -> str:
326
+ """Remove trailing commas"""
327
+ import re
328
+
329
+ content = re.sub(r",\s*}", "}", content)
330
+ content = re.sub(r",\s*]", "]", content)
331
+ return content
332
+
333
+ def _fix_json_unquoted_keys(self, content: str) -> str:
334
+ """Add quotes to unquoted keys"""
335
+ import re
336
+
337
+ # Match unquoted keys (word characters followed by colon)
338
+ content = re.sub(r"(\w+):", r'"\1":', content)
339
+ return content
340
+
341
+ def _parse_lenient_json(
342
+ self, content: str, result: ErrorHandlingResult
343
+ ) -> ErrorHandlingResult:
344
+ """Parse JSON leniently"""
345
+ try:
346
+ # Try using ast.literal_eval for Python literals
347
+ import ast
348
+
349
+ parsed = ast.literal_eval(content)
350
+ result.recovered = True
351
+ result.fallback_value = parsed
352
+ result.actions_taken.append("Parsed as Python literal")
353
+ except:
354
+ # Return empty dict as last resort
355
+ result.recovered = True
356
+ result.fallback_value = {}
357
+ result.actions_taken.append("Used empty configuration as fallback")
358
+
359
+ return result
360
+
361
+ def _handle_yaml_error(self, context: ErrorContext) -> ErrorHandlingResult:
362
+ """Handle YAML parsing errors"""
363
+ result = ErrorHandlingResult(handled=True)
364
+
365
+ content = context.metadata.get("content", "")
366
+
367
+ # Try to fix common YAML issues
368
+ try:
369
+ import yaml
370
+
371
+ # Try with safe loader
372
+ parsed = yaml.safe_load(content)
373
+ result.recovered = True
374
+ result.fallback_value = parsed
375
+ result.actions_taken.append("Parsed with safe YAML loader")
376
+
377
+ except:
378
+ # Try to fix tabs
379
+ content = content.replace("\t", " ")
380
+ try:
381
+ parsed = yaml.safe_load(content)
382
+ result.recovered = True
383
+ result.fallback_value = parsed
384
+ result.actions_taken.append("Fixed YAML tabs")
385
+ except:
386
+ result.fallback_value = {}
387
+ result.actions_taken.append("Used empty configuration as fallback")
388
+
389
+ return result
390
+
391
+ def _handle_generic_parse_error(self, context: ErrorContext) -> ErrorHandlingResult:
392
+ """Handle generic parsing errors"""
393
+ result = ErrorHandlingResult(handled=True)
394
+
395
+ # Try alternative formats
396
+ content = context.metadata.get("content", "")
397
+
398
+ formats = [
399
+ ("json", json.loads),
400
+ ("yaml", self._try_yaml),
401
+ ("ini", self._try_ini),
402
+ ("properties", self._try_properties),
403
+ ]
404
+
405
+ for format_name, parser in formats:
406
+ try:
407
+ parsed = parser(content)
408
+ if parsed:
409
+ result.recovered = True
410
+ result.fallback_value = parsed
411
+ result.actions_taken.append(f"Parsed as {format_name}")
412
+ return result
413
+ except:
414
+ continue
415
+
416
+ # Use default/empty config
417
+ result.recovered = True
418
+ result.fallback_value = context.metadata.get("default_config", {})
419
+ result.actions_taken.append("Used default configuration")
420
+
421
+ return result
422
+
423
+ def _try_yaml(self, content: str) -> Dict:
424
+ """Try parsing as YAML"""
425
+ import yaml
426
+
427
+ return yaml.safe_load(content)
428
+
429
+ def _try_ini(self, content: str) -> Dict:
430
+ """Try parsing as INI"""
431
+ import configparser
432
+
433
+ parser = configparser.ConfigParser()
434
+ parser.read_string(content)
435
+ return {s: dict(parser.items(s)) for s in parser.sections()}
436
+
437
+ def _try_properties(self, content: str) -> Dict:
438
+ """Try parsing as properties file"""
439
+ result = {}
440
+ for line in content.splitlines():
441
+ line = line.strip()
442
+ if line and not line.startswith("#") and "=" in line:
443
+ key, value = line.split("=", 1)
444
+ result[key.strip()] = value.strip()
445
+ return result
446
+
447
+
448
+ class ValidationErrorHandler(BaseErrorHandler):
449
+ """Handles validation errors - consolidates 15 validation error patterns"""
450
+
451
+ def can_handle(self, context: ErrorContext) -> bool:
452
+ """Check if error is validation related"""
453
+ return (
454
+ context.category == ErrorCategory.VALIDATION
455
+ or "validation" in str(context.error).lower()
456
+ or "invalid" in str(context.error).lower()
457
+ or "constraint" in str(context.error).lower()
458
+ )
459
+
460
+ def handle(self, context: ErrorContext) -> ErrorHandlingResult:
461
+ """Handle validation errors"""
462
+ result = ErrorHandlingResult(handled=True)
463
+
464
+ # Get validation details
465
+ field = context.metadata.get("field")
466
+ value = context.metadata.get("value")
467
+ schema = context.metadata.get("schema")
468
+
469
+ # Try to fix or provide default
470
+ if field and schema:
471
+ result = self._fix_validation_error(field, value, schema, result)
472
+ else:
473
+ result = self._handle_generic_validation(context, result)
474
+
475
+ return result
476
+
477
+ def _fix_validation_error(
478
+ self, field: str, value: Any, schema: Dict, result: ErrorHandlingResult
479
+ ) -> ErrorHandlingResult:
480
+ """Try to fix validation error"""
481
+ field_schema = schema.get("properties", {}).get(field, {})
482
+
483
+ # Try type coercion
484
+ if "type" in field_schema:
485
+ expected_type = field_schema["type"]
486
+ coerced = self._coerce_type(value, expected_type)
487
+
488
+ if coerced is not None:
489
+ result.recovered = True
490
+ result.fallback_value = {field: coerced}
491
+ result.actions_taken.append(f"Coerced {field} to {expected_type}")
492
+ return result
493
+
494
+ # Use default value if available
495
+ if "default" in field_schema:
496
+ result.recovered = True
497
+ result.fallback_value = {field: field_schema["default"]}
498
+ result.actions_taken.append(f"Used default value for {field}")
499
+ return result
500
+
501
+ # Use minimum/maximum for range errors
502
+ if "minimum" in field_schema and isinstance(value, (int, float)):
503
+ if value < field_schema["minimum"]:
504
+ result.recovered = True
505
+ result.fallback_value = {field: field_schema["minimum"]}
506
+ result.actions_taken.append(f"Clamped {field} to minimum")
507
+ return result
508
+
509
+ if "maximum" in field_schema and isinstance(value, (int, float)):
510
+ if value > field_schema["maximum"]:
511
+ result.recovered = True
512
+ result.fallback_value = {field: field_schema["maximum"]}
513
+ result.actions_taken.append(f"Clamped {field} to maximum")
514
+ return result
515
+
516
+ return result
517
+
518
+ def _coerce_type(self, value: Any, expected_type: str) -> Any:
519
+ """Attempt to coerce value to expected type"""
520
+ try:
521
+ if expected_type == "string":
522
+ return str(value)
523
+ if expected_type == "integer":
524
+ return int(value)
525
+ if expected_type == "number":
526
+ return float(value)
527
+ if expected_type == "boolean":
528
+ if isinstance(value, str):
529
+ return value.lower() in ["true", "yes", "1", "on"]
530
+ return bool(value)
531
+ if expected_type == "array":
532
+ if isinstance(value, str):
533
+ # Try comma-separated
534
+ return [v.strip() for v in value.split(",")]
535
+ return list(value)
536
+ if expected_type == "object":
537
+ if isinstance(value, str):
538
+ return json.loads(value)
539
+ return dict(value)
540
+ except:
541
+ return None
542
+
543
+ def _handle_generic_validation(
544
+ self, context: ErrorContext, result: ErrorHandlingResult
545
+ ) -> ErrorHandlingResult:
546
+ """Handle generic validation errors"""
547
+ # Use strict vs lenient mode
548
+ if context.metadata.get("strict", True):
549
+ result.should_escalate = True
550
+ result.message = "Validation failed in strict mode"
551
+ else:
552
+ # In lenient mode, use config as-is with warnings
553
+ result.recovered = True
554
+ result.fallback_value = context.metadata.get("config", {})
555
+ result.actions_taken.append("Accepted configuration in lenient mode")
556
+ self.logger.warning(
557
+ f"Validation error ignored in lenient mode: {context.error}"
558
+ )
559
+
560
+ return result
561
+
562
+
563
+ class NetworkErrorHandler(BaseErrorHandler):
564
+ """Handles network-related errors - consolidates 12 network error patterns"""
565
+
566
+ NETWORK_ERRORS = [
567
+ ConnectionError,
568
+ TimeoutError,
569
+ ConnectionRefusedError,
570
+ ConnectionResetError,
571
+ BrokenPipeError,
572
+ ]
573
+
574
+ def can_handle(self, context: ErrorContext) -> bool:
575
+ """Check if error is network related"""
576
+ return (
577
+ context.category == ErrorCategory.NETWORK
578
+ or any(isinstance(context.error, err) for err in self.NETWORK_ERRORS)
579
+ or "connection" in str(context.error).lower()
580
+ or "timeout" in str(context.error).lower()
581
+ )
582
+
583
+ def handle(self, context: ErrorContext) -> ErrorHandlingResult:
584
+ """Handle network errors with retry logic"""
585
+ result = ErrorHandlingResult(handled=True)
586
+
587
+ # Implement exponential backoff retry
588
+ retry_count = context.metadata.get("retry_count", 0)
589
+ max_retries = context.metadata.get("max_retries", 5)
590
+
591
+ if retry_count < max_retries:
592
+ # Calculate backoff time
593
+ backoff = min(300, 2**retry_count) # Max 5 minutes
594
+ result.should_retry = True
595
+ result.retry_after = backoff
596
+ result.actions_taken.append(
597
+ f"Retry {retry_count + 1}/{max_retries} after {backoff}s"
598
+ )
599
+
600
+ # Add jitter to prevent thundering herd
601
+ import random
602
+
603
+ result.retry_after += random.uniform(0, backoff * 0.1)
604
+
605
+ # Try offline/cached mode
606
+ elif context.metadata.get("cache_available"):
607
+ result.recovered = True
608
+ result.fallback_value = context.metadata.get("cached_config")
609
+ result.actions_taken.append("Using cached configuration")
610
+ else:
611
+ result.should_escalate = True
612
+ result.message = f"Network error after {max_retries} retries"
613
+
614
+ return result
615
+
616
+
617
+ class TypeConversionErrorHandler(BaseErrorHandler):
618
+ """Handles type conversion errors - consolidates 10 type conversion patterns"""
619
+
620
+ def can_handle(self, context: ErrorContext) -> bool:
621
+ """Check if error is type conversion related"""
622
+ return (
623
+ context.category == ErrorCategory.TYPE_CONVERSION
624
+ or isinstance(context.error, (TypeError, ValueError))
625
+ or "type" in str(context.error).lower()
626
+ or "convert" in str(context.error).lower()
627
+ )
628
+
629
+ def handle(self, context: ErrorContext) -> ErrorHandlingResult:
630
+ """Handle type conversion errors"""
631
+ result = ErrorHandlingResult(handled=True)
632
+
633
+ source_value = context.metadata.get("value")
634
+ target_type = context.metadata.get("target_type")
635
+
636
+ if source_value is not None and target_type:
637
+ # Try intelligent conversion
638
+ converted = self._smart_convert(source_value, target_type)
639
+
640
+ if converted is not None:
641
+ result.recovered = True
642
+ result.fallback_value = converted
643
+ result.actions_taken.append(f"Converted to {target_type}")
644
+ else:
645
+ # Use default for type
646
+ default = self._get_type_default(target_type)
647
+ result.recovered = True
648
+ result.fallback_value = default
649
+ result.actions_taken.append(f"Used default for {target_type}")
650
+
651
+ return result
652
+
653
+ def _smart_convert(self, value: Any, target_type: Type) -> Any:
654
+ """Smart type conversion with fallbacks"""
655
+ converters = {
656
+ str: self._to_string,
657
+ int: self._to_int,
658
+ float: self._to_float,
659
+ bool: self._to_bool,
660
+ list: self._to_list,
661
+ dict: self._to_dict,
662
+ }
663
+
664
+ converter = converters.get(target_type)
665
+ if converter:
666
+ try:
667
+ return converter(value)
668
+ except:
669
+ pass
670
+
671
+ return None
672
+
673
+ def _to_string(self, value: Any) -> str:
674
+ """Convert to string"""
675
+ if isinstance(value, bytes):
676
+ return value.decode("utf-8", errors="replace")
677
+ return str(value)
678
+
679
+ def _to_int(self, value: Any) -> int:
680
+ """Convert to integer"""
681
+ if isinstance(value, str):
682
+ # Try to extract number from string
683
+ import re
684
+
685
+ match = re.search(r"-?\d+", value)
686
+ if match:
687
+ return int(match.group())
688
+ return int(float(value))
689
+
690
+ def _to_float(self, value: Any) -> float:
691
+ """Convert to float"""
692
+ if isinstance(value, str):
693
+ # Handle percentage
694
+ if "%" in value:
695
+ return float(value.replace("%", "")) / 100
696
+ # Handle comma as decimal separator
697
+ value = value.replace(",", ".")
698
+ return float(value)
699
+
700
+ def _to_bool(self, value: Any) -> bool:
701
+ """Convert to boolean"""
702
+ if isinstance(value, str):
703
+ return value.lower() in ["true", "yes", "1", "on", "enabled"]
704
+ return bool(value)
705
+
706
+ def _to_list(self, value: Any) -> list:
707
+ """Convert to list"""
708
+ if isinstance(value, str):
709
+ # Try JSON array
710
+ if value.startswith("["):
711
+ try:
712
+ return json.loads(value)
713
+ except:
714
+ pass
715
+ # Try comma-separated
716
+ return [v.strip() for v in value.split(",")]
717
+ if hasattr(value, "__iter__") and not isinstance(value, (str, bytes, dict)):
718
+ return list(value)
719
+ return [value]
720
+
721
+ def _to_dict(self, value: Any) -> dict:
722
+ """Convert to dictionary"""
723
+ if isinstance(value, str):
724
+ # Try JSON object
725
+ try:
726
+ return json.loads(value)
727
+ except:
728
+ pass
729
+ # Try key=value pairs
730
+ result = {}
731
+ for pair in value.split(","):
732
+ if "=" in pair:
733
+ k, v = pair.split("=", 1)
734
+ result[k.strip()] = v.strip()
735
+ return result
736
+ if hasattr(value, "__dict__"):
737
+ return vars(value)
738
+ return {}
739
+
740
+ def _get_type_default(self, target_type: Type) -> Any:
741
+ """Get default value for type"""
742
+ defaults = {
743
+ str: "",
744
+ int: 0,
745
+ float: 0.0,
746
+ bool: False,
747
+ list: [],
748
+ dict: {},
749
+ type(None): None,
750
+ }
751
+ return defaults.get(target_type)
752
+
753
+
754
+ class CompositeErrorHandler(BaseErrorHandler):
755
+ """Orchestrates multiple error handlers - consolidates 22 composite patterns"""
756
+
757
+ def __init__(self):
758
+ super().__init__()
759
+ self.handlers = [
760
+ FileIOErrorHandler(),
761
+ ParsingErrorHandler(),
762
+ ValidationErrorHandler(),
763
+ NetworkErrorHandler(),
764
+ TypeConversionErrorHandler(),
765
+ ]
766
+
767
+ def can_handle(self, context: ErrorContext) -> bool:
768
+ """Composite handler can handle any error"""
769
+ return True
770
+
771
+ def handle(self, context: ErrorContext) -> ErrorHandlingResult:
772
+ """Try multiple handlers in sequence"""
773
+ # First, try specific handlers
774
+ for handler in self.handlers:
775
+ if handler.can_handle(context):
776
+ result = handler.handle(context)
777
+
778
+ if result.recovered or not result.should_escalate:
779
+ return result
780
+
781
+ # If no specific handler worked, use fallback strategies
782
+ return self._handle_unknown_error(context)
783
+
784
+ def _handle_unknown_error(self, context: ErrorContext) -> ErrorHandlingResult:
785
+ """Handle unknown errors with generic strategies"""
786
+ result = ErrorHandlingResult(handled=True)
787
+
788
+ # Log the full error
789
+ self.logger.error(
790
+ f"Unknown error in {context.operation}: {context.error}", exc_info=True
791
+ )
792
+
793
+ # Try generic recovery strategies
794
+ if context.metadata.get("default_config"):
795
+ result.recovered = True
796
+ result.fallback_value = context.metadata["default_config"]
797
+ result.actions_taken.append("Used default configuration for unknown error")
798
+ elif context.metadata.get("skip_on_error"):
799
+ result.recovered = True
800
+ result.fallback_value = {}
801
+ result.actions_taken.append("Skipped configuration due to error")
802
+ else:
803
+ result.should_escalate = True
804
+ result.message = f"Unhandled error: {context.error}"
805
+
806
+ return result
807
+
808
+
809
+ class ErrorHandlingStrategy(IConfigStrategy):
810
+ """
811
+ Main error handling strategy
812
+ Unifies 99 error handling patterns into composable handlers
813
+ """
814
+
815
+ def __init__(self):
816
+ self.logger = get_logger(self.__class__.__name__)
817
+ self.composite_handler = CompositeErrorHandler()
818
+ self.error_history: List[ErrorContext] = []
819
+ self.recovery_strategies: Dict[str, Callable] = {}
820
+
821
+ def can_handle(self, source: Union[str, Path, Dict]) -> bool:
822
+ """Error handler can handle any source"""
823
+ return True
824
+
825
+ def load(self, source: Any, **kwargs) -> Dict[str, Any]:
826
+ """Not used for error handling"""
827
+ return {}
828
+
829
+ def validate(self, config: Dict[str, Any], schema: Optional[Dict] = None) -> bool:
830
+ """Validate with error handling"""
831
+ return True
832
+
833
+ def transform(self, config: Dict[str, Any]) -> Dict[str, Any]:
834
+ """Transform config with error handling"""
835
+ return config
836
+
837
+ def handle_error(
838
+ self,
839
+ error: Exception,
840
+ source: Optional[str] = None,
841
+ operation: Optional[str] = None,
842
+ **metadata,
843
+ ) -> ErrorHandlingResult:
844
+ """Main error handling entry point"""
845
+ # Categorize error
846
+ category = self._categorize_error(error)
847
+ severity = self._determine_severity(error, category)
848
+
849
+ # Create error context
850
+ context = ErrorContext(
851
+ error=error,
852
+ category=category,
853
+ severity=severity,
854
+ source=source,
855
+ operation=operation,
856
+ traceback=traceback.format_exc(),
857
+ metadata=metadata,
858
+ )
859
+
860
+ # Record in history
861
+ self.error_history.append(context)
862
+
863
+ # Handle the error
864
+ result = self.composite_handler.handle(context)
865
+
866
+ # Apply recovery strategies if needed
867
+ if not result.recovered and self.recovery_strategies:
868
+ result = self._apply_recovery_strategies(context, result)
869
+
870
+ # Update context
871
+ context.recovery_attempted = result.recovered or result.should_retry
872
+ context.recovery_successful = result.recovered
873
+
874
+ return result
875
+
876
+ def _categorize_error(self, error: Exception) -> ErrorCategory:
877
+ """Categorize the error type"""
878
+ error_type = type(error)
879
+
880
+ # File I/O errors
881
+ if isinstance(error, (FileNotFoundError, PermissionError, IOError, OSError)):
882
+ return ErrorCategory.FILE_IO
883
+
884
+ # Parsing errors
885
+ if isinstance(error, (json.JSONDecodeError, ValueError, SyntaxError)):
886
+ if "parse" in str(error).lower() or "decode" in str(error).lower():
887
+ return ErrorCategory.PARSING
888
+
889
+ # Network errors
890
+ if isinstance(error, (ConnectionError, TimeoutError)):
891
+ return ErrorCategory.NETWORK
892
+
893
+ # Type conversion errors
894
+ if isinstance(error, TypeError):
895
+ return ErrorCategory.TYPE_CONVERSION
896
+
897
+ # Check error message for hints
898
+ error_msg = str(error).lower()
899
+
900
+ if "validation" in error_msg or "invalid" in error_msg:
901
+ return ErrorCategory.VALIDATION
902
+ if "permission" in error_msg or "access" in error_msg:
903
+ return ErrorCategory.PERMISSION
904
+ if "not found" in error_msg or "missing" in error_msg:
905
+ return ErrorCategory.MISSING_DEPENDENCY
906
+ if "config" in error_msg or "setting" in error_msg:
907
+ return ErrorCategory.CONFIGURATION
908
+
909
+ return ErrorCategory.UNKNOWN
910
+
911
+ def _determine_severity(
912
+ self, error: Exception, category: ErrorCategory
913
+ ) -> ErrorSeverity:
914
+ """Determine error severity"""
915
+ # Critical errors
916
+ critical_types = [MemoryError, SystemError, KeyboardInterrupt]
917
+ if type(error) in critical_types:
918
+ return ErrorSeverity.CRITICAL
919
+
920
+ # Category-based severity
921
+ severity_map = {
922
+ ErrorCategory.FILE_IO: ErrorSeverity.ERROR,
923
+ ErrorCategory.PARSING: ErrorSeverity.WARNING,
924
+ ErrorCategory.VALIDATION: ErrorSeverity.WARNING,
925
+ ErrorCategory.NETWORK: ErrorSeverity.ERROR,
926
+ ErrorCategory.PERMISSION: ErrorSeverity.ERROR,
927
+ ErrorCategory.TYPE_CONVERSION: ErrorSeverity.WARNING,
928
+ ErrorCategory.MISSING_DEPENDENCY: ErrorSeverity.ERROR,
929
+ ErrorCategory.CONFIGURATION: ErrorSeverity.ERROR,
930
+ ErrorCategory.RUNTIME: ErrorSeverity.ERROR,
931
+ ErrorCategory.UNKNOWN: ErrorSeverity.ERROR,
932
+ }
933
+
934
+ return severity_map.get(category, ErrorSeverity.ERROR)
935
+
936
+ def _apply_recovery_strategies(
937
+ self, context: ErrorContext, result: ErrorHandlingResult
938
+ ) -> ErrorHandlingResult:
939
+ """Apply custom recovery strategies"""
940
+ for name, strategy in self.recovery_strategies.items():
941
+ try:
942
+ recovery_result = strategy(context)
943
+ if recovery_result:
944
+ result.recovered = True
945
+ result.fallback_value = recovery_result
946
+ result.actions_taken.append(f"Applied recovery strategy: {name}")
947
+ return result
948
+ except Exception as e:
949
+ self.logger.debug(f"Recovery strategy {name} failed: {e}")
950
+
951
+ return result
952
+
953
+ def register_recovery_strategy(self, name: str, strategy: Callable):
954
+ """Register a custom recovery strategy"""
955
+ self.recovery_strategies[name] = strategy
956
+ self.logger.debug(f"Registered recovery strategy: {name}")
957
+
958
+ def get_error_statistics(self) -> Dict[str, Any]:
959
+ """Get error handling statistics"""
960
+ if not self.error_history:
961
+ return {
962
+ "total_errors": 0,
963
+ "categories": {},
964
+ "severities": {},
965
+ "recovery_rate": 0.0,
966
+ }
967
+
968
+ total = len(self.error_history)
969
+ recovered = sum(1 for e in self.error_history if e.recovery_successful)
970
+
971
+ categories = {}
972
+ severities = {}
973
+
974
+ for error in self.error_history:
975
+ # Count by category
976
+ cat_name = error.category.value
977
+ categories[cat_name] = categories.get(cat_name, 0) + 1
978
+
979
+ # Count by severity
980
+ sev_name = error.severity.value
981
+ severities[sev_name] = severities.get(sev_name, 0) + 1
982
+
983
+ return {
984
+ "total_errors": total,
985
+ "recovered": recovered,
986
+ "recovery_rate": (recovered / total) * 100 if total > 0 else 0,
987
+ "categories": categories,
988
+ "severities": severities,
989
+ "recent_errors": [
990
+ {
991
+ "timestamp": e.timestamp.isoformat(),
992
+ "category": e.category.value,
993
+ "severity": e.severity.value,
994
+ "operation": e.operation,
995
+ "recovered": e.recovery_successful,
996
+ }
997
+ for e in self.error_history[-10:] # Last 10 errors
998
+ ],
999
+ }
1000
+
1001
+
1002
+ # Export main components
1003
+ __all__ = [
1004
+ "ErrorCategory",
1005
+ "ErrorContext",
1006
+ "ErrorHandlingResult",
1007
+ "ErrorHandlingStrategy",
1008
+ "ErrorSeverity",
1009
+ ]
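Usage note: the sketch below is an illustration, not part of the package diff. It shows how the new ErrorHandlingStrategy entry point added in error_handling_strategy.py might be called, based only on the API visible above (handle_error, register_recovery_strategy, get_error_statistics). The import path is inferred from the file listing, the metadata keys fallback_paths and default_config are the ones read by FileIOErrorHandler in the diffed code, and the JSON config file and its contents are hypothetical.

import json
from pathlib import Path

from claude_mpm.services.unified.config_strategies.error_handling_strategy import (
    ErrorHandlingStrategy,
)

strategy = ErrorHandlingStrategy()

# Optional custom recovery hook; consulted only when no built-in handler recovers.
strategy.register_recovery_strategy(
    "empty_config",
    lambda ctx: {} if ctx.operation == "load_config" else None,
)

path = Path("missing-config.json")  # hypothetical config file
try:
    config = json.loads(path.read_text())
except Exception as exc:  # FileNotFoundError is routed to FileIOErrorHandler
    result = strategy.handle_error(
        exc,
        source=str(path),
        operation="load_config",
        fallback_paths=["./config.json"],      # alternate locations to try first
        default_config={"log_level": "info"},  # last-resort value if none exist
    )
    config = result.fallback_value if result.recovered else {}

print(config)
print(strategy.get_error_statistics()["recovery_rate"])

When recovery is not possible, the returned object reports should_retry or should_escalate instead, mirroring the ErrorHandlingResult fields defined in the diff.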