fraiseql-confiture 0.3.4__cp311-cp311-win_amd64.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- confiture/__init__.py +48 -0
- confiture/_core.cp311-win_amd64.pyd +0 -0
- confiture/cli/__init__.py +0 -0
- confiture/cli/dry_run.py +116 -0
- confiture/cli/lint_formatter.py +193 -0
- confiture/cli/main.py +1656 -0
- confiture/config/__init__.py +0 -0
- confiture/config/environment.py +263 -0
- confiture/core/__init__.py +51 -0
- confiture/core/anonymization/__init__.py +0 -0
- confiture/core/anonymization/audit.py +485 -0
- confiture/core/anonymization/benchmarking.py +372 -0
- confiture/core/anonymization/breach_notification.py +652 -0
- confiture/core/anonymization/compliance.py +617 -0
- confiture/core/anonymization/composer.py +298 -0
- confiture/core/anonymization/data_subject_rights.py +669 -0
- confiture/core/anonymization/factory.py +319 -0
- confiture/core/anonymization/governance.py +737 -0
- confiture/core/anonymization/performance.py +1092 -0
- confiture/core/anonymization/profile.py +284 -0
- confiture/core/anonymization/registry.py +195 -0
- confiture/core/anonymization/security/kms_manager.py +547 -0
- confiture/core/anonymization/security/lineage.py +888 -0
- confiture/core/anonymization/security/token_store.py +686 -0
- confiture/core/anonymization/strategies/__init__.py +41 -0
- confiture/core/anonymization/strategies/address.py +359 -0
- confiture/core/anonymization/strategies/credit_card.py +374 -0
- confiture/core/anonymization/strategies/custom.py +161 -0
- confiture/core/anonymization/strategies/date.py +218 -0
- confiture/core/anonymization/strategies/differential_privacy.py +398 -0
- confiture/core/anonymization/strategies/email.py +141 -0
- confiture/core/anonymization/strategies/format_preserving_encryption.py +310 -0
- confiture/core/anonymization/strategies/hash.py +150 -0
- confiture/core/anonymization/strategies/ip_address.py +235 -0
- confiture/core/anonymization/strategies/masking_retention.py +252 -0
- confiture/core/anonymization/strategies/name.py +298 -0
- confiture/core/anonymization/strategies/phone.py +119 -0
- confiture/core/anonymization/strategies/preserve.py +85 -0
- confiture/core/anonymization/strategies/redact.py +101 -0
- confiture/core/anonymization/strategies/salted_hashing.py +322 -0
- confiture/core/anonymization/strategies/text_redaction.py +183 -0
- confiture/core/anonymization/strategies/tokenization.py +334 -0
- confiture/core/anonymization/strategy.py +241 -0
- confiture/core/anonymization/syncer_audit.py +357 -0
- confiture/core/blue_green.py +683 -0
- confiture/core/builder.py +500 -0
- confiture/core/checksum.py +358 -0
- confiture/core/connection.py +132 -0
- confiture/core/differ.py +522 -0
- confiture/core/drift.py +564 -0
- confiture/core/dry_run.py +182 -0
- confiture/core/health.py +313 -0
- confiture/core/hooks/__init__.py +87 -0
- confiture/core/hooks/base.py +232 -0
- confiture/core/hooks/context.py +146 -0
- confiture/core/hooks/execution_strategies.py +57 -0
- confiture/core/hooks/observability.py +220 -0
- confiture/core/hooks/phases.py +53 -0
- confiture/core/hooks/registry.py +295 -0
- confiture/core/large_tables.py +775 -0
- confiture/core/linting/__init__.py +70 -0
- confiture/core/linting/composer.py +192 -0
- confiture/core/linting/libraries/__init__.py +17 -0
- confiture/core/linting/libraries/gdpr.py +168 -0
- confiture/core/linting/libraries/general.py +184 -0
- confiture/core/linting/libraries/hipaa.py +144 -0
- confiture/core/linting/libraries/pci_dss.py +104 -0
- confiture/core/linting/libraries/sox.py +120 -0
- confiture/core/linting/schema_linter.py +491 -0
- confiture/core/linting/versioning.py +151 -0
- confiture/core/locking.py +389 -0
- confiture/core/migration_generator.py +298 -0
- confiture/core/migrator.py +793 -0
- confiture/core/observability/__init__.py +44 -0
- confiture/core/observability/audit.py +323 -0
- confiture/core/observability/logging.py +187 -0
- confiture/core/observability/metrics.py +174 -0
- confiture/core/observability/tracing.py +192 -0
- confiture/core/pg_version.py +418 -0
- confiture/core/pool.py +406 -0
- confiture/core/risk/__init__.py +39 -0
- confiture/core/risk/predictor.py +188 -0
- confiture/core/risk/scoring.py +248 -0
- confiture/core/rollback_generator.py +388 -0
- confiture/core/schema_analyzer.py +769 -0
- confiture/core/schema_to_schema.py +590 -0
- confiture/core/security/__init__.py +32 -0
- confiture/core/security/logging.py +201 -0
- confiture/core/security/validation.py +416 -0
- confiture/core/signals.py +371 -0
- confiture/core/syncer.py +540 -0
- confiture/exceptions.py +192 -0
- confiture/integrations/__init__.py +0 -0
- confiture/models/__init__.py +0 -0
- confiture/models/lint.py +193 -0
- confiture/models/migration.py +180 -0
- confiture/models/schema.py +203 -0
- confiture/scenarios/__init__.py +36 -0
- confiture/scenarios/compliance.py +586 -0
- confiture/scenarios/ecommerce.py +199 -0
- confiture/scenarios/financial.py +253 -0
- confiture/scenarios/healthcare.py +315 -0
- confiture/scenarios/multi_tenant.py +340 -0
- confiture/scenarios/saas.py +295 -0
- confiture/testing/FRAMEWORK_API.md +722 -0
- confiture/testing/__init__.py +38 -0
- confiture/testing/fixtures/__init__.py +11 -0
- confiture/testing/fixtures/data_validator.py +229 -0
- confiture/testing/fixtures/migration_runner.py +167 -0
- confiture/testing/fixtures/schema_snapshotter.py +352 -0
- confiture/testing/frameworks/__init__.py +10 -0
- confiture/testing/frameworks/mutation.py +587 -0
- confiture/testing/frameworks/performance.py +479 -0
- confiture/testing/utils/__init__.py +0 -0
- fraiseql_confiture-0.3.4.dist-info/METADATA +438 -0
- fraiseql_confiture-0.3.4.dist-info/RECORD +119 -0
- fraiseql_confiture-0.3.4.dist-info/WHEEL +4 -0
- fraiseql_confiture-0.3.4.dist-info/entry_points.txt +2 -0
- fraiseql_confiture-0.3.4.dist-info/licenses/LICENSE +21 -0
|
@@ -0,0 +1,232 @@
|
|
|
1
|
+
"""Base classes for hooks with priority and dependencies."""
|
|
2
|
+
|
|
3
|
+
from __future__ import annotations
|
|
4
|
+
|
|
5
|
+
import logging
|
|
6
|
+
from abc import ABC, abstractmethod
|
|
7
|
+
from dataclasses import dataclass
|
|
8
|
+
from typing import TYPE_CHECKING, Any, Generic, TypeVar
|
|
9
|
+
|
|
10
|
+
if TYPE_CHECKING:
|
|
11
|
+
from .context import HookContext
|
|
12
|
+
|
|
13
|
+
logger = logging.getLogger(__name__)
|
|
14
|
+
|
|
15
|
+
T = TypeVar("T")
|
|
16
|
+
|
|
17
|
+
|
|
18
|
+
class HookError(Exception):
    """Raised when a hook fails to execute.

    Carries structured details about the failure so callers can report
    exactly which hook failed and in which migration phase.

    Attributes:
        hook_id: ID of the hook that failed
        hook_name: Name of the hook that failed
        phase: Migration phase when error occurred (e.g., "pre_migration", "post_migration")
        cause: Original exception that caused this error
    """

    def __init__(
        self,
        message: str,
        hook_id: str | None = None,
        hook_name: str | None = None,
        phase: str | None = None,
        cause: Exception | None = None,
    ):
        """Initialize hook error.

        Args:
            message: Error message
            hook_id: ID of hook that failed
            hook_name: Name of hook that failed
            phase: Migration phase when error occurred
            cause: Original exception (for chaining)
        """
        self.hook_id = hook_id
        self.hook_name = hook_name
        self.phase = phase
        self.cause = cause

        # Annotate the base message with hook/phase details when available.
        detail = message
        if hook_name:
            detail = f"{detail} (hook: {hook_name})"
        if phase:
            detail = f"{detail} (phase: {phase})"
        super().__init__(detail)
|
|
63
|
+
|
|
64
|
+
|
|
65
|
+
@dataclass
class HookResult:
    """Result of hook execution."""

    # Whether the hook completed without error.
    success: bool
    # Number of rows touched by the hook (0 when not applicable).
    rows_affected: int = 0
    # Optional free-form statistics reported by the hook.
    stats: dict[str, Any] | None = None
    # Error description when success is False.
    error: str | None = None
|
|
73
|
+
|
|
74
|
+
|
|
75
|
+
class Hook(Generic[T], ABC):
    """Abstract base class shared by every hook implementation.

    Hooks are ordered by ``priority`` (lower value runs earlier) and may
    name other hook IDs in ``depends_on`` that must execute first.
    """

    def __init__(
        self,
        hook_id: str,
        name: str,
        priority: int = 5,  # 1-10, lower = higher priority
        depends_on: list[str] | None = None,
    ):
        # Identity and ordering metadata; no validation is performed here.
        self.id = hook_id
        self.name = name
        self.priority = priority
        # Normalize a missing dependency list to an empty one.
        self.depends_on = depends_on or []

    @abstractmethod
    async def execute(self, context: HookContext[T]) -> HookResult:
        """Run the hook against *context* and return its result."""
|
|
94
|
+
|
|
95
|
+
|
|
96
|
+
class HookExecutor:
    """Executes hooks in configured order with proper error handling.

    Manages hook execution with support for:
    - Sequential execution with proper ordering
    - Dependency resolution
    - Error handling and recovery
    - Execution context management
    - Performance tracking

    Example:
        >>> executor = HookExecutor(registry=registry)
        >>> await executor.execute_phase("pre_migration", context)
    """

    def __init__(self, registry: Any | None = None):
        """Initialize hook executor.

        Args:
            registry: Hook registry with registered hooks (optional)
        """
        self.registry = registry
        # IDs of hooks that completed, used to satisfy dependency checks.
        self._executed_hooks: set[str] = set()
        # Results accumulated per hook ID (persists across phases).
        self._hook_results: dict[str, HookResult] = {}

    async def execute_phase(self, phase: str, context: Any) -> dict[str, HookResult]:
        """Execute all hooks for a given phase.

        Args:
            phase: Phase name (e.g., "pre_migration", "post_migration")
            context: Hook execution context with migration state

        Returns:
            Dictionary mapping hook IDs to their execution results

        Raises:
            HookError: If any hook fails during execution
        """
        if not self.registry:
            logger.debug(f"No hook registry configured, skipping phase: {phase}")
            return {}

        try:
            # Get hooks for this phase
            hooks = self.registry.get_hooks(phase) if hasattr(self.registry, "get_hooks") else []

            if not hooks:
                logger.debug(f"No hooks registered for phase: {phase}")
                return {}

            # Sort hooks by priority (lower number = higher priority)
            sorted_hooks = sorted(hooks, key=lambda h: getattr(h, "priority", 5))

            # Execute sequentially so later hooks can depend on earlier ones
            for hook in sorted_hooks:
                await self._execute_single_hook(hook, phase, context)

            return self._hook_results

        except HookError:
            raise
        except Exception as e:
            raise HookError(
                message=f"Unexpected error executing phase '{phase}'",
                phase=phase,
                cause=e,
            ) from e

    async def _execute_single_hook(self, hook: Any, phase: str, context: Any) -> None:
        """Execute a single hook with error handling.

        Args:
            hook: Hook instance to execute
            phase: Phase name
            context: Hook execution context

        Raises:
            HookError: If hook execution fails
        """
        hook_id = getattr(hook, "id", "unknown")
        hook_name = getattr(hook, "name", "unknown")

        # Check dependencies: every declared dependency must have run already
        depends_on = getattr(hook, "depends_on", [])
        if depends_on:
            for dep_id in depends_on:
                if dep_id not in self._executed_hooks:
                    raise HookError(
                        message=f"Dependency '{dep_id}' not executed",
                        hook_id=hook_id,
                        hook_name=hook_name,
                        phase=phase,
                    )

        try:
            logger.debug(f"Executing hook '{hook_name}' ({hook_id}) in phase '{phase}'")

            # Execute the hook
            if hasattr(hook, "execute"):
                # BUG FIX: the original tested hasattr(hook.execute, "__await__"),
                # which is False for bound methods whether sync or async (only
                # the coroutine OBJECT an async def returns has __await__), so
                # coroutines from async hooks were never awaited and async hooks
                # always failed. Call first, then await the returned object if
                # it is awaitable.
                result = hook.execute(context)
                if hasattr(result, "__await__"):
                    result = await result
            else:
                raise HookError(
                    message="Hook does not have execute method",
                    hook_id=hook_id,
                    hook_name=hook_name,
                    phase=phase,
                )

            # Store result
            self._hook_results[hook_id] = result
            self._executed_hooks.add(hook_id)

            if not result.success:
                error_msg = result.error or "Unknown error"
                raise HookError(
                    message=f"Hook execution failed: {error_msg}",
                    hook_id=hook_id,
                    hook_name=hook_name,
                    phase=phase,
                )

            logger.debug(f"Hook '{hook_name}' completed successfully")

        except HookError:
            raise
        except Exception as e:
            raise HookError(
                message=f"Exception during hook execution: {str(e)}",
                hook_id=hook_id,
                hook_name=hook_name,
                phase=phase,
                cause=e,
            ) from e
|
|
@@ -0,0 +1,146 @@
|
|
|
1
|
+
"""Type-safe hook contexts."""
|
|
2
|
+
|
|
3
|
+
from __future__ import annotations
|
|
4
|
+
|
|
5
|
+
from dataclasses import dataclass, field
|
|
6
|
+
from datetime import UTC, datetime
|
|
7
|
+
from typing import Any, Generic, TypeVar
|
|
8
|
+
from uuid import UUID, uuid4
|
|
9
|
+
|
|
10
|
+
T = TypeVar("T")
|
|
11
|
+
|
|
12
|
+
|
|
13
|
+
@dataclass
class Schema:
    """Basic schema representation."""

    # Schema name (identifier).
    name: str
    # Names of tables contained in the schema.
    tables: list[str] = field(default_factory=list)
    # Free-form additional information about the schema.
    metadata: dict[str, Any] = field(default_factory=dict)
|
|
20
|
+
|
|
21
|
+
|
|
22
|
+
@dataclass
class SchemaDifference:
    """Represents a difference between schemas."""

    type: str  # e.g., "added_table", "dropped_column", "type_change"
    # Structured details describing this specific difference.
    details: dict[str, Any] = field(default_factory=dict)
|
|
28
|
+
|
|
29
|
+
|
|
30
|
+
@dataclass
class RiskAssessment:
    """Risk assessment for a migration."""

    level: str  # "LOW", "MEDIUM", "HIGH", "CRITICAL"
    score: float  # 0.0-1.0
    # Named risk factors and their individual scores.
    factors: dict[str, float] = field(default_factory=dict)
|
|
37
|
+
|
|
38
|
+
|
|
39
|
+
@dataclass
class MigrationStep:
    """Individual migration step."""

    # Unique identifier of the step.
    id: str
    # Human-readable description of what the step does.
    description: str
    # Predicted execution time in milliseconds.
    estimated_duration_ms: int
    # Statement executed by this step, when applicable.
    query: str | None = None
    # Free-form additional information about the step.
    metadata: dict[str, Any] = field(default_factory=dict)
|
|
48
|
+
|
|
49
|
+
|
|
50
|
+
@dataclass
class SchemaAnalysisContext:
    """Context available in before/after_analyze_schema hooks."""

    # Schema being migrated from.
    source_schema: Schema
    # Schema being migrated to.
    target_schema: Schema
    # Time spent on analysis, in milliseconds.
    analysis_time_ms: int
    # Number of tables inspected during analysis.
    tables_analyzed: int
    # Number of columns inspected during analysis.
    columns_analyzed: int
    # Free-form additional information for hooks.
    metadata: dict[str, Any] = field(default_factory=dict)
|
|
60
|
+
|
|
61
|
+
|
|
62
|
+
@dataclass
class SchemaDiffContext:
    """Context available in before/after_diff_schemas hooks."""

    # Schema being migrated from.
    source_schema: Schema
    # Schema being migrated to.
    target_schema: Schema
    # Differences detected between the two schemas.
    differences: list[SchemaDifference] = field(default_factory=list)
    # Time spent diffing, in milliseconds.
    diff_time_ms: int = 0
    # Descriptions of changes classified as breaking.
    breaking_changes: list[str] = field(default_factory=list)
    # Descriptions of changes classified as safe.
    safe_changes: list[str] = field(default_factory=list)
    # Free-form additional information for hooks.
    metadata: dict[str, Any] = field(default_factory=dict)
|
|
73
|
+
|
|
74
|
+
|
|
75
|
+
@dataclass
class MigrationPlanContext:
    """Context available in before/after_plan_migration hooks."""

    # Ordered steps that make up the migration plan.
    migration_steps: list[MigrationStep] = field(default_factory=list)
    # Predicted total execution time, in milliseconds.
    estimated_duration_ms: int = 0
    # Predicted downtime window, in milliseconds.
    estimated_downtime_ms: int = 0
    # Overall risk evaluation for the plan, when computed.
    risk_assessment: RiskAssessment | None = None
    # Names of tables touched by the plan.
    affected_tables: list[str] = field(default_factory=list)
    # Free-form additional information for hooks.
    metadata: dict[str, Any] = field(default_factory=dict)
|
|
85
|
+
|
|
86
|
+
|
|
87
|
+
@dataclass
class ExecutionContext:
    """Context available during before/after_execute."""

    # Step currently being executed, if any.
    current_step: MigrationStep | None = None
    # Number of steps finished so far.
    steps_completed: int = 0
    # Total number of steps in the migration.
    total_steps: int = 0
    # Time elapsed since execution started, in milliseconds.
    elapsed_time_ms: int = 0
    # Total rows affected so far.
    rows_affected: int = 0
    # Number of connections open at this point.
    current_connections: int = 0
    # Free-form additional information for hooks.
    metadata: dict[str, Any] = field(default_factory=dict)
|
|
98
|
+
|
|
99
|
+
|
|
100
|
+
@dataclass
class RollbackContext:
    """Context available during before/after_rollback."""

    # Human-readable explanation of why the rollback was triggered.
    rollback_reason: str
    # Steps that need to be rolled back.
    steps_to_rollback: list[MigrationStep] = field(default_factory=list)
    # Exception that triggered the rollback, when available.
    original_error: Exception | None = None
    # Free-form additional information for hooks.
    metadata: dict[str, Any] = field(default_factory=dict)
|
|
108
|
+
|
|
109
|
+
|
|
110
|
+
@dataclass
class ValidationContext:
    """Context available during before/after_validate."""

    # Per-check validation results.
    validation_results: list[dict[str, Any]] = field(default_factory=list)
    # Overall pass/fail outcome of validation.
    passed: bool = True
    # Validation error messages.
    errors: list[str] = field(default_factory=list)
    # Non-fatal validation warnings.
    warnings: list[str] = field(default_factory=list)
    # Free-form additional information for hooks.
    metadata: dict[str, Any] = field(default_factory=dict)
|
|
119
|
+
|
|
120
|
+
|
|
121
|
+
class HookContext(Generic[T]):
    """Carries typed, phase-specific payload data through hook execution.

    Every context receives a correlation UUID (``execution_id``) and a
    UTC creation timestamp so a hook run can be traced end to end.
    """

    def __init__(
        self,
        phase: Any,  # HookPhase | HookEvent | HookAlert
        data: T,
        execution_id: UUID | None = None,
        hook_id: str | None = None,
    ):
        self.phase = phase
        self.data: T = data  # Type-safe payload for this phase
        # Generate a fresh correlation ID when the caller supplies none.
        self.execution_id = execution_id if execution_id else uuid4()
        # Fall back to a placeholder when no hook ID is given.
        self.hook_id = hook_id if hook_id else "unknown"
        self.timestamp = datetime.now(UTC)  # creation time, UTC
        self.parent_execution_id: UUID | None = None  # set for nested hooks

    def get_data(self) -> T:
        """Return the typed, phase-specific payload."""
        return self.data

    def add_metadata(self, key: str, value: Any) -> None:
        """Attach observability metadata to the payload when supported.

        Does nothing when the payload has no dict-valued ``metadata``
        attribute.
        """
        target = getattr(self.data, "metadata", None)
        if isinstance(target, dict):
            target[key] = value
|
|
@@ -0,0 +1,57 @@
|
|
|
1
|
+
"""Hook execution strategies and configuration."""
|
|
2
|
+
|
|
3
|
+
from __future__ import annotations
|
|
4
|
+
|
|
5
|
+
from dataclasses import dataclass
|
|
6
|
+
from enum import Enum
|
|
7
|
+
from typing import Any
|
|
8
|
+
|
|
9
|
+
|
|
10
|
+
class HookExecutionStrategy(Enum):
    """Defines how hooks execute within a phase.

    Selected per phase via ``HookPhaseConfig.execution_strategy``.
    """

    SEQUENTIAL = "sequential"  # One by one, in priority order
    PARALLEL = "parallel"  # All simultaneously via asyncio.gather()
    PARALLEL_WITH_DEPS = "parallel_with_deps"  # DAG execution respecting dependencies
|
|
16
|
+
|
|
17
|
+
|
|
18
|
+
class HookErrorStrategy(Enum):
    """What happens when a hook fails.

    Selected per phase via ``HookPhaseConfig.error_strategy``.
    """

    FAIL_FAST = "fail_fast"  # Stop execution, fail migration
    FAIL_SAFE = "fail_safe"  # Log error, continue migration
    RETRY = "retry"  # Retry with exponential backoff
    ALERT_ONLY = "alert_only"  # Alert but continue
|
|
25
|
+
|
|
26
|
+
|
|
27
|
+
class HookContextMutationPolicy(Enum):
    """Whether downstream hooks can see upstream modifications.

    Selected per phase via ``HookPhaseConfig.context_mutation_policy``.
    """

    IMMUTABLE = "immutable"  # Context is read-only
    MUTABLE = "mutable"  # Hooks can modify for downstream
    COPY_ON_WRITE = "copy_on_write"  # Each hook gets modified copy
|
|
33
|
+
|
|
34
|
+
|
|
35
|
+
@dataclass
class RetryConfig:
    """Retry strategy for RETRY error handling."""

    # Maximum number of retry attempts.
    max_attempts: int = 3
    # Delay before the first retry, in milliseconds.
    initial_delay_ms: int = 100
    # Upper bound on the delay between retries, in milliseconds.
    max_delay_ms: int = 30000
    # Factor the delay is multiplied by after each attempt (exponential backoff).
    backoff_multiplier: float = 2.0
|
|
43
|
+
|
|
44
|
+
|
|
45
|
+
@dataclass
class HookPhaseConfig:
    """Configuration for hook execution in a specific phase."""

    phase: Any  # HookPhase | HookEvent | HookAlert
    # How hooks in this phase are scheduled relative to each other.
    execution_strategy: HookExecutionStrategy = HookExecutionStrategy.SEQUENTIAL
    # What to do when a hook in this phase fails.
    error_strategy: HookErrorStrategy = HookErrorStrategy.FAIL_FAST
    # Visibility of context mutations between hooks in this phase.
    context_mutation_policy: HookContextMutationPolicy = HookContextMutationPolicy.IMMUTABLE
    timeout_per_hook_ms: int = 30000  # 30 seconds per hook
    timeout_per_phase_ms: int = 300000  # 5 minutes per phase
    max_parallel_hooks: int = 4  # Limit concurrent execution
    retry_config: RetryConfig | None = None  # For RETRY strategy
    circuit_breaker_enabled: bool = True  # Prevent cascading failures
|
|
@@ -0,0 +1,220 @@
|
|
|
1
|
+
"""Observability and tracing infrastructure for hooks."""
|
|
2
|
+
|
|
3
|
+
from __future__ import annotations
|
|
4
|
+
|
|
5
|
+
import logging
|
|
6
|
+
from dataclasses import dataclass, field
|
|
7
|
+
from datetime import UTC, datetime
|
|
8
|
+
from enum import Enum
|
|
9
|
+
from typing import Any
|
|
10
|
+
from uuid import UUID
|
|
11
|
+
|
|
12
|
+
logger = logging.getLogger(__name__)
|
|
13
|
+
|
|
14
|
+
|
|
15
|
+
class HookExecutionStatus(Enum):
    """Status of hook execution, as recorded in ``HookExecutionEvent``."""

    PENDING = "pending"
    COMPLETED = "completed"
    FAILED = "failed"
    TIMEOUT = "timeout"
    SKIPPED = "skipped"
|
|
23
|
+
|
|
24
|
+
|
|
25
|
+
class CircuitBreakerState(Enum):
    """Circuit breaker states used by ``CircuitBreaker``."""

    CLOSED = "closed"  # Normal operation
    OPEN = "open"  # Blocking requests
    HALF_OPEN = "half_open"  # Testing recovery
|
|
31
|
+
|
|
32
|
+
|
|
33
|
+
@dataclass
class HookExecutionEvent:
    """Record of a single hook execution."""

    execution_id: UUID  # Trace correlation ID
    # ID of the executed hook.
    hook_id: str
    # Phase the hook ran in (e.g. "pre_migration").
    phase: str
    # Outcome of the execution.
    status: HookExecutionStatus
    # Execution time, in milliseconds.
    duration_ms: int
    # Rows affected by the hook, when applicable.
    rows_affected: int = 0
    # Error description when the hook failed.
    error: str | None = None
    # Optional explanation for the recorded status.
    reason: str | None = None
    # Optional free-form statistics from the hook.
    stats: dict[str, Any] | None = None
    # Time the event was recorded (UTC).
    timestamp: datetime = field(default_factory=lambda: datetime.now(UTC))
|
|
47
|
+
|
|
48
|
+
|
|
49
|
+
@dataclass
class HookExecutionResult:
    """Result of executing all hooks in a phase."""

    # Phase these results belong to.
    phase: str
    # Number of hooks that were executed.
    hooks_executed: int
    # Per-hook execution events, when collected.
    results: list[HookExecutionEvent] | None = None
    # Total phase duration, in milliseconds.
    total_duration_ms: int = 0
    # Number of hooks that failed.
    failed_count: int = 0
    # Number of hooks that timed out.
    timeout_count: int = 0
|
|
59
|
+
|
|
60
|
+
|
|
61
|
+
@dataclass
class ExecutionDAG:
    """Directed acyclic graph of hook dependencies."""

    # Correlation ID of the execution this DAG belongs to.
    execution_id: UUID
    # Hook IDs participating in the execution.
    hooks: list[str] = field(default_factory=list)
    edges: list[tuple[str, str]] = field(default_factory=list)  # (from, to) pairs
|
|
68
|
+
|
|
69
|
+
|
|
70
|
+
@dataclass
class PerformanceTrace:
    """Detailed performance trace of hook execution."""

    # Correlation ID of the traced execution.
    execution_id: UUID
    # Total traced duration, in milliseconds.
    total_duration_ms: int
    # Individual hook execution events in the trace.
    hook_events: list[HookExecutionEvent] = field(default_factory=list)
    # Hook IDs forming the longest sequential chain.
    critical_path: list[str] = field(default_factory=list)
|
|
78
|
+
|
|
79
|
+
|
|
80
|
+
class CircuitBreaker:
    """Guards a single hook against repeated failures.

    After ``failure_threshold`` consecutive failures the breaker opens and
    blocks execution. Once ``recovery_timeout_ms`` elapses it transitions
    to HALF_OPEN to probe recovery, closing again on the next success.
    """

    def __init__(
        self,
        hook_id: str,
        failure_threshold: int = 5,
        recovery_timeout_ms: int = 60000,
    ):
        self.hook_id = hook_id
        self.failure_threshold = failure_threshold
        self.recovery_timeout_ms = recovery_timeout_ms
        # Mutable breaker state.
        self.failure_count = 0
        self.last_failure_time = None  # datetime of most recent failure, or None
        self.state = CircuitBreakerState.CLOSED

    @property
    def is_open(self) -> bool:
        """True while the breaker is blocking requests.

        Side effect: transitions OPEN -> HALF_OPEN (and resets the failure
        count) once the recovery timeout has elapsed since the last failure.
        """
        if self.state is not CircuitBreakerState.OPEN:
            return False
        if self.last_failure_time is not None:
            elapsed_ms = (datetime.now(UTC) - self.last_failure_time).total_seconds() * 1000
            if elapsed_ms > self.recovery_timeout_ms:
                # Recovery window elapsed: allow a trial execution.
                self.state = CircuitBreakerState.HALF_OPEN
                self.failure_count = 0
                return False
        return True

    def record_success(self) -> None:
        """Record successful hook execution; closes a HALF_OPEN breaker."""
        if self.state is CircuitBreakerState.HALF_OPEN:
            self.state = CircuitBreakerState.CLOSED
            self.failure_count = 0

    def record_failure(self) -> None:
        """Record failed hook execution; opens the breaker at the threshold."""
        self.failure_count += 1
        self.last_failure_time = datetime.now(UTC)

        if self.failure_count >= self.failure_threshold:
            self.state = CircuitBreakerState.OPEN
            logger.warning(
                f"Circuit breaker opened for hook {self.hook_id} after "
                f"{self.failure_count} failures"
            )
|
|
129
|
+
|
|
130
|
+
|
|
131
|
+
class HookExecutionTracer:
    """Track and trace hook execution for debugging.

    Keeps an in-memory log of execution events plus any dependency DAGs
    registered per execution (correlation) ID.
    """

    def __init__(self):
        # Chronological record of every hook execution observed.
        self.execution_log: list[HookExecutionEvent] = []
        # Dependency graphs keyed by execution (correlation) ID.
        self.execution_graphs: dict[UUID, ExecutionDAG] = {}

    def record_execution(self, event: HookExecutionEvent) -> None:
        """Append *event* to the log and emit an info-level summary line."""
        self.execution_log.append(event)
        logger.info(
            f"Hook {event.hook_id} in {event.phase}: {event.status.value} ({event.duration_ms}ms)"
        )

    def get_execution_log(
        self,
        execution_id: UUID | None = None,
        phase: str | None = None,
    ) -> list[HookExecutionEvent]:
        """Get execution log with optional filtering.

        Args:
            execution_id: When given, keep only events with this correlation ID.
            phase: When given, keep only events recorded in this phase.

        Returns:
            Matching events, in recording order.
        """
        log = self.execution_log

        if execution_id:
            log = [e for e in log if e.execution_id == execution_id]

        if phase:
            log = [e for e in log if e.phase == phase]

        return log

    def get_execution_dag(self, execution_id: UUID) -> ExecutionDAG | None:
        """Get execution DAG showing hook dependencies, if one was registered."""
        return self.execution_graphs.get(execution_id)

    def get_performance_trace(self, execution_id: UUID) -> PerformanceTrace:
        """Build a performance trace for one execution from its logged events."""
        events = self.get_execution_log(execution_id=execution_id)

        return PerformanceTrace(
            execution_id=execution_id,
            total_duration_ms=sum(e.duration_ms for e in events),
            hook_events=events,
            critical_path=self._compute_critical_path(events),
        )

    def _compute_critical_path(self, events: list[HookExecutionEvent]) -> list[str]:
        """Compute critical path - hooks that contributed most to total duration.

        Algorithm:
        1. Sort events by timestamp (execution order)
        2. Identify sequential execution blocks (no overlap)
        3. Return hooks in the longest duration chain

        Note: This assumes sequential execution. For parallel execution,
        a full DAG analysis with explicit dependencies would be needed.
        """
        if not events:
            return []

        if len(events) == 1:
            return [events[0].hook_id]

        # Sort events by timestamp and end time
        sorted_events = sorted(events, key=lambda e: e.timestamp)

        # Find hooks that form a critical path (non-overlapping sequential chain)
        critical_path = []
        max_end_time = None

        for event in sorted_events:
            # Only include events that start after the previous one ended
            # (indicating sequential dependency)
            if max_end_time is None or event.timestamp >= max_end_time:
                critical_path.append(event.hook_id)
                # Update end time (approximated as timestamp + duration)
                end_timestamp = event.timestamp.timestamp() + (event.duration_ms / 1000)
                max_end_time = datetime.fromtimestamp(end_timestamp, tz=UTC)

        # If we got no sequential chain, return the longest single execution
        if not critical_path:
            longest = max(sorted_events, key=lambda e: e.duration_ms)
            return [longest.hook_id]

        return critical_path
|
|
215
|
+
|
|
216
|
+
|
|
217
|
+
class HookExecutionError(Exception):
    """Exception raised when hook execution fails."""
|