fraiseql-confiture 0.3.7__cp311-cp311-macosx_11_0_arm64.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- confiture/__init__.py +48 -0
- confiture/_core.cpython-311-darwin.so +0 -0
- confiture/cli/__init__.py +0 -0
- confiture/cli/dry_run.py +116 -0
- confiture/cli/lint_formatter.py +193 -0
- confiture/cli/main.py +1893 -0
- confiture/config/__init__.py +0 -0
- confiture/config/environment.py +263 -0
- confiture/core/__init__.py +51 -0
- confiture/core/anonymization/__init__.py +0 -0
- confiture/core/anonymization/audit.py +485 -0
- confiture/core/anonymization/benchmarking.py +372 -0
- confiture/core/anonymization/breach_notification.py +652 -0
- confiture/core/anonymization/compliance.py +617 -0
- confiture/core/anonymization/composer.py +298 -0
- confiture/core/anonymization/data_subject_rights.py +669 -0
- confiture/core/anonymization/factory.py +319 -0
- confiture/core/anonymization/governance.py +737 -0
- confiture/core/anonymization/performance.py +1092 -0
- confiture/core/anonymization/profile.py +284 -0
- confiture/core/anonymization/registry.py +195 -0
- confiture/core/anonymization/security/kms_manager.py +547 -0
- confiture/core/anonymization/security/lineage.py +888 -0
- confiture/core/anonymization/security/token_store.py +686 -0
- confiture/core/anonymization/strategies/__init__.py +41 -0
- confiture/core/anonymization/strategies/address.py +359 -0
- confiture/core/anonymization/strategies/credit_card.py +374 -0
- confiture/core/anonymization/strategies/custom.py +161 -0
- confiture/core/anonymization/strategies/date.py +218 -0
- confiture/core/anonymization/strategies/differential_privacy.py +398 -0
- confiture/core/anonymization/strategies/email.py +141 -0
- confiture/core/anonymization/strategies/format_preserving_encryption.py +310 -0
- confiture/core/anonymization/strategies/hash.py +150 -0
- confiture/core/anonymization/strategies/ip_address.py +235 -0
- confiture/core/anonymization/strategies/masking_retention.py +252 -0
- confiture/core/anonymization/strategies/name.py +298 -0
- confiture/core/anonymization/strategies/phone.py +119 -0
- confiture/core/anonymization/strategies/preserve.py +85 -0
- confiture/core/anonymization/strategies/redact.py +101 -0
- confiture/core/anonymization/strategies/salted_hashing.py +322 -0
- confiture/core/anonymization/strategies/text_redaction.py +183 -0
- confiture/core/anonymization/strategies/tokenization.py +334 -0
- confiture/core/anonymization/strategy.py +241 -0
- confiture/core/anonymization/syncer_audit.py +357 -0
- confiture/core/blue_green.py +683 -0
- confiture/core/builder.py +500 -0
- confiture/core/checksum.py +358 -0
- confiture/core/connection.py +184 -0
- confiture/core/differ.py +522 -0
- confiture/core/drift.py +564 -0
- confiture/core/dry_run.py +182 -0
- confiture/core/health.py +313 -0
- confiture/core/hooks/__init__.py +87 -0
- confiture/core/hooks/base.py +232 -0
- confiture/core/hooks/context.py +146 -0
- confiture/core/hooks/execution_strategies.py +57 -0
- confiture/core/hooks/observability.py +220 -0
- confiture/core/hooks/phases.py +53 -0
- confiture/core/hooks/registry.py +295 -0
- confiture/core/large_tables.py +775 -0
- confiture/core/linting/__init__.py +70 -0
- confiture/core/linting/composer.py +192 -0
- confiture/core/linting/libraries/__init__.py +17 -0
- confiture/core/linting/libraries/gdpr.py +168 -0
- confiture/core/linting/libraries/general.py +184 -0
- confiture/core/linting/libraries/hipaa.py +144 -0
- confiture/core/linting/libraries/pci_dss.py +104 -0
- confiture/core/linting/libraries/sox.py +120 -0
- confiture/core/linting/schema_linter.py +491 -0
- confiture/core/linting/versioning.py +151 -0
- confiture/core/locking.py +389 -0
- confiture/core/migration_generator.py +298 -0
- confiture/core/migrator.py +882 -0
- confiture/core/observability/__init__.py +44 -0
- confiture/core/observability/audit.py +323 -0
- confiture/core/observability/logging.py +187 -0
- confiture/core/observability/metrics.py +174 -0
- confiture/core/observability/tracing.py +192 -0
- confiture/core/pg_version.py +418 -0
- confiture/core/pool.py +406 -0
- confiture/core/risk/__init__.py +39 -0
- confiture/core/risk/predictor.py +188 -0
- confiture/core/risk/scoring.py +248 -0
- confiture/core/rollback_generator.py +388 -0
- confiture/core/schema_analyzer.py +769 -0
- confiture/core/schema_to_schema.py +590 -0
- confiture/core/security/__init__.py +32 -0
- confiture/core/security/logging.py +201 -0
- confiture/core/security/validation.py +416 -0
- confiture/core/signals.py +371 -0
- confiture/core/syncer.py +540 -0
- confiture/exceptions.py +192 -0
- confiture/integrations/__init__.py +0 -0
- confiture/models/__init__.py +24 -0
- confiture/models/lint.py +193 -0
- confiture/models/migration.py +265 -0
- confiture/models/schema.py +203 -0
- confiture/models/sql_file_migration.py +225 -0
- confiture/scenarios/__init__.py +36 -0
- confiture/scenarios/compliance.py +586 -0
- confiture/scenarios/ecommerce.py +199 -0
- confiture/scenarios/financial.py +253 -0
- confiture/scenarios/healthcare.py +315 -0
- confiture/scenarios/multi_tenant.py +340 -0
- confiture/scenarios/saas.py +295 -0
- confiture/testing/FRAMEWORK_API.md +722 -0
- confiture/testing/__init__.py +100 -0
- confiture/testing/fixtures/__init__.py +11 -0
- confiture/testing/fixtures/data_validator.py +229 -0
- confiture/testing/fixtures/migration_runner.py +167 -0
- confiture/testing/fixtures/schema_snapshotter.py +352 -0
- confiture/testing/frameworks/__init__.py +10 -0
- confiture/testing/frameworks/mutation.py +587 -0
- confiture/testing/frameworks/performance.py +479 -0
- confiture/testing/loader.py +225 -0
- confiture/testing/pytest/__init__.py +38 -0
- confiture/testing/pytest_plugin.py +190 -0
- confiture/testing/sandbox.py +304 -0
- confiture/testing/utils/__init__.py +0 -0
- fraiseql_confiture-0.3.7.dist-info/METADATA +438 -0
- fraiseql_confiture-0.3.7.dist-info/RECORD +124 -0
- fraiseql_confiture-0.3.7.dist-info/WHEEL +4 -0
- fraiseql_confiture-0.3.7.dist-info/entry_points.txt +4 -0
- fraiseql_confiture-0.3.7.dist-info/licenses/LICENSE +21 -0
confiture/cli/main.py
ADDED
|
@@ -0,0 +1,1893 @@
|
|
|
1
|
+
"""Main CLI entry point for Confiture.
|
|
2
|
+
|
|
3
|
+
This module defines the main Typer application and all CLI commands.
|
|
4
|
+
"""
|
|
5
|
+
|
|
6
|
+
from pathlib import Path
|
|
7
|
+
from typing import Any
|
|
8
|
+
|
|
9
|
+
import typer
|
|
10
|
+
from rich.console import Console
|
|
11
|
+
from rich.table import Table
|
|
12
|
+
|
|
13
|
+
from confiture.cli.lint_formatter import format_lint_report, save_report
|
|
14
|
+
from confiture.core.builder import SchemaBuilder
|
|
15
|
+
from confiture.core.differ import SchemaDiffer
|
|
16
|
+
from confiture.core.linting import SchemaLinter
|
|
17
|
+
from confiture.core.linting.schema_linter import (
|
|
18
|
+
LintConfig as LinterConfig,
|
|
19
|
+
)
|
|
20
|
+
from confiture.core.linting.schema_linter import (
|
|
21
|
+
LintReport as LinterReport,
|
|
22
|
+
)
|
|
23
|
+
from confiture.core.linting.schema_linter import (
|
|
24
|
+
RuleSeverity,
|
|
25
|
+
)
|
|
26
|
+
from confiture.core.migration_generator import MigrationGenerator
|
|
27
|
+
from confiture.models.lint import LintReport, LintSeverity, Violation
|
|
28
|
+
|
|
29
|
+
# Valid output formats for linting
|
|
30
|
+
LINT_FORMATS = ("table", "json", "csv")
|
|
31
|
+
|
|
32
|
+
|
|
33
|
+
def _convert_linter_report(linter_report: LinterReport, schema_name: str = "schema") -> LintReport:
    """Convert a schema_linter.LintReport to models.lint.LintReport.

    Args:
        linter_report: Report from SchemaLinter
        schema_name: Name of schema being linted

    Returns:
        LintReport compatible with format_lint_report
    """
    # Map RuleSeverity to LintSeverity
    severity_map = {
        RuleSeverity.ERROR: LintSeverity.ERROR,
        RuleSeverity.WARNING: LintSeverity.WARNING,
        RuleSeverity.INFO: LintSeverity.INFO,
    }

    # Convert all violations. A single loop over the three severity groups
    # replaces the previous three copy-pasted loops; conversion order
    # (errors, then warnings, then info) is preserved.
    violations = []
    for group in (linter_report.errors, linter_report.warnings, linter_report.info):
        for violation in group:
            violations.append(
                Violation(
                    rule_name=violation.rule_name,
                    severity=severity_map[violation.severity],
                    message=violation.message,
                    location=violation.object_name,
                )
            )

    return LintReport(
        violations=violations,
        schema_name=schema_name,
        tables_checked=0,  # Not tracked in linter
        columns_checked=0,  # Not tracked in linter
        errors_count=len(linter_report.errors),
        warnings_count=len(linter_report.warnings),
        info_count=len(linter_report.info),
        execution_time_ms=0,  # Not tracked in linter
    )
|
|
93
|
+
|
|
94
|
+
|
|
95
|
+
# Create Typer app
app = typer.Typer(
    name="confiture",
    help="PostgreSQL migrations, sweetly done 🍓",
    add_completion=False,
)

# Create Rich console for pretty output
console = Console()

# Version reported by `confiture --version`.
# Fixed: the previous hard-coded value ("0.3.5") lagged behind the released
# distribution (fraiseql-confiture 0.3.7 per the package metadata), so
# `--version` misreported the installed version. Keep this in sync with the
# package version on every release.
__version__ = "0.3.7"
|
|
107
|
+
|
|
108
|
+
|
|
109
|
+
def version_callback(value: bool) -> None:
    """Eager callback for ``--version``: print the version string and stop."""
    if not value:
        return
    console.print(f"confiture version {__version__}")
    raise typer.Exit()
|
|
114
|
+
|
|
115
|
+
|
|
116
|
+
@app.callback()
def main(
    version: bool = typer.Option(
        False,
        "--version",
        callback=version_callback,
        is_eager=True,
        help="Show version and exit",
    ),
) -> None:
    # Root callback: exists only to host the eager --version option; the
    # docstring below is the CLI's top-level help text and must stay as-is.
    """Confiture - PostgreSQL migrations, sweetly done 🍓."""
|
|
128
|
+
|
|
129
|
+
|
|
130
|
+
@app.command()
def init(
    path: Path = typer.Argument(
        Path("."),
        help="Project directory to initialize",
    ),
) -> None:
    """Initialize a new Confiture project.

    Creates necessary directory structure and configuration files.
    """
    try:
        # Create directory structure
        db_dir = path / "db"
        schema_dir = db_dir / "schema"
        seeds_dir = db_dir / "seeds"
        migrations_dir = db_dir / "migrations"
        environments_dir = db_dir / "environments"

        # Check if already initialized
        if db_dir.exists():
            console.print(
                "[yellow]⚠️ Project already exists. Some files may be overwritten.[/yellow]"
            )
            if not typer.confirm("Continue?"):
                raise typer.Exit()

        # Create directories
        schema_dir.mkdir(parents=True, exist_ok=True)
        (seeds_dir / "common").mkdir(parents=True, exist_ok=True)
        (seeds_dir / "development").mkdir(parents=True, exist_ok=True)
        (seeds_dir / "test").mkdir(parents=True, exist_ok=True)
        migrations_dir.mkdir(parents=True, exist_ok=True)
        environments_dir.mkdir(parents=True, exist_ok=True)

        # Create example schema directory structure
        (schema_dir / "00_common").mkdir(exist_ok=True)
        (schema_dir / "10_tables").mkdir(exist_ok=True)

        # Create example schema file
        example_schema = schema_dir / "00_common" / "extensions.sql"
        example_schema.write_text(
            """-- PostgreSQL extensions
-- Add commonly used extensions here

-- Example:
-- CREATE EXTENSION IF NOT EXISTS "uuid-ossp";
-- CREATE EXTENSION IF NOT EXISTS "pg_trgm";
"""
        )

        # Create example table
        example_table = schema_dir / "10_tables" / "example.sql"
        example_table.write_text(
            """-- Example table
-- Replace with your actual schema

CREATE TABLE IF NOT EXISTS users (
    id SERIAL PRIMARY KEY,
    username TEXT NOT NULL UNIQUE,
    email TEXT NOT NULL UNIQUE,
    created_at TIMESTAMP DEFAULT NOW()
);
"""
        )

        # Create example seed file
        example_seed = seeds_dir / "common" / "00_example.sql"
        example_seed.write_text(
            """-- Common seed data
-- These records are included in all non-production environments

-- Example: Test users
-- INSERT INTO users (username, email) VALUES
--     ('admin', 'admin@example.com'),
--     ('editor', 'editor@example.com'),
--     ('reader', 'reader@example.com');
"""
        )

        # Create local environment config
        local_config = environments_dir / "local.yaml"
        local_config.write_text(
            """# Local development environment configuration

name: local
include_dirs:
  - db/schema/00_common
  - db/schema/10_tables
exclude_dirs: []

database:
  host: localhost
  port: 5432
  database: myapp_local
  user: postgres
  password: postgres
"""
        )

        # Create README
        readme = db_dir / "README.md"
        readme.write_text(
            """# Database Schema

This directory contains your database schema and migrations.

## Directory Structure

- `schema/` - DDL files organized by category
  - `00_common/` - Extensions, types, functions
  - `10_tables/` - Table definitions
- `migrations/` - Python migration files
- `environments/` - Environment-specific configurations

## Quick Start

1. Edit schema files in `schema/`
2. Generate migrations: `confiture migrate diff old.sql new.sql --generate`
3. Apply migrations: `confiture migrate up`

## Learn More

Documentation: https://github.com/evoludigit/confiture
"""
        )

        console.print("[green]✅ Confiture project initialized successfully![/green]")
        console.print(f"\n📁 Created structure in: {path.absolute()}")
        console.print("\n📝 Next steps:")
        console.print("  1. Edit your schema files in db/schema/")
        console.print("  2. Configure environments in db/environments/")
        console.print("  3. Run 'confiture migrate diff' to detect changes")

    except typer.Exit:
        # BUGFIX: typer.Exit (via click's Exit -> RuntimeError) subclasses
        # Exception, so without this clause the clean exit raised when the
        # user declines the overwrite confirmation above was caught by the
        # generic handler below and reported as an initialization *error*
        # (with exit code 1). Re-raise it untouched.
        raise
    except Exception as e:
        console.print(f"[red]❌ Error initializing project: {e}[/red]")
        raise typer.Exit(1) from e
|
|
267
|
+
|
|
268
|
+
|
|
269
|
+
@app.command()
def build(
    env: str = typer.Option(
        "local",
        "--env",
        "-e",
        help="Environment to build (references db/environments/{env}.yaml)",
    ),
    output: Path = typer.Option(
        None,
        "--output",
        "-o",
        help="Output file path (default: db/generated/schema_{env}.sql)",
    ),
    project_dir: Path = typer.Option(
        Path("."),
        "--project-dir",
        help="Project directory (default: current directory)",
    ),
    show_hash: bool = typer.Option(
        False,
        "--show-hash",
        help="Display schema hash after build",
    ),
    schema_only: bool = typer.Option(
        False,
        "--schema-only",
        help="Build schema only, exclude seed data",
    ),
) -> None:
    """Build complete schema from DDL files.

    This command builds a complete schema by concatenating all SQL files
    from the db/schema/ directory in deterministic order. This is the
    fastest way to create or recreate a database from scratch.

    The build process:
    1. Reads environment configuration (db/environments/{env}.yaml)
    2. Discovers all .sql files in configured include_dirs
    3. Concatenates files in alphabetical order
    4. Adds metadata headers (environment, file count, timestamp)
    5. Writes to output file (default: db/generated/schema_{env}.sql)

    Examples:
        # Build local environment schema
        confiture build

        # Build for specific environment
        confiture build --env production

        # Custom output location
        confiture build --output /tmp/schema.sql

        # Show hash for change detection
        confiture build --show-hash
    """
    try:
        # Create schema builder
        builder = SchemaBuilder(env=env, project_dir=project_dir)

        # Override to exclude seeds if --schema-only is specified.
        # NOTE(review): this mutates the builder's configured include_dirs in
        # place and reaches into the private `_find_common_parent` helper;
        # it also assumes seed directories are identifiable by the substring
        # "seed" in their path — confirm against SchemaBuilder's conventions.
        if schema_only:
            builder.include_dirs = [d for d in builder.include_dirs if "seed" not in str(d).lower()]
            # Recalculate base_dir after filtering
            if builder.include_dirs:
                builder.base_dir = builder._find_common_parent(builder.include_dirs)

        # Set default output path if not specified
        if output is None:
            output_dir = project_dir / "db" / "generated"
            output_dir.mkdir(parents=True, exist_ok=True)
            output = output_dir / f"schema_{env}.sql"

        # Build schema
        console.print(f"[cyan]🔨 Building schema for environment: {env}[/cyan]")

        # NOTE(review): find_sql_files() appears to be called here only to
        # report the file count; verify builder.build() does not duplicate
        # the discovery work.
        sql_files = builder.find_sql_files()
        console.print(f"[cyan]📁 Found {len(sql_files)} SQL files[/cyan]")

        schema = builder.build(output_path=output)

        # Success message
        console.print("[green]✅ Schema built successfully![/green]")
        console.print(f"\n📄 Output: {output.absolute()}")
        console.print(f"📊 Size: {len(schema):,} bytes")
        console.print(f"📝 Files: {len(sql_files)}")

        # Show hash if requested
        if show_hash:
            schema_hash = builder.compute_hash()
            console.print(f"🔑 Hash: {schema_hash}")

        console.print("\n💡 Next steps:")
        console.print(f"  • Apply schema: psql -f {output}")
        console.print("  • Or use: confiture migrate up")

    except FileNotFoundError as e:
        console.print(f"[red]❌ File not found: {e}[/red]")
        console.print("\n💡 Tip: Run 'confiture init' to create project structure")
        raise typer.Exit(1) from e
    except Exception as e:
        console.print(f"[red]❌ Error building schema: {e}[/red]")
        raise typer.Exit(1) from e
|
|
372
|
+
|
|
373
|
+
|
|
374
|
+
@app.command()
def lint(
    env: str = typer.Option(
        "local",
        "--env",
        "-e",
        help="Environment to lint (references db/environments/{env}.yaml)",
    ),
    project_dir: Path = typer.Option(
        Path("."),
        "--project-dir",
        help="Project directory (default: current directory)",
    ),
    format_type: str = typer.Option(
        "table",
        "--format",
        "-f",
        help="Output format (table, json, csv)",
    ),
    output: Path = typer.Option(
        None,
        "--output",
        "-o",
        help="Output file path (only with json/csv format)",
    ),
    fail_on_error: bool = typer.Option(
        True,
        "--fail-on-error",
        help="Exit with code 1 if errors found",
    ),
    fail_on_warning: bool = typer.Option(
        False,
        "--fail-on-warning",
        help="Exit with code 1 if warnings found (stricter)",
    ),
) -> None:
    """Lint schema against best practices.

    Validates the schema against 6 built-in linting rules:
    - Naming conventions (snake_case)
    - Primary keys on all tables
    - Documentation (COMMENT on tables)
    - Multi-tenant identifier columns
    - Indexes on foreign keys
    - Security best practices (passwords, tokens, secrets)

    Examples:
        # Lint local environment, display as table
        confiture lint

        # Lint production environment, output as JSON
        confiture lint --env production --format json

        # Save results to file
        confiture lint --format json --output lint-report.json

        # Strict mode: fail on warnings
        confiture lint --fail-on-warning
    """
    try:
        # Validate format option
        if format_type not in LINT_FORMATS:
            console.print(f"[red]❌ Invalid format: {format_type}[/red]")
            console.print(f"Valid formats: {', '.join(LINT_FORMATS)}")
            raise typer.Exit(1)

        # Create linter configuration (use LinterConfig for the linter)
        config = LinterConfig(
            enabled=True,
            fail_on_error=fail_on_error,
            fail_on_warning=fail_on_warning,
        )

        # Create linter and run linting.
        # NOTE(review): project_dir is accepted but not forwarded to
        # SchemaLinter here — confirm whether the linter resolves paths
        # relative to the current working directory by design.
        console.print(f"[cyan]🔍 Linting schema for environment: {env}[/cyan]")
        linter = SchemaLinter(env=env, config=config)
        linter_report = linter.lint()

        # Convert to model LintReport for formatting
        report = _convert_linter_report(linter_report, schema_name=env)

        # Display results based on format
        if format_type == "table":
            format_lint_report(report, format_type="table", console=console)
        else:
            # JSON/CSV format: format and optionally save
            # Cast format_type for type checker
            fmt = "json" if format_type == "json" else "csv"
            formatted = format_lint_report(
                report,
                format_type=fmt,
                console=console,
            )

            if output:
                save_report(report, output, format_type=fmt)
                console.print(f"[green]✅ Report saved to: {output.absolute()}[/green]")
            else:
                console.print(formatted)

        # Determine exit code based on violations and fail mode
        should_fail = (report.has_errors and fail_on_error) or (
            report.has_warnings and fail_on_warning
        )
        if should_fail:
            raise typer.Exit(1)

    except typer.Exit:
        # BUGFIX: typer.Exit subclasses Exception (via click's Exit ->
        # RuntimeError), so the intentional exits above (invalid --format,
        # lint failure) were previously caught by the generic handler below
        # and printed a bogus "Error linting schema:" message. Re-raise.
        raise
    except FileNotFoundError as e:
        console.print(f"[red]❌ File not found: {e}[/red]")
        console.print("\n💡 Tip: Make sure schema files exist in db/schema/")
        raise typer.Exit(1) from e
    except Exception as e:
        console.print(f"[red]❌ Error linting schema: {e}[/red]")
        raise typer.Exit(1) from e
|
|
488
|
+
|
|
489
|
+
|
|
490
|
+
# Create migrate subcommand group; its commands are invoked as
# `confiture migrate <command>` (status, up, ...).
migrate_app = typer.Typer(help="Migration commands")
app.add_typer(migrate_app, name="migrate")
|
|
493
|
+
|
|
494
|
+
|
|
495
|
+
@migrate_app.command("status")
def migrate_status(
    migrations_dir: Path = typer.Option(
        Path("db/migrations"),
        "--migrations-dir",
        help="Migrations directory",
    ),
    config: Path = typer.Option(
        None,
        "--config",
        "-c",
        help="Configuration file (optional, to show applied status)",
    ),
    output_format: str = typer.Option(
        "table",
        "--format",
        "-f",
        help="Output format: table (default) or json",
    ),
    output_file: Path = typer.Option(
        None,
        "--output",
        "-o",
        help="Save output to file (useful with --format json)",
    ),
) -> None:
    """Show migration status.

    If config is provided, shows which migrations are applied vs pending.

    Examples:
        confiture migrate status
        confiture migrate status --format json
        confiture migrate status -f json -o status.json
    """
    try:
        # Validate output format
        if output_format not in ("table", "json"):
            console.print(f"[red]❌ Invalid format: {output_format}. Use 'table' or 'json'[/red]")
            raise typer.Exit(1)

        if not migrations_dir.exists():
            if output_format == "json":
                result = {"error": f"Migrations directory not found: {migrations_dir.absolute()}"}
                _output_json(result, output_file, console)
            else:
                console.print("[yellow]No migrations directory found.[/yellow]")
                console.print(f"Expected: {migrations_dir.absolute()}")
            return

        # Find migration files (both Python and SQL), ordered by the numeric
        # version prefix of the filename.
        py_files = list(migrations_dir.glob("*.py"))
        sql_files = list(migrations_dir.glob("*.up.sql"))
        migration_files = sorted(py_files + sql_files, key=lambda f: f.name.split("_")[0])

        if not migration_files:
            if output_format == "json":
                result = {
                    "applied": [],
                    "pending": [],
                    "current": None,
                    "total": 0,
                    "migrations": [],
                }
                _output_json(result, output_file, console)
            else:
                console.print("[yellow]No migrations found.[/yellow]")
            return

        # Get applied migrations from database if config provided; a failed
        # connection downgrades gracefully to a file-list-only view.
        applied_versions: set[str] = set()
        db_error: str | None = None
        if config and config.exists():
            try:
                from confiture.core.connection import create_connection, load_config
                from confiture.core.migrator import Migrator

                config_data = load_config(config)
                conn = create_connection(config_data)
                migrator = Migrator(connection=conn)
                migrator.initialize()
                applied_versions = set(migrator.get_applied_versions())
                conn.close()
            except Exception as e:
                db_error = str(e)
                if output_format != "json":
                    console.print(f"[yellow]⚠️ Could not connect to database: {e}[/yellow]")
                    console.print("[yellow]Showing file list only (status unknown)[/yellow]\n")

        # Build migrations data
        migrations_data: list[dict[str, str]] = []
        applied_list: list[str] = []
        pending_list: list[str] = []

        for migration_file in migration_files:
            # Extract version and name from filename
            # Python: "001_add_users.py" -> version="001", name="add_users"
            # SQL: "001_add_users.up.sql" -> version="001", name="add_users"
            base_name = migration_file.stem
            if base_name.endswith(".up"):
                base_name = base_name[:-3]  # Remove ".up" suffix
            parts = base_name.split("_", 1)
            version = parts[0] if len(parts) > 0 else "???"
            name = parts[1] if len(parts) > 1 else base_name

            # Determine status: "unknown" when no DB info was obtained.
            if applied_versions:
                if version in applied_versions:
                    status = "applied"
                    applied_list.append(version)
                else:
                    status = "pending"
                    pending_list.append(version)
            else:
                status = "unknown"

            migrations_data.append(
                {
                    "version": version,
                    "name": name,
                    "status": status,
                }
            )

        # Determine current version (highest applied)
        current_version = applied_list[-1] if applied_list else None

        if output_format == "json":
            result: dict[str, Any] = {
                "applied": applied_list,
                "pending": pending_list,
                "current": current_version,
                "total": len(migration_files),
                "migrations": migrations_data,
            }
            if db_error:
                result["warning"] = f"Could not connect to database: {db_error}"
            _output_json(result, output_file, console)
        else:
            # Display migrations in a table
            table = Table(title="Migrations")
            table.add_column("Version", style="cyan")
            table.add_column("Name", style="green")
            table.add_column("Status", style="yellow")

            for migration in migrations_data:
                if migration["status"] == "applied":
                    status_display = "[green]✅ applied[/green]"
                elif migration["status"] == "pending":
                    status_display = "[yellow]⏳ pending[/yellow]"
                else:
                    status_display = "unknown"

                table.add_row(migration["version"], migration["name"], status_display)

            console.print(table)
            console.print(f"\n📊 Total: {len(migration_files)} migrations", end="")
            if applied_versions:
                console.print(f" ({len(applied_list)} applied, {len(pending_list)} pending)")
            else:
                console.print()

    except typer.Exit:
        # BUGFIX: typer.Exit subclasses Exception, so the intentional
        # invalid-format exit above was previously caught by the generic
        # handler below and reported as "❌ Error:". Re-raise untouched.
        raise
    except Exception as e:
        if output_format == "json":
            result = {"error": str(e)}
            _output_json(result, output_file, console)
        else:
            console.print(f"[red]❌ Error: {e}[/red]")
        raise typer.Exit(1) from e
|
|
664
|
+
|
|
665
|
+
|
|
666
|
+
def _output_json(data: dict[str, Any], output_file: Path | None, console: Console) -> None:
    """Render *data* as indented JSON, writing it to a file or the console.

    Args:
        data: Data to output as JSON
        output_file: Optional file to write to
        console: Console for output
    """
    import json

    rendered = json.dumps(data, indent=2)
    if output_file is None:
        console.print(rendered)
        return
    output_file.write_text(rendered)
    console.print(f"[green]✅ Output written to {output_file}[/green]")
|
|
682
|
+
|
|
683
|
+
|
|
684
|
+
@migrate_app.command("up")
def migrate_up(
    migrations_dir: Path = typer.Option(
        Path("db/migrations"),
        "--migrations-dir",
        help="Migrations directory",
    ),
    config: Path = typer.Option(
        Path("db/environments/local.yaml"),
        "--config",
        "-c",
        help="Configuration file",
    ),
    target: str = typer.Option(
        None,
        "--target",
        "-t",
        help="Target migration version (applies all if not specified)",
    ),
    strict: bool = typer.Option(
        False,
        "--strict",
        help="Enable strict mode (fail on warnings)",
    ),
    force: bool = typer.Option(
        False,
        "--force",
        help="Force migration application, skipping state checks",
    ),
    lock_timeout: int = typer.Option(
        30000,
        "--lock-timeout",
        help="Lock acquisition timeout in milliseconds (default: 30000ms = 30s)",
    ),
    no_lock: bool = typer.Option(
        False,
        "--no-lock",
        help="Disable migration locking (DANGEROUS in multi-pod environments)",
    ),
    dry_run: bool = typer.Option(
        False,
        "--dry-run",
        help="Analyze migrations without executing (metadata queries only)",
    ),
    dry_run_execute: bool = typer.Option(
        False,
        "--dry-run-execute",
        help="Execute migrations in SAVEPOINT for realistic testing (guaranteed rollback)",
    ),
    verify_checksums: bool = typer.Option(
        True,
        "--verify-checksums/--no-verify-checksums",
        help="Verify migration file checksums before running (default: enabled)",
    ),
    on_checksum_mismatch: str = typer.Option(
        "fail",
        "--on-checksum-mismatch",
        help="Behavior on checksum mismatch: fail, warn, ignore",
    ),
    verbose: bool = typer.Option(
        False,
        "--verbose",
        "-v",
        help="Show detailed analysis in dry-run report",
    ),
    format_output: str = typer.Option(
        "text",
        "--format",
        "-f",
        help="Report format (text or json)",
    ),
    output_file: Path | None = typer.Option(
        None,
        "--output",
        "-o",
        help="Save report to file",
    ),
) -> None:
    """Apply pending migrations.

    Applies all pending migrations up to the target version (or all if no target).

    Uses distributed locking to ensure only one migration process runs at a time.
    This is critical for Kubernetes/multi-pod deployments.

    Verifies migration file checksums to detect unauthorized modifications.
    Use --no-verify-checksums to skip verification.

    Use --dry-run for analysis without execution, or --dry-run-execute to test in SAVEPOINT.
    """
    from confiture.cli.dry_run import (
        ask_dry_run_execute_confirmation,
        display_dry_run_header,
        print_json_report,
        save_json_report,
        save_text_report,
    )
    from confiture.core.checksum import (
        ChecksumConfig,
        ChecksumMismatchBehavior,
        ChecksumVerificationError,
        MigrationChecksumVerifier,
    )
    from confiture.core.connection import (
        create_connection,
        load_config,
        load_migration_class,
    )
    from confiture.core.locking import LockAcquisitionError, LockConfig, MigrationLock
    from confiture.core.migrator import Migrator

    try:
        # Validate dry-run options: the two dry-run modes are mutually exclusive.
        if dry_run and dry_run_execute:
            console.print("[red]❌ Error: Cannot use both --dry-run and --dry-run-execute[/red]")
            raise typer.Exit(1)

        if (dry_run or dry_run_execute) and force:
            # Message covers both flags (the condition rejects either dry-run mode).
            console.print(
                "[red]❌ Error: Cannot use --dry-run or --dry-run-execute with --force[/red]"
            )
            raise typer.Exit(1)

        # Validate format option
        if format_output not in ("text", "json"):
            console.print(
                f"[red]❌ Error: Invalid format '{format_output}'. Use 'text' or 'json'[/red]"
            )
            raise typer.Exit(1)

        # Validate checksum mismatch option
        valid_mismatch_behaviors = ("fail", "warn", "ignore")
        if on_checksum_mismatch not in valid_mismatch_behaviors:
            console.print(
                f"[red]❌ Error: Invalid --on-checksum-mismatch '{on_checksum_mismatch}'. "
                f"Use one of: {', '.join(valid_mismatch_behaviors)}[/red]"
            )
            raise typer.Exit(1)

        # Load configuration
        config_data = load_config(config)

        # Try to load environment config for migration settings. The CLI flag
        # wins; the environment file is only consulted when --strict is absent.
        effective_strict_mode = strict
        if (
            not strict
            and config.parent.name == "environments"
            and config.parent.parent.name == "db"
        ):
            # Check if config is in standard environments directory
            try:
                from confiture.config.environment import Environment

                env_name = config.stem  # e.g., "local" from "local.yaml"
                project_dir = config.parent.parent.parent
                env_config = Environment.load(env_name, project_dir=project_dir)
                effective_strict_mode = env_config.migration.strict_mode
            except Exception:
                # If environment config loading fails, use default (False)
                pass

        # Show warnings for force mode before attempting database operations
        if force:
            console.print(
                "[yellow]⚠️ Force mode enabled - skipping migration state checks[/yellow]"
            )
            console.print(
                "[yellow]This may cause issues if applied incorrectly. Use with caution![/yellow]\n"
            )

        # Show warning for no-lock mode
        if no_lock:
            console.print(
                "[yellow]⚠️ Locking disabled - DANGEROUS in multi-pod environments![/yellow]"
            )
            console.print(
                "[yellow]Concurrent migrations may cause race conditions or data corruption.[/yellow]\n"
            )

        # Create database connection
        conn = create_connection(config_data)

        # Create migrator
        migrator = Migrator(connection=conn)
        migrator.initialize()

        # Verify checksums before running migrations (unless force mode)
        if verify_checksums and not force:
            mismatch_behavior = ChecksumMismatchBehavior(on_checksum_mismatch)
            checksum_config = ChecksumConfig(
                enabled=True,
                on_mismatch=mismatch_behavior,
            )
            verifier = MigrationChecksumVerifier(conn, checksum_config)

            try:
                mismatches = verifier.verify_all(migrations_dir)
                if not mismatches:
                    console.print("[cyan]🔐 Checksum verification passed[/cyan]\n")
            except ChecksumVerificationError as e:
                console.print("[red]❌ Checksum verification failed![/red]\n")
                for m in e.mismatches:
                    console.print(f" [yellow]{m.version}_{m.name}[/yellow]")
                    console.print(f" Expected: {m.expected[:16]}...")
                    console.print(f" Actual: {m.actual[:16]}...")
                console.print(
                    "\n[yellow]💡 Tip: Use 'confiture verify --fix' to update checksums, "
                    "or --no-verify-checksums to skip[/yellow]"
                )
                conn.close()
                raise typer.Exit(1) from e

        # Find migrations to apply
        if force:
            # In force mode, apply all migrations regardless of state
            migrations_to_apply = migrator.find_migration_files(migrations_dir=migrations_dir)
            if not migrations_to_apply:
                console.print("[yellow]⚠️ No migration files found.[/yellow]")
                conn.close()
                return
            console.print(
                f"[cyan]📦 Force mode: Found {len(migrations_to_apply)} migration(s) to apply[/cyan]\n"
            )
        else:
            # Normal mode: only apply pending migrations
            migrations_to_apply = migrator.find_pending(migrations_dir=migrations_dir)
            if not migrations_to_apply:
                console.print("[green]✅ No pending migrations. Database is up to date.[/green]")
                conn.close()
                return
            console.print(
                f"[cyan]📦 Found {len(migrations_to_apply)} pending migration(s)[/cyan]\n"
            )

        # Handle dry-run modes
        if dry_run or dry_run_execute:
            display_dry_run_header("testing" if dry_run_execute else "analysis")

            # Build migration summary
            migration_summary: dict[str, Any] = {
                "migration_id": f"dry_run_{config.stem}",
                "mode": "execute_and_analyze" if dry_run_execute else "analysis",
                "statements_analyzed": len(migrations_to_apply),
                "migrations": [],
                "summary": {
                    "unsafe_count": 0,
                    "total_estimated_time_ms": 0,
                    "total_estimated_disk_mb": 0.0,
                    "has_unsafe_statements": False,
                },
                "warnings": [],
                "analyses": [],
            }

            try:
                # Collect migration information
                for migration_file in migrations_to_apply:
                    migration_class = load_migration_class(migration_file)
                    migration = migration_class(connection=conn)

                    migration_info = {
                        "version": migration.version,
                        "name": migration.name,
                        "classification": "warning",  # Most migrations are complex changes
                        "estimated_duration_ms": 500,  # Conservative estimate
                        "estimated_disk_usage_mb": 1.0,
                        "estimated_cpu_percent": 30.0,
                    }
                    migration_summary["migrations"].append(migration_info)
                    migration_summary["analyses"].append(migration_info)

                # Display format
                if format_output == "json":
                    if output_file:
                        save_json_report(migration_summary, output_file)
                        console.print(
                            f"\n[green]✅ Report saved to: {output_file.absolute()}[/green]"
                        )
                    else:
                        print_json_report(migration_summary)
                else:
                    # Text format (default)
                    console.print("\n[cyan]Migration Analysis Summary[/cyan]")
                    console.print("=" * 80)
                    console.print(f"Migrations to apply: {len(migrations_to_apply)}")
                    console.print()
                    for mig in migration_summary["migrations"]:
                        console.print(f" {mig['version']}: {mig['name']}")
                        console.print(
                            f" Estimated time: {mig['estimated_duration_ms']}ms | "
                            f"Disk: {mig['estimated_disk_usage_mb']:.1f}MB | "
                            f"CPU: {mig['estimated_cpu_percent']:.0f}%"
                        )
                    console.print()
                    console.print("[green]✓ All migrations appear safe to execute[/green]")
                    console.print("=" * 80)

                    if output_file:
                        # Create a simple text report for file output
                        text_report = "DRY-RUN MIGRATION ANALYSIS REPORT\n"
                        text_report += "=" * 80 + "\n\n"
                        for mig in migration_summary["migrations"]:
                            text_report += f"{mig['version']}: {mig['name']}\n"
                        save_text_report(text_report, output_file)
                        console.print(
                            f"[green]✅ Report saved to: {output_file.absolute()}[/green]"
                        )

                # Stop here if dry-run only (not execute)
                if dry_run and not dry_run_execute:
                    conn.close()
                    return

                # For dry_run_execute: ask for confirmation
                if dry_run_execute and not ask_dry_run_execute_confirmation():
                    console.print("[yellow]Cancelled - no changes applied[/yellow]")
                    conn.close()
                    return

                # Continue to actual execution below

            except Exception as e:
                console.print(f"\n[red]❌ Dry-run analysis failed: {e}[/red]")
                conn.close()
                raise typer.Exit(1) from e

        # Configure locking
        lock_config = LockConfig(
            enabled=not no_lock,
            timeout_ms=lock_timeout,
        )

        # Create lock manager
        lock = MigrationLock(conn, lock_config)

        # Apply migrations with distributed lock
        applied_count = 0
        failed_migration = None
        failed_exception = None

        try:
            with lock.acquire():
                if not no_lock:
                    console.print("[cyan]🔒 Acquired migration lock[/cyan]\n")

                for migration_file in migrations_to_apply:
                    # Load migration module
                    migration_class = load_migration_class(migration_file)

                    # Create migration instance
                    migration = migration_class(connection=conn)
                    # Override strict_mode from CLI/config if not already set on class
                    if effective_strict_mode and not getattr(migration_class, "strict_mode", False):
                        migration.strict_mode = effective_strict_mode

                    # Check target
                    if target and migration.version > target:
                        console.print(
                            f"[yellow]⏭️ Skipping {migration.version} (after target)[/yellow]"
                        )
                        break

                    # Apply migration
                    console.print(
                        f"[cyan]⚡ Applying {migration.version}_{migration.name}...[/cyan]", end=" "
                    )

                    try:
                        migrator.apply(migration, force=force, migration_file=migration_file)
                        console.print("[green]✅[/green]")
                        applied_count += 1
                    except Exception as e:
                        console.print("[red]❌[/red]")
                        failed_migration = migration
                        failed_exception = e
                        break

        except LockAcquisitionError as e:
            console.print(f"\n[red]❌ Failed to acquire migration lock: {e}[/red]")
            if e.timeout:
                console.print(
                    f"[yellow]💡 Tip: Increase timeout with --lock-timeout {lock_timeout * 2}[/yellow]"
                )
            else:
                console.print(
                    "[yellow]💡 Tip: Check if another migration is running, or use --no-lock (dangerous)[/yellow]"
                )
            conn.close()
            raise typer.Exit(1) from e

        # Handle results
        if failed_migration:
            console.print("\n[red]❌ Migration failed![/red]")
            if applied_count > 0:
                console.print(
                    f"[yellow]⚠️ {applied_count} migration(s) were applied successfully before the failure.[/yellow]"
                )

            # Show detailed error information
            _show_migration_error_details(failed_migration, failed_exception, applied_count)
            conn.close()
            raise typer.Exit(1)
        else:
            if force:
                console.print(
                    f"\n[green]✅ Force mode: Successfully applied {applied_count} migration(s)![/green]"
                )
                console.print(
                    "[yellow]⚠️ Remember to verify your database state after force application[/yellow]"
                )
            else:
                console.print(
                    f"\n[green]✅ Successfully applied {applied_count} migration(s)![/green]"
                )
        conn.close()

    except LockAcquisitionError:
        # Already handled above
        raise
    except typer.Exit:
        # Let deliberate exits propagate untouched (consistent with migrate_baseline);
        # otherwise the generic handler below would re-report them as errors.
        raise
    except Exception as e:
        console.print(f"[red]❌ Error: {e}[/red]")
        raise typer.Exit(1) from e
|
|
1104
|
+
|
|
1105
|
+
|
|
1106
|
+
def _show_migration_error_details(failed_migration, exception, applied_count: int) -> None:
    """Show detailed error information for a failed migration with actionable guidance.

    Args:
        failed_migration: The Migration instance that failed
        exception: The exception that was raised
        applied_count: Number of migrations that succeeded before this one
    """
    from confiture.exceptions import MigrationError

    console.print("\n[red]Failed Migration Details:[/red]")
    console.print(f" Version: {failed_migration.version}")
    console.print(f" Name: {failed_migration.name}")
    console.print(f" File: db/migrations/{failed_migration.version}_{failed_migration.name}.py")

    # Analyze error type and provide specific guidance
    error_message = str(exception)

    # Check if this is a SQL error wrapped in a MigrationError
    if "SQL execution failed" in error_message:
        console.print(" Error Type: SQL Execution Error")

        # Extract SQL and error details from the message
        # Message format: "...SQL execution failed | SQL: ... | Error: ..."
        parts = error_message.split(" | ")
        sql_part = next((part for part in parts if part.startswith("SQL: ")), None)
        error_part = next((part for part in parts if part.startswith("Error: ")), None)

        if sql_part:
            sql_content = sql_part[5:].strip()  # Remove "SQL: " prefix
            console.print(
                f" SQL Statement: {sql_content[:100]}{'...' if len(sql_content) > 100 else ''}"
            )

        if error_part:
            db_error = error_part[7:].strip()  # Remove "Error: " prefix
            # Only show the first line of a multi-line database error
            console.print(f" Database Error: {db_error.split(chr(10))[0]}")

            # Specific SQL error guidance
            error_msg = db_error.lower()
            if "syntax error" in error_msg:
                console.print("\n[yellow]🔍 SQL Syntax Error Detected:[/yellow]")
                console.print(" • Check for typos in SQL keywords, table names, or column names")
                console.print(
                    " • Verify quotes, parentheses, and semicolons are properly balanced"
                )
                if sql_part:
                    sql_content = sql_part[5:].strip()
                    console.print(f' • Test the SQL manually: psql -c "{sql_content}"')
            elif "does not exist" in error_msg:
                if "schema" in error_msg:
                    console.print("\n[yellow]🔍 Missing Schema Error:[/yellow]")
                    console.print(
                        " • Create the schema first: CREATE SCHEMA IF NOT EXISTS schema_name;"
                    )
                    console.print(" • Or use the public schema by default")
                elif "table" in error_msg or "relation" in error_msg:
                    console.print("\n[yellow]🔍 Missing Table Error:[/yellow]")
                    console.print(" • Ensure dependent migrations ran first")
                    console.print(" • Check table name spelling and schema qualification")
                elif "function" in error_msg:
                    console.print("\n[yellow]🔍 Missing Function Error:[/yellow]")
                    console.print(" • Define the function before using it")
                    console.print(" • Check function name and parameter types")
            elif "already exists" in error_msg:
                console.print("\n[yellow]🔍 Object Already Exists:[/yellow]")
                console.print(" • Use IF NOT EXISTS clauses for safe creation")
                console.print(" • Check if migration was partially applied")
            elif "permission denied" in error_msg:
                console.print("\n[yellow]🔒 Permission Error:[/yellow]")
                console.print(" • Verify database user has required privileges")
                console.print(" • Check GRANT statements in earlier migrations")

    elif isinstance(exception, MigrationError):
        console.print(" Error Type: Migration Framework Error")
        console.print(f" Message: {exception}")

        # Migration-specific guidance
        error_msg = str(exception).lower()
        if "already been applied" in error_msg:
            console.print("\n[yellow]🔍 Migration Already Applied:[/yellow]")
            console.print(" • Check migration status: confiture migrate status")
            console.print(" • This migration may have run successfully before")
        elif "connection" in error_msg:
            console.print("\n[yellow]🔌 Database Connection Error:[/yellow]")
            console.print(" • Verify database is running and accessible")
            console.print(" • Check connection string in config file")
            console.print(" • Test connection: psql 'your-connection-string'")

    else:
        console.print(f" Error Type: {type(exception).__name__}")
        console.print(f" Message: {exception}")

    # General troubleshooting
    console.print("\n[yellow]🛠️ General Troubleshooting:[/yellow]")
    console.print(
        f" • View migration file: cat db/migrations/{failed_migration.version}_{failed_migration.name}.py"
    )
    console.print(" • Check database logs for more details")
    console.print(" • Test SQL manually in psql")

    if applied_count > 0:
        console.print(f" • {applied_count} migration(s) succeeded - database is partially updated")
        console.print(" • Fix the error and re-run: confiture migrate up")
        console.print(f" • Or rollback and retry: confiture migrate down --steps {applied_count}")
    else:
        console.print(" • No migrations applied yet - database state is clean")
        console.print(" • Fix the error and re-run: confiture migrate up")
|
|
1214
|
+
|
|
1215
|
+
|
|
1216
|
+
@migrate_app.command("generate")
def migrate_generate(
    name: str = typer.Argument(..., help="Migration name (snake_case)"),
    migrations_dir: Path = typer.Option(
        Path("db/migrations"),
        "--migrations-dir",
        help="Migrations directory",
    ),
) -> None:
    """Generate a new migration file.

    Creates an empty migration template with the given name.
    """
    try:
        # Ensure migrations directory exists
        migrations_dir.mkdir(parents=True, exist_ok=True)

        # Generate migration file template
        generator = MigrationGenerator(migrations_dir=migrations_dir)

        # For empty migration, create a template manually
        version = generator._get_next_version()
        class_name = generator._to_class_name(name)
        filename = f"{version}_{name}.py"
        filepath = migrations_dir / filename

        # Create template
        template = f'''"""Migration: {name}

Version: {version}
"""

from confiture.models.migration import Migration


class {class_name}(Migration):
    """Migration: {name}."""

    version = "{version}"
    name = "{name}"

    def up(self) -> None:
        """Apply migration."""
        # TODO: Add your SQL statements here
        # Example:
        # self.execute("CREATE TABLE users (id SERIAL PRIMARY KEY)")
        pass

    def down(self) -> None:
        """Rollback migration."""
        # TODO: Add your rollback SQL statements here
        # Example:
        # self.execute("DROP TABLE users")
        pass
'''

        filepath.write_text(template)

        console.print("[green]✅ Migration generated successfully![/green]")
        # Use plain print to avoid Rich wrapping long paths
        print(f"\n📄 File: {filepath.absolute()}")
        console.print("\n✏️ Edit the migration file to add your SQL statements.")

    except Exception as e:
        console.print(f"[red]❌ Error generating migration: {e}[/red]")
        raise typer.Exit(1) from e
|
|
1282
|
+
|
|
1283
|
+
|
|
1284
|
+
@migrate_app.command("baseline")
def migrate_baseline(
    through: str = typer.Option(
        ...,
        "--through",
        "-t",
        help="Mark all migrations through this version as applied",
    ),
    migrations_dir: Path = typer.Option(
        Path("db/migrations"),
        "--migrations-dir",
        help="Migrations directory",
    ),
    config: Path = typer.Option(
        Path("db/environments/local.yaml"),
        "--config",
        "-c",
        help="Configuration file with database connection",
    ),
    dry_run: bool = typer.Option(
        False,
        "--dry-run",
        help="Show what would be marked without making changes",
    ),
) -> None:
    """Mark migrations as applied without executing them.

    Use this to establish a baseline when:
    - Adopting confiture on an existing database
    - Setting up a new environment from a backup
    - Recovering from a failed migration state

    Examples:
        confiture migrate baseline --through 002
        confiture migrate baseline -t 005 --dry-run
        confiture migrate baseline -t 003 -c db/environments/production.yaml
    """
    from confiture.core.connection import create_connection, load_config
    from confiture.core.migrator import Migrator

    try:
        if not config.exists():
            console.print(f"[red]❌ Config file not found: {config}[/red]")
            console.print(
                "[yellow]💡 Tip: Specify config with --config path/to/config.yaml[/yellow]"
            )
            raise typer.Exit(1)

        if not migrations_dir.exists():
            console.print(f"[red]❌ Migrations directory not found: {migrations_dir}[/red]")
            raise typer.Exit(1)

        # Load config and create connection
        config_data = load_config(config)
        conn = create_connection(config_data)

        # Initialize migrator
        migrator = Migrator(connection=conn)
        migrator.initialize()

        # Find all migration files
        all_migrations = migrator.find_migration_files(migrations_dir)

        if not all_migrations:
            console.print("[yellow]No migrations found.[/yellow]")
            conn.close()
            return

        # Filter migrations up to and including the target version.
        # Uses for/else: the else branch fires only when the loop never
        # breaks, i.e. the target version was not found among the files.
        migrations_to_mark: list[Path] = []
        for migration_file in all_migrations:
            version = migrator._version_from_filename(migration_file.name)
            migrations_to_mark.append(migration_file)
            if version == through:
                break
        else:
            # Target version not found
            console.print(f"[red]❌ Migration version '{through}' not found[/red]")
            console.print("[yellow]Available versions:[/yellow]")
            for mf in all_migrations[:10]:
                v = migrator._version_from_filename(mf.name)
                console.print(f" • {v}")
            if len(all_migrations) > 10:
                console.print(f" ... and {len(all_migrations) - 10} more")
            conn.close()
            raise typer.Exit(1)

        # Get already applied versions
        applied_versions = set(migrator.get_applied_versions())

        # Show what will be done
        console.print(f"\n[cyan]📋 Baseline: marking migrations through {through}[/cyan]\n")

        if dry_run:
            console.print("[yellow]🔍 DRY RUN - no changes will be made[/yellow]\n")

        marked_count = 0
        skipped_count = 0

        for migration_file in migrations_to_mark:
            version = migrator._version_from_filename(migration_file.name)
            # Extract name: strip a ".up" suffix (paired up/down SQL files),
            # then take everything after the first underscore.
            base_name = migration_file.stem
            if base_name.endswith(".up"):
                base_name = base_name[:-3]
            parts = base_name.split("_", 1)
            name = parts[1] if len(parts) > 1 else base_name

            if version in applied_versions:
                console.print(f" [dim]⏭️ {version} {name} (already applied)[/dim]")
                skipped_count += 1
            else:
                if dry_run:
                    console.print(f" [cyan]📝 {version} {name} (would mark as applied)[/cyan]")
                else:
                    migrator.mark_applied(migration_file, reason="baseline")
                    console.print(f" [green]✅ {version} {name} (marked as applied)[/green]")
                marked_count += 1

        # Summary
        console.print()
        if dry_run:
            console.print(
                f"[cyan]📋 Would mark {marked_count} migration(s), "
                f"skip {skipped_count} already applied[/cyan]"
            )
            console.print("\n[yellow]Run without --dry-run to apply changes[/yellow]")
        else:
            console.print(
                f"[green]✅ Marked {marked_count} migration(s) as applied, "
                f"skipped {skipped_count} already applied[/green]"
            )

        conn.close()

    except typer.Exit:
        raise
    except Exception as e:
        console.print(f"[red]❌ Error: {e}[/red]")
        raise typer.Exit(1) from e
|
|
1424
|
+
|
|
1425
|
+
|
|
1426
|
+
@migrate_app.command("diff")
|
|
1427
|
+
def migrate_diff(
|
|
1428
|
+
old_schema: Path = typer.Argument(..., help="Old schema file"),
|
|
1429
|
+
new_schema: Path = typer.Argument(..., help="New schema file"),
|
|
1430
|
+
generate: bool = typer.Option(
|
|
1431
|
+
False,
|
|
1432
|
+
"--generate",
|
|
1433
|
+
help="Generate migration from diff",
|
|
1434
|
+
),
|
|
1435
|
+
name: str = typer.Option(
|
|
1436
|
+
None,
|
|
1437
|
+
"--name",
|
|
1438
|
+
help="Migration name (required with --generate)",
|
|
1439
|
+
),
|
|
1440
|
+
migrations_dir: Path = typer.Option(
|
|
1441
|
+
Path("db/migrations"),
|
|
1442
|
+
"--migrations-dir",
|
|
1443
|
+
help="Migrations directory",
|
|
1444
|
+
),
|
|
1445
|
+
) -> None:
|
|
1446
|
+
"""Compare two schema files and show differences.
|
|
1447
|
+
|
|
1448
|
+
Optionally generate a migration file from the diff.
|
|
1449
|
+
"""
|
|
1450
|
+
try:
|
|
1451
|
+
# Validate files exist
|
|
1452
|
+
if not old_schema.exists():
|
|
1453
|
+
console.print(f"[red]ā Old schema file not found: {old_schema}[/red]")
|
|
1454
|
+
raise typer.Exit(1)
|
|
1455
|
+
|
|
1456
|
+
if not new_schema.exists():
|
|
1457
|
+
console.print(f"[red]ā New schema file not found: {new_schema}[/red]")
|
|
1458
|
+
raise typer.Exit(1)
|
|
1459
|
+
|
|
1460
|
+
# Read schemas
|
|
1461
|
+
old_sql = old_schema.read_text()
|
|
1462
|
+
new_sql = new_schema.read_text()
|
|
1463
|
+
|
|
1464
|
+
# Compare schemas
|
|
1465
|
+
differ = SchemaDiffer()
|
|
1466
|
+
diff = differ.compare(old_sql, new_sql)
|
|
1467
|
+
|
|
1468
|
+
# Display diff
|
|
1469
|
+
if not diff.has_changes():
|
|
1470
|
+
console.print("[green]ā
No changes detected. Schemas are identical.[/green]")
|
|
1471
|
+
return
|
|
1472
|
+
|
|
1473
|
+
console.print("[cyan]š Schema differences detected:[/cyan]\n")
|
|
1474
|
+
|
|
1475
|
+
# Display changes in a table
|
|
1476
|
+
table = Table()
|
|
1477
|
+
table.add_column("Type", style="yellow")
|
|
1478
|
+
table.add_column("Details", style="white")
|
|
1479
|
+
|
|
1480
|
+
for change in diff.changes:
|
|
1481
|
+
table.add_row(change.type, str(change))
|
|
1482
|
+
|
|
1483
|
+
console.print(table)
|
|
1484
|
+
console.print(f"\nš Total changes: {len(diff.changes)}")
|
|
1485
|
+
|
|
1486
|
+
# Generate migration if requested
|
|
1487
|
+
if generate:
|
|
1488
|
+
if not name:
|
|
1489
|
+
console.print("[red]ā Migration name is required when using --generate[/red]")
|
|
1490
|
+
console.print(
|
|
1491
|
+
"Usage: confiture migrate diff old.sql new.sql --generate --name migration_name"
|
|
1492
|
+
)
|
|
1493
|
+
raise typer.Exit(1)
|
|
1494
|
+
|
|
1495
|
+
# Ensure migrations directory exists
|
|
1496
|
+
migrations_dir.mkdir(parents=True, exist_ok=True)
|
|
1497
|
+
|
|
1498
|
+
# Generate migration
|
|
1499
|
+
generator = MigrationGenerator(migrations_dir=migrations_dir)
|
|
1500
|
+
migration_file = generator.generate(diff, name=name)
|
|
1501
|
+
|
|
1502
|
+
console.print(f"\n[green]ā
Migration generated: {migration_file.name}[/green]")
|
|
1503
|
+
|
|
1504
|
+
except Exception as e:
|
|
1505
|
+
console.print(f"[red]ā Error: {e}[/red]")
|
|
1506
|
+
raise typer.Exit(1) from e
|
|
1507
|
+
|
|
1508
|
+
|
|
1509
|
+
@migrate_app.command("down")
|
|
1510
|
+
def migrate_down(
|
|
1511
|
+
migrations_dir: Path = typer.Option(
|
|
1512
|
+
Path("db/migrations"),
|
|
1513
|
+
"--migrations-dir",
|
|
1514
|
+
help="Migrations directory",
|
|
1515
|
+
),
|
|
1516
|
+
config: Path = typer.Option(
|
|
1517
|
+
Path("db/environments/local.yaml"),
|
|
1518
|
+
"--config",
|
|
1519
|
+
"-c",
|
|
1520
|
+
help="Configuration file",
|
|
1521
|
+
),
|
|
1522
|
+
steps: int = typer.Option(
|
|
1523
|
+
1,
|
|
1524
|
+
"--steps",
|
|
1525
|
+
"-n",
|
|
1526
|
+
help="Number of migrations to rollback",
|
|
1527
|
+
),
|
|
1528
|
+
dry_run: bool = typer.Option(
|
|
1529
|
+
False,
|
|
1530
|
+
"--dry-run",
|
|
1531
|
+
help="Analyze rollback without executing",
|
|
1532
|
+
),
|
|
1533
|
+
verbose: bool = typer.Option(
|
|
1534
|
+
False,
|
|
1535
|
+
"--verbose",
|
|
1536
|
+
"-v",
|
|
1537
|
+
help="Show detailed analysis in dry-run report",
|
|
1538
|
+
),
|
|
1539
|
+
format_output: str = typer.Option(
|
|
1540
|
+
"text",
|
|
1541
|
+
"--format",
|
|
1542
|
+
"-f",
|
|
1543
|
+
help="Report format (text or json)",
|
|
1544
|
+
),
|
|
1545
|
+
output_file: Path | None = typer.Option(
|
|
1546
|
+
None,
|
|
1547
|
+
"--output",
|
|
1548
|
+
"-o",
|
|
1549
|
+
help="Save report to file",
|
|
1550
|
+
),
|
|
1551
|
+
) -> None:
|
|
1552
|
+
"""Rollback applied migrations.
|
|
1553
|
+
|
|
1554
|
+
Rolls back the last N applied migrations (default: 1).
|
|
1555
|
+
|
|
1556
|
+
Use --dry-run to analyze rollback without executing.
|
|
1557
|
+
"""
|
|
1558
|
+
from confiture.core.connection import (
|
|
1559
|
+
create_connection,
|
|
1560
|
+
load_config,
|
|
1561
|
+
load_migration_class,
|
|
1562
|
+
)
|
|
1563
|
+
from confiture.core.migrator import Migrator
|
|
1564
|
+
|
|
1565
|
+
try:
|
|
1566
|
+
# Validate format option
|
|
1567
|
+
if format_output not in ("text", "json"):
|
|
1568
|
+
console.print(
|
|
1569
|
+
f"[red]ā Error: Invalid format '{format_output}'. Use 'text' or 'json'[/red]"
|
|
1570
|
+
)
|
|
1571
|
+
raise typer.Exit(1)
|
|
1572
|
+
|
|
1573
|
+
# Load configuration
|
|
1574
|
+
config_data = load_config(config)
|
|
1575
|
+
|
|
1576
|
+
# Create database connection
|
|
1577
|
+
conn = create_connection(config_data)
|
|
1578
|
+
|
|
1579
|
+
# Create migrator
|
|
1580
|
+
migrator = Migrator(connection=conn)
|
|
1581
|
+
migrator.initialize()
|
|
1582
|
+
|
|
1583
|
+
# Get applied migrations
|
|
1584
|
+
applied_versions = migrator.get_applied_versions()
|
|
1585
|
+
|
|
1586
|
+
if not applied_versions:
|
|
1587
|
+
console.print("[yellow]ā ļø No applied migrations to rollback.[/yellow]")
|
|
1588
|
+
conn.close()
|
|
1589
|
+
return
|
|
1590
|
+
|
|
1591
|
+
# Get migrations to rollback (last N)
|
|
1592
|
+
versions_to_rollback = applied_versions[-steps:]
|
|
1593
|
+
|
|
1594
|
+
# Handle dry-run mode
|
|
1595
|
+
if dry_run:
|
|
1596
|
+
from confiture.cli.dry_run import (
|
|
1597
|
+
display_dry_run_header,
|
|
1598
|
+
save_json_report,
|
|
1599
|
+
save_text_report,
|
|
1600
|
+
)
|
|
1601
|
+
|
|
1602
|
+
display_dry_run_header("analysis")
|
|
1603
|
+
|
|
1604
|
+
# Build rollback summary
|
|
1605
|
+
rollback_summary: dict[str, Any] = {
|
|
1606
|
+
"migration_id": f"dry_run_rollback_{config.stem}",
|
|
1607
|
+
"mode": "analysis",
|
|
1608
|
+
"statements_analyzed": len(versions_to_rollback),
|
|
1609
|
+
"migrations": [],
|
|
1610
|
+
"summary": {
|
|
1611
|
+
"unsafe_count": 0,
|
|
1612
|
+
"total_estimated_time_ms": 0,
|
|
1613
|
+
"total_estimated_disk_mb": 0.0,
|
|
1614
|
+
"has_unsafe_statements": False,
|
|
1615
|
+
},
|
|
1616
|
+
"warnings": [],
|
|
1617
|
+
"analyses": [],
|
|
1618
|
+
}
|
|
1619
|
+
|
|
1620
|
+
# Collect rollback migration information
|
|
1621
|
+
for version in reversed(versions_to_rollback):
|
|
1622
|
+
# Find migration file
|
|
1623
|
+
migration_files = migrator.find_migration_files(migrations_dir=migrations_dir)
|
|
1624
|
+
migration_file = None
|
|
1625
|
+
for mf in migration_files:
|
|
1626
|
+
if migrator._version_from_filename(mf.name) == version:
|
|
1627
|
+
migration_file = mf
|
|
1628
|
+
break
|
|
1629
|
+
|
|
1630
|
+
if not migration_file:
|
|
1631
|
+
continue
|
|
1632
|
+
|
|
1633
|
+
# Load migration class
|
|
1634
|
+
migration_class = load_migration_class(migration_file)
|
|
1635
|
+
|
|
1636
|
+
migration = migration_class(connection=conn)
|
|
1637
|
+
|
|
1638
|
+
migration_info = {
|
|
1639
|
+
"version": migration.version,
|
|
1640
|
+
"name": migration.name,
|
|
1641
|
+
"classification": "warning",
|
|
1642
|
+
"estimated_duration_ms": 500,
|
|
1643
|
+
"estimated_disk_usage_mb": 1.0,
|
|
1644
|
+
"estimated_cpu_percent": 30.0,
|
|
1645
|
+
}
|
|
1646
|
+
rollback_summary["migrations"].append(migration_info)
|
|
1647
|
+
rollback_summary["analyses"].append(migration_info)
|
|
1648
|
+
|
|
1649
|
+
# Display format
|
|
1650
|
+
if format_output == "json":
|
|
1651
|
+
if output_file:
|
|
1652
|
+
save_json_report(rollback_summary, output_file)
|
|
1653
|
+
console.print(f"\n[green]ā
Report saved to: {output_file.absolute()}[/green]")
|
|
1654
|
+
else:
|
|
1655
|
+
from confiture.cli.dry_run import print_json_report
|
|
1656
|
+
|
|
1657
|
+
print_json_report(rollback_summary)
|
|
1658
|
+
else:
|
|
1659
|
+
# Text format (default)
|
|
1660
|
+
console.print("[cyan]Rollback Analysis Summary[/cyan]")
|
|
1661
|
+
console.print("=" * 80)
|
|
1662
|
+
console.print(f"Migrations to rollback: {len(versions_to_rollback)}")
|
|
1663
|
+
console.print()
|
|
1664
|
+
for mig in rollback_summary["migrations"]:
|
|
1665
|
+
console.print(f" {mig['version']}: {mig['name']}")
|
|
1666
|
+
console.print(
|
|
1667
|
+
f" Estimated time: {mig['estimated_duration_ms']}ms | "
|
|
1668
|
+
f"Disk: {mig['estimated_disk_usage_mb']:.1f}MB | "
|
|
1669
|
+
f"CPU: {mig['estimated_cpu_percent']:.0f}%"
|
|
1670
|
+
)
|
|
1671
|
+
console.print()
|
|
1672
|
+
console.print("[yellow]ā ļø Rollback will undo these migrations[/yellow]")
|
|
1673
|
+
console.print("=" * 80)
|
|
1674
|
+
|
|
1675
|
+
if output_file:
|
|
1676
|
+
text_report = "DRY-RUN ROLLBACK ANALYSIS REPORT\n"
|
|
1677
|
+
text_report += "=" * 80 + "\n\n"
|
|
1678
|
+
for mig in rollback_summary["migrations"]:
|
|
1679
|
+
text_report += f"{mig['version']}: {mig['name']}\n"
|
|
1680
|
+
save_text_report(text_report, output_file)
|
|
1681
|
+
console.print(f"[green]ā
Report saved to: {output_file.absolute()}[/green]")
|
|
1682
|
+
|
|
1683
|
+
conn.close()
|
|
1684
|
+
return
|
|
1685
|
+
|
|
1686
|
+
console.print(f"[cyan]š¦ Rolling back {len(versions_to_rollback)} migration(s)[/cyan]\n")
|
|
1687
|
+
|
|
1688
|
+
# Rollback migrations in reverse order
|
|
1689
|
+
rolled_back_count = 0
|
|
1690
|
+
for version in reversed(versions_to_rollback):
|
|
1691
|
+
# Find migration file
|
|
1692
|
+
migration_files = migrator.find_migration_files(migrations_dir=migrations_dir)
|
|
1693
|
+
migration_file = None
|
|
1694
|
+
for mf in migration_files:
|
|
1695
|
+
if migrator._version_from_filename(mf.name) == version:
|
|
1696
|
+
migration_file = mf
|
|
1697
|
+
break
|
|
1698
|
+
|
|
1699
|
+
if not migration_file:
|
|
1700
|
+
console.print(f"[red]ā Migration file for version {version} not found[/red]")
|
|
1701
|
+
continue
|
|
1702
|
+
|
|
1703
|
+
# Load migration module
|
|
1704
|
+
migration_class = load_migration_class(migration_file)
|
|
1705
|
+
|
|
1706
|
+
# Create migration instance
|
|
1707
|
+
migration = migration_class(connection=conn)
|
|
1708
|
+
|
|
1709
|
+
# Rollback migration
|
|
1710
|
+
console.print(
|
|
1711
|
+
f"[cyan]ā” Rolling back {migration.version}_{migration.name}...[/cyan]", end=" "
|
|
1712
|
+
)
|
|
1713
|
+
migrator.rollback(migration)
|
|
1714
|
+
console.print("[green]ā
[/green]")
|
|
1715
|
+
rolled_back_count += 1
|
|
1716
|
+
|
|
1717
|
+
console.print(
|
|
1718
|
+
f"\n[green]ā
Successfully rolled back {rolled_back_count} migration(s)![/green]"
|
|
1719
|
+
)
|
|
1720
|
+
conn.close()
|
|
1721
|
+
|
|
1722
|
+
except Exception as e:
|
|
1723
|
+
console.print(f"[red]ā Error: {e}[/red]")
|
|
1724
|
+
raise typer.Exit(1) from e
|
|
1725
|
+
|
|
1726
|
+
|
|
1727
|
+
@app.command()
|
|
1728
|
+
def validate_profile(
|
|
1729
|
+
path: Path = typer.Argument(
|
|
1730
|
+
...,
|
|
1731
|
+
help="Path to anonymization profile YAML file",
|
|
1732
|
+
),
|
|
1733
|
+
) -> None:
|
|
1734
|
+
"""Validate anonymization profile YAML structure and schema.
|
|
1735
|
+
|
|
1736
|
+
Performs security validation:
|
|
1737
|
+
- Uses safe_load() to prevent YAML injection
|
|
1738
|
+
- Validates against Pydantic schema
|
|
1739
|
+
- Checks strategy types are whitelisted
|
|
1740
|
+
- Verifies all required fields present
|
|
1741
|
+
|
|
1742
|
+
Example:
|
|
1743
|
+
confiture validate-profile db/profiles/production.yaml
|
|
1744
|
+
"""
|
|
1745
|
+
try:
|
|
1746
|
+
from confiture.core.anonymization.profile import AnonymizationProfile
|
|
1747
|
+
|
|
1748
|
+
console.print(f"[cyan]š Validating profile: {path}[/cyan]")
|
|
1749
|
+
profile = AnonymizationProfile.load(path)
|
|
1750
|
+
|
|
1751
|
+
# Print profile summary
|
|
1752
|
+
console.print("[green]ā
Valid profile![/green]")
|
|
1753
|
+
console.print(f" Name: {profile.name}")
|
|
1754
|
+
console.print(f" Version: {profile.version}")
|
|
1755
|
+
if profile.global_seed:
|
|
1756
|
+
console.print(f" Global Seed: {profile.global_seed}")
|
|
1757
|
+
|
|
1758
|
+
# List strategies
|
|
1759
|
+
console.print(f"\n[cyan]Strategies ({len(profile.strategies)})[/cyan]:")
|
|
1760
|
+
for strategy_name, strategy_def in profile.strategies.items():
|
|
1761
|
+
console.print(
|
|
1762
|
+
f" ⢠{strategy_name}: {strategy_def.type}",
|
|
1763
|
+
end="",
|
|
1764
|
+
)
|
|
1765
|
+
if strategy_def.seed_env_var:
|
|
1766
|
+
console.print(f" [env: {strategy_def.seed_env_var}]")
|
|
1767
|
+
else:
|
|
1768
|
+
console.print()
|
|
1769
|
+
|
|
1770
|
+
# List tables
|
|
1771
|
+
console.print(f"\n[cyan]Tables ({len(profile.tables)})[/cyan]:")
|
|
1772
|
+
for table_name, table_def in profile.tables.items():
|
|
1773
|
+
console.print(f" ⢠{table_name}: {len(table_def.rules)} rules")
|
|
1774
|
+
for rule in table_def.rules:
|
|
1775
|
+
console.print(f" - {rule.column} ā {rule.strategy}", end="")
|
|
1776
|
+
if rule.seed:
|
|
1777
|
+
console.print(f" [seed: {rule.seed}]")
|
|
1778
|
+
else:
|
|
1779
|
+
console.print()
|
|
1780
|
+
|
|
1781
|
+
console.print("[green]\nā
Profile validation passed![/green]")
|
|
1782
|
+
|
|
1783
|
+
except FileNotFoundError as e:
|
|
1784
|
+
console.print(f"[red]ā File not found: {e}[/red]")
|
|
1785
|
+
raise typer.Exit(1) from e
|
|
1786
|
+
except ValueError as e:
|
|
1787
|
+
console.print(f"[red]ā Invalid profile: {e}[/red]")
|
|
1788
|
+
raise typer.Exit(1) from e
|
|
1789
|
+
except Exception as e:
|
|
1790
|
+
console.print(f"[red]ā Error validating profile: {e}[/red]")
|
|
1791
|
+
raise typer.Exit(1) from e
|
|
1792
|
+
|
|
1793
|
+
|
|
1794
|
+
@app.command()
|
|
1795
|
+
def verify(
|
|
1796
|
+
migrations_dir: Path = typer.Option(
|
|
1797
|
+
Path("db/migrations"),
|
|
1798
|
+
"--migrations-dir",
|
|
1799
|
+
help="Migrations directory",
|
|
1800
|
+
),
|
|
1801
|
+
config: Path = typer.Option(
|
|
1802
|
+
Path("db/environments/local.yaml"),
|
|
1803
|
+
"--config",
|
|
1804
|
+
"-c",
|
|
1805
|
+
help="Configuration file",
|
|
1806
|
+
),
|
|
1807
|
+
fix: bool = typer.Option(
|
|
1808
|
+
False,
|
|
1809
|
+
"--fix",
|
|
1810
|
+
help="Update stored checksums to match current files (dangerous)",
|
|
1811
|
+
),
|
|
1812
|
+
) -> None:
|
|
1813
|
+
"""Verify migration file integrity against stored checksums.
|
|
1814
|
+
|
|
1815
|
+
Compares SHA-256 checksums of migration files against the checksums
|
|
1816
|
+
stored when migrations were applied. Detects if files have been
|
|
1817
|
+
modified after application.
|
|
1818
|
+
|
|
1819
|
+
This helps prevent:
|
|
1820
|
+
- Silent schema drift between environments
|
|
1821
|
+
- Production/staging mismatches
|
|
1822
|
+
- Debugging nightmares from modified migrations
|
|
1823
|
+
|
|
1824
|
+
Examples:
|
|
1825
|
+
# Verify all migrations
|
|
1826
|
+
confiture verify
|
|
1827
|
+
|
|
1828
|
+
# Verify with specific config
|
|
1829
|
+
confiture verify --config db/environments/production.yaml
|
|
1830
|
+
|
|
1831
|
+
# Fix checksums (update stored to match current files)
|
|
1832
|
+
confiture verify --fix
|
|
1833
|
+
"""
|
|
1834
|
+
from confiture.core.checksum import (
|
|
1835
|
+
ChecksumConfig,
|
|
1836
|
+
ChecksumMismatchBehavior,
|
|
1837
|
+
MigrationChecksumVerifier,
|
|
1838
|
+
)
|
|
1839
|
+
from confiture.core.connection import create_connection, load_config
|
|
1840
|
+
|
|
1841
|
+
try:
|
|
1842
|
+
# Load config and connect
|
|
1843
|
+
config_data = load_config(config)
|
|
1844
|
+
conn = create_connection(config_data)
|
|
1845
|
+
|
|
1846
|
+
# Run verification (warn mode - we'll handle display)
|
|
1847
|
+
verifier = MigrationChecksumVerifier(
|
|
1848
|
+
conn,
|
|
1849
|
+
ChecksumConfig(
|
|
1850
|
+
enabled=True,
|
|
1851
|
+
on_mismatch=ChecksumMismatchBehavior.WARN,
|
|
1852
|
+
),
|
|
1853
|
+
)
|
|
1854
|
+
mismatches = verifier.verify_all(migrations_dir)
|
|
1855
|
+
|
|
1856
|
+
if not mismatches:
|
|
1857
|
+
console.print("[green]ā
All migration checksums verified![/green]")
|
|
1858
|
+
conn.close()
|
|
1859
|
+
return
|
|
1860
|
+
|
|
1861
|
+
# Display mismatches
|
|
1862
|
+
console.print(f"[red]ā Found {len(mismatches)} checksum mismatch(es):[/red]\n")
|
|
1863
|
+
|
|
1864
|
+
for m in mismatches:
|
|
1865
|
+
console.print(f" [yellow]{m.version}_{m.name}[/yellow]")
|
|
1866
|
+
console.print(f" File: {m.file_path}")
|
|
1867
|
+
console.print(f" Expected: {m.expected[:16]}...")
|
|
1868
|
+
console.print(f" Actual: {m.actual[:16]}...")
|
|
1869
|
+
console.print()
|
|
1870
|
+
|
|
1871
|
+
if fix:
|
|
1872
|
+
# Update checksums in database
|
|
1873
|
+
console.print("[yellow]ā ļø Updating stored checksums...[/yellow]")
|
|
1874
|
+
updated = verifier.update_all_checksums(migrations_dir)
|
|
1875
|
+
console.print(f"[green]ā
Updated {updated} checksum(s)[/green]")
|
|
1876
|
+
else:
|
|
1877
|
+
console.print(
|
|
1878
|
+
"[yellow]š” Tip: Use --fix to update stored checksums (dangerous)[/yellow]"
|
|
1879
|
+
)
|
|
1880
|
+
conn.close()
|
|
1881
|
+
raise typer.Exit(1)
|
|
1882
|
+
|
|
1883
|
+
conn.close()
|
|
1884
|
+
|
|
1885
|
+
except typer.Exit:
|
|
1886
|
+
raise
|
|
1887
|
+
except Exception as e:
|
|
1888
|
+
console.print(f"[red]ā Error: {e}[/red]")
|
|
1889
|
+
raise typer.Exit(1) from e
|
|
1890
|
+
|
|
1891
|
+
|
|
1892
|
+
if __name__ == "__main__":
|
|
1893
|
+
app()
|