fraiseql_confiture-0.1.0-cp311-cp311-manylinux_2_34_x86_64.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of fraiseql-confiture might be problematic.
- confiture/__init__.py +45 -0
- confiture/_core.cpython-311-x86_64-linux-gnu.so +0 -0
- confiture/cli/__init__.py +0 -0
- confiture/cli/main.py +720 -0
- confiture/config/__init__.py +0 -0
- confiture/config/environment.py +190 -0
- confiture/core/__init__.py +0 -0
- confiture/core/builder.py +336 -0
- confiture/core/connection.py +120 -0
- confiture/core/differ.py +522 -0
- confiture/core/migration_generator.py +298 -0
- confiture/core/migrator.py +369 -0
- confiture/core/schema_to_schema.py +592 -0
- confiture/core/syncer.py +540 -0
- confiture/exceptions.py +141 -0
- confiture/integrations/__init__.py +0 -0
- confiture/models/__init__.py +0 -0
- confiture/models/migration.py +95 -0
- confiture/models/schema.py +203 -0
- fraiseql_confiture-0.1.0.dist-info/METADATA +350 -0
- fraiseql_confiture-0.1.0.dist-info/RECORD +24 -0
- fraiseql_confiture-0.1.0.dist-info/WHEEL +4 -0
- fraiseql_confiture-0.1.0.dist-info/entry_points.txt +2 -0
- fraiseql_confiture-0.1.0.dist-info/licenses/LICENSE +21 -0
confiture/config/environment.py
@@ -0,0 +1,190 @@
"""Environment configuration management.

Handles loading and validation of environment-specific configuration from YAML files.
"""

from pathlib import Path
from typing import Any

import yaml
from pydantic import BaseModel, Field, field_validator

from confiture.exceptions import ConfigurationError


class DatabaseConfig(BaseModel):
    """Database connection configuration.

    Can be initialized from a connection URL or individual parameters.
    """

    host: str = "localhost"
    port: int = 5432
    database: str = "postgres"
    user: str = "postgres"
    password: str = ""

    @classmethod
    def from_url(cls, url: str) -> "DatabaseConfig":
        """Parse database configuration from a PostgreSQL URL.

        Args:
            url: PostgreSQL connection URL (postgresql://user:pass@host:port/dbname)

        Returns:
            DatabaseConfig instance

        Example:
            >>> config = DatabaseConfig.from_url("postgresql://user:pass@localhost:5432/mydb")
            >>> config.host
            'localhost'
        """
        import re

        # Parse URL: postgresql://user:pass@host:port/dbname
        pattern = r"(?:postgresql|postgres)://(?:([^:]+):([^@]+)@)?([^:/]+)(?::(\d+))?/(.+)"
        match = re.match(pattern, url)

        if not match:
            raise ValueError(f"Invalid PostgreSQL URL: {url}")

        user, password, host, port, database = match.groups()

        return cls(
            host=host or "localhost",
            port=int(port) if port else 5432,
            database=database,
            user=user or "postgres",
            password=password or "",
        )

    def to_dict(self) -> dict[str, Any]:
        """Convert to dictionary for use with create_connection."""
        return {
            "database": {
                "host": self.host,
                "port": self.port,
                "database": self.database,
                "user": self.user,
                "password": self.password,
            }
        }
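
# Sketch (hypothetical values, not part of the packaged module): round-tripping
# a URL through the two methods above.
#
#     config = DatabaseConfig.from_url("postgresql://app:secret@db.example.com:5433/orders")
#     config.port       # -> 5433
#     config.to_dict()  # -> {"database": {"host": "db.example.com", "port": 5433,
#                       #     "database": "orders", "user": "app", "password": "secret"}}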


class Environment(BaseModel):
    """Environment configuration.

    Loaded from db/environments/{env_name}.yaml files.

    Attributes:
        name: Environment name (e.g., "local", "production")
        database_url: PostgreSQL connection URL
        include_dirs: Directories to include when building schema
        exclude_dirs: Directories to exclude from schema build
        migration_table: Table name for tracking migrations
        auto_backup: Whether to automatically back up before migrations
        require_confirmation: Whether to require user confirmation for risky operations
    """

    name: str
    database_url: str
    include_dirs: list[str]
    exclude_dirs: list[str] = Field(default_factory=list)
    migration_table: str = "confiture_migrations"
    auto_backup: bool = True
    require_confirmation: bool = True

    @property
    def database(self) -> DatabaseConfig:
        """Get database configuration from database_url.

        Returns:
            DatabaseConfig instance
        """
        return DatabaseConfig.from_url(self.database_url)

    @field_validator("database_url")
    @classmethod
    def validate_database_url(cls, v: str) -> str:
        """Validate PostgreSQL connection URL format."""
        if not v.startswith(("postgresql://", "postgres://")):
            raise ValueError(
                f"Invalid database_url: must start with postgresql:// or postgres://, got: {v}"
            )
        return v

    @classmethod
    def load(cls, env_name: str, project_dir: Path | None = None) -> "Environment":
        """Load environment configuration from YAML file.

        Args:
            env_name: Environment name (e.g., "local", "production")
            project_dir: Project root directory. If None, uses current directory.

        Returns:
            Environment configuration object

        Raises:
            ConfigurationError: If config file not found, invalid, or missing required fields

        Example:
            >>> env = Environment.load("local")
            >>> print(env.database_url)
            postgresql://localhost/myapp_local
        """
        if project_dir is None:
            project_dir = Path.cwd()

        # Find config file
        config_path = project_dir / "db" / "environments" / f"{env_name}.yaml"

        if not config_path.exists():
            raise ConfigurationError(
                f"Environment config not found: {config_path}\n"
                f"Expected: db/environments/{env_name}.yaml"
            )

        # Load YAML
        try:
            with open(config_path) as f:
                data = yaml.safe_load(f)
        except yaml.YAMLError as e:
            raise ConfigurationError(f"Invalid YAML in {config_path}: {e}") from e

        if not isinstance(data, dict):
            raise ConfigurationError(
                f"Invalid config format in {config_path}: expected dictionary, got {type(data)}"
            )

        # Validate required fields
        if "database_url" not in data:
            raise ConfigurationError(f"Missing required field 'database_url' in {config_path}")

        if "include_dirs" not in data:
            raise ConfigurationError(f"Missing required field 'include_dirs' in {config_path}")

        # Resolve paths to absolute
        include_dirs = []
        for dir_path in data["include_dirs"]:
            abs_path = (project_dir / dir_path).resolve()
            if not abs_path.exists():
                raise ConfigurationError(
                    f"Include directory does not exist: {abs_path}\nSpecified in {config_path}"
                )
            include_dirs.append(str(abs_path))

        data["include_dirs"] = include_dirs

        # Resolve exclude_dirs if present
        if "exclude_dirs" in data:
            exclude_dirs = []
            for dir_path in data["exclude_dirs"]:
                abs_path = (project_dir / dir_path).resolve()
                exclude_dirs.append(str(abs_path))
            data["exclude_dirs"] = exclude_dirs

        # Create Environment instance
        try:
            return cls(**data)
        except Exception as e:
            raise ConfigurationError(f"Invalid configuration in {config_path}: {e}") from e
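
The two models above imply a small YAML surface. A minimal sketch of loading an
environment, with hypothetical file contents shown in the comment. Note that name
is a required model field with no default, so the YAML must supply it, because
load() passes the parsed mapping straight to cls(**data):

    from pathlib import Path

    from confiture.config.environment import Environment

    # Hypothetical db/environments/local.yaml:
    #     name: local
    #     database_url: postgresql://postgres:postgres@localhost:5432/myapp_local
    #     include_dirs:
    #       - db/schema
    env = Environment.load("local", project_dir=Path("/srv/myapp"))
    print(env.database.host)       # "localhost", parsed via DatabaseConfig.from_url
    print(env.migration_table)     # "confiture_migrations" (default)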

confiture/core/builder.py
@@ -0,0 +1,336 @@
"""Schema builder - builds PostgreSQL schemas from DDL files.

The SchemaBuilder concatenates SQL files from db/schema/ in deterministic order
to create a complete schema file. This implements "Medium 1: Build from Source DDL".

Performance: Uses Rust extension (_core) when available for 10-50x speedup.
"""

import hashlib
from datetime import datetime
from pathlib import Path
from typing import TYPE_CHECKING, Any

from confiture.config.environment import Environment
from confiture.exceptions import SchemaError

# Try to import Rust extension for 10-50x performance boost
_core: Any = None
HAS_RUST = False

if not TYPE_CHECKING:
    try:
        from confiture import _core  # type: ignore

        HAS_RUST = True
    except ImportError:
        pass


class SchemaBuilder:
    """Build PostgreSQL schema from DDL source files.

    The SchemaBuilder discovers SQL files in the schema directory, concatenates
    them in deterministic order, and generates a complete schema file.

    Attributes:
        env_config: Environment configuration
        include_dirs: Directories searched for SQL files
        base_dir: Common parent of the include directories, used for relative paths

    Example:
        >>> builder = SchemaBuilder(env="local")
        >>> schema = builder.build()
        >>> print(len(schema))
        15234
    """

    def __init__(self, env: str, project_dir: Path | None = None):
        """Initialize SchemaBuilder.

        Args:
            env: Environment name (e.g., "local", "production")
            project_dir: Project root directory. If None, uses current directory.
        """
        self.env_config = Environment.load(env, project_dir=project_dir)

        # Validate include_dirs
        if not self.env_config.include_dirs:
            raise SchemaError("No include_dirs specified in environment config")

        # Store include dirs for file discovery
        self.include_dirs = [Path(d) for d in self.env_config.include_dirs]

        # Base directory for relative path calculation:
        # the common parent of all include directories
        self.base_dir = self._find_common_parent(self.include_dirs)

    def _find_common_parent(self, paths: list[Path]) -> Path:
        """Find the common parent directory of all paths.

        Args:
            paths: Paths to find the common parent of

        Returns:
            Common parent directory

        Example:
            >>> paths = [Path("db/schema/00_common"), Path("db/seeds/common")]
            >>> builder._find_common_parent(paths)
            Path("db")
        """
        if len(paths) == 1:
            return paths[0]

        # Convert to absolute paths for comparison
        abs_paths = [p.resolve() for p in paths]

        # Get all parent parts for each path (including the path itself)
        all_parts = [p.parts for p in abs_paths]

        # Find common prefix
        common_parts = []
        for parts_at_level in zip(*all_parts, strict=False):
            if len(set(parts_at_level)) == 1:
                common_parts.append(parts_at_level[0])
            else:
                break

        if not common_parts:
            # No common parent, use current directory
            return Path(".")

        # Reconstruct path from common parts
        return Path(*common_parts)
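
    # Sketch (hypothetical paths, not part of the packaged module): with
    # include_dirs of ["/srv/app/db/schema", "/srv/app/db/seeds"], zip(*all_parts)
    # compares ("/", "/"), ("srv", "srv"), ("app", "app"), ("db", "db") and stops
    # at ("schema", "seeds"), so base_dir becomes /srv/app/db and the "-- File:"
    # separators produced below show paths like schema/10_tables/users.sql
    # instead of absolute paths.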

    def find_sql_files(self) -> list[Path]:
        """Discover SQL files in all include directories.

        Files are returned in deterministic alphabetical order. Use numbered
        directories (00_common/, 10_tables/, 20_views/) to control ordering.

        Returns:
            Sorted list of SQL file paths

        Raises:
            SchemaError: If include directories don't exist or no SQL files found

        Example:
            >>> builder = SchemaBuilder(env="local")
            >>> files = builder.find_sql_files()
            >>> print(files[0])
            /path/to/db/schema/00_common/extensions.sql
        """
        all_sql_files = []

        # Collect SQL files from all include directories
        for include_dir in self.include_dirs:
            if not include_dir.exists():
                raise SchemaError(f"Include directory does not exist: {include_dir}")

            # Find all SQL files recursively in this directory
            sql_files = list(include_dir.rglob("*.sql"))
            all_sql_files.extend(sql_files)

        # Filter out excluded directories
        filtered_files = []
        exclude_paths = [Path(d) for d in self.env_config.exclude_dirs]

        for file in all_sql_files:
            # Check if file is in any excluded directory
            is_excluded = any(file.is_relative_to(exclude_dir) for exclude_dir in exclude_paths)
            if not is_excluded:
                filtered_files.append(file)

        if not filtered_files:
            include_dirs_str = ", ".join(str(d) for d in self.include_dirs)
            raise SchemaError(
                f"No SQL files found in include directories: {include_dirs_str}\n"
                "Expected files in subdirectories like 00_common/, 10_tables/, etc."
            )

        # Sort alphabetically for deterministic order
        return sorted(filtered_files)
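
    # Sketch (hypothetical layout, not part of the packaged module): ordering is
    # a plain sorted() over full paths, so numeric prefixes control build order:
    #     db/schema/00_common/extensions.sql
    #     db/schema/10_tables/users.sql
    #     db/schema/20_views/active_users.sql
    # find_sql_files() returns exactly this sequence, ensuring extensions exist
    # before the tables and views that use them.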

    def build(self, output_path: Path | None = None) -> str:
        """Build schema by concatenating DDL files.

        Generates a complete schema file by concatenating all SQL files in
        deterministic order, with headers and file separators.

        Performance: Uses Rust extension when available for 10-50x speedup.
        Falls back gracefully to the Python implementation if Rust is unavailable.

        Args:
            output_path: Optional path to write schema file. If None, only returns content.

        Returns:
            Generated schema content as string

        Raises:
            SchemaError: If schema build fails

        Example:
            >>> builder = SchemaBuilder(env="local")
            >>> schema = builder.build(output_path=Path("schema.sql"))
            >>> print(f"Generated {len(schema)} bytes")
        """
        files = self.find_sql_files()

        # Generate header
        header = self._generate_header(len(files))

        # Use Rust extension if available (10-50x faster)
        if HAS_RUST:
            try:
                # Build file content using Rust
                file_paths = [str(f) for f in files]
                content: str = _core.build_schema(file_paths)

                # Add headers and separators (Python side for flexibility)
                schema = self._add_headers_and_separators(header, files, content)
            except Exception:
                # Fall back to Python if Rust fails
                schema = self._build_python(header, files)
        else:
            # Pure Python implementation (fallback)
            schema = self._build_python(header, files)

        # Write to file if requested
        if output_path:
            try:
                output_path.parent.mkdir(parents=True, exist_ok=True)
                output_path.write_text(schema, encoding="utf-8")
            except Exception as e:
                raise SchemaError(f"Error writing schema to {output_path}: {e}") from e

        return schema

    def _build_python(self, header: str, files: list[Path]) -> str:
        """Pure Python implementation of schema building (fallback).

        Args:
            header: Schema header
            files: List of SQL files to concatenate

        Returns:
            Complete schema content
        """
        parts = [header]

        # Concatenate all files
        for file in files:
            try:
                # Relative path for header
                rel_path = file.relative_to(self.base_dir)

                # Add file separator
                parts.append("\n-- ============================================\n")
                parts.append(f"-- File: {rel_path}\n")
                parts.append("-- ============================================\n\n")

                # Add file content
                content = file.read_text(encoding="utf-8")
                parts.append(content)

                # Ensure newline at end
                if not content.endswith("\n"):
                    parts.append("\n")

            except Exception as e:
                raise SchemaError(f"Error reading {file}: {e}") from e

        return "".join(parts)

    def _add_headers_and_separators(self, header: str, _files: list[Path], content: str) -> str:
        """Add the main header to Rust-built content.

        The Rust layer already includes file separators, so this function
        only needs to prepend the main schema header.

        Args:
            header: Schema header
            _files: List of SQL files (unused, kept for API compatibility)
            content: Concatenated content from Rust (includes file separators)

        Returns:
            Content with main header
        """
        # Rust layer already includes file separators; just prepend the main header
        return header + content
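
    # Sketch (hypothetical file name, not part of the packaged module):
    # whichever path runs, build() returns one string shaped like
    #     <header from _generate_header()>
    #     -- ============================================
    #     -- File: schema/00_common/extensions.sql
    #     -- ============================================
    #     <file content>
    # repeated per file, which is what keeps the Rust and Python paths
    # interchangeable.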

    def compute_hash(self) -> str:
        """Compute deterministic SHA256 hash of schema.

        The hash includes both file paths and content, ensuring that any change
        to the schema (content or structure) is detected.

        Performance: Uses Rust extension when available for 30-60x speedup.

        Returns:
            SHA256 hexadecimal digest

        Example:
            >>> builder = SchemaBuilder(env="local")
            >>> hash1 = builder.compute_hash()
            >>> # Modify a file...
            >>> hash2 = builder.compute_hash()
            >>> assert hash1 != hash2  # Change detected
        """
        files = self.find_sql_files()

        # Use Rust extension if available (30-60x faster)
        if HAS_RUST:
            try:
                file_paths = [str(f) for f in files]
                hash_result: str = _core.hash_files(file_paths)
                return hash_result
            except Exception:
                # Fall back to Python if Rust fails
                pass

        # Pure Python implementation (fallback)
        hasher = hashlib.sha256()

        for file in files:
            # Include relative path in hash (detects file renames)
            rel_path = file.relative_to(self.base_dir)
            hasher.update(str(rel_path).encode("utf-8"))
            hasher.update(b"\x00")  # Separator

            # Include file content
            try:
                content = file.read_bytes()
                hasher.update(content)
                hasher.update(b"\x00")  # Separator
            except Exception as e:
                raise SchemaError(f"Error reading {file} for hash: {e}") from e

        return hasher.hexdigest()

    def _generate_header(self, file_count: int) -> str:
        """Generate schema file header.

        Args:
            file_count: Number of SQL files included

        Returns:
            Header string
        """
        timestamp = datetime.now().isoformat()
        schema_hash = self.compute_hash()

        return f"""-- ============================================
-- PostgreSQL Schema for Confiture
-- ============================================
--
-- Environment: {self.env_config.name}
-- Generated: {timestamp}
-- Schema Hash: {schema_hash}
-- Files Included: {file_count}
--
-- This file was generated by Confiture (confiture build)
-- DO NOT EDIT MANUALLY - Edit source files in db/schema/
--
-- ============================================

"""
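
Because compute_hash() feeds the "-- Schema Hash:" line that _generate_header()
embeds in every generated file, the pair supports a simple drift check: hash the
sources and compare against the hash recorded in a previous build. A minimal
sketch (the output path is hypothetical; only the methods above are assumed):

    from pathlib import Path

    from confiture.core.builder import SchemaBuilder

    builder = SchemaBuilder(env="local")
    current = builder.compute_hash()

    # Rebuild only if the sources no longer match the last generated schema.
    output = Path("db/generated/schema.sql")  # hypothetical location
    previous = output.read_text(encoding="utf-8") if output.exists() else ""
    if f"-- Schema Hash: {current}" not in previous:
        builder.build(output_path=output)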

confiture/core/connection.py
@@ -0,0 +1,120 @@
"""Database connection management for CLI commands."""

import importlib.util
import sys
from pathlib import Path
from types import ModuleType
from typing import Any

import psycopg
import yaml

from confiture.exceptions import MigrationError


def load_config(config_file: Path) -> dict[str, Any]:
    """Load configuration from YAML file.

    Args:
        config_file: Path to configuration file

    Returns:
        Configuration dictionary

    Raises:
        MigrationError: If config file is invalid
    """
    if not config_file.exists():
        raise MigrationError(f"Configuration file not found: {config_file}")

    try:
        with open(config_file) as f:
            config: dict[str, Any] = yaml.safe_load(f)
            return config
    except yaml.YAMLError as e:
        raise MigrationError(f"Invalid YAML configuration: {e}") from e


def create_connection(config: dict[str, Any] | Any) -> psycopg.Connection:
    """Create database connection from configuration.

    Args:
        config: Configuration dictionary with 'database' section or DatabaseConfig instance

    Returns:
        PostgreSQL connection

    Raises:
        MigrationError: If connection fails
    """
    from confiture.config.environment import DatabaseConfig

    # Handle DatabaseConfig instance
    if isinstance(config, DatabaseConfig):
        config_dict = config.to_dict()
        db_config = config_dict.get("database", {})
    else:
        db_config = config.get("database", {})

    try:
        conn = psycopg.connect(
            host=db_config.get("host", "localhost"),
            port=db_config.get("port", 5432),
            dbname=db_config.get("database", "postgres"),
            user=db_config.get("user", "postgres"),
            password=db_config.get("password", ""),
        )
        return conn
    except psycopg.Error as e:
        raise MigrationError(f"Failed to connect to database: {e}") from e
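
# Sketch (hypothetical values, not part of the packaged module): both call forms
# reach the same psycopg.connect() arguments, because DatabaseConfig.to_dict()
# produces the {"database": {...}} shape expected here:
#
#     conn = create_connection({"database": {"host": "localhost", "database": "mydb"}})
#     conn = create_connection(DatabaseConfig.from_url("postgresql://u:p@localhost/mydb"))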


def load_migration_module(migration_file: Path) -> ModuleType:
    """Dynamically load a migration Python module.

    Args:
        migration_file: Path to migration .py file

    Returns:
        Loaded module

    Raises:
        MigrationError: If module cannot be loaded
    """
    try:
        # Create module spec
        spec = importlib.util.spec_from_file_location(migration_file.stem, migration_file)
        if spec is None or spec.loader is None:
            raise MigrationError(f"Cannot load migration: {migration_file}")

        # Load module
        module = importlib.util.module_from_spec(spec)
        sys.modules[migration_file.stem] = module
        spec.loader.exec_module(module)

        return module
    except Exception as e:
        raise MigrationError(f"Failed to load migration {migration_file}: {e}") from e


def get_migration_class(module: ModuleType) -> type:
    """Extract Migration subclass from loaded module.

    Args:
        module: Loaded Python module

    Returns:
        Migration class

    Raises:
        MigrationError: If no Migration class found
    """
    from confiture.models.migration import Migration

    # Find Migration subclass in module
    for attr_name in dir(module):
        attr = getattr(module, attr_name)
        if isinstance(attr, type) and issubclass(attr, Migration) and attr is not Migration:
            return attr

    raise MigrationError(f"No Migration subclass found in {module}")
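
Together these helpers let the CLI run a migration file without importing it by
name. A sketch of the flow (paths are hypothetical, and the Migration base class
lives in confiture/models/migration.py, which this diff does not show, so what to
do with the returned class is left open):

    from pathlib import Path

    from confiture.core.connection import (
        create_connection,
        get_migration_class,
        load_config,
        load_migration_module,
    )

    config = load_config(Path("confiture.yaml"))  # hypothetical file with a "database:" section
    conn = create_connection(config)

    module = load_migration_module(Path("db/migrations/0001_init.py"))  # hypothetical path
    migration_cls = get_migration_class(module)
    # Applying migration_cls depends on the Migration model, which is not in this diff.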