fraiseql-confiture 0.3.4__cp311-cp311-win_amd64.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (119) hide show
  1. confiture/__init__.py +48 -0
  2. confiture/_core.cp311-win_amd64.pyd +0 -0
  3. confiture/cli/__init__.py +0 -0
  4. confiture/cli/dry_run.py +116 -0
  5. confiture/cli/lint_formatter.py +193 -0
  6. confiture/cli/main.py +1656 -0
  7. confiture/config/__init__.py +0 -0
  8. confiture/config/environment.py +263 -0
  9. confiture/core/__init__.py +51 -0
  10. confiture/core/anonymization/__init__.py +0 -0
  11. confiture/core/anonymization/audit.py +485 -0
  12. confiture/core/anonymization/benchmarking.py +372 -0
  13. confiture/core/anonymization/breach_notification.py +652 -0
  14. confiture/core/anonymization/compliance.py +617 -0
  15. confiture/core/anonymization/composer.py +298 -0
  16. confiture/core/anonymization/data_subject_rights.py +669 -0
  17. confiture/core/anonymization/factory.py +319 -0
  18. confiture/core/anonymization/governance.py +737 -0
  19. confiture/core/anonymization/performance.py +1092 -0
  20. confiture/core/anonymization/profile.py +284 -0
  21. confiture/core/anonymization/registry.py +195 -0
  22. confiture/core/anonymization/security/kms_manager.py +547 -0
  23. confiture/core/anonymization/security/lineage.py +888 -0
  24. confiture/core/anonymization/security/token_store.py +686 -0
  25. confiture/core/anonymization/strategies/__init__.py +41 -0
  26. confiture/core/anonymization/strategies/address.py +359 -0
  27. confiture/core/anonymization/strategies/credit_card.py +374 -0
  28. confiture/core/anonymization/strategies/custom.py +161 -0
  29. confiture/core/anonymization/strategies/date.py +218 -0
  30. confiture/core/anonymization/strategies/differential_privacy.py +398 -0
  31. confiture/core/anonymization/strategies/email.py +141 -0
  32. confiture/core/anonymization/strategies/format_preserving_encryption.py +310 -0
  33. confiture/core/anonymization/strategies/hash.py +150 -0
  34. confiture/core/anonymization/strategies/ip_address.py +235 -0
  35. confiture/core/anonymization/strategies/masking_retention.py +252 -0
  36. confiture/core/anonymization/strategies/name.py +298 -0
  37. confiture/core/anonymization/strategies/phone.py +119 -0
  38. confiture/core/anonymization/strategies/preserve.py +85 -0
  39. confiture/core/anonymization/strategies/redact.py +101 -0
  40. confiture/core/anonymization/strategies/salted_hashing.py +322 -0
  41. confiture/core/anonymization/strategies/text_redaction.py +183 -0
  42. confiture/core/anonymization/strategies/tokenization.py +334 -0
  43. confiture/core/anonymization/strategy.py +241 -0
  44. confiture/core/anonymization/syncer_audit.py +357 -0
  45. confiture/core/blue_green.py +683 -0
  46. confiture/core/builder.py +500 -0
  47. confiture/core/checksum.py +358 -0
  48. confiture/core/connection.py +132 -0
  49. confiture/core/differ.py +522 -0
  50. confiture/core/drift.py +564 -0
  51. confiture/core/dry_run.py +182 -0
  52. confiture/core/health.py +313 -0
  53. confiture/core/hooks/__init__.py +87 -0
  54. confiture/core/hooks/base.py +232 -0
  55. confiture/core/hooks/context.py +146 -0
  56. confiture/core/hooks/execution_strategies.py +57 -0
  57. confiture/core/hooks/observability.py +220 -0
  58. confiture/core/hooks/phases.py +53 -0
  59. confiture/core/hooks/registry.py +295 -0
  60. confiture/core/large_tables.py +775 -0
  61. confiture/core/linting/__init__.py +70 -0
  62. confiture/core/linting/composer.py +192 -0
  63. confiture/core/linting/libraries/__init__.py +17 -0
  64. confiture/core/linting/libraries/gdpr.py +168 -0
  65. confiture/core/linting/libraries/general.py +184 -0
  66. confiture/core/linting/libraries/hipaa.py +144 -0
  67. confiture/core/linting/libraries/pci_dss.py +104 -0
  68. confiture/core/linting/libraries/sox.py +120 -0
  69. confiture/core/linting/schema_linter.py +491 -0
  70. confiture/core/linting/versioning.py +151 -0
  71. confiture/core/locking.py +389 -0
  72. confiture/core/migration_generator.py +298 -0
  73. confiture/core/migrator.py +793 -0
  74. confiture/core/observability/__init__.py +44 -0
  75. confiture/core/observability/audit.py +323 -0
  76. confiture/core/observability/logging.py +187 -0
  77. confiture/core/observability/metrics.py +174 -0
  78. confiture/core/observability/tracing.py +192 -0
  79. confiture/core/pg_version.py +418 -0
  80. confiture/core/pool.py +406 -0
  81. confiture/core/risk/__init__.py +39 -0
  82. confiture/core/risk/predictor.py +188 -0
  83. confiture/core/risk/scoring.py +248 -0
  84. confiture/core/rollback_generator.py +388 -0
  85. confiture/core/schema_analyzer.py +769 -0
  86. confiture/core/schema_to_schema.py +590 -0
  87. confiture/core/security/__init__.py +32 -0
  88. confiture/core/security/logging.py +201 -0
  89. confiture/core/security/validation.py +416 -0
  90. confiture/core/signals.py +371 -0
  91. confiture/core/syncer.py +540 -0
  92. confiture/exceptions.py +192 -0
  93. confiture/integrations/__init__.py +0 -0
  94. confiture/models/__init__.py +0 -0
  95. confiture/models/lint.py +193 -0
  96. confiture/models/migration.py +180 -0
  97. confiture/models/schema.py +203 -0
  98. confiture/scenarios/__init__.py +36 -0
  99. confiture/scenarios/compliance.py +586 -0
  100. confiture/scenarios/ecommerce.py +199 -0
  101. confiture/scenarios/financial.py +253 -0
  102. confiture/scenarios/healthcare.py +315 -0
  103. confiture/scenarios/multi_tenant.py +340 -0
  104. confiture/scenarios/saas.py +295 -0
  105. confiture/testing/FRAMEWORK_API.md +722 -0
  106. confiture/testing/__init__.py +38 -0
  107. confiture/testing/fixtures/__init__.py +11 -0
  108. confiture/testing/fixtures/data_validator.py +229 -0
  109. confiture/testing/fixtures/migration_runner.py +167 -0
  110. confiture/testing/fixtures/schema_snapshotter.py +352 -0
  111. confiture/testing/frameworks/__init__.py +10 -0
  112. confiture/testing/frameworks/mutation.py +587 -0
  113. confiture/testing/frameworks/performance.py +479 -0
  114. confiture/testing/utils/__init__.py +0 -0
  115. fraiseql_confiture-0.3.4.dist-info/METADATA +438 -0
  116. fraiseql_confiture-0.3.4.dist-info/RECORD +119 -0
  117. fraiseql_confiture-0.3.4.dist-info/WHEEL +4 -0
  118. fraiseql_confiture-0.3.4.dist-info/entry_points.txt +2 -0
  119. fraiseql_confiture-0.3.4.dist-info/licenses/LICENSE +21 -0
@@ -0,0 +1,564 @@
1
+ """Schema drift detection for Confiture.
2
+
3
+ Compares live database schema against expected state from migrations
4
+ to detect unauthorized changes or migration mishaps.
5
+ """
6
+
7
+ import logging
8
+ import time
9
+ from dataclasses import dataclass, field
10
+ from enum import Enum
11
+ from typing import Any
12
+
13
+ import psycopg
14
+
15
+ from confiture.core.schema_analyzer import SchemaAnalyzer, SchemaInfo
16
+
17
+ logger = logging.getLogger(__name__)
18
+
19
+
20
class DriftType(Enum):
    """Types of schema drift.

    Categorizes a single difference between the expected schema and the
    live database schema. "Missing" means present in the expected schema
    but absent from the database; "extra" means the reverse.
    """

    MISSING_TABLE = "missing_table"  # expected table absent from database
    EXTRA_TABLE = "extra_table"  # database table not in expected schema
    MISSING_COLUMN = "missing_column"  # expected column absent from table
    EXTRA_COLUMN = "extra_column"  # table column not in expected schema
    TYPE_MISMATCH = "type_mismatch"  # column type differs from expected
    NULLABLE_MISMATCH = "nullable_mismatch"  # NULL/NOT NULL differs from expected
    # NOTE(review): DEFAULT_MISMATCH and the constraint types below are
    # declared but not produced by SchemaDriftDetector in this module —
    # presumably reserved for future checks; confirm before relying on them.
    DEFAULT_MISMATCH = "default_mismatch"  # column default differs from expected
    MISSING_INDEX = "missing_index"  # expected index absent from database
    EXTRA_INDEX = "extra_index"  # database index not in expected schema
    MISSING_CONSTRAINT = "missing_constraint"  # expected constraint absent
    EXTRA_CONSTRAINT = "extra_constraint"  # database constraint not expected
34
+
35
+
36
class DriftSeverity(Enum):
    """Severity of drift.

    Used to rank drift items so callers can fail hard on CRITICAL items
    while merely reporting WARNING/INFO ones.
    """

    CRITICAL = "critical"  # Missing table/column — queries may break
    WARNING = "warning"  # Extra objects, type changes — likely benign but suspect
    INFO = "info"  # Minor differences (e.g. extra indexes)
42
+
43
+
44
@dataclass
class DriftItem:
    """A single detected schema difference.

    Attributes:
        drift_type: Category of the difference (missing table, type
            mismatch, ...).
        severity: How serious the difference is.
        object_name: Fully qualified name of the drifted object
            (e.g. ``"users.email"``).
        expected: Expected value/definition, or ``None`` when the object
            should not exist.
        actual: Observed value/definition, or ``None`` when the object
            is absent.
        message: Human-readable description of the drift.
    """

    drift_type: DriftType
    severity: DriftSeverity
    object_name: str
    expected: Any = None
    actual: Any = None
    message: str = ""

    def __str__(self) -> str:
        """Render as ``[severity] type: message`` for console output."""
        return "[{}] {}: {}".format(
            self.severity.value, self.drift_type.value, self.message
        )

    def to_dict(self) -> dict[str, Any]:
        """Convert to dictionary for JSON serialization."""

        def stringify(value: Any) -> str | None:
            # Keep None as None (JSON null); stringify everything else so
            # arbitrary expected/actual payloads serialize cleanly.
            return None if value is None else str(value)

        return {
            "type": self.drift_type.value,
            "severity": self.severity.value,
            "object": self.object_name,
            "expected": stringify(self.expected),
            "actual": stringify(self.actual),
            "message": self.message,
        }
68
+
69
+
70
@dataclass
class DriftReport:
    """Aggregated result of a schema drift detection run.

    Attributes:
        database_name: Name of the inspected database.
        expected_schema_source: Where the expected schema came from
            ("migrations", "provided", or a ``file:...`` path).
        drift_items: All detected differences.
        tables_checked: Number of tables compared column-by-column.
        columns_checked: Number of columns compared in detail.
        indexes_checked: Number of indexes examined.
        detection_time_ms: Wall-clock duration of the comparison.
    """

    database_name: str
    expected_schema_source: str  # "migrations" or file path
    drift_items: list[DriftItem] = field(default_factory=list)
    tables_checked: int = 0
    columns_checked: int = 0
    indexes_checked: int = 0
    detection_time_ms: int = 0

    def _count(self, severity: DriftSeverity) -> int:
        """Number of drift items carrying the given severity."""
        return sum(1 for item in self.drift_items if item.severity is severity)

    @property
    def has_drift(self) -> bool:
        """Check if any drift was detected."""
        return bool(self.drift_items)

    @property
    def has_critical_drift(self) -> bool:
        """Check if any critical drift was detected."""
        return any(
            item.severity is DriftSeverity.CRITICAL for item in self.drift_items
        )

    @property
    def critical_count(self) -> int:
        """Count of critical drift items."""
        return self._count(DriftSeverity.CRITICAL)

    @property
    def warning_count(self) -> int:
        """Count of warning drift items."""
        return self._count(DriftSeverity.WARNING)

    @property
    def info_count(self) -> int:
        """Count of info drift items."""
        return self._count(DriftSeverity.INFO)

    def to_dict(self) -> dict[str, Any]:
        """Convert to dictionary for JSON serialization."""
        return {
            "database_name": self.database_name,
            "expected_schema_source": self.expected_schema_source,
            "has_drift": self.has_drift,
            "has_critical_drift": self.has_critical_drift,
            "critical_count": self.critical_count,
            "warning_count": self.warning_count,
            "info_count": self.info_count,
            "tables_checked": self.tables_checked,
            "columns_checked": self.columns_checked,
            "indexes_checked": self.indexes_checked,
            "detection_time_ms": self.detection_time_ms,
            "drift_items": [item.to_dict() for item in self.drift_items],
        }
123
+
124
+
125
class SchemaDriftDetector:
    """Detects schema drift between live database and expected state.

    Compares live database schema against expected state to find:
    - Missing/extra tables
    - Missing/extra columns
    - Type mismatches
    - Nullable mismatches
    - Missing/extra indexes

    Example:
        >>> detector = SchemaDriftDetector(conn)
        >>> report = detector.compare_with_expected(expected_schema)
        >>> if report.has_critical_drift:
        ...     print("CRITICAL: Schema has drifted!")
        ...     for item in report.drift_items:
        ...         print(f"  {item}")
    """

    # Confiture's own bookkeeping tables — always excluded from detection.
    SYSTEM_TABLES = {
        "confiture_migrations",
        "confiture_version",
        "confiture_audit_log",
    }

    # Equivalent PostgreSQL type spellings (SQL-standard name -> internal
    # pg_type name). Class-level constant so the mapping is not rebuilt on
    # every column comparison.
    _TYPE_ALIASES = {
        "integer": "int4",
        "int": "int4",
        "bigint": "int8",
        "smallint": "int2",
        "boolean": "bool",
        "character varying": "varchar",
        "character": "char",
        "double precision": "float8",
        "real": "float4",
        "timestamp without time zone": "timestamp",
        "timestamp with time zone": "timestamptz",
    }

    def __init__(
        self,
        connection: psycopg.Connection,
        ignore_tables: list[str] | None = None,
    ):
        """Initialize drift detector.

        Args:
            connection: Database connection
            ignore_tables: Additional tables to ignore in drift detection
        """
        self.connection = connection
        self.analyzer = SchemaAnalyzer(connection)
        self.ignore_tables = set(ignore_tables or [])
        # Always ignore Confiture's own tables
        self.ignore_tables.update(self.SYSTEM_TABLES)

    def compare_schemas(
        self,
        expected: SchemaInfo,
        actual: SchemaInfo,
    ) -> DriftReport:
        """Compare two schema info objects.

        Missing tables/columns are CRITICAL; extra tables/columns and type
        or nullability mismatches are WARNING; extra indexes are INFO.

        Args:
            expected: Expected schema state
            actual: Actual (live) schema state

        Returns:
            DriftReport with differences
        """
        start_time = time.perf_counter()

        report = DriftReport(
            database_name=self._get_database_name(),
            expected_schema_source="provided",
        )

        # Compare table sets, excluding ignored/system tables.
        expected_tables = set(expected.tables.keys()) - self.ignore_tables
        actual_tables = set(actual.tables.keys()) - self.ignore_tables

        # Missing tables (in expected but not actual) — critical: queries
        # against these tables will fail.
        for table in sorted(expected_tables - actual_tables):
            report.drift_items.append(
                DriftItem(
                    drift_type=DriftType.MISSING_TABLE,
                    severity=DriftSeverity.CRITICAL,
                    object_name=table,
                    expected=table,
                    actual=None,
                    message=f"Table '{table}' is missing from database",
                )
            )

        # Extra tables (in actual but not expected) — warning only: they do
        # not break the application but indicate unmanaged changes.
        for table in sorted(actual_tables - expected_tables):
            report.drift_items.append(
                DriftItem(
                    drift_type=DriftType.EXTRA_TABLE,
                    severity=DriftSeverity.WARNING,
                    object_name=table,
                    expected=None,
                    actual=table,
                    message=f"Table '{table}' exists but is not in expected schema",
                )
            )

        # Compare columns for tables that exist in both
        for table in sorted(expected_tables & actual_tables):
            report.tables_checked += 1
            self._compare_table_columns(
                table,
                expected.tables[table],
                actual.tables[table],
                report,
            )

        # Compare indexes
        self._compare_indexes(expected, actual, report)

        report.detection_time_ms = int((time.perf_counter() - start_time) * 1000)
        return report

    def _compare_table_columns(
        self,
        table_name: str,
        expected_cols: dict[str, dict],
        actual_cols: dict[str, dict],
        report: DriftReport,
    ) -> None:
        """Compare columns for a single table, appending drift to report.

        Column dicts are expected to carry "type" and "nullable" keys;
        absent keys simply skip that check rather than raising.
        """
        expected_col_names = set(expected_cols.keys())
        actual_col_names = set(actual_cols.keys())

        # Missing columns — critical.
        for col in sorted(expected_col_names - actual_col_names):
            report.drift_items.append(
                DriftItem(
                    drift_type=DriftType.MISSING_COLUMN,
                    severity=DriftSeverity.CRITICAL,
                    object_name=f"{table_name}.{col}",
                    expected=expected_cols[col],
                    actual=None,
                    message=f"Column '{table_name}.{col}' is missing",
                )
            )

        # Extra columns — warning.
        for col in sorted(actual_col_names - expected_col_names):
            report.drift_items.append(
                DriftItem(
                    drift_type=DriftType.EXTRA_COLUMN,
                    severity=DriftSeverity.WARNING,
                    object_name=f"{table_name}.{col}",
                    expected=None,
                    actual=actual_cols[col],
                    message=f"Column '{table_name}.{col}' exists but is not expected",
                )
            )

        # Compare matching columns in detail.
        for col in sorted(expected_col_names & actual_col_names):
            report.columns_checked += 1
            exp = expected_cols[col]
            act = actual_cols[col]

            # Type mismatch — tolerate alias spellings (integer vs int4).
            exp_type = exp.get("type", "").lower()
            act_type = act.get("type", "").lower()
            if (
                exp_type
                and act_type
                and exp_type != act_type
                and not self._types_compatible(exp_type, act_type)
            ):
                report.drift_items.append(
                    DriftItem(
                        drift_type=DriftType.TYPE_MISMATCH,
                        severity=DriftSeverity.WARNING,
                        object_name=f"{table_name}.{col}",
                        expected=exp_type,
                        actual=act_type,
                        message=f"Column '{table_name}.{col}' type mismatch: "
                        f"expected {exp_type}, got {act_type}",
                    )
                )

            # Nullable mismatch — only when both sides declare nullability.
            exp_nullable = exp.get("nullable")
            act_nullable = act.get("nullable")
            if (
                exp_nullable is not None
                and act_nullable is not None
                and exp_nullable != act_nullable
            ):
                report.drift_items.append(
                    DriftItem(
                        drift_type=DriftType.NULLABLE_MISMATCH,
                        severity=DriftSeverity.WARNING,
                        object_name=f"{table_name}.{col}",
                        expected=f"nullable={exp_nullable}",
                        actual=f"nullable={act_nullable}",
                        message=f"Column '{table_name}.{col}' nullable mismatch: "
                        f"expected {exp_nullable}, got {act_nullable}",
                    )
                )

    def _types_compatible(self, type1: str, type2: str) -> bool:
        """Check if two PostgreSQL types are compatible/equivalent.

        Both names are normalized through _TYPE_ALIASES before comparison,
        so e.g. "integer" and "int4" are treated as the same type.
        """
        t1 = self._TYPE_ALIASES.get(type1.lower(), type1.lower())
        t2 = self._TYPE_ALIASES.get(type2.lower(), type2.lower())
        return t1 == t2

    def _compare_indexes(
        self,
        expected: SchemaInfo,
        actual: SchemaInfo,
        report: DriftReport,
    ) -> None:
        """Compare indexes between schemas.

        Iterates the union of tables from both index maps. (Fix: the
        previous implementation only walked ``expected.indexes``, so extra
        indexes on tables absent from the expected index map were never
        reported.) ``indexes_checked`` now counts every index examined,
        not just the drifted ones, matching the semantics of
        ``tables_checked``/``columns_checked``.
        """
        all_tables = (set(expected.indexes) | set(actual.indexes)) - self.ignore_tables

        for table in sorted(all_tables):
            exp_indexes = set(expected.indexes.get(table, []))
            act_indexes = set(actual.indexes.get(table, []))

            # Every index present on either side is examined.
            report.indexes_checked += len(exp_indexes | act_indexes)

            # Missing indexes — warning: likely a performance regression.
            for idx in sorted(exp_indexes - act_indexes):
                report.drift_items.append(
                    DriftItem(
                        drift_type=DriftType.MISSING_INDEX,
                        severity=DriftSeverity.WARNING,
                        object_name=f"{table}.{idx}",
                        expected=idx,
                        actual=None,
                        message=f"Index '{idx}' on '{table}' is missing",
                    )
                )

            # Extra indexes — info only: harmless besides write overhead.
            for idx in sorted(act_indexes - exp_indexes):
                report.drift_items.append(
                    DriftItem(
                        drift_type=DriftType.EXTRA_INDEX,
                        severity=DriftSeverity.INFO,
                        object_name=f"{table}.{idx}",
                        expected=None,
                        actual=idx,
                        message=f"Index '{idx}' on '{table}' exists but is not expected",
                    )
                )

    def get_live_schema(self) -> SchemaInfo:
        """Get the current live database schema.

        Returns:
            SchemaInfo with current database state
        """
        return self.analyzer.get_schema_info(refresh=True)

    def compare_with_expected(self, expected: SchemaInfo) -> DriftReport:
        """Compare live database with expected schema.

        Args:
            expected: Expected schema state

        Returns:
            DriftReport with differences
        """
        actual = self.get_live_schema()
        report = self.compare_schemas(expected, actual)
        report.expected_schema_source = "provided"
        return report

    def compare_with_schema_file(self, schema_file_path: str) -> DriftReport:
        """Compare live database with a schema SQL file.

        This parses a SQL schema file to extract expected schema.

        Args:
            schema_file_path: Path to schema SQL file

        Returns:
            DriftReport with differences

        Raises:
            FileNotFoundError: If the schema file does not exist.
        """
        from pathlib import Path

        path = Path(schema_file_path)
        if not path.exists():
            raise FileNotFoundError(f"Schema file not found: {schema_file_path}")

        sql_content = path.read_text()
        expected = self._parse_schema_from_sql(sql_content)

        actual = self.get_live_schema()
        report = self.compare_schemas(expected, actual)
        report.expected_schema_source = f"file:{schema_file_path}"
        return report

    def _parse_schema_from_sql(self, sql: str) -> SchemaInfo:
        """Parse SQL DDL to extract schema information.

        This is a simplified parser that extracts table and column info
        from CREATE TABLE statements. Schema-qualified names
        (e.g. ``public.users``) are supported; the schema prefix is
        dropped and only the object name is kept. (Fix: the previous
        regexes captured the schema name instead of the table name for
        qualified DDL.)

        Args:
            sql: SQL DDL statements

        Returns:
            SchemaInfo extracted from SQL
        """
        import re

        import sqlparse

        info = SchemaInfo()

        # Parse CREATE TABLE statements
        statements = sqlparse.parse(sql)
        for stmt in statements:
            stmt_str = str(stmt).strip()
            if not stmt_str:
                continue

            # Check for CREATE TABLE (with optional schema qualifier;
            # capture the table name, not the schema).
            match = re.match(
                r"CREATE\s+TABLE\s+(?:IF\s+NOT\s+EXISTS\s+)?"
                r"(?:(?:\")?\w+(?:\")?\.)?(?:\")?(\w+)(?:\")?",
                stmt_str,
                re.IGNORECASE,
            )
            if match:
                table_name = match.group(1).lower()
                columns = self._extract_columns_from_create(stmt_str)
                info.tables[table_name] = columns

            # Check for CREATE INDEX (table may be schema-qualified).
            match = re.match(
                r"CREATE\s+(?:UNIQUE\s+)?INDEX\s+(?:CONCURRENTLY\s+)?"
                r"(?:IF\s+NOT\s+EXISTS\s+)?(?:\")?(\w+)(?:\")?\s+ON\s+"
                r"(?:(?:\")?\w+(?:\")?\.)?(?:\")?(\w+)(?:\")?",
                stmt_str,
                re.IGNORECASE,
            )
            if match:
                index_name = match.group(1).lower()
                table_name = match.group(2).lower()
                if table_name not in info.indexes:
                    info.indexes[table_name] = []
                info.indexes[table_name].append(index_name)

        return info

    def _extract_columns_from_create(self, create_stmt: str) -> dict[str, dict]:
        """Extract column definitions from CREATE TABLE statement.

        Returns a mapping of column name -> {"type", "nullable", "default"}.
        Table-level constraints (PRIMARY KEY (...), CHECK (...), etc.) are
        skipped. Defaults are not parsed and always reported as None.
        """
        import re

        columns: dict[str, dict] = {}

        # Find the column definitions between parentheses.
        # NOTE(review): greedy match — assumes the statement ends at the
        # closing paren of the column list (no trailing WITH (...) clause).
        match = re.search(r"\((.*)\)", create_stmt, re.DOTALL)
        if not match:
            return columns

        definitions = match.group(1)

        # Split by comma, but be careful about nested parentheses
        parts = self._split_column_definitions(definitions)

        # Table-level constraint keywords. Matched with a word boundary so
        # columns whose names merely START with a keyword ("unique_id",
        # "checksum", ...) are not skipped (fix for the previous
        # startswith-based check).
        constraint_re = re.compile(
            r"(?:PRIMARY\s+KEY|FOREIGN\s+KEY|UNIQUE|CHECK|CONSTRAINT)\b",
            re.IGNORECASE,
        )

        for part in parts:
            part = part.strip()
            if not part:
                continue

            upper_part = part.upper()

            # Skip table-level constraints (start with a constraint keyword)
            # but NOT column definitions that happen to have PRIMARY KEY inline.
            if constraint_re.match(part):
                continue

            # Parse column definition: name then type (with optional args).
            col_match = re.match(r"(?:\")?(\w+)(?:\")?\s+(\w+(?:\([^)]*\))?)", part)
            if col_match:
                col_name = col_match.group(1).lower()
                col_type = col_match.group(2).lower()

                # Check for NOT NULL (PRIMARY KEY implies NOT NULL)
                nullable = "NOT NULL" not in upper_part and "PRIMARY KEY" not in upper_part

                columns[col_name] = {
                    "type": col_type,
                    "nullable": nullable,
                    "default": None,
                }

        return columns

    def _split_column_definitions(self, definitions: str) -> list[str]:
        """Split column definitions on commas, respecting parentheses.

        Commas inside parentheses (e.g. ``numeric(10, 2)``) do not split.
        """
        parts = []
        current = []
        depth = 0

        for char in definitions:
            if char == "(":
                depth += 1
                current.append(char)
            elif char == ")":
                depth -= 1
                current.append(char)
            elif char == "," and depth == 0:
                parts.append("".join(current))
                current = []
            else:
                current.append(char)

        if current:
            parts.append("".join(current))

        return parts

    def _get_database_name(self) -> str:
        """Get current database name via ``SELECT current_database()``."""
        with self.connection.cursor() as cur:
            cur.execute("SELECT current_database()")
            result = cur.fetchone()
            return result[0] if result else "unknown"