remdb 0.3.14__py3-none-any.whl → 0.3.157__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (112) hide show
  1. rem/agentic/README.md +76 -0
  2. rem/agentic/__init__.py +15 -0
  3. rem/agentic/agents/__init__.py +32 -2
  4. rem/agentic/agents/agent_manager.py +310 -0
  5. rem/agentic/agents/sse_simulator.py +502 -0
  6. rem/agentic/context.py +51 -27
  7. rem/agentic/context_builder.py +5 -3
  8. rem/agentic/llm_provider_models.py +301 -0
  9. rem/agentic/mcp/tool_wrapper.py +155 -18
  10. rem/agentic/otel/setup.py +93 -4
  11. rem/agentic/providers/phoenix.py +371 -108
  12. rem/agentic/providers/pydantic_ai.py +280 -57
  13. rem/agentic/schema.py +361 -21
  14. rem/agentic/tools/rem_tools.py +3 -3
  15. rem/api/README.md +215 -1
  16. rem/api/deps.py +255 -0
  17. rem/api/main.py +132 -40
  18. rem/api/mcp_router/resources.py +1 -1
  19. rem/api/mcp_router/server.py +28 -5
  20. rem/api/mcp_router/tools.py +555 -7
  21. rem/api/routers/admin.py +494 -0
  22. rem/api/routers/auth.py +278 -4
  23. rem/api/routers/chat/completions.py +402 -20
  24. rem/api/routers/chat/models.py +88 -10
  25. rem/api/routers/chat/otel_utils.py +33 -0
  26. rem/api/routers/chat/sse_events.py +542 -0
  27. rem/api/routers/chat/streaming.py +697 -45
  28. rem/api/routers/dev.py +81 -0
  29. rem/api/routers/feedback.py +268 -0
  30. rem/api/routers/messages.py +473 -0
  31. rem/api/routers/models.py +78 -0
  32. rem/api/routers/query.py +360 -0
  33. rem/api/routers/shared_sessions.py +406 -0
  34. rem/auth/__init__.py +13 -3
  35. rem/auth/middleware.py +186 -22
  36. rem/auth/providers/__init__.py +4 -1
  37. rem/auth/providers/email.py +215 -0
  38. rem/cli/commands/README.md +237 -64
  39. rem/cli/commands/cluster.py +1808 -0
  40. rem/cli/commands/configure.py +4 -7
  41. rem/cli/commands/db.py +386 -143
  42. rem/cli/commands/experiments.py +468 -76
  43. rem/cli/commands/process.py +14 -8
  44. rem/cli/commands/schema.py +97 -50
  45. rem/cli/commands/session.py +336 -0
  46. rem/cli/dreaming.py +2 -2
  47. rem/cli/main.py +29 -6
  48. rem/config.py +10 -3
  49. rem/models/core/core_model.py +7 -1
  50. rem/models/core/experiment.py +58 -14
  51. rem/models/core/rem_query.py +5 -2
  52. rem/models/entities/__init__.py +25 -0
  53. rem/models/entities/domain_resource.py +38 -0
  54. rem/models/entities/feedback.py +123 -0
  55. rem/models/entities/message.py +30 -1
  56. rem/models/entities/ontology.py +1 -1
  57. rem/models/entities/ontology_config.py +1 -1
  58. rem/models/entities/session.py +83 -0
  59. rem/models/entities/shared_session.py +180 -0
  60. rem/models/entities/subscriber.py +175 -0
  61. rem/models/entities/user.py +1 -0
  62. rem/registry.py +10 -4
  63. rem/schemas/agents/core/agent-builder.yaml +134 -0
  64. rem/schemas/agents/examples/contract-analyzer.yaml +1 -1
  65. rem/schemas/agents/examples/contract-extractor.yaml +1 -1
  66. rem/schemas/agents/examples/cv-parser.yaml +1 -1
  67. rem/schemas/agents/rem.yaml +7 -3
  68. rem/services/__init__.py +3 -1
  69. rem/services/content/service.py +92 -19
  70. rem/services/email/__init__.py +10 -0
  71. rem/services/email/service.py +459 -0
  72. rem/services/email/templates.py +360 -0
  73. rem/services/embeddings/api.py +4 -4
  74. rem/services/embeddings/worker.py +16 -16
  75. rem/services/phoenix/client.py +154 -14
  76. rem/services/postgres/README.md +197 -15
  77. rem/services/postgres/__init__.py +2 -1
  78. rem/services/postgres/diff_service.py +547 -0
  79. rem/services/postgres/pydantic_to_sqlalchemy.py +470 -140
  80. rem/services/postgres/repository.py +132 -0
  81. rem/services/postgres/schema_generator.py +205 -4
  82. rem/services/postgres/service.py +6 -6
  83. rem/services/rem/parser.py +44 -9
  84. rem/services/rem/service.py +36 -2
  85. rem/services/session/compression.py +137 -51
  86. rem/services/session/reload.py +15 -8
  87. rem/settings.py +515 -27
  88. rem/sql/background_indexes.sql +21 -16
  89. rem/sql/migrations/001_install.sql +387 -54
  90. rem/sql/migrations/002_install_models.sql +2304 -377
  91. rem/sql/migrations/003_optional_extensions.sql +326 -0
  92. rem/sql/migrations/004_cache_system.sql +548 -0
  93. rem/sql/migrations/005_schema_update.sql +145 -0
  94. rem/utils/README.md +45 -0
  95. rem/utils/__init__.py +18 -0
  96. rem/utils/date_utils.py +2 -2
  97. rem/utils/files.py +157 -1
  98. rem/utils/model_helpers.py +156 -1
  99. rem/utils/schema_loader.py +220 -22
  100. rem/utils/sql_paths.py +146 -0
  101. rem/utils/sql_types.py +3 -1
  102. rem/utils/vision.py +1 -1
  103. rem/workers/__init__.py +3 -1
  104. rem/workers/db_listener.py +579 -0
  105. rem/workers/unlogged_maintainer.py +463 -0
  106. {remdb-0.3.14.dist-info → remdb-0.3.157.dist-info}/METADATA +340 -229
  107. {remdb-0.3.14.dist-info → remdb-0.3.157.dist-info}/RECORD +109 -80
  108. {remdb-0.3.14.dist-info → remdb-0.3.157.dist-info}/WHEEL +1 -1
  109. rem/sql/002_install_models.sql +0 -1068
  110. rem/sql/install_models.sql +0 -1051
  111. rem/sql/migrations/003_seed_default_user.sql +0 -48
  112. {remdb-0.3.14.dist-info → remdb-0.3.157.dist-info}/entry_points.txt +0 -0
@@ -0,0 +1,547 @@
1
+ """
2
+ Schema diff service for comparing Pydantic models against database.
3
+
4
+ Uses Alembic autogenerate to detect differences between:
5
+ - Target schema (derived from Pydantic models)
6
+ - Current database schema
7
+
8
+ This enables:
9
+ 1. Local development: See what would change before applying migrations
10
+ 2. CI validation: Detect drift between code and database (--check mode)
11
+ 3. Migration generation: Create incremental migration files
12
+ """
13
+
14
+ from dataclasses import dataclass, field
15
+ from pathlib import Path
16
+ from typing import Optional
17
+ import io
18
+
19
+ from alembic.autogenerate import produce_migrations, render_python_code
20
+ from alembic.operations import ops
21
+ from alembic.runtime.migration import MigrationContext
22
+ from alembic.script import ScriptDirectory
23
+ from loguru import logger
24
+ from sqlalchemy import create_engine, text
25
+ from sqlalchemy.dialects import postgresql
26
+
27
+ from ...settings import settings
28
+ from .pydantic_to_sqlalchemy import get_target_metadata
29
+
30
+
31
# Tables that are NOT managed by Pydantic models (infrastructure tables).
# These are created by 001_install.sql; they must be excluded from the diff,
# otherwise autogenerate would propose dropping them.
INFRASTRUCTURE_TABLES = {
    "kv_store",
    "rem_migrations",
    "rate_limits",
    "persons",  # Legacy table - to be removed from DB
}

# Prefixes for tables that should be included in diff
# (embeddings tables are created alongside entity tables)
EMBEDDINGS_PREFIX = "embeddings_"
43
+
44
+
45
@dataclass
class SchemaDiff:
    """Result of schema comparison, produced by DiffService.compute_diff()."""

    # True when at least one change survived strategy filtering.
    has_changes: bool
    # Human-readable descriptions, one line per detected change.
    summary: list[str] = field(default_factory=list)
    # Rendered SQL for the changes ("" when there are none).
    sql: str = ""
    # Raw Alembic operations tree, post strategy filtering.
    upgrade_ops: Optional[ops.UpgradeOps] = None
    filtered_count: int = 0  # Number of operations filtered out by strategy

    @property
    def change_count(self) -> int:
        """Total number of detected changes (length of ``summary``)."""
        return len(self.summary)
59
+
60
+
61
class DiffService:
    """
    Service for comparing Pydantic models against database schema.

    Uses Alembic's autogenerate machinery without creating revision files.

    Strategies:
        additive: Only ADD operations (columns, tables, indexes). No drops. Safe for production.
        full: All operations including DROPs. Use with caution.
        safe: Additive + safe column type changes (widenings like VARCHAR(50) -> VARCHAR(256)).
    """

    def __init__(self, models_dir: Optional[Path] = None, strategy: str = "additive"):
        """
        Initialize diff service.

        Args:
            models_dir: Directory containing Pydantic models.
                If None, uses default rem/models/entities location.
            strategy: Migration strategy - 'additive' (default), 'full', or 'safe'
        """
        self.models_dir = models_dir
        self.strategy = strategy
        # Lazily-built SQLAlchemy MetaData; populated by get_target_metadata().
        self._metadata = None
85
+
86
+ def get_connection_url(self) -> str:
87
+ """Build PostgreSQL connection URL from settings using psycopg (v3) driver."""
88
+ pg = settings.postgres
89
+ # Use postgresql+psycopg to use psycopg v3 (not psycopg2)
90
+ url = f"postgresql+psycopg://{pg.user}"
91
+ if pg.password:
92
+ url += f":{pg.password}"
93
+ url += f"@{pg.host}:{pg.port}/{pg.database}"
94
+ return url
95
+
96
+ def get_target_metadata(self):
97
+ """Get SQLAlchemy metadata from Pydantic models."""
98
+ if self._metadata is None:
99
+ if self.models_dir:
100
+ from .pydantic_to_sqlalchemy import build_sqlalchemy_metadata_from_pydantic
101
+ self._metadata = build_sqlalchemy_metadata_from_pydantic(self.models_dir)
102
+ else:
103
+ self._metadata = get_target_metadata()
104
+ return self._metadata
105
+
106
+ def _include_object(self, obj, name, type_, reflected, compare_to) -> bool:
107
+ """
108
+ Filter function for Alembic autogenerate.
109
+
110
+ Excludes infrastructure tables that are not managed by Pydantic models.
111
+
112
+ Args:
113
+ obj: The schema object (Table, Column, Index, etc.)
114
+ name: Object name
115
+ type_: Object type ("table", "column", "index", etc.)
116
+ reflected: True if object exists in database
117
+ compare_to: The object being compared to (if any)
118
+
119
+ Returns:
120
+ True to include in diff, False to exclude
121
+ """
122
+ if type_ == "table":
123
+ # Exclude infrastructure tables
124
+ if name in INFRASTRUCTURE_TABLES:
125
+ return False
126
+ # Include embeddings tables (they're part of the model schema)
127
+ # These are now generated in pydantic_to_sqlalchemy
128
+ return True
129
+
130
    def compute_diff(self) -> SchemaDiff:
        """
        Compare Pydantic models against database and return differences.

        Connects using settings-derived credentials, runs Alembic
        autogenerate against the Pydantic-derived metadata, filters the
        resulting operations by the configured strategy, and renders SQL.

        Returns:
            SchemaDiff with detected changes
        """
        url = self.get_connection_url()
        engine = create_engine(url)
        metadata = self.get_target_metadata()

        summary = []
        filtered_count = 0

        with engine.connect() as conn:
            # Create migration context for comparison
            context = MigrationContext.configure(
                conn,
                opts={
                    "target_metadata": metadata,
                    "compare_type": True,
                    "compare_server_default": False,  # Avoid false positives
                    "include_schemas": False,
                    "include_object": self._include_object,
                },
            )

            # Run autogenerate comparison
            migration_script = produce_migrations(context, metadata)
            upgrade_ops = migration_script.upgrade_ops

            # Filter operations based on strategy (mutates upgrade_ops in place)
            if upgrade_ops and upgrade_ops.ops:
                filtered_ops, filtered_count = self._filter_operations(upgrade_ops.ops)
                upgrade_ops.ops = filtered_ops

                # Process filtered operations into human-readable summary lines
                for op in filtered_ops:
                    summary.extend(self._describe_operation(op))

        has_changes = len(summary) > 0

        # Generate SQL if there are changes
        sql = ""
        if has_changes and upgrade_ops:
            sql = self._render_sql(upgrade_ops, engine)

        return SchemaDiff(
            has_changes=has_changes,
            summary=summary,
            sql=sql,
            upgrade_ops=upgrade_ops,
            filtered_count=filtered_count,
        )
184
+
185
+ def _filter_operations(self, operations: list) -> tuple[list, int]:
186
+ """
187
+ Filter operations based on migration strategy.
188
+
189
+ Args:
190
+ operations: List of Alembic operations
191
+
192
+ Returns:
193
+ Tuple of (filtered_operations, count_of_filtered_out)
194
+ """
195
+ if self.strategy == "full":
196
+ # Full strategy: include everything
197
+ return operations, 0
198
+
199
+ filtered = []
200
+ filtered_count = 0
201
+
202
+ for op in operations:
203
+ if isinstance(op, ops.ModifyTableOps):
204
+ # Filter sub-operations within table
205
+ sub_filtered, sub_count = self._filter_operations(op.ops)
206
+ filtered_count += sub_count
207
+ if sub_filtered:
208
+ op.ops = sub_filtered
209
+ filtered.append(op)
210
+ elif self._is_allowed_operation(op):
211
+ filtered.append(op)
212
+ else:
213
+ filtered_count += 1
214
+
215
+ return filtered, filtered_count
216
+
217
+ def _is_allowed_operation(self, op: ops.MigrateOperation) -> bool:
218
+ """
219
+ Check if an operation is allowed by the current strategy.
220
+
221
+ Args:
222
+ op: Alembic operation
223
+
224
+ Returns:
225
+ True if operation is allowed, False if it should be filtered out
226
+ """
227
+ # Additive operations (allowed in all strategies)
228
+ if isinstance(op, (ops.CreateTableOp, ops.AddColumnOp, ops.CreateIndexOp, ops.CreateForeignKeyOp)):
229
+ return True
230
+
231
+ # Destructive operations (only allowed in 'full' strategy)
232
+ if isinstance(op, (ops.DropTableOp, ops.DropColumnOp, ops.DropIndexOp, ops.DropConstraintOp)):
233
+ return self.strategy == "full"
234
+
235
+ # Alter operations
236
+ if isinstance(op, ops.AlterColumnOp):
237
+ if self.strategy == "full":
238
+ return True
239
+ if self.strategy == "safe":
240
+ # Allow safe type changes (widenings)
241
+ return self._is_safe_type_change(op)
242
+ # additive: no alter operations
243
+ return False
244
+
245
+ # Unknown operations: allow in full, deny otherwise
246
+ return self.strategy == "full"
247
+
248
+ def _is_safe_type_change(self, op: ops.AlterColumnOp) -> bool:
249
+ """
250
+ Check if a column type change is safe (widening, not narrowing).
251
+
252
+ Safe changes:
253
+ - VARCHAR(n) -> VARCHAR(m) where m > n
254
+ - INTEGER -> BIGINT
255
+ - Adding nullable (NOT NULL -> NULL)
256
+
257
+ Args:
258
+ op: AlterColumnOp to check
259
+
260
+ Returns:
261
+ True if the change is safe
262
+ """
263
+ # Allowing nullable is always safe
264
+ if op.modify_nullable is True:
265
+ return True
266
+
267
+ # Type changes: only allow VARCHAR widenings for now
268
+ if op.modify_type is not None:
269
+ new_type = str(op.modify_type).upper()
270
+ # VARCHAR widenings are generally safe
271
+ if "VARCHAR" in new_type:
272
+ return True # Assume widening; could add length comparison
273
+
274
+ return False
275
+
276
    def _describe_operation(self, op: ops.MigrateOperation, prefix: str = "") -> list[str]:
        """Convert Alembic operation to human-readable description.

        Args:
            op: Operation to describe.
            prefix: Indentation prefix, used when recursing into
                ModifyTableOps containers.

        Returns:
            Formatted lines: '+' for additions, '-' for drops, '~' for alters.
        """
        descriptions = []

        if isinstance(op, ops.CreateTableOp):
            descriptions.append(f"{prefix}+ CREATE TABLE {op.table_name}")
            for col in op.columns:
                # op.columns may also hold constraints; only columns have .name
                if hasattr(col, 'name'):
                    descriptions.append(f"{prefix}  + column {col.name}")

        elif isinstance(op, ops.DropTableOp):
            descriptions.append(f"{prefix}- DROP TABLE {op.table_name}")

        elif isinstance(op, ops.AddColumnOp):
            col_type = str(op.column.type) if op.column.type else "unknown"
            descriptions.append(f"{prefix}+ ADD COLUMN {op.table_name}.{op.column.name} ({col_type})")

        elif isinstance(op, ops.DropColumnOp):
            descriptions.append(f"{prefix}- DROP COLUMN {op.table_name}.{op.column_name}")

        elif isinstance(op, ops.AlterColumnOp):
            # Collect only the aspects that actually changed.
            changes = []
            if op.modify_type is not None:
                changes.append(f"type -> {op.modify_type}")
            if op.modify_nullable is not None:
                nullable = "NULL" if op.modify_nullable else "NOT NULL"
                changes.append(f"nullable -> {nullable}")
            if op.modify_server_default is not None:
                changes.append(f"default -> {op.modify_server_default}")
            change_str = ", ".join(changes) if changes else "modified"
            descriptions.append(f"{prefix}~ ALTER COLUMN {op.table_name}.{op.column_name} ({change_str})")

        elif isinstance(op, ops.CreateIndexOp):
            # op.columns can be strings or Column objects
            if op.columns:
                cols = ", ".join(
                    c if isinstance(c, str) else getattr(c, 'name', str(c))
                    for c in op.columns
                )
            else:
                cols = "?"
            descriptions.append(f"{prefix}+ CREATE INDEX {op.index_name} ON {op.table_name} ({cols})")

        elif isinstance(op, ops.DropIndexOp):
            descriptions.append(f"{prefix}- DROP INDEX {op.index_name}")

        elif isinstance(op, ops.CreateForeignKeyOp):
            descriptions.append(f"{prefix}+ CREATE FK {op.constraint_name} ON {op.source_table}")

        elif isinstance(op, ops.DropConstraintOp):
            descriptions.append(f"{prefix}- DROP CONSTRAINT {op.constraint_name} ON {op.table_name}")

        elif isinstance(op, ops.ModifyTableOps):
            # Container for multiple operations on same table
            descriptions.append(f"{prefix}Table: {op.table_name}")
            for sub_op in op.ops:
                descriptions.extend(self._describe_operation(sub_op, prefix + "  "))

        else:
            # Unknown operation type — surface it rather than dropping it.
            descriptions.append(f"{prefix}? {type(op).__name__}")

        return descriptions
338
+
339
+ def _render_sql(self, upgrade_ops: ops.UpgradeOps, engine) -> str:
340
+ """Render upgrade operations as SQL statements."""
341
+ from alembic.runtime.migration import MigrationContext
342
+ from alembic.operations import Operations
343
+
344
+ sql_lines = []
345
+
346
+ # Use offline mode to generate SQL
347
+ buffer = io.StringIO()
348
+
349
+ def emit_sql(text, *args, **kwargs):
350
+ sql_lines.append(str(text))
351
+
352
+ with engine.connect() as conn:
353
+ context = MigrationContext.configure(
354
+ conn,
355
+ opts={
356
+ "as_sql": True,
357
+ "output_buffer": buffer,
358
+ "target_metadata": self.get_target_metadata(),
359
+ },
360
+ )
361
+
362
+ with context.begin_transaction():
363
+ operations = Operations(context)
364
+ for op in upgrade_ops.ops:
365
+ self._execute_op(operations, op)
366
+
367
+ return buffer.getvalue()
368
+
369
+ def _execute_op(self, operations: "Operations", op: ops.MigrateOperation):
370
+ """Execute a single operation via Operations proxy."""
371
+ from alembic.operations import Operations
372
+ from alembic.autogenerate import rewriter
373
+
374
+ if isinstance(op, ops.CreateTableOp):
375
+ operations.create_table(
376
+ op.table_name,
377
+ *op.columns,
378
+ schema=op.schema,
379
+ **op.kw,
380
+ )
381
+ elif isinstance(op, ops.DropTableOp):
382
+ operations.drop_table(op.table_name, schema=op.schema)
383
+ elif isinstance(op, ops.AddColumnOp):
384
+ operations.add_column(op.table_name, op.column, schema=op.schema)
385
+ elif isinstance(op, ops.DropColumnOp):
386
+ operations.drop_column(op.table_name, op.column_name, schema=op.schema)
387
+ elif isinstance(op, ops.AlterColumnOp):
388
+ operations.alter_column(
389
+ op.table_name,
390
+ op.column_name,
391
+ nullable=op.modify_nullable,
392
+ type_=op.modify_type,
393
+ server_default=op.modify_server_default,
394
+ schema=op.schema,
395
+ )
396
+ elif isinstance(op, ops.CreateIndexOp):
397
+ operations.create_index(
398
+ op.index_name,
399
+ op.table_name,
400
+ op.columns,
401
+ schema=op.schema,
402
+ unique=op.unique,
403
+ **op.kw,
404
+ )
405
+ elif isinstance(op, ops.DropIndexOp):
406
+ operations.drop_index(op.index_name, table_name=op.table_name, schema=op.schema)
407
+ elif isinstance(op, ops.ModifyTableOps):
408
+ for sub_op in op.ops:
409
+ self._execute_op(operations, sub_op)
410
+
411
    def generate_migration_file(
        self,
        output_dir: Path,
        message: str = "auto_migration",
    ) -> Optional[Path]:
        """
        Generate a numbered migration file from the diff.

        Args:
            output_dir: Directory to write migration file
            message: Migration description (used in filename)

        Returns:
            Path to generated file, or None if no changes
        """
        diff = self.compute_diff()

        if not diff.has_changes:
            logger.info("No schema changes detected")
            return None

        # Find next migration number: one past the highest numeric prefix
        # among existing NNN_*.sql files (non-numeric stems are skipped).
        existing = sorted(output_dir.glob("*.sql"))
        next_num = 1
        for f in existing:
            try:
                num = int(f.stem.split("_")[0])
                next_num = max(next_num, num + 1)
            except (ValueError, IndexError):
                pass

        # Generate filename: slug the message and cap it at 40 chars.
        safe_message = message.replace(" ", "_").replace("-", "_")[:40]
        filename = f"{next_num:03d}_{safe_message}.sql"
        output_path = output_dir / filename

        # Write SQL with a review-reminder header.
        header = f"""-- Migration: {message}
-- Generated by: rem db diff --generate
-- Changes detected: {diff.change_count}
--
-- Review this file before applying!
-- Apply with: rem db migrate
--

"""
        # Build SQL from operations
        sql_content = self._build_migration_sql(diff)

        output_path.write_text(header + sql_content)
        logger.info(f"Generated migration: {output_path}")

        return output_path
464
+
465
+ def _build_migration_sql(self, diff: SchemaDiff) -> str:
466
+ """Build SQL from diff operations."""
467
+ if not diff.upgrade_ops or not diff.upgrade_ops.ops:
468
+ return "-- No changes\n"
469
+
470
+ lines = []
471
+ for op in diff.upgrade_ops.ops:
472
+ lines.extend(self._op_to_sql(op))
473
+
474
+ return "\n".join(lines) + "\n"
475
+
476
    def _compile_type(self, col_type) -> str:
        """Compile SQLAlchemy type to PostgreSQL DDL string.

        SQLAlchemy types like ARRAY(Text) need dialect-specific compilation
        to render correctly (e.g., "TEXT[]" instead of just "ARRAY").

        Args:
            col_type: SQLAlchemy type instance to compile.

        Returns:
            DDL type string; falls back to str(col_type) on failure.
        """
        try:
            return col_type.compile(dialect=postgresql.dialect())
        except Exception:
            # Fallback to string representation if compilation fails
            return str(col_type)
487
+
488
    def _op_to_sql(self, op: ops.MigrateOperation) -> list[str]:
        """Convert operation to SQL statements.

        Emits idempotent DDL (IF [NOT] EXISTS guards) so generated
        migrations can be re-applied safely.

        Args:
            op: Operation to render.

        Returns:
            List of SQL statement strings (or SQL comment lines).
        """
        lines = []

        if isinstance(op, ops.CreateTableOp):
            cols = []
            for col in op.columns:
                # op.columns may mix in constraints; only real columns
                # carry both .name and .type
                if hasattr(col, 'name') and hasattr(col, 'type'):
                    nullable = "" if getattr(col, 'nullable', True) else " NOT NULL"
                    type_str = self._compile_type(col.type)
                    cols.append(f"    {col.name} {type_str}{nullable}")
            col_str = ",\n".join(cols)
            lines.append(f"CREATE TABLE IF NOT EXISTS {op.table_name} (\n{col_str}\n);")

        elif isinstance(op, ops.DropTableOp):
            lines.append(f"DROP TABLE IF EXISTS {op.table_name};")

        elif isinstance(op, ops.AddColumnOp):
            col = op.column
            nullable = "" if getattr(col, 'nullable', True) else " NOT NULL"
            type_str = self._compile_type(col.type)
            lines.append(f"ALTER TABLE {op.table_name} ADD COLUMN IF NOT EXISTS {col.name} {type_str}{nullable};")

        elif isinstance(op, ops.DropColumnOp):
            lines.append(f"ALTER TABLE {op.table_name} DROP COLUMN IF EXISTS {op.column_name};")

        elif isinstance(op, ops.AlterColumnOp):
            # Type and nullability changes are emitted as separate statements.
            if op.modify_type is not None:
                type_str = self._compile_type(op.modify_type)
                lines.append(f"ALTER TABLE {op.table_name} ALTER COLUMN {op.column_name} TYPE {type_str};")
            if op.modify_nullable is not None:
                if op.modify_nullable:
                    lines.append(f"ALTER TABLE {op.table_name} ALTER COLUMN {op.column_name} DROP NOT NULL;")
                else:
                    lines.append(f"ALTER TABLE {op.table_name} ALTER COLUMN {op.column_name} SET NOT NULL;")

        elif isinstance(op, ops.CreateIndexOp):
            # op.columns can be strings or Column objects
            if op.columns:
                cols = ", ".join(
                    c if isinstance(c, str) else getattr(c, 'name', str(c))
                    for c in op.columns
                )
            else:
                cols = ""
            unique = "UNIQUE " if op.unique else ""
            lines.append(f"CREATE {unique}INDEX IF NOT EXISTS {op.index_name} ON {op.table_name} ({cols});")

        elif isinstance(op, ops.DropIndexOp):
            lines.append(f"DROP INDEX IF EXISTS {op.index_name};")

        elif isinstance(op, ops.ModifyTableOps):
            lines.append(f"-- Changes to table: {op.table_name}")
            for sub_op in op.ops:
                lines.extend(self._op_to_sql(sub_op))

        else:
            # Unsupported op types become comments, not silent drops.
            lines.append(f"-- Unsupported operation: {type(op).__name__}")

        return lines