remdb 0.3.7__py3-none-any.whl → 0.3.133__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (107)
  1. rem/__init__.py +129 -2
  2. rem/agentic/README.md +76 -0
  3. rem/agentic/__init__.py +15 -0
  4. rem/agentic/agents/__init__.py +16 -2
  5. rem/agentic/agents/sse_simulator.py +502 -0
  6. rem/agentic/context.py +51 -25
  7. rem/agentic/llm_provider_models.py +301 -0
  8. rem/agentic/mcp/tool_wrapper.py +112 -17
  9. rem/agentic/otel/setup.py +93 -4
  10. rem/agentic/providers/phoenix.py +314 -132
  11. rem/agentic/providers/pydantic_ai.py +215 -26
  12. rem/agentic/schema.py +361 -21
  13. rem/agentic/tools/rem_tools.py +3 -3
  14. rem/api/README.md +238 -1
  15. rem/api/deps.py +255 -0
  16. rem/api/main.py +154 -37
  17. rem/api/mcp_router/resources.py +1 -1
  18. rem/api/mcp_router/server.py +26 -5
  19. rem/api/mcp_router/tools.py +465 -7
  20. rem/api/middleware/tracking.py +172 -0
  21. rem/api/routers/admin.py +494 -0
  22. rem/api/routers/auth.py +124 -0
  23. rem/api/routers/chat/completions.py +402 -20
  24. rem/api/routers/chat/models.py +88 -10
  25. rem/api/routers/chat/otel_utils.py +33 -0
  26. rem/api/routers/chat/sse_events.py +542 -0
  27. rem/api/routers/chat/streaming.py +642 -45
  28. rem/api/routers/dev.py +81 -0
  29. rem/api/routers/feedback.py +268 -0
  30. rem/api/routers/messages.py +473 -0
  31. rem/api/routers/models.py +78 -0
  32. rem/api/routers/query.py +360 -0
  33. rem/api/routers/shared_sessions.py +406 -0
  34. rem/auth/middleware.py +126 -27
  35. rem/cli/commands/README.md +237 -64
  36. rem/cli/commands/ask.py +13 -10
  37. rem/cli/commands/cluster.py +1808 -0
  38. rem/cli/commands/configure.py +5 -6
  39. rem/cli/commands/db.py +396 -139
  40. rem/cli/commands/experiments.py +469 -74
  41. rem/cli/commands/process.py +22 -15
  42. rem/cli/commands/scaffold.py +47 -0
  43. rem/cli/commands/schema.py +97 -50
  44. rem/cli/main.py +29 -6
  45. rem/config.py +10 -3
  46. rem/models/core/core_model.py +7 -1
  47. rem/models/core/experiment.py +54 -0
  48. rem/models/core/rem_query.py +5 -2
  49. rem/models/entities/__init__.py +21 -0
  50. rem/models/entities/domain_resource.py +38 -0
  51. rem/models/entities/feedback.py +123 -0
  52. rem/models/entities/message.py +30 -1
  53. rem/models/entities/session.py +83 -0
  54. rem/models/entities/shared_session.py +180 -0
  55. rem/models/entities/user.py +10 -3
  56. rem/registry.py +373 -0
  57. rem/schemas/agents/rem.yaml +7 -3
  58. rem/services/content/providers.py +92 -133
  59. rem/services/content/service.py +92 -20
  60. rem/services/dreaming/affinity_service.py +2 -16
  61. rem/services/dreaming/moment_service.py +2 -15
  62. rem/services/embeddings/api.py +24 -17
  63. rem/services/embeddings/worker.py +16 -16
  64. rem/services/phoenix/EXPERIMENT_DESIGN.md +3 -3
  65. rem/services/phoenix/client.py +302 -28
  66. rem/services/postgres/README.md +159 -15
  67. rem/services/postgres/__init__.py +2 -1
  68. rem/services/postgres/diff_service.py +531 -0
  69. rem/services/postgres/pydantic_to_sqlalchemy.py +427 -129
  70. rem/services/postgres/repository.py +132 -0
  71. rem/services/postgres/schema_generator.py +291 -9
  72. rem/services/postgres/service.py +6 -6
  73. rem/services/rate_limit.py +113 -0
  74. rem/services/rem/README.md +14 -0
  75. rem/services/rem/parser.py +44 -9
  76. rem/services/rem/service.py +36 -2
  77. rem/services/session/compression.py +24 -1
  78. rem/services/session/reload.py +1 -1
  79. rem/services/user_service.py +98 -0
  80. rem/settings.py +399 -29
  81. rem/sql/background_indexes.sql +21 -16
  82. rem/sql/migrations/001_install.sql +387 -54
  83. rem/sql/migrations/002_install_models.sql +2320 -393
  84. rem/sql/migrations/003_optional_extensions.sql +326 -0
  85. rem/sql/migrations/004_cache_system.sql +548 -0
  86. rem/utils/__init__.py +18 -0
  87. rem/utils/constants.py +97 -0
  88. rem/utils/date_utils.py +228 -0
  89. rem/utils/embeddings.py +17 -4
  90. rem/utils/files.py +167 -0
  91. rem/utils/mime_types.py +158 -0
  92. rem/utils/model_helpers.py +156 -1
  93. rem/utils/schema_loader.py +282 -35
  94. rem/utils/sql_paths.py +146 -0
  95. rem/utils/sql_types.py +3 -1
  96. rem/utils/vision.py +9 -14
  97. rem/workers/README.md +14 -14
  98. rem/workers/__init__.py +3 -1
  99. rem/workers/db_listener.py +579 -0
  100. rem/workers/db_maintainer.py +74 -0
  101. rem/workers/unlogged_maintainer.py +463 -0
  102. {remdb-0.3.7.dist-info → remdb-0.3.133.dist-info}/METADATA +460 -303
  103. {remdb-0.3.7.dist-info → remdb-0.3.133.dist-info}/RECORD +105 -74
  104. {remdb-0.3.7.dist-info → remdb-0.3.133.dist-info}/WHEEL +1 -1
  105. rem/sql/002_install_models.sql +0 -1068
  106. rem/sql/install_models.sql +0 -1038
  107. {remdb-0.3.7.dist-info → remdb-0.3.133.dist-info}/entry_points.txt +0 -0
@@ -0,0 +1,531 @@
1
+ """
2
+ Schema diff service for comparing Pydantic models against database.
3
+
4
+ Uses Alembic autogenerate to detect differences between:
5
+ - Target schema (derived from Pydantic models)
6
+ - Current database schema
7
+
8
+ This enables:
9
+ 1. Local development: See what would change before applying migrations
10
+ 2. CI validation: Detect drift between code and database (--check mode)
11
+ 3. Migration generation: Create incremental migration files
12
+ """
13
+
14
+ from dataclasses import dataclass, field
15
+ from pathlib import Path
16
+ from typing import Optional
17
+ import io
18
+
19
+ from alembic.autogenerate import produce_migrations, render_python_code
20
+ from alembic.operations import ops
21
+ from alembic.runtime.migration import MigrationContext
22
+ from alembic.script import ScriptDirectory
23
+ from loguru import logger
24
+ from sqlalchemy import create_engine, text
25
+
26
+ from ...settings import settings
27
+ from .pydantic_to_sqlalchemy import get_target_metadata
28
+
29
+
30
# Tables that are NOT managed by Pydantic models (infrastructure tables).
# These are created by 001_install.sql and should be excluded from diff,
# otherwise autogenerate would propose dropping them (see
# DiffService._include_object, which consults this set).
INFRASTRUCTURE_TABLES = {
    "kv_store",
    "rem_migrations",
    "rate_limits",
    "persons",  # Legacy table - to be removed from DB
}

# Prefixes for tables that should be included in diff
# (embeddings tables are created alongside entity tables).
# NOTE(review): this constant is not referenced anywhere in this module —
# presumably kept for importers or future filtering; confirm before removing.
EMBEDDINGS_PREFIX = "embeddings_"
42
+
43
+
44
@dataclass
class SchemaDiff:
    """Outcome of comparing the Pydantic-derived schema against the live database."""

    # True when at least one difference survived strategy filtering.
    has_changes: bool
    # One human-readable line per detected change (see _describe_operation).
    summary: list[str] = field(default_factory=list)
    # Rendered SQL for the changes; empty string when nothing changed.
    sql: str = ""
    # Raw Alembic upgrade operations, retained for downstream SQL generation.
    upgrade_ops: Optional[ops.UpgradeOps] = None
    # Number of operations filtered out by the active migration strategy.
    filtered_count: int = 0

    @property
    def change_count(self) -> int:
        """Total number of detected changes."""
        return len(self.summary)
58
+
59
+
60
class DiffService:
    """
    Service for comparing Pydantic models against database schema.

    Uses Alembic's autogenerate machinery without creating revision files.

    Strategies:
        additive: Only ADD operations (columns, tables, indexes). No drops. Safe for production.
        full: All operations including DROPs. Use with caution.
        safe: Additive + safe column type changes (widenings like VARCHAR(50) -> VARCHAR(256)).
    """

    def __init__(self, models_dir: Optional[Path] = None, strategy: str = "additive"):
        """
        Initialize diff service.

        Args:
            models_dir: Directory containing Pydantic models.
                If None, uses default rem/models/entities location.
            strategy: Migration strategy - 'additive' (default), 'full', or 'safe'
        """
        self.models_dir = models_dir
        self.strategy = strategy
        self._metadata = None  # Lazily built by get_target_metadata()

    def get_connection_url(self) -> str:
        """Build PostgreSQL connection URL from settings using psycopg (v3) driver.

        Credentials are percent-encoded so special characters (@ : / %) in the
        user or password cannot corrupt the URL structure.
        """
        from urllib.parse import quote

        pg = settings.postgres
        # Use postgresql+psycopg to use psycopg v3 (not psycopg2)
        url = f"postgresql+psycopg://{quote(str(pg.user), safe='')}"
        if pg.password:
            url += f":{quote(str(pg.password), safe='')}"
        url += f"@{pg.host}:{pg.port}/{pg.database}"
        return url

    def get_target_metadata(self):
        """Get SQLAlchemy metadata from Pydantic models (cached after first build)."""
        if self._metadata is None:
            if self.models_dir:
                from .pydantic_to_sqlalchemy import build_sqlalchemy_metadata_from_pydantic
                self._metadata = build_sqlalchemy_metadata_from_pydantic(self.models_dir)
            else:
                self._metadata = get_target_metadata()
        return self._metadata

    def _include_object(self, obj, name, type_, reflected, compare_to) -> bool:
        """
        Filter function for Alembic autogenerate.

        Excludes infrastructure tables that are not managed by Pydantic models.

        Args:
            obj: The schema object (Table, Column, Index, etc.)
            name: Object name
            type_: Object type ("table", "column", "index", etc.)
            reflected: True if object exists in database
            compare_to: The object being compared to (if any)

        Returns:
            True to include in diff, False to exclude
        """
        if type_ == "table":
            # Exclude infrastructure tables (created by 001_install.sql)
            if name in INFRASTRUCTURE_TABLES:
                return False
            # Include embeddings tables (they're part of the model schema);
            # these are now generated in pydantic_to_sqlalchemy
            return True
        # BUG FIX: the original fell through here and implicitly returned None
        # (falsy), which excluded every non-table object (columns, indexes,
        # constraints) from the comparison. Include them; objects belonging to
        # excluded tables are already skipped via their parent table.
        return True

    def compute_diff(self) -> SchemaDiff:
        """
        Compare Pydantic models against database and return differences.

        Returns:
            SchemaDiff with detected changes
        """
        url = self.get_connection_url()
        engine = create_engine(url)
        metadata = self.get_target_metadata()

        summary: list[str] = []
        filtered_count = 0

        with engine.connect() as conn:
            # Create migration context for comparison
            context = MigrationContext.configure(
                conn,
                opts={
                    "target_metadata": metadata,
                    "compare_type": True,
                    "compare_server_default": False,  # Avoid false positives
                    "include_schemas": False,
                    "include_object": self._include_object,
                },
            )

            # Run autogenerate comparison
            migration_script = produce_migrations(context, metadata)
            upgrade_ops = migration_script.upgrade_ops

            # Filter operations based on strategy
            if upgrade_ops and upgrade_ops.ops:
                filtered_ops, filtered_count = self._filter_operations(upgrade_ops.ops)
                upgrade_ops.ops = filtered_ops

                # Process filtered operations
                for op in filtered_ops:
                    summary.extend(self._describe_operation(op))

        has_changes = len(summary) > 0

        # Generate SQL if there are changes
        sql = ""
        if has_changes and upgrade_ops:
            sql = self._render_sql(upgrade_ops, engine)

        return SchemaDiff(
            has_changes=has_changes,
            summary=summary,
            sql=sql,
            upgrade_ops=upgrade_ops,
            filtered_count=filtered_count,
        )

    def _filter_operations(self, operations: list) -> tuple[list, int]:
        """
        Filter operations based on migration strategy.

        Recurses into ModifyTableOps containers, mutating their .ops in place.

        Args:
            operations: List of Alembic operations

        Returns:
            Tuple of (filtered_operations, count_of_filtered_out)
        """
        if self.strategy == "full":
            # Full strategy: include everything
            return operations, 0

        filtered = []
        filtered_count = 0

        for op in operations:
            if isinstance(op, ops.ModifyTableOps):
                # Filter sub-operations within table; drop the container
                # entirely when nothing survives.
                sub_filtered, sub_count = self._filter_operations(op.ops)
                filtered_count += sub_count
                if sub_filtered:
                    op.ops = sub_filtered
                    filtered.append(op)
            elif self._is_allowed_operation(op):
                filtered.append(op)
            else:
                filtered_count += 1

        return filtered, filtered_count

    def _is_allowed_operation(self, op: ops.MigrateOperation) -> bool:
        """
        Check if an operation is allowed by the current strategy.

        Args:
            op: Alembic operation

        Returns:
            True if operation is allowed, False if it should be filtered out
        """
        # Additive operations (allowed in all strategies)
        if isinstance(op, (ops.CreateTableOp, ops.AddColumnOp, ops.CreateIndexOp, ops.CreateForeignKeyOp)):
            return True

        # Destructive operations (only allowed in 'full' strategy)
        if isinstance(op, (ops.DropTableOp, ops.DropColumnOp, ops.DropIndexOp, ops.DropConstraintOp)):
            return self.strategy == "full"

        # Alter operations
        if isinstance(op, ops.AlterColumnOp):
            if self.strategy == "full":
                return True
            if self.strategy == "safe":
                # Allow safe type changes (widenings)
                return self._is_safe_type_change(op)
            # additive: no alter operations
            return False

        # Unknown operations: allow in full, deny otherwise
        return self.strategy == "full"

    def _is_safe_type_change(self, op: ops.AlterColumnOp) -> bool:
        """
        Check if a column type change is safe (widening, not narrowing).

        Safe changes:
        - VARCHAR(n) -> VARCHAR(m) where m > n
        - Adding nullable (NOT NULL -> NULL)

        Args:
            op: AlterColumnOp to check

        Returns:
            True if the change is safe
        """
        # Allowing nullable is always safe
        if op.modify_nullable is True:
            return True

        # Type changes: only allow VARCHAR widenings for now
        if op.modify_type is not None:
            new_type = str(op.modify_type).upper()
            # VARCHAR widenings are generally safe.
            # NOTE(review): this assumes the change is a widening; an actual
            # length comparison against the current type would be stricter.
            if "VARCHAR" in new_type:
                return True

        return False

    def _describe_operation(self, op: ops.MigrateOperation, prefix: str = "") -> list[str]:
        """Convert Alembic operation to human-readable description."""
        descriptions = []

        if isinstance(op, ops.CreateTableOp):
            descriptions.append(f"{prefix}+ CREATE TABLE {op.table_name}")
            for col in op.columns:
                if hasattr(col, 'name'):
                    descriptions.append(f"{prefix}  + column {col.name}")

        elif isinstance(op, ops.DropTableOp):
            descriptions.append(f"{prefix}- DROP TABLE {op.table_name}")

        elif isinstance(op, ops.AddColumnOp):
            col_type = str(op.column.type) if op.column.type else "unknown"
            descriptions.append(f"{prefix}+ ADD COLUMN {op.table_name}.{op.column.name} ({col_type})")

        elif isinstance(op, ops.DropColumnOp):
            descriptions.append(f"{prefix}- DROP COLUMN {op.table_name}.{op.column_name}")

        elif isinstance(op, ops.AlterColumnOp):
            changes = []
            if op.modify_type is not None:
                changes.append(f"type -> {op.modify_type}")
            if op.modify_nullable is not None:
                nullable = "NULL" if op.modify_nullable else "NOT NULL"
                changes.append(f"nullable -> {nullable}")
            if op.modify_server_default is not None:
                changes.append(f"default -> {op.modify_server_default}")
            change_str = ", ".join(changes) if changes else "modified"
            descriptions.append(f"{prefix}~ ALTER COLUMN {op.table_name}.{op.column_name} ({change_str})")

        elif isinstance(op, ops.CreateIndexOp):
            # op.columns can be strings or Column objects
            if op.columns:
                cols = ", ".join(
                    c if isinstance(c, str) else getattr(c, 'name', str(c))
                    for c in op.columns
                )
            else:
                cols = "?"
            descriptions.append(f"{prefix}+ CREATE INDEX {op.index_name} ON {op.table_name} ({cols})")

        elif isinstance(op, ops.DropIndexOp):
            descriptions.append(f"{prefix}- DROP INDEX {op.index_name}")

        elif isinstance(op, ops.CreateForeignKeyOp):
            descriptions.append(f"{prefix}+ CREATE FK {op.constraint_name} ON {op.source_table}")

        elif isinstance(op, ops.DropConstraintOp):
            descriptions.append(f"{prefix}- DROP CONSTRAINT {op.constraint_name} ON {op.table_name}")

        elif isinstance(op, ops.ModifyTableOps):
            # Container for multiple operations on same table
            descriptions.append(f"{prefix}Table: {op.table_name}")
            for sub_op in op.ops:
                descriptions.extend(self._describe_operation(sub_op, prefix + "  "))

        else:
            descriptions.append(f"{prefix}? {type(op).__name__}")

        return descriptions

    def _render_sql(self, upgrade_ops: ops.UpgradeOps, engine) -> str:
        """Render upgrade operations as SQL statements via Alembic offline mode."""
        from alembic.operations import Operations

        # as_sql=True makes Alembic write SQL text into output_buffer instead
        # of executing against the connection.
        buffer = io.StringIO()

        with engine.connect() as conn:
            context = MigrationContext.configure(
                conn,
                opts={
                    "as_sql": True,
                    "output_buffer": buffer,
                    "target_metadata": self.get_target_metadata(),
                },
            )

            with context.begin_transaction():
                operations = Operations(context)
                for op in upgrade_ops.ops:
                    self._execute_op(operations, op)

        return buffer.getvalue()

    def _execute_op(self, operations: "Operations", op: ops.MigrateOperation):
        """Execute a single operation via Operations proxy, recursing into containers."""
        if isinstance(op, ops.CreateTableOp):
            operations.create_table(
                op.table_name,
                *op.columns,
                schema=op.schema,
                **op.kw,
            )
        elif isinstance(op, ops.DropTableOp):
            operations.drop_table(op.table_name, schema=op.schema)
        elif isinstance(op, ops.AddColumnOp):
            operations.add_column(op.table_name, op.column, schema=op.schema)
        elif isinstance(op, ops.DropColumnOp):
            operations.drop_column(op.table_name, op.column_name, schema=op.schema)
        elif isinstance(op, ops.AlterColumnOp):
            operations.alter_column(
                op.table_name,
                op.column_name,
                nullable=op.modify_nullable,
                type_=op.modify_type,
                server_default=op.modify_server_default,
                schema=op.schema,
            )
        elif isinstance(op, ops.CreateIndexOp):
            operations.create_index(
                op.index_name,
                op.table_name,
                op.columns,
                schema=op.schema,
                unique=op.unique,
                **op.kw,
            )
        elif isinstance(op, ops.DropIndexOp):
            operations.drop_index(op.index_name, table_name=op.table_name, schema=op.schema)
        elif isinstance(op, ops.ModifyTableOps):
            for sub_op in op.ops:
                self._execute_op(operations, sub_op)
        else:
            # Previously unhandled operations (e.g. CreateForeignKeyOp) were
            # silently dropped from the rendered SQL; surface them instead.
            logger.warning(f"Skipping unsupported operation in SQL rendering: {type(op).__name__}")

    def generate_migration_file(
        self,
        output_dir: Path,
        message: str = "auto_migration",
    ) -> Optional[Path]:
        """
        Generate a numbered migration file from the diff.

        Args:
            output_dir: Directory to write migration file
            message: Migration description (used in filename)

        Returns:
            Path to generated file, or None if no changes
        """
        diff = self.compute_diff()

        if not diff.has_changes:
            logger.info("No schema changes detected")
            return None

        # Find next migration number by scanning existing NNN_*.sql files
        existing = sorted(output_dir.glob("*.sql"))
        next_num = 1
        for f in existing:
            try:
                num = int(f.stem.split("_")[0])
                next_num = max(next_num, num + 1)
            except (ValueError, IndexError):
                pass  # Non-numbered file; ignore

        # Generate filename (sanitize message for filesystem use)
        safe_message = message.replace(" ", "_").replace("-", "_")[:40]
        filename = f"{next_num:03d}_{safe_message}.sql"
        output_path = output_dir / filename

        # Write SQL
        header = f"""-- Migration: {message}
-- Generated by: rem db diff --generate
-- Changes detected: {diff.change_count}
--
-- Review this file before applying!
-- Apply with: rem db migrate
--

"""
        # Build SQL from operations
        sql_content = self._build_migration_sql(diff)

        output_path.write_text(header + sql_content)
        logger.info(f"Generated migration: {output_path}")

        return output_path

    def _build_migration_sql(self, diff: SchemaDiff) -> str:
        """Build SQL from diff operations."""
        if not diff.upgrade_ops or not diff.upgrade_ops.ops:
            return "-- No changes\n"

        lines = []
        for op in diff.upgrade_ops.ops:
            lines.extend(self._op_to_sql(op))

        return "\n".join(lines) + "\n"

    def _op_to_sql(self, op: ops.MigrateOperation) -> list[str]:
        """Convert operation to idempotent SQL statements (IF [NOT] EXISTS guards)."""
        lines = []

        if isinstance(op, ops.CreateTableOp):
            cols = []
            for col in op.columns:
                if hasattr(col, 'name') and hasattr(col, 'type'):
                    nullable = "" if getattr(col, 'nullable', True) else " NOT NULL"
                    cols.append(f"    {col.name} {col.type}{nullable}")
            col_str = ",\n".join(cols)
            lines.append(f"CREATE TABLE IF NOT EXISTS {op.table_name} (\n{col_str}\n);")

        elif isinstance(op, ops.DropTableOp):
            lines.append(f"DROP TABLE IF EXISTS {op.table_name};")

        elif isinstance(op, ops.AddColumnOp):
            col = op.column
            nullable = "" if getattr(col, 'nullable', True) else " NOT NULL"
            lines.append(f"ALTER TABLE {op.table_name} ADD COLUMN IF NOT EXISTS {col.name} {col.type}{nullable};")

        elif isinstance(op, ops.DropColumnOp):
            lines.append(f"ALTER TABLE {op.table_name} DROP COLUMN IF EXISTS {op.column_name};")

        elif isinstance(op, ops.AlterColumnOp):
            if op.modify_type is not None:
                lines.append(f"ALTER TABLE {op.table_name} ALTER COLUMN {op.column_name} TYPE {op.modify_type};")
            if op.modify_nullable is not None:
                if op.modify_nullable:
                    lines.append(f"ALTER TABLE {op.table_name} ALTER COLUMN {op.column_name} DROP NOT NULL;")
                else:
                    lines.append(f"ALTER TABLE {op.table_name} ALTER COLUMN {op.column_name} SET NOT NULL;")

        elif isinstance(op, ops.CreateIndexOp):
            # op.columns can be strings or Column objects
            if op.columns:
                cols = ", ".join(
                    c if isinstance(c, str) else getattr(c, 'name', str(c))
                    for c in op.columns
                )
            else:
                cols = ""
            unique = "UNIQUE " if op.unique else ""
            lines.append(f"CREATE {unique}INDEX IF NOT EXISTS {op.index_name} ON {op.table_name} ({cols});")

        elif isinstance(op, ops.DropIndexOp):
            lines.append(f"DROP INDEX IF EXISTS {op.index_name};")

        elif isinstance(op, ops.CreateForeignKeyOp):
            # FK creation is allowed by the 'additive' strategy, so it must be
            # renderable (previously it fell through to "Unsupported").
            local_cols = ", ".join(op.local_cols)
            remote_cols = ", ".join(op.remote_cols)
            lines.append(
                f"ALTER TABLE {op.source_table} ADD CONSTRAINT {op.constraint_name} "
                f"FOREIGN KEY ({local_cols}) REFERENCES {op.referent_table} ({remote_cols});"
            )

        elif isinstance(op, ops.ModifyTableOps):
            lines.append(f"-- Changes to table: {op.table_name}")
            for sub_op in op.ops:
                lines.extend(self._op_to_sql(sub_op))

        else:
            lines.append(f"-- Unsupported operation: {type(op).__name__}")

        return lines