remdb 0.3.0__py3-none-any.whl → 0.3.114__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release: this version of remdb might be problematic.

Files changed (98)
  1. rem/__init__.py +129 -2
  2. rem/agentic/README.md +76 -0
  3. rem/agentic/__init__.py +15 -0
  4. rem/agentic/agents/__init__.py +16 -2
  5. rem/agentic/agents/sse_simulator.py +500 -0
  6. rem/agentic/context.py +28 -22
  7. rem/agentic/llm_provider_models.py +301 -0
  8. rem/agentic/otel/setup.py +92 -4
  9. rem/agentic/providers/phoenix.py +32 -43
  10. rem/agentic/providers/pydantic_ai.py +142 -22
  11. rem/agentic/schema.py +358 -21
  12. rem/agentic/tools/rem_tools.py +3 -3
  13. rem/api/README.md +238 -1
  14. rem/api/deps.py +255 -0
  15. rem/api/main.py +151 -37
  16. rem/api/mcp_router/resources.py +1 -1
  17. rem/api/mcp_router/server.py +17 -2
  18. rem/api/mcp_router/tools.py +143 -7
  19. rem/api/middleware/tracking.py +172 -0
  20. rem/api/routers/admin.py +277 -0
  21. rem/api/routers/auth.py +124 -0
  22. rem/api/routers/chat/completions.py +152 -16
  23. rem/api/routers/chat/models.py +7 -3
  24. rem/api/routers/chat/sse_events.py +526 -0
  25. rem/api/routers/chat/streaming.py +608 -45
  26. rem/api/routers/dev.py +81 -0
  27. rem/api/routers/feedback.py +148 -0
  28. rem/api/routers/messages.py +473 -0
  29. rem/api/routers/models.py +78 -0
  30. rem/api/routers/query.py +357 -0
  31. rem/api/routers/shared_sessions.py +406 -0
  32. rem/auth/middleware.py +126 -27
  33. rem/cli/commands/README.md +201 -70
  34. rem/cli/commands/ask.py +13 -10
  35. rem/cli/commands/cluster.py +1359 -0
  36. rem/cli/commands/configure.py +4 -3
  37. rem/cli/commands/db.py +350 -137
  38. rem/cli/commands/experiments.py +76 -72
  39. rem/cli/commands/process.py +22 -15
  40. rem/cli/commands/scaffold.py +47 -0
  41. rem/cli/commands/schema.py +95 -49
  42. rem/cli/main.py +29 -6
  43. rem/config.py +2 -2
  44. rem/models/core/core_model.py +7 -1
  45. rem/models/core/rem_query.py +5 -2
  46. rem/models/entities/__init__.py +21 -0
  47. rem/models/entities/domain_resource.py +38 -0
  48. rem/models/entities/feedback.py +123 -0
  49. rem/models/entities/message.py +30 -1
  50. rem/models/entities/session.py +83 -0
  51. rem/models/entities/shared_session.py +180 -0
  52. rem/models/entities/user.py +10 -3
  53. rem/registry.py +373 -0
  54. rem/schemas/agents/rem.yaml +7 -3
  55. rem/services/content/providers.py +94 -140
  56. rem/services/content/service.py +92 -20
  57. rem/services/dreaming/affinity_service.py +2 -16
  58. rem/services/dreaming/moment_service.py +2 -15
  59. rem/services/embeddings/api.py +24 -17
  60. rem/services/embeddings/worker.py +16 -16
  61. rem/services/phoenix/EXPERIMENT_DESIGN.md +3 -3
  62. rem/services/phoenix/client.py +252 -19
  63. rem/services/postgres/README.md +159 -15
  64. rem/services/postgres/__init__.py +2 -1
  65. rem/services/postgres/diff_service.py +426 -0
  66. rem/services/postgres/pydantic_to_sqlalchemy.py +427 -129
  67. rem/services/postgres/repository.py +132 -0
  68. rem/services/postgres/schema_generator.py +86 -5
  69. rem/services/postgres/service.py +6 -6
  70. rem/services/rate_limit.py +113 -0
  71. rem/services/rem/README.md +14 -0
  72. rem/services/rem/parser.py +44 -9
  73. rem/services/rem/service.py +36 -2
  74. rem/services/session/compression.py +17 -1
  75. rem/services/session/reload.py +1 -1
  76. rem/services/user_service.py +98 -0
  77. rem/settings.py +169 -17
  78. rem/sql/background_indexes.sql +21 -16
  79. rem/sql/migrations/001_install.sql +231 -54
  80. rem/sql/migrations/002_install_models.sql +457 -393
  81. rem/sql/migrations/003_optional_extensions.sql +326 -0
  82. rem/utils/constants.py +97 -0
  83. rem/utils/date_utils.py +228 -0
  84. rem/utils/embeddings.py +17 -4
  85. rem/utils/files.py +167 -0
  86. rem/utils/mime_types.py +158 -0
  87. rem/utils/model_helpers.py +156 -1
  88. rem/utils/schema_loader.py +191 -35
  89. rem/utils/sql_types.py +3 -1
  90. rem/utils/vision.py +9 -14
  91. rem/workers/README.md +14 -14
  92. rem/workers/db_maintainer.py +74 -0
  93. {remdb-0.3.0.dist-info → remdb-0.3.114.dist-info}/METADATA +303 -164
  94. {remdb-0.3.0.dist-info → remdb-0.3.114.dist-info}/RECORD +96 -70
  95. {remdb-0.3.0.dist-info → remdb-0.3.114.dist-info}/WHEEL +1 -1
  96. rem/sql/002_install_models.sql +0 -1068
  97. rem/sql/install_models.sql +0 -1038
  98. {remdb-0.3.0.dist-info → remdb-0.3.114.dist-info}/entry_points.txt +0 -0
rem/services/postgres/README.md
@@ -348,8 +348,27 @@ results = await service.vector_search(

  ### Initialize Service

+ There are two ways to initialize the PostgresService:
+
+ **Option 1: Factory function (recommended for apps using remdb as a library)**
+
+ ```python
+ from rem.services.postgres import get_postgres_service
+
+ # Uses POSTGRES__CONNECTION_STRING from environment
+ pg = get_postgres_service()
+ if pg is None:
+     raise RuntimeError("Database not configured - set POSTGRES__CONNECTION_STRING")
+
+ await pg.connect()
+ # ... use pg ...
+ await pg.disconnect()
+ ```
+
+ **Option 2: Direct instantiation**
+
  ```python
- from rem.services.postgres import PostgresService, Repository
+ from rem.services.postgres import PostgresService

  service = PostgresService(
      connection_string="postgresql://user:pass@localhost/remdb",
@@ -359,6 +378,9 @@ service = PostgresService(
  await service.connect()
  ```

+ > **Note**: `get_postgres_service()` returns the service directly. It does NOT support
+ > `async with` context manager syntax. Always call `connect()` and `disconnect()` explicitly.
+
  ### Using Repository Pattern

  **Generic Repository** for simple CRUD operations:
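
The note in this hunk implies a specific lifecycle: every `connect()` must be paired with a `disconnect()`, even on error paths. A minimal sketch of that pattern, assuming only the `get_postgres_service()`/`connect()`/`disconnect()` API shown above (the `try/finally` wrapper itself is illustrative, not remdb API):

```python
# Sketch only: explicit lifecycle for get_postgres_service(), since
# async-with is not supported per the note above.
import asyncio

from rem.services.postgres import get_postgres_service


async def main() -> None:
    pg = get_postgres_service()
    if pg is None:
        raise RuntimeError("Database not configured - set POSTGRES__CONNECTION_STRING")

    await pg.connect()
    try:
        ...  # use pg
    finally:
        # Guarantees disconnect() runs even if the work above raises
        await pg.disconnect()


asyncio.run(main())
```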
@@ -514,34 +536,156 @@ results = await service.vector_search(
  - HNSW parameters: `m=16, ef_construction=64` (tunable)
  - Monitor shared_buffers and work_mem

- ## Migrations
+ ## Schema Management

- Run migrations in order:
+ REM uses a **code-as-source-of-truth** approach. Pydantic models define the schema, and the database is kept in sync via diff-based migrations.

- ```bash
- psql -d remdb -f sql/migrations/001_setup_extensions.sql
- psql -d remdb -f sql/migrations/002_kv_store_cache.sql
- psql -d remdb -f sql/generated_schema.sql
+ ### File Structure
+
+ ```
+ src/rem/sql/
+ ├── migrations/
+ │   ├── 001_install.sql         # Core infrastructure (manual)
+ │   └── 002_install_models.sql  # Entity tables (auto-generated)
+ └── background_indexes.sql      # HNSW vector indexes (optional)
  ```

- Background indexes (after data load):
+ **Key principle**: Only two migration files. No incremental `003_`, `004_` files.
+
+ ### CLI Commands

  ```bash
- psql -d remdb -f sql/background_indexes.sql
+ # Apply migrations (installs extensions, core tables, entity tables)
+ rem db migrate
+
+ # Check migration status
+ rem db status
+
+ # Generate schema SQL from models (for remdb development)
+ rem db schema generate --models src/rem/models/entities
+
+ # Validate models for schema generation
+ rem db schema validate --models src/rem/models/entities
  ```

- ## CLI Usage
+ ### Model Registry

- Generate schema from models:
+ Models are discovered via the registry:

- ```bash
- rem schema generate --models src/rem/models/entities --output sql/schema.sql
+ ```python
+ import rem
+ from rem.models.core import CoreModel
+
+ @rem.register_model
+ class MyEntity(CoreModel):
+     name: str
+     description: str  # Auto-embeds
+ ```
+
+ ## Using REM as a Library (Downstream Apps)
+
+ When building an application that **depends on remdb as a package** (e.g., `pip install remdb`),
+ there are important differences from developing remdb itself.
+
+ ### What Works Out of the Box
+
+ 1. **All core entity tables** - Resources, Messages, Users, Sessions, etc.
+ 2. **PostgresService** - Full database access via `get_postgres_service()`
+ 3. **Repository pattern** - CRUD operations for core entities
+ 4. **Migrations** - `rem db migrate` applies the bundled SQL files
+
+ ```python
+ # In your downstream app (e.g., myapp/main.py)
+ from rem.services.postgres import get_postgres_service
+ from rem.models.entities import Message, Resource
+
+ pg = get_postgres_service()
+ await pg.connect()
+
+ # Use core entities - tables already exist
+ messages = await pg.query(Message, {"session_id": "abc"})
  ```

- Validate models:
+ ### Custom Models in Downstream Apps
+
+ The `@rem.register_model` decorator registers models in the **runtime registry**, which is useful for:
+ - Schema introspection at runtime
+ - Future tooling that reads the registry
+
+ However, **`rem db migrate` only applies SQL files bundled in the remdb package**.
+ Custom models from downstream apps do NOT automatically get tables created.
+
+ **Options for custom model tables:**
+
+ **Option A: Use core entities with metadata**
+
+ Store custom data in the `metadata` JSONB field of existing entities:
+
+ ```python
+ resource = Resource(
+     name="my-custom-thing",
+     content="...",
+     metadata={"custom_field": "value", "another": 123}
+ )
+ ```
+
+ **Option B: Create tables manually**
+
+ Write and apply your own SQL:
+
+ ```sql
+ -- myapp/sql/custom_tables.sql
+ CREATE TABLE IF NOT EXISTS conversation_summaries (
+     id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
+     session_ref TEXT NOT NULL,
+     summary TEXT NOT NULL,
+     -- ... include CoreModel fields for compatibility
+     user_id VARCHAR(256),
+     created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
+     updated_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP
+ );
+ ```

  ```bash
- rem schema validate --models src/rem/models/entities
+ psql $DATABASE_URL -f myapp/sql/custom_tables.sql
+ ```
+
+ **Option C: Contribute upstream**
+
+ If your model is generally useful, contribute it to remdb so it's included in
+ the next release and `rem db migrate` creates it automatically.
+
+ ### Example: Downstream App Structure
+
+ ```
+ myapp/
+ ├── main.py              # Import models, start API
+ ├── models/
+ │   └── __init__.py      # @rem.register_model decorators
+ ├── sql/
+ │   └── custom.sql       # Manual migrations for custom tables
+ ├── .env                 # POSTGRES__CONNECTION_STRING, LLM keys
+ └── pyproject.toml       # dependencies = ["remdb>=0.3.110"]
+ ```
+
+ ```python
+ # myapp/models/__init__.py
+ import rem
+ from rem.models.core import CoreModel
+
+ @rem.register_model
+ class ConversationSummary(CoreModel):
+     """Registered for introspection, but table created via sql/custom.sql"""
+     session_ref: str
+     summary: str
+ ```
+
+ ```python
+ # myapp/main.py
+ import models  # Registers custom models
+
+ from rem.api.main import app  # Use REM's FastAPI app
+ # Or build your own app using rem.services
  ```

  ## Configuration
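
Option B above leaves actually applying the SQL file to the downstream app. A sketch of one way to do that at startup, using psycopg v3 directly (the driver the new DiffService below targets); the env var name comes from the README above, while the function name and file path are illustrative:

```python
# Sketch only: apply a manual Option B migration at app startup.
# Uses psycopg v3 directly; file path and function name are illustrative.
import os
from pathlib import Path

import psycopg


async def apply_custom_sql() -> None:
    dsn = os.environ["POSTGRES__CONNECTION_STRING"]
    sql = Path("myapp/sql/custom_tables.sql").read_text()
    async with await psycopg.AsyncConnection.connect(dsn) as conn:
        # CREATE TABLE IF NOT EXISTS makes re-running at startup safe
        await conn.execute(sql)
        await conn.commit()
```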
rem/services/postgres/__init__.py
@@ -2,6 +2,7 @@
  PostgreSQL service for CloudNativePG database operations.
  """

+ from .diff_service import DiffService, SchemaDiff
  from .repository import Repository
  from .service import PostgresService

@@ -20,4 +21,4 @@ def get_postgres_service() -> PostgresService | None:
      return PostgresService()


- __all__ = ["PostgresService", "get_postgres_service", "Repository"]
+ __all__ = ["PostgresService", "get_postgres_service", "Repository", "DiffService", "SchemaDiff"]
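
The new exports pair up: `DiffService.compute_diff()` returns a `SchemaDiff`. A minimal usage sketch based on the implementation below, assuming the `POSTGRES__*` settings point at a reachable database:

```python
from rem.services.postgres import DiffService

# Compare Pydantic models against the live database schema
diff = DiffService().compute_diff()
if diff.has_changes:
    print(f"{diff.change_count} schema change(s) detected:")
    for line in diff.summary:
        print(f"  {line}")
    print(diff.sql)  # SQL that would bring the database in sync
else:
    print("Database matches models")
```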
rem/services/postgres/diff_service.py (new file)
@@ -0,0 +1,426 @@
+ """
+ Schema diff service for comparing Pydantic models against database.
+
+ Uses Alembic autogenerate to detect differences between:
+ - Target schema (derived from Pydantic models)
+ - Current database schema
+
+ This enables:
+ 1. Local development: See what would change before applying migrations
+ 2. CI validation: Detect drift between code and database (--check mode)
+ 3. Migration generation: Create incremental migration files
+ """
+
+ from dataclasses import dataclass, field
+ from pathlib import Path
+ from typing import Optional
+ import io
+
+ from alembic.autogenerate import produce_migrations, render_python_code
+ from alembic.operations import ops
+ from alembic.runtime.migration import MigrationContext
+ from alembic.script import ScriptDirectory
+ from loguru import logger
+ from sqlalchemy import create_engine, text
+
+ from ...settings import settings
+ from .pydantic_to_sqlalchemy import get_target_metadata
+
+
+ # Tables that are NOT managed by Pydantic models (infrastructure tables)
+ # These are created by 001_install.sql and should be excluded from diff
+ INFRASTRUCTURE_TABLES = {
+     "kv_store",
+     "rem_migrations",
+     "rate_limits",
+     "persons",  # Legacy table - to be removed from DB
+ }
+
+ # Prefixes for tables that should be included in diff
+ # (embeddings tables are created alongside entity tables)
+ EMBEDDINGS_PREFIX = "embeddings_"
+
+
+ @dataclass
+ class SchemaDiff:
+     """Result of schema comparison."""
+
+     has_changes: bool
+     summary: list[str] = field(default_factory=list)
+     sql: str = ""
+     upgrade_ops: Optional[ops.UpgradeOps] = None
+
+     @property
+     def change_count(self) -> int:
+         """Total number of detected changes."""
+         return len(self.summary)
+
+
+ class DiffService:
+     """
+     Service for comparing Pydantic models against database schema.
+
+     Uses Alembic's autogenerate machinery without creating revision files.
+     """
+
+     def __init__(self, models_dir: Optional[Path] = None):
+         """
+         Initialize diff service.
+
+         Args:
+             models_dir: Directory containing Pydantic models.
+                 If None, uses default rem/models/entities location.
+         """
+         self.models_dir = models_dir
+         self._metadata = None
+
+     def get_connection_url(self) -> str:
+         """Build PostgreSQL connection URL from settings using psycopg (v3) driver."""
+         pg = settings.postgres
+         # Use postgresql+psycopg to use psycopg v3 (not psycopg2)
+         url = f"postgresql+psycopg://{pg.user}"
+         if pg.password:
+             url += f":{pg.password}"
+         url += f"@{pg.host}:{pg.port}/{pg.database}"
+         return url
+
+     def get_target_metadata(self):
+         """Get SQLAlchemy metadata from Pydantic models."""
+         if self._metadata is None:
+             if self.models_dir:
+                 from .pydantic_to_sqlalchemy import build_sqlalchemy_metadata_from_pydantic
+                 self._metadata = build_sqlalchemy_metadata_from_pydantic(self.models_dir)
+             else:
+                 self._metadata = get_target_metadata()
+         return self._metadata
+
+     def _include_object(self, obj, name, type_, reflected, compare_to) -> bool:
+         """
+         Filter function for Alembic autogenerate.
+
+         Excludes infrastructure tables that are not managed by Pydantic models.
+
+         Args:
+             obj: The schema object (Table, Column, Index, etc.)
+             name: Object name
+             type_: Object type ("table", "column", "index", etc.)
+             reflected: True if object exists in database
+             compare_to: The object being compared to (if any)
+
+         Returns:
+             True to include in diff, False to exclude
+         """
+         if type_ == "table":
+             # Exclude infrastructure tables
+             if name in INFRASTRUCTURE_TABLES:
+                 return False
+         # Include embeddings tables (they're part of the model schema)
+         # These are now generated in pydantic_to_sqlalchemy
+         return True
+
+     def compute_diff(self) -> SchemaDiff:
+         """
+         Compare Pydantic models against database and return differences.
+
+         Returns:
+             SchemaDiff with detected changes
+         """
+         url = self.get_connection_url()
+         engine = create_engine(url)
+         metadata = self.get_target_metadata()
+
+         summary = []
+
+         with engine.connect() as conn:
+             # Create migration context for comparison
+             context = MigrationContext.configure(
+                 conn,
+                 opts={
+                     "target_metadata": metadata,
+                     "compare_type": True,
+                     "compare_server_default": False,  # Avoid false positives
+                     "include_schemas": False,
+                     "include_object": self._include_object,
+                 },
+             )
+
+             # Run autogenerate comparison
+             migration_script = produce_migrations(context, metadata)
+             upgrade_ops = migration_script.upgrade_ops
+
+             # Process detected operations
+             if upgrade_ops and upgrade_ops.ops:
+                 for op in upgrade_ops.ops:
+                     summary.extend(self._describe_operation(op))
+
+         has_changes = len(summary) > 0
+
+         # Generate SQL if there are changes
+         sql = ""
+         if has_changes and upgrade_ops:
+             sql = self._render_sql(upgrade_ops, engine)
+
+         return SchemaDiff(
+             has_changes=has_changes,
+             summary=summary,
+             sql=sql,
+             upgrade_ops=upgrade_ops,
+         )
+
+     def _describe_operation(self, op: ops.MigrateOperation, prefix: str = "") -> list[str]:
+         """Convert Alembic operation to human-readable description."""
+         descriptions = []
+
+         if isinstance(op, ops.CreateTableOp):
+             descriptions.append(f"{prefix}+ CREATE TABLE {op.table_name}")
+             for col in op.columns:
+                 if hasattr(col, 'name'):
+                     descriptions.append(f"{prefix}  + column {col.name}")
+
+         elif isinstance(op, ops.DropTableOp):
+             descriptions.append(f"{prefix}- DROP TABLE {op.table_name}")
+
+         elif isinstance(op, ops.AddColumnOp):
+             col_type = str(op.column.type) if op.column.type else "unknown"
+             descriptions.append(f"{prefix}+ ADD COLUMN {op.table_name}.{op.column.name} ({col_type})")
+
+         elif isinstance(op, ops.DropColumnOp):
+             descriptions.append(f"{prefix}- DROP COLUMN {op.table_name}.{op.column_name}")
+
+         elif isinstance(op, ops.AlterColumnOp):
+             changes = []
+             if op.modify_type is not None:
+                 changes.append(f"type -> {op.modify_type}")
+             if op.modify_nullable is not None:
+                 nullable = "NULL" if op.modify_nullable else "NOT NULL"
+                 changes.append(f"nullable -> {nullable}")
+             if op.modify_server_default is not None:
+                 changes.append(f"default -> {op.modify_server_default}")
+             change_str = ", ".join(changes) if changes else "modified"
+             descriptions.append(f"{prefix}~ ALTER COLUMN {op.table_name}.{op.column_name} ({change_str})")
+
+         elif isinstance(op, ops.CreateIndexOp):
+             # op.columns can be strings or Column objects
+             if op.columns:
+                 cols = ", ".join(
+                     c if isinstance(c, str) else getattr(c, 'name', str(c))
+                     for c in op.columns
+                 )
+             else:
+                 cols = "?"
+             descriptions.append(f"{prefix}+ CREATE INDEX {op.index_name} ON {op.table_name} ({cols})")
+
+         elif isinstance(op, ops.DropIndexOp):
+             descriptions.append(f"{prefix}- DROP INDEX {op.index_name}")
+
+         elif isinstance(op, ops.CreateForeignKeyOp):
+             descriptions.append(f"{prefix}+ CREATE FK {op.constraint_name} ON {op.source_table}")
+
+         elif isinstance(op, ops.DropConstraintOp):
+             descriptions.append(f"{prefix}- DROP CONSTRAINT {op.constraint_name} ON {op.table_name}")
+
+         elif isinstance(op, ops.ModifyTableOps):
+             # Container for multiple operations on same table
+             descriptions.append(f"{prefix}Table: {op.table_name}")
+             for sub_op in op.ops:
+                 descriptions.extend(self._describe_operation(sub_op, prefix + "  "))
+
+         else:
+             descriptions.append(f"{prefix}? {type(op).__name__}")
+
+         return descriptions
+
+     def _render_sql(self, upgrade_ops: ops.UpgradeOps, engine) -> str:
+         """Render upgrade operations as SQL statements."""
+         from alembic.runtime.migration import MigrationContext
+         from alembic.operations import Operations
+
+         sql_lines = []
+
+         # Use offline mode to generate SQL
+         buffer = io.StringIO()
+
+         def emit_sql(text, *args, **kwargs):
+             sql_lines.append(str(text))
+
+         with engine.connect() as conn:
+             context = MigrationContext.configure(
+                 conn,
+                 opts={
+                     "as_sql": True,
+                     "output_buffer": buffer,
+                     "target_metadata": self.get_target_metadata(),
+                 },
+             )
+
+             with context.begin_transaction():
+                 operations = Operations(context)
+                 for op in upgrade_ops.ops:
+                     self._execute_op(operations, op)
+
+         return buffer.getvalue()
+
+     def _execute_op(self, operations: "Operations", op: ops.MigrateOperation):
+         """Execute a single operation via Operations proxy."""
+         from alembic.operations import Operations
+         from alembic.autogenerate import rewriter
+
+         if isinstance(op, ops.CreateTableOp):
+             operations.create_table(
+                 op.table_name,
+                 *op.columns,
+                 schema=op.schema,
+                 **op.kw,
+             )
+         elif isinstance(op, ops.DropTableOp):
+             operations.drop_table(op.table_name, schema=op.schema)
+         elif isinstance(op, ops.AddColumnOp):
+             operations.add_column(op.table_name, op.column, schema=op.schema)
+         elif isinstance(op, ops.DropColumnOp):
+             operations.drop_column(op.table_name, op.column_name, schema=op.schema)
+         elif isinstance(op, ops.AlterColumnOp):
+             operations.alter_column(
+                 op.table_name,
+                 op.column_name,
+                 nullable=op.modify_nullable,
+                 type_=op.modify_type,
+                 server_default=op.modify_server_default,
+                 schema=op.schema,
+             )
+         elif isinstance(op, ops.CreateIndexOp):
+             operations.create_index(
+                 op.index_name,
+                 op.table_name,
+                 op.columns,
+                 schema=op.schema,
+                 unique=op.unique,
+                 **op.kw,
+             )
+         elif isinstance(op, ops.DropIndexOp):
+             operations.drop_index(op.index_name, table_name=op.table_name, schema=op.schema)
+         elif isinstance(op, ops.ModifyTableOps):
+             for sub_op in op.ops:
+                 self._execute_op(operations, sub_op)
+
+     def generate_migration_file(
+         self,
+         output_dir: Path,
+         message: str = "auto_migration",
+     ) -> Optional[Path]:
+         """
+         Generate a numbered migration file from the diff.
+
+         Args:
+             output_dir: Directory to write migration file
+             message: Migration description (used in filename)
+
+         Returns:
+             Path to generated file, or None if no changes
+         """
+         diff = self.compute_diff()
+
+         if not diff.has_changes:
+             logger.info("No schema changes detected")
+             return None
+
+         # Find next migration number
+         existing = sorted(output_dir.glob("*.sql"))
+         next_num = 1
+         for f in existing:
+             try:
+                 num = int(f.stem.split("_")[0])
+                 next_num = max(next_num, num + 1)
+             except (ValueError, IndexError):
+                 pass
+
+         # Generate filename
+         safe_message = message.replace(" ", "_").replace("-", "_")[:40]
+         filename = f"{next_num:03d}_{safe_message}.sql"
+         output_path = output_dir / filename
+
+         # Write SQL
+         header = f"""-- Migration: {message}
+ -- Generated by: rem db diff --generate
+ -- Changes detected: {diff.change_count}
+ --
+ -- Review this file before applying!
+ -- Apply with: rem db migrate
+ --
+
+ """
+         # Build SQL from operations
+         sql_content = self._build_migration_sql(diff)
+
+         output_path.write_text(header + sql_content)
+         logger.info(f"Generated migration: {output_path}")
+
+         return output_path
+
+     def _build_migration_sql(self, diff: SchemaDiff) -> str:
+         """Build SQL from diff operations."""
+         if not diff.upgrade_ops or not diff.upgrade_ops.ops:
+             return "-- No changes\n"
+
+         lines = []
+         for op in diff.upgrade_ops.ops:
+             lines.extend(self._op_to_sql(op))
+
+         return "\n".join(lines) + "\n"
+
+     def _op_to_sql(self, op: ops.MigrateOperation) -> list[str]:
+         """Convert operation to SQL statements."""
+         lines = []
+
+         if isinstance(op, ops.CreateTableOp):
+             cols = []
+             for col in op.columns:
+                 if hasattr(col, 'name') and hasattr(col, 'type'):
+                     nullable = "" if getattr(col, 'nullable', True) else " NOT NULL"
+                     cols.append(f"    {col.name} {col.type}{nullable}")
+             col_str = ",\n".join(cols)
+             lines.append(f"CREATE TABLE IF NOT EXISTS {op.table_name} (\n{col_str}\n);")
+
+         elif isinstance(op, ops.DropTableOp):
+             lines.append(f"DROP TABLE IF EXISTS {op.table_name};")
+
+         elif isinstance(op, ops.AddColumnOp):
+             col = op.column
+             nullable = "" if getattr(col, 'nullable', True) else " NOT NULL"
+             lines.append(f"ALTER TABLE {op.table_name} ADD COLUMN IF NOT EXISTS {col.name} {col.type}{nullable};")
+
+         elif isinstance(op, ops.DropColumnOp):
+             lines.append(f"ALTER TABLE {op.table_name} DROP COLUMN IF EXISTS {op.column_name};")
+
+         elif isinstance(op, ops.AlterColumnOp):
+             if op.modify_type is not None:
+                 lines.append(f"ALTER TABLE {op.table_name} ALTER COLUMN {op.column_name} TYPE {op.modify_type};")
+             if op.modify_nullable is not None:
+                 if op.modify_nullable:
+                     lines.append(f"ALTER TABLE {op.table_name} ALTER COLUMN {op.column_name} DROP NOT NULL;")
+                 else:
+                     lines.append(f"ALTER TABLE {op.table_name} ALTER COLUMN {op.column_name} SET NOT NULL;")
+
+         elif isinstance(op, ops.CreateIndexOp):
+             # op.columns can be strings or Column objects
+             if op.columns:
+                 cols = ", ".join(
+                     c if isinstance(c, str) else getattr(c, 'name', str(c))
+                     for c in op.columns
+                 )
+             else:
+                 cols = ""
+             unique = "UNIQUE " if op.unique else ""
+             lines.append(f"CREATE {unique}INDEX IF NOT EXISTS {op.index_name} ON {op.table_name} ({cols});")
+
+         elif isinstance(op, ops.DropIndexOp):
+             lines.append(f"DROP INDEX IF EXISTS {op.index_name};")
+
+         elif isinstance(op, ops.ModifyTableOps):
+             lines.append(f"-- Changes to table: {op.table_name}")
+             for sub_op in op.ops:
+                 lines.extend(self._op_to_sql(sub_op))
+
+         else:
+             lines.append(f"-- Unsupported operation: {type(op).__name__}")
+
+         return lines
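
A sketch of the two workflows the module docstring names, CI drift detection and migration generation, using only methods defined above; the output directory and exit-code convention are assumptions, not remdb CLI behavior:

```python
# Sketch only: wire DiffService into the workflows its docstring names.
# Output directory and exit-code convention are assumptions.
import sys
from pathlib import Path

from rem.services.postgres import DiffService


def check_drift() -> int:
    """CI-style check: non-zero exit when code and database have drifted."""
    diff = DiffService().compute_diff()
    if diff.has_changes:
        print("Schema drift detected:")
        for line in diff.summary:
            print(f"  {line}")
        return 1
    return 0


def generate_migration() -> None:
    """Write a numbered migration file; DiffService returns None when in sync."""
    path = DiffService().generate_migration_file(
        output_dir=Path("sql/migrations"),  # assumed location
        message="sync models",
    )
    print(f"Generated {path}" if path else "No changes detected")


if __name__ == "__main__":
    sys.exit(check_drift())
```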