remdb 0.3.14__py3-none-any.whl → 0.3.133__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (89)
  1. rem/agentic/README.md +76 -0
  2. rem/agentic/__init__.py +15 -0
  3. rem/agentic/agents/__init__.py +16 -2
  4. rem/agentic/agents/sse_simulator.py +502 -0
  5. rem/agentic/context.py +51 -27
  6. rem/agentic/llm_provider_models.py +301 -0
  7. rem/agentic/mcp/tool_wrapper.py +112 -17
  8. rem/agentic/otel/setup.py +93 -4
  9. rem/agentic/providers/phoenix.py +302 -109
  10. rem/agentic/providers/pydantic_ai.py +215 -26
  11. rem/agentic/schema.py +361 -21
  12. rem/agentic/tools/rem_tools.py +3 -3
  13. rem/api/README.md +215 -1
  14. rem/api/deps.py +255 -0
  15. rem/api/main.py +132 -40
  16. rem/api/mcp_router/resources.py +1 -1
  17. rem/api/mcp_router/server.py +26 -5
  18. rem/api/mcp_router/tools.py +465 -7
  19. rem/api/routers/admin.py +494 -0
  20. rem/api/routers/auth.py +70 -0
  21. rem/api/routers/chat/completions.py +402 -20
  22. rem/api/routers/chat/models.py +88 -10
  23. rem/api/routers/chat/otel_utils.py +33 -0
  24. rem/api/routers/chat/sse_events.py +542 -0
  25. rem/api/routers/chat/streaming.py +642 -45
  26. rem/api/routers/dev.py +81 -0
  27. rem/api/routers/feedback.py +268 -0
  28. rem/api/routers/messages.py +473 -0
  29. rem/api/routers/models.py +78 -0
  30. rem/api/routers/query.py +360 -0
  31. rem/api/routers/shared_sessions.py +406 -0
  32. rem/auth/middleware.py +126 -27
  33. rem/cli/commands/README.md +237 -64
  34. rem/cli/commands/cluster.py +1808 -0
  35. rem/cli/commands/configure.py +1 -3
  36. rem/cli/commands/db.py +386 -143
  37. rem/cli/commands/experiments.py +418 -27
  38. rem/cli/commands/process.py +14 -8
  39. rem/cli/commands/schema.py +97 -50
  40. rem/cli/main.py +27 -6
  41. rem/config.py +10 -3
  42. rem/models/core/core_model.py +7 -1
  43. rem/models/core/experiment.py +54 -0
  44. rem/models/core/rem_query.py +5 -2
  45. rem/models/entities/__init__.py +21 -0
  46. rem/models/entities/domain_resource.py +38 -0
  47. rem/models/entities/feedback.py +123 -0
  48. rem/models/entities/message.py +30 -1
  49. rem/models/entities/session.py +83 -0
  50. rem/models/entities/shared_session.py +180 -0
  51. rem/registry.py +10 -4
  52. rem/schemas/agents/rem.yaml +7 -3
  53. rem/services/content/service.py +92 -20
  54. rem/services/embeddings/api.py +4 -4
  55. rem/services/embeddings/worker.py +16 -16
  56. rem/services/phoenix/client.py +154 -14
  57. rem/services/postgres/README.md +159 -15
  58. rem/services/postgres/__init__.py +2 -1
  59. rem/services/postgres/diff_service.py +531 -0
  60. rem/services/postgres/pydantic_to_sqlalchemy.py +427 -129
  61. rem/services/postgres/repository.py +132 -0
  62. rem/services/postgres/schema_generator.py +205 -4
  63. rem/services/postgres/service.py +6 -6
  64. rem/services/rem/parser.py +44 -9
  65. rem/services/rem/service.py +36 -2
  66. rem/services/session/compression.py +24 -1
  67. rem/services/session/reload.py +1 -1
  68. rem/settings.py +324 -23
  69. rem/sql/background_indexes.sql +21 -16
  70. rem/sql/migrations/001_install.sql +387 -54
  71. rem/sql/migrations/002_install_models.sql +2320 -393
  72. rem/sql/migrations/003_optional_extensions.sql +326 -0
  73. rem/sql/migrations/004_cache_system.sql +548 -0
  74. rem/utils/__init__.py +18 -0
  75. rem/utils/date_utils.py +2 -2
  76. rem/utils/model_helpers.py +156 -1
  77. rem/utils/schema_loader.py +220 -22
  78. rem/utils/sql_paths.py +146 -0
  79. rem/utils/sql_types.py +3 -1
  80. rem/workers/__init__.py +3 -1
  81. rem/workers/db_listener.py +579 -0
  82. rem/workers/unlogged_maintainer.py +463 -0
  83. {remdb-0.3.14.dist-info → remdb-0.3.133.dist-info}/METADATA +335 -226
  84. {remdb-0.3.14.dist-info → remdb-0.3.133.dist-info}/RECORD +86 -66
  85. {remdb-0.3.14.dist-info → remdb-0.3.133.dist-info}/WHEEL +1 -1
  86. rem/sql/002_install_models.sql +0 -1068
  87. rem/sql/install_models.sql +0 -1051
  88. rem/sql/migrations/003_seed_default_user.sql +0 -48
  89. {remdb-0.3.14.dist-info → remdb-0.3.133.dist-info}/entry_points.txt +0 -0
@@ -69,7 +69,7 @@ def get_global_embedding_worker(postgres_service: Any = None) -> "EmbeddingWorke
69
69
  if postgres_service is None:
70
70
  raise RuntimeError("Must provide postgres_service on first call to get_global_embedding_worker")
71
71
  _global_worker = EmbeddingWorker(postgres_service=postgres_service)
72
- logger.info("Created global EmbeddingWorker singleton")
72
+ logger.debug("Created global EmbeddingWorker singleton")
73
73
 
74
74
  return _global_worker
75
75
 
@@ -117,7 +117,7 @@ class EmbeddingWorker:
117
117
  "No OpenAI API key provided - embeddings will use zero vectors"
118
118
  )
119
119
 
120
- logger.info(
120
+ logger.debug(
121
121
  f"Initialized EmbeddingWorker: {num_workers} workers, "
122
122
  f"batch_size={batch_size}, timeout={batch_timeout}s"
123
123
  )
@@ -125,17 +125,17 @@ class EmbeddingWorker:
125
125
  async def start(self) -> None:
126
126
  """Start worker pool."""
127
127
  if self.running:
128
- logger.warning("EmbeddingWorker already running")
128
+ logger.debug("EmbeddingWorker already running")
129
129
  return
130
130
 
131
131
  self.running = True
132
- logger.info(f"Starting {self.num_workers} embedding workers")
132
+ logger.debug(f"Starting {self.num_workers} embedding workers")
133
133
 
134
134
  for i in range(self.num_workers):
135
135
  worker = asyncio.create_task(self._worker_loop(i))
136
136
  self.workers.append(worker)
137
137
 
138
- logger.info("EmbeddingWorker started")
138
+ logger.debug("EmbeddingWorker started")
139
139
 
140
140
  async def stop(self) -> None:
141
141
  """Stop worker pool gracefully - processes remaining queue before stopping."""
@@ -143,7 +143,7 @@ class EmbeddingWorker:
143
143
  return
144
144
 
145
145
  queue_size = self.task_queue.qsize()
146
- logger.info(f"Stopping EmbeddingWorker (processing {queue_size} queued tasks first)")
146
+ logger.debug(f"Stopping EmbeddingWorker (processing {queue_size} queued tasks first)")
147
147
 
148
148
  # Wait for queue to drain (with timeout)
149
149
  max_wait = 30 # 30 seconds max
@@ -171,7 +171,7 @@ class EmbeddingWorker:
171
171
  await asyncio.gather(*self.workers, return_exceptions=True)
172
172
 
173
173
  self.workers.clear()
174
- logger.info("EmbeddingWorker stopped")
174
+ logger.debug("EmbeddingWorker stopped")
175
175
 
176
176
  async def queue_task(self, task: EmbeddingTask) -> None:
177
177
  """
@@ -195,7 +195,7 @@ class EmbeddingWorker:
195
195
  Args:
196
196
  worker_id: Unique worker identifier
197
197
  """
198
- logger.info(f"Worker {worker_id} started")
198
+ logger.debug(f"Worker {worker_id} started")
199
199
 
200
200
  while self.running:
201
201
  try:
@@ -205,7 +205,7 @@ class EmbeddingWorker:
205
205
  if not batch:
206
206
  continue
207
207
 
208
- logger.info(f"Worker {worker_id} processing batch of {len(batch)} tasks")
208
+ logger.debug(f"Worker {worker_id} processing batch of {len(batch)} tasks")
209
209
 
210
210
  # Generate embeddings for batch
211
211
  await self._process_batch(batch)
@@ -213,14 +213,14 @@ class EmbeddingWorker:
213
213
  logger.debug(f"Worker {worker_id} completed batch")
214
214
 
215
215
  except asyncio.CancelledError:
216
- logger.info(f"Worker {worker_id} cancelled")
216
+ logger.debug(f"Worker {worker_id} cancelled")
217
217
  break
218
218
  except Exception as e:
219
219
  logger.error(f"Worker {worker_id} error: {e}", exc_info=True)
220
220
  # Continue processing (don't crash worker on error)
221
221
  await asyncio.sleep(1)
222
222
 
223
- logger.info(f"Worker {worker_id} stopped")
223
+ logger.debug(f"Worker {worker_id} stopped")
224
224
 
225
225
  async def _collect_batch(self) -> list[EmbeddingTask]:
226
226
  """
@@ -284,10 +284,10 @@ class EmbeddingWorker:
284
284
  )
285
285
 
286
286
  # Upsert to database
287
- logger.info(f"Upserting {len(embeddings)} embeddings to database...")
287
+ logger.debug(f"Upserting {len(embeddings)} embeddings to database...")
288
288
  await self._upsert_embeddings(batch, embeddings)
289
289
 
290
- logger.info(
290
+ logger.debug(
291
291
  f"Successfully generated and stored {len(embeddings)} embeddings "
292
292
  f"(provider={provider}, model={model})"
293
293
  )
@@ -315,7 +315,7 @@ class EmbeddingWorker:
315
315
  """
316
316
  if provider == "openai" and self.openai_api_key:
317
317
  try:
318
- logger.info(
318
+ logger.debug(
319
319
  f"Generating OpenAI embeddings for {len(texts)} texts using {model}"
320
320
  )
321
321
 
@@ -336,7 +336,7 @@ class EmbeddingWorker:
336
336
  data = response.json()
337
337
  embeddings = [item["embedding"] for item in data["data"]]
338
338
 
339
- logger.info(
339
+ logger.debug(
340
340
  f"Successfully generated {len(embeddings)} embeddings from OpenAI"
341
341
  )
342
342
  return embeddings
@@ -409,7 +409,7 @@ class EmbeddingWorker:
409
409
  ),
410
410
  )
411
411
 
412
- logger.info(
412
+ logger.debug(
413
413
  f"Upserted embedding: {task.table_name}.{task.entity_id}.{task.field_name}"
414
414
  )
415
415
 
@@ -792,29 +792,169 @@ class PhoenixClient:
792
792
  label: str | None = None,
793
793
  score: float | None = None,
794
794
  explanation: str | None = None,
795
- ) -> None:
796
- """Add feedback annotation to a span.
795
+ metadata: dict[str, Any] | None = None,
796
+ trace_id: str | None = None,
797
+ ) -> str | None:
798
+ """Add feedback annotation to a span via Phoenix REST API.
799
+
800
+ Uses direct HTTP POST to /v1/span_annotations for reliability
801
+ (Phoenix Python client API changes frequently).
797
802
 
798
803
  Args:
799
- span_id: Span ID to annotate
800
- annotation_name: Name of the annotation (e.g., "correctness")
804
+ span_id: Span ID to annotate (hex string)
805
+ annotation_name: Name of the annotation (e.g., "correctness", "user_feedback")
801
806
  annotator_kind: Type of annotator ("HUMAN", "LLM", "CODE")
802
- label: Optional label (e.g., "correct", "incorrect")
807
+ label: Optional label (e.g., "correct", "incorrect", "helpful")
803
808
  score: Optional numeric score (0.0-1.0)
804
809
  explanation: Optional explanation text
810
+ metadata: Optional additional metadata dict
811
+ trace_id: Optional trace ID (used if span lookup needed)
812
+
813
+ Returns:
814
+ Annotation ID if successful, None otherwise
805
815
  """
816
+ import httpx
817
+
806
818
  try:
807
- self._client.add_span_annotation( # type: ignore[attr-defined]
808
- span_id=span_id,
809
- name=annotation_name,
810
- annotator_kind=annotator_kind,
811
- label=label,
812
- score=score,
813
- explanation=explanation,
814
- )
819
+ # Build annotation payload for Phoenix REST API
820
+ annotation_data = {
821
+ "span_id": span_id,
822
+ "name": annotation_name,
823
+ "annotator_kind": annotator_kind,
824
+ "result": {
825
+ "label": label,
826
+ "score": score,
827
+ "explanation": explanation,
828
+ },
829
+ "metadata": metadata or {},
830
+ }
831
+
832
+ # Add trace_id if provided
833
+ if trace_id:
834
+ annotation_data["trace_id"] = trace_id
835
+
836
+ # POST to Phoenix REST API
837
+ annotations_endpoint = f"{self.config.base_url}/v1/span_annotations"
838
+ headers = {}
839
+ if self.config.api_key:
840
+ headers["Authorization"] = f"Bearer {self.config.api_key}"
841
+
842
+ with httpx.Client(timeout=5.0) as client:
843
+ response = client.post(
844
+ annotations_endpoint,
845
+ json={"data": [annotation_data]},
846
+ headers=headers,
847
+ )
848
+ response.raise_for_status()
815
849
 
816
850
  logger.info(f"Added {annotator_kind} feedback to span {span_id}")
851
+ return span_id # Return span_id as annotation reference
817
852
 
853
+ except httpx.HTTPStatusError as e:
854
+ logger.error(
855
+ f"Failed to add span feedback (HTTP {e.response.status_code}): "
856
+ f"{e.response.text if hasattr(e, 'response') else 'N/A'}"
857
+ )
858
+ return None
818
859
  except Exception as e:
819
860
  logger.error(f"Failed to add span feedback: {e}")
820
- raise
861
+ return None
862
+
863
+ def sync_user_feedback(
864
+ self,
865
+ span_id: str,
866
+ rating: int | None = None,
867
+ categories: list[str] | None = None,
868
+ comment: str | None = None,
869
+ feedback_id: str | None = None,
870
+ trace_id: str | None = None,
871
+ ) -> str | None:
872
+ """Sync user feedback to Phoenix as a span annotation.
873
+
874
+ Convenience method for syncing Feedback entities to Phoenix.
875
+ Converts REM feedback format to Phoenix annotation format.
876
+
877
+ Args:
878
+ span_id: OTEL span ID to annotate
879
+ rating: User rating (-1, 1-5 scale)
880
+ categories: List of feedback categories
881
+ comment: Free-text comment
882
+ feedback_id: Optional REM feedback ID for reference
883
+ trace_id: Optional trace ID for the span
884
+
885
+ Returns:
886
+ Phoenix annotation ID if successful
887
+
888
+ Example:
889
+ >>> client.sync_user_feedback(
890
+ ... span_id="abc123",
891
+ ... rating=4,
892
+ ... categories=["helpful", "accurate"],
893
+ ... comment="Great response!"
894
+ ... )
895
+ """
896
+ # Convert rating to 0-1 score
897
+ # Rating scheme:
898
+ # -1 = thumbs down → score 0.0
899
+ # 1 = thumbs up → score 1.0
900
+ # 2-5 = star rating → normalized to 0-1 range
901
+ score = None
902
+ if rating is not None:
903
+ if rating == -1:
904
+ score = 0.0
905
+ elif rating == 1:
906
+ score = 1.0 # Thumbs up
907
+ elif 2 <= rating <= 5:
908
+ score = (rating - 1) / 4.0 # 2→0.25, 3→0.5, 4→0.75, 5→1.0
909
+
910
+ # Use primary category as label
911
+ label = categories[0] if categories else None
912
+
913
+ # Build explanation from comment and additional categories
914
+ explanation = comment
915
+ if categories and len(categories) > 1:
916
+ cats_str = ", ".join(categories[1:])
917
+ if explanation:
918
+ explanation = f"{explanation} [Categories: {cats_str}]"
919
+ else:
920
+ explanation = f"Categories: {cats_str}"
921
+
922
+ # Build metadata
923
+ metadata: dict[str, Any] = {
924
+ "rating": rating,
925
+ "categories": categories or [],
926
+ }
927
+ if feedback_id:
928
+ metadata["rem_feedback_id"] = feedback_id
929
+
930
+ return self.add_span_feedback(
931
+ span_id=span_id,
932
+ annotation_name="user_feedback",
933
+ annotator_kind="HUMAN",
934
+ label=label,
935
+ score=score,
936
+ explanation=explanation,
937
+ metadata=metadata,
938
+ trace_id=trace_id,
939
+ )
940
+
941
+ def get_span_annotations(
942
+ self,
943
+ span_id: str,
944
+ annotation_name: str | None = None,
945
+ ) -> list[dict[str, Any]]:
946
+ """Get annotations for a span.
947
+
948
+ Args:
949
+ span_id: Span ID to query
950
+ annotation_name: Optional filter by annotation name
951
+
952
+ Returns:
953
+ List of annotation dicts
954
+
955
+ TODO: Implement once Phoenix client exposes this method
956
+ """
957
+ # TODO: Phoenix client doesn't expose annotation query yet
958
+ # This is a stub for future implementation
959
+ logger.warning("get_span_annotations not yet implemented in Phoenix client")
960
+ return []
@@ -348,8 +348,27 @@ results = await service.vector_search(
348
348
 
349
349
  ### Initialize Service
350
350
 
351
+ There are two ways to initialize the PostgresService:
352
+
353
+ **Option 1: Factory function (recommended for apps using remdb as a library)**
354
+
355
+ ```python
356
+ from rem.services.postgres import get_postgres_service
357
+
358
+ # Uses POSTGRES__CONNECTION_STRING from environment
359
+ pg = get_postgres_service()
360
+ if pg is None:
361
+ raise RuntimeError("Database not configured - set POSTGRES__CONNECTION_STRING")
362
+
363
+ await pg.connect()
364
+ # ... use pg ...
365
+ await pg.disconnect()
366
+ ```
367
+
368
+ **Option 2: Direct instantiation**
369
+
351
370
  ```python
352
- from rem.services.postgres import PostgresService, Repository
371
+ from rem.services.postgres import PostgresService
353
372
 
354
373
  service = PostgresService(
355
374
  connection_string="postgresql://user:pass@localhost/remdb",
@@ -359,6 +378,9 @@ service = PostgresService(
359
378
  await service.connect()
360
379
  ```
361
380
 
381
+ > **Note**: `get_postgres_service()` returns the service directly. It does NOT support
382
+ > `async with` context manager syntax. Always call `connect()` and `disconnect()` explicitly.
383
+
362
384
  ### Using Repository Pattern
363
385
 
364
386
  **Generic Repository** for simple CRUD operations:
@@ -514,34 +536,156 @@ results = await service.vector_search(
514
536
  - HNSW parameters: `m=16, ef_construction=64` (tunable)
515
537
  - Monitor shared_buffers and work_mem
516
538
 
517
- ## Migrations
539
+ ## Schema Management
518
540
 
519
- Run migrations in order:
541
+ REM uses a **code-as-source-of-truth** approach. Pydantic models define the schema, and the database is kept in sync via diff-based migrations.
520
542
 
521
- ```bash
522
- psql -d remdb -f sql/migrations/001_setup_extensions.sql
523
- psql -d remdb -f sql/migrations/002_kv_store_cache.sql
524
- psql -d remdb -f sql/generated_schema.sql
543
+ ### File Structure
544
+
545
+ ```
546
+ src/rem/sql/
547
+ ├── migrations/
548
+ │ ├── 001_install.sql # Core infrastructure (manual)
549
+ │ └── 002_install_models.sql # Entity tables (auto-generated)
550
+ └── background_indexes.sql # HNSW vector indexes (optional)
525
551
  ```
526
552
 
527
- Background indexes (after data load):
553
+ **Key principle**: Only two migration files. No incremental `003_`, `004_` files.
554
+
555
+ ### CLI Commands
528
556
 
529
557
  ```bash
530
- psql -d remdb -f sql/background_indexes.sql
558
+ # Apply migrations (installs extensions, core tables, entity tables)
559
+ rem db migrate
560
+
561
+ # Check migration status
562
+ rem db status
563
+
564
+ # Generate schema SQL from models (for remdb development)
565
+ rem db schema generate --models src/rem/models/entities
566
+
567
+ # Validate models for schema generation
568
+ rem db schema validate --models src/rem/models/entities
531
569
  ```
532
570
 
533
- ## CLI Usage
571
+ ### Model Registry
534
572
 
535
- Generate schema from models:
573
+ Models are discovered via the registry:
536
574
 
537
- ```bash
538
- rem schema generate --models src/rem/models/entities --output sql/schema.sql
575
+ ```python
576
+ import rem
577
+ from rem.models.core import CoreModel
578
+
579
+ @rem.register_model
580
+ class MyEntity(CoreModel):
581
+ name: str
582
+ description: str # Auto-embeds
583
+ ```
584
+
585
+ ## Using REM as a Library (Downstream Apps)
586
+
587
+ When building an application that **depends on remdb as a package** (e.g., `pip install remdb`),
588
+ there are important differences from developing remdb itself.
589
+
590
+ ### What Works Out of the Box
591
+
592
+ 1. **All core entity tables** - Resources, Messages, Users, Sessions, etc.
593
+ 2. **PostgresService** - Full database access via `get_postgres_service()`
594
+ 3. **Repository pattern** - CRUD operations for core entities
595
+ 4. **Migrations** - `rem db migrate` applies the bundled SQL files
596
+
597
+ ```python
598
+ # In your downstream app (e.g., myapp/main.py)
599
+ from rem.services.postgres import get_postgres_service
600
+ from rem.models.entities import Message, Resource
601
+
602
+ pg = get_postgres_service()
603
+ await pg.connect()
604
+
605
+ # Use core entities - tables already exist
606
+ messages = await pg.query(Message, {"session_id": "abc"})
539
607
  ```
540
608
 
541
- Validate models:
609
+ ### Custom Models in Downstream Apps
610
+
611
+ The `@rem.register_model` decorator registers models in the **runtime registry**, which is useful for:
612
+ - Schema introspection at runtime
613
+ - Future tooling that reads the registry
614
+
615
+ However, **`rem db migrate` only applies SQL files bundled in the remdb package**.
616
+ Custom models from downstream apps do NOT automatically get tables created.
617
+
618
+ **Options for custom model tables:**
619
+
620
+ **Option A: Use core entities with metadata**
621
+
622
+ Store custom data in the `metadata` JSONB field of existing entities:
623
+
624
+ ```python
625
+ resource = Resource(
626
+ name="my-custom-thing",
627
+ content="...",
628
+ metadata={"custom_field": "value", "another": 123}
629
+ )
630
+ ```
631
+
632
+ **Option B: Create tables manually**
633
+
634
+ Write and apply your own SQL:
635
+
636
+ ```sql
637
+ -- myapp/sql/custom_tables.sql
638
+ CREATE TABLE IF NOT EXISTS conversation_summaries (
639
+ id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
640
+ session_ref TEXT NOT NULL,
641
+ summary TEXT NOT NULL,
642
+ -- ... include CoreModel fields for compatibility
643
+ user_id VARCHAR(256),
644
+ created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
645
+ updated_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP
646
+ );
647
+ ```
542
648
 
543
649
  ```bash
544
- rem schema validate --models src/rem/models/entities
650
+ psql $DATABASE_URL -f myapp/sql/custom_tables.sql
651
+ ```
652
+
653
+ **Option C: Contribute upstream**
654
+
655
+ If your model is generally useful, contribute it to remdb so it's included in
656
+ the next release and `rem db migrate` creates it automatically.
657
+
658
+ ### Example: Downstream App Structure
659
+
660
+ ```
661
+ myapp/
662
+ ├── main.py # Import models, start API
663
+ ├── models/
664
+ │ └── __init__.py # @rem.register_model decorators
665
+ ├── sql/
666
+ │ └── custom.sql # Manual migrations for custom tables
667
+ ├── .env # POSTGRES__CONNECTION_STRING, LLM keys
668
+ └── pyproject.toml # dependencies = ["remdb>=0.3.110"]
669
+ ```
670
+
671
+ ```python
672
+ # myapp/models/__init__.py
673
+ import rem
674
+ from rem.models.core import CoreModel
675
+
676
+ @rem.register_model
677
+ class ConversationSummary(CoreModel):
678
+ """Registered for introspection, but table created via sql/custom.sql"""
679
+ session_ref: str
680
+ summary: str
681
+ ```
682
+
683
+ ```python
684
+ # myapp/main.py
685
+ import models # Registers custom models
686
+
687
+ from rem.api.main import app # Use REM's FastAPI app
688
+ # Or build your own app using rem.services
545
689
  ```
546
690
 
547
691
  ## Configuration
@@ -2,6 +2,7 @@
2
2
  PostgreSQL service for CloudNativePG database operations.
3
3
  """
4
4
 
5
+ from .diff_service import DiffService, SchemaDiff
5
6
  from .repository import Repository
6
7
  from .service import PostgresService
7
8
 
@@ -20,4 +21,4 @@ def get_postgres_service() -> PostgresService | None:
20
21
  return PostgresService()
21
22
 
22
23
 
23
- __all__ = ["PostgresService", "get_postgres_service", "Repository"]
24
+ __all__ = ["PostgresService", "get_postgres_service", "Repository", "DiffService", "SchemaDiff"]