remdb 0.2.6__py3-none-any.whl → 0.3.118__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release: this version of remdb might be problematic.

Files changed (104)
  1. rem/__init__.py +129 -2
  2. rem/agentic/README.md +76 -0
  3. rem/agentic/__init__.py +15 -0
  4. rem/agentic/agents/__init__.py +16 -2
  5. rem/agentic/agents/sse_simulator.py +500 -0
  6. rem/agentic/context.py +28 -22
  7. rem/agentic/llm_provider_models.py +301 -0
  8. rem/agentic/mcp/tool_wrapper.py +29 -3
  9. rem/agentic/otel/setup.py +92 -4
  10. rem/agentic/providers/phoenix.py +32 -43
  11. rem/agentic/providers/pydantic_ai.py +168 -24
  12. rem/agentic/schema.py +358 -21
  13. rem/agentic/tools/rem_tools.py +3 -3
  14. rem/api/README.md +238 -1
  15. rem/api/deps.py +255 -0
  16. rem/api/main.py +154 -37
  17. rem/api/mcp_router/resources.py +1 -1
  18. rem/api/mcp_router/server.py +26 -5
  19. rem/api/mcp_router/tools.py +454 -7
  20. rem/api/middleware/tracking.py +172 -0
  21. rem/api/routers/admin.py +494 -0
  22. rem/api/routers/auth.py +124 -0
  23. rem/api/routers/chat/completions.py +152 -16
  24. rem/api/routers/chat/models.py +7 -3
  25. rem/api/routers/chat/sse_events.py +526 -0
  26. rem/api/routers/chat/streaming.py +608 -45
  27. rem/api/routers/dev.py +81 -0
  28. rem/api/routers/feedback.py +148 -0
  29. rem/api/routers/messages.py +473 -0
  30. rem/api/routers/models.py +78 -0
  31. rem/api/routers/query.py +360 -0
  32. rem/api/routers/shared_sessions.py +406 -0
  33. rem/auth/middleware.py +126 -27
  34. rem/cli/commands/README.md +237 -64
  35. rem/cli/commands/ask.py +15 -11
  36. rem/cli/commands/cluster.py +1300 -0
  37. rem/cli/commands/configure.py +170 -97
  38. rem/cli/commands/db.py +396 -139
  39. rem/cli/commands/experiments.py +278 -96
  40. rem/cli/commands/process.py +22 -15
  41. rem/cli/commands/scaffold.py +47 -0
  42. rem/cli/commands/schema.py +97 -50
  43. rem/cli/main.py +37 -6
  44. rem/config.py +2 -2
  45. rem/models/core/core_model.py +7 -1
  46. rem/models/core/rem_query.py +5 -2
  47. rem/models/entities/__init__.py +21 -0
  48. rem/models/entities/domain_resource.py +38 -0
  49. rem/models/entities/feedback.py +123 -0
  50. rem/models/entities/message.py +30 -1
  51. rem/models/entities/session.py +83 -0
  52. rem/models/entities/shared_session.py +180 -0
  53. rem/models/entities/user.py +10 -3
  54. rem/registry.py +373 -0
  55. rem/schemas/agents/rem.yaml +7 -3
  56. rem/services/content/providers.py +94 -140
  57. rem/services/content/service.py +115 -24
  58. rem/services/dreaming/affinity_service.py +2 -16
  59. rem/services/dreaming/moment_service.py +2 -15
  60. rem/services/embeddings/api.py +24 -17
  61. rem/services/embeddings/worker.py +16 -16
  62. rem/services/phoenix/EXPERIMENT_DESIGN.md +3 -3
  63. rem/services/phoenix/client.py +252 -19
  64. rem/services/postgres/README.md +159 -15
  65. rem/services/postgres/__init__.py +2 -1
  66. rem/services/postgres/diff_service.py +531 -0
  67. rem/services/postgres/pydantic_to_sqlalchemy.py +427 -129
  68. rem/services/postgres/repository.py +132 -0
  69. rem/services/postgres/schema_generator.py +291 -9
  70. rem/services/postgres/service.py +6 -6
  71. rem/services/rate_limit.py +113 -0
  72. rem/services/rem/README.md +14 -0
  73. rem/services/rem/parser.py +44 -9
  74. rem/services/rem/service.py +36 -2
  75. rem/services/session/compression.py +17 -1
  76. rem/services/session/reload.py +1 -1
  77. rem/services/user_service.py +98 -0
  78. rem/settings.py +169 -22
  79. rem/sql/background_indexes.sql +21 -16
  80. rem/sql/migrations/001_install.sql +387 -54
  81. rem/sql/migrations/002_install_models.sql +2320 -393
  82. rem/sql/migrations/003_optional_extensions.sql +326 -0
  83. rem/sql/migrations/004_cache_system.sql +548 -0
  84. rem/utils/__init__.py +18 -0
  85. rem/utils/constants.py +97 -0
  86. rem/utils/date_utils.py +228 -0
  87. rem/utils/embeddings.py +17 -4
  88. rem/utils/files.py +167 -0
  89. rem/utils/mime_types.py +158 -0
  90. rem/utils/model_helpers.py +156 -1
  91. rem/utils/schema_loader.py +284 -21
  92. rem/utils/sql_paths.py +146 -0
  93. rem/utils/sql_types.py +3 -1
  94. rem/utils/vision.py +9 -14
  95. rem/workers/README.md +14 -14
  96. rem/workers/__init__.py +2 -1
  97. rem/workers/db_maintainer.py +74 -0
  98. rem/workers/unlogged_maintainer.py +463 -0
  99. {remdb-0.2.6.dist-info → remdb-0.3.118.dist-info}/METADATA +598 -171
  100. {remdb-0.2.6.dist-info → remdb-0.3.118.dist-info}/RECORD +102 -73
  101. {remdb-0.2.6.dist-info → remdb-0.3.118.dist-info}/WHEEL +1 -1
  102. rem/sql/002_install_models.sql +0 -1068
  103. rem/sql/install_models.sql +0 -1038
  104. {remdb-0.2.6.dist-info → remdb-0.3.118.dist-info}/entry_points.txt +0 -0
rem/cli/commands/process.py CHANGED
@@ -12,12 +12,12 @@ from rem.services.content import ContentService
 
 @click.command(name="ingest")
 @click.argument("file_path", type=click.Path(exists=True))
-@click.option("--user-id", required=True, help="User ID to own the file")
+@click.option("--user-id", default=None, help="User ID to scope file privately (default: public/shared)")
 @click.option("--category", help="Optional file category")
 @click.option("--tags", help="Optional comma-separated tags")
 def process_ingest(
     file_path: str,
-    user_id: str,
+    user_id: str | None,
     category: str | None,
     tags: str | None,
 ):
@@ -32,8 +32,9 @@ def process_ingest(
     5. Creates a File entity record.
 
     Examples:
-        rem process ingest sample.pdf --user-id user-123
-        rem process ingest contract.docx --user-id user-123 --category legal --tags contract,2023
+        rem process ingest sample.pdf
+        rem process ingest contract.docx --category legal --tags contract,2023
+        rem process ingest agent.yaml  # Auto-detects kind=agent, saves to schemas table
     """
     import asyncio
     from ...services.content import ContentService
@@ -56,7 +57,8 @@ def process_ingest(
 
         tag_list = tags.split(",") if tags else None
 
-        logger.info(f"Ingesting file: {file_path} for user: {user_id}")
+        scope_msg = f"user: {user_id}" if user_id else "public"
+        logger.info(f"Ingesting file: {file_path} ({scope_msg})")
         result = await service.ingest_file(
             file_uri=file_path,
             user_id=user_id,
@@ -65,11 +67,15 @@ def process_ingest(
             is_local_server=True,  # CLI is local
         )
 
-        if result.get("processing_status") == "completed":
-            logger.success(f"File ingested successfully: {result['file_name']}")
+        # Handle schema ingestion (agents/evaluators)
+        if result.get("schema_name"):
+            logger.success(f"Schema ingested: {result['schema_name']} (kind={result.get('kind', 'agent')})")
+            logger.info(f"Version: {result.get('version', '1.0.0')}")
+        # Handle file ingestion
+        elif result.get("processing_status") == "completed":
+            logger.success(f"File ingested: {result['file_name']}")
             logger.info(f"File ID: {result['file_id']}")
             logger.info(f"Resources created: {result['resources_created']}")
-            logger.info(f"Status: {result['processing_status']}")
         else:
             logger.error(f"Ingestion failed: {result.get('message', 'Unknown error')}")
             sys.exit(1)
@@ -192,15 +198,13 @@ def process_uri(uri: str, output: str, save: str | None):
 
 
 @click.command(name="files")
-@click.option("--tenant-id", required=True, help="Tenant ID")
-@click.option("--user-id", help="Filter by user ID")
+@click.option("--user-id", default=None, help="User ID (default: from settings)")
 @click.option("--status", type=click.Choice(["pending", "processing", "completed", "failed"]), help="Filter by status")
 @click.option("--extractor", help="Run files through custom extractor (e.g., cv-parser-v1)")
 @click.option("--limit", type=int, help="Max files to process")
 @click.option("--provider", help="Optional LLM provider override")
 @click.option("--model", help="Optional model override")
 def process_files(
-    tenant_id: str,
     user_id: Optional[str],
     status: Optional[str],
     extractor: Optional[str],
@@ -217,19 +221,22 @@ def process_files(
 
     \b
     # List completed files
-    rem process files --tenant-id acme-corp --status completed
+    rem process files --status completed
 
     \b
     # Extract from CV files
-    rem process files --tenant-id acme-corp --extractor cv-parser-v1 --limit 10
+    rem process files --extractor cv-parser-v1 --limit 10
 
     \b
     # Extract with provider override
-    rem process files --tenant-id acme-corp --extractor contract-analyzer-v1 \\
+    rem process files --extractor contract-analyzer-v1 \\
        --provider anthropic --model claude-sonnet-4-5
     """
+    from ...settings import settings
+    effective_user_id = user_id or settings.test.effective_user_id
+
     logger.warning("Not implemented yet")
-    logger.info(f"Would process files for tenant: {tenant_id}")
+    logger.info(f"Would process files for user: {effective_user_id}")
 
     if user_id:
         logger.info(f"Filter: user_id={user_id}")
rem/cli/commands/scaffold.py ADDED
@@ -0,0 +1,47 @@
+"""
+Scaffold command - generate project structure for REM-based applications.
+
+TODO: Implement this command to generate:
+- my_app/main.py (entry point with create_app)
+- my_app/models.py (example CoreModel subclass)
+- my_app/routers/ (example FastAPI router)
+- schemas/agents/ (example agent schema)
+- schemas/evaluators/ (example evaluator)
+- sql/migrations/ (empty migrations directory)
+- pyproject.toml (with remdb dependency)
+- README.md (basic usage instructions)
+
+Usage:
+    rem scaffold my-app
+    rem scaffold my-app --with-examples  # Include example models/routers/tools
+"""
+
+import click
+
+
+@click.command()
+@click.argument("name")
+@click.option("--with-examples", is_flag=True, help="Include example code")
+def scaffold(name: str, with_examples: bool) -> None:
+    """
+    Generate a new REM-based project structure.
+
+    NAME is the project directory name to create.
+    """
+    click.echo(f"TODO: Scaffold command not yet implemented")
+    click.echo(f"Would create project: {name}")
+    click.echo(f"With examples: {with_examples}")
+    click.echo()
+    click.echo("For now, manually create this structure:")
+    click.echo(f"""
+    {name}/
+    ├── {name.replace('-', '_')}/
+    │   ├── main.py       # Entry point (create_app + extensions)
+    │   ├── models.py     # Custom models (inherit CoreModel)
+    │   └── routers/      # Custom FastAPI routers
+    ├── schemas/
+    │   ├── agents/       # Custom agent YAML schemas
+    │   └── evaluators/   # Custom evaluator schemas
+    ├── sql/migrations/   # Custom SQL migrations
+    └── pyproject.toml
+    """)
rem/cli/commands/schema.py CHANGED
@@ -8,6 +8,7 @@ Usage:
 """
 
 import asyncio
+import importlib
 from pathlib import Path
 
 import click
@@ -15,68 +16,116 @@ from loguru import logger
 
 from ...settings import settings
 from ...services.postgres.schema_generator import SchemaGenerator
+from ...utils.sql_paths import get_package_sql_dir, get_package_migrations_dir
+
+
+def _import_model_modules() -> list[str]:
+    """
+    Import modules specified in MODELS__IMPORT_MODULES setting.
+
+    This ensures downstream models decorated with @rem.register_model
+    are registered before schema generation.
+
+    Returns:
+        List of successfully imported module names
+    """
+    imported = []
+    for module_name in settings.models.module_list:
+        try:
+            importlib.import_module(module_name)
+            imported.append(module_name)
+            logger.debug(f"Imported model module: {module_name}")
+        except ImportError as e:
+            logger.warning(f"Failed to import model module '{module_name}': {e}")
+            click.echo(
+                click.style(f" ⚠ Could not import '{module_name}': {e}", fg="yellow"),
+                err=True,
+            )
+    return imported
 
 
 @click.command()
-@click.option(
-    "--models",
-    "-m",
-    required=True,
-    type=click.Path(exists=True, path_type=Path),
-    help="Directory containing Pydantic models",
-)
 @click.option(
     "--output",
     "-o",
     type=click.Path(path_type=Path),
-    default="install_models.sql",
-    help="Output SQL file (default: install_models.sql)",
+    default="002_install_models.sql",
+    help="Output SQL file (default: 002_install_models.sql)",
 )
 @click.option(
     "--output-dir",
     type=click.Path(path_type=Path),
     default=None,
-    help=f"Base output directory (default: {settings.sql_dir})",
+    help="Base output directory (default: package sql/migrations)",
 )
-def generate(models: Path, output: Path, output_dir: Path | None):
+def generate(output: Path, output_dir: Path | None):
     """
-    Generate database schema from Pydantic models.
+    Generate database schema from registered Pydantic models.
 
-    Scans the specified directory for Pydantic models and generates:
+    Uses the model registry (core models + user-registered models) to generate:
     - CREATE TABLE statements
     - Embeddings tables (embeddings_<table>)
     - KV_STORE triggers for cache maintenance
     - Indexes (foreground only)
 
-    Output is written to src/rem/sql/install_models.sql by default.
+    Output is written to src/rem/sql/migrations/002_install_models.sql by default.
 
     Example:
-        rem db schema generate --models src/rem/models/entities
+        rem db schema generate
+
+    To register custom models in downstream apps:
+
+    1. Create models with @rem.register_model decorator:
+
+        # models/__init__.py
+        import rem
+        from rem.models.core import CoreModel
+
+        @rem.register_model
+        class MyEntity(CoreModel):
+            name: str
+
+    2. Set MODELS__IMPORT_MODULES in your .env:
+
+        MODELS__IMPORT_MODULES=models
+
+    3. Run schema generation:
+
+        rem db schema generate
 
     This creates:
-    - src/rem/sql/install_models.sql - Entity tables and triggers
+    - src/rem/sql/migrations/002_install_models.sql - Entity tables and triggers
     - src/rem/sql/background_indexes.sql - HNSW indexes (apply after data load)
 
-    After generation, apply with:
-        rem db migrate
+    After generation, verify with:
+        rem db diff
     """
-    click.echo(f"Discovering models in {models}")
+    from ...registry import get_model_registry
+
+    # Import downstream model modules to trigger @rem.register_model decorators
+    imported_modules = _import_model_modules()
+    if imported_modules:
+        click.echo(f"Imported model modules: {', '.join(imported_modules)}")
+
+    registry = get_model_registry()
+    models = registry.get_models(include_core=True)
+    click.echo(f"Generating schema from {len(models)} registered models")
 
-    # Use settings.sql_dir if not provided
-    actual_output_dir = output_dir or Path(settings.sql_dir)
+    # Default to package migrations directory
+    actual_output_dir = output_dir or get_package_migrations_dir()
     generator = SchemaGenerator(output_dir=actual_output_dir)
 
-    # Generate schema
+    # Generate schema from registry
     try:
-        schema_sql = asyncio.run(generator.generate_from_directory(models, output_file=output.name))
+        schema_sql = asyncio.run(generator.generate_from_registry(output_file=output.name))
 
         click.echo(f"✓ Schema generated: {len(generator.schemas)} tables")
         click.echo(f"✓ Written to: {actual_output_dir / output.name}")
 
-        # Generate background indexes
+        # Generate background indexes in parent sql dir
         background_indexes = generator.generate_background_indexes()
         if background_indexes:
-            bg_file = actual_output_dir / "background_indexes.sql"
+            bg_file = get_package_sql_dir() / "background_indexes.sql"
            bg_file.write_text(background_indexes)
            click.echo(f"✓ Background indexes: {bg_file}")
 
@@ -94,48 +143,46 @@ def generate(models: Path, output: Path, output_dir: Path | None):
 
 
 @click.command()
-@click.option(
-    "--models",
-    "-m",
-    required=True,
-    type=click.Path(exists=True, path_type=Path),
-    help="Directory containing Pydantic models",
-)
-def validate(models: Path):
+def validate():
     """
-    Validate Pydantic models for schema generation.
+    Validate registered Pydantic models for schema generation.
 
     Checks:
-    - Models can be loaded
+    - Models can be loaded from registry
     - Models have suitable entity_key fields
     - Fields with embeddings are properly configured
+
+    Set MODELS__IMPORT_MODULES to include custom models from downstream apps.
     """
-    click.echo(f"Validating models in {models}")
+    from ...registry import get_model_registry
 
-    generator = SchemaGenerator()
-    discovered = generator.discover_models(models)
+    # Import downstream model modules to trigger @rem.register_model decorators
+    imported_modules = _import_model_modules()
+    if imported_modules:
+        click.echo(f"Imported model modules: {', '.join(imported_modules)}")
 
-    if not discovered:
-        click.echo("✗ No models found", err=True)
-        raise click.Abort()
+    registry = get_model_registry()
+    models = registry.get_models(include_core=True)
+
+    click.echo(f"Validating {len(models)} registered models")
 
-    click.echo(f"✓ Discovered {len(discovered)} models")
+    if not models:
+        click.echo("✗ No models found in registry", err=True)
+        raise click.Abort()
 
+    generator = SchemaGenerator()
     errors: list[str] = []
     warnings: list[str] = []
 
-    for model_name, model in discovered.items():
-        table_name = generator.infer_table_name(model)
-        entity_key = generator.infer_entity_key_field(model)
+    for model_name, ext in models.items():
+        model = ext.model
+        table_name = ext.table_name or generator.infer_table_name(model)
+        entity_key = ext.entity_key_field or generator.infer_entity_key_field(model)
 
         # Check for entity_key
         if entity_key == "id":
            warnings.append(f"{model_name}: No natural key field, using 'id'")
 
-        # Check for embeddable fields
-        # TODO: Implement should_embed_field check
-        embeddable: list[str] = []  # Placeholder - needs implementation
-
        click.echo(f" {model_name} -> {table_name} (key: {entity_key})")
 
     if warnings:
@@ -158,7 +205,7 @@ def validate(models: Path):
     "-o",
     type=click.Path(path_type=Path),
     default=None,
-    help=f"Output file for background indexes (default: {settings.sql_dir}/background_indexes.sql)",
+    help="Output file for background indexes (default: package sql/background_indexes.sql)",
 )
 def indexes(output: Path):
     """
rem/cli/main.py CHANGED
@@ -14,17 +14,38 @@ from pathlib import Path
 import click
 from loguru import logger
 
+# Import version from package
+try:
+    from importlib.metadata import version
+    __version__ = version("remdb")
+except Exception:
+    __version__ = "unknown"
+
+
+def _configure_logger(level: str):
+    """Configure loguru with custom level icons."""
+    logger.remove()
+
+    # Configure level icons - only warnings and errors get visual indicators
+    logger.level("DEBUG", icon=" ")
+    logger.level("INFO", icon=" ")
+    logger.level("WARNING", icon="🟠")
+    logger.level("ERROR", icon="🔴")
+    logger.level("CRITICAL", icon="🔴")
+
+    logger.add(
+        sys.stderr,
+        level=level,
+        format="<green>{time:HH:mm:ss}</green> | {level.icon} <level>{level: <8}</level> | <level>{message}</level>",
+    )
+
 
 @click.group()
 @click.option("--verbose", "-v", is_flag=True, help="Enable verbose logging")
+@click.version_option(version=__version__, prog_name="rem")
 def cli(verbose: bool):
     """REM - Resources Entities Moments system CLI."""
-    if verbose:
-        logger.remove()
-        logger.add(sys.stderr, level="DEBUG")
-    else:
-        logger.remove()
-        logger.add(sys.stderr, level="INFO")
+    _configure_logger("DEBUG" if verbose else "INFO")
 
 
 @cli.group()
@@ -57,6 +78,12 @@ def dreaming():
     pass
 
 
+@cli.group()
+def cluster():
+    """Kubernetes cluster deployment and management."""
+    pass
+
+
 # Register commands
 from .commands.schema import register_commands as register_schema_commands
 from .commands.db import register_commands as register_db_commands
@@ -67,16 +94,20 @@ from .commands.experiments import experiments as experiments_group
 from .commands.configure import register_command as register_configure_command
 from .commands.serve import register_command as register_serve_command
 from .commands.mcp import register_command as register_mcp_command
+from .commands.scaffold import scaffold as scaffold_command
+from .commands.cluster import register_commands as register_cluster_commands
 
 register_schema_commands(schema)
 register_db_commands(db)
 register_process_commands(process)
 register_dreaming_commands(dreaming)
+register_cluster_commands(cluster)
 register_ask_command(cli)
 register_configure_command(cli)
 register_serve_command(cli)
 register_mcp_command(cli)
 cli.add_command(experiments_group)
+cli.add_command(scaffold_command)
 
 
 def main():
rem/config.py CHANGED
@@ -15,7 +15,7 @@ File Format (~/.rem/config.yaml):
     pool_max_size: 20
 
   llm:
-    default_model: anthropic:claude-sonnet-4-5-20250929
+    default_model: openai:gpt-4.1
     openai_api_key: sk-...
     anthropic_api_key: sk-ant-...
 
@@ -216,7 +216,7 @@ def get_default_config() -> dict[str, Any]:
             "pool_max_size": 20,
         },
         "llm": {
-            "default_model": "anthropic:claude-sonnet-4-5-20250929",
+            "default_model": "openai:gpt-4.1",
            "default_temperature": 0.5,
            # API keys will be prompted for in wizard
            # "openai_api_key": "",
rem/models/core/core_model.py CHANGED
@@ -52,7 +52,13 @@ class CoreModel(BaseModel):
         default=None, description="Tenant identifier for multi-tenancy isolation"
     )
     user_id: Optional[str] = Field(
-        default=None, description="Owner user identifier (tenant-scoped)"
+        default=None,
+        description=(
+            "Owner user identifier (tenant-scoped). This is a VARCHAR(256), not a UUID, "
+            "to allow flexibility for external identity providers. Typically generated as "
+            "a hash of the user's email address. In future, other strong unique claims "
+            "(e.g., OAuth sub, verified phone) could also be used for generation."
+        ),
     )
     graph_edges: list[dict] = Field(
         default_factory=list,
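The new description pins down what user_id is: a deterministic string derived from the user's email rather than a database-generated UUID. A sketch of such a derivation, assuming the UUID5-of-email scheme mentioned in rem_query.py below (the namespace and normalization here are illustrative, not taken from remdb):

import uuid

# Illustrative namespace; the actual namespace remdb uses is not shown in this diff.
REM_USER_NAMESPACE = uuid.NAMESPACE_DNS


def user_id_from_email(email: str) -> str:
    """Deterministic identifier derived from an email; fits easily in VARCHAR(256)."""
    return str(uuid.uuid5(REM_USER_NAMESPACE, email.strip().lower()))


print(user_id_from_email("ada@example.com"))  # same input always yields the same id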
rem/models/core/rem_query.py CHANGED
@@ -112,7 +112,7 @@ class SearchParameters(BaseModel):
     table_name: str = Field(..., description="Table to search (resources, moments, etc.)")
     limit: int = Field(default=10, gt=0, description="Maximum results")
     min_similarity: float = Field(
-        default=0.7, ge=0.0, le=1.0, description="Minimum similarity score"
+        default=0.3, ge=0.0, le=1.0, description="Minimum similarity score (0.3 recommended for general queries)"
     )
 
 
@@ -198,7 +198,10 @@ class RemQuery(BaseModel):
         | SQLParameters
         | TraverseParameters
     ) = Field(..., description="Query parameters")
-    user_id: str = Field(..., description="User identifier for isolation")
+    user_id: Optional[str] = Field(
+        default=None,
+        description="User identifier (UUID5 hash of email). None = anonymous (shared/public data only)"
+    )
 
 
 class TraverseStage(BaseModel):
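Two behavioral changes land here: the vector-search default loosens to min_similarity=0.3, and queries can now run anonymously (user_id=None sees only shared/public data). A small sketch with SearchParameters, whose fields are fully specified by this hunk (import path taken from the file list entry rem/models/core/rem_query.py):

from rem.models.core.rem_query import SearchParameters

# New defaults apply: limit=10, min_similarity=0.3.
params = SearchParameters(table_name="resources")
assert params.min_similarity == 0.3

# Stricter matching is still available per query.
strict = SearchParameters(table_name="moments", limit=5, min_similarity=0.7)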
rem/models/entities/__init__.py CHANGED
@@ -5,6 +5,9 @@ Core entity types for the REM system:
 - Resources: Base content units (documents, conversations, artifacts)
 - ImageResources: Image-specific resources with CLIP embeddings
 - Messages: Communication content
+- Sessions: Conversation sessions (normal or evaluation mode)
+- SharedSessions: Session sharing between users for collaboration
+- Feedback: User feedback on messages/sessions with trace integration
 - Users: User entities
 - Files: File metadata and tracking
 - Moments: Temporal narratives (meetings, coding sessions, conversations)
@@ -19,6 +22,8 @@ All entities inherit from CoreModel and support:
 - Natural language labels for conversational queries
 """
 
+from .domain_resource import DomainResource
+from .feedback import Feedback, FeedbackCategory
 from .file import File
 from .image_resource import ImageResource
 from .message import Message
@@ -27,12 +32,28 @@ from .ontology import Ontology
 from .ontology_config import OntologyConfig
 from .resource import Resource
 from .schema import Schema
+from .session import Session, SessionMode
+from .shared_session import (
+    SharedSession,
+    SharedSessionCreate,
+    SharedWithMeResponse,
+    SharedWithMeSummary,
+)
 from .user import User, UserTier
 
 __all__ = [
     "Resource",
+    "DomainResource",
     "ImageResource",
     "Message",
+    "Session",
+    "SessionMode",
+    "SharedSession",
+    "SharedSessionCreate",
+    "SharedWithMeResponse",
+    "SharedWithMeSummary",
+    "Feedback",
+    "FeedbackCategory",
     "User",
     "UserTier",
     "File",
rem/models/entities/domain_resource.py ADDED
@@ -0,0 +1,38 @@
+"""
+DomainResource - Curated internal knowledge in REM.
+
+DomainResources are a specialized subclass of Resource for storing curated,
+domain-specific internal knowledge that is not part of general knowledge.
+This includes proprietary information, internal documentation, institutional
+knowledge, and other content that requires more careful curation.
+
+Key Differences from Resource:
+- Intended for curated, internal knowledge (not raw ingested content)
+- Higher quality bar - content is reviewed/vetted before ingestion
+- May contain proprietary or sensitive information
+- Subject to different retention/governance policies
+
+Use Cases:
+- Internal documentation and procedures
+- Proprietary research and analysis
+- Institutional knowledge bases
+- Domain-specific ontologies and taxonomies
+- Curated best practices and guidelines
+"""
+
+from .resource import Resource
+
+
+class DomainResource(Resource):
+    """
+    Curated domain-specific knowledge resource.
+
+    Inherits all fields from Resource but stored in a separate table
+    (domain_resources) to distinguish curated internal knowledge from
+    general ingested content.
+
+    The schema is identical to Resource, allowing seamless migration
+    of content between tables as curation status changes.
+    """
+
+    pass
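Since DomainResource is a plain Resource subclass mapped to its own table, downstream apps can apply the same pattern for further specialization via the registry introduced elsewhere in this release. A sketch under those assumptions (the PolicyDocument entity is hypothetical, and whether register_model accepts Resource subclasses is inferred from, not confirmed by, this diff):

import rem
from rem.models.entities import DomainResource


@rem.register_model  # assumes Resource subclasses are accepted like CoreModel subclasses
class PolicyDocument(DomainResource):
    """Hypothetical curated entity; inherits every Resource/DomainResource field."""

    policy_area: str
    effective_date: str | None = None

With its module listed in MODELS__IMPORT_MODULES, rem db schema generate would then emit a table for it alongside domain_resources, with the name inferred by SchemaGenerator unless overridden.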