remdb 0.2.6__py3-none-any.whl → 0.3.103__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- rem/__init__.py +129 -2
- rem/agentic/README.md +76 -0
- rem/agentic/__init__.py +15 -0
- rem/agentic/agents/__init__.py +16 -2
- rem/agentic/agents/sse_simulator.py +500 -0
- rem/agentic/context.py +7 -5
- rem/agentic/llm_provider_models.py +301 -0
- rem/agentic/providers/phoenix.py +32 -43
- rem/agentic/providers/pydantic_ai.py +84 -10
- rem/api/README.md +238 -1
- rem/api/deps.py +255 -0
- rem/api/main.py +70 -22
- rem/api/mcp_router/server.py +8 -1
- rem/api/mcp_router/tools.py +80 -0
- rem/api/middleware/tracking.py +172 -0
- rem/api/routers/admin.py +277 -0
- rem/api/routers/auth.py +124 -0
- rem/api/routers/chat/completions.py +123 -14
- rem/api/routers/chat/models.py +7 -3
- rem/api/routers/chat/sse_events.py +526 -0
- rem/api/routers/chat/streaming.py +468 -45
- rem/api/routers/dev.py +81 -0
- rem/api/routers/feedback.py +455 -0
- rem/api/routers/messages.py +473 -0
- rem/api/routers/models.py +78 -0
- rem/api/routers/shared_sessions.py +406 -0
- rem/auth/middleware.py +126 -27
- rem/cli/commands/ask.py +15 -11
- rem/cli/commands/configure.py +169 -94
- rem/cli/commands/db.py +53 -7
- rem/cli/commands/experiments.py +278 -96
- rem/cli/commands/process.py +8 -7
- rem/cli/commands/scaffold.py +47 -0
- rem/cli/commands/schema.py +9 -9
- rem/cli/main.py +10 -0
- rem/config.py +2 -2
- rem/models/core/core_model.py +7 -1
- rem/models/entities/__init__.py +21 -0
- rem/models/entities/domain_resource.py +38 -0
- rem/models/entities/feedback.py +123 -0
- rem/models/entities/message.py +30 -1
- rem/models/entities/session.py +83 -0
- rem/models/entities/shared_session.py +206 -0
- rem/models/entities/user.py +10 -3
- rem/registry.py +367 -0
- rem/schemas/agents/rem.yaml +7 -3
- rem/services/content/providers.py +94 -140
- rem/services/content/service.py +85 -16
- rem/services/dreaming/affinity_service.py +2 -16
- rem/services/dreaming/moment_service.py +2 -15
- rem/services/embeddings/api.py +20 -13
- rem/services/phoenix/EXPERIMENT_DESIGN.md +3 -3
- rem/services/phoenix/client.py +252 -19
- rem/services/postgres/README.md +29 -10
- rem/services/postgres/repository.py +132 -0
- rem/services/postgres/schema_generator.py +86 -5
- rem/services/rate_limit.py +113 -0
- rem/services/rem/README.md +14 -0
- rem/services/session/compression.py +17 -1
- rem/services/user_service.py +98 -0
- rem/settings.py +115 -17
- rem/sql/background_indexes.sql +10 -0
- rem/sql/migrations/001_install.sql +152 -2
- rem/sql/migrations/002_install_models.sql +580 -231
- rem/sql/migrations/003_seed_default_user.sql +48 -0
- rem/utils/constants.py +97 -0
- rem/utils/date_utils.py +228 -0
- rem/utils/embeddings.py +17 -4
- rem/utils/files.py +167 -0
- rem/utils/mime_types.py +158 -0
- rem/utils/model_helpers.py +156 -1
- rem/utils/schema_loader.py +273 -14
- rem/utils/sql_types.py +3 -1
- rem/utils/vision.py +9 -14
- rem/workers/README.md +14 -14
- rem/workers/db_maintainer.py +74 -0
- {remdb-0.2.6.dist-info → remdb-0.3.103.dist-info}/METADATA +486 -132
- {remdb-0.2.6.dist-info → remdb-0.3.103.dist-info}/RECORD +80 -57
- {remdb-0.2.6.dist-info → remdb-0.3.103.dist-info}/WHEEL +1 -1
- rem/sql/002_install_models.sql +0 -1068
- rem/sql/install_models.sql +0 -1038
- {remdb-0.2.6.dist-info → remdb-0.3.103.dist-info}/entry_points.txt +0 -0
rem/cli/commands/ask.py
CHANGED
@@ -89,8 +89,8 @@ async def run_agent_streaming(
         context: Optional AgentContext for session persistence
         max_iterations: Maximum iterations/requests (from agent schema or settings)
     """
-    from datetime import datetime, timezone
     from pydantic_ai import UsageLimits
+    from rem.utils.date_utils import to_iso_with_z, utc_now

     logger.info("Running agent in streaming mode...")

@@ -151,13 +151,13 @@ async def run_agent_streaming(
     user_message = {
         "role": "user",
         "content": user_message_content,
-        "timestamp":
+        "timestamp": to_iso_with_z(utc_now()),
     }

     assistant_message = {
         "role": "assistant",
         "content": "".join(assistant_response_parts),
-        "timestamp":
+        "timestamp": to_iso_with_z(utc_now()),
     }

     # Store messages with compression
@@ -200,8 +200,8 @@ async def run_agent_non_streaming(
     Returns:
         Output data if successful, None otherwise
     """
-    from datetime import datetime, timezone
     from pydantic_ai import UsageLimits
+    from rem.utils.date_utils import to_iso_with_z, utc_now

     logger.info("Running agent in non-streaming mode...")

@@ -248,13 +248,13 @@ async def run_agent_non_streaming(
     user_message = {
         "role": "user",
         "content": user_message_content,
-        "timestamp":
+        "timestamp": to_iso_with_z(utc_now()),
     }

     assistant_message = {
         "role": "assistant",
         "content": assistant_content,
-        "timestamp":
+        "timestamp": to_iso_with_z(utc_now()),
     }

     # Store messages with compression
@@ -357,8 +357,8 @@ async def _save_output_file(file_path: Path, data: dict[str, Any]) -> None:
 )
 @click.option(
     "--user-id",
-    default=
-    help="User ID for context (default: test
+    default=None,
+    help="User ID for context (default: from settings.test.effective_user_id)",
 )
 @click.option(
     "--session-id",
@@ -393,7 +393,7 @@ def ask(
     max_turns: int,
     version: str | None,
     stream: bool,
-    user_id: str,
+    user_id: str | None,
     session_id: str | None,
     input_file: Path | None,
     output_file: Path | None,
@@ -434,6 +434,9 @@ def ask(
         # Two arguments provided
         name = name_or_query

+    # Resolve user_id from settings if not provided
+    effective_user_id = user_id or settings.test.effective_user_id
+
     asyncio.run(
         _ask_async(
             name=name,
@@ -443,7 +446,7 @@ def ask(
             max_turns=max_turns,
             version=version,
             stream=stream,
-            user_id=
+            user_id=effective_user_id,
             session_id=session_id,
             input_file=input_file,
             output_file=output_file,
@@ -486,9 +489,10 @@ async def _ask_async(

     # Load schema using centralized utility
     # Handles both file paths and schema names automatically
+    # Falls back to database lookup if not found in filesystem
     logger.info(f"Loading schema: {name} (version: {version or 'latest'})")
     try:
-        schema = load_agent_schema(name)
+        schema = load_agent_schema(name, user_id=user_id)
     except FileNotFoundError as e:
         logger.error(str(e))
         sys.exit(1)
rem/cli/commands/configure.py
CHANGED
@@ -28,10 +28,13 @@ from rem.config import (
 )


-def prompt_postgres_config() -> dict:
+def prompt_postgres_config(use_defaults: bool = False) -> dict:
     """
     Prompt user for PostgreSQL configuration.

+    Args:
+        use_defaults: If True, use all default values without prompting
+
     Returns:
         PostgreSQL configuration dictionary
     """
@@ -44,28 +47,45 @@ def prompt_postgres_config() -> dict:
         "POSTGRES__CONNECTION_STRING", "postgresql://rem:rem@localhost:5051/rem"
     )

-    #
+    # Default values
+    host = "localhost"
+    port = 5051
+    database = "rem"
+    username = "rem"
+    password = "rem"
+    pool_min_size = 5
+    pool_max_size = 20
+
+    if use_defaults:
+        click.echo("\nUsing default PostgreSQL configuration:")
+        click.echo(f"  Host: {host}")
+        click.echo(f"  Port: {port}")
+        click.echo(f"  Database: {database}")
+        click.echo(f"  Username: {username}")
+        click.echo(f"  Pool: {pool_min_size}-{pool_max_size} connections")
+    else:
+        # Prompt for components
+        click.echo(
+            "\nEnter PostgreSQL connection details (press Enter to use default):"
+        )
+        click.echo("Default: Package users on port 5051 (docker compose -f docker-compose.prebuilt.yml up -d)")
+        click.echo("Developers: Change port to 5050 if using docker-compose.yml (local build)")
+        click.echo("Custom DB: Enter your own host/port below")

+        host = click.prompt("Host", default=host)
+        port = click.prompt("Port", default=port, type=int)
+        database = click.prompt("Database name", default=database)
+        username = click.prompt("Username", default=username)
+        password = click.prompt("Password", default=password, hide_input=True)
+
+        # Additional pool settings
+        click.echo("\nConnection pool settings:")
+        pool_min_size = click.prompt("Pool minimum size", default=pool_min_size, type=int)
+        pool_max_size = click.prompt("Pool maximum size", default=pool_max_size, type=int)

     # Build connection string
     connection_string = f"postgresql://{username}:{password}@{host}:{port}/{database}"

-    # Additional pool settings
-    click.echo("\nConnection pool settings:")
-    pool_min_size = click.prompt("Pool minimum size", default=5, type=int)
-    pool_max_size = click.prompt("Pool maximum size", default=20, type=int)
-
     return {
         "connection_string": connection_string,
         "pool_min_size": pool_min_size,
@@ -73,10 +93,13 @@ def prompt_postgres_config() -> dict:
     }


-def prompt_llm_config() -> dict:
+def prompt_llm_config(use_defaults: bool = False) -> dict:
     """
     Prompt user for LLM provider configuration.

+    Args:
+        use_defaults: If True, use all default values without prompting
+
     Returns:
         LLM configuration dictionary
     """
@@ -86,40 +109,55 @@ def prompt_llm_config() -> dict:

     config = {}

-    # Default
+    # Default values
+    default_model = "anthropic:claude-sonnet-4-5-20250929"
+    default_temperature = 0.5
+
+    if use_defaults:
+        click.echo("\nUsing default LLM configuration:")
+        click.echo(f"  Model: {default_model}")
+        click.echo(f"  Temperature: {default_temperature}")
+        click.echo("  API Keys: Not configured (set via environment variables)")
+        config["default_model"] = default_model
+        config["default_temperature"] = default_temperature
+    else:
+        # Default model
+        click.echo("\nDefault LLM model (format: provider:model-id)")
+        click.echo("Examples:")
+        click.echo("  - anthropic:claude-sonnet-4-5-20250929")
+        click.echo("  - openai:gpt-4o")
+        click.echo("  - openai:gpt-4o-mini")
+
+        config["default_model"] = click.prompt(
+            "Default model", default=default_model
+        )

+        # Temperature
+        config["default_temperature"] = click.prompt(
+            "Default temperature (0.0-1.0)", default=default_temperature, type=float
+        )

+        # API keys
+        click.echo("\nAPI Keys (optional - leave empty to skip):")

+        openai_key = click.prompt("OpenAI API key", default="", show_default=False)
+        if openai_key:
+            config["openai_api_key"] = openai_key

+        anthropic_key = click.prompt("Anthropic API key", default="", show_default=False)
+        if anthropic_key:
+            config["anthropic_api_key"] = anthropic_key

     return config


-def prompt_s3_config() -> dict:
+def prompt_s3_config(use_defaults: bool = False) -> dict:
     """
     Prompt user for S3 storage configuration.

+    Args:
+        use_defaults: If True, skip S3 configuration (optional feature)
+
     Returns:
         S3 configuration dictionary
     """
@@ -129,8 +167,12 @@ def prompt_s3_config() -> dict:

     config = {}

+    if use_defaults:
+        click.echo("\nSkipping S3 configuration (optional - configure later if needed)")
+        return {}
+
     click.echo("\nS3 storage is used for file uploads and processed content.")
-    use_s3 = click.confirm("Configure S3 storage?", default=
+    use_s3 = click.confirm("Configure S3 storage?", default=False)

     if not use_s3:
         return {}
@@ -154,10 +196,13 @@ def prompt_s3_config() -> dict:
     return config


-def prompt_additional_env_vars() -> dict:
+def prompt_additional_env_vars(use_defaults: bool = False) -> dict:
     """
     Prompt user for additional environment variables.

+    Args:
+        use_defaults: If True, skip additional env vars (optional feature)
+
     Returns:
         Dictionary of custom environment variables
     """
@@ -167,6 +212,10 @@ def prompt_additional_env_vars() -> dict:

     env_vars: dict[str, str] = {}

+    if use_defaults:
+        click.echo("\nSkipping additional environment variables (configure later if needed)")
+        return env_vars
+
     add_env = click.confirm(
         "Add custom environment variables?", default=False
     )
@@ -207,7 +256,13 @@ def prompt_additional_env_vars() -> dict:
     is_flag=True,
     help="Open configuration file in editor",
 )
-
+@click.option(
+    "--yes",
+    "-y",
+    is_flag=True,
+    help="Accept all defaults without prompting (non-interactive mode)",
+)
+def configure_command(install: bool, claude_desktop: bool, show: bool, edit: bool, yes: bool):
     """
     Configure REM installation.

@@ -215,10 +270,12 @@ def configure_command(install: bool, claude_desktop: bool, show: bool, edit: bool):
     Configuration is saved to ~/.rem/config.yaml and merged with environment variables.

     Examples:
-        rem configure
-        rem configure --
-        rem configure --
-        rem configure --
+        rem configure                    # Run interactive wizard
+        rem configure --yes              # Accept all defaults (non-interactive)
+        rem configure --yes --install    # Quick setup with defaults + install tables
+        rem configure --install          # Run wizard + install database tables
+        rem configure --show             # Show current configuration
+        rem configure --edit             # Open config in $EDITOR
     """
     config_path = get_config_path()

@@ -258,57 +315,74 @@ def configure_command(install: bool, claude_desktop: bool, show: bool, edit: bool):
     click.echo("\n" + "=" * 60)
     click.echo("REM Configuration Wizard")
     click.echo("=" * 60)
-
+
+    if yes:
+        click.echo("\nRunning in non-interactive mode (--yes flag)")
+        click.echo("Using default configuration values...")
+    else:
+        click.echo("\nThis wizard will help you configure REM for first-time use.")
+
     click.echo(f"Configuration will be saved to: {config_path}")

     # Check if config already exists
     if config_exists():
         click.echo(f"\nConfiguration file already exists at {config_path}")
-        if
+        if yes:
+            # In non-interactive mode, skip configuration creation
+            click.echo("Skipping configuration creation (file already exists)")
+            config = None  # Will not save/validate
+        elif not click.confirm("Overwrite existing configuration?", default=False):
             click.echo("Configuration unchanged.")
+            config = None  # Will not save/validate
+        else:
+            # User confirmed overwrite - create new config
+            config = {}
+            config["postgres"] = prompt_postgres_config(use_defaults=yes)
+            config["llm"] = prompt_llm_config(use_defaults=yes)
+            s3_config = prompt_s3_config(use_defaults=yes)
+            if s3_config:
+                config["s3"] = s3_config
+            env_vars = prompt_additional_env_vars(use_defaults=yes)
+            if env_vars:
+                config["env"] = env_vars
+    else:
+        # No existing config - create new one
+        config = {}
+        config["postgres"] = prompt_postgres_config(use_defaults=yes)
+        config["llm"] = prompt_llm_config(use_defaults=yes)
+        s3_config = prompt_s3_config(use_defaults=yes)
+        if s3_config:
+            config["s3"] = s3_config
+        env_vars = prompt_additional_env_vars(use_defaults=yes)
+        if env_vars:
+            config["env"] = env_vars
+
+    # Validate and save configuration (only if we created one)
+    if config is not None:
+        click.echo("\n" + "=" * 60)
+        click.echo("Validating Configuration")
+        click.echo("=" * 60)

+        errors = validate_config(config)
+        if errors:
+            click.echo("\nConfiguration validation failed:")
+            for error in errors:
+                click.echo(f"  ❌ {error}", err=True)
+            click.echo("\nPlease fix these errors and try again.")
+            return

+        click.echo("✅ Configuration is valid")

+        # Save configuration
+        try:
+            save_config(config)
+            click.echo(f"\n✅ Configuration saved to {config_path}")
+        except Exception as e:
+            click.echo(f"\n❌ Failed to save configuration: {e}", err=True)
+            return
+    else:
+        # Load existing config for use in install step
+        config = load_config() if config_exists() else {}

     # Install database tables if requested
     if install:
@@ -316,7 +390,7 @@ def configure_command(install: bool, claude_desktop: bool, show: bool, edit: bool):
         click.echo("Database Installation")
         click.echo("=" * 60)

-        if click.confirm("\nInstall database tables?", default=True):
+        if yes or click.confirm("\nInstall database tables?", default=True):
             try:
                 # Import here to ensure config is loaded first
                 from rem.config import merge_config_to_env
@@ -357,8 +431,9 @@ def configure_command(install: bool, claude_desktop: bool, show: bool, edit: bool):
         if os.name == "nt":  # Windows
             config_dir = Path.home() / "AppData/Roaming/Claude"
         elif os.name == "posix":
-
-
+            macos_path = Path.home() / "Library/Application Support/Claude"
+            if macos_path.exists():
+                config_dir = macos_path
             else:
                 config_dir = Path.home() / ".config/Claude"
         else:
rem/cli/commands/db.py
CHANGED
@@ -382,9 +382,9 @@ def rebuild_cache(connection: str | None):

 @click.command()
 @click.argument("file_path", type=click.Path(exists=True, path_type=Path))
-@click.option("--user-id", default=
+@click.option("--user-id", default=None, help="User ID for loaded data (default: from settings)")
 @click.option("--dry-run", is_flag=True, help="Show what would be loaded without loading")
-def load(file_path: Path, user_id: str, dry_run: bool):
+def load(file_path: Path, user_id: str | None, dry_run: bool):
     """
     Load data from YAML file into database.

@@ -400,14 +400,18 @@ def load(file_path: Path, user_id: str, dry_run: bool):
         rem db load data.yaml --user-id my-user
         rem db load data.yaml --dry-run
     """
-
+    from ...settings import settings
+
+    # Resolve user_id from settings if not provided
+    effective_user_id = user_id or settings.test.effective_user_id
+    asyncio.run(_load_async(file_path, effective_user_id, dry_run))


 async def _load_async(file_path: Path, user_id: str, dry_run: bool):
     """Async implementation of load command."""
     import yaml
     from ...models.core.inline_edge import InlineEdge
-    from ...models.entities import Resource, Moment, User
+    from ...models.entities import Resource, Moment, User, Message, SharedSession, Schema
     from ...services.postgres import get_postgres_service

     logger.info(f"Loading data from: {file_path}")
@@ -427,12 +431,18 @@ async def _load_async(file_path: Path, user_id: str, dry_run: bool):
         return

     # Map table names to model classes
+    # CoreModel subclasses use Repository.upsert()
     MODEL_MAP = {
         "users": User,
         "moments": Moment,
         "resources": Resource,
+        "messages": Message,
+        "schemas": Schema,
     }

+    # Non-CoreModel tables that need direct SQL insertion
+    DIRECT_INSERT_TABLES = {"shared_sessions"}
+
     # Connect to database
     pg = get_postgres_service()
     if not pg:
@@ -449,6 +459,29 @@ async def _load_async(file_path: Path, user_id: str, dry_run: bool):
         key_field = table_def.get("key_field", "id")
         rows = table_def.get("rows", [])

+        # Handle direct insert tables (non-CoreModel)
+        if table_name in DIRECT_INSERT_TABLES:
+            for row_data in rows:
+                # Add tenant_id if not present
+                if "tenant_id" not in row_data:
+                    row_data["tenant_id"] = "default"
+
+                if table_name == "shared_sessions":
+                    # Insert shared_session directly
+                    await pg.fetch(
+                        """INSERT INTO shared_sessions
+                        (session_id, owner_user_id, shared_with_user_id, tenant_id)
+                        VALUES ($1, $2, $3, $4)
+                        ON CONFLICT DO NOTHING""",
+                        row_data["session_id"],
+                        row_data["owner_user_id"],
+                        row_data["shared_with_user_id"],
+                        row_data["tenant_id"],
+                    )
+                    total_loaded += 1
+                    logger.success(f"Loaded shared_session: {row_data['owner_user_id']} -> {row_data['shared_with_user_id']}")
+            continue
+
         if table_name not in MODEL_MAP:
             logger.warning(f"Unknown table: {table_name}, skipping")
             continue
@@ -456,9 +489,12 @@ async def _load_async(file_path: Path, user_id: str, dry_run: bool):
         model_class = MODEL_MAP[table_name]  # Type is inferred from MODEL_MAP

         for row_data in rows:
-            # Add user_id and tenant_id
-
-
+            # Add user_id and tenant_id if not already present
+            # This allows seed files to specify explicit owners
+            if "user_id" not in row_data:
+                row_data["user_id"] = user_id
+            if "tenant_id" not in row_data:
+                row_data["tenant_id"] = row_data.get("user_id", user_id)

             # Convert graph_edges to InlineEdge format if present
             if "graph_edges" in row_data:
@@ -467,6 +503,16 @@ async def _load_async(file_path: Path, user_id: str, dry_run: bool):
                     for edge in row_data["graph_edges"]
                 ]

+            # Convert any ISO timestamp strings with Z suffix to naive datetime
+            # This handles fields like starts_timestamp, ends_timestamp, etc.
+            from ...utils.date_utils import parse_iso
+            for key, value in list(row_data.items()):
+                if isinstance(value, str) and (key.endswith("_timestamp") or key.endswith("_at")):
+                    try:
+                        row_data[key] = parse_iso(value)
+                    except (ValueError, TypeError):
+                        pass  # Not a valid datetime string, leave as-is
+
             # Create model instance and upsert via repository
             from ...services.postgres.repository import Repository

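To illustrate the new timestamp handling in `rem db load`, here is a small worked example of the normalization loop above. The sample row is hypothetical, and parse_iso is assumed to accept ISO-8601 strings with a trailing "Z"; its implementation lives in the new rem/utils/date_utils.py and is not shown in this diff.

from rem.utils.date_utils import parse_iso  # assumed public import path, mirroring the diff

row_data = {
    "name": "standup",
    "starts_timestamp": "2025-06-01T09:00:00Z",  # matches "_timestamp" -> converted to datetime
    "created_at": "2025-06-01T09:05:00Z",        # matches "_at" -> converted to datetime
    "summary": "not a timestamp",                # other keys are left untouched
}

for key, value in list(row_data.items()):
    if isinstance(value, str) and (key.endswith("_timestamp") or key.endswith("_at")):
        try:
            row_data[key] = parse_iso(value)  # becomes a datetime object
        except (ValueError, TypeError):
            pass  # non-datetime strings are left as-is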