remdb-0.3.7-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- rem/__init__.py +2 -0
- rem/agentic/README.md +650 -0
- rem/agentic/__init__.py +39 -0
- rem/agentic/agents/README.md +155 -0
- rem/agentic/agents/__init__.py +8 -0
- rem/agentic/context.py +148 -0
- rem/agentic/context_builder.py +329 -0
- rem/agentic/mcp/__init__.py +0 -0
- rem/agentic/mcp/tool_wrapper.py +107 -0
- rem/agentic/otel/__init__.py +5 -0
- rem/agentic/otel/setup.py +151 -0
- rem/agentic/providers/phoenix.py +674 -0
- rem/agentic/providers/pydantic_ai.py +572 -0
- rem/agentic/query.py +117 -0
- rem/agentic/query_helper.py +89 -0
- rem/agentic/schema.py +396 -0
- rem/agentic/serialization.py +245 -0
- rem/agentic/tools/__init__.py +5 -0
- rem/agentic/tools/rem_tools.py +231 -0
- rem/api/README.md +420 -0
- rem/api/main.py +324 -0
- rem/api/mcp_router/prompts.py +182 -0
- rem/api/mcp_router/resources.py +536 -0
- rem/api/mcp_router/server.py +213 -0
- rem/api/mcp_router/tools.py +584 -0
- rem/api/routers/auth.py +229 -0
- rem/api/routers/chat/__init__.py +5 -0
- rem/api/routers/chat/completions.py +281 -0
- rem/api/routers/chat/json_utils.py +76 -0
- rem/api/routers/chat/models.py +124 -0
- rem/api/routers/chat/streaming.py +185 -0
- rem/auth/README.md +258 -0
- rem/auth/__init__.py +26 -0
- rem/auth/middleware.py +100 -0
- rem/auth/providers/__init__.py +13 -0
- rem/auth/providers/base.py +376 -0
- rem/auth/providers/google.py +163 -0
- rem/auth/providers/microsoft.py +237 -0
- rem/cli/README.md +455 -0
- rem/cli/__init__.py +8 -0
- rem/cli/commands/README.md +126 -0
- rem/cli/commands/__init__.py +3 -0
- rem/cli/commands/ask.py +566 -0
- rem/cli/commands/configure.py +497 -0
- rem/cli/commands/db.py +493 -0
- rem/cli/commands/dreaming.py +324 -0
- rem/cli/commands/experiments.py +1302 -0
- rem/cli/commands/mcp.py +66 -0
- rem/cli/commands/process.py +245 -0
- rem/cli/commands/schema.py +183 -0
- rem/cli/commands/serve.py +106 -0
- rem/cli/dreaming.py +363 -0
- rem/cli/main.py +96 -0
- rem/config.py +237 -0
- rem/mcp_server.py +41 -0
- rem/models/core/__init__.py +49 -0
- rem/models/core/core_model.py +64 -0
- rem/models/core/engram.py +333 -0
- rem/models/core/experiment.py +628 -0
- rem/models/core/inline_edge.py +132 -0
- rem/models/core/rem_query.py +243 -0
- rem/models/entities/__init__.py +43 -0
- rem/models/entities/file.py +57 -0
- rem/models/entities/image_resource.py +88 -0
- rem/models/entities/message.py +35 -0
- rem/models/entities/moment.py +123 -0
- rem/models/entities/ontology.py +191 -0
- rem/models/entities/ontology_config.py +131 -0
- rem/models/entities/resource.py +95 -0
- rem/models/entities/schema.py +87 -0
- rem/models/entities/user.py +85 -0
- rem/py.typed +0 -0
- rem/schemas/README.md +507 -0
- rem/schemas/__init__.py +6 -0
- rem/schemas/agents/README.md +92 -0
- rem/schemas/agents/core/moment-builder.yaml +178 -0
- rem/schemas/agents/core/rem-query-agent.yaml +226 -0
- rem/schemas/agents/core/resource-affinity-assessor.yaml +99 -0
- rem/schemas/agents/core/simple-assistant.yaml +19 -0
- rem/schemas/agents/core/user-profile-builder.yaml +163 -0
- rem/schemas/agents/examples/contract-analyzer.yaml +317 -0
- rem/schemas/agents/examples/contract-extractor.yaml +134 -0
- rem/schemas/agents/examples/cv-parser.yaml +263 -0
- rem/schemas/agents/examples/hello-world.yaml +37 -0
- rem/schemas/agents/examples/query.yaml +54 -0
- rem/schemas/agents/examples/simple.yaml +21 -0
- rem/schemas/agents/examples/test.yaml +29 -0
- rem/schemas/agents/rem.yaml +128 -0
- rem/schemas/evaluators/hello-world/default.yaml +77 -0
- rem/schemas/evaluators/rem/faithfulness.yaml +219 -0
- rem/schemas/evaluators/rem/lookup-correctness.yaml +182 -0
- rem/schemas/evaluators/rem/retrieval-precision.yaml +199 -0
- rem/schemas/evaluators/rem/retrieval-recall.yaml +211 -0
- rem/schemas/evaluators/rem/search-correctness.yaml +192 -0
- rem/services/__init__.py +16 -0
- rem/services/audio/INTEGRATION.md +308 -0
- rem/services/audio/README.md +376 -0
- rem/services/audio/__init__.py +15 -0
- rem/services/audio/chunker.py +354 -0
- rem/services/audio/transcriber.py +259 -0
- rem/services/content/README.md +1269 -0
- rem/services/content/__init__.py +5 -0
- rem/services/content/providers.py +801 -0
- rem/services/content/service.py +676 -0
- rem/services/dreaming/README.md +230 -0
- rem/services/dreaming/__init__.py +53 -0
- rem/services/dreaming/affinity_service.py +336 -0
- rem/services/dreaming/moment_service.py +264 -0
- rem/services/dreaming/ontology_service.py +54 -0
- rem/services/dreaming/user_model_service.py +297 -0
- rem/services/dreaming/utils.py +39 -0
- rem/services/embeddings/__init__.py +11 -0
- rem/services/embeddings/api.py +120 -0
- rem/services/embeddings/worker.py +421 -0
- rem/services/fs/README.md +662 -0
- rem/services/fs/__init__.py +62 -0
- rem/services/fs/examples.py +206 -0
- rem/services/fs/examples_paths.py +204 -0
- rem/services/fs/git_provider.py +935 -0
- rem/services/fs/local_provider.py +760 -0
- rem/services/fs/parsing-hooks-examples.md +172 -0
- rem/services/fs/paths.py +276 -0
- rem/services/fs/provider.py +460 -0
- rem/services/fs/s3_provider.py +1042 -0
- rem/services/fs/service.py +186 -0
- rem/services/git/README.md +1075 -0
- rem/services/git/__init__.py +17 -0
- rem/services/git/service.py +469 -0
- rem/services/phoenix/EXPERIMENT_DESIGN.md +1146 -0
- rem/services/phoenix/README.md +453 -0
- rem/services/phoenix/__init__.py +46 -0
- rem/services/phoenix/client.py +686 -0
- rem/services/phoenix/config.py +88 -0
- rem/services/phoenix/prompt_labels.py +477 -0
- rem/services/postgres/README.md +575 -0
- rem/services/postgres/__init__.py +23 -0
- rem/services/postgres/migration_service.py +427 -0
- rem/services/postgres/pydantic_to_sqlalchemy.py +232 -0
- rem/services/postgres/register_type.py +352 -0
- rem/services/postgres/repository.py +337 -0
- rem/services/postgres/schema_generator.py +379 -0
- rem/services/postgres/service.py +802 -0
- rem/services/postgres/sql_builder.py +354 -0
- rem/services/rem/README.md +304 -0
- rem/services/rem/__init__.py +23 -0
- rem/services/rem/exceptions.py +71 -0
- rem/services/rem/executor.py +293 -0
- rem/services/rem/parser.py +145 -0
- rem/services/rem/queries.py +196 -0
- rem/services/rem/query.py +371 -0
- rem/services/rem/service.py +527 -0
- rem/services/session/README.md +374 -0
- rem/services/session/__init__.py +6 -0
- rem/services/session/compression.py +360 -0
- rem/services/session/reload.py +77 -0
- rem/settings.py +1235 -0
- rem/sql/002_install_models.sql +1068 -0
- rem/sql/background_indexes.sql +42 -0
- rem/sql/install_models.sql +1038 -0
- rem/sql/migrations/001_install.sql +503 -0
- rem/sql/migrations/002_install_models.sql +1202 -0
- rem/utils/AGENTIC_CHUNKING.md +597 -0
- rem/utils/README.md +583 -0
- rem/utils/__init__.py +43 -0
- rem/utils/agentic_chunking.py +622 -0
- rem/utils/batch_ops.py +343 -0
- rem/utils/chunking.py +108 -0
- rem/utils/clip_embeddings.py +276 -0
- rem/utils/dict_utils.py +98 -0
- rem/utils/embeddings.py +423 -0
- rem/utils/examples/embeddings_example.py +305 -0
- rem/utils/examples/sql_types_example.py +202 -0
- rem/utils/markdown.py +16 -0
- rem/utils/model_helpers.py +236 -0
- rem/utils/schema_loader.py +336 -0
- rem/utils/sql_types.py +348 -0
- rem/utils/user_id.py +81 -0
- rem/utils/vision.py +330 -0
- rem/workers/README.md +506 -0
- rem/workers/__init__.py +5 -0
- rem/workers/dreaming.py +502 -0
- rem/workers/engram_processor.py +312 -0
- rem/workers/sqs_file_processor.py +193 -0
- remdb-0.3.7.dist-info/METADATA +1473 -0
- remdb-0.3.7.dist-info/RECORD +187 -0
- remdb-0.3.7.dist-info/WHEEL +4 -0
- remdb-0.3.7.dist-info/entry_points.txt +2 -0
rem/cli/commands/db.py
ADDED
@@ -0,0 +1,493 @@

"""
Database management commands.

Usage:
    rem db migrate                      # Apply both install.sql and install_models.sql
    rem db migrate --install            # Apply only install.sql
    rem db migrate --models             # Apply only install_models.sql
    rem db migrate --background-indexes # Apply background indexes
    rem db status                       # Show migration status
    rem db rebuild-cache                # Rebuild KV_STORE cache
"""

import asyncio
import hashlib
import subprocess
import time
from pathlib import Path
from typing import Type

import click
from loguru import logger
from pydantic import BaseModel


def get_connection_string() -> str:
    """
    Get PostgreSQL connection string from environment or settings.

    Returns:
        Connection string for psql
    """
    import os

    # Try environment variables first
    host = os.getenv("POSTGRES__HOST", "localhost")
    port = os.getenv("POSTGRES__PORT", "5432")
    database = os.getenv("POSTGRES__DATABASE", "remdb")
    user = os.getenv("POSTGRES__USER", "postgres")
    password = os.getenv("POSTGRES__PASSWORD", "")

    # Build connection string
    conn_str = f"host={host} port={port} dbname={database} user={user}"
    if password:
        conn_str += f" password={password}"

    return conn_str


async def run_sql_file_async(file_path: Path, db) -> tuple[bool, str, float]:
    """
    Execute a SQL file using psycopg3 (synchronous, handles multi-statement SQL).

    Args:
        file_path: Path to SQL file
        db: PostgresService instance (used to get connection info)

    Returns:
        Tuple of (success, output, execution_time_ms)
    """
    if not file_path.exists():
        return False, f"File not found: {file_path}", 0

    start_time = time.time()

    try:
        # Read SQL file
        sql_content = file_path.read_text(encoding="utf-8")

        # Use psycopg3 for reliable multi-statement execution
        # This is the synchronous PostgreSQL driver, perfect for migrations
        import psycopg
        from ...settings import settings

        # Use connection string from settings
        conn_str = settings.postgres.connection_string

        # Execute using synchronous psycopg (not async)
        # This properly handles multi-statement SQL scripts
        with psycopg.connect(conn_str) as conn:
            with conn.cursor() as cur:
                cur.execute(sql_content)
            conn.commit()

        execution_time = (time.time() - start_time) * 1000
        return True, f"Successfully executed {file_path.name}", execution_time

    except Exception as e:
        execution_time = (time.time() - start_time) * 1000
        error_output = str(e)
        return False, error_output, execution_time


def calculate_checksum(file_path: Path) -> str:
    """Calculate SHA256 checksum of file."""
    if not file_path.exists():
        return ""
    return hashlib.sha256(file_path.read_bytes()).hexdigest()


@click.command()
@click.option(
    "--install",
    "install_only",
    is_flag=True,
    help="Apply only install.sql (extensions and infrastructure)",
)
@click.option(
    "--models", "models_only", is_flag=True, help="Apply only install_models.sql (entity tables)"
)
@click.option(
    "--background-indexes",
    is_flag=True,
    help="Apply background indexes (HNSW for vectors)",
)
@click.option(
    "--connection",
    "-c",
    help="PostgreSQL connection string (overrides environment)",
)
@click.option(
    "--sql-dir",
    type=click.Path(exists=True, path_type=Path),
    default=None,
    help="Directory containing SQL files (defaults to package SQL dir)",
)
def migrate(
    install_only: bool,
    models_only: bool,
    background_indexes: bool,
    connection: str | None,
    sql_dir: Path | None,
):
    """
    Apply database migrations.

    By default, applies both install.sql and install_models.sql.
    Use flags to apply specific migrations.

    Examples:
        rem db migrate                       # Apply all
        rem db migrate --install             # Core infrastructure only
        rem db migrate --models              # Entity tables only
        rem db migrate --background-indexes  # Background HNSW indexes
    """
    asyncio.run(_migrate_async(install_only, models_only, background_indexes, connection, sql_dir))


async def _migrate_async(
    install_only: bool,
    models_only: bool,
    background_indexes: bool,
    connection: str | None,
    sql_dir: Path | None,
):
    """Async implementation of migrate command."""
    from ...services.postgres import get_postgres_service

    # Find SQL directory - use package SQL if not specified
    if sql_dir is None:
        import importlib.resources
        try:
            # Python 3.9+
            sql_ref = importlib.resources.files("rem") / "sql"
            sql_dir = Path(str(sql_ref))
        except AttributeError:
            # Fallback: try to find sql dir relative to package
            import rem
            package_dir = Path(rem.__file__).parent.parent
            sql_dir = package_dir / "sql"
            if not sql_dir.exists():
                # Last resort: current directory
                sql_dir = Path("sql")

    click.echo("REM Database Migration")
    click.echo("=" * 60)
    click.echo(f"SQL Directory: {sql_dir}")
    click.echo()

    # Discover migrations from migrations/ directory
    migrations_dir = sql_dir / "migrations"

    if background_indexes:
        # Special case: background indexes
        migrations = [("background_indexes.sql", "Background Indexes")]
    elif install_only or models_only:
        # Find specific migration
        target_prefix = "001" if install_only else "002"
        migration_files = sorted(migrations_dir.glob(f"{target_prefix}_*.sql"))
        if migration_files:
            migrations = [(f"migrations/{f.name}", f.stem.replace("_", " ").title()) for f in migration_files]
        else:
            migrations = []
    else:
        # Default: discover and apply all migrations in sorted order
        migration_files = sorted(migrations_dir.glob("*.sql"))
        migrations = [(f"migrations/{f.name}", f.stem.replace("_", " ").title()) for f in migration_files]

    # Check files exist
    for filename, description in migrations:
        file_path = sql_dir / filename
        if not file_path.exists():
            if filename == "install_models.sql":
                click.secho(f"✗ {filename} not found", fg="red")
                click.echo()
                click.secho("Generate it first with:", fg="yellow")
                click.secho("  rem db schema generate --models src/rem/models/entities", fg="yellow")
                raise click.Abort()
            else:
                click.secho(f"✗ {filename} not found", fg="red")
                raise click.Abort()

    # Connect to database
    db = get_postgres_service()
    if not db:
        click.secho("Error: PostgreSQL is disabled in settings.", fg="red")
        raise click.Abort()

    await db.connect()

    try:
        # Apply migrations
        total_time = 0.0
        all_success = True

        for filename, description in migrations:
            file_path = sql_dir / filename
            checksum = calculate_checksum(file_path)

            click.echo(f"Applying: {description} ({filename})")
            click.echo(f"  Checksum: {checksum[:16]}...")

            success, output, exec_time = await run_sql_file_async(file_path, db)
            total_time += exec_time

            if success:
                click.secho(f"  ✓ Applied in {exec_time:.0f}ms", fg="green")
                # Show any NOTICE messages from the output
                for line in output.split("\n"):
                    if "NOTICE:" in line or "✓" in line:
                        notice = line.split("NOTICE:")[-1].strip()
                        if notice:
                            click.echo(f"    {notice}")
            else:
                click.secho(f"  ✗ Failed", fg="red")
                click.echo()
                click.secho("Error output:", fg="red")
                click.secho(output, fg="red")
                all_success = False
                break

            click.echo()

        # Summary
        click.echo("=" * 60)
        if all_success:
            click.secho(f"✓ All migrations applied successfully", fg="green")
            click.echo(f"  Total time: {total_time:.0f}ms")
        else:
            click.secho(f"✗ Migration failed", fg="red")
            raise click.Abort()

    finally:
        await db.disconnect()


@click.command()
@click.option(
    "--connection",
    "-c",
    help="PostgreSQL connection string (overrides environment)",
)
def status(connection: str | None):
    """
    Show migration status.

    Displays:
    - Applied migrations
    - Execution times
    - Last applied timestamps
    """
    asyncio.run(_status_async(connection))


async def _status_async(connection: str | None):
    """Async implementation of status command."""
    from ...services.postgres import get_postgres_service

    click.echo()
    click.echo("REM Migration Status")
    click.echo("=" * 60)

    db = get_postgres_service()
    if not db:
        click.secho("Error: PostgreSQL is disabled in settings.", fg="red")
        raise click.Abort()

    try:
        await db.connect()

        # Query migration status
        query = "SELECT * FROM migration_status();"

        try:
            rows = await db.fetch(query)

            if not rows:
                click.echo("No migrations found")
                click.echo()
                click.secho("Run: rem db migrate", fg="yellow")
                return

            # Display results
            click.echo()
            for row in rows:
                migration_type = row.get("migration_type", "unknown")
                count = row.get("count", 0)
                last_applied = row.get("last_applied", "never")
                total_time = row.get("total_time_ms", 0)

                click.echo(f"{migration_type.upper()}:")
                click.echo(f"  Count: {count}")
                click.echo(f"  Last Applied: {last_applied}")
                click.echo(f"  Total Time: {total_time}ms")
                click.echo()

        except Exception as e:
            error_str = str(e)
            if "does not exist" in error_str or "relation" in error_str or "function" in error_str:
                click.secho("✗ Migration tracking not found", fg="red")
                click.echo()
                click.secho("Run: rem db migrate", fg="yellow")
            else:
                click.secho(f"✗ Error: {error_str}", fg="red")
                raise click.Abort()

    finally:
        await db.disconnect()


@click.command()
@click.option(
    "--connection",
    "-c",
    help="PostgreSQL connection string (overrides environment)",
)
def rebuild_cache(connection: str | None):
    """
    Rebuild KV_STORE cache from entity tables.

    Call this after:
    - Database restart (UNLOGGED tables are cleared)
    - Manual cache invalidation
    - Bulk data imports
    """
    conn_str = connection or get_connection_string()

    click.echo("Rebuilding KV_STORE cache...")

    query = "SELECT rebuild_kv_store();"

    try:
        result = subprocess.run(
            ["psql", conn_str, "-c", query],
            capture_output=True,
            text=True,
            check=True,
        )

        click.secho("✓ Cache rebuilt successfully", fg="green")

        # Show any NOTICE messages
        for line in result.stdout.split("\n") + result.stderr.split("\n"):
            if "NOTICE:" in line:
                notice = line.split("NOTICE:")[-1].strip()
                if notice:
                    click.echo(f"  {notice}")

    except subprocess.CalledProcessError as e:
        error = e.stderr or e.stdout or str(e)
        click.secho(f"✗ Error: {error}", fg="red")
        raise click.Abort()


@click.command()
@click.argument("file_path", type=click.Path(exists=True, path_type=Path))
@click.option("--user-id", default="test-user", help="User ID for loaded data")
@click.option("--dry-run", is_flag=True, help="Show what would be loaded without loading")
def load(file_path: Path, user_id: str, dry_run: bool):
    """
    Load data from YAML file into database.

    File format:
        - table: resources
          key_field: name
          rows:
            - name: Example
              content: Test data...

    Examples:
        rem db load rem/tests/data/graph_seed.yaml
        rem db load data.yaml --user-id my-user
        rem db load data.yaml --dry-run
    """
    asyncio.run(_load_async(file_path, user_id, dry_run))


async def _load_async(file_path: Path, user_id: str, dry_run: bool):
    """Async implementation of load command."""
    import yaml
    from ...models.core.inline_edge import InlineEdge
    from ...models.entities import Resource, Moment, User
    from ...services.postgres import get_postgres_service

    logger.info(f"Loading data from: {file_path}")
    logger.info(f"User ID: {user_id}")

    # Load YAML file
    with open(file_path) as f:
        data = yaml.safe_load(f)

    if not isinstance(data, list):
        logger.error("YAML must be a list of table definitions")
        raise click.Abort()

    if dry_run:
        logger.info("DRY RUN - Would load:")
        logger.info(yaml.dump(data, default_flow_style=False))
        return

    # Map table names to model classes
    MODEL_MAP = {
        "users": User,
        "moments": Moment,
        "resources": Resource,
    }

    # Connect to database
    pg = get_postgres_service()
    if not pg:
        logger.error("PostgreSQL is disabled in settings. Enable with POSTGRES__ENABLED=true")
        raise click.Abort()

    await pg.connect()

    try:
        total_loaded = 0

        for table_def in data:
            table_name = table_def["table"]
            key_field = table_def.get("key_field", "id")
            rows = table_def.get("rows", [])

            if table_name not in MODEL_MAP:
                logger.warning(f"Unknown table: {table_name}, skipping")
                continue

            model_class = MODEL_MAP[table_name]  # Type is inferred from MODEL_MAP

            for row_data in rows:
                # Add user_id and tenant_id (set to user_id for backward compat)
                row_data["user_id"] = user_id
                row_data["tenant_id"] = user_id

                # Convert graph_edges to InlineEdge format if present
                if "graph_edges" in row_data:
                    row_data["graph_edges"] = [
                        InlineEdge(**edge).model_dump(mode='json')
                        for edge in row_data["graph_edges"]
                    ]

                # Create model instance and upsert via repository
                from ...services.postgres.repository import Repository

                instance = model_class(**row_data)
                repo = Repository(model_class, table_name, pg)  # Type inferred from MODEL_MAP
                await repo.upsert(instance)  # type: ignore[arg-type]
                total_loaded += 1

                # Log based on model type
                name = getattr(instance, 'name', getattr(instance, 'id', '?'))
                logger.success(f"Loaded {table_name[:-1]}: {name}")

        logger.success(f"Data loaded successfully! Total rows: {total_loaded}")

    finally:
        await pg.disconnect()


def register_commands(db_group):
    """Register all db commands."""
    db_group.add_command(migrate)
    db_group.add_command(status)
    db_group.add_command(rebuild_cache, name="rebuild-cache")
    db_group.add_command(load)