remdb 0.3.180__py3-none-any.whl → 0.3.258__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (70)
  1. rem/agentic/README.md +36 -2
  2. rem/agentic/__init__.py +10 -1
  3. rem/agentic/context.py +185 -1
  4. rem/agentic/context_builder.py +56 -35
  5. rem/agentic/mcp/tool_wrapper.py +2 -2
  6. rem/agentic/providers/pydantic_ai.py +303 -111
  7. rem/agentic/schema.py +2 -2
  8. rem/api/main.py +1 -1
  9. rem/api/mcp_router/resources.py +223 -0
  10. rem/api/mcp_router/server.py +4 -0
  11. rem/api/mcp_router/tools.py +608 -166
  12. rem/api/routers/admin.py +30 -4
  13. rem/api/routers/auth.py +219 -20
  14. rem/api/routers/chat/child_streaming.py +393 -0
  15. rem/api/routers/chat/completions.py +77 -40
  16. rem/api/routers/chat/sse_events.py +7 -3
  17. rem/api/routers/chat/streaming.py +381 -291
  18. rem/api/routers/chat/streaming_utils.py +325 -0
  19. rem/api/routers/common.py +18 -0
  20. rem/api/routers/dev.py +7 -1
  21. rem/api/routers/feedback.py +11 -3
  22. rem/api/routers/messages.py +176 -38
  23. rem/api/routers/models.py +9 -1
  24. rem/api/routers/query.py +17 -15
  25. rem/api/routers/shared_sessions.py +16 -0
  26. rem/auth/jwt.py +19 -4
  27. rem/auth/middleware.py +42 -28
  28. rem/cli/README.md +62 -0
  29. rem/cli/commands/ask.py +205 -114
  30. rem/cli/commands/db.py +55 -31
  31. rem/cli/commands/experiments.py +1 -1
  32. rem/cli/commands/process.py +179 -43
  33. rem/cli/commands/query.py +109 -0
  34. rem/cli/commands/session.py +117 -0
  35. rem/cli/main.py +2 -0
  36. rem/models/core/experiment.py +1 -1
  37. rem/models/entities/ontology.py +18 -20
  38. rem/models/entities/session.py +1 -0
  39. rem/schemas/agents/core/agent-builder.yaml +1 -1
  40. rem/schemas/agents/rem.yaml +1 -1
  41. rem/schemas/agents/test_orchestrator.yaml +42 -0
  42. rem/schemas/agents/test_structured_output.yaml +52 -0
  43. rem/services/content/providers.py +151 -49
  44. rem/services/content/service.py +18 -5
  45. rem/services/embeddings/worker.py +26 -12
  46. rem/services/postgres/__init__.py +28 -3
  47. rem/services/postgres/diff_service.py +57 -5
  48. rem/services/postgres/programmable_diff_service.py +635 -0
  49. rem/services/postgres/pydantic_to_sqlalchemy.py +2 -2
  50. rem/services/postgres/register_type.py +11 -10
  51. rem/services/postgres/repository.py +39 -28
  52. rem/services/postgres/schema_generator.py +5 -5
  53. rem/services/postgres/sql_builder.py +6 -5
  54. rem/services/rem/README.md +4 -3
  55. rem/services/rem/parser.py +7 -10
  56. rem/services/rem/service.py +47 -0
  57. rem/services/session/__init__.py +8 -1
  58. rem/services/session/compression.py +47 -5
  59. rem/services/session/pydantic_messages.py +310 -0
  60. rem/services/session/reload.py +2 -1
  61. rem/settings.py +92 -7
  62. rem/sql/migrations/001_install.sql +125 -7
  63. rem/sql/migrations/002_install_models.sql +159 -149
  64. rem/sql/migrations/004_cache_system.sql +10 -276
  65. rem/sql/migrations/migrate_session_id_to_uuid.sql +45 -0
  66. rem/utils/schema_loader.py +180 -120
  67. {remdb-0.3.180.dist-info → remdb-0.3.258.dist-info}/METADATA +7 -6
  68. {remdb-0.3.180.dist-info → remdb-0.3.258.dist-info}/RECORD +70 -61
  69. {remdb-0.3.180.dist-info → remdb-0.3.258.dist-info}/WHEEL +0 -0
  70. {remdb-0.3.180.dist-info → remdb-0.3.258.dist-info}/entry_points.txt +0 -0
@@ -11,39 +11,102 @@ from rem.services.content import ContentService
11
11
 
12
12
 
13
13
  @click.command(name="ingest")
14
- @click.argument("file_path", type=click.Path(exists=True))
15
- @click.option("--user-id", default=None, help="User ID to scope file privately (default: public/shared)")
14
+ @click.argument("path", type=click.Path(exists=True))
15
+ @click.option("--table", "-t", default=None, help="Target table (e.g., ontologies, resources). Auto-detected for schemas.")
16
+ @click.option("--make-private", is_flag=True, help="Make data private to a specific user. RARELY NEEDED - most data should be public/shared.")
17
+ @click.option("--user-id", default=None, help="User ID for private data. REQUIRES --make-private flag.")
16
18
  @click.option("--category", help="Optional file category")
17
19
  @click.option("--tags", help="Optional comma-separated tags")
20
+ @click.option("--pattern", "-p", default="**/*.md", help="Glob pattern for directory ingestion (default: **/*.md)")
21
+ @click.option("--dry-run", is_flag=True, help="Show what would be ingested without making changes")
18
22
  def process_ingest(
19
- file_path: str,
23
+ path: str,
24
+ table: str | None,
25
+ make_private: bool,
20
26
  user_id: str | None,
21
27
  category: str | None,
22
28
  tags: str | None,
29
+ pattern: str,
30
+ dry_run: bool,
23
31
  ):
24
32
  """
25
- Ingest a file into REM (storage + parsing + embedding).
33
+ Ingest files into REM (storage + parsing + embedding).
26
34
 
27
- This command performs the full ingestion pipeline:
28
- 1. Reads the file from the local path.
29
- 2. Stores it in the configured storage (local/S3).
30
- 3. Parses the content.
31
- 4. Chunks and embeds the content into Resources.
32
- 5. Creates a File entity record.
35
+ Supports both single files and directories. For directories, recursively
36
+ processes files matching the pattern (default: **/*.md).
37
+
38
+ **IMPORTANT: Data is PUBLIC by default.** This is the correct behavior for
39
+ shared knowledge bases (ontologies, procedures, reference data). Private
40
+ user-scoped data is rarely needed and requires explicit --make-private flag.
41
+
42
+ Target table is auto-detected for schemas (agent.yaml → schemas table).
43
+ Use --table to explicitly set the target (e.g., ontologies for clinical knowledge).
33
44
 
34
45
  Examples:
35
46
  rem process ingest sample.pdf
36
47
  rem process ingest contract.docx --category legal --tags contract,2023
37
48
  rem process ingest agent.yaml # Auto-detects kind=agent, saves to schemas table
49
+
50
+ # Directory ingestion into ontologies table (PUBLIC - no user-id needed)
51
+ rem process ingest ontology/procedures/scid-5/ --table ontologies
52
+ rem process ingest ontology/ --table ontologies --pattern "**/*.md"
53
+
54
+ # Preview what would be ingested
55
+ rem process ingest ontology/ --table ontologies --dry-run
56
+
57
+ # RARE: Private user-scoped data (requires --make-private)
58
+ rem process ingest private-notes.md --make-private --user-id user-123
38
59
  """
39
60
  import asyncio
61
+
62
+ # Validate: user_id requires --make-private flag
63
+ if user_id and not make_private:
64
+ raise click.UsageError(
65
+ "Setting --user-id requires the --make-private flag.\n\n"
66
+ "Data should be PUBLIC by default (no user-id). Private user-scoped data\n"
67
+ "is rarely needed - only use --make-private for truly personal content.\n\n"
68
+ "Example: rem process ingest file.md --make-private --user-id user-123"
69
+ )
70
+
71
+ # If --make-private is set, user_id is required
72
+ if make_private and not user_id:
73
+ raise click.UsageError(
74
+ "--make-private requires --user-id to specify which user owns the data.\n\n"
75
+ "Example: rem process ingest file.md --make-private --user-id user-123"
76
+ )
77
+
78
+ # Clear user_id if not making private (ensure None for public data)
79
+ effective_user_id = user_id if make_private else None
80
+ from pathlib import Path
40
81
  from ...services.content import ContentService
41
82
 
42
83
  async def _ingest():
43
- # Initialize ContentService with repositories for proper resource saving
44
84
  from rem.services.postgres import get_postgres_service
45
85
  from rem.services.postgres.repository import Repository
46
- from rem.models.entities import File, Resource
86
+ from rem.models.entities import File, Resource, Ontology
87
+
88
+ input_path = Path(path)
89
+ tag_list = tags.split(",") if tags else None
90
+
91
+ # Collect files to process
92
+ if input_path.is_dir():
93
+ files_to_process = list(input_path.glob(pattern))
94
+ if not files_to_process:
95
+ logger.error(f"No files matching '{pattern}' found in {input_path}")
96
+ sys.exit(1)
97
+ logger.info(f"Found {len(files_to_process)} files matching '{pattern}'")
98
+ else:
99
+ files_to_process = [input_path]
100
+
101
+ # Dry run: just show what would be processed
102
+ if dry_run:
103
+ logger.info("DRY RUN - Would ingest:")
104
+ for f in files_to_process[:20]:
105
+ entity_key = f.stem # filename without extension
106
+ logger.info(f" {f} → {table or 'auto-detect'} (key: {entity_key})")
107
+ if len(files_to_process) > 20:
108
+ logger.info(f" ... and {len(files_to_process) - 20} more files")
109
+ return
47
110
 
48
111
  db = get_postgres_service()
49
112
  if not db:
@@ -51,53 +114,126 @@ def process_ingest(
51
114
  await db.connect()
52
115
 
53
116
  try:
54
- file_repo = Repository(File, "files", db=db)
55
- resource_repo = Repository(Resource, "resources", db=db)
56
- service = ContentService(file_repo=file_repo, resource_repo=resource_repo)
57
-
58
- tag_list = tags.split(",") if tags else None
59
-
60
- scope_msg = f"user: {user_id}" if user_id else "public"
61
- logger.info(f"Ingesting file: {file_path} ({scope_msg})")
62
- result = await service.ingest_file(
63
- file_uri=file_path,
64
- user_id=user_id,
65
- category=category,
66
- tags=tag_list,
67
- is_local_server=True, # CLI is local
68
- )
69
-
70
- # Handle schema ingestion (agents/evaluators)
71
- if result.get("schema_name"):
72
- logger.success(f"Schema ingested: {result['schema_name']} (kind={result.get('kind', 'agent')})")
73
- logger.info(f"Version: {result.get('version', '1.0.0')}")
74
- # Handle file ingestion
75
- elif result.get("processing_status") == "completed":
76
- logger.success(f"File ingested: {result['file_name']}")
77
- logger.info(f"File ID: {result['file_id']}")
78
- logger.info(f"Resources created: {result['resources_created']}")
117
+ # Direct table ingestion (ontologies, etc.)
118
+ if table:
119
+ await _ingest_to_table(
120
+ db=db,
121
+ files=files_to_process,
122
+ table_name=table,
123
+ user_id=effective_user_id,
124
+ category=category,
125
+ tag_list=tag_list,
126
+ )
79
127
  else:
80
- logger.error(f"Ingestion failed: {result.get('message', 'Unknown error')}")
81
- sys.exit(1)
128
+ # Standard file ingestion via ContentService
129
+ file_repo = Repository(File, "files", db=db)
130
+ resource_repo = Repository(Resource, "resources", db=db)
131
+ service = ContentService(file_repo=file_repo, resource_repo=resource_repo)
132
+
133
+ for file_path in files_to_process:
134
+ scope_msg = f"user: {effective_user_id}" if effective_user_id else "public"
135
+ logger.info(f"Ingesting: {file_path} ({scope_msg})")
136
+
137
+ result = await service.ingest_file(
138
+ file_uri=str(file_path),
139
+ user_id=effective_user_id,
140
+ category=category,
141
+ tags=tag_list,
142
+ is_local_server=True,
143
+ )
144
+
145
+ # Handle schema ingestion (agents/evaluators)
146
+ if result.get("schema_name"):
147
+ logger.success(f"Schema: {result['schema_name']} (kind={result.get('kind', 'agent')})")
148
+ elif result.get("processing_status") == "completed":
149
+ logger.success(f"File: {result['file_name']} ({result['resources_created']} resources)")
150
+ else:
151
+ logger.error(f"Failed: {result.get('message', 'Unknown error')}")
82
152
 
83
153
  except Exception as e:
84
154
  logger.error(f"Error during ingestion: {e}")
85
155
  sys.exit(1)
86
156
  finally:
87
- # Wait for global embedding worker to finish queued tasks
157
+ # Wait for embedding worker to finish
88
158
  from rem.services.embeddings.worker import get_global_embedding_worker
89
159
  try:
90
160
  worker = get_global_embedding_worker()
91
161
  if worker and worker.running and not worker.task_queue.empty():
92
- logger.info(f"Waiting for {worker.task_queue.qsize()} embedding tasks to complete...")
93
- # Worker.stop() waits for queue to drain (see worker.py line ~148)
162
+ logger.info(f"Waiting for {worker.task_queue.qsize()} embedding tasks...")
94
163
  await worker.stop()
95
164
  except RuntimeError:
96
- # Worker doesn't exist yet - no tasks queued
97
165
  pass
98
166
 
99
167
  await db.disconnect()
100
168
 
169
+ async def _ingest_to_table(db, files, table_name, user_id, category, tag_list):
170
+ """Direct ingestion of files to a specific table (ontologies, etc.)."""
171
+ from rem.services.postgres.repository import Repository
172
+ from rem import get_model_registry
173
+ from rem.utils.model_helpers import get_table_name
174
+
175
+ # Get model class for table
176
+ registry = get_model_registry()
177
+ registry.register_core_models()
178
+ model_class = None
179
+ for model in registry.get_model_classes().values():
180
+ if get_table_name(model) == table_name:
181
+ model_class = model
182
+ break
183
+
184
+ if not model_class:
185
+ logger.error(f"Unknown table: {table_name}")
186
+ sys.exit(1)
187
+
188
+ repo = Repository(model_class, table_name, db=db)
189
+ processed = 0
190
+ failed = 0
191
+
192
+ for file_path in files:
193
+ try:
194
+ # Read file content
195
+ content = file_path.read_text(encoding="utf-8")
196
+
197
+ # Generate entity key from filename
198
+ # Special case: README files use parent directory as section name
199
+ if file_path.stem.lower() == "readme":
200
+ # Use parent directory name, e.g., "drugs" for drugs/README.md
201
+ # For nested paths like disorders/anxiety/README.md -> "anxiety"
202
+ entity_key = file_path.parent.name
203
+ else:
204
+ entity_key = file_path.stem # filename without extension
205
+
206
+ # Build entity based on table
207
+ entity_data = {
208
+ "name": entity_key,
209
+ "content": content,
210
+ "tags": tag_list or [],
211
+ }
212
+
213
+ # Add optional fields
214
+ if category:
215
+ entity_data["category"] = category
216
+
217
+ # Scoping: user_id for private data, "public" for shared
218
+ # tenant_id="public" is the default for shared knowledge bases
219
+ entity_data["tenant_id"] = user_id or "public"
220
+ entity_data["user_id"] = user_id # None = public/shared
221
+
222
+ # For ontologies, add URI
223
+ if table_name == "ontologies":
224
+ entity_data["uri"] = f"file://{file_path.absolute()}"
225
+
226
+ entity = model_class(**entity_data)
227
+ await repo.upsert(entity, embeddable_fields=["content"], generate_embeddings=True)
228
+ processed += 1
229
+ logger.success(f" ✓ {entity_key}")
230
+
231
+ except Exception as e:
232
+ failed += 1
233
+ logger.error(f" ✗ {file_path.name}: {e}")
234
+
235
+ logger.info(f"Completed: {processed} succeeded, {failed} failed")
236
+
101
237
  asyncio.run(_ingest())
102
238
 
103
239
  def register_commands(group: click.Group):
@@ -0,0 +1,109 @@
1
+ """
2
+ REM query command.
3
+
4
+ Usage:
5
+ rem query --sql 'LOOKUP "Sarah Chen"'
6
+ rem query --sql 'SEARCH resources "API design" LIMIT 10'
7
+ rem query --sql "SELECT * FROM resources LIMIT 5"
8
+ rem query --file queries/my_query.sql
9
+
10
+ This tool connects to the configured PostgreSQL instance and executes the
11
+ provided REM dialect query, printing results as JSON (default) or plain dicts.
12
+ """
13
+
14
+ from __future__ import annotations
15
+
16
+ import asyncio
17
+ import json
18
+ from pathlib import Path
19
+ from typing import List
20
+
21
+ import click
22
+ from loguru import logger
23
+
24
+ from ...services.rem import QueryExecutionError
25
+ from ...services.rem.service import RemService
26
+
27
+
28
@click.command("query")
@click.option("--sql", "-s", default=None, help="REM query string (LOOKUP, SEARCH, FUZZY, TRAVERSE, or SQL)")
@click.option(
    "--file",
    "-f",
    "sql_file",
    type=click.Path(exists=True, path_type=Path),
    default=None,
    help="Path to file containing REM query",
)
@click.option("--no-json", is_flag=True, default=False, help="Print rows as Python dicts instead of JSON")
@click.option("--user-id", "-u", default=None, help="Scope query to a specific user")
def query_command(sql: str | None, sql_file: Path | None, no_json: bool, user_id: str | None):
    """
    Execute a REM query against the database.

    Supports REM dialect queries (LOOKUP, SEARCH, FUZZY, TRAVERSE) and raw SQL.
    Either --sql or --file must be provided.
    """
    # Guard: the query must come from at least one of the two sources.
    if not (sql or sql_file):
        click.secho("Error: either --sql or --file is required", fg="red")
        raise click.Abort()

    # A file, when given, takes precedence over the inline --sql string.
    query_text = sql_file.read_text(encoding="utf-8") if sql_file else sql

    try:
        asyncio.run(_run_query_async(query_text, not no_json, user_id))
    except Exception as err:  # pragma: no cover - CLI error path
        logger.exception("Query failed")
        click.secho(f"✗ Query failed: {err}", fg="red")
        raise click.Abort()
+
64
+
65
+ async def _run_query_async(query_text: str, as_json: bool, user_id: str | None) -> None:
66
+ """
67
+ Execute the query using RemService.execute_query_string().
68
+ """
69
+ from ...services.postgres import get_postgres_service
70
+
71
+ db = get_postgres_service()
72
+ if not db:
73
+ click.secho("✗ PostgreSQL is disabled in settings. Enable with POSTGRES__ENABLED=true", fg="red")
74
+ raise click.Abort()
75
+
76
+ if db.pool is None:
77
+ await db.connect()
78
+
79
+ rem_service = RemService(db)
80
+
81
+ try:
82
+ # Use the unified execute_query_string method
83
+ result = await rem_service.execute_query_string(query_text, user_id=user_id)
84
+ output_rows = result.get("results", [])
85
+ except QueryExecutionError as qe:
86
+ logger.exception("Query execution failed")
87
+ click.secho(f"✗ Query execution failed: {qe}. Please check the query you provided and try again.", fg="red")
88
+ raise click.Abort()
89
+ except ValueError as ve:
90
+ # Parse errors from the query parser
91
+ click.secho(f"✗ Invalid query: {ve}", fg="red")
92
+ raise click.Abort()
93
+ except Exception as exc: # pragma: no cover - CLI error path
94
+ logger.exception("Unexpected error during query execution")
95
+ click.secho("✗ An unexpected error occurred while executing the query. Please check the query you provided and try again.", fg="red")
96
+ raise click.Abort()
97
+
98
+ if as_json:
99
+ click.echo(json.dumps(output_rows, default=str, indent=2))
100
+ else:
101
+ for r in output_rows:
102
+ click.echo(str(r))
103
+
104
+
105
+ def register_command(cli_group):
106
+ """Register the query command on the given CLI group (top-level)."""
107
+ cli_group.add_command(query_command)
108
+
109
+
@@ -331,6 +331,123 @@ async def _show_async(
331
331
  raise
332
332
 
333
333
 
334
@session.command("clone")
@click.argument("session_id")
@click.option("--to-turn", "-t", type=int, help="Clone up to turn N (counting user messages only)")
@click.option("--name", "-n", help="Name/description for the cloned session")
def clone(session_id: str, to_turn: int | None, name: str | None):
    """
    Clone a session for exploring alternate conversation paths.

    SESSION_ID: The session ID to clone.

    Examples:

        # Clone entire session
        rem session clone 810f1f2d-d5a1-4c02-83b6-67040b47f7c0

        # Clone up to turn 3 (first 3 user messages and their responses)
        rem session clone 810f1f2d-d5a1-4c02-83b6-67040b47f7c0 --to-turn 3

        # Clone with a descriptive name
        rem session clone 810f1f2d-d5a1-4c02-83b6-67040b47f7c0 -n "Alternate anxiety path"
    """
    # Synchronous click entry point; all database work happens in _clone_async.
    asyncio.run(_clone_async(session_id, to_turn, name))
+
357
+
358
+ async def _clone_async(
359
+ session_id: str,
360
+ to_turn: int | None,
361
+ name: str | None,
362
+ ):
363
+ """Async implementation of clone command."""
364
+ from uuid import uuid4
365
+ from ...models.entities.session import Session, SessionMode
366
+
367
+ pg = get_postgres_service()
368
+ if not pg:
369
+ logger.error("PostgreSQL not available")
370
+ return
371
+
372
+ await pg.connect()
373
+
374
+ try:
375
+ # Load original session messages
376
+ message_repo = Repository(Message, "messages", db=pg)
377
+ messages = await message_repo.find(
378
+ filters={"session_id": session_id},
379
+ order_by="created_at ASC",
380
+ limit=1000,
381
+ )
382
+
383
+ if not messages:
384
+ logger.error(f"No messages found for session {session_id}")
385
+ return
386
+
387
+ # If --to-turn specified, filter messages up to that turn (user messages)
388
+ if to_turn is not None:
389
+ user_count = 0
390
+ cutoff_idx = len(messages)
391
+ for idx, msg in enumerate(messages):
392
+ if msg.message_type == "user":
393
+ user_count += 1
394
+ if user_count > to_turn:
395
+ cutoff_idx = idx
396
+ break
397
+ messages = messages[:cutoff_idx]
398
+ logger.info(f"Cloning {len(messages)} messages (up to turn {to_turn})")
399
+ else:
400
+ logger.info(f"Cloning all {len(messages)} messages")
401
+
402
+ # Generate new session ID
403
+ new_session_id = str(uuid4())
404
+
405
+ # Get user_id and tenant_id from first message
406
+ first_msg = messages[0]
407
+ user_id = first_msg.user_id
408
+ tenant_id = first_msg.tenant_id or "default"
409
+
410
+ # Create Session record with CLONE mode and lineage
411
+ session_repo = Repository(Session, "sessions", db=pg)
412
+ new_session = Session(
413
+ id=uuid4(),
414
+ name=name or f"Clone of {session_id[:8]}",
415
+ mode=SessionMode.CLONE,
416
+ original_trace_id=session_id,
417
+ description=f"Cloned from session {session_id}" + (f" at turn {to_turn}" if to_turn else ""),
418
+ user_id=user_id,
419
+ tenant_id=tenant_id,
420
+ message_count=len(messages),
421
+ )
422
+ await session_repo.upsert(new_session)
423
+ logger.info(f"Created session record: {new_session.id}")
424
+
425
+ # Copy messages with new session_id
426
+ for msg in messages:
427
+ new_msg = Message(
428
+ id=uuid4(),
429
+ user_id=msg.user_id,
430
+ tenant_id=msg.tenant_id,
431
+ session_id=str(new_session.id),
432
+ content=msg.content,
433
+ message_type=msg.message_type,
434
+ metadata=msg.metadata,
435
+ )
436
+ await message_repo.upsert(new_msg)
437
+
438
+ click.echo(f"\n✅ Cloned session successfully!")
439
+ click.echo(f" Original: {session_id}")
440
+ click.echo(f" New: {new_session.id}")
441
+ click.echo(f" Messages: {len(messages)}")
442
+ if to_turn:
443
+ click.echo(f" Turns: {to_turn}")
444
+ click.echo(f"\nContinue this session with:")
445
+ click.echo(f" rem ask <agent> \"your message\" --session-id {new_session.id}")
446
+
447
+ finally:
448
+ await pg.disconnect()
449
+
450
+
334
451
  def register_command(cli_group):
335
452
  """Register the session command group."""
336
453
  cli_group.add_command(session)
rem/cli/main.py CHANGED
@@ -97,6 +97,7 @@ from .commands.mcp import register_command as register_mcp_command
97
97
  from .commands.scaffold import scaffold as scaffold_command
98
98
  from .commands.cluster import register_commands as register_cluster_commands
99
99
  from .commands.session import register_command as register_session_command
100
+ from .commands.query import register_command as register_query_command
100
101
 
101
102
  register_schema_commands(schema)
102
103
  register_db_commands(db)
@@ -107,6 +108,7 @@ register_ask_command(cli)
107
108
  register_configure_command(cli)
108
109
  register_serve_command(cli)
109
110
  register_mcp_command(cli)
111
+ register_query_command(cli)
110
112
  cli.add_command(experiments_group)
111
113
  cli.add_command(scaffold_command)
112
114
  register_session_command(cli)
@@ -461,7 +461,7 @@ class ExperimentConfig(BaseModel):
461
461
  """
462
462
  Get the evaluator filename with task prefix.
463
463
 
464
- Returns: {agent_name}-{task}.yaml (e.g., siggy-risk-assessment.yaml)
464
+ Returns: {agent_name}-{task}.yaml (e.g., rem-risk-assessment.yaml)
465
465
  """
466
466
  return f"{self.agent_schema_ref.name}-{self.task}.yaml"
467
467
 
@@ -103,32 +103,30 @@ class Ontology(CoreModel):
103
103
  tags=["cv", "engineering"]
104
104
  )
105
105
 
106
- # Direct-loaded: Medical knowledge base from git
107
- disorder_ontology = Ontology(
108
- name="panic-disorder",
109
- uri="git://bwolfson-siggie/Siggy-MVP/ontology/disorders/anxiety/panic-disorder.md",
110
- content="# Panic Disorder\\n\\nPanic disorder is characterized by...",
106
+ # Direct-loaded: Knowledge base from git
107
+ api_docs = Ontology(
108
+ name="rest-api-guide",
109
+ uri="git://example-org/docs/api/rest-api-guide.md",
110
+ content="# REST API Guide\\n\\nThis guide covers RESTful API design...",
111
111
  extracted_data={
112
- "type": "disorder",
113
- "category": "anxiety",
114
- "icd10": "F41.0",
115
- "dsm5_criteria": ["A", "B", "C", "D"],
112
+ "type": "documentation",
113
+ "category": "api",
114
+ "version": "2.0",
116
115
  },
117
- tags=["disorder", "anxiety", "dsm5"]
116
+ tags=["api", "rest", "documentation"]
118
117
  )
119
118
 
120
- # Direct-loaded: Clinical procedure from git
121
- scid_node = Ontology(
122
- name="scid-5-f1",
123
- uri="git://bwolfson-siggie/Siggy-MVP/ontology/procedures/scid-5/module-f/scid-5-f1.md",
124
- content="# scid-5-f1: Panic Attack Screening\\n\\n...",
119
+ # Direct-loaded: Technical spec from git
120
+ config_spec = Ontology(
121
+ name="config-schema",
122
+ uri="git://example-org/docs/specs/config-schema.md",
123
+ content="# Configuration Schema\\n\\nThis document defines...",
125
124
  extracted_data={
126
- "type": "procedure",
127
- "module": "F",
128
- "section": "Panic Disorder",
129
- "dsm5_criterion": "Panic Attack Specifier",
125
+ "type": "specification",
126
+ "format": "yaml",
127
+ "version": "1.0",
130
128
  },
131
- tags=["scid-5", "procedure", "anxiety"]
129
+ tags=["config", "schema", "specification"]
132
130
  )
133
131
  """
134
132
 
@@ -21,6 +21,7 @@ class SessionMode(str, Enum):
21
21
 
22
22
  NORMAL = "normal"
23
23
  EVALUATION = "evaluation"
24
+ CLONE = "clone"
24
25
 
25
26
 
26
27
  class Session(CoreModel):
@@ -229,7 +229,7 @@ json_schema_extra:
229
229
  - uri: rem://agents
230
230
  description: "List all available agent schemas with descriptions"
231
231
  - uri: rem://agents/{agent_name}
232
- description: "Load a specific agent schema by name (e.g., 'rem', 'siggy')"
232
+ description: "Load a specific agent schema by name (e.g., 'rem', 'intake')"
233
233
  tools:
234
234
  - name: save_agent
235
235
  description: "Save the agent schema. Only call when user approves the preview in Step 6."
@@ -124,7 +124,7 @@ json_schema_extra:
124
124
 
125
125
  # Explicit resource declarations for reference data
126
126
  resources:
127
- - uri: rem://schemas
127
+ - uri: rem://agents
128
128
  name: Agent Schemas List
129
129
  description: List all available agent schemas in the system
130
130
  - uri: rem://status
@@ -0,0 +1,42 @@
1
+ # =============================================================================
2
+ # TEST ORCHESTRATOR AGENT
3
+ # =============================================================================
4
+ # Parent agent that delegates to test_structured_output for testing
5
+ # the structured output persistence feature.
6
+ # =============================================================================
7
+
8
+ name: test_orchestrator
9
+ version: "1.0"
10
+ description: |
11
+ You are an orchestrator that helps analyze user messages.
12
+
13
+ When the user provides a message to analyze, you MUST:
14
+ 1. Call the ask_agent tool to delegate to "test_structured_output"
15
+ 2. Return the structured result to the user
16
+
17
+ ## CRITICAL RULES
18
+
19
+ - ALWAYS call ask_agent with agent_name="test_structured_output"
20
+ - Pass the user's message as input_text
21
+ - Report back the structured result you receive
22
+
23
+ type: object
24
+ properties:
25
+ answer:
26
+ type: string
27
+ description: Response to the user
28
+
29
+ required:
30
+ - answer
31
+
32
+ json_schema_extra:
33
+ kind: agent
34
+ name: test_orchestrator
35
+ version: "1.0.0"
36
+ tags: [test, orchestrator]
37
+ tools:
38
+ - name: ask_agent
39
+ description: |
40
+ Delegate to the test_structured_output agent to analyze the message.
41
+ Always use agent_name="test_structured_output".
42
+ resources: []