remdb 0.3.163__py3-none-any.whl → 0.3.200__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of remdb might be problematic. Click here for more details.

Files changed (48)
  1. rem/agentic/agents/agent_manager.py +2 -1
  2. rem/agentic/context.py +101 -0
  3. rem/agentic/context_builder.py +30 -8
  4. rem/agentic/mcp/tool_wrapper.py +43 -14
  5. rem/agentic/providers/pydantic_ai.py +76 -34
  6. rem/agentic/schema.py +4 -3
  7. rem/agentic/tools/rem_tools.py +11 -0
  8. rem/api/main.py +1 -1
  9. rem/api/mcp_router/resources.py +75 -14
  10. rem/api/mcp_router/server.py +31 -24
  11. rem/api/mcp_router/tools.py +476 -155
  12. rem/api/routers/auth.py +11 -6
  13. rem/api/routers/chat/completions.py +52 -10
  14. rem/api/routers/chat/sse_events.py +2 -2
  15. rem/api/routers/chat/streaming.py +162 -19
  16. rem/api/routers/messages.py +96 -23
  17. rem/auth/middleware.py +59 -42
  18. rem/cli/README.md +62 -0
  19. rem/cli/commands/ask.py +1 -1
  20. rem/cli/commands/db.py +148 -70
  21. rem/cli/commands/process.py +171 -43
  22. rem/models/entities/ontology.py +93 -101
  23. rem/schemas/agents/core/agent-builder.yaml +143 -42
  24. rem/services/content/service.py +18 -5
  25. rem/services/email/service.py +17 -6
  26. rem/services/embeddings/worker.py +26 -12
  27. rem/services/postgres/__init__.py +28 -3
  28. rem/services/postgres/diff_service.py +57 -5
  29. rem/services/postgres/programmable_diff_service.py +635 -0
  30. rem/services/postgres/pydantic_to_sqlalchemy.py +2 -2
  31. rem/services/postgres/register_type.py +12 -11
  32. rem/services/postgres/repository.py +32 -21
  33. rem/services/postgres/schema_generator.py +5 -5
  34. rem/services/postgres/sql_builder.py +6 -5
  35. rem/services/session/__init__.py +7 -1
  36. rem/services/session/pydantic_messages.py +210 -0
  37. rem/services/user_service.py +12 -9
  38. rem/settings.py +7 -1
  39. rem/sql/background_indexes.sql +5 -0
  40. rem/sql/migrations/001_install.sql +148 -11
  41. rem/sql/migrations/002_install_models.sql +162 -132
  42. rem/sql/migrations/004_cache_system.sql +7 -275
  43. rem/utils/model_helpers.py +101 -0
  44. rem/utils/schema_loader.py +51 -13
  45. {remdb-0.3.163.dist-info → remdb-0.3.200.dist-info}/METADATA +1 -1
  46. {remdb-0.3.163.dist-info → remdb-0.3.200.dist-info}/RECORD +48 -46
  47. {remdb-0.3.163.dist-info → remdb-0.3.200.dist-info}/WHEEL +0 -0
  48. {remdb-0.3.163.dist-info → remdb-0.3.200.dist-info}/entry_points.txt +0 -0
@@ -1,16 +1,15 @@
1
1
  -- REM Cache System
2
- -- Description: Self-healing cache for UNLOGGED tables (kv_store)
2
+ -- Description: Cache management helpers for UNLOGGED tables (kv_store)
3
3
  -- Version: 1.0.0
4
4
  -- Date: 2025-11-29
5
5
  --
6
6
  -- This migration adds:
7
7
  -- 1. cache_system_state table for debouncing and API secret storage
8
8
  -- 2. maybe_trigger_kv_rebuild() function for async rebuild triggering
9
- -- 3. Updated rem_lookup/fuzzy/traverse with self-healing on empty cache
9
+ -- 3. Helper functions for cache management
10
10
  --
11
- -- Self-Healing Flow:
12
- -- Query returns 0 results → Check if kv_store empty → Trigger async rebuild
13
- -- Priority: pg_net (if available) → dblink (always available)
11
+ -- NOTE: Core functions (rem_lookup, rem_fuzzy, rem_traverse) are defined in 001_install.sql
12
+ -- This file only provides cache-specific infrastructure.
14
13
 
15
14
  -- ============================================================================
16
15
  -- REQUIRED EXTENSION
@@ -194,271 +193,6 @@ $$ LANGUAGE plpgsql;
194
193
  COMMENT ON FUNCTION maybe_trigger_kv_rebuild IS
195
194
  'Async trigger for kv_store rebuild. Uses pg_net (API) or dblink (SQL). Includes debouncing.';
196
195
 
197
- -- ============================================================================
198
- -- UPDATED: rem_lookup with self-healing
199
- -- ============================================================================
200
-
201
- CREATE OR REPLACE FUNCTION rem_lookup(
202
- p_entity_key VARCHAR(255),
203
- p_tenant_id VARCHAR(100),
204
- p_user_id VARCHAR(100)
205
- )
206
- RETURNS TABLE(
207
- entity_type VARCHAR(100),
208
- data JSONB
209
- ) AS $$
210
- DECLARE
211
- entity_table VARCHAR(100);
212
- query_sql TEXT;
213
- effective_user_id VARCHAR(100);
214
- v_result_count INTEGER := 0;
215
- BEGIN
216
- effective_user_id := COALESCE(p_user_id, p_tenant_id);
217
-
218
- -- First lookup in KV store to get entity_type (table name)
219
- SELECT kv.entity_type INTO entity_table
220
- FROM kv_store kv
221
- WHERE (kv.user_id = effective_user_id OR kv.user_id IS NULL)
222
- AND kv.entity_key = p_entity_key
223
- LIMIT 1;
224
-
225
- -- If not found, check if cache is empty and maybe trigger rebuild
226
- IF entity_table IS NULL THEN
227
- -- SELF-HEALING: Check if this is because cache is empty
228
- IF rem_kv_store_empty(effective_user_id) THEN
229
- PERFORM maybe_trigger_kv_rebuild(effective_user_id, 'rem_lookup');
230
- END IF;
231
- RETURN;
232
- END IF;
233
-
234
- -- Fetch raw record from underlying table as JSONB
235
- query_sql := format('
236
- SELECT
237
- %L::VARCHAR(100) AS entity_type,
238
- row_to_json(t)::jsonb AS data
239
- FROM %I t
240
- WHERE (t.user_id = $1 OR t.user_id IS NULL)
241
- AND t.name = $2
242
- AND t.deleted_at IS NULL
243
- ', entity_table, entity_table);
244
-
245
- RETURN QUERY EXECUTE query_sql USING effective_user_id, p_entity_key;
246
- END;
247
- $$ LANGUAGE plpgsql STABLE;
248
-
249
- -- ============================================================================
250
- -- UPDATED: rem_fuzzy with self-healing
251
- -- ============================================================================
252
-
253
- CREATE OR REPLACE FUNCTION rem_fuzzy(
254
- p_query TEXT,
255
- p_tenant_id VARCHAR(100),
256
- p_threshold REAL DEFAULT 0.3,
257
- p_limit INTEGER DEFAULT 10,
258
- p_user_id VARCHAR(100) DEFAULT NULL
259
- )
260
- RETURNS TABLE(
261
- entity_type VARCHAR(100),
262
- similarity_score REAL,
263
- data JSONB
264
- ) AS $$
265
- DECLARE
266
- kv_matches RECORD;
267
- entities_by_table JSONB := '{}'::jsonb;
268
- table_keys JSONB;
269
- effective_user_id VARCHAR(100);
270
- v_found_any BOOLEAN := FALSE;
271
- BEGIN
272
- effective_user_id := COALESCE(p_user_id, p_tenant_id);
273
-
274
- -- Find matching keys in KV store
275
- FOR kv_matches IN
276
- SELECT
277
- kv.entity_key,
278
- kv.entity_type,
279
- similarity(kv.entity_key, p_query) AS sim_score
280
- FROM kv_store kv
281
- WHERE (kv.user_id = effective_user_id OR kv.user_id IS NULL)
282
- AND kv.entity_key % p_query
283
- AND similarity(kv.entity_key, p_query) >= p_threshold
284
- ORDER BY sim_score DESC
285
- LIMIT p_limit
286
- LOOP
287
- v_found_any := TRUE;
288
- -- Build JSONB mapping {table: [keys]}
289
- IF entities_by_table ? kv_matches.entity_type THEN
290
- table_keys := entities_by_table->kv_matches.entity_type;
291
- entities_by_table := jsonb_set(
292
- entities_by_table,
293
- ARRAY[kv_matches.entity_type],
294
- table_keys || jsonb_build_array(kv_matches.entity_key)
295
- );
296
- ELSE
297
- entities_by_table := jsonb_set(
298
- entities_by_table,
299
- ARRAY[kv_matches.entity_type],
300
- jsonb_build_array(kv_matches.entity_key)
301
- );
302
- END IF;
303
- END LOOP;
304
-
305
- -- SELF-HEALING: If no matches and cache is empty, trigger rebuild
306
- IF NOT v_found_any AND rem_kv_store_empty(effective_user_id) THEN
307
- PERFORM maybe_trigger_kv_rebuild(effective_user_id, 'rem_fuzzy');
308
- END IF;
309
-
310
- -- Fetch full records
311
- RETURN QUERY
312
- SELECT
313
- f.entity_type::VARCHAR(100),
314
- similarity(f.entity_key, p_query) AS similarity_score,
315
- f.entity_record AS data
316
- FROM rem_fetch(entities_by_table, effective_user_id) f
317
- ORDER BY similarity_score DESC;
318
- END;
319
- $$ LANGUAGE plpgsql STABLE;
320
-
321
- -- ============================================================================
322
- -- UPDATED: rem_traverse with self-healing
323
- -- ============================================================================
324
-
325
- CREATE OR REPLACE FUNCTION rem_traverse(
326
- p_entity_key VARCHAR(255),
327
- p_tenant_id VARCHAR(100),
328
- p_user_id VARCHAR(100),
329
- p_max_depth INTEGER DEFAULT 1,
330
- p_rel_type VARCHAR(100) DEFAULT NULL,
331
- p_keys_only BOOLEAN DEFAULT FALSE
332
- )
333
- RETURNS TABLE(
334
- depth INTEGER,
335
- entity_key VARCHAR(255),
336
- entity_type VARCHAR(100),
337
- entity_id UUID,
338
- rel_type VARCHAR(100),
339
- rel_weight REAL,
340
- path TEXT[],
341
- entity_record JSONB
342
- ) AS $$
343
- DECLARE
344
- graph_keys RECORD;
345
- entities_by_table JSONB := '{}'::jsonb;
346
- table_keys JSONB;
347
- effective_user_id VARCHAR(100);
348
- v_found_start BOOLEAN := FALSE;
349
- BEGIN
350
- effective_user_id := COALESCE(p_user_id, p_tenant_id);
351
-
352
- -- Check if start entity exists in kv_store
353
- SELECT TRUE INTO v_found_start
354
- FROM kv_store kv
355
- WHERE (kv.user_id = effective_user_id OR kv.user_id IS NULL)
356
- AND kv.entity_key = p_entity_key
357
- LIMIT 1;
358
-
359
- -- SELF-HEALING: If start not found and cache is empty, trigger rebuild
360
- IF NOT COALESCE(v_found_start, FALSE) THEN
361
- IF rem_kv_store_empty(effective_user_id) THEN
362
- PERFORM maybe_trigger_kv_rebuild(effective_user_id, 'rem_traverse');
363
- END IF;
364
- RETURN;
365
- END IF;
366
-
367
- -- Original traverse logic
368
- FOR graph_keys IN
369
- WITH RECURSIVE graph_traversal AS (
370
- SELECT
371
- 0 AS depth,
372
- kv.entity_key,
373
- kv.entity_type,
374
- kv.entity_id,
375
- NULL::VARCHAR(100) AS rel_type,
376
- NULL::REAL AS rel_weight,
377
- ARRAY[kv.entity_key]::TEXT[] AS path
378
- FROM kv_store kv
379
- WHERE (kv.user_id = effective_user_id OR kv.user_id IS NULL)
380
- AND kv.entity_key = p_entity_key
381
-
382
- UNION ALL
383
-
384
- SELECT
385
- gt.depth + 1,
386
- target_kv.entity_key,
387
- target_kv.entity_type,
388
- target_kv.entity_id,
389
- (edge->>'rel_type')::VARCHAR(100) AS rel_type,
390
- COALESCE((edge->>'weight')::REAL, 1.0) AS rel_weight,
391
- gt.path || target_kv.entity_key AS path
392
- FROM graph_traversal gt
393
- JOIN kv_store source_kv ON source_kv.entity_key = gt.entity_key
394
- AND (source_kv.user_id = effective_user_id OR source_kv.user_id IS NULL)
395
- CROSS JOIN LATERAL jsonb_array_elements(COALESCE(source_kv.graph_edges, '[]'::jsonb)) AS edge
396
- JOIN kv_store target_kv ON target_kv.entity_key = (edge->>'dst')::VARCHAR(255)
397
- AND (target_kv.user_id = effective_user_id OR target_kv.user_id IS NULL)
398
- WHERE gt.depth < p_max_depth
399
- AND (p_rel_type IS NULL OR (edge->>'rel_type')::VARCHAR(100) = p_rel_type)
400
- AND NOT (target_kv.entity_key = ANY(gt.path))
401
- )
402
- SELECT DISTINCT ON (gt.entity_key)
403
- gt.depth,
404
- gt.entity_key,
405
- gt.entity_type,
406
- gt.entity_id,
407
- gt.rel_type,
408
- gt.rel_weight,
409
- gt.path
410
- FROM graph_traversal gt
411
- WHERE gt.depth > 0
412
- ORDER BY gt.entity_key, gt.depth
413
- LOOP
414
- IF p_keys_only THEN
415
- depth := graph_keys.depth;
416
- entity_key := graph_keys.entity_key;
417
- entity_type := graph_keys.entity_type;
418
- entity_id := graph_keys.entity_id;
419
- rel_type := graph_keys.rel_type;
420
- rel_weight := graph_keys.rel_weight;
421
- path := graph_keys.path;
422
- entity_record := NULL;
423
- RETURN NEXT;
424
- ELSE
425
- IF entities_by_table ? graph_keys.entity_type THEN
426
- table_keys := entities_by_table->graph_keys.entity_type;
427
- entities_by_table := jsonb_set(
428
- entities_by_table,
429
- ARRAY[graph_keys.entity_type],
430
- table_keys || jsonb_build_array(graph_keys.entity_key)
431
- );
432
- ELSE
433
- entities_by_table := jsonb_set(
434
- entities_by_table,
435
- ARRAY[graph_keys.entity_type],
436
- jsonb_build_array(graph_keys.entity_key)
437
- );
438
- END IF;
439
- END IF;
440
- END LOOP;
441
-
442
- IF NOT p_keys_only THEN
443
- RETURN QUERY
444
- SELECT
445
- g.depth,
446
- g.entity_key,
447
- g.entity_type,
448
- g.entity_id,
449
- g.rel_type,
450
- g.rel_weight,
451
- g.path,
452
- f.entity_record
453
- FROM (
454
- SELECT * FROM rem_traverse(p_entity_key, p_tenant_id, effective_user_id, p_max_depth, p_rel_type, TRUE)
455
- ) g
456
- LEFT JOIN rem_fetch(entities_by_table, effective_user_id) f
457
- ON g.entity_key = f.entity_key;
458
- END IF;
459
- END;
460
- $$ LANGUAGE plpgsql STABLE;
461
-
462
196
  -- ============================================================================
463
197
  -- HELPER: Get API secret for validation
464
198
  -- ============================================================================
@@ -527,9 +261,9 @@ BEGIN
527
261
  RAISE NOTICE '';
528
262
  RAISE NOTICE 'Functions:';
529
263
  RAISE NOTICE ' maybe_trigger_kv_rebuild() - Async rebuild trigger';
530
- RAISE NOTICE ' rem_lookup() - Updated with self-healing';
531
- RAISE NOTICE ' rem_fuzzy() - Updated with self-healing';
532
- RAISE NOTICE ' rem_traverse() - Updated with self-healing';
264
+ RAISE NOTICE ' rem_kv_store_empty() - Check if cache is empty';
265
+ RAISE NOTICE ' rem_get_cache_api_secret() - Get API secret';
266
+ RAISE NOTICE ' rem_record_cache_rebuild() - Record rebuild completion';
533
267
  RAISE NOTICE '';
534
268
  RAISE NOTICE 'Async Methods Available:';
535
269
  IF v_has_pgnet THEN
@@ -542,7 +276,5 @@ BEGIN
542
276
  ELSE
543
277
  RAISE NOTICE ' [ ] dblink - Not installed';
544
278
  END IF;
545
- RAISE NOTICE '';
546
- RAISE NOTICE 'Self-Healing: Queries will auto-trigger rebuild on empty cache';
547
279
  RAISE NOTICE '============================================================';
548
280
  END $$;
@@ -19,6 +19,9 @@ Table Name Inference:
19
19
 
20
20
  Model Resolution:
21
21
  - model_from_arbitrary_casing: Resolve model class from flexible input casing
22
+
23
+ Data Validation:
24
+ - validate_data_for_model: Validate row data against a Pydantic model with clear error reporting
22
25
  """
23
26
 
24
27
  import re
@@ -389,3 +392,101 @@ def model_from_arbitrary_casing(
389
392
  f"Unknown model: '{name}' (normalized: '{normalized}'). "
390
393
  f"Available models: {', '.join(available)}"
391
394
  )
395
+
396
+
397
+ class ValidationResult:
398
+ """Result of validating data against a Pydantic model."""
399
+
400
+ def __init__(
401
+ self,
402
+ valid: bool,
403
+ instance: BaseModel | None = None,
404
+ errors: list[str] | None = None,
405
+ missing_required: set[str] | None = None,
406
+ extra_fields: set[str] | None = None,
407
+ required_fields: set[str] | None = None,
408
+ optional_fields: set[str] | None = None,
409
+ ):
410
+ self.valid = valid
411
+ self.instance = instance
412
+ self.errors = errors or []
413
+ self.missing_required = missing_required or set()
414
+ self.extra_fields = extra_fields or set()
415
+ self.required_fields = required_fields or set()
416
+ self.optional_fields = optional_fields or set()
417
+
418
+ def log_errors(self, row_label: str = "Row") -> None:
419
+ """Log validation errors using loguru."""
420
+ if self.valid:
421
+ return
422
+
423
+ logger.error(f"{row_label}: Validation failed")
424
+ if self.missing_required:
425
+ logger.error(f" Missing required: {self.missing_required}")
426
+ if self.extra_fields:
427
+ logger.warning(f" Unknown fields (ignored): {self.extra_fields}")
428
+ for err in self.errors:
429
+ logger.error(f" - {err}")
430
+ logger.info(f" Required: {self.required_fields or '(none)'}")
431
+ logger.info(f" Optional: {self.optional_fields}")
432
+
433
+
434
+ def validate_data_for_model(
435
+ model: Type[BaseModel],
436
+ data: dict[str, Any],
437
+ ) -> ValidationResult:
438
+ """
439
+ Validate a data dict against a Pydantic model with detailed error reporting.
440
+
441
+ Args:
442
+ model: Pydantic model class to validate against
443
+ data: Dictionary of field values
444
+
445
+ Returns:
446
+ ValidationResult with validation status and detailed field info
447
+
448
+ Example:
449
+ >>> from rem.models.entities import Resource
450
+ >>> result = validate_data_for_model(Resource, {"name": "test", "content": "hello"})
451
+ >>> result.valid
452
+ True
453
+ >>> result = validate_data_for_model(Resource, {"unknown_field": "value"})
454
+ >>> result.valid
455
+ True # Resource has no required fields
456
+ >>> result.extra_fields
457
+ {'unknown_field'}
458
+ """
459
+ from pydantic import ValidationError
460
+
461
+ model_fields = set(model.model_fields.keys())
462
+ required = {k for k, v in model.model_fields.items() if v.is_required()}
463
+ optional = model_fields - required
464
+ data_fields = set(data.keys())
465
+
466
+ missing_required = required - data_fields
467
+ extra_fields = data_fields - model_fields
468
+
469
+ try:
470
+ instance = model(**data)
471
+ return ValidationResult(
472
+ valid=True,
473
+ instance=instance,
474
+ required_fields=required,
475
+ optional_fields=optional,
476
+ extra_fields=extra_fields,
477
+ )
478
+ except ValidationError as e:
479
+ errors = []
480
+ for err in e.errors():
481
+ field = ".".join(str(p) for p in err["loc"])
482
+ if field not in missing_required: # Don't double-report missing
483
+ errors.append(f"{field}: {err['msg']}")
484
+
485
+ return ValidationResult(
486
+ valid=False,
487
+ errors=errors,
488
+ missing_required=missing_required,
489
+ extra_fields=extra_fields,
490
+ required_fields=required,
491
+ optional_fields=optional,
492
+ )
@@ -132,13 +132,51 @@ def _load_schema_from_database(schema_name: str, user_id: str) -> dict[str, Any]
132
132
  # Check if we're already in an async context
133
133
  try:
134
134
  loop = asyncio.get_running_loop()
135
- # We're in an async context - can't use asyncio.run()
136
- # This shouldn't happen in normal usage since load_agent_schema is called from sync contexts
137
- logger.warning(
138
- "Database schema lookup called from async context. "
139
- "This may cause issues. Consider using async version of load_agent_schema."
140
- )
141
- return None
135
+ # We're in an async context - use thread executor to run async code
136
+ import concurrent.futures
137
+
138
+ async def _async_lookup():
139
+ """Async helper to query database."""
140
+ from rem.services.postgres import get_postgres_service
141
+
142
+ db = get_postgres_service()
143
+ if not db:
144
+ logger.debug("PostgreSQL service not available for schema lookup")
145
+ return None
146
+
147
+ try:
148
+ await db.connect()
149
+
150
+ query = """
151
+ SELECT spec FROM schemas
152
+ WHERE LOWER(name) = LOWER($1)
153
+ AND (user_id = $2 OR user_id = 'system' OR user_id IS NULL)
154
+ LIMIT 1
155
+ """
156
+ logger.debug(f"Executing schema lookup: name={schema_name}, user_id={user_id}")
157
+
158
+ row = await db.fetchrow(query, schema_name, user_id)
159
+
160
+ if row:
161
+ spec = row.get("spec")
162
+ if spec and isinstance(spec, dict):
163
+ logger.debug(f"Found schema in database: {schema_name}")
164
+ return spec
165
+
166
+ logger.debug(f"Schema not found in database: {schema_name}")
167
+ return None
168
+
169
+ except Exception as e:
170
+ logger.debug(f"Database schema lookup error: {e}")
171
+ return None
172
+ finally:
173
+ await db.disconnect()
174
+
175
+ # Run in thread pool to avoid blocking the event loop
176
+ with concurrent.futures.ThreadPoolExecutor() as pool:
177
+ future = pool.submit(asyncio.run, _async_lookup())
178
+ return future.result(timeout=10)
179
+
142
180
  except RuntimeError:
143
181
  # Not in async context - safe to use asyncio.run()
144
182
  pass
@@ -253,14 +291,14 @@ def load_agent_schema(
253
291
 
254
292
  # Check cache first (only for package resources, not custom paths)
255
293
  path = Path(schema_name_or_path)
256
- is_custom_path = path.exists() or '/' in str(schema_name_or_path) or '\\' in str(schema_name_or_path)
294
+ is_custom_path = (path.exists() and path.is_file()) or '/' in str(schema_name_or_path) or '\\' in str(schema_name_or_path)
257
295
 
258
296
  if use_cache and not is_custom_path and cache_key in _fs_schema_cache:
259
297
  logger.debug(f"Loading schema from cache: {cache_key}")
260
298
  return _fs_schema_cache[cache_key]
261
299
 
262
- # 1. Try exact path first (absolute or relative to cwd)
263
- if path.exists():
300
+ # 1. Try exact path first (absolute or relative to cwd) - must be a file, not directory
301
+ if path.exists() and path.is_file():
264
302
  logger.debug(f"Loading schema from exact path: {path}")
265
303
  with open(path, "r") as f:
266
304
  schema = yaml.safe_load(f)
@@ -394,15 +432,15 @@ async def load_agent_schema_async(
394
432
  if cache_key.endswith('.yaml') or cache_key.endswith('.yml'):
395
433
  cache_key = cache_key.rsplit('.', 1)[0]
396
434
 
397
- is_custom_path = path.exists() or '/' in str(schema_name_or_path) or '\\' in str(schema_name_or_path)
435
+ is_custom_path = (path.exists() and path.is_file()) or '/' in str(schema_name_or_path) or '\\' in str(schema_name_or_path)
398
436
 
399
437
  # Check cache
400
438
  if not is_custom_path and cache_key in _fs_schema_cache:
401
439
  logger.debug(f"Loading schema from cache: {cache_key}")
402
440
  return _fs_schema_cache[cache_key]
403
441
 
404
- # Try exact path
405
- if path.exists():
442
+ # Try exact path (must be a file, not directory)
443
+ if path.exists() and path.is_file():
406
444
  logger.debug(f"Loading schema from exact path: {path}")
407
445
  with open(path, "r") as f:
408
446
  schema = yaml.safe_load(f)
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.4
2
2
  Name: remdb
3
- Version: 0.3.163
3
+ Version: 0.3.200
4
4
  Summary: Resources Entities Moments - Bio-inspired memory system for agentic AI workloads
5
5
  Project-URL: Homepage, https://github.com/Percolation-Labs/reminiscent
6
6
  Project-URL: Documentation, https://github.com/Percolation-Labs/reminiscent/blob/main/README.md