memorymaster 3.2.2__tar.gz → 3.3.1__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- {memorymaster-3.2.2/memorymaster.egg-info → memorymaster-3.3.1}/PKG-INFO +1 -1
- {memorymaster-3.2.2 → memorymaster-3.3.1}/memorymaster/__init__.py +1 -1
- {memorymaster-3.2.2 → memorymaster-3.3.1}/memorymaster/_storage_read.py +60 -0
- {memorymaster-3.2.2 → memorymaster-3.3.1}/memorymaster/_storage_schema.py +51 -15
- {memorymaster-3.2.2 → memorymaster-3.3.1}/memorymaster/cli.py +16 -0
- {memorymaster-3.2.2 → memorymaster-3.3.1}/memorymaster/cli_handlers_curation.py +80 -0
- memorymaster-3.3.1/memorymaster/entity_registry.py +255 -0
- {memorymaster-3.2.2 → memorymaster-3.3.1}/memorymaster/mcp_server.py +15 -2
- {memorymaster-3.2.2 → memorymaster-3.3.1}/memorymaster/models.py +11 -0
- {memorymaster-3.2.2 → memorymaster-3.3.1}/memorymaster/service.py +31 -0
- {memorymaster-3.2.2 → memorymaster-3.3.1}/memorymaster/storage.py +7 -0
- {memorymaster-3.2.2 → memorymaster-3.3.1/memorymaster.egg-info}/PKG-INFO +1 -1
- {memorymaster-3.2.2 → memorymaster-3.3.1}/memorymaster.egg-info/SOURCES.txt +1 -0
- {memorymaster-3.2.2 → memorymaster-3.3.1}/pyproject.toml +1 -1
- {memorymaster-3.2.2 → memorymaster-3.3.1}/LICENSE +0 -0
- {memorymaster-3.2.2 → memorymaster-3.3.1}/README.md +0 -0
- {memorymaster-3.2.2 → memorymaster-3.3.1}/benchmarks/longmemeval_runner.py +0 -0
- {memorymaster-3.2.2 → memorymaster-3.3.1}/benchmarks/longmemeval_vector_runner.py +0 -0
- {memorymaster-3.2.2 → memorymaster-3.3.1}/benchmarks/perf_smoke.py +0 -0
- {memorymaster-3.2.2 → memorymaster-3.3.1}/memorymaster/__main__.py +0 -0
- {memorymaster-3.2.2 → memorymaster-3.3.1}/memorymaster/_storage_lifecycle.py +0 -0
- {memorymaster-3.2.2 → memorymaster-3.3.1}/memorymaster/_storage_shared.py +0 -0
- {memorymaster-3.2.2 → memorymaster-3.3.1}/memorymaster/_storage_write_claims.py +0 -0
- {memorymaster-3.2.2 → memorymaster-3.3.1}/memorymaster/access_control.py +0 -0
- {memorymaster-3.2.2 → memorymaster-3.3.1}/memorymaster/auto_extractor.py +0 -0
- {memorymaster-3.2.2 → memorymaster-3.3.1}/memorymaster/auto_resolver.py +0 -0
- {memorymaster-3.2.2 → memorymaster-3.3.1}/memorymaster/claim_verifier.py +0 -0
- {memorymaster-3.2.2 → memorymaster-3.3.1}/memorymaster/cli_handlers_basic.py +0 -0
- {memorymaster-3.2.2 → memorymaster-3.3.1}/memorymaster/cli_helpers.py +0 -0
- {memorymaster-3.2.2 → memorymaster-3.3.1}/memorymaster/config.py +0 -0
- {memorymaster-3.2.2 → memorymaster-3.3.1}/memorymaster/config_templates/claude-md-append.md +0 -0
- {memorymaster-3.2.2 → memorymaster-3.3.1}/memorymaster/config_templates/codex-agents-md-append.md +0 -0
- {memorymaster-3.2.2 → memorymaster-3.3.1}/memorymaster/config_templates/hooks/memorymaster-auto-ingest.py +0 -0
- {memorymaster-3.2.2 → memorymaster-3.3.1}/memorymaster/config_templates/hooks/memorymaster-classify.py +0 -0
- {memorymaster-3.2.2 → memorymaster-3.3.1}/memorymaster/config_templates/hooks/memorymaster-precompact.py +0 -0
- {memorymaster-3.2.2 → memorymaster-3.3.1}/memorymaster/config_templates/hooks/memorymaster-recall.py +0 -0
- {memorymaster-3.2.2 → memorymaster-3.3.1}/memorymaster/config_templates/hooks/memorymaster-session-start.py +0 -0
- {memorymaster-3.2.2 → memorymaster-3.3.1}/memorymaster/config_templates/hooks/memorymaster-steward-cycle.py +0 -0
- {memorymaster-3.2.2 → memorymaster-3.3.1}/memorymaster/config_templates/hooks/memorymaster-validate-wiki.py +0 -0
- {memorymaster-3.2.2 → memorymaster-3.3.1}/memorymaster/conflict_resolver.py +0 -0
- {memorymaster-3.2.2 → memorymaster-3.3.1}/memorymaster/context_hook.py +0 -0
- {memorymaster-3.2.2 → memorymaster-3.3.1}/memorymaster/context_optimizer.py +0 -0
- {memorymaster-3.2.2 → memorymaster-3.3.1}/memorymaster/daily_notes.py +0 -0
- {memorymaster-3.2.2 → memorymaster-3.3.1}/memorymaster/dashboard.py +0 -0
- {memorymaster-3.2.2 → memorymaster-3.3.1}/memorymaster/db_merge.py +0 -0
- {memorymaster-3.2.2 → memorymaster-3.3.1}/memorymaster/dream_bridge.py +0 -0
- {memorymaster-3.2.2 → memorymaster-3.3.1}/memorymaster/embeddings.py +0 -0
- {memorymaster-3.2.2 → memorymaster-3.3.1}/memorymaster/entity_graph.py +0 -0
- {memorymaster-3.2.2 → memorymaster-3.3.1}/memorymaster/feedback.py +0 -0
- {memorymaster-3.2.2 → memorymaster-3.3.1}/memorymaster/jobs/__init__.py +0 -0
- {memorymaster-3.2.2 → memorymaster-3.3.1}/memorymaster/jobs/compact_summaries.py +0 -0
- {memorymaster-3.2.2 → memorymaster-3.3.1}/memorymaster/jobs/compactor.py +0 -0
- {memorymaster-3.2.2 → memorymaster-3.3.1}/memorymaster/jobs/decay.py +0 -0
- {memorymaster-3.2.2 → memorymaster-3.3.1}/memorymaster/jobs/dedup.py +0 -0
- {memorymaster-3.2.2 → memorymaster-3.3.1}/memorymaster/jobs/deterministic.py +0 -0
- {memorymaster-3.2.2 → memorymaster-3.3.1}/memorymaster/jobs/extractor.py +0 -0
- {memorymaster-3.2.2 → memorymaster-3.3.1}/memorymaster/jobs/staleness.py +0 -0
- {memorymaster-3.2.2 → memorymaster-3.3.1}/memorymaster/jobs/validator.py +0 -0
- {memorymaster-3.2.2 → memorymaster-3.3.1}/memorymaster/lifecycle.py +0 -0
- {memorymaster-3.2.2 → memorymaster-3.3.1}/memorymaster/llm_provider.py +0 -0
- {memorymaster-3.2.2 → memorymaster-3.3.1}/memorymaster/llm_steward.py +0 -0
- {memorymaster-3.2.2 → memorymaster-3.3.1}/memorymaster/metrics_exporter.py +0 -0
- {memorymaster-3.2.2 → memorymaster-3.3.1}/memorymaster/operator.py +0 -0
- {memorymaster-3.2.2 → memorymaster-3.3.1}/memorymaster/operator_queue.py +0 -0
- {memorymaster-3.2.2 → memorymaster-3.3.1}/memorymaster/plugins.py +0 -0
- {memorymaster-3.2.2 → memorymaster-3.3.1}/memorymaster/policy.py +0 -0
- {memorymaster-3.2.2 → memorymaster-3.3.1}/memorymaster/postgres_store.py +0 -0
- {memorymaster-3.2.2 → memorymaster-3.3.1}/memorymaster/qdrant_backend.py +0 -0
- {memorymaster-3.2.2 → memorymaster-3.3.1}/memorymaster/qmd_bridge.py +0 -0
- {memorymaster-3.2.2 → memorymaster-3.3.1}/memorymaster/query_classifier.py +0 -0
- {memorymaster-3.2.2 → memorymaster-3.3.1}/memorymaster/retrieval.py +0 -0
- {memorymaster-3.2.2 → memorymaster-3.3.1}/memorymaster/retry.py +0 -0
- {memorymaster-3.2.2 → memorymaster-3.3.1}/memorymaster/review.py +0 -0
- {memorymaster-3.2.2 → memorymaster-3.3.1}/memorymaster/rl_trainer.py +0 -0
- {memorymaster-3.2.2 → memorymaster-3.3.1}/memorymaster/scheduler.py +0 -0
- {memorymaster-3.2.2 → memorymaster-3.3.1}/memorymaster/schema.py +0 -0
- {memorymaster-3.2.2 → memorymaster-3.3.1}/memorymaster/schema.sql +0 -0
- {memorymaster-3.2.2 → memorymaster-3.3.1}/memorymaster/schema_postgres.sql +0 -0
- {memorymaster-3.2.2 → memorymaster-3.3.1}/memorymaster/security.py +0 -0
- {memorymaster-3.2.2 → memorymaster-3.3.1}/memorymaster/session_tracker.py +0 -0
- {memorymaster-3.2.2 → memorymaster-3.3.1}/memorymaster/setup_hooks.py +0 -0
- {memorymaster-3.2.2 → memorymaster-3.3.1}/memorymaster/skill_evolver.py +0 -0
- {memorymaster-3.2.2 → memorymaster-3.3.1}/memorymaster/snapshot.py +0 -0
- {memorymaster-3.2.2 → memorymaster-3.3.1}/memorymaster/steward.py +0 -0
- {memorymaster-3.2.2 → memorymaster-3.3.1}/memorymaster/store_factory.py +0 -0
- {memorymaster-3.2.2 → memorymaster-3.3.1}/memorymaster/transcript_miner.py +0 -0
- {memorymaster-3.2.2 → memorymaster-3.3.1}/memorymaster/turn_schema.py +0 -0
- {memorymaster-3.2.2 → memorymaster-3.3.1}/memorymaster/vault_bases.py +0 -0
- {memorymaster-3.2.2 → memorymaster-3.3.1}/memorymaster/vault_curator.py +0 -0
- {memorymaster-3.2.2 → memorymaster-3.3.1}/memorymaster/vault_exporter.py +0 -0
- {memorymaster-3.2.2 → memorymaster-3.3.1}/memorymaster/vault_linter.py +0 -0
- {memorymaster-3.2.2 → memorymaster-3.3.1}/memorymaster/vault_log.py +0 -0
- {memorymaster-3.2.2 → memorymaster-3.3.1}/memorymaster/vault_query_capture.py +0 -0
- {memorymaster-3.2.2 → memorymaster-3.3.1}/memorymaster/vault_synthesis.py +0 -0
- {memorymaster-3.2.2 → memorymaster-3.3.1}/memorymaster/verbatim_store.py +0 -0
- {memorymaster-3.2.2 → memorymaster-3.3.1}/memorymaster/webhook.py +0 -0
- {memorymaster-3.2.2 → memorymaster-3.3.1}/memorymaster/wiki_engine.py +0 -0
- {memorymaster-3.2.2 → memorymaster-3.3.1}/memorymaster.egg-info/dependency_links.txt +0 -0
- {memorymaster-3.2.2 → memorymaster-3.3.1}/memorymaster.egg-info/entry_points.txt +0 -0
- {memorymaster-3.2.2 → memorymaster-3.3.1}/memorymaster.egg-info/requires.txt +0 -0
- {memorymaster-3.2.2 → memorymaster-3.3.1}/memorymaster.egg-info/top_level.txt +0 -0
- {memorymaster-3.2.2 → memorymaster-3.3.1}/scripts/alert_operator_metrics.py +0 -0
- {memorymaster-3.2.2 → memorymaster-3.3.1}/scripts/autoresearch_daemon.py +0 -0
- {memorymaster-3.2.2 → memorymaster-3.3.1}/scripts/claude_to_turns.py +0 -0
- {memorymaster-3.2.2 → memorymaster-3.3.1}/scripts/codex_live_to_turns.py +0 -0
- {memorymaster-3.2.2 → memorymaster-3.3.1}/scripts/compaction_edge_cases.py +0 -0
- {memorymaster-3.2.2 → memorymaster-3.3.1}/scripts/compaction_trace_report.py +0 -0
- {memorymaster-3.2.2 → memorymaster-3.3.1}/scripts/compaction_trace_validate.py +0 -0
- {memorymaster-3.2.2 → memorymaster-3.3.1}/scripts/confusion_matrix_eval.py +0 -0
- {memorymaster-3.2.2 → memorymaster-3.3.1}/scripts/conversation_importer.py +0 -0
- {memorymaster-3.2.2 → memorymaster-3.3.1}/scripts/conversation_to_turns.py +0 -0
- {memorymaster-3.2.2 → memorymaster-3.3.1}/scripts/e2e_operator.py +0 -0
- {memorymaster-3.2.2 → memorymaster-3.3.1}/scripts/email_live_to_turns.py +0 -0
- {memorymaster-3.2.2 → memorymaster-3.3.1}/scripts/eval_memorymaster.py +0 -0
- {memorymaster-3.2.2 → memorymaster-3.3.1}/scripts/generate_drill_signoff.py +0 -0
- {memorymaster-3.2.2 → memorymaster-3.3.1}/scripts/git_to_turns.py +0 -0
- {memorymaster-3.2.2 → memorymaster-3.3.1}/scripts/github_live_to_turns.py +0 -0
- {memorymaster-3.2.2 → memorymaster-3.3.1}/scripts/gitnexus_to_claims.py +0 -0
- {memorymaster-3.2.2 → memorymaster-3.3.1}/scripts/ingest_planning_docs.py +0 -0
- {memorymaster-3.2.2 → memorymaster-3.3.1}/scripts/jira_live_to_turns.py +0 -0
- {memorymaster-3.2.2 → memorymaster-3.3.1}/scripts/messages_to_turns.py +0 -0
- {memorymaster-3.2.2 → memorymaster-3.3.1}/scripts/operator_metrics.py +0 -0
- {memorymaster-3.2.2 → memorymaster-3.3.1}/scripts/recurring_incident_drill.py +0 -0
- {memorymaster-3.2.2 → memorymaster-3.3.1}/scripts/release_readiness.py +0 -0
- {memorymaster-3.2.2 → memorymaster-3.3.1}/scripts/run_codex_autologger.py +0 -0
- {memorymaster-3.2.2 → memorymaster-3.3.1}/scripts/run_incident_drill.py +0 -0
- {memorymaster-3.2.2 → memorymaster-3.3.1}/scripts/scheduled_ingest.py +0 -0
- {memorymaster-3.2.2 → memorymaster-3.3.1}/scripts/setup-hooks.py +0 -0
- {memorymaster-3.2.2 → memorymaster-3.3.1}/scripts/slack_live_to_turns.py +0 -0
- {memorymaster-3.2.2 → memorymaster-3.3.1}/scripts/tickets_to_turns.py +0 -0
- {memorymaster-3.2.2 → memorymaster-3.3.1}/scripts/webhook_to_turns.py +0 -0
- {memorymaster-3.2.2 → memorymaster-3.3.1}/setup.cfg +0 -0
- {memorymaster-3.2.2 → memorymaster-3.3.1}/tests/conftest.py +0 -0
- {memorymaster-3.2.2 → memorymaster-3.3.1}/tests/test_access_control.py +0 -0
- {memorymaster-3.2.2 → memorymaster-3.3.1}/tests/test_auto_extractor.py +0 -0
- {memorymaster-3.2.2 → memorymaster-3.3.1}/tests/test_auto_resolver.py +0 -0
- {memorymaster-3.2.2 → memorymaster-3.3.1}/tests/test_auto_validate.py +0 -0
- {memorymaster-3.2.2 → memorymaster-3.3.1}/tests/test_claim_links.py +0 -0
- {memorymaster-3.2.2 → memorymaster-3.3.1}/tests/test_claude_to_turns.py +0 -0
- {memorymaster-3.2.2 → memorymaster-3.3.1}/tests/test_cli_json_flag.py +0 -0
- {memorymaster-3.2.2 → memorymaster-3.3.1}/tests/test_cli_ready.py +0 -0
- {memorymaster-3.2.2 → memorymaster-3.3.1}/tests/test_cli_review_queue.py +0 -0
- {memorymaster-3.2.2 → memorymaster-3.3.1}/tests/test_cli_subcommands.py +0 -0
- {memorymaster-3.2.2 → memorymaster-3.3.1}/tests/test_compact_summaries.py +0 -0
- {memorymaster-3.2.2 → memorymaster-3.3.1}/tests/test_compaction_trace.py +0 -0
- {memorymaster-3.2.2 → memorymaster-3.3.1}/tests/test_config.py +0 -0
- {memorymaster-3.2.2 → memorymaster-3.3.1}/tests/test_conflict_resolver.py +0 -0
- {memorymaster-3.2.2 → memorymaster-3.3.1}/tests/test_confusion_matrix_eval.py +0 -0
- {memorymaster-3.2.2 → memorymaster-3.3.1}/tests/test_connection_retry.py +0 -0
- {memorymaster-3.2.2 → memorymaster-3.3.1}/tests/test_connectors.py +0 -0
- {memorymaster-3.2.2 → memorymaster-3.3.1}/tests/test_context_hook.py +0 -0
- {memorymaster-3.2.2 → memorymaster-3.3.1}/tests/test_context_optimizer.py +0 -0
- {memorymaster-3.2.2 → memorymaster-3.3.1}/tests/test_conversation_to_turns.py +0 -0
- {memorymaster-3.2.2 → memorymaster-3.3.1}/tests/test_dashboard.py +0 -0
- {memorymaster-3.2.2 → memorymaster-3.3.1}/tests/test_dedup.py +0 -0
- {memorymaster-3.2.2 → memorymaster-3.3.1}/tests/test_deterministic_predicates.py +0 -0
- {memorymaster-3.2.2 → memorymaster-3.3.1}/tests/test_embeddings_coverage.py +0 -0
- {memorymaster-3.2.2 → memorymaster-3.3.1}/tests/test_entity_graph.py +0 -0
- {memorymaster-3.2.2 → memorymaster-3.3.1}/tests/test_events_schema.py +0 -0
- {memorymaster-3.2.2 → memorymaster-3.3.1}/tests/test_feedback.py +0 -0
- {memorymaster-3.2.2 → memorymaster-3.3.1}/tests/test_fts5_search.py +0 -0
- {memorymaster-3.2.2 → memorymaster-3.3.1}/tests/test_handler_regressions.py +0 -0
- {memorymaster-3.2.2 → memorymaster-3.3.1}/tests/test_human_id.py +0 -0
- {memorymaster-3.2.2 → memorymaster-3.3.1}/tests/test_incident_drill_runner.py +0 -0
- {memorymaster-3.2.2 → memorymaster-3.3.1}/tests/test_integration_workflows.py +0 -0
- {memorymaster-3.2.2 → memorymaster-3.3.1}/tests/test_lifecycle.py +0 -0
- {memorymaster-3.2.2 → memorymaster-3.3.1}/tests/test_llm_steward_coverage.py +0 -0
- {memorymaster-3.2.2 → memorymaster-3.3.1}/tests/test_llm_steward_key_rotation.py +0 -0
- {memorymaster-3.2.2 → memorymaster-3.3.1}/tests/test_mcp_helpers.py +0 -0
- {memorymaster-3.2.2 → memorymaster-3.3.1}/tests/test_metrics_exporter.py +0 -0
- {memorymaster-3.2.2 → memorymaster-3.3.1}/tests/test_obsidian_mind_patterns.py +0 -0
- {memorymaster-3.2.2 → memorymaster-3.3.1}/tests/test_operator.py +0 -0
- {memorymaster-3.2.2 → memorymaster-3.3.1}/tests/test_operator_queue.py +0 -0
- {memorymaster-3.2.2 → memorymaster-3.3.1}/tests/test_perf_smoke_config.py +0 -0
- {memorymaster-3.2.2 → memorymaster-3.3.1}/tests/test_plugins.py +0 -0
- {memorymaster-3.2.2 → memorymaster-3.3.1}/tests/test_policy_coverage.py +0 -0
- {memorymaster-3.2.2 → memorymaster-3.3.1}/tests/test_postgres_parity.py +0 -0
- {memorymaster-3.2.2 → memorymaster-3.3.1}/tests/test_qdrant_backend.py +0 -0
- {memorymaster-3.2.2 → memorymaster-3.3.1}/tests/test_qmd_bridge.py +0 -0
- {memorymaster-3.2.2 → memorymaster-3.3.1}/tests/test_query_classifier.py +0 -0
- {memorymaster-3.2.2 → memorymaster-3.3.1}/tests/test_reliability_hardening.py +0 -0
- {memorymaster-3.2.2 → memorymaster-3.3.1}/tests/test_review.py +0 -0
- {memorymaster-3.2.2 → memorymaster-3.3.1}/tests/test_rl_trainer.py +0 -0
- {memorymaster-3.2.2 → memorymaster-3.3.1}/tests/test_scheduler.py +0 -0
- {memorymaster-3.2.2 → memorymaster-3.3.1}/tests/test_schema.py +0 -0
- {memorymaster-3.2.2 → memorymaster-3.3.1}/tests/test_security_access.py +0 -0
- {memorymaster-3.2.2 → memorymaster-3.3.1}/tests/test_security_patterns.py +0 -0
- {memorymaster-3.2.2 → memorymaster-3.3.1}/tests/test_service_coverage.py +0 -0
- {memorymaster-3.2.2 → memorymaster-3.3.1}/tests/test_session_tracker.py +0 -0
- {memorymaster-3.2.2 → memorymaster-3.3.1}/tests/test_snapshot.py +0 -0
- {memorymaster-3.2.2 → memorymaster-3.3.1}/tests/test_sqlite_core.py +0 -0
- {memorymaster-3.2.2 → memorymaster-3.3.1}/tests/test_staleness.py +0 -0
- {memorymaster-3.2.2 → memorymaster-3.3.1}/tests/test_stealth_mode.py +0 -0
- {memorymaster-3.2.2 → memorymaster-3.3.1}/tests/test_steward.py +0 -0
- {memorymaster-3.2.2 → memorymaster-3.3.1}/tests/test_steward_resolution_parity.py +0 -0
- {memorymaster-3.2.2 → memorymaster-3.3.1}/tests/test_store_factory.py +0 -0
- {memorymaster-3.2.2 → memorymaster-3.3.1}/tests/test_tenant_isolation.py +0 -0
- {memorymaster-3.2.2 → memorymaster-3.3.1}/tests/test_turn_schema.py +0 -0
- {memorymaster-3.2.2 → memorymaster-3.3.1}/tests/test_vault_exporter.py +0 -0
- {memorymaster-3.2.2 → memorymaster-3.3.1}/tests/test_vector_search.py +0 -0
- {memorymaster-3.2.2 → memorymaster-3.3.1}/tests/test_webhook.py +0 -0
|
@@ -1,6 +1,6 @@
|
|
|
1
1
|
Metadata-Version: 2.4
|
|
2
2
|
Name: memorymaster
|
|
3
|
-
Version: 3.
|
|
3
|
+
Version: 3.3.1
|
|
4
4
|
Summary: Production-grade memory reliability system for AI coding agents. Lifecycle-managed claims with citations, conflict detection, steward governance, and MCP integration.
|
|
5
5
|
Author: wolverin0
|
|
6
6
|
License: MIT
|
|
@@ -590,3 +590,63 @@ class _ReadMixin:
|
|
|
590
590
|
).fetchall()
|
|
591
591
|
return [self._row_to_citation(row) for row in rows]
|
|
592
592
|
|
|
593
|
+
def traverse_relationships(
|
|
594
|
+
self,
|
|
595
|
+
start_claim_id: int,
|
|
596
|
+
*,
|
|
597
|
+
link_types: list[str] | None = None,
|
|
598
|
+
max_depth: int = 3,
|
|
599
|
+
direction: str = "both",
|
|
600
|
+
) -> list[dict]:
|
|
601
|
+
"""Traverse the claim relationship graph from a starting claim.
|
|
602
|
+
|
|
603
|
+
Returns a list of dicts: [{"claim": Claim, "depth": int, "path": [int],
|
|
604
|
+
"link_type": str}]. BFS traversal, stops at max_depth. direction can be
|
|
605
|
+
"outgoing" (source→target), "incoming" (target→source), or "both".
|
|
606
|
+
|
|
607
|
+
Inspired by GBrain's graph traversal queries — "what depends on Qdrant?"
|
|
608
|
+
becomes traverse_relationships(qdrant_claim_id, link_types=["depends_on"]).
|
|
609
|
+
"""
|
|
610
|
+
with self.connect() as conn:
|
|
611
|
+
visited: set[int] = {start_claim_id}
|
|
612
|
+
queue: list[tuple[int, int, list[int], str]] = [] # (claim_id, depth, path, via_link_type)
|
|
613
|
+
|
|
614
|
+
# Seed with depth-0 neighbors
|
|
615
|
+
def _get_neighbors(claim_id: int) -> list[tuple[int, str]]:
|
|
616
|
+
neighbors: list[tuple[int, str]] = []
|
|
617
|
+
if direction in ("outgoing", "both"):
|
|
618
|
+
q = "SELECT target_id, link_type FROM claim_links WHERE source_id = ?"
|
|
619
|
+
for row in conn.execute(q, (claim_id,)).fetchall():
|
|
620
|
+
if link_types is None or row[1] in link_types:
|
|
621
|
+
neighbors.append((row[0], row[1]))
|
|
622
|
+
if direction in ("incoming", "both"):
|
|
623
|
+
q = "SELECT source_id, link_type FROM claim_links WHERE target_id = ?"
|
|
624
|
+
for row in conn.execute(q, (claim_id,)).fetchall():
|
|
625
|
+
if link_types is None or row[1] in link_types:
|
|
626
|
+
neighbors.append((row[0], row[1]))
|
|
627
|
+
return neighbors
|
|
628
|
+
|
|
629
|
+
for neighbor_id, link_type in _get_neighbors(start_claim_id):
|
|
630
|
+
if neighbor_id not in visited:
|
|
631
|
+
visited.add(neighbor_id)
|
|
632
|
+
queue.append((neighbor_id, 1, [start_claim_id, neighbor_id], link_type))
|
|
633
|
+
|
|
634
|
+
results: list[dict] = []
|
|
635
|
+
while queue:
|
|
636
|
+
cid, depth, path, via_type = queue.pop(0)
|
|
637
|
+
claim = self.get_claim(cid, include_citations=False)
|
|
638
|
+
if claim:
|
|
639
|
+
results.append({
|
|
640
|
+
"claim": claim,
|
|
641
|
+
"depth": depth,
|
|
642
|
+
"path": path,
|
|
643
|
+
"link_type": via_type,
|
|
644
|
+
})
|
|
645
|
+
if depth < max_depth:
|
|
646
|
+
for neighbor_id, link_type in _get_neighbors(cid):
|
|
647
|
+
if neighbor_id not in visited:
|
|
648
|
+
visited.add(neighbor_id)
|
|
649
|
+
queue.append((neighbor_id, depth + 1, path + [neighbor_id], link_type))
|
|
650
|
+
|
|
651
|
+
return results
|
|
652
|
+
|
|
@@ -262,21 +262,57 @@ class _SchemaMixin:
|
|
|
262
262
|
|
|
263
263
|
@staticmethod
|
|
264
264
|
def _ensure_claim_links_schema(conn: sqlite3.Connection) -> None:
|
|
265
|
-
|
|
266
|
-
|
|
267
|
-
|
|
268
|
-
|
|
269
|
-
|
|
270
|
-
|
|
271
|
-
|
|
272
|
-
|
|
273
|
-
|
|
274
|
-
|
|
275
|
-
|
|
276
|
-
|
|
277
|
-
|
|
278
|
-
|
|
279
|
-
|
|
265
|
+
from memorymaster.models import CLAIM_LINK_TYPES
|
|
266
|
+
|
|
267
|
+
# Build the CHECK constraint from the canonical CLAIM_LINK_TYPES tuple
|
|
268
|
+
# so new types only need to be added in models.py.
|
|
269
|
+
types_sql = ", ".join(f"'{t}'" for t in CLAIM_LINK_TYPES)
|
|
270
|
+
check_clause = f"CHECK (link_type IN ({types_sql}))"
|
|
271
|
+
|
|
272
|
+
# Check if table exists and whether it needs migration (old CHECK with only 5 types)
|
|
273
|
+
existing = conn.execute(
|
|
274
|
+
"SELECT sql FROM sqlite_master WHERE type='table' AND name='claim_links'"
|
|
275
|
+
).fetchone()
|
|
276
|
+
|
|
277
|
+
if existing and existing[0]:
|
|
278
|
+
# Table exists — check if it has the old 5-type CHECK
|
|
279
|
+
if "'implements'" not in existing[0]:
|
|
280
|
+
# Migrate: rename old → create new → copy → drop old
|
|
281
|
+
conn.execute("ALTER TABLE claim_links RENAME TO _claim_links_old")
|
|
282
|
+
conn.execute(f"""
|
|
283
|
+
CREATE TABLE claim_links (
|
|
284
|
+
id INTEGER PRIMARY KEY AUTOINCREMENT,
|
|
285
|
+
source_id INTEGER NOT NULL,
|
|
286
|
+
target_id INTEGER NOT NULL,
|
|
287
|
+
link_type TEXT NOT NULL,
|
|
288
|
+
created_at TEXT NOT NULL,
|
|
289
|
+
FOREIGN KEY (source_id) REFERENCES claims(id) ON DELETE CASCADE,
|
|
290
|
+
FOREIGN KEY (target_id) REFERENCES claims(id) ON DELETE CASCADE,
|
|
291
|
+
CHECK (source_id <> target_id),
|
|
292
|
+
{check_clause}
|
|
293
|
+
)
|
|
294
|
+
""")
|
|
295
|
+
conn.execute("""
|
|
296
|
+
INSERT INTO claim_links (id, source_id, target_id, link_type, created_at)
|
|
297
|
+
SELECT id, source_id, target_id, link_type, created_at FROM _claim_links_old
|
|
298
|
+
""")
|
|
299
|
+
conn.execute("DROP TABLE _claim_links_old")
|
|
300
|
+
else:
|
|
301
|
+
# Fresh creation
|
|
302
|
+
conn.execute(f"""
|
|
303
|
+
CREATE TABLE IF NOT EXISTS claim_links (
|
|
304
|
+
id INTEGER PRIMARY KEY AUTOINCREMENT,
|
|
305
|
+
source_id INTEGER NOT NULL,
|
|
306
|
+
target_id INTEGER NOT NULL,
|
|
307
|
+
link_type TEXT NOT NULL,
|
|
308
|
+
created_at TEXT NOT NULL,
|
|
309
|
+
FOREIGN KEY (source_id) REFERENCES claims(id) ON DELETE CASCADE,
|
|
310
|
+
FOREIGN KEY (target_id) REFERENCES claims(id) ON DELETE CASCADE,
|
|
311
|
+
CHECK (source_id <> target_id),
|
|
312
|
+
{check_clause}
|
|
313
|
+
)
|
|
314
|
+
""")
|
|
315
|
+
|
|
280
316
|
conn.execute(
|
|
281
317
|
"CREATE UNIQUE INDEX IF NOT EXISTS idx_claim_links_unique ON claim_links(source_id, target_id, link_type)"
|
|
282
318
|
)
|
|
@@ -375,6 +375,22 @@ def build_parser() -> argparse.ArgumentParser:
|
|
|
375
375
|
dream_clean_cmd.add_argument("--project", default=None, help="Project path to compute Claude Code memory dir slug")
|
|
376
376
|
dream_clean_cmd.add_argument("--dry-run", action="store_true", help="Preview what would be removed without deleting files")
|
|
377
377
|
|
|
378
|
+
# Entity registry (GBrain-inspired)
|
|
379
|
+
entity_list = sub.add_parser("entity-list", help="List canonical entities with alias and claim counts")
|
|
380
|
+
entity_list.add_argument("--scope", default="", help="Filter by scope prefix")
|
|
381
|
+
entity_list.add_argument("--type", default="", help="Filter by entity type")
|
|
382
|
+
entity_list.add_argument("--limit", type=int, default=50)
|
|
383
|
+
|
|
384
|
+
entity_merge = sub.add_parser("entity-merge", help="Merge two entities (move aliases + claims to target)")
|
|
385
|
+
entity_merge.add_argument("keep_id", type=int, help="Entity ID to keep")
|
|
386
|
+
entity_merge.add_argument("merge_id", type=int, help="Entity ID to merge into keep_id")
|
|
387
|
+
|
|
388
|
+
entity_aliases_cmd = sub.add_parser("entity-aliases", help="List or add aliases for an entity")
|
|
389
|
+
entity_aliases_cmd.add_argument("entity_id", type=int, help="Entity ID")
|
|
390
|
+
entity_aliases_cmd.add_argument("--add", default="", help="Add this alias to the entity")
|
|
391
|
+
|
|
392
|
+
entity_backfill = sub.add_parser("entity-backfill", help="Backfill entity_id on claims with subject but no entity")
|
|
393
|
+
|
|
378
394
|
return parser
|
|
379
395
|
|
|
380
396
|
|
|
@@ -673,3 +673,83 @@ COMMAND_HANDLERS["dream-seed"] = _handle_dream_seed
|
|
|
673
673
|
COMMAND_HANDLERS["dream-ingest"] = _handle_dream_ingest
|
|
674
674
|
COMMAND_HANDLERS["dream-sync"] = _handle_dream_sync
|
|
675
675
|
COMMAND_HANDLERS["dream-clean"] = _handle_dream_clean
|
|
676
|
+
|
|
677
|
+
|
|
678
|
+
# ---------------------------------------------------------------------------
|
|
679
|
+
# Entity registry (GBrain-inspired)
|
|
680
|
+
# ---------------------------------------------------------------------------
|
|
681
|
+
|
|
682
|
+
def _handle_entity_list(args, service, parser, effective_db) -> int:
|
|
683
|
+
from memorymaster.entity_registry import list_entities
|
|
684
|
+
t0 = time.perf_counter()
|
|
685
|
+
with service.store.connect() as conn:
|
|
686
|
+
entities = list_entities(
|
|
687
|
+
conn,
|
|
688
|
+
scope=args.scope or None,
|
|
689
|
+
entity_type=getattr(args, "type", "") or None,
|
|
690
|
+
limit=args.limit,
|
|
691
|
+
)
|
|
692
|
+
elapsed_ms = (time.perf_counter() - t0) * 1000
|
|
693
|
+
if args.json_output:
|
|
694
|
+
print(_json_envelope(entities, total=len(entities), query_ms=elapsed_ms))
|
|
695
|
+
else:
|
|
696
|
+
print(f"Entities ({len(entities)}):")
|
|
697
|
+
for e in entities:
|
|
698
|
+
print(f" #{e['id']} [{e['type']}] {e['name']} — {e['alias_count']} aliases, {e['claim_count']} claims (scope={e['scope']})")
|
|
699
|
+
return 0
|
|
700
|
+
|
|
701
|
+
|
|
702
|
+
def _handle_entity_merge(args, service, parser, effective_db) -> int:
|
|
703
|
+
from memorymaster.entity_registry import merge_entities
|
|
704
|
+
t0 = time.perf_counter()
|
|
705
|
+
with service.store.connect() as conn:
|
|
706
|
+
result = merge_entities(conn, args.keep_id, args.merge_id)
|
|
707
|
+
conn.commit()
|
|
708
|
+
elapsed_ms = (time.perf_counter() - t0) * 1000
|
|
709
|
+
if args.json_output:
|
|
710
|
+
print(_json_envelope(result, query_ms=elapsed_ms))
|
|
711
|
+
else:
|
|
712
|
+
print(f"Merged entity #{args.merge_id} → #{args.keep_id}: {result['merged_aliases']} aliases, {result['updated_claims']} claims moved ({elapsed_ms:.0f}ms)")
|
|
713
|
+
return 0
|
|
714
|
+
|
|
715
|
+
|
|
716
|
+
def _handle_entity_aliases(args, service, parser, effective_db) -> int:
|
|
717
|
+
from memorymaster.entity_registry import get_aliases, add_alias
|
|
718
|
+
t0 = time.perf_counter()
|
|
719
|
+
with service.store.connect() as conn:
|
|
720
|
+
if args.add:
|
|
721
|
+
added = add_alias(conn, args.entity_id, args.add)
|
|
722
|
+
conn.commit()
|
|
723
|
+
if args.json_output:
|
|
724
|
+
print(_json_envelope({"added": added, "alias": args.add}, query_ms=(time.perf_counter() - t0) * 1000))
|
|
725
|
+
else:
|
|
726
|
+
print(f"{'Added' if added else 'Already exists'}: '{args.add}' → entity #{args.entity_id}")
|
|
727
|
+
else:
|
|
728
|
+
aliases = get_aliases(conn, args.entity_id)
|
|
729
|
+
elapsed_ms = (time.perf_counter() - t0) * 1000
|
|
730
|
+
if args.json_output:
|
|
731
|
+
print(_json_envelope(aliases, total=len(aliases), query_ms=elapsed_ms))
|
|
732
|
+
else:
|
|
733
|
+
print(f"Aliases for entity #{args.entity_id} ({len(aliases)}):")
|
|
734
|
+
for a in aliases:
|
|
735
|
+
print(f" - {a}")
|
|
736
|
+
return 0
|
|
737
|
+
|
|
738
|
+
|
|
739
|
+
def _handle_entity_backfill(args, service, parser, effective_db) -> int:
|
|
740
|
+
from memorymaster.entity_registry import backfill_entities
|
|
741
|
+
t0 = time.perf_counter()
|
|
742
|
+
with service.store.connect() as conn:
|
|
743
|
+
result = backfill_entities(conn)
|
|
744
|
+
elapsed_ms = (time.perf_counter() - t0) * 1000
|
|
745
|
+
if args.json_output:
|
|
746
|
+
print(_json_envelope(result, query_ms=elapsed_ms))
|
|
747
|
+
else:
|
|
748
|
+
print(f"Backfill: {result['entities_created']} entities created, {result['claims_resolved']} claims resolved ({result['subjects_processed']} subjects processed, {elapsed_ms:.0f}ms)")
|
|
749
|
+
return 0
|
|
750
|
+
|
|
751
|
+
|
|
752
|
+
COMMAND_HANDLERS["entity-list"] = _handle_entity_list
|
|
753
|
+
COMMAND_HANDLERS["entity-merge"] = _handle_entity_merge
|
|
754
|
+
COMMAND_HANDLERS["entity-aliases"] = _handle_entity_aliases
|
|
755
|
+
COMMAND_HANDLERS["entity-backfill"] = _handle_entity_backfill
|
|
@@ -0,0 +1,255 @@
|
|
|
1
|
+
"""Entity Registry — canonical entities with alias resolution.
|
|
2
|
+
|
|
3
|
+
Inspired by GBrain's entity registry pattern: every subject string resolves
|
|
4
|
+
to a canonical entity so that "MemoryMaster", "memorymaster", "MM" all point
|
|
5
|
+
to the same node. This turns the flat claims DB into a real knowledge graph.
|
|
6
|
+
|
|
7
|
+
Tables:
|
|
8
|
+
- entities: canonical entity (id, name, type, scope, created/updated)
|
|
9
|
+
- entity_aliases: maps normalized alias strings → entity id
|
|
10
|
+
|
|
11
|
+
Resolution flow (on ingest):
|
|
12
|
+
1. Normalize subject string (lowercase, strip, collapse separators)
|
|
13
|
+
2. Look up in entity_aliases
|
|
14
|
+
3. If found → return canonical entity_id
|
|
15
|
+
4. If not found → create new entity + register the alias
|
|
16
|
+
5. Store entity_id on the claim
|
|
17
|
+
|
|
18
|
+
The claim.subject column stays as free-text for display; entity_id is the
|
|
19
|
+
canonical FK used for grouping and traversal.
|
|
20
|
+
"""
|
|
21
|
+
from __future__ import annotations
|
|
22
|
+
|
|
23
|
+
import logging
|
|
24
|
+
import re
|
|
25
|
+
import sqlite3
|
|
26
|
+
from datetime import datetime, timezone
|
|
27
|
+
|
|
28
|
+
logger = logging.getLogger(__name__)
|
|
29
|
+
|
|
30
|
+
_NORMALIZE_RE = re.compile(r"[\s_\-\.]+")
|
|
31
|
+
|
|
32
|
+
|
|
33
|
+
def normalize_alias(raw: str) -> str:
|
|
34
|
+
"""Normalize a subject string for alias lookup.
|
|
35
|
+
|
|
36
|
+
Lowercases, collapses whitespace/dashes/underscores/dots into single
|
|
37
|
+
dashes, strips leading/trailing separators. Truncates to 200 chars.
|
|
38
|
+
"""
|
|
39
|
+
if not raw:
|
|
40
|
+
return ""
|
|
41
|
+
return _NORMALIZE_RE.sub("-", raw.strip().lower()).strip("-")[:200]
|
|
42
|
+
|
|
43
|
+
|
|
44
|
+
def ensure_entity_schema(conn: sqlite3.Connection) -> None:
|
|
45
|
+
"""Create entity tables if they don't exist. Idempotent."""
|
|
46
|
+
conn.executescript("""
|
|
47
|
+
CREATE TABLE IF NOT EXISTS entities (
|
|
48
|
+
id INTEGER PRIMARY KEY AUTOINCREMENT,
|
|
49
|
+
canonical_name TEXT NOT NULL UNIQUE,
|
|
50
|
+
entity_type TEXT NOT NULL DEFAULT 'unknown',
|
|
51
|
+
scope TEXT NOT NULL DEFAULT 'global',
|
|
52
|
+
created_at TEXT NOT NULL,
|
|
53
|
+
updated_at TEXT NOT NULL
|
|
54
|
+
);
|
|
55
|
+
|
|
56
|
+
CREATE TABLE IF NOT EXISTS entity_aliases (
|
|
57
|
+
id INTEGER PRIMARY KEY AUTOINCREMENT,
|
|
58
|
+
entity_id INTEGER NOT NULL,
|
|
59
|
+
alias TEXT NOT NULL UNIQUE,
|
|
60
|
+
original_form TEXT NOT NULL,
|
|
61
|
+
created_at TEXT NOT NULL,
|
|
62
|
+
FOREIGN KEY (entity_id) REFERENCES entities(id) ON DELETE CASCADE
|
|
63
|
+
);
|
|
64
|
+
|
|
65
|
+
CREATE INDEX IF NOT EXISTS idx_entity_aliases_alias
|
|
66
|
+
ON entity_aliases(alias);
|
|
67
|
+
CREATE INDEX IF NOT EXISTS idx_entity_aliases_entity_id
|
|
68
|
+
ON entity_aliases(entity_id);
|
|
69
|
+
""")
|
|
70
|
+
|
|
71
|
+
|
|
72
|
+
def _utc_now() -> str:
|
|
73
|
+
return datetime.now(timezone.utc).replace(microsecond=0).isoformat()
|
|
74
|
+
|
|
75
|
+
|
|
76
|
+
def resolve_or_create(
    conn: sqlite3.Connection,
    subject: str,
    *,
    entity_type: str = "unknown",
    scope: str = "global",
) -> int:
    """Resolve a subject string to a canonical entity_id, creating if needed.

    Args:
        conn: Open SQLite connection with the entity schema installed.
        subject: Free-form subject text; normalized for alias lookup.
        entity_type: Type recorded when a new entity must be created.
        scope: Scope recorded when a new entity must be created.

    Returns:
        The entity_id (int), or 0 when *subject* normalizes to an empty
        alias (or the entity row cannot be located). Thread-safe via SQLite
        serialization. Does not commit; the caller owns the transaction.
    """
    alias = normalize_alias(subject)
    if not alias:
        return 0

    # Fast path: alias already registered
    row = conn.execute(
        "SELECT entity_id FROM entity_aliases WHERE alias = ?", (alias,)
    ).fetchone()
    if row:
        return row[0]

    # Slow path: create entity + register alias
    now = _utc_now()
    display_name = subject.strip()[:200]

    cur = conn.execute(
        """INSERT OR IGNORE INTO entities (canonical_name, entity_type, scope, created_at, updated_at)
           VALUES (?, ?, ?, ?, ?)""",
        (display_name, entity_type, scope, now, now),
    )
    # BUGFIX: gate on rowcount, not lastrowid alone. sqlite3's lastrowid
    # reflects the connection's most recent *successful* insert, so after an
    # ignored INSERT OR IGNORE it can still hold a stale rowid from an
    # unrelated earlier insert — which would silently return a wrong
    # entity_id. rowcount == 1 proves this statement actually inserted.
    if cur.rowcount == 1 and cur.lastrowid:
        entity_id = cur.lastrowid
    else:
        # INSERT OR IGNORE hit a duplicate canonical_name — fetch existing
        existing = conn.execute(
            "SELECT id FROM entities WHERE canonical_name = ?", (display_name,)
        ).fetchone()
        entity_id = existing[0] if existing else 0

    if entity_id > 0:
        # Register the alias; IGNORE tolerates a concurrent registration.
        conn.execute(
            """INSERT OR IGNORE INTO entity_aliases (entity_id, alias, original_form, created_at)
               VALUES (?, ?, ?, ?)""",
            (entity_id, alias, subject.strip(), now),
        )

    return entity_id
|
|
124
|
+
|
|
125
|
+
|
|
126
|
+
def merge_entities(
    conn: sqlite3.Connection,
    keep_id: int,
    merge_id: int,
) -> dict:
    """Merge entity *merge_id* INTO *keep_id*.

    All aliases and claims that reference *merge_id* are repointed at
    *keep_id*, then the *merge_id* entity row is deleted. Does not commit;
    the caller owns the transaction.

    Args:
        conn: Open SQLite connection with the entity schema installed.
        keep_id: Surviving entity id.
        merge_id: Entity id to absorb and delete.

    Returns:
        {"merged_aliases": int, "updated_claims": int}.
    """
    # Guard: without this, merging an entity into itself would repoint
    # nothing and then DELETE the entity outright — destructive no-op.
    if keep_id == merge_id:
        return {"merged_aliases": 0, "updated_claims": 0}

    # Repoint aliases (alias uniqueness is table-wide, so no conflict arises)
    cur = conn.execute(
        "UPDATE entity_aliases SET entity_id = ? WHERE entity_id = ?",
        (keep_id, merge_id),
    )
    merged_aliases = cur.rowcount

    # Repoint claims that reference merge_id
    cur2 = conn.execute(
        "UPDATE claims SET entity_id = ? WHERE entity_id = ?",
        (keep_id, merge_id),
    )
    updated_claims = cur2.rowcount

    # Delete the absorbed entity
    conn.execute("DELETE FROM entities WHERE id = ?", (merge_id,))

    return {"merged_aliases": merged_aliases, "updated_claims": updated_claims}
|
|
153
|
+
|
|
154
|
+
|
|
155
|
+
def add_alias(conn: sqlite3.Connection, entity_id: int, alias_text: str) -> bool:
    """Register one more alias for an entity.

    The alias is normalized before storage; the original spelling is kept in
    ``original_form``. Returns True when a new row was inserted, False when
    the text normalizes to nothing or the alias is already taken.
    """
    normalized = normalize_alias(alias_text)
    if not normalized:
        return False
    try:
        conn.execute(
            """INSERT INTO entity_aliases (entity_id, alias, original_form, created_at)
               VALUES (?, ?, ?, ?)""",
            (entity_id, normalized, alias_text.strip(), _utc_now()),
        )
    except sqlite3.IntegrityError:
        # UNIQUE(alias) violation: some entity already owns this alias.
        return False
    return True
|
|
170
|
+
|
|
171
|
+
|
|
172
|
+
def list_entities(
    conn: sqlite3.Connection,
    *,
    scope: str | None = None,
    entity_type: str | None = None,
    limit: int = 100,
) -> list[dict]:
    """List entities with their alias counts and claim counts.

    Optional filters: *scope* matches as a prefix (``LIKE 'scope%'``),
    *entity_type* matches exactly. Results are ordered by claim count,
    most-referenced first, capped at *limit* rows.
    """
    conditions: list[str] = []
    params: list = []
    if scope:
        conditions.append("e.scope LIKE ?")
        params.append(f"{scope}%")
    if entity_type:
        conditions.append("e.entity_type = ?")
        params.append(entity_type)

    where_clause = (" WHERE " + " AND ".join(conditions)) if conditions else ""
    sql = (
        """
    SELECT e.id, e.canonical_name, e.entity_type, e.scope, e.created_at,
           COUNT(DISTINCT a.id) as alias_count,
           COUNT(DISTINCT c.id) as claim_count
    FROM entities e
    LEFT JOIN entity_aliases a ON a.entity_id = e.id
    LEFT JOIN claims c ON c.entity_id = e.id
    """
        + where_clause
        + " GROUP BY e.id ORDER BY claim_count DESC LIMIT ?"
    )
    params.append(limit)

    columns = ("id", "name", "type", "scope", "created_at", "alias_count", "claim_count")
    return [dict(zip(columns, row)) for row in conn.execute(sql, params)]
|
|
214
|
+
|
|
215
|
+
|
|
216
|
+
def get_aliases(conn: sqlite3.Connection, entity_id: int) -> list[str]:
    """Return every recorded alias spelling for *entity_id*, oldest first."""
    cursor = conn.execute(
        "SELECT original_form FROM entity_aliases WHERE entity_id = ? ORDER BY created_at",
        (entity_id,),
    )
    return [original for (original,) in cursor]
|
|
223
|
+
|
|
224
|
+
|
|
225
|
+
def backfill_entities(conn: sqlite3.Connection) -> dict:
    """Backfill entity_id on existing claims that have subject but no entity_id.

    Creates entities and aliases as needed, then commits. Returns stats:
    ``entities_created`` (distinct subjects that resolved to an entity —
    NOTE(review): includes subjects resolved to pre-existing entities),
    ``claims_resolved`` (claim rows updated) and ``subjects_processed``.
    """
    ensure_entity_schema(conn)

    # Make sure claims.entity_id exists; ALTER TABLE raises if it already does.
    try:
        conn.execute("ALTER TABLE claims ADD COLUMN entity_id INTEGER")
    except sqlite3.OperationalError:
        pass  # column already present

    pending = conn.execute(
        "SELECT DISTINCT subject FROM claims WHERE subject IS NOT NULL AND (entity_id IS NULL OR entity_id = 0)"
    ).fetchall()

    entities_created = 0
    claims_resolved = 0
    for (subject,) in pending:
        eid = resolve_or_create(conn, subject)
        if eid <= 0:
            # Subject normalized to nothing — leave its claims untouched.
            continue
        updated = conn.execute(
            "UPDATE claims SET entity_id = ? WHERE subject = ? AND (entity_id IS NULL OR entity_id = 0)",
            (eid, subject),
        )
        claims_resolved += updated.rowcount
        entities_created += 1

    conn.commit()
    return {
        "entities_created": entities_created,
        "claims_resolved": claims_resolved,
        "subjects_processed": len(pending),
    }
|
|
@@ -89,13 +89,26 @@ def _parse_scope_allowlist(raw: str) -> list[str] | None:
|
|
|
89
89
|
|
|
90
90
|
|
|
91
91
|
def _project_scope(workspace: str) -> str:
    """Derive a project scope from a workspace path.

    By default returns the canonical ``project:<slug>`` form, where the slug
    is the sanitized, lowercased directory name of the resolved workspace.
    An explicit scope in ``_ENV_DEFAULT_PROJECT_SCOPE`` wins outright.

    Setting ``MEMORYMASTER_SCOPE_DISAMBIGUATE=1`` (or ``true``/``yes``)
    re-enables the legacy hash-suffixed form ``project:<slug>:<sha1[:8]>``
    for hosts that really do have two different workspaces sharing one
    directory name. For the common one-workspace-per-slug case the hash is
    dropped, so CLI ingests (``project:wezbridge``) and MCP ingests no
    longer fragment into distinct scopes that can't see each other.
    """
    if _ENV_DEFAULT_PROJECT_SCOPE:
        return _ENV_DEFAULT_PROJECT_SCOPE

    resolved = Path(_resolve_workspace(workspace)).resolve()
    raw_name = resolved.name.strip().lower() or "workspace"
    slug = _SCOPE_SAFE_RE.sub("-", raw_name).strip("-") or "workspace"

    flag = os.getenv("MEMORYMASTER_SCOPE_DISAMBIGUATE", "").strip().lower()
    if flag in ("1", "true", "yes"):
        digest = hashlib.sha1(str(resolved).lower().encode("utf-8")).hexdigest()[:8]
        return f"project:{slug}:{digest}"
    return f"project:{slug}"
|
|
99
112
|
|
|
100
113
|
|
|
101
114
|
def _effective_ingest_scope(scope: str, workspace: str) -> str:
|
|
@@ -272,11 +272,22 @@ class Event:
|
|
|
272
272
|
|
|
273
273
|
|
|
274
274
|
# Allowed values for claim-to-claim link types. Order is cosmetic; membership
# is what matters to validators elsewhere in the package.
CLAIM_LINK_TYPES = (
    # Core lifecycle types (original v2.0)
    "relates_to",
    "supersedes",
    "derived_from",
    "contradicts",
    "supports",
    # Domain-specific relationship types (GBrain-inspired, v3.3)
    "implements",   # claim A describes an implementation of claim B
    "configures",   # claim A configures/parametrizes claim B
    "depends_on",   # claim A requires claim B to function
    "deployed_on",  # claim A is deployed on infrastructure described by claim B
    "owned_by",     # claim A is owned/maintained by entity in claim B
    "tested_by",    # claim A is validated by test described in claim B
    "documents",    # claim A documents behavior of claim B
    "blocks",       # claim A blocks progress on claim B
    "enables",      # claim A enables/unlocks claim B
)
|
|
281
292
|
|
|
282
293
|
|
|
@@ -120,6 +120,10 @@ class MemoryService:
|
|
|
120
120
|
raise ValueError("Claim text cannot be empty.")
|
|
121
121
|
if not citations:
|
|
122
122
|
citations = [CitationInput(source="mcp-session", locator=scope or "project")]
|
|
123
|
+
# Normalize claim_type to lowercase so routing hints like "DECISION"
|
|
124
|
+
# from the classify hook don't create a separate type from "decision".
|
|
125
|
+
if claim_type:
|
|
126
|
+
claim_type = claim_type.strip().lower() or None
|
|
123
127
|
# Dedup by idempotency key
|
|
124
128
|
normalized_idempotency_key = (idempotency_key or "").strip() or None
|
|
125
129
|
if normalized_idempotency_key is not None and hasattr(self.store, "get_claim_by_idempotency_key"):
|
|
@@ -146,6 +150,21 @@ class MemoryService:
|
|
|
146
150
|
)
|
|
147
151
|
if not sanitized.citations:
|
|
148
152
|
raise ValueError("At least one citation is required.")
|
|
153
|
+
# Resolve subject → canonical entity (GBrain-inspired entity registry)
|
|
154
|
+
entity_id = 0
|
|
155
|
+
if subject:
|
|
156
|
+
try:
|
|
157
|
+
from memorymaster.entity_registry import resolve_or_create
|
|
158
|
+
with self.store.connect() as _conn:
|
|
159
|
+
entity_id = resolve_or_create(
|
|
160
|
+
_conn, subject,
|
|
161
|
+
entity_type=claim_type or "unknown",
|
|
162
|
+
scope=scope,
|
|
163
|
+
)
|
|
164
|
+
_conn.commit()
|
|
165
|
+
except Exception:
|
|
166
|
+
pass # entity resolution is best-effort, never block ingest
|
|
167
|
+
|
|
149
168
|
claim = self.store.create_claim(
|
|
150
169
|
text=sanitized.text,
|
|
151
170
|
citations=sanitized.citations,
|
|
@@ -164,6 +183,18 @@ class MemoryService:
|
|
|
164
183
|
source_agent=source_agent,
|
|
165
184
|
visibility=visibility,
|
|
166
185
|
)
|
|
186
|
+
|
|
187
|
+
# Set entity_id on the claim (best-effort, don't fail ingest)
|
|
188
|
+
if entity_id > 0:
|
|
189
|
+
try:
|
|
190
|
+
with self.store.connect() as _conn:
|
|
191
|
+
_conn.execute(
|
|
192
|
+
"UPDATE claims SET entity_id = ? WHERE id = ?",
|
|
193
|
+
(entity_id, claim.id),
|
|
194
|
+
)
|
|
195
|
+
_conn.commit()
|
|
196
|
+
except Exception:
|
|
197
|
+
pass
|
|
167
198
|
if sanitized.is_sensitive:
|
|
168
199
|
self.store.record_event(
|
|
169
200
|
claim_id=claim.id,
|
|
@@ -90,4 +90,11 @@ class SQLiteStore(_SchemaMixin, _ReadMixin, _WriteClaimsMixin, _LifecycleMixin):
|
|
|
90
90
|
self._ensure_agent_columns(conn)
|
|
91
91
|
self._ensure_version_column(conn)
|
|
92
92
|
self._ensure_embeddings_schema(conn)
|
|
93
|
+
# Entity registry (GBrain-inspired canonical entities + alias resolution)
|
|
94
|
+
from memorymaster.entity_registry import ensure_entity_schema
|
|
95
|
+
ensure_entity_schema(conn)
|
|
96
|
+
try:
|
|
97
|
+
conn.execute("ALTER TABLE claims ADD COLUMN entity_id INTEGER")
|
|
98
|
+
except sqlite3.OperationalError:
|
|
99
|
+
pass # already exists
|
|
93
100
|
conn.commit()
|
|
@@ -1,6 +1,6 @@
|
|
|
1
1
|
Metadata-Version: 2.4
|
|
2
2
|
Name: memorymaster
|
|
3
|
-
Version: 3.
|
|
3
|
+
Version: 3.3.1
|
|
4
4
|
Summary: Production-grade memory reliability system for AI coding agents. Lifecycle-managed claims with citations, conflict detection, steward governance, and MCP integration.
|
|
5
5
|
Author: wolverin0
|
|
6
6
|
License: MIT
|
|
@@ -4,7 +4,7 @@ build-backend = "setuptools.build_meta"
|
|
|
4
4
|
|
|
5
5
|
[project]
|
|
6
6
|
name = "memorymaster"
|
|
7
|
-
version = "3.
|
|
7
|
+
version = "3.3.1"
|
|
8
8
|
description = "Production-grade memory reliability system for AI coding agents. Lifecycle-managed claims with citations, conflict detection, steward governance, and MCP integration."
|
|
9
9
|
license = {text = "MIT"}
|
|
10
10
|
authors = [{name = "wolverin0"}]
|
|
File without changes
|
|
File without changes
|
|
File without changes
|