attune_ai-2.0.0-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- attune/__init__.py +358 -0
- attune/adaptive/__init__.py +13 -0
- attune/adaptive/task_complexity.py +127 -0
- attune/agent_monitoring.py +414 -0
- attune/cache/__init__.py +117 -0
- attune/cache/base.py +166 -0
- attune/cache/dependency_manager.py +256 -0
- attune/cache/hash_only.py +251 -0
- attune/cache/hybrid.py +457 -0
- attune/cache/storage.py +285 -0
- attune/cache_monitor.py +356 -0
- attune/cache_stats.py +298 -0
- attune/cli/__init__.py +152 -0
- attune/cli/__main__.py +12 -0
- attune/cli/commands/__init__.py +1 -0
- attune/cli/commands/batch.py +264 -0
- attune/cli/commands/cache.py +248 -0
- attune/cli/commands/help.py +331 -0
- attune/cli/commands/info.py +140 -0
- attune/cli/commands/inspect.py +436 -0
- attune/cli/commands/inspection.py +57 -0
- attune/cli/commands/memory.py +48 -0
- attune/cli/commands/metrics.py +92 -0
- attune/cli/commands/orchestrate.py +184 -0
- attune/cli/commands/patterns.py +207 -0
- attune/cli/commands/profiling.py +202 -0
- attune/cli/commands/provider.py +98 -0
- attune/cli/commands/routing.py +285 -0
- attune/cli/commands/setup.py +96 -0
- attune/cli/commands/status.py +235 -0
- attune/cli/commands/sync.py +166 -0
- attune/cli/commands/tier.py +121 -0
- attune/cli/commands/utilities.py +114 -0
- attune/cli/commands/workflow.py +579 -0
- attune/cli/core.py +32 -0
- attune/cli/parsers/__init__.py +68 -0
- attune/cli/parsers/batch.py +118 -0
- attune/cli/parsers/cache.py +65 -0
- attune/cli/parsers/help.py +41 -0
- attune/cli/parsers/info.py +26 -0
- attune/cli/parsers/inspect.py +66 -0
- attune/cli/parsers/metrics.py +42 -0
- attune/cli/parsers/orchestrate.py +61 -0
- attune/cli/parsers/patterns.py +54 -0
- attune/cli/parsers/provider.py +40 -0
- attune/cli/parsers/routing.py +110 -0
- attune/cli/parsers/setup.py +42 -0
- attune/cli/parsers/status.py +47 -0
- attune/cli/parsers/sync.py +31 -0
- attune/cli/parsers/tier.py +33 -0
- attune/cli/parsers/workflow.py +77 -0
- attune/cli/utils/__init__.py +1 -0
- attune/cli/utils/data.py +242 -0
- attune/cli/utils/helpers.py +68 -0
- attune/cli_legacy.py +3957 -0
- attune/cli_minimal.py +1159 -0
- attune/cli_router.py +437 -0
- attune/cli_unified.py +814 -0
- attune/config/__init__.py +66 -0
- attune/config/xml_config.py +286 -0
- attune/config.py +545 -0
- attune/coordination.py +870 -0
- attune/core.py +1511 -0
- attune/core_modules/__init__.py +15 -0
- attune/cost_tracker.py +626 -0
- attune/dashboard/__init__.py +41 -0
- attune/dashboard/app.py +512 -0
- attune/dashboard/simple_server.py +435 -0
- attune/dashboard/standalone_server.py +547 -0
- attune/discovery.py +306 -0
- attune/emergence.py +306 -0
- attune/exceptions.py +123 -0
- attune/feedback_loops.py +373 -0
- attune/hot_reload/README.md +473 -0
- attune/hot_reload/__init__.py +62 -0
- attune/hot_reload/config.py +83 -0
- attune/hot_reload/integration.py +229 -0
- attune/hot_reload/reloader.py +298 -0
- attune/hot_reload/watcher.py +183 -0
- attune/hot_reload/websocket.py +177 -0
- attune/levels.py +577 -0
- attune/leverage_points.py +441 -0
- attune/logging_config.py +261 -0
- attune/mcp/__init__.py +10 -0
- attune/mcp/server.py +506 -0
- attune/memory/__init__.py +237 -0
- attune/memory/claude_memory.py +469 -0
- attune/memory/config.py +224 -0
- attune/memory/control_panel.py +1290 -0
- attune/memory/control_panel_support.py +145 -0
- attune/memory/cross_session.py +845 -0
- attune/memory/edges.py +179 -0
- attune/memory/encryption.py +159 -0
- attune/memory/file_session.py +770 -0
- attune/memory/graph.py +570 -0
- attune/memory/long_term.py +913 -0
- attune/memory/long_term_types.py +99 -0
- attune/memory/mixins/__init__.py +25 -0
- attune/memory/mixins/backend_init_mixin.py +249 -0
- attune/memory/mixins/capabilities_mixin.py +208 -0
- attune/memory/mixins/handoff_mixin.py +208 -0
- attune/memory/mixins/lifecycle_mixin.py +49 -0
- attune/memory/mixins/long_term_mixin.py +352 -0
- attune/memory/mixins/promotion_mixin.py +109 -0
- attune/memory/mixins/short_term_mixin.py +182 -0
- attune/memory/nodes.py +179 -0
- attune/memory/redis_bootstrap.py +540 -0
- attune/memory/security/__init__.py +31 -0
- attune/memory/security/audit_logger.py +932 -0
- attune/memory/security/pii_scrubber.py +640 -0
- attune/memory/security/secrets_detector.py +678 -0
- attune/memory/short_term.py +2192 -0
- attune/memory/simple_storage.py +302 -0
- attune/memory/storage/__init__.py +15 -0
- attune/memory/storage_backend.py +167 -0
- attune/memory/summary_index.py +583 -0
- attune/memory/types.py +446 -0
- attune/memory/unified.py +182 -0
- attune/meta_workflows/__init__.py +74 -0
- attune/meta_workflows/agent_creator.py +248 -0
- attune/meta_workflows/builtin_templates.py +567 -0
- attune/meta_workflows/cli_commands/__init__.py +56 -0
- attune/meta_workflows/cli_commands/agent_commands.py +321 -0
- attune/meta_workflows/cli_commands/analytics_commands.py +442 -0
- attune/meta_workflows/cli_commands/config_commands.py +232 -0
- attune/meta_workflows/cli_commands/memory_commands.py +182 -0
- attune/meta_workflows/cli_commands/template_commands.py +354 -0
- attune/meta_workflows/cli_commands/workflow_commands.py +382 -0
- attune/meta_workflows/cli_meta_workflows.py +59 -0
- attune/meta_workflows/form_engine.py +292 -0
- attune/meta_workflows/intent_detector.py +409 -0
- attune/meta_workflows/models.py +569 -0
- attune/meta_workflows/pattern_learner.py +738 -0
- attune/meta_workflows/plan_generator.py +384 -0
- attune/meta_workflows/session_context.py +397 -0
- attune/meta_workflows/template_registry.py +229 -0
- attune/meta_workflows/workflow.py +984 -0
- attune/metrics/__init__.py +12 -0
- attune/metrics/collector.py +31 -0
- attune/metrics/prompt_metrics.py +194 -0
- attune/models/__init__.py +172 -0
- attune/models/__main__.py +13 -0
- attune/models/adaptive_routing.py +437 -0
- attune/models/auth_cli.py +444 -0
- attune/models/auth_strategy.py +450 -0
- attune/models/cli.py +655 -0
- attune/models/empathy_executor.py +354 -0
- attune/models/executor.py +257 -0
- attune/models/fallback.py +762 -0
- attune/models/provider_config.py +282 -0
- attune/models/registry.py +472 -0
- attune/models/tasks.py +359 -0
- attune/models/telemetry/__init__.py +71 -0
- attune/models/telemetry/analytics.py +594 -0
- attune/models/telemetry/backend.py +196 -0
- attune/models/telemetry/data_models.py +431 -0
- attune/models/telemetry/storage.py +489 -0
- attune/models/token_estimator.py +420 -0
- attune/models/validation.py +280 -0
- attune/monitoring/__init__.py +52 -0
- attune/monitoring/alerts.py +946 -0
- attune/monitoring/alerts_cli.py +448 -0
- attune/monitoring/multi_backend.py +271 -0
- attune/monitoring/otel_backend.py +362 -0
- attune/optimization/__init__.py +19 -0
- attune/optimization/context_optimizer.py +272 -0
- attune/orchestration/__init__.py +67 -0
- attune/orchestration/agent_templates.py +707 -0
- attune/orchestration/config_store.py +499 -0
- attune/orchestration/execution_strategies.py +2111 -0
- attune/orchestration/meta_orchestrator.py +1168 -0
- attune/orchestration/pattern_learner.py +696 -0
- attune/orchestration/real_tools.py +931 -0
- attune/pattern_cache.py +187 -0
- attune/pattern_library.py +542 -0
- attune/patterns/debugging/all_patterns.json +81 -0
- attune/patterns/debugging/workflow_20260107_1770825e.json +77 -0
- attune/patterns/refactoring_memory.json +89 -0
- attune/persistence.py +564 -0
- attune/platform_utils.py +265 -0
- attune/plugins/__init__.py +28 -0
- attune/plugins/base.py +361 -0
- attune/plugins/registry.py +268 -0
- attune/project_index/__init__.py +32 -0
- attune/project_index/cli.py +335 -0
- attune/project_index/index.py +667 -0
- attune/project_index/models.py +504 -0
- attune/project_index/reports.py +474 -0
- attune/project_index/scanner.py +777 -0
- attune/project_index/scanner_parallel.py +291 -0
- attune/prompts/__init__.py +61 -0
- attune/prompts/config.py +77 -0
- attune/prompts/context.py +177 -0
- attune/prompts/parser.py +285 -0
- attune/prompts/registry.py +313 -0
- attune/prompts/templates.py +208 -0
- attune/redis_config.py +302 -0
- attune/redis_memory.py +799 -0
- attune/resilience/__init__.py +56 -0
- attune/resilience/circuit_breaker.py +256 -0
- attune/resilience/fallback.py +179 -0
- attune/resilience/health.py +300 -0
- attune/resilience/retry.py +209 -0
- attune/resilience/timeout.py +135 -0
- attune/routing/__init__.py +43 -0
- attune/routing/chain_executor.py +433 -0
- attune/routing/classifier.py +217 -0
- attune/routing/smart_router.py +234 -0
- attune/routing/workflow_registry.py +343 -0
- attune/scaffolding/README.md +589 -0
- attune/scaffolding/__init__.py +35 -0
- attune/scaffolding/__main__.py +14 -0
- attune/scaffolding/cli.py +240 -0
- attune/scaffolding/templates/base_wizard.py.jinja2 +121 -0
- attune/scaffolding/templates/coach_wizard.py.jinja2 +321 -0
- attune/scaffolding/templates/domain_wizard.py.jinja2 +408 -0
- attune/scaffolding/templates/linear_flow_wizard.py.jinja2 +203 -0
- attune/socratic/__init__.py +256 -0
- attune/socratic/ab_testing.py +958 -0
- attune/socratic/blueprint.py +533 -0
- attune/socratic/cli.py +703 -0
- attune/socratic/collaboration.py +1114 -0
- attune/socratic/domain_templates.py +924 -0
- attune/socratic/embeddings.py +738 -0
- attune/socratic/engine.py +794 -0
- attune/socratic/explainer.py +682 -0
- attune/socratic/feedback.py +772 -0
- attune/socratic/forms.py +629 -0
- attune/socratic/generator.py +732 -0
- attune/socratic/llm_analyzer.py +637 -0
- attune/socratic/mcp_server.py +702 -0
- attune/socratic/session.py +312 -0
- attune/socratic/storage.py +667 -0
- attune/socratic/success.py +730 -0
- attune/socratic/visual_editor.py +860 -0
- attune/socratic/web_ui.py +958 -0
- attune/telemetry/__init__.py +39 -0
- attune/telemetry/agent_coordination.py +475 -0
- attune/telemetry/agent_tracking.py +367 -0
- attune/telemetry/approval_gates.py +545 -0
- attune/telemetry/cli.py +1231 -0
- attune/telemetry/commands/__init__.py +14 -0
- attune/telemetry/commands/dashboard_commands.py +696 -0
- attune/telemetry/event_streaming.py +409 -0
- attune/telemetry/feedback_loop.py +567 -0
- attune/telemetry/usage_tracker.py +591 -0
- attune/templates.py +754 -0
- attune/test_generator/__init__.py +38 -0
- attune/test_generator/__main__.py +14 -0
- attune/test_generator/cli.py +234 -0
- attune/test_generator/generator.py +355 -0
- attune/test_generator/risk_analyzer.py +216 -0
- attune/test_generator/templates/unit_test.py.jinja2 +272 -0
- attune/tier_recommender.py +384 -0
- attune/tools.py +183 -0
- attune/trust/__init__.py +28 -0
- attune/trust/circuit_breaker.py +579 -0
- attune/trust_building.py +527 -0
- attune/validation/__init__.py +19 -0
- attune/validation/xml_validator.py +281 -0
- attune/vscode_bridge.py +173 -0
- attune/workflow_commands.py +780 -0
- attune/workflow_patterns/__init__.py +33 -0
- attune/workflow_patterns/behavior.py +249 -0
- attune/workflow_patterns/core.py +76 -0
- attune/workflow_patterns/output.py +99 -0
- attune/workflow_patterns/registry.py +255 -0
- attune/workflow_patterns/structural.py +288 -0
- attune/workflows/__init__.py +539 -0
- attune/workflows/autonomous_test_gen.py +1268 -0
- attune/workflows/base.py +2667 -0
- attune/workflows/batch_processing.py +342 -0
- attune/workflows/bug_predict.py +1084 -0
- attune/workflows/builder.py +273 -0
- attune/workflows/caching.py +253 -0
- attune/workflows/code_review.py +1048 -0
- attune/workflows/code_review_adapters.py +312 -0
- attune/workflows/code_review_pipeline.py +722 -0
- attune/workflows/config.py +645 -0
- attune/workflows/dependency_check.py +644 -0
- attune/workflows/document_gen/__init__.py +25 -0
- attune/workflows/document_gen/config.py +30 -0
- attune/workflows/document_gen/report_formatter.py +162 -0
- attune/workflows/document_gen/workflow.py +1426 -0
- attune/workflows/document_manager.py +216 -0
- attune/workflows/document_manager_README.md +134 -0
- attune/workflows/documentation_orchestrator.py +1205 -0
- attune/workflows/history.py +510 -0
- attune/workflows/keyboard_shortcuts/__init__.py +39 -0
- attune/workflows/keyboard_shortcuts/generators.py +391 -0
- attune/workflows/keyboard_shortcuts/parsers.py +416 -0
- attune/workflows/keyboard_shortcuts/prompts.py +295 -0
- attune/workflows/keyboard_shortcuts/schema.py +193 -0
- attune/workflows/keyboard_shortcuts/workflow.py +509 -0
- attune/workflows/llm_base.py +363 -0
- attune/workflows/manage_docs.py +87 -0
- attune/workflows/manage_docs_README.md +134 -0
- attune/workflows/manage_documentation.py +821 -0
- attune/workflows/new_sample_workflow1.py +149 -0
- attune/workflows/new_sample_workflow1_README.md +150 -0
- attune/workflows/orchestrated_health_check.py +849 -0
- attune/workflows/orchestrated_release_prep.py +600 -0
- attune/workflows/output.py +413 -0
- attune/workflows/perf_audit.py +863 -0
- attune/workflows/pr_review.py +762 -0
- attune/workflows/progress.py +785 -0
- attune/workflows/progress_server.py +322 -0
- attune/workflows/progressive/README 2.md +454 -0
- attune/workflows/progressive/README.md +454 -0
- attune/workflows/progressive/__init__.py +82 -0
- attune/workflows/progressive/cli.py +219 -0
- attune/workflows/progressive/core.py +488 -0
- attune/workflows/progressive/orchestrator.py +723 -0
- attune/workflows/progressive/reports.py +520 -0
- attune/workflows/progressive/telemetry.py +274 -0
- attune/workflows/progressive/test_gen.py +495 -0
- attune/workflows/progressive/workflow.py +589 -0
- attune/workflows/refactor_plan.py +694 -0
- attune/workflows/release_prep.py +895 -0
- attune/workflows/release_prep_crew.py +969 -0
- attune/workflows/research_synthesis.py +404 -0
- attune/workflows/routing.py +168 -0
- attune/workflows/secure_release.py +593 -0
- attune/workflows/security_adapters.py +297 -0
- attune/workflows/security_audit.py +1329 -0
- attune/workflows/security_audit_phase3.py +355 -0
- attune/workflows/seo_optimization.py +633 -0
- attune/workflows/step_config.py +234 -0
- attune/workflows/telemetry_mixin.py +269 -0
- attune/workflows/test5.py +125 -0
- attune/workflows/test5_README.md +158 -0
- attune/workflows/test_coverage_boost_crew.py +849 -0
- attune/workflows/test_gen/__init__.py +52 -0
- attune/workflows/test_gen/ast_analyzer.py +249 -0
- attune/workflows/test_gen/config.py +88 -0
- attune/workflows/test_gen/data_models.py +38 -0
- attune/workflows/test_gen/report_formatter.py +289 -0
- attune/workflows/test_gen/test_templates.py +381 -0
- attune/workflows/test_gen/workflow.py +655 -0
- attune/workflows/test_gen.py +54 -0
- attune/workflows/test_gen_behavioral.py +477 -0
- attune/workflows/test_gen_parallel.py +341 -0
- attune/workflows/test_lifecycle.py +526 -0
- attune/workflows/test_maintenance.py +627 -0
- attune/workflows/test_maintenance_cli.py +590 -0
- attune/workflows/test_maintenance_crew.py +840 -0
- attune/workflows/test_runner.py +622 -0
- attune/workflows/tier_tracking.py +531 -0
- attune/workflows/xml_enhanced_crew.py +285 -0
- attune_ai-2.0.0.dist-info/METADATA +1026 -0
- attune_ai-2.0.0.dist-info/RECORD +457 -0
- attune_ai-2.0.0.dist-info/WHEEL +5 -0
- attune_ai-2.0.0.dist-info/entry_points.txt +26 -0
- attune_ai-2.0.0.dist-info/licenses/LICENSE +201 -0
- attune_ai-2.0.0.dist-info/licenses/LICENSE_CHANGE_ANNOUNCEMENT.md +101 -0
- attune_ai-2.0.0.dist-info/top_level.txt +5 -0
- attune_healthcare/__init__.py +13 -0
- attune_healthcare/monitors/__init__.py +9 -0
- attune_healthcare/monitors/clinical_protocol_monitor.py +315 -0
- attune_healthcare/monitors/monitoring/__init__.py +44 -0
- attune_healthcare/monitors/monitoring/protocol_checker.py +300 -0
- attune_healthcare/monitors/monitoring/protocol_loader.py +214 -0
- attune_healthcare/monitors/monitoring/sensor_parsers.py +306 -0
- attune_healthcare/monitors/monitoring/trajectory_analyzer.py +389 -0
- attune_llm/README.md +553 -0
- attune_llm/__init__.py +28 -0
- attune_llm/agent_factory/__init__.py +53 -0
- attune_llm/agent_factory/adapters/__init__.py +85 -0
- attune_llm/agent_factory/adapters/autogen_adapter.py +312 -0
- attune_llm/agent_factory/adapters/crewai_adapter.py +483 -0
- attune_llm/agent_factory/adapters/haystack_adapter.py +298 -0
- attune_llm/agent_factory/adapters/langchain_adapter.py +362 -0
- attune_llm/agent_factory/adapters/langgraph_adapter.py +333 -0
- attune_llm/agent_factory/adapters/native.py +228 -0
- attune_llm/agent_factory/adapters/wizard_adapter.py +423 -0
- attune_llm/agent_factory/base.py +305 -0
- attune_llm/agent_factory/crews/__init__.py +67 -0
- attune_llm/agent_factory/crews/code_review.py +1113 -0
- attune_llm/agent_factory/crews/health_check.py +1262 -0
- attune_llm/agent_factory/crews/refactoring.py +1128 -0
- attune_llm/agent_factory/crews/security_audit.py +1018 -0
- attune_llm/agent_factory/decorators.py +287 -0
- attune_llm/agent_factory/factory.py +558 -0
- attune_llm/agent_factory/framework.py +193 -0
- attune_llm/agent_factory/memory_integration.py +328 -0
- attune_llm/agent_factory/resilient.py +320 -0
- attune_llm/agents_md/__init__.py +22 -0
- attune_llm/agents_md/loader.py +218 -0
- attune_llm/agents_md/parser.py +271 -0
- attune_llm/agents_md/registry.py +307 -0
- attune_llm/claude_memory.py +466 -0
- attune_llm/cli/__init__.py +8 -0
- attune_llm/cli/sync_claude.py +487 -0
- attune_llm/code_health.py +1313 -0
- attune_llm/commands/__init__.py +51 -0
- attune_llm/commands/context.py +375 -0
- attune_llm/commands/loader.py +301 -0
- attune_llm/commands/models.py +231 -0
- attune_llm/commands/parser.py +371 -0
- attune_llm/commands/registry.py +429 -0
- attune_llm/config/__init__.py +29 -0
- attune_llm/config/unified.py +291 -0
- attune_llm/context/__init__.py +22 -0
- attune_llm/context/compaction.py +455 -0
- attune_llm/context/manager.py +434 -0
- attune_llm/contextual_patterns.py +361 -0
- attune_llm/core.py +907 -0
- attune_llm/git_pattern_extractor.py +435 -0
- attune_llm/hooks/__init__.py +24 -0
- attune_llm/hooks/config.py +306 -0
- attune_llm/hooks/executor.py +289 -0
- attune_llm/hooks/registry.py +302 -0
- attune_llm/hooks/scripts/__init__.py +39 -0
- attune_llm/hooks/scripts/evaluate_session.py +201 -0
- attune_llm/hooks/scripts/first_time_init.py +285 -0
- attune_llm/hooks/scripts/pre_compact.py +207 -0
- attune_llm/hooks/scripts/session_end.py +183 -0
- attune_llm/hooks/scripts/session_start.py +163 -0
- attune_llm/hooks/scripts/suggest_compact.py +225 -0
- attune_llm/learning/__init__.py +30 -0
- attune_llm/learning/evaluator.py +438 -0
- attune_llm/learning/extractor.py +514 -0
- attune_llm/learning/storage.py +560 -0
- attune_llm/levels.py +227 -0
- attune_llm/pattern_confidence.py +414 -0
- attune_llm/pattern_resolver.py +272 -0
- attune_llm/pattern_summary.py +350 -0
- attune_llm/providers.py +967 -0
- attune_llm/routing/__init__.py +32 -0
- attune_llm/routing/model_router.py +362 -0
- attune_llm/security/IMPLEMENTATION_SUMMARY.md +413 -0
- attune_llm/security/PHASE2_COMPLETE.md +384 -0
- attune_llm/security/PHASE2_SECRETS_DETECTOR_COMPLETE.md +271 -0
- attune_llm/security/QUICK_REFERENCE.md +316 -0
- attune_llm/security/README.md +262 -0
- attune_llm/security/__init__.py +62 -0
- attune_llm/security/audit_logger.py +929 -0
- attune_llm/security/audit_logger_example.py +152 -0
- attune_llm/security/pii_scrubber.py +640 -0
- attune_llm/security/secrets_detector.py +678 -0
- attune_llm/security/secrets_detector_example.py +304 -0
- attune_llm/security/secure_memdocs.py +1192 -0
- attune_llm/security/secure_memdocs_example.py +278 -0
- attune_llm/session_status.py +745 -0
- attune_llm/state.py +246 -0
- attune_llm/utils/__init__.py +5 -0
- attune_llm/utils/tokens.py +349 -0
- attune_software/SOFTWARE_PLUGIN_README.md +57 -0
- attune_software/__init__.py +13 -0
- attune_software/cli/__init__.py +120 -0
- attune_software/cli/inspect.py +362 -0
- attune_software/cli.py +574 -0
- attune_software/plugin.py +188 -0
- workflow_scaffolding/__init__.py +11 -0
- workflow_scaffolding/__main__.py +12 -0
- workflow_scaffolding/cli.py +206 -0
- workflow_scaffolding/generator.py +265 -0
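
The diff below covers the new attune/memory/control_panel.py (+1290 lines). As orientation, here is a minimal usage sketch assembled from that module's docstring and the method signatures visible in the diff (status(), start_redis(), get_statistics(), health_check()); it is a sketch, not part of the published diff:

    from attune.memory import MemoryControlPanel

    # Inspect and manage the memory subsystem programmatically
    panel = MemoryControlPanel()
    print(panel.status())            # Redis + long-term storage status
    panel.start_redis()              # start Redis if it is not already running
    stats = panel.get_statistics()
    print(stats.patterns_total, stats.redis_keys_total)
    print(panel.health_check()["overall"])

The same module also exposes a CLI (python -m attune.memory.control_panel status|start|stats|patterns --list) and an HTTP API server (run_api_server) with API-key authentication, per-IP rate limiting, CORS controls, and optional TLS.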
|
@@ -0,0 +1,1290 @@
|
|
|
1
|
+
"""Memory Control Panel for Empathy Framework
|
|
2
|
+
|
|
3
|
+
Enterprise-grade control panel for managing AI memory systems.
|
|
4
|
+
Provides both programmatic API and CLI interface.
|
|
5
|
+
|
|
6
|
+
Features:
|
|
7
|
+
- Redis lifecycle management (start/stop/status)
|
|
8
|
+
- Memory statistics and health monitoring
|
|
9
|
+
- Pattern management (list, search, delete)
|
|
10
|
+
- Configuration management
|
|
11
|
+
- Export/import capabilities
|
|
12
|
+
|
|
13
|
+
Usage (Python API):
|
|
14
|
+
from attune.memory import MemoryControlPanel
|
|
15
|
+
|
|
16
|
+
panel = MemoryControlPanel()
|
|
17
|
+
print(panel.status())
|
|
18
|
+
panel.start_redis()
|
|
19
|
+
panel.show_statistics()
|
|
20
|
+
|
|
21
|
+
Usage (CLI):
|
|
22
|
+
python -m attune.memory.control_panel status
|
|
23
|
+
python -m attune.memory.control_panel start
|
|
24
|
+
python -m attune.memory.control_panel stats
|
|
25
|
+
python -m attune.memory.control_panel patterns --list
|
|
26
|
+
|
|
27
|
+
Copyright 2025 Smart AI Memory, LLC
|
|
28
|
+
Licensed under Fair Source 0.9
|
|
29
|
+
"""
|
|
30
|
+
|
|
31
|
+
import argparse
|
|
32
|
+
import json
|
|
33
|
+
import logging
|
|
34
|
+
import re
|
|
35
|
+
import signal
|
|
36
|
+
import ssl
|
|
37
|
+
import sys
|
|
38
|
+
import time
|
|
39
|
+
import warnings
|
|
40
|
+
from dataclasses import asdict, dataclass
|
|
41
|
+
from datetime import datetime
|
|
42
|
+
from http.server import BaseHTTPRequestHandler, HTTPServer
|
|
43
|
+
from pathlib import Path
|
|
44
|
+
from typing import Any
|
|
45
|
+
from urllib.parse import parse_qs, urlparse
|
|
46
|
+
|
|
47
|
+
import structlog
|
|
48
|
+
|
|
49
|
+
from .control_panel_support import APIKeyAuth, MemoryStats, RateLimiter
|
|
50
|
+
from .long_term import Classification, SecureMemDocsIntegration
|
|
51
|
+
from .redis_bootstrap import (
|
|
52
|
+
RedisStartMethod,
|
|
53
|
+
RedisStatus,
|
|
54
|
+
_check_redis_running,
|
|
55
|
+
ensure_redis,
|
|
56
|
+
stop_redis,
|
|
57
|
+
)
|
|
58
|
+
from .short_term import AccessTier, AgentCredentials, RedisShortTermMemory
|
|
59
|
+
|
|
60
|
+
# Suppress noisy warnings in CLI mode
|
|
61
|
+
warnings.filterwarnings("ignore", category=RuntimeWarning, module="runpy")
|
|
62
|
+
|
|
63
|
+
# Version
|
|
64
|
+
__version__ = "2.2.0"
|
|
65
|
+
|
|
66
|
+
logger = structlog.get_logger(__name__)
|
|
67
|
+
|
|
68
|
+
# =============================================================================
|
|
69
|
+
# Security Configuration
|
|
70
|
+
# =============================================================================
|
|
71
|
+
|
|
72
|
+
# Pattern ID validation regex - matches format: pat_YYYYMMDDHHMMSS_hexstring
|
|
73
|
+
PATTERN_ID_REGEX = re.compile(r"^pat_\d{14}_[a-f0-9]{8,16}$")
|
|
74
|
+
|
|
75
|
+
# Alternative pattern formats that are also valid
|
|
76
|
+
PATTERN_ID_ALT_REGEX = re.compile(r"^[a-zA-Z][a-zA-Z0-9_-]{2,63}$")
|
|
77
|
+
|
|
78
|
+
# Rate limiting configuration
|
|
79
|
+
RATE_LIMIT_WINDOW_SECONDS = 60
|
|
80
|
+
RATE_LIMIT_MAX_REQUESTS = 100 # Per IP per window
|
|
81
|
+
|
|
82
|
+
|
|
83
|
+
def _validate_pattern_id(pattern_id: str) -> bool:
|
|
84
|
+
"""Validate pattern ID to prevent path traversal and injection attacks.
|
|
85
|
+
|
|
86
|
+
Args:
|
|
87
|
+
pattern_id: The pattern ID to validate
|
|
88
|
+
|
|
89
|
+
Returns:
|
|
90
|
+
True if valid, False otherwise
|
|
91
|
+
|
|
92
|
+
"""
|
|
93
|
+
if not pattern_id or not isinstance(pattern_id, str):
|
|
94
|
+
return False
|
|
95
|
+
|
|
96
|
+
# Check for path traversal attempts
|
|
97
|
+
if ".." in pattern_id or "/" in pattern_id or "\\" in pattern_id:
|
|
98
|
+
return False
|
|
99
|
+
|
|
100
|
+
# Check for null bytes
|
|
101
|
+
if "\x00" in pattern_id:
|
|
102
|
+
return False
|
|
103
|
+
|
|
104
|
+
# Check length bounds
|
|
105
|
+
if len(pattern_id) < 3 or len(pattern_id) > 64:
|
|
106
|
+
return False
|
|
107
|
+
|
|
108
|
+
# Must match one of the valid formats
|
|
109
|
+
return bool(PATTERN_ID_REGEX.match(pattern_id) or PATTERN_ID_ALT_REGEX.match(pattern_id))
|
|
110
|
+
|
|
111
|
+
|
|
112
|
+
def _validate_agent_id(agent_id: str) -> bool:
|
|
113
|
+
"""Validate agent ID format.
|
|
114
|
+
|
|
115
|
+
Args:
|
|
116
|
+
agent_id: The agent ID to validate
|
|
117
|
+
|
|
118
|
+
Returns:
|
|
119
|
+
True if valid, False otherwise
|
|
120
|
+
|
|
121
|
+
"""
|
|
122
|
+
if not agent_id or not isinstance(agent_id, str):
|
|
123
|
+
return False
|
|
124
|
+
|
|
125
|
+
# Check for dangerous characters (path separators, null bytes, command injection)
|
|
126
|
+
# Note: "." and "@" are allowed for email-style user IDs
|
|
127
|
+
if any(c in agent_id for c in ["/", "\\", "\x00", ";", "|", "&"]):
|
|
128
|
+
return False
|
|
129
|
+
|
|
130
|
+
# Check length bounds
|
|
131
|
+
if len(agent_id) < 1 or len(agent_id) > 64:
|
|
132
|
+
return False
|
|
133
|
+
|
|
134
|
+
# Simple alphanumeric with some allowed chars
|
|
135
|
+
return bool(re.match(r"^[a-zA-Z0-9_@.-]+$", agent_id))
|
|
136
|
+
|
|
137
|
+
|
|
138
|
+
def _validate_classification(classification: str | None) -> bool:
|
|
139
|
+
"""Validate classification parameter.
|
|
140
|
+
|
|
141
|
+
Args:
|
|
142
|
+
classification: The classification to validate
|
|
143
|
+
|
|
144
|
+
Returns:
|
|
145
|
+
True if valid, False otherwise
|
|
146
|
+
|
|
147
|
+
"""
|
|
148
|
+
if classification is None:
|
|
149
|
+
return True
|
|
150
|
+
if not isinstance(classification, str):
|
|
151
|
+
return False
|
|
152
|
+
return classification.upper() in ("PUBLIC", "INTERNAL", "SENSITIVE")
|
|
153
|
+
|
|
154
|
+
|
|
155
|
+
def _validate_file_path(path: str, allowed_dir: str | None = None) -> Path:
|
|
156
|
+
"""Validate file path to prevent path traversal and arbitrary writes.
|
|
157
|
+
|
|
158
|
+
Args:
|
|
159
|
+
path: Path to validate
|
|
160
|
+
allowed_dir: Optional directory that must contain the path
|
|
161
|
+
|
|
162
|
+
Returns:
|
|
163
|
+
Resolved absolute Path object
|
|
164
|
+
|
|
165
|
+
Raises:
|
|
166
|
+
ValueError: If path is invalid or outside allowed directory
|
|
167
|
+
|
|
168
|
+
"""
|
|
169
|
+
if not path or not isinstance(path, str):
|
|
170
|
+
raise ValueError("path must be a non-empty string")
|
|
171
|
+
|
|
172
|
+
# Check for null bytes
|
|
173
|
+
if "\x00" in path:
|
|
174
|
+
raise ValueError("path contains null bytes")
|
|
175
|
+
|
|
176
|
+
try:
|
|
177
|
+
# Resolve to absolute path
|
|
178
|
+
resolved = Path(path).resolve()
|
|
179
|
+
except (OSError, RuntimeError) as e:
|
|
180
|
+
raise ValueError(f"Invalid path: {e}")
|
|
181
|
+
|
|
182
|
+
# Check if within allowed directory
|
|
183
|
+
if allowed_dir:
|
|
184
|
+
try:
|
|
185
|
+
allowed = Path(allowed_dir).resolve()
|
|
186
|
+
resolved.relative_to(allowed)
|
|
187
|
+
except ValueError:
|
|
188
|
+
raise ValueError(f"path must be within {allowed_dir}")
|
|
189
|
+
|
|
190
|
+
# Check for dangerous system paths
|
|
191
|
+
dangerous_paths = ["/etc", "/sys", "/proc", "/dev"]
|
|
192
|
+
for dangerous in dangerous_paths:
|
|
193
|
+
if str(resolved).startswith(dangerous):
|
|
194
|
+
raise ValueError(f"Cannot write to system directory: {dangerous}")
|
|
195
|
+
|
|
196
|
+
return resolved
|
|
197
|
+
|
|
198
|
+
|
|
199
|
+
@dataclass
|
|
200
|
+
class ControlPanelConfig:
|
|
201
|
+
"""Configuration for control panel."""
|
|
202
|
+
|
|
203
|
+
redis_host: str = "localhost"
|
|
204
|
+
redis_port: int = 6379
|
|
205
|
+
storage_dir: str = "./memdocs_storage"
|
|
206
|
+
audit_dir: str = "./logs"
|
|
207
|
+
auto_start_redis: bool = True
|
|
208
|
+
|
|
209
|
+
|
|
210
|
+
class MemoryControlPanel:
|
|
211
|
+
"""Enterprise control panel for Empathy memory management.
|
|
212
|
+
|
|
213
|
+
Provides unified management interface for:
|
|
214
|
+
- Short-term memory (Redis)
|
|
215
|
+
- Long-term memory (MemDocs/file storage)
|
|
216
|
+
- Security and compliance controls
|
|
217
|
+
|
|
218
|
+
Example:
|
|
219
|
+
>>> panel = MemoryControlPanel()
|
|
220
|
+
>>> status = panel.status()
|
|
221
|
+
>>> print(f"Redis: {status['redis']['status']}")
|
|
222
|
+
>>> print(f"Patterns: {status['long_term']['pattern_count']}")
|
|
223
|
+
|
|
224
|
+
"""
|
|
225
|
+
|
|
226
|
+
def __init__(self, config: ControlPanelConfig | None = None):
|
|
227
|
+
"""Initialize control panel.
|
|
228
|
+
|
|
229
|
+
Args:
|
|
230
|
+
config: Configuration options (uses defaults if None)
|
|
231
|
+
|
|
232
|
+
"""
|
|
233
|
+
self.config = config or ControlPanelConfig()
|
|
234
|
+
self._redis_status: RedisStatus | None = None
|
|
235
|
+
self._short_term: RedisShortTermMemory | None = None
|
|
236
|
+
self._long_term: SecureMemDocsIntegration | None = None
|
|
237
|
+
|
|
238
|
+
def status(self) -> dict[str, Any]:
|
|
239
|
+
"""Get comprehensive status of memory system.
|
|
240
|
+
|
|
241
|
+
Returns:
|
|
242
|
+
Dictionary with status of all memory components
|
|
243
|
+
|
|
244
|
+
"""
|
|
245
|
+
redis_running = _check_redis_running(self.config.redis_host, self.config.redis_port)
|
|
246
|
+
|
|
247
|
+
result = {
|
|
248
|
+
"timestamp": datetime.utcnow().isoformat() + "Z",
|
|
249
|
+
"redis": {
|
|
250
|
+
"status": "running" if redis_running else "stopped",
|
|
251
|
+
"host": self.config.redis_host,
|
|
252
|
+
"port": self.config.redis_port,
|
|
253
|
+
"method": self._redis_status.method.value if self._redis_status else "unknown",
|
|
254
|
+
},
|
|
255
|
+
"long_term": {
|
|
256
|
+
"status": (
|
|
257
|
+
"available" if Path(self.config.storage_dir).exists() else "not_initialized"
|
|
258
|
+
),
|
|
259
|
+
"storage_dir": self.config.storage_dir,
|
|
260
|
+
"pattern_count": self._count_patterns(),
|
|
261
|
+
},
|
|
262
|
+
"config": {
|
|
263
|
+
"auto_start_redis": self.config.auto_start_redis,
|
|
264
|
+
"audit_dir": self.config.audit_dir,
|
|
265
|
+
},
|
|
266
|
+
}
|
|
267
|
+
|
|
268
|
+
return result
|
|
269
|
+
|
|
270
|
+
def start_redis(self, verbose: bool = True) -> RedisStatus:
|
|
271
|
+
"""Start Redis if not running.
|
|
272
|
+
|
|
273
|
+
Args:
|
|
274
|
+
verbose: Print status messages
|
|
275
|
+
|
|
276
|
+
Returns:
|
|
277
|
+
RedisStatus with result
|
|
278
|
+
|
|
279
|
+
"""
|
|
280
|
+
self._redis_status = ensure_redis(
|
|
281
|
+
host=self.config.redis_host,
|
|
282
|
+
port=self.config.redis_port,
|
|
283
|
+
auto_start=True,
|
|
284
|
+
verbose=verbose,
|
|
285
|
+
)
|
|
286
|
+
return self._redis_status
|
|
287
|
+
|
|
288
|
+
def stop_redis(self) -> bool:
|
|
289
|
+
"""Stop Redis if we started it.
|
|
290
|
+
|
|
291
|
+
Returns:
|
|
292
|
+
True if stopped successfully
|
|
293
|
+
|
|
294
|
+
"""
|
|
295
|
+
if self._redis_status and self._redis_status.method != RedisStartMethod.ALREADY_RUNNING:
|
|
296
|
+
return stop_redis(self._redis_status.method)
|
|
297
|
+
return False
|
|
298
|
+
|
|
299
|
+
def get_statistics(self) -> MemoryStats:
|
|
300
|
+
"""Collect comprehensive statistics.
|
|
301
|
+
|
|
302
|
+
Returns:
|
|
303
|
+
MemoryStats with all metrics
|
|
304
|
+
|
|
305
|
+
"""
|
|
306
|
+
start_time = time.perf_counter()
|
|
307
|
+
stats = MemoryStats(collected_at=datetime.utcnow().isoformat() + "Z")
|
|
308
|
+
|
|
309
|
+
# Redis stats
|
|
310
|
+
redis_running = _check_redis_running(self.config.redis_host, self.config.redis_port)
|
|
311
|
+
stats.redis_available = redis_running
|
|
312
|
+
|
|
313
|
+
if redis_running:
|
|
314
|
+
try:
|
|
315
|
+
memory = self._get_short_term()
|
|
316
|
+
|
|
317
|
+
# Measure Redis ping latency
|
|
318
|
+
ping_start = time.perf_counter()
|
|
319
|
+
redis_stats = memory.get_stats()
|
|
320
|
+
stats.redis_ping_ms = (time.perf_counter() - ping_start) * 1000
|
|
321
|
+
|
|
322
|
+
stats.redis_method = redis_stats.get("mode", "redis")
|
|
323
|
+
stats.redis_keys_total = redis_stats.get("total_keys", 0)
|
|
324
|
+
stats.redis_keys_working = redis_stats.get("working_keys", 0)
|
|
325
|
+
stats.redis_keys_staged = redis_stats.get("staged_keys", 0)
|
|
326
|
+
stats.redis_memory_used = redis_stats.get("used_memory", "0")
|
|
327
|
+
except Exception as e:
|
|
328
|
+
logger.warning("redis_stats_failed", error=str(e))
|
|
329
|
+
|
|
330
|
+
# Long-term stats
|
|
331
|
+
storage_path = Path(self.config.storage_dir)
|
|
332
|
+
if storage_path.exists():
|
|
333
|
+
stats.long_term_available = True
|
|
334
|
+
|
|
335
|
+
# Calculate storage size
|
|
336
|
+
try:
|
|
337
|
+
stats.storage_bytes = sum(
|
|
338
|
+
f.stat().st_size for f in storage_path.glob("**/*") if f.is_file()
|
|
339
|
+
)
|
|
340
|
+
except Exception as e:
|
|
341
|
+
logger.debug("storage_size_calculation_failed", error=str(e))
|
|
342
|
+
stats.storage_bytes = 0
|
|
343
|
+
|
|
344
|
+
try:
|
|
345
|
+
long_term = self._get_long_term()
|
|
346
|
+
lt_stats = long_term.get_statistics()
|
|
347
|
+
stats.patterns_total = lt_stats.get("total_patterns", 0)
|
|
348
|
+
stats.patterns_public = lt_stats.get("by_classification", {}).get("PUBLIC", 0)
|
|
349
|
+
stats.patterns_internal = lt_stats.get("by_classification", {}).get("INTERNAL", 0)
|
|
350
|
+
stats.patterns_sensitive = lt_stats.get("by_classification", {}).get("SENSITIVE", 0)
|
|
351
|
+
stats.patterns_encrypted = lt_stats.get("encrypted_count", 0)
|
|
352
|
+
except Exception as e:
|
|
353
|
+
logger.warning("long_term_stats_failed", error=str(e))
|
|
354
|
+
|
|
355
|
+
# Total collection time
|
|
356
|
+
stats.collection_time_ms = (time.perf_counter() - start_time) * 1000
|
|
357
|
+
|
|
358
|
+
return stats
|
|
359
|
+
|
|
360
|
+
def list_patterns(
|
|
361
|
+
self,
|
|
362
|
+
classification: str | None = None,
|
|
363
|
+
limit: int = 100,
|
|
364
|
+
) -> list[dict[str, Any]]:
|
|
365
|
+
"""List patterns in long-term storage.
|
|
366
|
+
|
|
367
|
+
Args:
|
|
368
|
+
classification: Filter by classification (PUBLIC/INTERNAL/SENSITIVE)
|
|
369
|
+
limit: Maximum patterns to return
|
|
370
|
+
|
|
371
|
+
Returns:
|
|
372
|
+
List of pattern summaries
|
|
373
|
+
|
|
374
|
+
Raises:
|
|
375
|
+
ValueError: If classification is invalid or limit is out of range
|
|
376
|
+
|
|
377
|
+
"""
|
|
378
|
+
# Validate classification
|
|
379
|
+
if not _validate_classification(classification):
|
|
380
|
+
raise ValueError(
|
|
381
|
+
f"Invalid classification '{classification}'. "
|
|
382
|
+
f"Must be PUBLIC, INTERNAL, or SENSITIVE."
|
|
383
|
+
)
|
|
384
|
+
|
|
385
|
+
# Validate limit range
|
|
386
|
+
if limit < 1:
|
|
387
|
+
raise ValueError(f"limit must be positive, got {limit}")
|
|
388
|
+
|
|
389
|
+
if limit > 10000:
|
|
390
|
+
raise ValueError(f"limit too large (max 10000), got {limit}")
|
|
391
|
+
|
|
392
|
+
long_term = self._get_long_term()
|
|
393
|
+
|
|
394
|
+
class_filter = None
|
|
395
|
+
if classification:
|
|
396
|
+
class_filter = Classification[classification.upper()]
|
|
397
|
+
|
|
398
|
+
# Use admin user for listing
|
|
399
|
+
patterns = long_term.list_patterns(
|
|
400
|
+
user_id="admin@system",
|
|
401
|
+
classification=class_filter,
|
|
402
|
+
)
|
|
403
|
+
|
|
404
|
+
return patterns[:limit]
|
|
405
|
+
|
|
406
|
+
def delete_pattern(self, pattern_id: str, user_id: str = "admin@system") -> bool:
|
|
407
|
+
"""Delete a pattern from long-term storage.
|
|
408
|
+
|
|
409
|
+
Args:
|
|
410
|
+
pattern_id: Pattern to delete
|
|
411
|
+
user_id: User performing deletion (for audit)
|
|
412
|
+
|
|
413
|
+
Returns:
|
|
414
|
+
True if deleted
|
|
415
|
+
|
|
416
|
+
Raises:
|
|
417
|
+
ValueError: If pattern_id or user_id format is invalid
|
|
418
|
+
|
|
419
|
+
"""
|
|
420
|
+
# Validate pattern_id
|
|
421
|
+
if not _validate_pattern_id(pattern_id):
|
|
422
|
+
raise ValueError(f"Invalid pattern_id format: {pattern_id}")
|
|
423
|
+
|
|
424
|
+
# Validate user_id (reuse agent_id validation - same format)
|
|
425
|
+
if not _validate_agent_id(user_id):
|
|
426
|
+
raise ValueError(f"Invalid user_id format: {user_id}")
|
|
427
|
+
|
|
428
|
+
long_term = self._get_long_term()
|
|
429
|
+
try:
|
|
430
|
+
return long_term.delete_pattern(pattern_id, user_id)
|
|
431
|
+
except Exception as e:
|
|
432
|
+
logger.error("delete_pattern_failed", pattern_id=pattern_id, error=str(e))
|
|
433
|
+
return (
|
|
434
|
+
False # Graceful degradation - validation errors raise, storage errors return False
|
|
435
|
+
)
|
|
436
|
+
|
|
437
|
+
def clear_short_term(self, agent_id: str = "admin") -> int:
|
|
438
|
+
"""Clear all short-term memory for an agent.
|
|
439
|
+
|
|
440
|
+
Args:
|
|
441
|
+
agent_id: Agent whose memory to clear
|
|
442
|
+
|
|
443
|
+
Returns:
|
|
444
|
+
Number of keys deleted
|
|
445
|
+
|
|
446
|
+
Raises:
|
|
447
|
+
ValueError: If agent_id format is invalid
|
|
448
|
+
|
|
449
|
+
"""
|
|
450
|
+
# Validate agent_id
|
|
451
|
+
if not _validate_agent_id(agent_id):
|
|
452
|
+
raise ValueError(f"Invalid agent_id format: {agent_id}")
|
|
453
|
+
|
|
454
|
+
memory = self._get_short_term()
|
|
455
|
+
creds = AgentCredentials(agent_id=agent_id, tier=AccessTier.STEWARD)
|
|
456
|
+
return memory.clear_working_memory(creds)
|
|
457
|
+
|
|
458
|
+
def export_patterns(self, output_path: str, classification: str | None = None) -> int:
|
|
459
|
+
"""Export patterns to JSON file.
|
|
460
|
+
|
|
461
|
+
Args:
|
|
462
|
+
output_path: Path to output file
|
|
463
|
+
classification: Filter by classification
|
|
464
|
+
|
|
465
|
+
Returns:
|
|
466
|
+
Number of patterns exported
|
|
467
|
+
|
|
468
|
+
Raises:
|
|
469
|
+
ValueError: If output_path is invalid, classification invalid, or path is unsafe
|
|
470
|
+
|
|
471
|
+
"""
|
|
472
|
+
# Validate file path to prevent path traversal attacks
|
|
473
|
+
validated_path = _validate_file_path(output_path)
|
|
474
|
+
|
|
475
|
+
# Validate classification (list_patterns will also validate, but do it early)
|
|
476
|
+
if not _validate_classification(classification):
|
|
477
|
+
raise ValueError(
|
|
478
|
+
f"Invalid classification '{classification}'. "
|
|
479
|
+
f"Must be PUBLIC, INTERNAL, or SENSITIVE."
|
|
480
|
+
)
|
|
481
|
+
|
|
482
|
+
patterns = self.list_patterns(classification=classification)
|
|
483
|
+
|
|
484
|
+
export_data = {
|
|
485
|
+
"exported_at": datetime.utcnow().isoformat() + "Z",
|
|
486
|
+
"classification_filter": classification,
|
|
487
|
+
"pattern_count": len(patterns),
|
|
488
|
+
"patterns": patterns,
|
|
489
|
+
}
|
|
490
|
+
|
|
491
|
+
with open(validated_path, "w") as f:
|
|
492
|
+
json.dump(export_data, f, indent=2)
|
|
493
|
+
|
|
494
|
+
return len(patterns)
|
|
495
|
+
|
|
496
|
+
def health_check(self) -> dict[str, Any]:
|
|
497
|
+
"""Perform comprehensive health check.
|
|
498
|
+
|
|
499
|
+
Returns:
|
|
500
|
+
Health status with recommendations
|
|
501
|
+
|
|
502
|
+
"""
|
|
503
|
+
status = self.status()
|
|
504
|
+
stats = self.get_statistics()
|
|
505
|
+
|
|
506
|
+
checks: list[dict[str, str]] = []
|
|
507
|
+
recommendations: list[str] = []
|
|
508
|
+
health: dict[str, Any] = {
|
|
509
|
+
"overall": "healthy",
|
|
510
|
+
"checks": checks,
|
|
511
|
+
"recommendations": recommendations,
|
|
512
|
+
}
|
|
513
|
+
|
|
514
|
+
# Check Redis
|
|
515
|
+
if status["redis"]["status"] == "running":
|
|
516
|
+
checks.append({"name": "redis", "status": "pass", "message": "Redis is running"})
|
|
517
|
+
else:
|
|
518
|
+
checks.append({"name": "redis", "status": "warn", "message": "Redis not running"})
|
|
519
|
+
recommendations.append("Start Redis for multi-agent coordination")
|
|
520
|
+
health["overall"] = "degraded"
|
|
521
|
+
|
|
522
|
+
# Check long-term storage
|
|
523
|
+
if status["long_term"]["status"] == "available":
|
|
524
|
+
checks.append({"name": "long_term", "status": "pass", "message": "Storage available"})
|
|
525
|
+
else:
|
|
526
|
+
checks.append(
|
|
527
|
+
{"name": "long_term", "status": "warn", "message": "Storage not initialized"},
|
|
528
|
+
)
|
|
529
|
+
recommendations.append("Initialize long-term storage directory")
|
|
530
|
+
health["overall"] = "degraded"
|
|
531
|
+
|
|
532
|
+
# Check pattern count
|
|
533
|
+
if stats.patterns_total > 0:
|
|
534
|
+
checks.append(
|
|
535
|
+
{
|
|
536
|
+
"name": "patterns",
|
|
537
|
+
"status": "pass",
|
|
538
|
+
"message": f"{stats.patterns_total} patterns stored",
|
|
539
|
+
},
|
|
540
|
+
)
|
|
541
|
+
else:
|
|
542
|
+
checks.append(
|
|
543
|
+
{"name": "patterns", "status": "info", "message": "No patterns stored yet"},
|
|
544
|
+
)
|
|
545
|
+
|
|
546
|
+
# Check encryption
|
|
547
|
+
if stats.patterns_sensitive > 0 and stats.patterns_encrypted < stats.patterns_sensitive:
|
|
548
|
+
checks.append(
|
|
549
|
+
{
|
|
550
|
+
"name": "encryption",
|
|
551
|
+
"status": "fail",
|
|
552
|
+
"message": "Some sensitive patterns are not encrypted",
|
|
553
|
+
},
|
|
554
|
+
)
|
|
555
|
+
recommendations.append("Enable encryption for sensitive patterns")
|
|
556
|
+
health["overall"] = "unhealthy"
|
|
557
|
+
elif stats.patterns_sensitive > 0:
|
|
558
|
+
checks.append(
|
|
559
|
+
{
|
|
560
|
+
"name": "encryption",
|
|
561
|
+
"status": "pass",
|
|
562
|
+
"message": "All sensitive patterns encrypted",
|
|
563
|
+
},
|
|
564
|
+
)
|
|
565
|
+
|
|
566
|
+
return health
|
|
567
|
+
|
|
568
|
+
def _get_short_term(self) -> RedisShortTermMemory:
|
|
569
|
+
"""Get or create short-term memory instance."""
|
|
570
|
+
if self._short_term is None:
|
|
571
|
+
redis_running = _check_redis_running(self.config.redis_host, self.config.redis_port)
|
|
572
|
+
self._short_term = RedisShortTermMemory(
|
|
573
|
+
host=self.config.redis_host,
|
|
574
|
+
port=self.config.redis_port,
|
|
575
|
+
use_mock=not redis_running,
|
|
576
|
+
)
|
|
577
|
+
return self._short_term
|
|
578
|
+
|
|
579
|
+
def _get_long_term(self) -> SecureMemDocsIntegration:
|
|
580
|
+
"""Get or create long-term memory instance."""
|
|
581
|
+
if self._long_term is None:
|
|
582
|
+
self._long_term = SecureMemDocsIntegration(
|
|
583
|
+
storage_dir=self.config.storage_dir,
|
|
584
|
+
audit_log_dir=self.config.audit_dir,
|
|
585
|
+
enable_encryption=True,
|
|
586
|
+
)
|
|
587
|
+
return self._long_term
|
|
588
|
+
|
|
589
|
+
def _count_patterns(self) -> int:
|
|
590
|
+
"""Count patterns in storage.
|
|
591
|
+
|
|
592
|
+
Returns:
|
|
593
|
+
Number of pattern files, or 0 if counting fails
|
|
594
|
+
|
|
595
|
+
"""
|
|
596
|
+
storage_path = Path(self.config.storage_dir)
|
|
597
|
+
if not storage_path.exists():
|
|
598
|
+
return 0
|
|
599
|
+
|
|
600
|
+
try:
|
|
601
|
+
return len(list(storage_path.glob("*.json")))
|
|
602
|
+
except (OSError, PermissionError) as e:
|
|
603
|
+
logger.debug("pattern_count_failed", error=str(e))
|
|
604
|
+
return 0
|
|
605
|
+
|
|
606
|
+
|
|
607
|
+
def print_status(panel: MemoryControlPanel):
|
|
608
|
+
"""Print status in a formatted way."""
|
|
609
|
+
status = panel.status()
|
|
610
|
+
|
|
611
|
+
print("\n" + "=" * 50)
|
|
612
|
+
print("EMPATHY MEMORY STATUS")
|
|
613
|
+
print("=" * 50)
|
|
614
|
+
|
|
615
|
+
# Redis
|
|
616
|
+
redis = status["redis"]
|
|
617
|
+
redis_icon = "✓" if redis["status"] == "running" else "✗"
|
|
618
|
+
print(f"\n{redis_icon} Redis: {redis['status'].upper()}")
|
|
619
|
+
print(f" Host: {redis['host']}:{redis['port']}")
|
|
620
|
+
if redis["method"] != "unknown":
|
|
621
|
+
print(f" Method: {redis['method']}")
|
|
622
|
+
|
|
623
|
+
# Long-term
|
|
624
|
+
lt = status["long_term"]
|
|
625
|
+
lt_icon = "✓" if lt["status"] == "available" else "○"
|
|
626
|
+
print(f"\n{lt_icon} Long-term Storage: {lt['status'].upper()}")
|
|
627
|
+
print(f" Path: {lt['storage_dir']}")
|
|
628
|
+
print(f" Patterns: {lt['pattern_count']}")
|
|
629
|
+
|
|
630
|
+
print()
|
|
631
|
+
|
|
632
|
+
|
|
633
|
+
def print_stats(panel: MemoryControlPanel):
|
|
634
|
+
"""Print statistics in a formatted way."""
|
|
635
|
+
stats = panel.get_statistics()
|
|
636
|
+
|
|
637
|
+
print("\n" + "=" * 50)
|
|
638
|
+
print("EMPATHY MEMORY STATISTICS")
|
|
639
|
+
print("=" * 50)
|
|
640
|
+
|
|
641
|
+
print("\nShort-term Memory (Redis):")
|
|
642
|
+
print(f" Available: {stats.redis_available}")
|
|
643
|
+
if stats.redis_available:
|
|
644
|
+
print(f" Total keys: {stats.redis_keys_total}")
|
|
645
|
+
print(f" Working keys: {stats.redis_keys_working}")
|
|
646
|
+
print(f" Staged patterns: {stats.redis_keys_staged}")
|
|
647
|
+
print(f" Memory used: {stats.redis_memory_used}")
|
|
648
|
+
|
|
649
|
+
print("\nLong-term Memory (Patterns):")
|
|
650
|
+
print(f" Available: {stats.long_term_available}")
|
|
651
|
+
print(f" Total patterns: {stats.patterns_total}")
|
|
652
|
+
print(f" └─ PUBLIC: {stats.patterns_public}")
|
|
653
|
+
print(f" └─ INTERNAL: {stats.patterns_internal}")
|
|
654
|
+
print(f" └─ SENSITIVE: {stats.patterns_sensitive}")
|
|
655
|
+
print(f" Encrypted: {stats.patterns_encrypted}")
|
|
656
|
+
|
|
657
|
+
# Performance stats
|
|
658
|
+
print("\nPerformance:")
|
|
659
|
+
if stats.redis_ping_ms > 0:
|
|
660
|
+
print(f" Redis latency: {stats.redis_ping_ms:.2f}ms")
|
|
661
|
+
if stats.storage_bytes > 0:
|
|
662
|
+
size_kb = stats.storage_bytes / 1024
|
|
663
|
+
print(f" Storage size: {size_kb:.1f} KB")
|
|
664
|
+
print(f" Stats collected in: {stats.collection_time_ms:.2f}ms")
|
|
665
|
+
|
|
666
|
+
print()
|
|
667
|
+
|
|
668
|
+
|
|
669
|
+
def print_health(panel: MemoryControlPanel):
|
|
670
|
+
"""Print health check in a formatted way."""
|
|
671
|
+
health = panel.health_check()
|
|
672
|
+
|
|
673
|
+
print("\n" + "=" * 50)
|
|
674
|
+
print("EMPATHY MEMORY HEALTH CHECK")
|
|
675
|
+
print("=" * 50)
|
|
676
|
+
|
|
677
|
+
status_icons = {"pass": "✓", "warn": "⚠", "fail": "✗", "info": "ℹ"}
|
|
678
|
+
overall_icon = (
|
|
679
|
+
"✓" if health["overall"] == "healthy" else "⚠" if health["overall"] == "degraded" else "✗"
|
|
680
|
+
)
|
|
681
|
+
|
|
682
|
+
print(f"\n{overall_icon} Overall: {health['overall'].upper()}")
|
|
683
|
+
|
|
684
|
+
print("\nChecks:")
|
|
685
|
+
for check in health["checks"]:
|
|
686
|
+
icon = status_icons.get(check["status"], "?")
|
|
687
|
+
print(f" {icon} {check['name']}: {check['message']}")
|
|
688
|
+
|
|
689
|
+
if health["recommendations"]:
|
|
690
|
+
print("\nRecommendations:")
|
|
691
|
+
for rec in health["recommendations"]:
|
|
692
|
+
print(f" • {rec}")
|
|
693
|
+
|
|
694
|
+
print()
|
|
695
|
+
|
|
696
|
+
|
|
697
|
+
class MemoryAPIHandler(BaseHTTPRequestHandler):
|
|
698
|
+
"""HTTP request handler for Memory Control Panel API."""
|
|
699
|
+
|
|
700
|
+
panel: MemoryControlPanel | None = None # Set by server
|
|
701
|
+
rate_limiter: RateLimiter | None = None # Set by server
|
|
702
|
+
api_auth: APIKeyAuth | None = None # Set by server
|
|
703
|
+
allowed_origins: list[str] | None = None # Set by server for CORS
|
|
704
|
+
|
|
705
|
+
def log_message(self, format, *args):
|
|
706
|
+
"""Override to use structlog instead of stderr."""
|
|
707
|
+
logger.debug("api_request", message=format % args)
|
|
708
|
+
|
|
709
|
+
def _get_client_ip(self) -> str:
|
|
710
|
+
"""Get client IP address, handling proxies."""
|
|
711
|
+
# Check for X-Forwarded-For header (behind proxy)
|
|
712
|
+
forwarded = self.headers.get("X-Forwarded-For")
|
|
713
|
+
if forwarded:
|
|
714
|
+
# Take the first IP in the chain
|
|
715
|
+
return forwarded.split(",")[0].strip()
|
|
716
|
+
# Fall back to direct connection
|
|
717
|
+
return self.client_address[0]
|
|
718
|
+
|
|
719
|
+
def _check_rate_limit(self) -> bool:
|
|
720
|
+
"""Check if request should be rate limited."""
|
|
721
|
+
if self.rate_limiter is None:
|
|
722
|
+
return True
|
|
723
|
+
return self.rate_limiter.is_allowed(self._get_client_ip())
|
|
724
|
+
|
|
725
|
+
def _check_auth(self) -> bool:
|
|
726
|
+
"""Check API key authentication."""
|
|
727
|
+
if self.api_auth is None or not self.api_auth.enabled:
|
|
728
|
+
return True
|
|
729
|
+
|
|
730
|
+
# Check Authorization header
|
|
731
|
+
auth_header = self.headers.get("Authorization")
|
|
732
|
+
if auth_header and auth_header.startswith("Bearer "):
|
|
733
|
+
token = auth_header[7:]
|
|
734
|
+
return self.api_auth.is_valid(token)
|
|
735
|
+
|
|
736
|
+
# Check X-API-Key header
|
|
737
|
+
api_key = self.headers.get("X-API-Key")
|
|
738
|
+
if api_key:
|
|
739
|
+
return self.api_auth.is_valid(api_key)
|
|
740
|
+
|
|
741
|
+
return False
|
|
742
|
+
|
|
743
|
+
def _get_cors_origin(self) -> str:
|
|
744
|
+
"""Get appropriate CORS origin header value."""
|
|
745
|
+
if self.allowed_origins is None:
|
|
746
|
+
# Default: allow localhost only
|
|
747
|
+
origin = self.headers.get("Origin", "")
|
|
748
|
+
if origin.startswith("http://localhost") or origin.startswith("https://localhost"):
|
|
749
|
+
return origin
|
|
750
|
+
return "http://localhost:8765"
|
|
751
|
+
|
|
752
|
+
if "*" in self.allowed_origins:
|
|
753
|
+
return "*"
|
|
754
|
+
|
|
755
|
+
origin = self.headers.get("Origin", "")
|
|
756
|
+
if origin in self.allowed_origins:
|
|
757
|
+
return origin
|
|
758
|
+
|
|
759
|
+
return self.allowed_origins[0] if self.allowed_origins else ""
|
|
760
|
+
|
|
761
|
+
def _send_json(self, data: Any, status: int = 200):
|
|
762
|
+
"""Send JSON response."""
|
|
763
|
+
self.send_response(status)
|
|
764
|
+
self.send_header("Content-Type", "application/json")
|
|
765
|
+
self.send_header("Access-Control-Allow-Origin", self._get_cors_origin())
|
|
766
|
+
self.send_header("Access-Control-Allow-Methods", "GET, POST, DELETE, OPTIONS")
|
|
767
|
+
self.send_header("Access-Control-Allow-Headers", "Content-Type, Authorization, X-API-Key")
|
|
768
|
+
|
|
769
|
+
# Add rate limit headers if available
|
|
770
|
+
if self.rate_limiter:
|
|
771
|
+
remaining = self.rate_limiter.get_remaining(self._get_client_ip())
|
|
772
|
+
self.send_header("X-RateLimit-Remaining", str(remaining))
|
|
773
|
+
self.send_header("X-RateLimit-Limit", str(self.rate_limiter.max_requests))
|
|
774
|
+
|
|
775
|
+
self.end_headers()
|
|
776
|
+
self.wfile.write(json.dumps(data).encode())
|
|
777
|
+
|
|
778
|
+
def _send_error(self, message: str, status: int = 400):
|
|
779
|
+
"""Send error response."""
|
|
780
|
+
self._send_json({"error": message, "status_code": status}, status)
|
|
781
|
+
|
|
782
|
+
def do_OPTIONS(self):
|
|
783
|
+
"""Handle CORS preflight."""
|
|
784
|
+
self.send_response(200)
|
|
785
|
+
self.send_header("Access-Control-Allow-Origin", self._get_cors_origin())
|
|
786
|
+
self.send_header("Access-Control-Allow-Methods", "GET, POST, DELETE, OPTIONS")
|
|
787
|
+
self.send_header("Access-Control-Allow-Headers", "Content-Type, Authorization, X-API-Key")
|
|
788
|
+
self.end_headers()
|
|
789
|
+
|
|
790
|
+
def do_GET(self):
|
|
791
|
+
"""Handle GET requests."""
|
|
792
|
+
# Rate limiting check
|
|
793
|
+
if not self._check_rate_limit():
|
|
794
|
+
self._send_error("Rate limit exceeded. Try again later.", 429)
|
|
795
|
+
return
|
|
796
|
+
|
|
797
|
+
# Authentication check (skip for ping endpoint)
|
|
798
|
+
parsed = urlparse(self.path)
|
|
799
|
+
path = parsed.path
|
|
800
|
+
|
|
801
|
+
if path != "/api/ping" and not self._check_auth():
|
|
802
|
+
self._send_error("Unauthorized. Provide valid API key.", 401)
|
|
803
|
+
return
|
|
804
|
+
|
|
805
|
+
query = parse_qs(parsed.query)
|
|
806
|
+
|
|
807
|
+
if path == "/api/ping":
|
|
808
|
+
self._send_json({"status": "ok", "service": "empathy-memory"})
|
|
809
|
+
|
|
810
|
+
elif path == "/api/status":
|
|
811
|
+
self._send_json(self.panel.status())
|
|
812
|
+
|
|
813
|
+
elif path == "/api/stats":
|
|
814
|
+
stats = self.panel.get_statistics()
|
|
815
|
+
self._send_json(asdict(stats))
|
|
816
|
+
|
|
817
|
+
elif path == "/api/health":
|
|
818
|
+
self._send_json(self.panel.health_check())
|
|
819
|
+
|
|
820
|
+
elif path == "/api/patterns":
|
|
821
|
+
classification = query.get("classification", [None])[0]
|
|
822
|
+
|
|
823
|
+
# Validate classification
|
|
824
|
+
if not _validate_classification(classification):
|
|
825
|
+
self._send_error("Invalid classification. Use PUBLIC, INTERNAL, or SENSITIVE.", 400)
|
|
826
|
+
return
|
|
827
|
+
|
|
828
|
+
# Validate and sanitize limit
|
|
829
|
+
try:
|
|
830
|
+
limit = int(query.get("limit", [100])[0])
|
|
831
|
+
limit = max(1, min(limit, 1000)) # Clamp between 1 and 1000
|
|
832
|
+
except (ValueError, TypeError):
|
|
833
|
+
limit = 100
|
|
834
|
+
|
|
835
|
+
patterns = self.panel.list_patterns(classification=classification, limit=limit)
|
|
836
|
+
self._send_json(patterns)
|
|
837
|
+
|
|
838
|
+
elif path == "/api/patterns/export":
|
|
839
|
+
classification = query.get("classification", [None])[0]
|
|
840
|
+
|
|
841
|
+
# Validate classification
|
|
842
|
+
if not _validate_classification(classification):
|
|
843
|
+
self._send_error("Invalid classification. Use PUBLIC, INTERNAL, or SENSITIVE.", 400)
|
|
844
|
+
return
|
|
845
|
+
|
|
846
|
+
patterns = self.panel.list_patterns(classification=classification)
|
|
847
|
+
export_data = {
|
|
848
|
+
"exported_at": datetime.utcnow().isoformat() + "Z",
|
|
849
|
+
"classification_filter": classification,
|
|
850
|
+
"patterns": patterns,
|
|
851
|
+
}
|
|
852
|
+
self._send_json({"pattern_count": len(patterns), "export_data": export_data})
|
|
853
|
+
|
|
854
|
+
elif path.startswith("/api/patterns/"):
|
|
855
|
+
pattern_id = path.split("/")[-1]
|
|
856
|
+
|
|
857
|
+
# Validate pattern ID
|
|
858
|
+
if not _validate_pattern_id(pattern_id):
|
|
859
|
+
self._send_error("Invalid pattern ID format", 400)
|
|
860
|
+
return
|
|
861
|
+
|
|
862
|
+
patterns = self.panel.list_patterns()
|
|
863
|
+
pattern = next((p for p in patterns if p.get("pattern_id") == pattern_id), None)
|
|
864
|
+
if pattern:
|
|
865
|
+
self._send_json(pattern)
|
|
866
|
+
else:
|
|
867
|
+
self._send_error("Pattern not found", 404)
|
|
868
|
+
|
|
869
|
+
else:
|
|
870
|
+
self._send_error("Not found", 404)
|
|
871
|
+
|
|
872
|
+
def do_POST(self):
|
|
873
|
+
"""Handle POST requests."""
|
|
874
|
+
# Rate limiting check
|
|
875
|
+
if not self._check_rate_limit():
|
|
876
|
+
self._send_error("Rate limit exceeded. Try again later.", 429)
|
|
877
|
+
return
|
|
878
|
+
|
|
879
|
+
# Authentication check
|
|
880
|
+
if not self._check_auth():
|
|
881
|
+
self._send_error("Unauthorized. Provide valid API key.", 401)
|
|
882
|
+
return
|
|
883
|
+
|
|
884
|
+
parsed = urlparse(self.path)
|
|
885
|
+
path = parsed.path
|
|
886
|
+
|
|
887
|
+
# Read body if present (with size limit to prevent DoS)
|
|
888
|
+
content_length = int(self.headers.get("Content-Length", 0))
|
|
889
|
+
max_body_size = 1024 * 1024 # 1MB limit
|
|
890
|
+
if content_length > max_body_size:
|
|
891
|
+
self._send_error("Request body too large", 413)
|
|
892
|
+
return
|
|
893
|
+
|
|
894
|
+
body = {}
|
|
895
|
+
if content_length > 0:
|
|
896
|
+
try:
|
|
897
|
+
body = json.loads(self.rfile.read(content_length).decode())
|
|
898
|
+
except (json.JSONDecodeError, UnicodeDecodeError):
|
|
899
|
+
self._send_error("Invalid JSON body", 400)
|
|
900
|
+
return
|
|
901
|
+
|
|
902
|
+
if path == "/api/redis/start":
|
|
903
|
+
status = self.panel.start_redis(verbose=False)
|
|
904
|
+
self._send_json(
|
|
905
|
+
{
|
|
906
|
+
"success": status.available,
|
|
907
|
+
"message": f"Redis {'OK' if status.available else 'failed'} via {status.method.value}",
|
|
908
|
+
},
|
|
909
|
+
)
|
|
910
|
+
|
|
911
|
+
elif path == "/api/redis/stop":
|
|
912
|
+
stopped = self.panel.stop_redis()
|
|
913
|
+
self._send_json(
|
|
914
|
+
{
|
|
915
|
+
"success": stopped,
|
|
916
|
+
"message": "Redis stopped" if stopped else "Could not stop Redis",
|
|
917
|
+
},
|
|
918
|
+
)
|
|
919
|
+
|
|
920
|
+
elif path == "/api/memory/clear":
|
|
921
|
+
agent_id = body.get("agent_id", "admin")
|
|
922
|
+
|
|
923
|
+
# Validate agent ID
|
|
924
|
+
if not _validate_agent_id(agent_id):
|
|
925
|
+
self._send_error("Invalid agent ID format", 400)
|
|
926
|
+
return
|
|
927
|
+
|
|
928
|
+
deleted = self.panel.clear_short_term(agent_id)
|
|
929
|
+
self._send_json({"keys_deleted": deleted})
|
|
930
|
+
|
|
931
|
+
else:
|
|
932
|
+
self._send_error("Not found", 404)
|
|
933
|
+
|
|
934
|
+
+    def do_DELETE(self):
+        """Handle DELETE requests."""
+        # Rate limiting check
+        if not self._check_rate_limit():
+            self._send_error("Rate limit exceeded. Try again later.", 429)
+            return
+
+        # Authentication check
+        if not self._check_auth():
+            self._send_error("Unauthorized. Provide valid API key.", 401)
+            return
+
+        parsed = urlparse(self.path)
+        path = parsed.path
+
+        if path.startswith("/api/patterns/"):
+            pattern_id = path.split("/")[-1]
+
+            # Validate pattern ID to prevent path traversal
+            if not _validate_pattern_id(pattern_id):
+                self._send_error("Invalid pattern ID format", 400)
+                return
+
+            deleted = self.panel.delete_pattern(pattern_id)
+            self._send_json({"success": deleted})
+        else:
+            self._send_error("Not found", 404)
+
+
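With the GET, POST, and DELETE handlers above in place, the API can be exercised with nothing but the standard library. A minimal client sketch, assuming the server is running on the default localhost:8765 with an API key of "secret" and that the pattern ID used for the DELETE call is a placeholder; the endpoint paths, the X-API-Key header, and the response shapes come from the handlers above.

```python
import json
import urllib.request

BASE = "http://localhost:8765"
HEADERS = {"X-API-Key": "secret", "Content-Type": "application/json"}


def call(method: str, path: str, payload: dict | None = None):
    """Send one request and decode the JSON response (4xx/5xx raise HTTPError)."""
    data = json.dumps(payload).encode() if payload is not None else None
    req = urllib.request.Request(BASE + path, data=data, headers=HEADERS, method=method)
    with urllib.request.urlopen(req) as resp:
        return json.loads(resp.read().decode())


print(call("GET", "/api/status"))                                # memory system status
print(call("POST", "/api/memory/clear", {"agent_id": "admin"}))  # -> {"keys_deleted": ...}
print(call("DELETE", "/api/patterns/example-pattern-id"))        # -> {"success": ...}
```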
+def run_api_server(
+    panel: MemoryControlPanel,
+    host: str = "localhost",
+    port: int = 8765,
+    api_key: str | None = None,
+    enable_rate_limit: bool = True,
+    rate_limit_requests: int = 100,
+    rate_limit_window: int = 60,
+    ssl_certfile: str | None = None,
+    ssl_keyfile: str | None = None,
+    allowed_origins: list[str] | None = None,
+):
+    """Run the Memory API server with security features.
+
+    Args:
+        panel: MemoryControlPanel instance
+        host: Host to bind to
+        port: Port to bind to
+        api_key: API key for authentication (or set EMPATHY_MEMORY_API_KEY env var)
+        enable_rate_limit: Enable rate limiting
+        rate_limit_requests: Max requests per window per IP
+        rate_limit_window: Rate limit window in seconds
+        ssl_certfile: Path to SSL certificate file for HTTPS
+        ssl_keyfile: Path to SSL key file for HTTPS
+        allowed_origins: List of allowed CORS origins (None = localhost only)
+
+    """
+    # Set up handler class attributes
+    MemoryAPIHandler.panel = panel
+    MemoryAPIHandler.allowed_origins = allowed_origins
+
+    # Set up rate limiting
+    if enable_rate_limit:
+        MemoryAPIHandler.rate_limiter = RateLimiter(
+            window_seconds=rate_limit_window,
+            max_requests=rate_limit_requests,
+        )
+    else:
+        MemoryAPIHandler.rate_limiter = None
+
+    # Set up API key authentication
+    MemoryAPIHandler.api_auth = APIKeyAuth(api_key)
+
+    server = HTTPServer((host, port), MemoryAPIHandler)
+
+    # Enable HTTPS if certificates provided
+    use_https = False
+    if ssl_certfile and ssl_keyfile:
+        if Path(ssl_certfile).exists() and Path(ssl_keyfile).exists():
+            context = ssl.SSLContext(ssl.PROTOCOL_TLS_SERVER)
+            context.load_cert_chain(ssl_certfile, ssl_keyfile)
+            server.socket = context.wrap_socket(server.socket, server_side=True)
+            use_https = True
+        else:
+            logger.warning("ssl_cert_not_found", certfile=ssl_certfile, keyfile=ssl_keyfile)
+
+    protocol = "https" if use_https else "http"
+
+    # Graceful shutdown handler
+    def shutdown_handler(signum, frame):
+        print("\n\nReceived shutdown signal...")
+        print("Stopping API server...")
+        server.shutdown()
+        # Stop Redis if we started it
+        if panel.stop_redis():
+            print("Stopped Redis")
+        print("Shutdown complete.")
+        sys.exit(0)
+
+    # Register signal handlers
+    signal.signal(signal.SIGINT, shutdown_handler)
+    signal.signal(signal.SIGTERM, shutdown_handler)
+
+    print(f"\n{'=' * 50}")
+    print("EMPATHY MEMORY API SERVER")
+    print(f"{'=' * 50}")
+    print(f"\nServer running at {protocol}://{host}:{port}")
+
+    # Security status
+    print("\nSecurity:")
+    print(f"  HTTPS: {'✓ Enabled' if use_https else '✗ Disabled'}")
+    print(f"  API Key Auth: {'✓ Enabled' if MemoryAPIHandler.api_auth.enabled else '✗ Disabled'}")
+    print(
+        f"  Rate Limit: {'✓ Enabled (' + str(rate_limit_requests) + '/min)' if enable_rate_limit else '✗ Disabled'}",
+    )
+    print(f"  CORS Origins: {allowed_origins or ['localhost']}")
+
+    print("\nEndpoints:")
+    print("  GET  /api/ping             Health check (no auth)")
+    print("  GET  /api/status           Memory system status")
+    print("  GET  /api/stats            Detailed statistics")
+    print("  GET  /api/health           Health check with recommendations")
+    print("  GET  /api/patterns         List patterns")
+    print("  GET  /api/patterns/export  Export patterns")
+    print("  POST /api/redis/start      Start Redis")
+    print("  POST /api/redis/stop       Stop Redis")
+    print("  POST /api/memory/clear     Clear short-term memory")
+
+    if MemoryAPIHandler.api_auth.enabled:
+        print("\nAuthentication:")
+        print("  Add header: Authorization: Bearer <your-api-key>")
+        print("  Or header:  X-API-Key: <your-api-key>")
+
+    print("\nPress Ctrl+C to stop\n")
+
+    server.serve_forever()
+
+
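`run_api_server` can also be called directly instead of going through the CLI defined below. A minimal sketch of a programmatic launch, mirroring what the `api` command does; the API key, storage directory, and CORS origin are placeholder values, and the import line is omitted because the module's import path is not shown in this excerpt.

```python
# MemoryControlPanel, ControlPanelConfig, and run_api_server come from this module;
# import path omitted (not shown in this excerpt).

panel = MemoryControlPanel(
    ControlPanelConfig(redis_host="localhost", redis_port=6379, storage_dir="./memdocs_storage")
)
run_api_server(
    panel,
    host="localhost",
    port=8765,
    api_key="change-me",                        # or set EMPATHY_MEMORY_API_KEY instead
    enable_rate_limit=True,
    rate_limit_requests=100,
    rate_limit_window=60,
    allowed_origins=["http://localhost:3000"],  # placeholder CORS origin
)
```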
+def _configure_logging(verbose: bool = False):
+    """Configure logging for CLI mode."""
+    level = logging.DEBUG if verbose else logging.WARNING
+    logging.basicConfig(level=level, format="%(message)s")
+    structlog.configure(
+        wrapper_class=structlog.make_filtering_bound_logger(level),
+    )
+
+
+def main():
+    """CLI entry point."""
+    parser = argparse.ArgumentParser(
+        description="Empathy Memory Control Panel - Manage Redis and pattern storage",
+        formatter_class=argparse.RawDescriptionHelpFormatter,
+        epilog="""
+Examples:
+  %(prog)s status                  Show memory system status
+  %(prog)s start                   Start Redis if not running
+  %(prog)s stop                    Stop Redis (if we started it)
+  %(prog)s stats                   Show detailed statistics
+  %(prog)s health                  Run health check
+  %(prog)s patterns                List stored patterns
+  %(prog)s export patterns.json    Export patterns to file
+  %(prog)s api --api-port 8765     Start REST API server only
+  %(prog)s serve                   Start Redis + API server (recommended)
+
+Quick Start:
+  1. pip install empathy-framework
+  2. empathy-memory serve
+  3. Open http://localhost:8765/api/status in browser
+""",
+    )
+
+    parser.add_argument(
+        "command",
+        choices=[
+            "status",
+            "start",
+            "stop",
+            "stats",
+            "health",
+            "patterns",
+            "export",
+            "api",
+            "serve",
+        ],
+        help="Command to execute",
+        nargs="?",
+    )
+    parser.add_argument(
+        "-V",
+        "--version",
+        action="version",
+        version=f"empathy-memory {__version__}",
+    )
+    parser.add_argument(
+        "--host",
+        default="localhost",
+        help="Redis host (or API host for 'api' command)",
+    )
+    parser.add_argument("--port", type=int, default=6379, help="Redis port")
+    parser.add_argument(
+        "--api-port",
+        type=int,
+        default=8765,
+        help="API server port (for 'api' command)",
+    )
+    parser.add_argument(
+        "--storage",
+        default="./memdocs_storage",
+        help="Long-term storage directory",
+    )
+    parser.add_argument(
+        "--classification",
+        "-c",
+        help="Filter by classification (PUBLIC/INTERNAL/SENSITIVE)",
+    )
+    parser.add_argument("--output", "-o", help="Output file for export")
+    parser.add_argument("--json", action="store_true", help="Output in JSON format")
+    parser.add_argument("-v", "--verbose", action="store_true", help="Show debug output")
+
+    # Security options (for api/serve commands)
+    parser.add_argument(
+        "--api-key",
+        help="API key for authentication (or set EMPATHY_MEMORY_API_KEY env var)",
+    )
+    parser.add_argument("--no-rate-limit", action="store_true", help="Disable rate limiting")
+    parser.add_argument(
+        "--rate-limit",
+        type=int,
+        default=100,
+        help="Max requests per minute per IP (default: 100)",
+    )
+    parser.add_argument("--ssl-cert", help="Path to SSL certificate file for HTTPS")
+    parser.add_argument("--ssl-key", help="Path to SSL key file for HTTPS")
+    parser.add_argument(
+        "--cors-origins",
+        help="Comma-separated list of allowed CORS origins (default: localhost)",
+    )
+
+    args = parser.parse_args()
+
+    # Configure logging (quiet by default)
+    _configure_logging(verbose=args.verbose)
+
+    # If no command specified, show help
+    if args.command is None:
+        parser.print_help()
+        sys.exit(0)
+
+    config = ControlPanelConfig(
+        redis_host=args.host,
+        redis_port=args.port,
+        storage_dir=args.storage,
+    )
+    panel = MemoryControlPanel(config)
+
+    if args.command == "status":
+        if args.json:
+            print(json.dumps(panel.status(), indent=2))
+        else:
+            print_status(panel)
+
+    elif args.command == "start":
+        status = panel.start_redis(verbose=not args.json)
+        if args.json:
+            print(json.dumps({"available": status.available, "method": status.method.value}))
+        elif status.available:
+            print(f"\n✓ Redis started via {status.method.value}")
+        else:
+            print(f"\n✗ Failed to start Redis: {status.message}")
+            sys.exit(1)
+
+    elif args.command == "stop":
+        if panel.stop_redis():
+            print("✓ Redis stopped")
+        else:
+            print("⚠ Could not stop Redis (may not have been started by us)")
+
+    elif args.command == "stats":
+        if args.json:
+            print(json.dumps(asdict(panel.get_statistics()), indent=2))
+        else:
+            print_stats(panel)
+
+    elif args.command == "health":
+        if args.json:
+            print(json.dumps(panel.health_check(), indent=2))
+        else:
+            print_health(panel)
+
+    elif args.command == "patterns":
+        patterns = panel.list_patterns(classification=args.classification)
+        if args.json:
+            print(json.dumps(patterns, indent=2))
+        else:
+            print(f"\nPatterns ({len(patterns)} found):")
+            for p in patterns:
+                print(
+                    f"  [{p.get('classification', '?')}] {p.get('pattern_id', '?')} ({p.get('pattern_type', '?')})",
+                )
+
+    elif args.command == "export":
+        output = args.output or "patterns_export.json"
+        count = panel.export_patterns(output, classification=args.classification)
+        print(f"✓ Exported {count} patterns to {output}")
+
+    elif args.command == "api":
+        # Parse CORS origins
+        cors_origins = None
+        if args.cors_origins:
+            cors_origins = [o.strip() for o in args.cors_origins.split(",")]
+
+        run_api_server(
+            panel,
+            host=args.host,
+            port=args.api_port,
+            api_key=args.api_key,
+            enable_rate_limit=not args.no_rate_limit,
+            rate_limit_requests=args.rate_limit,
+            ssl_certfile=args.ssl_cert,
+            ssl_keyfile=args.ssl_key,
+            allowed_origins=cors_origins,
+        )
+
+    elif args.command == "serve":
+        # Start Redis first
+        print("\n" + "=" * 50)
+        print("EMPATHY MEMORY - STARTING SERVICES")
+        print("=" * 50)
+
+        print("\n[1/2] Starting Redis...")
+        redis_status = panel.start_redis(verbose=False)
+        if redis_status.available:
+            print(f"  ✓ Redis running via {redis_status.method.value}")
+        else:
+            print(f"  ⚠ Redis not available: {redis_status.message}")
+            print("  (Continuing with mock memory)")
+
+        # Parse CORS origins
+        cors_origins = None
+        if args.cors_origins:
+            cors_origins = [o.strip() for o in args.cors_origins.split(",")]
+
+        print("\n[2/2] Starting API server...")
+        run_api_server(
+            panel,
+            host=args.host,
+            port=args.api_port,
+            api_key=args.api_key,
+            enable_rate_limit=not args.no_rate_limit,
+            rate_limit_requests=args.rate_limit,
+            ssl_certfile=args.ssl_cert,
+            ssl_keyfile=args.ssl_key,
+            allowed_origins=cors_origins,
+        )
+
+
+if __name__ == "__main__":
+    main()
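Clients that exceed the rate limit (100 requests per 60-second window per IP by default) receive HTTP 429 from the handlers above. A small client-side backoff sketch, assuming the server is running locally with an API key of "secret"; whether a Retry-After header accompanies the 429 depends on `_send_error`, which is not shown in this excerpt, so the sketch simply sleeps with exponential backoff.

```python
import time
import urllib.error
import urllib.request


def get_with_backoff(url: str, api_key: str, retries: int = 3) -> bytes:
    """GET with naive exponential backoff on HTTP 429 responses."""
    for attempt in range(retries):
        req = urllib.request.Request(url, headers={"X-API-Key": api_key})
        try:
            with urllib.request.urlopen(req) as resp:
                return resp.read()
        except urllib.error.HTTPError as err:
            if err.code != 429 or attempt == retries - 1:
                raise
            time.sleep(2 ** attempt)  # 1s, 2s, 4s, ...
    raise RuntimeError("unreachable")


print(get_with_backoff("http://localhost:8765/api/stats", api_key="secret"))
```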