attune-ai 2.0.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- attune/__init__.py +358 -0
- attune/adaptive/__init__.py +13 -0
- attune/adaptive/task_complexity.py +127 -0
- attune/agent_monitoring.py +414 -0
- attune/cache/__init__.py +117 -0
- attune/cache/base.py +166 -0
- attune/cache/dependency_manager.py +256 -0
- attune/cache/hash_only.py +251 -0
- attune/cache/hybrid.py +457 -0
- attune/cache/storage.py +285 -0
- attune/cache_monitor.py +356 -0
- attune/cache_stats.py +298 -0
- attune/cli/__init__.py +152 -0
- attune/cli/__main__.py +12 -0
- attune/cli/commands/__init__.py +1 -0
- attune/cli/commands/batch.py +264 -0
- attune/cli/commands/cache.py +248 -0
- attune/cli/commands/help.py +331 -0
- attune/cli/commands/info.py +140 -0
- attune/cli/commands/inspect.py +436 -0
- attune/cli/commands/inspection.py +57 -0
- attune/cli/commands/memory.py +48 -0
- attune/cli/commands/metrics.py +92 -0
- attune/cli/commands/orchestrate.py +184 -0
- attune/cli/commands/patterns.py +207 -0
- attune/cli/commands/profiling.py +202 -0
- attune/cli/commands/provider.py +98 -0
- attune/cli/commands/routing.py +285 -0
- attune/cli/commands/setup.py +96 -0
- attune/cli/commands/status.py +235 -0
- attune/cli/commands/sync.py +166 -0
- attune/cli/commands/tier.py +121 -0
- attune/cli/commands/utilities.py +114 -0
- attune/cli/commands/workflow.py +579 -0
- attune/cli/core.py +32 -0
- attune/cli/parsers/__init__.py +68 -0
- attune/cli/parsers/batch.py +118 -0
- attune/cli/parsers/cache.py +65 -0
- attune/cli/parsers/help.py +41 -0
- attune/cli/parsers/info.py +26 -0
- attune/cli/parsers/inspect.py +66 -0
- attune/cli/parsers/metrics.py +42 -0
- attune/cli/parsers/orchestrate.py +61 -0
- attune/cli/parsers/patterns.py +54 -0
- attune/cli/parsers/provider.py +40 -0
- attune/cli/parsers/routing.py +110 -0
- attune/cli/parsers/setup.py +42 -0
- attune/cli/parsers/status.py +47 -0
- attune/cli/parsers/sync.py +31 -0
- attune/cli/parsers/tier.py +33 -0
- attune/cli/parsers/workflow.py +77 -0
- attune/cli/utils/__init__.py +1 -0
- attune/cli/utils/data.py +242 -0
- attune/cli/utils/helpers.py +68 -0
- attune/cli_legacy.py +3957 -0
- attune/cli_minimal.py +1159 -0
- attune/cli_router.py +437 -0
- attune/cli_unified.py +814 -0
- attune/config/__init__.py +66 -0
- attune/config/xml_config.py +286 -0
- attune/config.py +545 -0
- attune/coordination.py +870 -0
- attune/core.py +1511 -0
- attune/core_modules/__init__.py +15 -0
- attune/cost_tracker.py +626 -0
- attune/dashboard/__init__.py +41 -0
- attune/dashboard/app.py +512 -0
- attune/dashboard/simple_server.py +435 -0
- attune/dashboard/standalone_server.py +547 -0
- attune/discovery.py +306 -0
- attune/emergence.py +306 -0
- attune/exceptions.py +123 -0
- attune/feedback_loops.py +373 -0
- attune/hot_reload/README.md +473 -0
- attune/hot_reload/__init__.py +62 -0
- attune/hot_reload/config.py +83 -0
- attune/hot_reload/integration.py +229 -0
- attune/hot_reload/reloader.py +298 -0
- attune/hot_reload/watcher.py +183 -0
- attune/hot_reload/websocket.py +177 -0
- attune/levels.py +577 -0
- attune/leverage_points.py +441 -0
- attune/logging_config.py +261 -0
- attune/mcp/__init__.py +10 -0
- attune/mcp/server.py +506 -0
- attune/memory/__init__.py +237 -0
- attune/memory/claude_memory.py +469 -0
- attune/memory/config.py +224 -0
- attune/memory/control_panel.py +1290 -0
- attune/memory/control_panel_support.py +145 -0
- attune/memory/cross_session.py +845 -0
- attune/memory/edges.py +179 -0
- attune/memory/encryption.py +159 -0
- attune/memory/file_session.py +770 -0
- attune/memory/graph.py +570 -0
- attune/memory/long_term.py +913 -0
- attune/memory/long_term_types.py +99 -0
- attune/memory/mixins/__init__.py +25 -0
- attune/memory/mixins/backend_init_mixin.py +249 -0
- attune/memory/mixins/capabilities_mixin.py +208 -0
- attune/memory/mixins/handoff_mixin.py +208 -0
- attune/memory/mixins/lifecycle_mixin.py +49 -0
- attune/memory/mixins/long_term_mixin.py +352 -0
- attune/memory/mixins/promotion_mixin.py +109 -0
- attune/memory/mixins/short_term_mixin.py +182 -0
- attune/memory/nodes.py +179 -0
- attune/memory/redis_bootstrap.py +540 -0
- attune/memory/security/__init__.py +31 -0
- attune/memory/security/audit_logger.py +932 -0
- attune/memory/security/pii_scrubber.py +640 -0
- attune/memory/security/secrets_detector.py +678 -0
- attune/memory/short_term.py +2192 -0
- attune/memory/simple_storage.py +302 -0
- attune/memory/storage/__init__.py +15 -0
- attune/memory/storage_backend.py +167 -0
- attune/memory/summary_index.py +583 -0
- attune/memory/types.py +446 -0
- attune/memory/unified.py +182 -0
- attune/meta_workflows/__init__.py +74 -0
- attune/meta_workflows/agent_creator.py +248 -0
- attune/meta_workflows/builtin_templates.py +567 -0
- attune/meta_workflows/cli_commands/__init__.py +56 -0
- attune/meta_workflows/cli_commands/agent_commands.py +321 -0
- attune/meta_workflows/cli_commands/analytics_commands.py +442 -0
- attune/meta_workflows/cli_commands/config_commands.py +232 -0
- attune/meta_workflows/cli_commands/memory_commands.py +182 -0
- attune/meta_workflows/cli_commands/template_commands.py +354 -0
- attune/meta_workflows/cli_commands/workflow_commands.py +382 -0
- attune/meta_workflows/cli_meta_workflows.py +59 -0
- attune/meta_workflows/form_engine.py +292 -0
- attune/meta_workflows/intent_detector.py +409 -0
- attune/meta_workflows/models.py +569 -0
- attune/meta_workflows/pattern_learner.py +738 -0
- attune/meta_workflows/plan_generator.py +384 -0
- attune/meta_workflows/session_context.py +397 -0
- attune/meta_workflows/template_registry.py +229 -0
- attune/meta_workflows/workflow.py +984 -0
- attune/metrics/__init__.py +12 -0
- attune/metrics/collector.py +31 -0
- attune/metrics/prompt_metrics.py +194 -0
- attune/models/__init__.py +172 -0
- attune/models/__main__.py +13 -0
- attune/models/adaptive_routing.py +437 -0
- attune/models/auth_cli.py +444 -0
- attune/models/auth_strategy.py +450 -0
- attune/models/cli.py +655 -0
- attune/models/empathy_executor.py +354 -0
- attune/models/executor.py +257 -0
- attune/models/fallback.py +762 -0
- attune/models/provider_config.py +282 -0
- attune/models/registry.py +472 -0
- attune/models/tasks.py +359 -0
- attune/models/telemetry/__init__.py +71 -0
- attune/models/telemetry/analytics.py +594 -0
- attune/models/telemetry/backend.py +196 -0
- attune/models/telemetry/data_models.py +431 -0
- attune/models/telemetry/storage.py +489 -0
- attune/models/token_estimator.py +420 -0
- attune/models/validation.py +280 -0
- attune/monitoring/__init__.py +52 -0
- attune/monitoring/alerts.py +946 -0
- attune/monitoring/alerts_cli.py +448 -0
- attune/monitoring/multi_backend.py +271 -0
- attune/monitoring/otel_backend.py +362 -0
- attune/optimization/__init__.py +19 -0
- attune/optimization/context_optimizer.py +272 -0
- attune/orchestration/__init__.py +67 -0
- attune/orchestration/agent_templates.py +707 -0
- attune/orchestration/config_store.py +499 -0
- attune/orchestration/execution_strategies.py +2111 -0
- attune/orchestration/meta_orchestrator.py +1168 -0
- attune/orchestration/pattern_learner.py +696 -0
- attune/orchestration/real_tools.py +931 -0
- attune/pattern_cache.py +187 -0
- attune/pattern_library.py +542 -0
- attune/patterns/debugging/all_patterns.json +81 -0
- attune/patterns/debugging/workflow_20260107_1770825e.json +77 -0
- attune/patterns/refactoring_memory.json +89 -0
- attune/persistence.py +564 -0
- attune/platform_utils.py +265 -0
- attune/plugins/__init__.py +28 -0
- attune/plugins/base.py +361 -0
- attune/plugins/registry.py +268 -0
- attune/project_index/__init__.py +32 -0
- attune/project_index/cli.py +335 -0
- attune/project_index/index.py +667 -0
- attune/project_index/models.py +504 -0
- attune/project_index/reports.py +474 -0
- attune/project_index/scanner.py +777 -0
- attune/project_index/scanner_parallel.py +291 -0
- attune/prompts/__init__.py +61 -0
- attune/prompts/config.py +77 -0
- attune/prompts/context.py +177 -0
- attune/prompts/parser.py +285 -0
- attune/prompts/registry.py +313 -0
- attune/prompts/templates.py +208 -0
- attune/redis_config.py +302 -0
- attune/redis_memory.py +799 -0
- attune/resilience/__init__.py +56 -0
- attune/resilience/circuit_breaker.py +256 -0
- attune/resilience/fallback.py +179 -0
- attune/resilience/health.py +300 -0
- attune/resilience/retry.py +209 -0
- attune/resilience/timeout.py +135 -0
- attune/routing/__init__.py +43 -0
- attune/routing/chain_executor.py +433 -0
- attune/routing/classifier.py +217 -0
- attune/routing/smart_router.py +234 -0
- attune/routing/workflow_registry.py +343 -0
- attune/scaffolding/README.md +589 -0
- attune/scaffolding/__init__.py +35 -0
- attune/scaffolding/__main__.py +14 -0
- attune/scaffolding/cli.py +240 -0
- attune/scaffolding/templates/base_wizard.py.jinja2 +121 -0
- attune/scaffolding/templates/coach_wizard.py.jinja2 +321 -0
- attune/scaffolding/templates/domain_wizard.py.jinja2 +408 -0
- attune/scaffolding/templates/linear_flow_wizard.py.jinja2 +203 -0
- attune/socratic/__init__.py +256 -0
- attune/socratic/ab_testing.py +958 -0
- attune/socratic/blueprint.py +533 -0
- attune/socratic/cli.py +703 -0
- attune/socratic/collaboration.py +1114 -0
- attune/socratic/domain_templates.py +924 -0
- attune/socratic/embeddings.py +738 -0
- attune/socratic/engine.py +794 -0
- attune/socratic/explainer.py +682 -0
- attune/socratic/feedback.py +772 -0
- attune/socratic/forms.py +629 -0
- attune/socratic/generator.py +732 -0
- attune/socratic/llm_analyzer.py +637 -0
- attune/socratic/mcp_server.py +702 -0
- attune/socratic/session.py +312 -0
- attune/socratic/storage.py +667 -0
- attune/socratic/success.py +730 -0
- attune/socratic/visual_editor.py +860 -0
- attune/socratic/web_ui.py +958 -0
- attune/telemetry/__init__.py +39 -0
- attune/telemetry/agent_coordination.py +475 -0
- attune/telemetry/agent_tracking.py +367 -0
- attune/telemetry/approval_gates.py +545 -0
- attune/telemetry/cli.py +1231 -0
- attune/telemetry/commands/__init__.py +14 -0
- attune/telemetry/commands/dashboard_commands.py +696 -0
- attune/telemetry/event_streaming.py +409 -0
- attune/telemetry/feedback_loop.py +567 -0
- attune/telemetry/usage_tracker.py +591 -0
- attune/templates.py +754 -0
- attune/test_generator/__init__.py +38 -0
- attune/test_generator/__main__.py +14 -0
- attune/test_generator/cli.py +234 -0
- attune/test_generator/generator.py +355 -0
- attune/test_generator/risk_analyzer.py +216 -0
- attune/test_generator/templates/unit_test.py.jinja2 +272 -0
- attune/tier_recommender.py +384 -0
- attune/tools.py +183 -0
- attune/trust/__init__.py +28 -0
- attune/trust/circuit_breaker.py +579 -0
- attune/trust_building.py +527 -0
- attune/validation/__init__.py +19 -0
- attune/validation/xml_validator.py +281 -0
- attune/vscode_bridge.py +173 -0
- attune/workflow_commands.py +780 -0
- attune/workflow_patterns/__init__.py +33 -0
- attune/workflow_patterns/behavior.py +249 -0
- attune/workflow_patterns/core.py +76 -0
- attune/workflow_patterns/output.py +99 -0
- attune/workflow_patterns/registry.py +255 -0
- attune/workflow_patterns/structural.py +288 -0
- attune/workflows/__init__.py +539 -0
- attune/workflows/autonomous_test_gen.py +1268 -0
- attune/workflows/base.py +2667 -0
- attune/workflows/batch_processing.py +342 -0
- attune/workflows/bug_predict.py +1084 -0
- attune/workflows/builder.py +273 -0
- attune/workflows/caching.py +253 -0
- attune/workflows/code_review.py +1048 -0
- attune/workflows/code_review_adapters.py +312 -0
- attune/workflows/code_review_pipeline.py +722 -0
- attune/workflows/config.py +645 -0
- attune/workflows/dependency_check.py +644 -0
- attune/workflows/document_gen/__init__.py +25 -0
- attune/workflows/document_gen/config.py +30 -0
- attune/workflows/document_gen/report_formatter.py +162 -0
- attune/workflows/document_gen/workflow.py +1426 -0
- attune/workflows/document_manager.py +216 -0
- attune/workflows/document_manager_README.md +134 -0
- attune/workflows/documentation_orchestrator.py +1205 -0
- attune/workflows/history.py +510 -0
- attune/workflows/keyboard_shortcuts/__init__.py +39 -0
- attune/workflows/keyboard_shortcuts/generators.py +391 -0
- attune/workflows/keyboard_shortcuts/parsers.py +416 -0
- attune/workflows/keyboard_shortcuts/prompts.py +295 -0
- attune/workflows/keyboard_shortcuts/schema.py +193 -0
- attune/workflows/keyboard_shortcuts/workflow.py +509 -0
- attune/workflows/llm_base.py +363 -0
- attune/workflows/manage_docs.py +87 -0
- attune/workflows/manage_docs_README.md +134 -0
- attune/workflows/manage_documentation.py +821 -0
- attune/workflows/new_sample_workflow1.py +149 -0
- attune/workflows/new_sample_workflow1_README.md +150 -0
- attune/workflows/orchestrated_health_check.py +849 -0
- attune/workflows/orchestrated_release_prep.py +600 -0
- attune/workflows/output.py +413 -0
- attune/workflows/perf_audit.py +863 -0
- attune/workflows/pr_review.py +762 -0
- attune/workflows/progress.py +785 -0
- attune/workflows/progress_server.py +322 -0
- attune/workflows/progressive/README 2.md +454 -0
- attune/workflows/progressive/README.md +454 -0
- attune/workflows/progressive/__init__.py +82 -0
- attune/workflows/progressive/cli.py +219 -0
- attune/workflows/progressive/core.py +488 -0
- attune/workflows/progressive/orchestrator.py +723 -0
- attune/workflows/progressive/reports.py +520 -0
- attune/workflows/progressive/telemetry.py +274 -0
- attune/workflows/progressive/test_gen.py +495 -0
- attune/workflows/progressive/workflow.py +589 -0
- attune/workflows/refactor_plan.py +694 -0
- attune/workflows/release_prep.py +895 -0
- attune/workflows/release_prep_crew.py +969 -0
- attune/workflows/research_synthesis.py +404 -0
- attune/workflows/routing.py +168 -0
- attune/workflows/secure_release.py +593 -0
- attune/workflows/security_adapters.py +297 -0
- attune/workflows/security_audit.py +1329 -0
- attune/workflows/security_audit_phase3.py +355 -0
- attune/workflows/seo_optimization.py +633 -0
- attune/workflows/step_config.py +234 -0
- attune/workflows/telemetry_mixin.py +269 -0
- attune/workflows/test5.py +125 -0
- attune/workflows/test5_README.md +158 -0
- attune/workflows/test_coverage_boost_crew.py +849 -0
- attune/workflows/test_gen/__init__.py +52 -0
- attune/workflows/test_gen/ast_analyzer.py +249 -0
- attune/workflows/test_gen/config.py +88 -0
- attune/workflows/test_gen/data_models.py +38 -0
- attune/workflows/test_gen/report_formatter.py +289 -0
- attune/workflows/test_gen/test_templates.py +381 -0
- attune/workflows/test_gen/workflow.py +655 -0
- attune/workflows/test_gen.py +54 -0
- attune/workflows/test_gen_behavioral.py +477 -0
- attune/workflows/test_gen_parallel.py +341 -0
- attune/workflows/test_lifecycle.py +526 -0
- attune/workflows/test_maintenance.py +627 -0
- attune/workflows/test_maintenance_cli.py +590 -0
- attune/workflows/test_maintenance_crew.py +840 -0
- attune/workflows/test_runner.py +622 -0
- attune/workflows/tier_tracking.py +531 -0
- attune/workflows/xml_enhanced_crew.py +285 -0
- attune_ai-2.0.0.dist-info/METADATA +1026 -0
- attune_ai-2.0.0.dist-info/RECORD +457 -0
- attune_ai-2.0.0.dist-info/WHEEL +5 -0
- attune_ai-2.0.0.dist-info/entry_points.txt +26 -0
- attune_ai-2.0.0.dist-info/licenses/LICENSE +201 -0
- attune_ai-2.0.0.dist-info/licenses/LICENSE_CHANGE_ANNOUNCEMENT.md +101 -0
- attune_ai-2.0.0.dist-info/top_level.txt +5 -0
- attune_healthcare/__init__.py +13 -0
- attune_healthcare/monitors/__init__.py +9 -0
- attune_healthcare/monitors/clinical_protocol_monitor.py +315 -0
- attune_healthcare/monitors/monitoring/__init__.py +44 -0
- attune_healthcare/monitors/monitoring/protocol_checker.py +300 -0
- attune_healthcare/monitors/monitoring/protocol_loader.py +214 -0
- attune_healthcare/monitors/monitoring/sensor_parsers.py +306 -0
- attune_healthcare/monitors/monitoring/trajectory_analyzer.py +389 -0
- attune_llm/README.md +553 -0
- attune_llm/__init__.py +28 -0
- attune_llm/agent_factory/__init__.py +53 -0
- attune_llm/agent_factory/adapters/__init__.py +85 -0
- attune_llm/agent_factory/adapters/autogen_adapter.py +312 -0
- attune_llm/agent_factory/adapters/crewai_adapter.py +483 -0
- attune_llm/agent_factory/adapters/haystack_adapter.py +298 -0
- attune_llm/agent_factory/adapters/langchain_adapter.py +362 -0
- attune_llm/agent_factory/adapters/langgraph_adapter.py +333 -0
- attune_llm/agent_factory/adapters/native.py +228 -0
- attune_llm/agent_factory/adapters/wizard_adapter.py +423 -0
- attune_llm/agent_factory/base.py +305 -0
- attune_llm/agent_factory/crews/__init__.py +67 -0
- attune_llm/agent_factory/crews/code_review.py +1113 -0
- attune_llm/agent_factory/crews/health_check.py +1262 -0
- attune_llm/agent_factory/crews/refactoring.py +1128 -0
- attune_llm/agent_factory/crews/security_audit.py +1018 -0
- attune_llm/agent_factory/decorators.py +287 -0
- attune_llm/agent_factory/factory.py +558 -0
- attune_llm/agent_factory/framework.py +193 -0
- attune_llm/agent_factory/memory_integration.py +328 -0
- attune_llm/agent_factory/resilient.py +320 -0
- attune_llm/agents_md/__init__.py +22 -0
- attune_llm/agents_md/loader.py +218 -0
- attune_llm/agents_md/parser.py +271 -0
- attune_llm/agents_md/registry.py +307 -0
- attune_llm/claude_memory.py +466 -0
- attune_llm/cli/__init__.py +8 -0
- attune_llm/cli/sync_claude.py +487 -0
- attune_llm/code_health.py +1313 -0
- attune_llm/commands/__init__.py +51 -0
- attune_llm/commands/context.py +375 -0
- attune_llm/commands/loader.py +301 -0
- attune_llm/commands/models.py +231 -0
- attune_llm/commands/parser.py +371 -0
- attune_llm/commands/registry.py +429 -0
- attune_llm/config/__init__.py +29 -0
- attune_llm/config/unified.py +291 -0
- attune_llm/context/__init__.py +22 -0
- attune_llm/context/compaction.py +455 -0
- attune_llm/context/manager.py +434 -0
- attune_llm/contextual_patterns.py +361 -0
- attune_llm/core.py +907 -0
- attune_llm/git_pattern_extractor.py +435 -0
- attune_llm/hooks/__init__.py +24 -0
- attune_llm/hooks/config.py +306 -0
- attune_llm/hooks/executor.py +289 -0
- attune_llm/hooks/registry.py +302 -0
- attune_llm/hooks/scripts/__init__.py +39 -0
- attune_llm/hooks/scripts/evaluate_session.py +201 -0
- attune_llm/hooks/scripts/first_time_init.py +285 -0
- attune_llm/hooks/scripts/pre_compact.py +207 -0
- attune_llm/hooks/scripts/session_end.py +183 -0
- attune_llm/hooks/scripts/session_start.py +163 -0
- attune_llm/hooks/scripts/suggest_compact.py +225 -0
- attune_llm/learning/__init__.py +30 -0
- attune_llm/learning/evaluator.py +438 -0
- attune_llm/learning/extractor.py +514 -0
- attune_llm/learning/storage.py +560 -0
- attune_llm/levels.py +227 -0
- attune_llm/pattern_confidence.py +414 -0
- attune_llm/pattern_resolver.py +272 -0
- attune_llm/pattern_summary.py +350 -0
- attune_llm/providers.py +967 -0
- attune_llm/routing/__init__.py +32 -0
- attune_llm/routing/model_router.py +362 -0
- attune_llm/security/IMPLEMENTATION_SUMMARY.md +413 -0
- attune_llm/security/PHASE2_COMPLETE.md +384 -0
- attune_llm/security/PHASE2_SECRETS_DETECTOR_COMPLETE.md +271 -0
- attune_llm/security/QUICK_REFERENCE.md +316 -0
- attune_llm/security/README.md +262 -0
- attune_llm/security/__init__.py +62 -0
- attune_llm/security/audit_logger.py +929 -0
- attune_llm/security/audit_logger_example.py +152 -0
- attune_llm/security/pii_scrubber.py +640 -0
- attune_llm/security/secrets_detector.py +678 -0
- attune_llm/security/secrets_detector_example.py +304 -0
- attune_llm/security/secure_memdocs.py +1192 -0
- attune_llm/security/secure_memdocs_example.py +278 -0
- attune_llm/session_status.py +745 -0
- attune_llm/state.py +246 -0
- attune_llm/utils/__init__.py +5 -0
- attune_llm/utils/tokens.py +349 -0
- attune_software/SOFTWARE_PLUGIN_README.md +57 -0
- attune_software/__init__.py +13 -0
- attune_software/cli/__init__.py +120 -0
- attune_software/cli/inspect.py +362 -0
- attune_software/cli.py +574 -0
- attune_software/plugin.py +188 -0
- workflow_scaffolding/__init__.py +11 -0
- workflow_scaffolding/__main__.py +12 -0
- workflow_scaffolding/cli.py +206 -0
- workflow_scaffolding/generator.py +265 -0
|
@@ -0,0 +1,667 @@
|
|
|
1
|
+
"""Project Index - Main index class with persistence.
|
|
2
|
+
|
|
3
|
+
Manages the project index, persists to JSON, syncs with Redis.
|
|
4
|
+
|
|
5
|
+
Copyright 2025 Smart AI Memory, LLC
|
|
6
|
+
Licensed under Fair Source 0.9
|
|
7
|
+
"""
|
|
8
|
+
|
|
9
|
+
import json
|
|
10
|
+
import logging
|
|
11
|
+
from collections.abc import Iterator
|
|
12
|
+
from datetime import datetime
|
|
13
|
+
from pathlib import Path
|
|
14
|
+
from typing import Any
|
|
15
|
+
|
|
16
|
+
from attune.config import _validate_file_path
|
|
17
|
+
|
|
18
|
+
from .models import FileRecord, IndexConfig, ProjectSummary
|
|
19
|
+
from .scanner import ProjectScanner
|
|
20
|
+
from .scanner_parallel import ParallelProjectScanner
|
|
21
|
+
|
|
22
|
+
logger = logging.getLogger(__name__)
|
|
23
|
+
|
|
24
|
+
|
|
25
|
+
class ProjectIndex:
    """Central project index with file metadata.

    Features:
    - JSON persistence in .attune/project_index.json
    - Optional Redis sync for real-time access
    - Query API for workflows and agents
    - Update API for writing metadata
    """

    # On-disk JSON layout version; load() refuses (and signals a rebuild
    # is needed) when the stored file carries a different version.
    SCHEMA_VERSION = "1.0"
    # Location of the persisted index, relative to the project root.
    DEFAULT_INDEX_PATH = ".attune/project_index.json"
|
|
37
|
+
|
|
38
|
+
def __init__(
    self,
    project_root: str,
    config: IndexConfig | None = None,
    redis_client: Any | None = None,
    workers: int | None = None,
    use_parallel: bool = True,
):
    """Initialize ProjectIndex.

    Args:
        project_root: Root directory of the project
        config: Optional index configuration
        redis_client: Optional Redis client for real-time sync
        workers: Number of worker processes for parallel scanning.
            None (default): Use all CPU cores
            1: Sequential processing
            N: Use N worker processes
        use_parallel: Whether to use parallel scanner (default: True).
            Set to False to force sequential processing.
    """
    self.project_root = Path(project_root)
    # Fall back to default configuration when none is supplied.
    self.config = config or IndexConfig()
    self.redis_client = redis_client
    self.workers = workers
    self.use_parallel = use_parallel

    # In-memory state: populated by load() or refresh(), keyed by file path.
    self._records: dict[str, FileRecord] = {}
    self._summary: ProjectSummary = ProjectSummary()
    # Timestamp of the last successful load/refresh; None until then.
    self._generated_at: datetime | None = None

    # Index file path (under the project root; created on first save()).
    self._index_path = self.project_root / self.DEFAULT_INDEX_PATH
|
|
72
|
+
|
|
73
|
+
# ===== Persistence =====
|
|
74
|
+
|
|
75
|
+
def load(self) -> bool:
    """Load index from JSON file.

    Reads the persisted index, validates its schema version, and restores
    config, summary, file records, and generation timestamp into memory.

    Returns:
        True if loaded successfully, False otherwise

    """
    if not self._index_path.exists():
        logger.info(f"No index found at {self._index_path}")
        return False

    try:
        with open(self._index_path, encoding="utf-8") as f:
            data = json.load(f)

        # Validate schema version: a mismatch means the on-disk layout is
        # stale, so signal the caller to regenerate rather than mis-parse.
        if data.get("schema_version") != self.SCHEMA_VERSION:
            logger.warning("Schema version mismatch, regenerating index")
            return False

        # Load config
        if "config" in data:
            self.config = IndexConfig.from_dict(data["config"])

        # Load summary
        if "summary" in data:
            self._summary = ProjectSummary.from_dict(data["summary"])

        # Load records (replace, not merge: the file is the source of truth)
        self._records = {}
        for path, record_data in data.get("files", {}).items():
            self._records[path] = FileRecord.from_dict(record_data)

        # Load timestamp
        if data.get("generated_at"):
            self._generated_at = datetime.fromisoformat(data["generated_at"])

        logger.info(f"Loaded index with {len(self._records)} files")
        return True

    except (OSError, json.JSONDecodeError, KeyError, ValueError) as e:
        # OSError added: an unreadable file (permissions, I/O error)
        # previously escaped despite the documented False-on-failure
        # contract of this method.
        logger.error(f"Failed to load index: {e}")
        return False
|
|
118
|
+
|
|
119
|
+
def save(self) -> bool:
    """Persist the in-memory index to its JSON file.

    Also mirrors the index into Redis when a client is attached and the
    configuration enables it.

    Returns:
        True if saved successfully, False otherwise

    """
    try:
        # Make sure the containing directory (e.g. .attune/) exists.
        self._index_path.parent.mkdir(parents=True, exist_ok=True)

        payload = {
            "schema_version": self.SCHEMA_VERSION,
            "project": self.project_root.name,
            "generated_at": datetime.now().isoformat(),
            "config": self.config.to_dict(),
            "summary": self._summary.to_dict(),
            "files": {p: rec.to_dict() for p, rec in self._records.items()},
        }

        # Path is validated before writing to guard against traversal.
        validated_path = _validate_file_path(str(self._index_path))
        with open(validated_path, "w", encoding="utf-8") as fh:
            # default=str handles non-JSON-native values (e.g. datetimes).
            json.dump(payload, fh, indent=2, default=str)

        logger.info(f"Saved index with {len(self._records)} files to {validated_path}")

        # Best-effort real-time mirror; only when configured.
        if self.redis_client and self.config.use_redis:
            self._sync_to_redis()

        return True

    except OSError as e:
        logger.error(f"Failed to save index: {e}")
        return False
|
|
154
|
+
|
|
155
|
+
def _sync_to_redis(self) -> None:
|
|
156
|
+
"""Sync index to Redis for real-time access."""
|
|
157
|
+
if not self.redis_client:
|
|
158
|
+
return
|
|
159
|
+
|
|
160
|
+
try:
|
|
161
|
+
prefix = self.config.redis_key_prefix
|
|
162
|
+
|
|
163
|
+
# Store summary
|
|
164
|
+
self.redis_client.set(
|
|
165
|
+
f"{prefix}:summary",
|
|
166
|
+
json.dumps(self._summary.to_dict()),
|
|
167
|
+
)
|
|
168
|
+
|
|
169
|
+
# Store each file record
|
|
170
|
+
for path, record in self._records.items():
|
|
171
|
+
self.redis_client.hset(
|
|
172
|
+
f"{prefix}:files",
|
|
173
|
+
path,
|
|
174
|
+
json.dumps(record.to_dict()),
|
|
175
|
+
)
|
|
176
|
+
|
|
177
|
+
# Store metadata
|
|
178
|
+
self.redis_client.set(
|
|
179
|
+
f"{prefix}:meta",
|
|
180
|
+
json.dumps(
|
|
181
|
+
{
|
|
182
|
+
"generated_at": datetime.now().isoformat(),
|
|
183
|
+
"file_count": len(self._records),
|
|
184
|
+
},
|
|
185
|
+
),
|
|
186
|
+
)
|
|
187
|
+
|
|
188
|
+
logger.info(f"Synced index to Redis with prefix {prefix}")
|
|
189
|
+
|
|
190
|
+
except Exception as e:
|
|
191
|
+
logger.error(f"Failed to sync to Redis: {e}")
|
|
192
|
+
|
|
193
|
+
# ===== Index Operations =====
|
|
194
|
+
|
|
195
|
+
def refresh(self, analyze_dependencies: bool = True) -> None:
    """Rebuild the entire index by scanning the project from scratch.

    Uses the parallel scanner when enabled, then persists the result.

    Args:
        analyze_dependencies: Whether to analyze import dependencies.
            Set to False for faster scans when dependency graph not needed.
            Default: True.

    Performance:
        - Sequential: ~3.6s for 3,472 files
        - Parallel (12 workers): ~1.8s for 3,472 files
        - Parallel without deps: ~1.0s for 3,472 files
    """
    logger.info(f"Refreshing index for {self.project_root}")

    # Parallel scanning is the default; fall back to sequential when
    # disabled outright or when exactly one worker is requested.
    wants_parallel = self.use_parallel and (self.workers is None or self.workers > 1)
    if wants_parallel:
        logger.info(f"Using parallel scanner (workers: {self.workers or 'auto'})")
        scanner = ParallelProjectScanner(
            str(self.project_root), self.config, workers=self.workers
        )
    else:
        logger.info("Using sequential scanner")
        scanner = ProjectScanner(str(self.project_root), self.config)

    records, summary = scanner.scan(analyze_dependencies=analyze_dependencies)

    # Swap in the freshly scanned state wholesale.
    self._records = {record.path: record for record in records}
    self._summary = summary
    self._generated_at = datetime.now()

    # Persist immediately so on-disk state matches memory.
    self.save()

    logger.info(
        f"Index refreshed: {len(self._records)} files, "
        f"{summary.files_needing_attention} need attention"
    )
|
|
236
|
+
|
|
237
|
+
def refresh_incremental(
    self, analyze_dependencies: bool = True, base_ref: str = "HEAD"
) -> tuple[int, int]:
    """Incrementally refresh index by scanning only changed files.

    Uses git to identify files changed since ``base_ref`` (plus untracked
    files) and re-scans only those. Significantly faster than a full
    refresh for small change sets; for very large change sets it
    approaches full-refresh cost.

    Args:
        analyze_dependencies: Whether to rebuild dependency graph.
            Note: Even if True, only changed files are re-scanned.
            Default: True.
        base_ref: Git ref to diff against (default: "HEAD").
            Use "HEAD~1" for changes since last commit,
            "origin/main" for changes vs remote, etc.

    Returns:
        Tuple of (files_updated, files_removed)

    Raises:
        RuntimeError: If not in a git repository or git is unavailable
        ValueError: If no previous index exists

    Example:
        >>> index = ProjectIndex(".")
        >>> index.load()
        >>> updated, removed = index.refresh_incremental()
        >>> print(f"Updated {updated} files, removed {removed}")
    """
    import subprocess

    # Incremental update needs a baseline index to patch
    if not self._records:
        raise ValueError(
            "No existing index to update. Run refresh() first to create initial index."
        )

    def git_lines(*args: str) -> set[str]:
        # Run one git subcommand; return stdout as a set of non-empty lines.
        result = subprocess.run(
            ["git", *args],
            cwd=self.project_root,
            capture_output=True,
            text=True,
            check=True,
        )
        output = result.stdout.strip()
        return set(output.split("\n")) if output else set()

    try:
        # Untracked (new) files git doesn't know about yet
        untracked_files = git_lines("ls-files", "--others", "--exclude-standard")
        # Files modified/added since base_ref
        modified_files = git_lines("diff", "--name-only", base_ref)
        # Files deleted since base_ref (must be dropped from the index)
        deleted_files = git_lines("diff", "--name-only", "--diff-filter=D", base_ref)
    except subprocess.CalledProcessError as e:
        # Chain the cause so the failing git invocation is not lost
        raise RuntimeError(f"Git command failed: {e}. Are you in a git repository?") from e
    except FileNotFoundError as e:
        raise RuntimeError("Git not found. Incremental refresh requires git.") from e

    # Candidate paths = untracked + modified, filtered to indexable files.
    # exists() also drops deleted files that `git diff` reports.
    changed_paths = []
    for file_str in untracked_files | modified_files:
        if not file_str:  # Skip empty strings
            continue
        file_path = self.project_root / file_str
        if file_path.exists() and not self._is_excluded(file_path):
            changed_paths.append(file_path)

    logger.info(
        f"Incremental refresh: {len(changed_paths)} changed, {len(deleted_files)} deleted"
    )

    # If no changes, nothing to do
    if not changed_paths and not deleted_files:
        logger.info("No changes detected, index is up to date")
        return 0, 0

    # Re-scan changed files using the appropriate scanner
    if changed_paths:
        if self.use_parallel and len(changed_paths) > 100:
            # Parallel scanner pays off only for large change sets
            scanner = ParallelProjectScanner(
                str(self.project_root), self.config, workers=self.workers
            )
        else:
            scanner = ProjectScanner(str(self.project_root), self.config)
        # Monkey-patch _discover_files so the scanner visits only changed files
        scanner._discover_files = lambda: changed_paths

        # Scan only changed files (derived data is rebuilt below)
        new_records, _ = scanner.scan(analyze_dependencies=False)
        for record in new_records:
            self._records[record.path] = record

    # Drop records for deleted files
    files_removed = 0
    for deleted_file in deleted_files:
        if deleted_file and deleted_file in self._records:
            del self._records[deleted_file]
            files_removed += 1

    # Rebuild derived data with a single helper scanner
    all_records = list(self._records.values())
    helper = ProjectScanner(str(self.project_root), self.config)
    if analyze_dependencies:
        helper._analyze_dependencies(all_records)
        helper._calculate_impact_scores(all_records)
    self._summary = helper._build_summary(all_records)
    self._generated_at = datetime.now()

    # Save to disk
    self.save()

    files_updated = len(changed_paths)
    logger.info(
        f"Incremental refresh complete: {files_updated} updated, {files_removed} removed"
    )

    return files_updated, files_removed
|
|
398
|
+
|
|
399
|
+
def _is_excluded(self, path: Path) -> bool:
    """Check if a path should be excluded from indexing.

    Delegates to a throwaway ProjectScanner so the exclusion rules
    live in exactly one place.
    """
    return ProjectScanner(str(self.project_root), self.config)._is_excluded(path)
|
|
403
|
+
|
|
404
|
+
def update_file(self, path: str, **updates: Any) -> bool:
    """Update metadata for a specific file.

    This is the write API for workflows and agents. Keys matching an
    existing record attribute are set directly; unknown keys land in
    the record's free-form metadata dict. Persists the index on success.

    Args:
        path: Relative path to the file
        **updates: Key-value pairs to update

    Returns:
        True if updated successfully

    """
    if path not in self._records:
        logger.warning(f"File not in index: {path}")
        return False

    record = self._records[path]

    for field_name, new_value in updates.items():
        if hasattr(record, field_name):
            setattr(record, field_name, new_value)
        else:
            # Unknown keys go into the free-form metadata dict
            record.metadata[field_name] = new_value

    # Stamp the record and persist the change
    record.last_indexed = datetime.now()
    self.save()

    return True
|
|
437
|
+
|
|
438
|
+
def update_coverage(self, coverage_data: dict[str, float]) -> int:
    """Update coverage data for files.

    Args:
        coverage_data: Dict mapping file paths to coverage percentages

    Returns:
        Number of files updated

    """
    updated = 0

    for raw_path, percent in coverage_data.items():
        # Normalize leading "./" so paths line up with index keys
        normalized = raw_path.removeprefix("./")

        if normalized in self._records:
            self._records[normalized].coverage_percent = percent
            updated += 1

    if updated:
        # Coverage feeds into the summary, so rebuild it and persist
        self._recalculate_summary()
        self.save()

    logger.info(f"Updated coverage for {updated} files")
    return updated
|
|
465
|
+
|
|
466
|
+
def _recalculate_summary(self) -> None:
    """Recalculate summary fields derived from records.

    Currently only the average test coverage over files that report
    any coverage at all.
    """
    with_coverage = [
        rec for rec in self._records.values() if rec.coverage_percent > 0
    ]
    if with_coverage:
        total = sum(rec.coverage_percent for rec in with_coverage)
        self._summary.test_coverage_avg = total / len(with_coverage)
|
|
476
|
+
|
|
477
|
+
# ===== Query API =====
|
|
478
|
+
|
|
479
|
+
def get_file(self, path: str) -> "FileRecord | None":
    """Look up the index record for a path; None if the file is not indexed."""
    return self._records.get(path, None)
|
|
482
|
+
|
|
483
|
+
def get_summary(self) -> "ProjectSummary":
    """Return the cached project-level summary."""
    summary = self._summary
    return summary
|
|
486
|
+
|
|
487
|
+
def iter_all_files(self) -> "Iterator[FileRecord]":
    """Lazily yield every file record in the index (memory-efficient).

    Use this when you don't need all records at once.
    """
    for record in self._records.values():
        yield record
|
|
493
|
+
|
|
494
|
+
def get_all_files(self) -> "list[FileRecord]":
    """Return every file record as a list.

    Note: For large indexes, prefer iter_all_files() to avoid
    materializing all records at once.
    """
    records = list(self.iter_all_files())
    return records
|
|
501
|
+
|
|
502
|
+
def iter_files_needing_tests(self) -> "Iterator[FileRecord]":
    """Lazily yield records whose tests are required but missing."""
    yield from (
        record
        for record in self._records.values()
        if record.test_requirement.value == "required" and not record.tests_exist
    )
|
|
507
|
+
|
|
508
|
+
def get_files_needing_tests(self) -> "list[FileRecord]":
    """Return records for files that need tests but don't have them."""
    return [*self.iter_files_needing_tests()]
|
|
511
|
+
|
|
512
|
+
def iter_stale_files(self) -> "Iterator[FileRecord]":
    """Lazily yield records flagged as having stale tests."""
    yield from (record for record in self._records.values() if record.is_stale)
|
|
517
|
+
|
|
518
|
+
def get_stale_files(self) -> "list[FileRecord]":
    """Return records for files with stale tests."""
    return [*self.iter_stale_files()]
|
|
521
|
+
|
|
522
|
+
def iter_files_needing_attention(self) -> "Iterator[FileRecord]":
    """Lazily yield records flagged as needing attention (unsorted).

    Note: For results sorted by impact, use get_files_needing_attention().
    """
    yield from (record for record in self._records.values() if record.needs_attention)
|
|
530
|
+
|
|
531
|
+
def get_files_needing_attention(self) -> "list[FileRecord]":
    """Return records that need attention, highest impact score first."""
    return sorted(
        self.iter_files_needing_attention(),
        key=lambda record: record.impact_score,
        reverse=True,
    )
|
|
537
|
+
|
|
538
|
+
def iter_high_impact_files(self) -> "Iterator[FileRecord]":
    """Lazily yield records at or above the configured impact threshold.

    Note: For results sorted by impact, use get_high_impact_files().
    """
    threshold = self.config.high_impact_threshold
    yield from (
        record for record in self._records.values() if record.impact_score >= threshold
    )
|
|
546
|
+
|
|
547
|
+
def get_high_impact_files(self) -> "list[FileRecord]":
    """Return high-impact records, highest impact score first."""
    return sorted(
        self.iter_high_impact_files(),
        key=lambda record: record.impact_score,
        reverse=True,
    )
|
|
553
|
+
|
|
554
|
+
def get_files_by_category(self, category: str) -> "list[FileRecord]":
    """Return all records whose category name matches *category*."""
    return list(
        filter(lambda record: record.category.value == category, self._records.values())
    )
|
|
557
|
+
|
|
558
|
+
def get_files_by_language(self, language: str) -> "list[FileRecord]":
    """Return all records whose programming language matches *language*."""
    matches = [
        record for record in self._records.values() if record.language == language
    ]
    return matches
|
|
561
|
+
|
|
562
|
+
def search_files(self, pattern: str) -> "list[FileRecord]":
    """Return records whose path matches the given glob-style pattern."""
    import fnmatch

    matches = fnmatch.fnmatch
    return [record for record in self._records.values() if matches(record.path, pattern)]
|
|
567
|
+
|
|
568
|
+
def get_dependents(self, path: str) -> "list[FileRecord]":
    """Return records for the files that import the given file."""
    record = self._records.get(path)
    if not record:
        return []
    # Only resolve importers that are themselves in the index
    return [self._records[dep] for dep in record.imported_by if dep in self._records]
|
|
574
|
+
|
|
575
|
+
def get_dependencies(self, path: str) -> "list[FileRecord]":
    """Return records for the files that the given file depends on.

    Each import string is matched against dotted versions of indexed
    paths; only the first matching record per import is kept.
    """
    record = self._records.get(path)
    if not record:
        return []

    results = []
    for imported_name in record.imports:
        match = next(
            (
                candidate
                for candidate_path, candidate in self._records.items()
                if imported_name in candidate_path.replace("/", ".").replace("\\", ".")
            ),
            None,
        )
        if match is not None:
            results.append(match)
    return results
|
|
588
|
+
|
|
589
|
+
# ===== Statistics =====
|
|
590
|
+
|
|
591
|
+
def get_test_gap_stats(self) -> "dict[str, Any]":
    """Summarize how many files lack required tests, and where."""
    untested = self.get_files_needing_tests()
    threshold = self.config.high_impact_threshold
    high_impact = sum(1 for rec in untested if rec.impact_score >= threshold)

    return {
        "files_without_tests": len(untested),
        "high_impact_untested": high_impact,
        "total_loc_untested": sum(rec.lines_of_code for rec in untested),
        "by_directory": self._group_by_directory(untested),
    }
|
|
607
|
+
|
|
608
|
+
def get_staleness_stats(self) -> "dict[str, Any]":
    """Summarize staleness across files with stale tests."""
    stale = self.get_stale_files()

    if stale:
        avg_days = sum(rec.staleness_days for rec in stale) / len(stale)
        max_days = max(rec.staleness_days for rec in stale)
    else:
        avg_days = 0
        max_days = 0

    return {
        "stale_count": len(stale),
        "avg_staleness_days": avg_days,
        "max_staleness_days": max_days,
        "by_directory": self._group_by_directory(stale),
    }
|
|
618
|
+
|
|
619
|
+
def _group_by_directory(self, records: "list[FileRecord]") -> "dict[str, int]":
    """Count records per top-level directory ("." for files at the root)."""
    from collections import Counter

    # Counter replaces the manual dict.get(..., 0) + 1 counting loop;
    # split("/", 1) stops at the first separator since only the top
    # component is needed.
    top_level = (
        rec.path.split("/", 1)[0] if "/" in rec.path else "."
        for rec in records
    )
    return dict(Counter(top_level))
|
|
630
|
+
|
|
631
|
+
# ===== Context for Workflows =====
|
|
632
|
+
|
|
633
|
+
def get_context_for_workflow(self, workflow_type: str) -> "dict[str, Any]":
    """Build a filtered view of the index tailored to one workflow type.

    Known types: "test_gen", "code_review", "security_audit". Any other
    value yields a generic attention-focused context.
    """
    if workflow_type == "test_gen":
        untested = self.get_files_needing_tests()
        threshold = self.config.high_impact_threshold
        priority = [rec.path for rec in untested if rec.impact_score >= threshold]
        return {
            "files_needing_tests": [rec.to_dict() for rec in untested[:20]],
            "summary": self.get_test_gap_stats(),
            "priority_files": priority[:10],
        }

    if workflow_type == "code_review":
        return {
            "high_impact_files": [rec.to_dict() for rec in self.get_high_impact_files()[:10]],
            "stale_files": [rec.to_dict() for rec in self.get_stale_files()[:10]],
            "summary": self._summary.to_dict(),
        }

    if workflow_type == "security_audit":
        return {
            "all_source_files": [rec.to_dict() for rec in self.get_files_by_category("source")],
            "untested_files": [rec.to_dict() for rec in self.get_files_needing_tests()],
            "summary": self._summary.to_dict(),
        }

    # Fallback for unrecognized workflow types
    attention = self.get_files_needing_attention()[:20]
    return {
        "summary": self._summary.to_dict(),
        "files_needing_attention": [rec.to_dict() for rec in attention],
    }
|