htmlgraph 0.20.1__py3-none-any.whl → 0.27.5__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- htmlgraph/.htmlgraph/.session-warning-state.json +6 -0
- htmlgraph/.htmlgraph/agents.json +72 -0
- htmlgraph/.htmlgraph/htmlgraph.db +0 -0
- htmlgraph/__init__.py +51 -1
- htmlgraph/__init__.pyi +123 -0
- htmlgraph/agent_detection.py +26 -10
- htmlgraph/agent_registry.py +2 -1
- htmlgraph/analytics/__init__.py +8 -1
- htmlgraph/analytics/cli.py +86 -20
- htmlgraph/analytics/cost_analyzer.py +391 -0
- htmlgraph/analytics/cost_monitor.py +664 -0
- htmlgraph/analytics/cost_reporter.py +675 -0
- htmlgraph/analytics/cross_session.py +617 -0
- htmlgraph/analytics/dependency.py +10 -6
- htmlgraph/analytics/pattern_learning.py +771 -0
- htmlgraph/analytics/session_graph.py +707 -0
- htmlgraph/analytics/strategic/__init__.py +80 -0
- htmlgraph/analytics/strategic/cost_optimizer.py +611 -0
- htmlgraph/analytics/strategic/pattern_detector.py +876 -0
- htmlgraph/analytics/strategic/preference_manager.py +709 -0
- htmlgraph/analytics/strategic/suggestion_engine.py +747 -0
- htmlgraph/analytics/work_type.py +67 -27
- htmlgraph/analytics_index.py +53 -20
- htmlgraph/api/__init__.py +3 -0
- htmlgraph/api/cost_alerts_websocket.py +416 -0
- htmlgraph/api/main.py +2498 -0
- htmlgraph/api/static/htmx.min.js +1 -0
- htmlgraph/api/static/style-redesign.css +1344 -0
- htmlgraph/api/static/style.css +1079 -0
- htmlgraph/api/templates/dashboard-redesign.html +1366 -0
- htmlgraph/api/templates/dashboard.html +794 -0
- htmlgraph/api/templates/partials/activity-feed-hierarchical.html +326 -0
- htmlgraph/api/templates/partials/activity-feed.html +1100 -0
- htmlgraph/api/templates/partials/agents-redesign.html +317 -0
- htmlgraph/api/templates/partials/agents.html +317 -0
- htmlgraph/api/templates/partials/event-traces.html +373 -0
- htmlgraph/api/templates/partials/features-kanban-redesign.html +509 -0
- htmlgraph/api/templates/partials/features.html +578 -0
- htmlgraph/api/templates/partials/metrics-redesign.html +346 -0
- htmlgraph/api/templates/partials/metrics.html +346 -0
- htmlgraph/api/templates/partials/orchestration-redesign.html +443 -0
- htmlgraph/api/templates/partials/orchestration.html +198 -0
- htmlgraph/api/templates/partials/spawners.html +375 -0
- htmlgraph/api/templates/partials/work-items.html +613 -0
- htmlgraph/api/websocket.py +538 -0
- htmlgraph/archive/__init__.py +24 -0
- htmlgraph/archive/bloom.py +234 -0
- htmlgraph/archive/fts.py +297 -0
- htmlgraph/archive/manager.py +583 -0
- htmlgraph/archive/search.py +244 -0
- htmlgraph/atomic_ops.py +560 -0
- htmlgraph/attribute_index.py +2 -1
- htmlgraph/bounded_paths.py +539 -0
- htmlgraph/builders/base.py +57 -2
- htmlgraph/builders/bug.py +19 -3
- htmlgraph/builders/chore.py +19 -3
- htmlgraph/builders/epic.py +19 -3
- htmlgraph/builders/feature.py +27 -3
- htmlgraph/builders/insight.py +2 -1
- htmlgraph/builders/metric.py +2 -1
- htmlgraph/builders/pattern.py +2 -1
- htmlgraph/builders/phase.py +19 -3
- htmlgraph/builders/spike.py +29 -3
- htmlgraph/builders/track.py +42 -1
- htmlgraph/cigs/__init__.py +81 -0
- htmlgraph/cigs/autonomy.py +385 -0
- htmlgraph/cigs/cost.py +475 -0
- htmlgraph/cigs/messages_basic.py +472 -0
- htmlgraph/cigs/messaging.py +365 -0
- htmlgraph/cigs/models.py +771 -0
- htmlgraph/cigs/pattern_storage.py +427 -0
- htmlgraph/cigs/patterns.py +503 -0
- htmlgraph/cigs/posttool_analyzer.py +234 -0
- htmlgraph/cigs/reporter.py +818 -0
- htmlgraph/cigs/tracker.py +317 -0
- htmlgraph/cli/.htmlgraph/.session-warning-state.json +6 -0
- htmlgraph/cli/.htmlgraph/agents.json +72 -0
- htmlgraph/cli/.htmlgraph/htmlgraph.db +0 -0
- htmlgraph/cli/__init__.py +42 -0
- htmlgraph/cli/__main__.py +6 -0
- htmlgraph/cli/analytics.py +1424 -0
- htmlgraph/cli/base.py +685 -0
- htmlgraph/cli/constants.py +206 -0
- htmlgraph/cli/core.py +954 -0
- htmlgraph/cli/main.py +147 -0
- htmlgraph/cli/models.py +475 -0
- htmlgraph/cli/templates/__init__.py +1 -0
- htmlgraph/cli/templates/cost_dashboard.py +399 -0
- htmlgraph/cli/work/__init__.py +239 -0
- htmlgraph/cli/work/browse.py +115 -0
- htmlgraph/cli/work/features.py +568 -0
- htmlgraph/cli/work/orchestration.py +676 -0
- htmlgraph/cli/work/report.py +728 -0
- htmlgraph/cli/work/sessions.py +466 -0
- htmlgraph/cli/work/snapshot.py +559 -0
- htmlgraph/cli/work/tracks.py +486 -0
- htmlgraph/cli_commands/__init__.py +1 -0
- htmlgraph/cli_commands/feature.py +195 -0
- htmlgraph/cli_framework.py +115 -0
- htmlgraph/collections/__init__.py +2 -0
- htmlgraph/collections/base.py +197 -14
- htmlgraph/collections/bug.py +2 -1
- htmlgraph/collections/chore.py +2 -1
- htmlgraph/collections/epic.py +2 -1
- htmlgraph/collections/feature.py +2 -1
- htmlgraph/collections/insight.py +2 -1
- htmlgraph/collections/metric.py +2 -1
- htmlgraph/collections/pattern.py +2 -1
- htmlgraph/collections/phase.py +2 -1
- htmlgraph/collections/session.py +194 -0
- htmlgraph/collections/spike.py +13 -2
- htmlgraph/collections/task_delegation.py +241 -0
- htmlgraph/collections/todo.py +14 -1
- htmlgraph/collections/traces.py +487 -0
- htmlgraph/config/cost_models.json +56 -0
- htmlgraph/config.py +190 -0
- htmlgraph/context_analytics.py +2 -1
- htmlgraph/converter.py +116 -7
- htmlgraph/cost_analysis/__init__.py +5 -0
- htmlgraph/cost_analysis/analyzer.py +438 -0
- htmlgraph/dashboard.html +2246 -248
- htmlgraph/dashboard.html.backup +6592 -0
- htmlgraph/dashboard.html.bak +7181 -0
- htmlgraph/dashboard.html.bak2 +7231 -0
- htmlgraph/dashboard.html.bak3 +7232 -0
- htmlgraph/db/__init__.py +38 -0
- htmlgraph/db/queries.py +790 -0
- htmlgraph/db/schema.py +1788 -0
- htmlgraph/decorators.py +317 -0
- htmlgraph/dependency_models.py +2 -1
- htmlgraph/deploy.py +26 -27
- htmlgraph/docs/API_REFERENCE.md +841 -0
- htmlgraph/docs/HTTP_API.md +750 -0
- htmlgraph/docs/INTEGRATION_GUIDE.md +752 -0
- htmlgraph/docs/ORCHESTRATION_PATTERNS.md +717 -0
- htmlgraph/docs/README.md +532 -0
- htmlgraph/docs/__init__.py +77 -0
- htmlgraph/docs/docs_version.py +55 -0
- htmlgraph/docs/metadata.py +93 -0
- htmlgraph/docs/migrations.py +232 -0
- htmlgraph/docs/template_engine.py +143 -0
- htmlgraph/docs/templates/_sections/cli_reference.md.j2 +52 -0
- htmlgraph/docs/templates/_sections/core_concepts.md.j2 +29 -0
- htmlgraph/docs/templates/_sections/sdk_basics.md.j2 +69 -0
- htmlgraph/docs/templates/base_agents.md.j2 +78 -0
- htmlgraph/docs/templates/example_user_override.md.j2 +47 -0
- htmlgraph/docs/version_check.py +163 -0
- htmlgraph/edge_index.py +2 -1
- htmlgraph/error_handler.py +544 -0
- htmlgraph/event_log.py +86 -37
- htmlgraph/event_migration.py +2 -1
- htmlgraph/file_watcher.py +12 -8
- htmlgraph/find_api.py +2 -1
- htmlgraph/git_events.py +67 -9
- htmlgraph/hooks/.htmlgraph/.session-warning-state.json +6 -0
- htmlgraph/hooks/.htmlgraph/agents.json +72 -0
- htmlgraph/hooks/.htmlgraph/index.sqlite +0 -0
- htmlgraph/hooks/__init__.py +8 -0
- htmlgraph/hooks/bootstrap.py +169 -0
- htmlgraph/hooks/cigs_pretool_enforcer.py +354 -0
- htmlgraph/hooks/concurrent_sessions.py +208 -0
- htmlgraph/hooks/context.py +350 -0
- htmlgraph/hooks/drift_handler.py +525 -0
- htmlgraph/hooks/event_tracker.py +790 -99
- htmlgraph/hooks/git_commands.py +175 -0
- htmlgraph/hooks/installer.py +5 -1
- htmlgraph/hooks/orchestrator.py +327 -76
- htmlgraph/hooks/orchestrator_reflector.py +31 -4
- htmlgraph/hooks/post_tool_use_failure.py +32 -7
- htmlgraph/hooks/post_tool_use_handler.py +257 -0
- htmlgraph/hooks/posttooluse.py +92 -19
- htmlgraph/hooks/pretooluse.py +527 -7
- htmlgraph/hooks/prompt_analyzer.py +637 -0
- htmlgraph/hooks/session_handler.py +668 -0
- htmlgraph/hooks/session_summary.py +395 -0
- htmlgraph/hooks/state_manager.py +504 -0
- htmlgraph/hooks/subagent_detection.py +202 -0
- htmlgraph/hooks/subagent_stop.py +369 -0
- htmlgraph/hooks/task_enforcer.py +99 -4
- htmlgraph/hooks/validator.py +212 -91
- htmlgraph/ids.py +2 -1
- htmlgraph/learning.py +125 -100
- htmlgraph/mcp_server.py +2 -1
- htmlgraph/models.py +217 -18
- htmlgraph/operations/README.md +62 -0
- htmlgraph/operations/__init__.py +79 -0
- htmlgraph/operations/analytics.py +339 -0
- htmlgraph/operations/bootstrap.py +289 -0
- htmlgraph/operations/events.py +244 -0
- htmlgraph/operations/fastapi_server.py +231 -0
- htmlgraph/operations/hooks.py +350 -0
- htmlgraph/operations/initialization.py +597 -0
- htmlgraph/operations/initialization.py.backup +228 -0
- htmlgraph/operations/server.py +303 -0
- htmlgraph/orchestration/__init__.py +58 -0
- htmlgraph/orchestration/claude_launcher.py +179 -0
- htmlgraph/orchestration/command_builder.py +72 -0
- htmlgraph/orchestration/headless_spawner.py +281 -0
- htmlgraph/orchestration/live_events.py +377 -0
- htmlgraph/orchestration/model_selection.py +327 -0
- htmlgraph/orchestration/plugin_manager.py +140 -0
- htmlgraph/orchestration/prompts.py +137 -0
- htmlgraph/orchestration/spawner_event_tracker.py +383 -0
- htmlgraph/orchestration/spawners/__init__.py +16 -0
- htmlgraph/orchestration/spawners/base.py +194 -0
- htmlgraph/orchestration/spawners/claude.py +173 -0
- htmlgraph/orchestration/spawners/codex.py +435 -0
- htmlgraph/orchestration/spawners/copilot.py +294 -0
- htmlgraph/orchestration/spawners/gemini.py +471 -0
- htmlgraph/orchestration/subprocess_runner.py +36 -0
- htmlgraph/{orchestration.py → orchestration/task_coordination.py} +16 -8
- htmlgraph/orchestration.md +563 -0
- htmlgraph/orchestrator-system-prompt-optimized.txt +863 -0
- htmlgraph/orchestrator.py +2 -1
- htmlgraph/orchestrator_config.py +357 -0
- htmlgraph/orchestrator_mode.py +115 -4
- htmlgraph/parallel.py +2 -1
- htmlgraph/parser.py +86 -6
- htmlgraph/path_query.py +608 -0
- htmlgraph/pattern_matcher.py +636 -0
- htmlgraph/pydantic_models.py +476 -0
- htmlgraph/quality_gates.py +350 -0
- htmlgraph/query_builder.py +2 -1
- htmlgraph/query_composer.py +509 -0
- htmlgraph/reflection.py +443 -0
- htmlgraph/refs.py +344 -0
- htmlgraph/repo_hash.py +512 -0
- htmlgraph/repositories/__init__.py +292 -0
- htmlgraph/repositories/analytics_repository.py +455 -0
- htmlgraph/repositories/analytics_repository_standard.py +628 -0
- htmlgraph/repositories/feature_repository.py +581 -0
- htmlgraph/repositories/feature_repository_htmlfile.py +668 -0
- htmlgraph/repositories/feature_repository_memory.py +607 -0
- htmlgraph/repositories/feature_repository_sqlite.py +858 -0
- htmlgraph/repositories/filter_service.py +620 -0
- htmlgraph/repositories/filter_service_standard.py +445 -0
- htmlgraph/repositories/shared_cache.py +621 -0
- htmlgraph/repositories/shared_cache_memory.py +395 -0
- htmlgraph/repositories/track_repository.py +552 -0
- htmlgraph/repositories/track_repository_htmlfile.py +619 -0
- htmlgraph/repositories/track_repository_memory.py +508 -0
- htmlgraph/repositories/track_repository_sqlite.py +711 -0
- htmlgraph/sdk/__init__.py +398 -0
- htmlgraph/sdk/__init__.pyi +14 -0
- htmlgraph/sdk/analytics/__init__.py +19 -0
- htmlgraph/sdk/analytics/engine.py +155 -0
- htmlgraph/sdk/analytics/helpers.py +178 -0
- htmlgraph/sdk/analytics/registry.py +109 -0
- htmlgraph/sdk/base.py +484 -0
- htmlgraph/sdk/constants.py +216 -0
- htmlgraph/sdk/core.pyi +308 -0
- htmlgraph/sdk/discovery.py +120 -0
- htmlgraph/sdk/help/__init__.py +12 -0
- htmlgraph/sdk/help/mixin.py +699 -0
- htmlgraph/sdk/mixins/__init__.py +15 -0
- htmlgraph/sdk/mixins/attribution.py +113 -0
- htmlgraph/sdk/mixins/mixin.py +410 -0
- htmlgraph/sdk/operations/__init__.py +12 -0
- htmlgraph/sdk/operations/mixin.py +427 -0
- htmlgraph/sdk/orchestration/__init__.py +17 -0
- htmlgraph/sdk/orchestration/coordinator.py +203 -0
- htmlgraph/sdk/orchestration/spawner.py +204 -0
- htmlgraph/sdk/planning/__init__.py +19 -0
- htmlgraph/sdk/planning/bottlenecks.py +93 -0
- htmlgraph/sdk/planning/mixin.py +211 -0
- htmlgraph/sdk/planning/parallel.py +186 -0
- htmlgraph/sdk/planning/queue.py +210 -0
- htmlgraph/sdk/planning/recommendations.py +87 -0
- htmlgraph/sdk/planning/smart_planning.py +319 -0
- htmlgraph/sdk/session/__init__.py +19 -0
- htmlgraph/sdk/session/continuity.py +57 -0
- htmlgraph/sdk/session/handoff.py +110 -0
- htmlgraph/sdk/session/info.py +309 -0
- htmlgraph/sdk/session/manager.py +103 -0
- htmlgraph/sdk/strategic/__init__.py +26 -0
- htmlgraph/sdk/strategic/mixin.py +563 -0
- htmlgraph/server.py +295 -107
- htmlgraph/session_hooks.py +300 -0
- htmlgraph/session_manager.py +285 -3
- htmlgraph/session_registry.py +587 -0
- htmlgraph/session_state.py +436 -0
- htmlgraph/session_warning.py +2 -1
- htmlgraph/sessions/__init__.py +23 -0
- htmlgraph/sessions/handoff.py +756 -0
- htmlgraph/system_prompts.py +450 -0
- htmlgraph/templates/orchestration-view.html +350 -0
- htmlgraph/track_builder.py +33 -1
- htmlgraph/track_manager.py +38 -0
- htmlgraph/transcript.py +18 -5
- htmlgraph/validation.py +115 -0
- htmlgraph/watch.py +2 -1
- htmlgraph/work_type_utils.py +2 -1
- {htmlgraph-0.20.1.data → htmlgraph-0.27.5.data}/data/htmlgraph/dashboard.html +2246 -248
- {htmlgraph-0.20.1.dist-info → htmlgraph-0.27.5.dist-info}/METADATA +95 -64
- htmlgraph-0.27.5.dist-info/RECORD +337 -0
- {htmlgraph-0.20.1.dist-info → htmlgraph-0.27.5.dist-info}/entry_points.txt +1 -1
- htmlgraph/cli.py +0 -4839
- htmlgraph/sdk.py +0 -2359
- htmlgraph-0.20.1.dist-info/RECORD +0 -118
- {htmlgraph-0.20.1.data → htmlgraph-0.27.5.data}/data/htmlgraph/styles.css +0 -0
- {htmlgraph-0.20.1.data → htmlgraph-0.27.5.data}/data/htmlgraph/templates/AGENTS.md.template +0 -0
- {htmlgraph-0.20.1.data → htmlgraph-0.27.5.data}/data/htmlgraph/templates/CLAUDE.md.template +0 -0
- {htmlgraph-0.20.1.data → htmlgraph-0.27.5.data}/data/htmlgraph/templates/GEMINI.md.template +0 -0
- {htmlgraph-0.20.1.dist-info → htmlgraph-0.27.5.dist-info}/WHEEL +0 -0
|
@@ -0,0 +1,628 @@
|
|
|
1
|
+
"""
|
|
2
|
+
StandardAnalyticsRepository - Unified analytics using Feature/Track repositories.
|
|
3
|
+
|
|
4
|
+
Composes FeatureRepository and TrackRepository to provide:
|
|
5
|
+
- Work recommendations with multi-criteria scoring
|
|
6
|
+
- Dependency analysis and critical path detection
|
|
7
|
+
- Project health metrics and completion estimates
|
|
8
|
+
- Blocked/blocking item detection
|
|
9
|
+
|
|
10
|
+
NO direct data access - all data through repositories.
|
|
11
|
+
"""
|
|
12
|
+
|
|
13
|
+
from collections import defaultdict, deque
|
|
14
|
+
from typing import Any
|
|
15
|
+
|
|
16
|
+
from .analytics_repository import (
|
|
17
|
+
AnalysisError,
|
|
18
|
+
AnalyticsRepository,
|
|
19
|
+
DependencyAnalysis,
|
|
20
|
+
InvalidItemError,
|
|
21
|
+
WorkRecommendation,
|
|
22
|
+
)
|
|
23
|
+
from .feature_repository import FeatureRepository
|
|
24
|
+
from .shared_cache import SharedCache
|
|
25
|
+
from .track_repository import TrackRepository
|
|
26
|
+
|
|
27
|
+
# Priority scoring weights
|
|
28
|
+
PRIORITY_WEIGHTS = {
|
|
29
|
+
"low": 0.25,
|
|
30
|
+
"medium": 0.5,
|
|
31
|
+
"high": 0.75,
|
|
32
|
+
"critical": 1.0,
|
|
33
|
+
}
|
|
34
|
+
|
|
35
|
+
|
|
36
|
+
class StandardAnalyticsRepository(AnalyticsRepository):
|
|
37
|
+
"""
|
|
38
|
+
Standard implementation of AnalyticsRepository.
|
|
39
|
+
|
|
40
|
+
Features:
|
|
41
|
+
- Composes Feature and Track repositories
|
|
42
|
+
- Multi-criteria work recommendations
|
|
43
|
+
- Transitive dependency analysis
|
|
44
|
+
- Critical path detection
|
|
45
|
+
- Comprehensive caching with invalidation
|
|
46
|
+
|
|
47
|
+
Performance:
|
|
48
|
+
- recommend_next_work(): O(n) with caching
|
|
49
|
+
- analyze_dependencies(): O(n) graph traversal, cached
|
|
50
|
+
- calculate_priority(): O(1) if cached, O(n) if not
|
|
51
|
+
- get_critical_path(): O(n) computed once, cached
|
|
52
|
+
"""
|
|
53
|
+
|
|
54
|
+
def __init__(
    self,
    feature_repo: FeatureRepository,
    track_repo: TrackRepository,
    cache: SharedCache,
):
    """Wire up the repositories and shared cache this analytics layer uses.

    All data access is delegated to these collaborators; this class holds
    no storage of its own.
    """
    self.feature_repo = feature_repo
    self.track_repo = track_repo
    self.cache = cache
|
|
64
|
+
|
|
65
|
+
# ===== RECOMMENDATION OPERATIONS =====
|
|
66
|
+
|
|
67
|
+
def recommend_next_work(
    self,
    filters: dict[str, Any] | None = None,
    limit: int = 10,
    min_priority: float = 0.0,
) -> list[WorkRecommendation]:
    """Return up to ``limit`` work items ranked by priority score.

    Args:
        filters: Optional filter dict; a ``status`` key narrows the item
            query, the remaining keys are applied via ``_apply_filters``.
        limit: Maximum number of recommendations to return.
        min_priority: Items scoring below this threshold are dropped.

    Raises:
        AnalysisError: If recommendation generation fails unexpectedly.
    """
    try:
        status = filters.get("status") if filters else None
        items = self.get_work_items(status=status, include_tracks=True)

        if filters:
            items = self._apply_filters(items, filters)

        recommendations = []
        for item in items:
            item_id = getattr(item, "id", str(item))

            # An item whose priority cannot be computed is skipped rather
            # than failing the whole recommendation run.
            try:
                priority_score = self.calculate_priority(item_id)
            except (InvalidItemError, AnalysisError):
                continue

            if priority_score < min_priority:
                continue

            # Fall back to an empty analysis so one bad item does not
            # prevent a recommendation from being produced.
            try:
                analysis = self.analyze_dependencies(item_id)
            except (InvalidItemError, AnalysisError):
                analysis = DependencyAnalysis(
                    item_id=item_id,
                    dependencies=[],
                    blocking=[],
                    blocked_by=[],
                    critical_path=False,
                    blocked_count=0,
                    dependency_count=0,
                )

            title = getattr(item, "title", item_id)
            rationale = self._build_rationale(item, analysis, priority_score)

            recommendations.append(
                WorkRecommendation(
                    item_id=item_id,
                    title=title,
                    priority_score=priority_score,
                    rationale=rationale,
                    estimated_impact=self._estimate_impact(item, analysis),
                    blocking_count=analysis.blocked_count,
                    dependency_count=analysis.dependency_count,
                )
            )

        # Highest-priority first, capped at the requested limit.
        recommendations.sort(key=lambda r: r.priority_score, reverse=True)
        return recommendations[:limit]

    except Exception as e:
        # Chain the cause so the original traceback is preserved (B904).
        raise AnalysisError(f"Failed to generate recommendations: {e}") from e
|
|
137
|
+
|
|
138
|
+
def analyze_dependencies(self, item_id: str) -> DependencyAnalysis:
    """Compute complete dependency analysis for a work item.

    Results are cached for an hour; call ``invalidate_analytics_cache``
    after mutating dependencies to force recomputation.

    Raises:
        InvalidItemError: If ``item_id`` does not resolve to a known item.
        AnalysisError: If the analysis fails for any other reason.
    """
    cache_key = f"dependency:{item_id}"
    cached = self.cache.get(cache_key)
    if cached is not None:
        result: DependencyAnalysis = cached
        return result

    try:
        item = self._get_item(item_id)
        if item is None:
            raise InvalidItemError(item_id)

        # Full transitive closure, plus the items that depend on this one.
        all_deps = self._get_transitive_dependencies(item_id)
        blocking = self._get_blocking_items(item_id)

        # Direct dependencies that are still incomplete block this item.
        blocked_by = []
        for dep_id in self._get_direct_dependencies(item_id):
            dep_item = self._get_item(dep_id)
            if dep_item and self._is_incomplete(dep_item):
                blocked_by.append(dep_id)

        critical_path = self.is_on_critical_path(item_id)

        analysis = DependencyAnalysis(
            item_id=item_id,
            dependencies=all_deps,
            blocking=blocking,
            blocked_by=blocked_by,
            critical_path=critical_path,
            blocked_count=len(blocking),
            dependency_count=len(all_deps),
        )

        self.cache.set(cache_key, analysis, ttl=3600)
        return analysis

    except InvalidItemError:
        raise
    except Exception as e:
        # Chain the cause so the original traceback is preserved (B904).
        raise AnalysisError(f"Failed to analyze dependencies for {item_id}: {e}") from e
|
|
187
|
+
|
|
188
|
+
def calculate_priority(self, item_id: str) -> float:
    """Calculate a normalized priority score in [0.0, 1.0] for an item.

    Score = base weight for the item's ``priority`` attribute, boosted
    for items that block others (+0.1 each, capped at +0.3) or sit on
    the critical path (+0.2), and penalized when the item itself is
    blocked (-0.2). Cached for an hour.

    Raises:
        InvalidItemError: If the item does not exist.
        AnalysisError: If scoring fails for any other reason.
    """
    cache_key = f"priority:{item_id}"
    cached = self.cache.get(cache_key)
    if cached is not None:
        result: float = cached
        return result

    try:
        item = self._get_item(item_id)
        if item is None:
            raise InvalidItemError(item_id)

        # Guard against a None priority attribute before lowercasing;
        # unknown strings fall back to the default 0.5 weight.
        priority_str = (getattr(item, "priority", None) or "medium").lower()
        base_score = PRIORITY_WEIGHTS.get(priority_str, 0.5)

        analysis = self.analyze_dependencies(item_id)

        blocking_boost = min(analysis.blocked_count * 0.1, 0.3)
        critical_boost = 0.2 if analysis.critical_path else 0.0
        blocked_penalty = -0.2 if analysis.is_blocked else 0.0

        # Clamp the combined score into the normalized [0, 1] range.
        score = base_score + blocking_boost + critical_boost + blocked_penalty
        score = max(0.0, min(1.0, score))

        self.cache.set(cache_key, score, ttl=3600)
        return score

    except InvalidItemError:
        raise
    except Exception as e:
        # Chain the cause for easier debugging (B904).
        raise AnalysisError(f"Failed to calculate priority for {item_id}: {e}") from e
|
|
232
|
+
|
|
233
|
+
# ===== WORK ITEM QUERIES =====
|
|
234
|
+
|
|
235
|
+
def get_work_items(
    self, status: str | None = None, include_tracks: bool = True
) -> list[Any]:
    """Return all work items, optionally filtered by status.

    Features are always queried; tracks are appended when
    ``include_tracks`` is True. A failing repository contributes no
    items rather than raising (best-effort aggregation).
    """

    def _matching(records: list[Any]) -> list[Any]:
        # Apply the optional status filter to one repository's results.
        if not status:
            return list(records)
        return [r for r in records if getattr(r, "status", None) == status]

    items: list[Any] = []

    try:
        items.extend(_matching(self.feature_repo.list()))
    except Exception:
        pass

    if include_tracks:
        try:
            items.extend(_matching(self.track_repo.list()))
        except Exception:
            pass

    return items
|
|
261
|
+
|
|
262
|
+
def find_blocked_items(self) -> list[str]:
    """Return IDs of all work items blocked by incomplete dependencies.

    The result is cached for 30 minutes; items whose analysis fails are
    silently skipped.
    """
    cache_key = "blocking:all_blocked"
    cached = self.cache.get(cache_key)
    if cached is not None:
        result: list[str] = cached
        return result

    blocked: list[str] = []
    for item in self.get_work_items(include_tracks=True):
        item_id = getattr(item, "id", str(item))
        try:
            if self.analyze_dependencies(item_id).is_blocked:
                blocked.append(item_id)
        except (InvalidItemError, AnalysisError):
            continue

    self.cache.set(cache_key, blocked, ttl=1800)
    return list(blocked)
|
|
287
|
+
|
|
288
|
+
def find_blocking_items(self, item_id: str) -> list[str]:
    """Return IDs of items that are blocked by the given item.

    Raises:
        InvalidItemError: If the item is unknown or its analysis fails.
    """
    try:
        return self.analyze_dependencies(item_id).blocking
    except (InvalidItemError, AnalysisError) as e:
        # Chain the underlying failure instead of discarding it, so the
        # real cause (e.g. an AnalysisError) remains visible in tracebacks.
        raise InvalidItemError(item_id) from e
|
|
295
|
+
|
|
296
|
+
# ===== CRITICAL PATH ANALYSIS =====
|
|
297
|
+
|
|
298
|
+
def get_critical_path(self) -> list[str]:
    """Return item IDs on the critical path to project completion.

    Computed as the longest dependency chain through the full project
    graph and cached for an hour.

    Raises:
        AnalysisError: If the path cannot be computed.
    """
    cache_key = "critical_path:path"
    cached = self.cache.get(cache_key)
    if cached is not None:
        result: list[str] = cached
        return result

    try:
        all_items = self.get_work_items(include_tracks=True)
        graph = self._build_dependency_graph(all_items)
        critical_path = self._compute_critical_path(graph)

        self.cache.set(cache_key, critical_path, ttl=3600)
        return list(critical_path)

    except Exception as e:
        # Chain the cause so the original traceback is preserved (B904).
        raise AnalysisError(f"Failed to compute critical path: {e}") from e
|
|
322
|
+
|
|
323
|
+
def is_on_critical_path(self, item_id: str) -> bool:
    """Return True when the item appears on the project critical path.

    Analysis failures are treated as "not on the path" rather than raised.
    """
    try:
        return item_id in self.get_critical_path()
    except (InvalidItemError, AnalysisError):
        return False
|
|
330
|
+
|
|
331
|
+
# ===== METRICS & HEALTH =====
|
|
332
|
+
|
|
333
|
+
def cache_metrics(self) -> dict[str, Any]:
    """Expose the shared cache's performance statistics."""
    stats = self.cache.stats()
    return stats
|
|
336
|
+
|
|
337
|
+
def invalidate_analytics_cache(self, item_id: str | None = None) -> None:
    """Drop cached analytics for one item, or for everything.

    With an ``item_id``, only that item's dependency/priority entries
    (and related blocking entries) are removed; without one, the whole
    analytics cache is flushed.
    """
    if item_id:
        for key in (f"dependency:{item_id}", f"priority:{item_id}"):
            self.cache.delete(key)
        self.cache.delete_pattern(f"dependency:*:blocking_for_{item_id}")
    else:
        self.cache.invalidate_analytics()
|
|
345
|
+
|
|
346
|
+
# ===== ADVANCED QUERIES =====
|
|
347
|
+
|
|
348
|
+
def find_dependency_cycles(self) -> list[list[str]]:
    """Detect circular dependencies via depth-first search.

    Returns a list of cycles, each expressed as a node path that ends on
    the node where the cycle closes (e.g. ``["a", "b", "a"]``).
    """
    graph = self._build_dependency_graph(self.get_work_items(include_tracks=True))

    found: list[list[str]] = []
    seen: set[str] = set()
    on_stack: set[str] = set()

    def walk(current: str, trail: list[str]) -> None:
        # Track the active DFS chain so back-edges can be recognized.
        seen.add(current)
        on_stack.add(current)
        trail.append(current)

        for nxt in graph.get(current, []):
            if nxt not in seen:
                walk(nxt, trail[:])
            elif nxt in on_stack:
                # Back-edge: slice the trail from the repeated node onward.
                start = trail.index(nxt)
                cycle = trail[start:] + [nxt]
                if cycle not in found:
                    found.append(cycle)

        on_stack.remove(current)

    for node in graph:
        if node not in seen:
            walk(node, [])

    return found
|
|
379
|
+
|
|
380
|
+
def suggest_parallelizable_work(self) -> list[list[str]]:
    """Group ready "todo" items into waves that can run in parallel.

    Wave N contains items whose dependencies are all satisfied by waves
    0..N-1 (or are outside the todo set). Stops early if a cycle leaves
    items permanently unready.
    """
    todo_items = self.get_work_items(status="todo", include_tracks=True)
    graph = self._build_dependency_graph(todo_items)

    waves: list[list[str]] = []
    pending = {getattr(item, "id", str(item)) for item in todo_items}

    while pending:
        # Anything with no still-pending dependency is ready right now.
        ready = [
            node
            for node in list(pending)
            if not any(dep in pending for dep in graph.get(node, []))
        ]
        if not ready:
            break  # dependency cycle: nothing can become ready

        waves.append(ready)
        pending.difference_update(ready)

    return waves
|
|
404
|
+
|
|
405
|
+
def project_completion_estimate(self) -> dict[str, Any]:
    """Summarize remaining work and a rough completion estimate.

    The estimate assumes one day per critical-path item; the worst case
    assumes every incomplete item runs serially.
    """
    all_items = self.get_work_items(include_tracks=True)

    # Tally items per status for the breakdown section.
    status_counts: dict[str, int] = defaultdict(int)
    for item in all_items:
        status_counts[getattr(item, "status", "unknown")] += 1

    try:
        critical_path_length = len(self.get_critical_path())
    except AnalysisError:
        critical_path_length = 0  # degrade gracefully if the path fails

    blocked = self.find_blocked_items()

    done_states = ["done", "completed"]
    incomplete = [
        item for item in all_items
        if getattr(item, "status", "") not in done_states
    ]

    return {
        "items_remaining": len(incomplete),
        "critical_path_length": critical_path_length,
        "estimated_days": critical_path_length,  # 1 day per critical item
        "blocking_items": len(blocked),
        "worst_case_days": len(incomplete),  # if everything runs serially
        "status_breakdown": dict(status_counts),
    }
|
|
444
|
+
|
|
445
|
+
# ===== HELPER METHODS =====
|
|
446
|
+
|
|
447
|
+
def _get_item(self, item_id: str) -> Any | None:
    """Resolve an item ID against the feature repo, then the track repo."""
    # Features take precedence; fall through to tracks otherwise.
    feature = self.feature_repo.get(item_id)
    if feature:
        return feature
    return self.track_repo.get(item_id)
|
|
457
|
+
|
|
458
|
+
def _get_direct_dependencies(self, item_id: str) -> list[str]:
    """Return the item's direct dependency IDs, normalized to a list.

    Accepts either a list attribute or a comma-separated string; a
    missing item, missing attribute, or ``None`` value yields ``[]``
    (previously a ``None`` attribute leaked out and crashed callers
    that iterate the result).
    """
    item = self._get_item(item_id)
    if not item:
        return []

    deps = getattr(item, "dependencies", None)
    if deps is None:
        return []  # guard: never let a None attribute reach callers
    if isinstance(deps, str):
        # Comma-separated form: split and drop empty fragments.
        deps = [d.strip() for d in deps.split(",") if d.strip()]

    return deps
|
|
469
|
+
|
|
470
|
+
def _get_transitive_dependencies(self, item_id: str) -> list[str]:
    """Return all transitive dependency IDs in BFS discovery order.

    Each dependency is reported exactly once, even when reachable
    through multiple paths. (The previous version marked nodes visited
    only when dequeued, so a shared dependency could be appended to the
    result — and the queue — more than once.)
    """
    discovered = {item_id}  # everything ever enqueued or reported
    queue = deque([item_id])
    all_deps: list[str] = []

    while queue:
        current = queue.popleft()
        for dep in self._get_direct_dependencies(current):
            if dep not in discovered:
                discovered.add(dep)
                all_deps.append(dep)
                queue.append(dep)

    return all_deps
|
|
490
|
+
|
|
491
|
+
def _get_blocking_items(self, item_id: str) -> list[str]:
|
|
492
|
+
"""Get items that depend on this item."""
|
|
493
|
+
all_items = self.get_work_items(include_tracks=True)
|
|
494
|
+
blocking = []
|
|
495
|
+
|
|
496
|
+
for item in all_items:
|
|
497
|
+
current_id = getattr(item, "id", str(item))
|
|
498
|
+
if current_id == item_id:
|
|
499
|
+
continue
|
|
500
|
+
|
|
501
|
+
deps = self._get_direct_dependencies(current_id)
|
|
502
|
+
if item_id in deps:
|
|
503
|
+
blocking.append(current_id)
|
|
504
|
+
|
|
505
|
+
return blocking
|
|
506
|
+
|
|
507
|
+
def _is_incomplete(self, item: Any) -> bool:
|
|
508
|
+
"""Check if item is incomplete."""
|
|
509
|
+
status = getattr(item, "status", "").lower()
|
|
510
|
+
return status not in ["done", "completed"]
|
|
511
|
+
|
|
512
|
+
def _build_dependency_graph(self, items: list[Any]) -> dict[str, list[str]]:
|
|
513
|
+
"""Build adjacency list for dependency graph."""
|
|
514
|
+
graph = {}
|
|
515
|
+
|
|
516
|
+
for item in items:
|
|
517
|
+
item_id = getattr(item, "id", str(item))
|
|
518
|
+
deps = self._get_direct_dependencies(item_id)
|
|
519
|
+
graph[item_id] = deps
|
|
520
|
+
|
|
521
|
+
return graph
|
|
522
|
+
|
|
523
|
+
def _compute_critical_path(self, graph: dict[str, list[str]]) -> list[str]:
    """Compute critical path using longest path algorithm.

    ``graph`` maps an item ID to its direct-dependency IDs, so edges run
    from an item toward its prerequisites.  A Kahn-style topological
    traversal relaxes unit-weight edges to find the longest chain; the
    returned path runs from an item nothing depends on down to its
    deepest prerequisite.  Returns [] when no edges were traversed.
    NOTE(review): nodes on a dependency cycle never reach in-degree 0
    and are silently excluded from the traversal — confirm upstream
    that this is acceptable.
    """
    # Topological sort: count, for each node, how many items list it as
    # a dependency (incoming edges under the item -> prerequisite
    # orientation).
    in_degree: dict[str, int] = defaultdict(int)
    for node in graph:
        for dep in graph[node]:
            in_degree[dep] += 1

    # Seed the traversal with nodes that no other item depends on.
    queue = deque([node for node in graph if in_degree[node] == 0])

    # distances[n] = edge count of the longest chain reaching n;
    # predecessors[n] = the node preceding n on that chain.
    distances: dict[str, int] = defaultdict(int)
    predecessors: dict[str, str] = {}

    while queue:
        node = queue.popleft()

        for neighbor in graph.get(node, []):
            # Relax the unit-weight edge: keep the longer chain.
            # (Accessing distances[node] on the defaultdict also inserts
            # the key, which the emptiness check below relies on.)
            if distances[node] + 1 > distances[neighbor]:
                distances[neighbor] = distances[node] + 1
                predecessors[neighbor] = node

            in_degree[neighbor] -= 1
            if in_degree[neighbor] == 0:
                queue.append(neighbor)

    # Empty distances means no edges were traversed at all.
    if not distances:
        return []

    # The critical path ends at the node with the greatest chain length.
    end_node = max(distances, key=lambda k: distances[k])

    # Walk predecessors back from the end node, then flip to forward
    # (item -> deepest prerequisite) order.
    path: list[str] = []
    current: str | None = end_node
    while current is not None:
        path.append(current)
        current = predecessors.get(current)

    path.reverse()
    return path
|
|
566
|
+
|
|
567
|
+
def _apply_filters(self, items: list[Any], filters: dict[str, Any]) -> list[Any]:
|
|
568
|
+
"""Apply additional filters to items."""
|
|
569
|
+
filtered = items
|
|
570
|
+
|
|
571
|
+
for key, value in filters.items():
|
|
572
|
+
if key == "status":
|
|
573
|
+
continue # Already handled
|
|
574
|
+
|
|
575
|
+
filtered = [item for item in filtered if getattr(item, key, None) == value]
|
|
576
|
+
|
|
577
|
+
return filtered
|
|
578
|
+
|
|
579
|
+
def _build_rationale(
    self, item: Any, analysis: DependencyAnalysis, priority_score: float
) -> str:
    """Compose a human-readable explanation for a recommendation.

    Collects the applicable signals — priority tier, number of items
    unblocked, critical-path membership, and absence of blockers — and
    joins them with "; ".  Falls back to "Ready to work" when no signal
    applies.
    """
    parts: list[str] = []

    # Priority tier (thresholds on the 0-1 priority score).
    if priority_score > 0.8:
        parts.append("Critical priority")
    elif priority_score > 0.6:
        parts.append("High priority")

    # How many items completing this one would unblock.
    if analysis.blocked_count > 0:
        parts.append(f"Unblocks {analysis.blocked_count} item(s)")

    # Critical-path membership.
    if analysis.critical_path:
        parts.append("On critical path")

    # Nothing is blocking this item.
    if not analysis.is_blocked:
        parts.append("No blockers")

    return "; ".join(parts) if parts else "Ready to work"
|
|
607
|
+
|
|
608
|
+
def _estimate_impact(self, item: Any, analysis: DependencyAnalysis) -> float:
    """Score the impact of completing this item on a 0-1 scale.

    Signals: items unblocked (0.2 each, saturating at 0.6),
    critical-path membership (+0.3), and a flat 0.1 baseline; the
    total is clamped to 1.0.
    """
    score = 0.0

    # Unblocking contribution, capped so it never dominates alone.
    if analysis.blocked_count > 0:
        score += min(analysis.blocked_count * 0.2, 0.6)

    # Being on the critical path adds a fixed bonus.
    if analysis.critical_path:
        score += 0.3

    # Baseline value of completing any item.
    score += 0.1

    return min(score, 1.0)
|