titan-cli 0.1.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- titan_cli/__init__.py +3 -0
- titan_cli/__main__.py +4 -0
- titan_cli/ai/__init__.py +0 -0
- titan_cli/ai/agents/__init__.py +15 -0
- titan_cli/ai/agents/base.py +152 -0
- titan_cli/ai/client.py +170 -0
- titan_cli/ai/constants.py +56 -0
- titan_cli/ai/exceptions.py +48 -0
- titan_cli/ai/models.py +34 -0
- titan_cli/ai/oauth_helper.py +120 -0
- titan_cli/ai/providers/__init__.py +9 -0
- titan_cli/ai/providers/anthropic.py +117 -0
- titan_cli/ai/providers/base.py +75 -0
- titan_cli/ai/providers/gemini.py +278 -0
- titan_cli/cli.py +59 -0
- titan_cli/clients/__init__.py +1 -0
- titan_cli/clients/gcloud_client.py +52 -0
- titan_cli/core/__init__.py +3 -0
- titan_cli/core/config.py +274 -0
- titan_cli/core/discovery.py +51 -0
- titan_cli/core/errors.py +81 -0
- titan_cli/core/models.py +52 -0
- titan_cli/core/plugins/available.py +36 -0
- titan_cli/core/plugins/models.py +67 -0
- titan_cli/core/plugins/plugin_base.py +108 -0
- titan_cli/core/plugins/plugin_registry.py +163 -0
- titan_cli/core/secrets.py +141 -0
- titan_cli/core/workflows/__init__.py +22 -0
- titan_cli/core/workflows/models.py +88 -0
- titan_cli/core/workflows/project_step_source.py +86 -0
- titan_cli/core/workflows/workflow_exceptions.py +17 -0
- titan_cli/core/workflows/workflow_filter_service.py +137 -0
- titan_cli/core/workflows/workflow_registry.py +419 -0
- titan_cli/core/workflows/workflow_sources.py +307 -0
- titan_cli/engine/__init__.py +39 -0
- titan_cli/engine/builder.py +159 -0
- titan_cli/engine/context.py +82 -0
- titan_cli/engine/mock_context.py +176 -0
- titan_cli/engine/results.py +91 -0
- titan_cli/engine/steps/ai_assistant_step.py +185 -0
- titan_cli/engine/steps/command_step.py +93 -0
- titan_cli/engine/utils/__init__.py +3 -0
- titan_cli/engine/utils/venv.py +31 -0
- titan_cli/engine/workflow_executor.py +187 -0
- titan_cli/external_cli/__init__.py +0 -0
- titan_cli/external_cli/configs.py +17 -0
- titan_cli/external_cli/launcher.py +65 -0
- titan_cli/messages.py +121 -0
- titan_cli/ui/tui/__init__.py +205 -0
- titan_cli/ui/tui/__previews__/statusbar_preview.py +88 -0
- titan_cli/ui/tui/app.py +113 -0
- titan_cli/ui/tui/icons.py +70 -0
- titan_cli/ui/tui/screens/__init__.py +24 -0
- titan_cli/ui/tui/screens/ai_config.py +498 -0
- titan_cli/ui/tui/screens/ai_config_wizard.py +882 -0
- titan_cli/ui/tui/screens/base.py +110 -0
- titan_cli/ui/tui/screens/cli_launcher.py +151 -0
- titan_cli/ui/tui/screens/global_setup_wizard.py +363 -0
- titan_cli/ui/tui/screens/main_menu.py +162 -0
- titan_cli/ui/tui/screens/plugin_config_wizard.py +550 -0
- titan_cli/ui/tui/screens/plugin_management.py +377 -0
- titan_cli/ui/tui/screens/project_setup_wizard.py +686 -0
- titan_cli/ui/tui/screens/workflow_execution.py +592 -0
- titan_cli/ui/tui/screens/workflows.py +249 -0
- titan_cli/ui/tui/textual_components.py +537 -0
- titan_cli/ui/tui/textual_workflow_executor.py +405 -0
- titan_cli/ui/tui/theme.py +102 -0
- titan_cli/ui/tui/widgets/__init__.py +40 -0
- titan_cli/ui/tui/widgets/button.py +108 -0
- titan_cli/ui/tui/widgets/header.py +116 -0
- titan_cli/ui/tui/widgets/panel.py +81 -0
- titan_cli/ui/tui/widgets/status_bar.py +115 -0
- titan_cli/ui/tui/widgets/table.py +77 -0
- titan_cli/ui/tui/widgets/text.py +177 -0
- titan_cli/utils/__init__.py +0 -0
- titan_cli/utils/autoupdate.py +155 -0
- titan_cli-0.1.0.dist-info/METADATA +149 -0
- titan_cli-0.1.0.dist-info/RECORD +146 -0
- titan_cli-0.1.0.dist-info/WHEEL +4 -0
- titan_cli-0.1.0.dist-info/entry_points.txt +9 -0
- titan_cli-0.1.0.dist-info/licenses/LICENSE +201 -0
- titan_plugin_git/__init__.py +1 -0
- titan_plugin_git/clients/__init__.py +8 -0
- titan_plugin_git/clients/git_client.py +772 -0
- titan_plugin_git/exceptions.py +40 -0
- titan_plugin_git/messages.py +112 -0
- titan_plugin_git/models.py +39 -0
- titan_plugin_git/plugin.py +118 -0
- titan_plugin_git/steps/__init__.py +1 -0
- titan_plugin_git/steps/ai_commit_message_step.py +171 -0
- titan_plugin_git/steps/branch_steps.py +104 -0
- titan_plugin_git/steps/commit_step.py +80 -0
- titan_plugin_git/steps/push_step.py +63 -0
- titan_plugin_git/steps/status_step.py +59 -0
- titan_plugin_git/workflows/__previews__/__init__.py +1 -0
- titan_plugin_git/workflows/__previews__/commit_ai_preview.py +124 -0
- titan_plugin_git/workflows/commit-ai.yaml +28 -0
- titan_plugin_github/__init__.py +11 -0
- titan_plugin_github/agents/__init__.py +6 -0
- titan_plugin_github/agents/config_loader.py +130 -0
- titan_plugin_github/agents/issue_generator.py +353 -0
- titan_plugin_github/agents/pr_agent.py +528 -0
- titan_plugin_github/clients/__init__.py +8 -0
- titan_plugin_github/clients/github_client.py +1105 -0
- titan_plugin_github/config/__init__.py +0 -0
- titan_plugin_github/config/pr_agent.toml +85 -0
- titan_plugin_github/exceptions.py +28 -0
- titan_plugin_github/messages.py +88 -0
- titan_plugin_github/models.py +330 -0
- titan_plugin_github/plugin.py +131 -0
- titan_plugin_github/steps/__init__.py +12 -0
- titan_plugin_github/steps/ai_pr_step.py +172 -0
- titan_plugin_github/steps/create_pr_step.py +86 -0
- titan_plugin_github/steps/github_prompt_steps.py +171 -0
- titan_plugin_github/steps/issue_steps.py +143 -0
- titan_plugin_github/steps/preview_step.py +40 -0
- titan_plugin_github/utils.py +82 -0
- titan_plugin_github/workflows/__previews__/__init__.py +1 -0
- titan_plugin_github/workflows/__previews__/create_pr_ai_preview.py +140 -0
- titan_plugin_github/workflows/create-issue-ai.yaml +32 -0
- titan_plugin_github/workflows/create-pr-ai.yaml +49 -0
- titan_plugin_jira/__init__.py +8 -0
- titan_plugin_jira/agents/__init__.py +6 -0
- titan_plugin_jira/agents/config_loader.py +154 -0
- titan_plugin_jira/agents/jira_agent.py +553 -0
- titan_plugin_jira/agents/prompts.py +364 -0
- titan_plugin_jira/agents/response_parser.py +435 -0
- titan_plugin_jira/agents/token_tracker.py +223 -0
- titan_plugin_jira/agents/validators.py +246 -0
- titan_plugin_jira/clients/jira_client.py +745 -0
- titan_plugin_jira/config/jira_agent.toml +92 -0
- titan_plugin_jira/config/templates/issue_analysis.md.j2 +78 -0
- titan_plugin_jira/exceptions.py +37 -0
- titan_plugin_jira/formatters/__init__.py +6 -0
- titan_plugin_jira/formatters/markdown_formatter.py +245 -0
- titan_plugin_jira/messages.py +115 -0
- titan_plugin_jira/models.py +89 -0
- titan_plugin_jira/plugin.py +264 -0
- titan_plugin_jira/steps/ai_analyze_issue_step.py +105 -0
- titan_plugin_jira/steps/get_issue_step.py +82 -0
- titan_plugin_jira/steps/prompt_select_issue_step.py +80 -0
- titan_plugin_jira/steps/search_saved_query_step.py +238 -0
- titan_plugin_jira/utils/__init__.py +13 -0
- titan_plugin_jira/utils/issue_sorter.py +140 -0
- titan_plugin_jira/utils/saved_queries.py +150 -0
- titan_plugin_jira/workflows/analyze-jira-issues.yaml +34 -0
|
@@ -0,0 +1,419 @@
|
|
|
1
|
+
from __future__ import annotations
|
|
2
|
+
|
|
3
|
+
from pathlib import Path
|
|
4
|
+
from typing import Dict, List, Optional, Any
|
|
5
|
+
import yaml
|
|
6
|
+
from dataclasses import dataclass
|
|
7
|
+
from copy import deepcopy
|
|
8
|
+
|
|
9
|
+
from titan_cli.core.plugins.plugin_registry import PluginRegistry
|
|
10
|
+
from titan_cli.core.workflows.project_step_source import ProjectStepSource, StepFunction
|
|
11
|
+
|
|
12
|
+
from .workflow_sources import (
|
|
13
|
+
WorkflowSource,
|
|
14
|
+
ProjectWorkflowSource,
|
|
15
|
+
UserWorkflowSource,
|
|
16
|
+
SystemWorkflowSource,
|
|
17
|
+
PluginWorkflowSource,
|
|
18
|
+
WorkflowInfo,
|
|
19
|
+
)
|
|
20
|
+
from .workflow_exceptions import WorkflowNotFoundError, WorkflowError
|
|
21
|
+
|
|
22
|
+
|
|
23
|
+
@dataclass
class ParsedWorkflow:
    """
    A fully parsed, resolved, and merged workflow, ready to be executed.

    This is the output of the registry's 'get_workflow' method: the
    'extends' chain has been resolved, configs merged, and step IDs
    de-duplicated.
    """

    # Workflow name; falls back to the lookup name when the YAML omits one.
    name: str
    # Human-readable description from the YAML definition ("" when absent).
    description: str
    # Name of the source the file came from (e.g. "project", "user"), or "unknown".
    source: str
    # Ordered step dictionaries with unique IDs, ready for the executor.
    steps: List[Dict[str, Any]]
    # Workflow-level parameters after 'extends' merging (overlay wins per key).
    params: Dict[str, Any]
|
|
35
|
+
|
|
36
|
+
|
|
37
|
+
class WorkflowRegistry:
    """
    Central registry for discovering and managing workflows from all sources.

    This class is analogous to PluginRegistry. It discovers workflows from
    various sources (project, user, system, plugins), resolves 'extends'
    chains, merges configurations, and caches the final, parsed workflows.
    """

    def __init__(
        self,
        project_root: Path,
        plugin_registry: PluginRegistry,
        project_step_source: ProjectStepSource,
        config: Any = None
    ):
        """
        Initialize the WorkflowRegistry.

        Args:
            project_root: Root path of the current project.
            plugin_registry: Registry of installed plugins.
            project_step_source: Source for discovering project-specific steps.
            config: TitanConfig instance (optional, for filtering by enabled plugins).
        """
        self.project_root = project_root
        self.plugin_registry = plugin_registry
        self._project_step_source = project_step_source
        # Optional config; when set, discover() filters by enabled (not just
        # installed) plugins.
        self._config = config

        # Define the base path for system workflows, assuming it's in the root of the package
        # (e.g., titan_cli/workflows). The path is constructed relative to this file's location
        # (three levels up from titan_cli/core/workflows/).
        system_workflows_path = (
            Path(__file__).resolve().parent.parent.parent / "workflows"
        )

        # Workflow sources are listed in order of precedence (highest to lowest);
        # the first source that knows a name wins in find/discover.
        self._sources: List[WorkflowSource] = [
            ProjectWorkflowSource(project_root / ".titan" / "workflows", plugin_registry),
            UserWorkflowSource(Path.home() / ".titan" / "workflows", plugin_registry),
            SystemWorkflowSource(system_workflows_path, plugin_registry),
            PluginWorkflowSource(plugin_registry),  # PluginWorkflowSource takes plugin_registry once
        ]

        # Cache for fully parsed workflows, keyed by name (similar to
        # PluginRegistry._plugins); populated lazily by get_workflow().
        self._workflows: Dict[str, ParsedWorkflow] = {}

        # Cache for discovered workflow metadata (to avoid re-scanning files);
        # None means discovery has not run yet (see reload()).
        self._discovered: Optional[List[WorkflowInfo]] = None
|
|
86
|
+
|
|
87
|
+
def discover(self) -> List[WorkflowInfo]:
    """
    Scan every registered source and return the unique, executable workflows.

    Precedence is respected: when several sources define the same workflow
    name, only the first (highest-precedence) one is kept. Workflows whose
    required plugins are not available are filtered out. The result is
    cached until reload() is called.

    Returns:
        WorkflowInfo entries for all unique, executable workflows.
    """
    cached = self._discovered
    if cached is not None:
        return cached

    # When a config is present, filter against enabled plugins; otherwise
    # fall back to everything that is installed.
    plugin_pool = (
        set(self.plugin_registry.list_enabled(self._config))
        if self._config
        else set(self.plugin_registry.list_installed())
    )

    found: List[WorkflowInfo] = []
    seen: set = set()

    for src in self._sources:
        try:
            for info in src.discover():
                if info.name in seen:
                    continue
                # Skip workflows whose plugin dependencies are unmet.
                if not info.required_plugins.issubset(plugin_pool):
                    continue
                seen.add(info.name)
                found.append(info)
        except Exception:
            # A single broken source must not abort discovery for the rest;
            # degrade gracefully and move on to the next source.
            # TODO: Add proper logging when logger is available to help with debugging.
            continue

    self._discovered = found
    return found
|
|
129
|
+
|
|
130
|
+
def list_available(self) -> List[str]:
    """
    Return the names of every workflow that discover() can see.

    Mirrors PluginRegistry.list_installed() in spirit.
    """
    names: List[str] = []
    for info in self.discover():
        names.append(info.name)
    return names
|
|
137
|
+
|
|
138
|
+
def get_workflow(self, name: str) -> Optional[ParsedWorkflow]:
    """
    Fetch a fully parsed and resolved workflow by name.

    Main entry point for loading a workflow for execution: locates the
    highest-precedence file, resolves the 'extends' chain, merges configs,
    and caches the result (like PluginRegistry.get_plugin()).

    Returns:
        The ParsedWorkflow, or None when no file defines that name.

    Raises:
        WorkflowNotFoundError / yaml.YAMLError: propagated as-is for
            upstream handling (e.g. UI display).
        WorkflowError: wraps any other unexpected parsing/merging failure.
    """
    # Serve repeat lookups from the cache.
    if name in self._workflows:
        return self._workflows[name]

    # Locate the highest-precedence file defining this workflow.
    located = self._find_workflow_file(name)
    if located is None:
        return None

    try:
        workflow = self._load_and_parse(name, located)
    except (WorkflowNotFoundError, yaml.YAMLError):
        # Specific workflow errors pass through untouched.
        raise
    except Exception as e:
        # Anything else is wrapped so callers see a single error family.
        raise WorkflowError(f"An unexpected error occurred while loading workflow '{name}': {e}") from e

    # Only successfully parsed workflows are cached.
    self._workflows[name] = workflow
    return workflow
|
|
169
|
+
|
|
170
|
+
|
|
171
|
+
def _find_workflow_file(self, name: str) -> Optional[Path]:
|
|
172
|
+
"""Finds a workflow file by name, respecting source precedence."""
|
|
173
|
+
for source in self._sources:
|
|
174
|
+
path = source.find(name)
|
|
175
|
+
if path:
|
|
176
|
+
return path
|
|
177
|
+
return None
|
|
178
|
+
|
|
179
|
+
def _ensure_unique_step_ids(self, steps: List[Dict[str, Any]]) -> List[Dict[str, Any]]:
    """
    Ensures all step IDs are unique by adding numeric suffixes for duplicates.

    Every occurrence of a duplicated ID is renamed, including the first one:
    two steps with id="git_status" become "git_status_1" and "git_status_2".
    Steps whose IDs are already unique keep them unchanged.

    Fixes over the previous version: steps are validated exactly once
    (the old code validated everything a second time whenever duplicates
    existed, discarding the whole first renaming pass — and re-validation
    could regenerate different auto-IDs than the ones that were counted),
    and an unreachable dead branch has been removed.

    Args:
        steps: List of raw step dictionaries from the workflow YAML.

    Returns:
        List of validated step dictionaries with unique IDs.

    Raises:
        WorkflowError: If any step fails model validation.
    """
    from titan_cli.core.workflows.models import WorkflowStepModel

    # Validate each step exactly once; validation also triggers the model's
    # auto-generation of missing IDs, so counting must use these instances.
    validated_steps = []
    for step_data in steps:
        try:
            validated_steps.append(WorkflowStepModel(**step_data))
        except Exception as e:
            # Re-raise with context so the failing workflow is identifiable.
            raise WorkflowError(f"Invalid step configuration: {e}") from e

    # Count occurrences of each (possibly auto-generated) ID.
    id_counts: Dict[str, int] = {}
    for step in validated_steps:
        id_counts[step.id] = id_counts.get(step.id, 0) + 1
    duplicate_ids = {step_id for step_id, count in id_counts.items() if count > 1}

    # Rename every occurrence of a duplicated ID with a 1-based suffix,
    # so numbering stays consistent (git_status_1, git_status_2, ...).
    id_occurrence: Dict[str, int] = {}
    final_steps: List[Dict[str, Any]] = []
    for step in validated_steps:
        original_id = step.id
        if original_id in duplicate_ids:
            id_occurrence[original_id] = id_occurrence.get(original_id, 0) + 1
            step.id = f"{original_id}_{id_occurrence[original_id]}"
        final_steps.append(step.model_dump())

    return final_steps
|
|
245
|
+
|
|
246
|
+
def _load_and_parse(self, name: str, file_path: Path) -> ParsedWorkflow:
    """Loads and parses a single workflow file, resolving its 'extends' chain.

    Args:
        name: Lookup name; used as a fallback when the YAML defines no 'name'.
        file_path: Path to the workflow YAML file.

    Returns:
        A ParsedWorkflow with merged config, unique step IDs, and the
        name of the source the file belongs to.
    """
    with open(file_path, 'r', encoding='utf-8') as f:
        # An empty YAML file parses to None; normalize to an empty config.
        config = yaml.safe_load(f) or {}

    # Resolve 'extends' chain if present; the base config is merged under
    # this file's config (overlay wins).
    if "extends" in config:
        base_config = self._resolve_extends(config["extends"])
        config = self._merge_configs(base_config, config)

    # Ensure step IDs are unique (duplicates get numeric suffixes).
    steps = config.get("steps", [])
    if steps:
        steps = self._ensure_unique_step_ids(steps)

    # Create the final ParsedWorkflow object
    return ParsedWorkflow(
        name=config.get("name", name),
        description=config.get("description", ""),
        source=self._get_source_name_from_path(file_path),
        steps=steps,
        params=config.get("params", {}),
    )
|
|
269
|
+
|
|
270
|
+
def _resolve_extends(self, extends_ref: str) -> Dict[str, Any]:
    """
    Recursively resolves a base workflow from an 'extends' reference.

    Supports:
    - "plugin:github/create-pr"  (explicit source type before ':')
    - "system/quick-commit"
    - "create-pr" (resolved by precedence)

    Returns:
        The base workflow's raw config dict, itself fully merged if the
        base workflow extends another workflow in turn.

    Raises:
        WorkflowNotFoundError: If the referenced base workflow (or its
            owning plugin) cannot be found.
    """
    # Parse the extends reference to find the correct file
    base_workflow_path = None
    if ":" in extends_ref:
        source_type, ref_path = extends_ref.split(":", 1)
        # Find a source that matches the type (e.g., 'plugin')
        for source in self._sources:
            # This logic assumes plugin source names are like "plugin:github", "plugin:git"
            if source.name == source_type or source.name.startswith(f"{source_type}:"):
                base_workflow_path = source.find(ref_path)
                if base_workflow_path:
                    break
        if not base_workflow_path:
            # Better error message: check if plugin is installed so the user
            # can tell "plugin missing" apart from "workflow missing".
            if source_type == "plugin":
                plugin_name = ref_path.split("/")[0] if "/" in ref_path else None
                if plugin_name:
                    installed_plugins = self.plugin_registry.list_installed()
                    if plugin_name not in installed_plugins:
                        raise WorkflowNotFoundError(
                            f"Cannot extend '{extends_ref}': Plugin '{plugin_name}' is not installed.\n"
                            f"Installed plugins: {', '.join(installed_plugins) if installed_plugins else 'none'}\n"
                            f"Please install it from the Plugin Management menu."
                        )
            raise WorkflowNotFoundError(f"Base workflow '{extends_ref}' not found in source '{source_type}'.")
    else:
        # Normal resolution across all sources by precedence
        base_workflow_path = self._find_workflow_file(extends_ref)

    if not base_workflow_path:
        raise WorkflowNotFoundError(f"Base workflow '{extends_ref}' not found.")

    # Load the base configuration from the file
    with open(base_workflow_path, 'r', encoding='utf-8') as f:
        base_config = yaml.safe_load(f) or {}

    # If the base itself extends another workflow, resolve it recursively
    # (no cycle detection here — a circular 'extends' chain would recurse
    # until RecursionError; NOTE(review): consider guarding against that).
    if "extends" in base_config:
        parent_config = self._resolve_extends(base_config["extends"])
        return self._merge_configs(parent_config, base_config)

    return base_config
|
|
320
|
+
|
|
321
|
+
def _merge_configs(self, base: Dict[str, Any], overlay: Dict[str, Any]) -> Dict[str, Any]:
|
|
322
|
+
"""
|
|
323
|
+
Merges an overlay configuration into a base configuration.
|
|
324
|
+
- Metadata: overlay wins
|
|
325
|
+
- Params: overlay wins (shallow merge)
|
|
326
|
+
- Steps: merged via hooks
|
|
327
|
+
"""
|
|
328
|
+
merged = deepcopy(base)
|
|
329
|
+
|
|
330
|
+
# Merge metadata
|
|
331
|
+
for key in ["name", "description", "category"]:
|
|
332
|
+
if key in overlay:
|
|
333
|
+
merged[key] = overlay[key]
|
|
334
|
+
|
|
335
|
+
# Merge params (shallow merge, overlay takes precedence)
|
|
336
|
+
if "params" in overlay:
|
|
337
|
+
merged.setdefault("params", {}).update(overlay["params"])
|
|
338
|
+
|
|
339
|
+
# Merge steps using hooks defined in the overlay
|
|
340
|
+
if "hooks" in overlay and isinstance(overlay["hooks"], dict):
|
|
341
|
+
merged["steps"] = self._merge_steps_with_hooks(
|
|
342
|
+
base_steps=base.get("steps", []),
|
|
343
|
+
hooks=overlay["hooks"]
|
|
344
|
+
)
|
|
345
|
+
# If overlay specifies its own steps, it is in full control.
|
|
346
|
+
# This is implicitly handled by deepcopy and then not entering the hooks block.
|
|
347
|
+
# If 'steps' key is present in overlay, it completely replaces base 'steps' during deepcopy
|
|
348
|
+
# before the hooks logic is applied, if no 'hooks' are in overlay for step merging.
|
|
349
|
+
# So, if overlay.steps exists AND overlay.hooks is empty/not a dict, overlay.steps takes precedence.
|
|
350
|
+
elif "steps" in overlay:
|
|
351
|
+
merged["steps"] = overlay["steps"]
|
|
352
|
+
|
|
353
|
+
|
|
354
|
+
return merged
|
|
355
|
+
|
|
356
|
+
def _merge_steps_with_hooks(self, base_steps: List[Dict], hooks: Dict[str, List[Dict]]) -> List[Dict]:
|
|
357
|
+
"""Injects steps from the 'hooks' dictionary into the base step list."""
|
|
358
|
+
|
|
359
|
+
# Find all available hook points in base workflow
|
|
360
|
+
available_hooks = set()
|
|
361
|
+
for step in base_steps:
|
|
362
|
+
if "hook" in step and isinstance(step["hook"], str):
|
|
363
|
+
available_hooks.add(step["hook"])
|
|
364
|
+
|
|
365
|
+
# Add implicit 'after' hook (always available)
|
|
366
|
+
available_hooks.add("after")
|
|
367
|
+
|
|
368
|
+
# Validate that all hooks being used exist in base workflow
|
|
369
|
+
undefined_hooks = set(hooks.keys()) - available_hooks
|
|
370
|
+
if undefined_hooks:
|
|
371
|
+
from .workflow_exceptions import WorkflowError
|
|
372
|
+
raise WorkflowError(
|
|
373
|
+
f"Workflow defines hooks {sorted(undefined_hooks)} but base workflow only supports: {sorted(available_hooks)}.\n"
|
|
374
|
+
f"Available hooks in base workflow: {', '.join(sorted(available_hooks))}"
|
|
375
|
+
)
|
|
376
|
+
|
|
377
|
+
merged = []
|
|
378
|
+
|
|
379
|
+
for step in base_steps:
|
|
380
|
+
# Check if the current step is a hook point
|
|
381
|
+
if "hook" in step and isinstance(step["hook"], str):
|
|
382
|
+
hook_name = step["hook"]
|
|
383
|
+
# If the overlay provides steps for this hook, inject them
|
|
384
|
+
if hook_name in hooks:
|
|
385
|
+
# The value from the hooks dict should be a list of step dicts
|
|
386
|
+
injected_steps = hooks[hook_name]
|
|
387
|
+
if isinstance(injected_steps, list):
|
|
388
|
+
merged.extend(injected_steps)
|
|
389
|
+
else:
|
|
390
|
+
# This is a regular step, just append it
|
|
391
|
+
merged.append(step)
|
|
392
|
+
|
|
393
|
+
# Handle implicit 'after' hook for steps to be added at the very end
|
|
394
|
+
if "after" in hooks: # User's example used "after" as a hook name, not "after_workflow"
|
|
395
|
+
after_steps = hooks["after"]
|
|
396
|
+
if isinstance(after_steps, list):
|
|
397
|
+
merged.extend(after_steps)
|
|
398
|
+
|
|
399
|
+
return merged
|
|
400
|
+
|
|
401
|
+
def _get_source_name_from_path(self, file_path: Path) -> str:
|
|
402
|
+
"""Determines the source ('project', 'user', etc.) from a file path."""
|
|
403
|
+
for source in self._sources:
|
|
404
|
+
if source.contains(file_path):
|
|
405
|
+
return source.name
|
|
406
|
+
return "unknown"
|
|
407
|
+
|
|
408
|
+
def reload(self):
    """Drop every cached result so the next call re-discovers and re-parses."""
    # The two caches are independent; reset discovery first, then parses.
    self._discovered = None
    self._workflows.clear()
|
|
412
|
+
|
|
413
|
+
def get_project_step(self, step_name: str) -> Optional[StepFunction]:
    """
    Look up a loaded project step function by name.

    Delegates to the project step source; per its signature, returns None
    when no step with that name is available.
    """
    step_source = self._project_step_source
    return step_source.get_step(step_name)
|
|
418
|
+
|
|
419
|
+
|