mcp-souschef 2.8.0__py3-none-any.whl → 3.2.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (36) hide show
  1. {mcp_souschef-2.8.0.dist-info → mcp_souschef-3.2.0.dist-info}/METADATA +159 -384
  2. mcp_souschef-3.2.0.dist-info/RECORD +47 -0
  3. {mcp_souschef-2.8.0.dist-info → mcp_souschef-3.2.0.dist-info}/WHEEL +1 -1
  4. souschef/__init__.py +31 -7
  5. souschef/assessment.py +1451 -105
  6. souschef/ci/common.py +126 -0
  7. souschef/ci/github_actions.py +3 -92
  8. souschef/ci/gitlab_ci.py +2 -52
  9. souschef/ci/jenkins_pipeline.py +2 -59
  10. souschef/cli.py +149 -16
  11. souschef/converters/playbook.py +378 -138
  12. souschef/converters/resource.py +12 -11
  13. souschef/converters/template.py +177 -0
  14. souschef/core/__init__.py +6 -1
  15. souschef/core/metrics.py +313 -0
  16. souschef/core/path_utils.py +233 -19
  17. souschef/core/validation.py +53 -0
  18. souschef/deployment.py +71 -12
  19. souschef/generators/__init__.py +13 -0
  20. souschef/generators/repo.py +695 -0
  21. souschef/parsers/attributes.py +1 -1
  22. souschef/parsers/habitat.py +1 -1
  23. souschef/parsers/inspec.py +25 -2
  24. souschef/parsers/metadata.py +5 -3
  25. souschef/parsers/recipe.py +1 -1
  26. souschef/parsers/resource.py +1 -1
  27. souschef/parsers/template.py +1 -1
  28. souschef/server.py +1039 -121
  29. souschef/ui/app.py +486 -374
  30. souschef/ui/pages/ai_settings.py +74 -8
  31. souschef/ui/pages/cookbook_analysis.py +3216 -373
  32. souschef/ui/pages/validation_reports.py +274 -0
  33. mcp_souschef-2.8.0.dist-info/RECORD +0 -42
  34. souschef/converters/cookbook_specific.py.backup +0 -109
  35. {mcp_souschef-2.8.0.dist-info → mcp_souschef-3.2.0.dist-info}/entry_points.txt +0 -0
  36. {mcp_souschef-2.8.0.dist-info → mcp_souschef-3.2.0.dist-info}/licenses/LICENSE +0 -0
souschef/ui/app.py CHANGED
@@ -1,72 +1,62 @@
1
- """
2
- Visual Migration Planning Interface for SousChef.
1
+ # Add the parent directory to the path so we can import souschef modules
2
+ import sys
3
+ from pathlib import Path
3
4
 
4
- A Streamlit-based web interface for Chef to Ansible migration planning,
5
- assessment, and visualization.
6
- """
5
+ app_path = Path(__file__).parent.parent
6
+ if str(app_path) not in sys.path:
7
+ sys.path.insert(0, str(app_path))
7
8
 
8
9
  import contextlib
9
- import logging
10
- import sys
11
- from pathlib import Path
10
+ import os
11
+ from collections.abc import Callable, Iterable, Mapping, Sequence
12
+ from typing import TYPE_CHECKING, Any, Concatenate, ParamSpec, TypeVar
12
13
 
13
14
  import streamlit as st
14
15
 
15
- # Configure logging to stdout for Docker visibility
16
- logging.basicConfig(
17
- level=logging.INFO,
18
- format="%(asctime)s - %(name)s - %(levelname)s - %(message)s",
19
- stream=sys.stdout,
20
- force=True, # Override any existing configuration
21
- )
22
-
23
- logger = logging.getLogger(__name__)
24
- logger.info("Starting SousChef UI application")
16
+ if TYPE_CHECKING:
17
+ import networkx as nx
18
+ import plotly.graph_objects as go
19
+ from matplotlib.figure import Figure
25
20
 
26
- # Add the parent directory to the path so we can import souschef modules
27
- app_path = Path(__file__).parent.parent
28
- if str(app_path) not in sys.path:
29
- sys.path.insert(0, str(app_path))
21
+ P = ParamSpec("P")
22
+ R = TypeVar("R")
30
23
 
31
- # Import page modules
32
- from souschef.ui.pages.ai_settings import show_ai_settings_page # noqa: E402
33
- from souschef.ui.pages.cookbook_analysis import ( # noqa: E402
34
- show_cookbook_analysis_page,
35
- )
24
+ from souschef.core import _ensure_within_base_path, _normalize_path
25
+ from souschef.core.path_utils import safe_exists, safe_glob, safe_is_dir, safe_is_file
26
+ from souschef.ui.pages.ai_settings import show_ai_settings_page
27
+ from souschef.ui.pages.cookbook_analysis import show_cookbook_analysis_page
36
28
 
37
- # Constants for repeated strings
29
+ # Constants
30
+ SECTION_COMMUNITY_COOKBOOKS_HEADER = "Community Cookbooks:"
31
+ SECTION_COMMUNITY_COOKBOOKS = "Community Cookbooks"
32
+ SECTION_CIRCULAR_DEPENDENCIES = "Circular Dependencies"
33
+ SECTION_MIGRATION_IMPACT_ANALYSIS = "Migration Impact Analysis"
38
34
  NAV_MIGRATION_PLANNING = "Migration Planning"
39
35
  NAV_DEPENDENCY_MAPPING = "Dependency Mapping"
40
36
  NAV_VALIDATION_REPORTS = "Validation Reports"
41
- MIME_TEXT_MARKDOWN = "text/markdown"
42
- MIME_APPLICATION_JSON = "application/json"
43
- SECTION_CIRCULAR_DEPENDENCIES = "Circular Dependencies"
44
- NAV_COOKBOOK_ANALYSIS = "Cookbook Analysis"
45
37
  NAV_AI_SETTINGS = "AI Settings"
38
+ NAV_COOKBOOK_ANALYSIS = "Cookbook Analysis"
46
39
  BUTTON_ANALYSE_DEPENDENCIES = "Analyse Dependencies"
47
- SECTION_COMMUNITY_COOKBOOKS = "Community Cookbooks"
48
- SECTION_COMMUNITY_COOKBOOKS_HEADER = "Community Cookbooks:"
49
40
  INPUT_METHOD_DIRECTORY_PATH = "Directory Path"
41
+ MIME_TEXT_MARKDOWN = "text/markdown"
42
+ MIME_APPLICATION_JSON = "application/json"
43
+ ERROR_MSG_ENTER_PATH = "Please enter a path."
50
44
  SCOPE_BEST_PRACTICES = "Best Practices"
51
- ERROR_MSG_ENTER_PATH = "Please enter a path to validate."
52
-
53
-
54
- def health_check():
55
- """Return simple health check endpoint for Docker."""
56
- return {"status": "healthy", "service": "souschef-ui"}
57
45
 
58
46
 
59
47
  class ProgressTracker:
60
48
  """Track progress for long-running operations."""
61
49
 
62
- def __init__(self, total_steps=100, description="Processing..."):
63
- self.total_steps = total_steps
64
- self.current_step = 0
65
- self.description = description
50
+ def __init__(
51
+ self, total_steps: int = 100, description: str = "Processing..."
52
+ ) -> None:
53
+ self.total_steps: int = total_steps
54
+ self.current_step: int = 0
55
+ self.description: str = description
66
56
  self.progress_bar = st.progress(0)
67
57
  self.status_text = st.empty()
68
58
 
69
- def update(self, step=None, description=None):
59
+ def update(self, step: int | None = None, description: str | None = None) -> None:
70
60
  """Update progress."""
71
61
  if step is not None:
72
62
  self.current_step = min(step, self.total_steps)
@@ -82,7 +72,7 @@ class ProgressTracker:
82
72
  f"{self.description} ({self.current_step}/{self.total_steps})"
83
73
  )
84
74
 
85
- def complete(self, message="Completed!"):
75
+ def complete(self, message: str = "Completed!") -> None:
86
76
  """Mark progress as complete."""
87
77
  self.progress_bar.progress(1.0)
88
78
  self.status_text.text(message)
@@ -90,24 +80,26 @@ class ProgressTracker:
90
80
 
91
81
  time.sleep(0.5) # Brief pause to show completion
92
82
 
93
- def close(self):
83
+ def close(self) -> None:
94
84
  """Clean up progress indicators."""
95
85
  self.progress_bar.empty()
96
86
  self.status_text.empty()
97
87
 
98
88
 
99
89
  def with_progress_tracking(
100
- operation_func, description="Processing...", total_steps=100
101
- ):
90
+ operation_func: Callable[Concatenate[ProgressTracker, P], R],
91
+ description: str = "Processing...",
92
+ total_steps: int = 100,
93
+ ) -> Callable[P, R]:
102
94
  """Add progress tracking to operations."""
103
95
 
104
- def wrapper(*args, **kwargs):
96
+ def wrapper(*args: P.args, **kwargs: P.kwargs) -> R:
105
97
  tracker = ProgressTracker(total_steps, description)
106
98
  try:
107
- result = operation_func(tracker, *args, **kwargs)
99
+ result: R = operation_func(tracker, *args, **kwargs)
108
100
  tracker.complete()
109
101
  return result
110
- except Exception as e:
102
+ except Exception as e: # noqa: BLE001
111
103
  tracker.close()
112
104
  raise e
113
105
  finally:
@@ -116,111 +108,104 @@ def with_progress_tracking(
116
108
  return wrapper
117
109
 
118
110
 
119
- def _setup_sidebar_navigation():
120
- """Set up the sidebar navigation with buttons."""
121
- st.sidebar.title("Navigation")
111
+ def main() -> None:
112
+ """Run the main Streamlit application."""
113
+ st.set_page_config(
114
+ page_title="SousChef - Chef to Ansible Migration",
115
+ layout="wide",
116
+ initial_sidebar_state="collapsed",
117
+ )
122
118
 
123
- # Dashboard button
124
- if st.sidebar.button(
125
- "Dashboard",
126
- help="View migration overview and quick actions",
127
- width="stretch",
128
- ):
129
- st.session_state.current_page = "Dashboard"
130
- st.rerun()
119
+ st.title("SousChef - Visual Migration Planning")
120
+ st.markdown("*AI-powered Chef to Ansible migration planning interface*")
131
121
 
132
- # Cookbook Analysis button
133
- if st.sidebar.button(
134
- NAV_COOKBOOK_ANALYSIS,
135
- help="Analyse Chef cookbooks and assess migration complexity",
136
- width="stretch",
137
- ):
138
- st.session_state.current_page = NAV_COOKBOOK_ANALYSIS
139
- st.rerun()
122
+ # Main content area - default to dashboard
123
+ page = st.session_state.get("current_page", "Dashboard")
140
124
 
141
- # Dependency Mapping button
142
- if st.sidebar.button(
143
- NAV_DEPENDENCY_MAPPING,
144
- help="Visualise cookbook dependencies and migration order",
145
- width="stretch",
146
- ):
147
- st.session_state.current_page = NAV_DEPENDENCY_MAPPING
148
- st.rerun()
125
+ # Navigation section
126
+ st.subheader("Navigation")
149
127
 
150
- # Migration Planning button
151
- if st.sidebar.button(
152
- NAV_MIGRATION_PLANNING,
153
- help="Plan your Chef to Ansible migration with detailed timelines",
154
- width="stretch",
155
- ):
156
- st.session_state.current_page = NAV_MIGRATION_PLANNING
157
- st.rerun()
128
+ col1, col2, col3, col4, col5 = st.columns(5)
158
129
 
159
- # Validation Reports button
160
- if st.sidebar.button(
161
- NAV_VALIDATION_REPORTS,
162
- help="Validate conversions and generate quality assurance reports",
163
- width="stretch",
164
- ):
165
- st.session_state.current_page = NAV_VALIDATION_REPORTS
166
- st.rerun()
130
+ with col1:
131
+ if st.button(
132
+ "Cookbook Analysis",
133
+ type="primary" if page == NAV_COOKBOOK_ANALYSIS else "secondary",
134
+ width="stretch",
135
+ key="nav_cookbook_analysis",
136
+ ):
137
+ st.session_state.current_page = NAV_COOKBOOK_ANALYSIS
138
+ st.rerun()
167
139
 
168
- # AI Settings button
169
- if st.sidebar.button(
170
- NAV_AI_SETTINGS,
171
- help="Configure AI provider settings for intelligent conversions",
172
- width="stretch",
173
- ):
174
- st.session_state.current_page = NAV_AI_SETTINGS
175
- st.rerun()
140
+ with col2:
141
+ if st.button(
142
+ "Migration Planning",
143
+ type="primary" if page == NAV_MIGRATION_PLANNING else "secondary",
144
+ width="stretch",
145
+ key="nav_migration_planning",
146
+ ):
147
+ st.session_state.current_page = NAV_MIGRATION_PLANNING
148
+ st.rerun()
149
+
150
+ with col3:
151
+ if st.button(
152
+ "Dependency Mapping",
153
+ type="primary" if page == NAV_DEPENDENCY_MAPPING else "secondary",
154
+ width="stretch",
155
+ key="nav_dependency_mapping",
156
+ ):
157
+ st.session_state.current_page = NAV_DEPENDENCY_MAPPING
158
+ st.rerun()
176
159
 
160
+ with col4:
161
+ if st.button(
162
+ "Validation Reports",
163
+ type="primary" if page == NAV_VALIDATION_REPORTS else "secondary",
164
+ width="stretch",
165
+ key="nav_validation_reports",
166
+ ):
167
+ st.session_state.current_page = NAV_VALIDATION_REPORTS
168
+ st.rerun()
177
169
 
178
- def main():
179
- """Run the main Streamlit application."""
180
- st.set_page_config(
181
- page_title="SousChef - Chef to Ansible Migration",
182
- layout="wide",
183
- initial_sidebar_state="expanded",
184
- )
170
+ with col5:
171
+ if st.button(
172
+ "AI Settings",
173
+ type="primary" if page == NAV_AI_SETTINGS else "secondary",
174
+ width="stretch",
175
+ key="nav_ai_settings",
176
+ ):
177
+ st.session_state.current_page = NAV_AI_SETTINGS
178
+ st.rerun()
185
179
 
186
- # Hide Streamlit's default header elements and sidebar navigation
187
- st.markdown(
188
- """
189
- <style>
190
- #MainMenu {visibility: hidden;}
191
- .stDeployButton {display:none;}
192
- [data-testid="stSidebarNavLink"] {display: none;}
193
- </style>
194
- """,
195
- unsafe_allow_html=True,
196
- )
180
+ st.divider()
197
181
 
198
- # Set up sidebar navigation
199
- _setup_sidebar_navigation()
182
+ # Page routing
183
+ _route_to_page(page)
200
184
 
201
- # Get current page from session state, default to Dashboard
202
- page = st.session_state.get("current_page", "Dashboard")
203
185
 
204
- # Main content area
205
- if page == "Dashboard":
206
- show_dashboard()
207
- elif page == NAV_COOKBOOK_ANALYSIS:
208
- show_cookbook_analysis_page()
209
- elif page == NAV_AI_SETTINGS:
210
- show_ai_settings_page()
211
- elif page == NAV_MIGRATION_PLANNING:
212
- show_migration_planning()
213
- elif page == NAV_DEPENDENCY_MAPPING:
214
- show_dependency_mapping()
215
- elif page == NAV_VALIDATION_REPORTS:
216
- show_validation_reports()
217
-
218
-
219
- def _calculate_dashboard_metrics():
186
+ def _route_to_page(page: str) -> None:
187
+ """Route to the appropriate page based on the current page state."""
188
+ page_routes = {
189
+ "Dashboard": show_dashboard,
190
+ NAV_COOKBOOK_ANALYSIS: show_cookbook_analysis_page,
191
+ NAV_MIGRATION_PLANNING: show_migration_planning,
192
+ NAV_DEPENDENCY_MAPPING: show_dependency_mapping,
193
+ NAV_VALIDATION_REPORTS: show_validation_reports,
194
+ NAV_AI_SETTINGS: show_ai_settings_page,
195
+ }
196
+
197
+ route_func = page_routes.get(page)
198
+ if route_func:
199
+ route_func()
200
+ else:
201
+ show_dashboard() # Default fallback
202
+
203
+
204
+ def _calculate_dashboard_metrics() -> tuple[int, str, int, int]:
220
205
  """Calculate and return dashboard metrics."""
221
- cookbooks_analysed = 0
222
206
  complexity_counts = {"High": 0, "Medium": 0, "Low": 0}
223
207
  successful_analyses = 0
208
+ cookbooks_analysed = 0
224
209
 
225
210
  if "analysis_results" in st.session_state and st.session_state.analysis_results:
226
211
  results = st.session_state.analysis_results
@@ -246,12 +231,20 @@ def _calculate_dashboard_metrics():
246
231
  if cookbooks_analysed > 0:
247
232
  conversion_rate = int((successful_analyses / cookbooks_analysed) * 100)
248
233
 
249
- return cookbooks_analysed, overall_complexity, conversion_rate, successful_analyses
234
+ return (
235
+ cookbooks_analysed,
236
+ overall_complexity,
237
+ conversion_rate,
238
+ successful_analyses,
239
+ )
250
240
 
251
241
 
252
242
  def _display_dashboard_metrics(
253
- cookbooks_analysed, overall_complexity, conversion_rate, successful_analyses
254
- ):
243
+ cookbooks_analysed: int,
244
+ overall_complexity: str,
245
+ conversion_rate: int,
246
+ successful_analyses: int,
247
+ ) -> None:
255
248
  """Display the dashboard metrics."""
256
249
  col1, col2, col3 = st.columns(3)
257
250
 
@@ -286,9 +279,9 @@ def _display_dashboard_metrics(
286
279
  st.caption("Successful analyses")
287
280
 
288
281
 
289
- def _display_quick_upload_section():
282
+ def _display_quick_upload_section() -> None:
290
283
  """Display the quick upload section."""
291
- st.subheader("Quick Start")
284
+ st.subheader("Quick Start - Upload Cookbooks")
292
285
 
293
286
  col1, col2 = st.columns([2, 1])
294
287
 
@@ -309,33 +302,23 @@ def _display_quick_upload_section():
309
302
 
310
303
  st.success(f"File {uploaded_file.name} uploaded successfully!")
311
304
  st.info(
312
- "Navigate to Cookbook Analysis to process this file, "
305
+ "Navigate to Cookbook Analysis above to process this file, "
313
306
  "or upload another file to replace it."
314
307
  )
315
308
 
316
309
  with col2:
317
- st.markdown("**Or choose your workflow:**")
310
+ st.markdown("**Upload Options:**")
311
+ st.markdown("- ZIP archives (.zip)")
312
+ st.markdown("- TAR archives (.tar, .tar.gz, .tgz)")
313
+ st.markdown("- Process uploaded files using the navigation buttons above")
318
314
 
319
- # Quick actions
320
- if st.button("Analyse Cookbooks", type="primary", width="stretch"):
321
- st.session_state.current_page = "Cookbook Analysis"
322
- st.rerun()
323
315
 
324
- if st.button("Generate Migration Plan", width="stretch"):
325
- st.session_state.current_page = NAV_MIGRATION_PLANNING
326
- st.rerun()
327
-
328
- if st.button(BUTTON_ANALYSE_DEPENDENCIES, width="stretch"):
329
- st.session_state.current_page = NAV_DEPENDENCY_MAPPING
330
- st.rerun()
331
-
332
-
333
- def _display_recent_activity():
316
+ def _display_recent_activity() -> None:
334
317
  """Display the recent activity section."""
335
318
  st.subheader("Recent Activity")
336
319
  st.info(
337
320
  "No recent migration activity. Start by uploading cookbooks "
338
- "above or using the Cookbook Analysis page!"
321
+ f"above or using the {NAV_COOKBOOK_ANALYSIS} page!"
339
322
  )
340
323
 
341
324
  # Getting started guide
@@ -343,7 +326,7 @@ def _display_recent_activity():
343
326
  st.markdown("""
344
327
  **New to SousChef? Here's how to begin:**
345
328
 
346
- 1. **Upload Cookbooks**: Use the uploader above or go to Cookbook Analysis
329
+ 1. **Upload Cookbooks**: Use the uploader above or go to {NAV_COOKBOOK_ANALYSIS}
347
330
  2. **Analyse Complexity**: Get detailed migration assessments
348
331
  3. **Plan Migration**: Generate timelines and resource requirements
349
332
  4. **Convert to Ansible**: Download converted playbooks
@@ -351,7 +334,7 @@ def _display_recent_activity():
351
334
  **Supported Formats:**
352
335
  - ZIP archives (.zip)
353
336
  - TAR archives (.tar, .tar.gz, .tgz)
354
- - Directory paths (in Cookbook Analysis)
337
+ - Directory paths (in {NAV_COOKBOOK_ANALYSIS})
355
338
 
356
339
  **Expected Structure:**
357
340
  ```
@@ -366,7 +349,7 @@ def _display_recent_activity():
366
349
  """)
367
350
 
368
351
 
369
- def show_dashboard():
352
+ def show_dashboard() -> None:
370
353
  """Show the main dashboard with migration overview."""
371
354
  st.header("Migration Dashboard")
372
355
 
@@ -389,7 +372,7 @@ def show_dashboard():
389
372
  _display_recent_activity()
390
373
 
391
374
 
392
- def show_migration_planning():
375
+ def show_migration_planning() -> None:
393
376
  """Show migration planning interface."""
394
377
  st.header(NAV_MIGRATION_PLANNING)
395
378
 
@@ -510,7 +493,12 @@ def show_migration_planning():
510
493
  # Step 3: Generate Plan
511
494
  st.subheader("Step 3: Generate Migration Plan")
512
495
 
513
- if st.button("Generate Migration Plan", type="primary", width="stretch"):
496
+ if st.button(
497
+ "Generate Migration Plan",
498
+ type="primary",
499
+ width="stretch",
500
+ key="migration_plan_generate",
501
+ ):
514
502
  if not cookbook_paths.strip():
515
503
  st.error("Please enter cookbook paths to generate a migration plan.")
516
504
  return
@@ -554,7 +542,9 @@ def show_migration_planning():
554
542
  display_migration_plan_results()
555
543
 
556
544
 
557
- def _display_migration_summary_metrics(cookbook_paths, strategy, timeline):
545
+ def _display_migration_summary_metrics(
546
+ cookbook_paths: str, strategy: str, timeline: int
547
+ ) -> None:
558
548
  """Display migration overview summary metrics."""
559
549
  st.subheader("Migration Overview")
560
550
 
@@ -574,7 +564,7 @@ def _display_migration_summary_metrics(cookbook_paths, strategy, timeline):
574
564
  st.metric("Status", "Plan Generated")
575
565
 
576
566
 
577
- def _display_migration_plan_details(plan_result):
567
+ def _display_migration_plan_details(plan_result: str) -> None:
578
568
  """Display the detailed migration plan sections."""
579
569
  st.subheader("Migration Plan Details")
580
570
 
@@ -595,14 +585,18 @@ def _display_migration_plan_details(plan_result):
595
585
  st.markdown(section)
596
586
 
597
587
 
598
- def _display_migration_action_buttons(cookbook_paths):
588
+ def _display_migration_action_buttons(cookbook_paths: str) -> None:
599
589
  """Display action buttons for next steps."""
600
590
  st.subheader("Next Steps")
601
591
 
602
592
  col1, col2, col3 = st.columns(3)
603
593
 
604
594
  with col1:
605
- if st.button("📊 Generate Detailed Report", width="stretch"):
595
+ if st.button(
596
+ "Generate Detailed Report",
597
+ width="stretch",
598
+ key="migration_detailed_report",
599
+ ):
606
600
  with st.spinner("Generating detailed migration report..."):
607
601
  try:
608
602
  from souschef.assessment import generate_migration_report
@@ -616,7 +610,11 @@ def _display_migration_action_buttons(cookbook_paths):
616
610
  st.error(f"Error generating report: {e}")
617
611
 
618
612
  with col2:
619
- if st.button("🔍 Analyse Dependencies", width="stretch"):
613
+ if st.button(
614
+ "Analyse Dependencies",
615
+ width="stretch",
616
+ key="migration_analyse_dependencies",
617
+ ):
620
618
  if len(cookbook_paths.split(",")) == 1:
621
619
  # Single cookbook dependency analysis
622
620
  cookbook_path = cookbook_paths.split(",")[0].strip()
@@ -636,7 +634,7 @@ def _display_migration_action_buttons(cookbook_paths):
636
634
  )
637
635
 
638
636
  with col3:
639
- if st.button("📥 Export Plan", width="stretch"):
637
+ if st.button("Export Plan", width="stretch", key="migration_export_plan"):
640
638
  # Create downloadable plan
641
639
  plan_content = f"""# Chef to Ansible Migration Plan
642
640
  Generated: {st.session_state.get("timestamp", "Unknown")}
@@ -659,20 +657,20 @@ Generated: {st.session_state.get("timestamp", "Unknown")}
659
657
  )
660
658
 
661
659
 
662
- def _display_additional_reports():
660
+ def _display_additional_reports() -> None:
663
661
  """Display detailed report and dependency analysis if available."""
664
662
  # Display detailed report if generated
665
663
  if "detailed_report" in st.session_state:
666
- with st.expander("📊 Detailed Migration Report"):
664
+ with st.expander("Detailed Migration Report"):
667
665
  st.markdown(st.session_state.detailed_report)
668
666
 
669
667
  # Display dependency analysis if generated
670
668
  if "dep_analysis" in st.session_state:
671
- with st.expander("🔍 Dependency Analysis"):
669
+ with st.expander("Dependency Analysis"):
672
670
  st.markdown(st.session_state.dep_analysis)
673
671
 
674
672
 
675
- def display_migration_plan_results():
673
+ def display_migration_plan_results() -> None:
676
674
  """Display the generated migration plan results."""
677
675
  plan_result = st.session_state.migration_plan
678
676
  cookbook_paths = st.session_state.cookbook_paths
@@ -685,7 +683,7 @@ def display_migration_plan_results():
685
683
  _display_additional_reports()
686
684
 
687
685
 
688
- def show_dependency_mapping():
686
+ def show_dependency_mapping() -> None:
689
687
  """Show dependency mapping visualization."""
690
688
  st.header(NAV_DEPENDENCY_MAPPING)
691
689
 
@@ -762,7 +760,12 @@ def show_dependency_mapping():
762
760
  )
763
761
 
764
762
  # Analysis button
765
- if st.button(BUTTON_ANALYSE_DEPENDENCIES, type="primary", width="stretch"):
763
+ if st.button(
764
+ BUTTON_ANALYSE_DEPENDENCIES,
765
+ type="primary",
766
+ width="stretch",
767
+ key="dep_analyse_dependencies",
768
+ ):
766
769
  if not cookbook_path or not cookbook_path.strip():
767
770
  st.error("Please enter a cookbook directory path.")
768
771
  return
@@ -804,7 +807,7 @@ def show_dependency_mapping():
804
807
  display_dependency_analysis_results()
805
808
 
806
809
 
807
- def _setup_dependency_mapping_ui():
810
+ def _setup_dependency_mapping_ui() -> None:
808
811
  """Set up the dependency mapping UI header and description."""
809
812
  st.header(NAV_DEPENDENCY_MAPPING)
810
813
 
@@ -814,7 +817,7 @@ def _setup_dependency_mapping_ui():
814
817
  """)
815
818
 
816
819
 
817
- def _get_dependency_mapping_inputs():
820
+ def _get_dependency_mapping_inputs() -> tuple[str, str, str]:
818
821
  """Collect user inputs for dependency analysis."""
819
822
  # Cookbook path input
820
823
  cookbook_path = st.text_input(
@@ -854,11 +857,13 @@ def _get_dependency_mapping_inputs():
854
857
 
855
858
 
856
859
  def _handle_dependency_analysis_execution(
857
- cookbook_path, dependency_depth, visualization_type
858
- ):
860
+ cookbook_path: str, dependency_depth: str, visualization_type: str
861
+ ) -> None:
859
862
  """Handle the dependency analysis execution when button is clicked."""
860
863
  # Analysis button
861
- if st.button(BUTTON_ANALYSE_DEPENDENCIES, type="primary", width="stretch"):
864
+ if st.button(
865
+ BUTTON_ANALYSE_DEPENDENCIES, type="primary", width="stretch", key="dep_analyze"
866
+ ):
862
867
  if not cookbook_path or not cookbook_path.strip():
863
868
  st.error("Please enter a cookbook directory path.")
864
869
  return
@@ -868,7 +873,9 @@ def _handle_dependency_analysis_execution(
868
873
  )
869
874
 
870
875
 
871
- def _perform_dependency_analysis(cookbook_path, dependency_depth, visualization_type):
876
+ def _perform_dependency_analysis(
877
+ cookbook_path: str, dependency_depth: str, visualization_type: str
878
+ ) -> None:
872
879
  """Perform the actual dependency analysis."""
873
880
  # Import assessment functions
874
881
  from souschef.assessment import analyse_cookbook_dependencies
@@ -903,16 +910,16 @@ def _perform_dependency_analysis(cookbook_path, dependency_depth, visualization_
903
910
  st.error(f"Error analyzing dependencies: {e}")
904
911
 
905
912
 
906
- def _display_dependency_analysis_results_if_available():
913
+ def _display_dependency_analysis_results_if_available() -> None:
907
914
  """Display dependency analysis results if they exist in session state."""
908
915
  # Display results if available
909
916
  if "dep_analysis_result" in st.session_state:
910
917
  display_dependency_analysis_results()
911
918
 
912
919
 
913
- def _extract_dependency_relationships(lines):
920
+ def _extract_dependency_relationships(lines: Iterable[str]) -> dict[str, list[str]]:
914
921
  """Extract dependency relationships from analysis lines."""
915
- dependencies = {}
922
+ dependencies: dict[str, list[str]] = {}
916
923
  current_section = None
917
924
 
918
925
  for line in lines:
@@ -935,11 +942,13 @@ def _extract_dependency_relationships(lines):
935
942
  return dependencies
936
943
 
937
944
 
938
- def _extract_circular_and_community_deps(lines):
945
+ def _extract_circular_and_community_deps(
946
+ lines: Iterable[str],
947
+ ) -> tuple[list[tuple[str, str]], list[str]]:
939
948
  """Extract circular dependencies and community cookbooks."""
940
949
  circular_deps: list[tuple[str, str]] = []
941
950
  community_cookbooks: list[str] = []
942
- current_section = None
951
+ current_section: str | None = None
943
952
 
944
953
  for line in lines:
945
954
  current_section = _update_current_section(line, current_section)
@@ -951,7 +960,7 @@ def _extract_circular_and_community_deps(lines):
951
960
  return circular_deps, community_cookbooks
952
961
 
953
962
 
954
- def _update_current_section(line, current_section):
963
+ def _update_current_section(line: str, current_section: str | None) -> str | None:
955
964
  """Update the current section based on the line content."""
956
965
  line = line.strip()
957
966
  if "Circular Dependencies:" in line:
@@ -961,12 +970,17 @@ def _update_current_section(line, current_section):
961
970
  return current_section
962
971
 
963
972
 
964
- def _is_list_item(line):
973
+ def _is_list_item(line: str) -> bool:
965
974
  """Check if the line is a list item."""
966
975
  return line.strip().startswith("- ")
967
976
 
968
977
 
969
- def _process_list_item(line, current_section, circular_deps, community_cookbooks):
978
+ def _process_list_item(
979
+ line: str,
980
+ current_section: str | None,
981
+ circular_deps: list[tuple[str, str]],
982
+ community_cookbooks: list[str],
983
+ ) -> None:
970
984
  """Process a list item based on the current section."""
971
985
  if current_section == "circular":
972
986
  _process_circular_dependency_item(line, circular_deps)
@@ -974,7 +988,9 @@ def _process_list_item(line, current_section, circular_deps, community_cookbooks
974
988
  _process_community_cookbook_item(line, community_cookbooks)
975
989
 
976
990
 
977
- def _process_circular_dependency_item(line, circular_deps):
991
+ def _process_circular_dependency_item(
992
+ line: str, circular_deps: list[tuple[str, str]]
993
+ ) -> None:
978
994
  """Process a circular dependency list item."""
979
995
  dep_text = line[2:].strip()
980
996
  if "->" in dep_text:
@@ -983,14 +999,16 @@ def _process_circular_dependency_item(line, circular_deps):
983
999
  circular_deps.append((parts[0].strip(), parts[1].strip()))
984
1000
 
985
1001
 
986
- def _process_community_cookbook_item(line, community_cookbooks):
1002
+ def _process_community_cookbook_item(line: str, community_cookbooks: list[str]) -> None:
987
1003
  """Process a community cookbook list item."""
988
1004
  cookbook = line[2:].strip()
989
1005
  if cookbook:
990
1006
  community_cookbooks.append(cookbook)
991
1007
 
992
1008
 
993
- def _parse_dependency_analysis(analysis_result):
1009
+ def _parse_dependency_analysis(
1010
+ analysis_result: str,
1011
+ ) -> tuple[dict[str, list[str]], list[tuple[str, str]], list[str]]:
994
1012
  """Parse dependency analysis result into structured data."""
995
1013
  lines = analysis_result.split("\n")
996
1014
 
@@ -1000,7 +1018,11 @@ def _parse_dependency_analysis(analysis_result):
1000
1018
  return dependencies, circular_deps, community_cookbooks
1001
1019
 
1002
1020
 
1003
- def _create_networkx_graph(dependencies, circular_deps, community_cookbooks):
1021
+ def _create_networkx_graph(
1022
+ dependencies: Mapping[str, Sequence[str]],
1023
+ circular_deps: Sequence[tuple[str, str]],
1024
+ community_cookbooks: Sequence[str],
1025
+ ) -> "nx.DiGraph":
1004
1026
  """Create NetworkX graph from dependency data."""
1005
1027
  import networkx as nx
1006
1028
 
@@ -1025,7 +1047,9 @@ def _create_networkx_graph(dependencies, circular_deps, community_cookbooks):
1025
1047
  return graph
1026
1048
 
1027
1049
 
1028
- def _calculate_graph_positions(graph, layout_algorithm):
1050
+ def _calculate_graph_positions(
1051
+ graph: "nx.DiGraph", layout_algorithm: str
1052
+ ) -> tuple[dict[Any, tuple[float, float]], str]:
1029
1053
  """
1030
1054
  Calculate positions for graph nodes using the specified layout algorithm.
1031
1055
 
@@ -1048,7 +1072,7 @@ def _calculate_graph_positions(graph, layout_algorithm):
1048
1072
  return pos, layout_algorithm
1049
1073
 
1050
1074
 
1051
- def _choose_auto_layout_algorithm(num_nodes):
1075
+ def _choose_auto_layout_algorithm(num_nodes: int) -> str:
1052
1076
  """Choose the best layout algorithm based on graph size."""
1053
1077
  if num_nodes <= 10:
1054
1078
  return "circular"
@@ -1058,7 +1082,9 @@ def _choose_auto_layout_algorithm(num_nodes):
1058
1082
  return "kamada_kawai"
1059
1083
 
1060
1084
 
1061
- def _calculate_positions_with_algorithm(graph, layout_algorithm):
1085
+ def _calculate_positions_with_algorithm(
1086
+ graph: "nx.DiGraph", layout_algorithm: str
1087
+ ) -> Any:
1062
1088
  """Calculate node positions using the specified algorithm."""
1063
1089
  import networkx as nx
1064
1090
 
@@ -1087,7 +1113,9 @@ def _calculate_positions_with_algorithm(graph, layout_algorithm):
1087
1113
  return nx.spring_layout(graph, k=2, iterations=50, seed=42)
1088
1114
 
1089
1115
 
1090
- def _calculate_shell_layout_positions(graph):
1116
+ def _calculate_shell_layout_positions(
1117
+ graph: "nx.DiGraph",
1118
+ ) -> Any:
1091
1119
  """Calculate shell layout positions for hierarchical organization."""
1092
1120
  import networkx as nx
1093
1121
 
@@ -1114,9 +1142,11 @@ def _calculate_shell_layout_positions(graph):
1114
1142
  return nx.spring_layout(graph, k=2, iterations=50, seed=42)
1115
1143
 
1116
1144
 
1117
- def _create_plotly_edge_traces(graph, pos):
1145
+ def _create_plotly_edge_traces(
1146
+ graph: "nx.DiGraph", pos: Mapping[Any, tuple[float, float]]
1147
+ ) -> list["go.Scatter"]:
1118
1148
  """Create edge traces for Plotly graph."""
1119
- import plotly.graph_objects as go # type: ignore[import-untyped]
1149
+ import plotly.graph_objects as go
1120
1150
 
1121
1151
  edge_traces = []
1122
1152
 
@@ -1167,7 +1197,9 @@ def _create_plotly_edge_traces(graph, pos):
1167
1197
  return edge_traces
1168
1198
 
1169
1199
 
1170
- def _create_plotly_node_trace(graph, pos):
1200
+ def _create_plotly_node_trace(
1201
+ graph: "nx.DiGraph", pos: Mapping[Any, tuple[float, float]]
1202
+ ) -> "go.Scatter":
1171
1203
  """Create node trace for Plotly graph."""
1172
1204
  import plotly.graph_objects as go
1173
1205
 
@@ -1220,14 +1252,16 @@ def _create_plotly_node_trace(graph, pos):
1220
1252
  return node_trace
1221
1253
 
1222
1254
 
1223
- def _create_plotly_figure_layout(num_nodes, layout_algorithm):
1255
+ def _create_plotly_figure_layout(num_nodes: int, layout_algorithm: str) -> "go.Layout":
1224
1256
  """Create Plotly figure layout."""
1225
1257
  import plotly.graph_objects as go
1226
1258
 
1227
1259
  layout: go.Layout = go.Layout(
1228
- title=f"Cookbook Dependency Graph ({num_nodes} nodes, "
1229
- f"{layout_algorithm} layout)",
1230
- titlefont_size=16,
1260
+ title={
1261
+ "text": f"Cookbook Dependency Graph ({num_nodes} nodes, "
1262
+ f"{layout_algorithm} layout)",
1263
+ "font": {"size": 16},
1264
+ },
1231
1265
  showlegend=True,
1232
1266
  hovermode="closest",
1233
1267
  margin={"b": 20, "l": 5, "r": 5, "t": 40},
@@ -1247,7 +1281,12 @@ def _create_plotly_figure_layout(num_nodes, layout_algorithm):
1247
1281
  return layout
1248
1282
 
1249
1283
 
1250
- def _create_interactive_plotly_graph(graph, pos, num_nodes, layout_algorithm):
1284
+ def _create_interactive_plotly_graph(
1285
+ graph: "nx.DiGraph",
1286
+ pos: Mapping[Any, tuple[float, float]],
1287
+ num_nodes: int,
1288
+ layout_algorithm: str,
1289
+ ) -> "go.Figure":
1251
1290
  """Create interactive Plotly graph visualization."""
1252
1291
  import plotly.graph_objects as go
1253
1292
 
@@ -1261,7 +1300,12 @@ def _create_interactive_plotly_graph(graph, pos, num_nodes, layout_algorithm):
1261
1300
  return fig
1262
1301
 
1263
1302
 
1264
- def _create_static_matplotlib_graph(graph, pos, num_nodes, layout_algorithm):
1303
+ def _create_static_matplotlib_graph(
1304
+ graph: "nx.DiGraph",
1305
+ pos: Mapping[Any, tuple[float, float]],
1306
+ num_nodes: int,
1307
+ layout_algorithm: str,
1308
+ ) -> "Figure":
1265
1309
  """Create static matplotlib graph visualization."""
1266
1310
  import matplotlib.pyplot as plt
1267
1311
 
@@ -1353,8 +1397,11 @@ def _create_static_matplotlib_graph(graph, pos, num_nodes, layout_algorithm):
1353
1397
 
1354
1398
 
1355
1399
  def create_dependency_graph(
1356
- analysis_result, viz_type, layout_algorithm="auto", filters=None
1357
- ):
1400
+ analysis_result: str,
1401
+ viz_type: str,
1402
+ layout_algorithm: str = "auto",
1403
+ filters: Mapping[str, Any] | None = None,
1404
+ ) -> "go.Figure | Figure | None":
1358
1405
  """
1359
1406
  Create a dependency graph visualization with optional filtering.
1360
1407
 
@@ -1401,7 +1448,9 @@ def create_dependency_graph(
1401
1448
  return None
1402
1449
 
1403
1450
 
1404
- def _apply_graph_filters(graph, filters):
1451
+ def _apply_graph_filters(
1452
+ graph: "nx.DiGraph", filters: Mapping[str, Any]
1453
+ ) -> "nx.DiGraph":
1405
1454
  """Apply filters to the NetworkX graph."""
1406
1455
  filtered_graph = graph.copy()
1407
1456
 
@@ -1413,7 +1462,9 @@ def _apply_graph_filters(graph, filters):
1413
1462
  return filtered_graph
1414
1463
 
1415
1464
 
1416
- def _filter_circular_dependencies_only(graph, filters):
1465
+ def _filter_circular_dependencies_only(
1466
+ graph: "nx.DiGraph", filters: Mapping[str, Any]
1467
+ ) -> "nx.DiGraph":
1417
1468
  """Filter graph to show only nodes involved in circular dependencies."""
1418
1469
  if not filters.get("circular_only", False):
1419
1470
  return graph
@@ -1431,7 +1482,9 @@ def _filter_circular_dependencies_only(graph, filters):
1431
1482
  return graph
1432
1483
 
1433
1484
 
1434
- def _filter_community_cookbooks_only(graph, filters):
1485
+ def _filter_community_cookbooks_only(
1486
+ graph: "nx.DiGraph", filters: Mapping[str, Any]
1487
+ ) -> "nx.DiGraph":
1435
1488
  """Filter graph to show only community cookbooks and their dependencies."""
1436
1489
  if not filters.get("community_only", False):
1437
1490
  return graph
@@ -1451,7 +1504,9 @@ def _filter_community_cookbooks_only(graph, filters):
1451
1504
  return graph
1452
1505
 
1453
1506
 
1454
- def _filter_minimum_connections(graph, filters):
1507
+ def _filter_minimum_connections(
1508
+ graph: "nx.DiGraph", filters: Mapping[str, Any]
1509
+ ) -> "nx.DiGraph":
1455
1510
  """Filter graph to show only nodes with minimum connection count."""
1456
1511
  min_connections = filters.get("min_connections", 0)
1457
1512
  if min_connections <= 0:
@@ -1467,7 +1522,9 @@ def _filter_minimum_connections(graph, filters):
1467
1522
  return graph
1468
1523
 
1469
1524
 
1470
- def _parse_dependency_metrics_from_result(analysis_result):
1525
+ def _parse_dependency_metrics_from_result(
1526
+ analysis_result: str,
1527
+ ) -> tuple[int, int, int, int]:
1471
1528
  """Parse dependency analysis result to extract key metrics."""
1472
1529
  lines = analysis_result.split("\n")
1473
1530
 
@@ -1495,8 +1552,8 @@ def _parse_dependency_metrics_from_result(analysis_result):
1495
1552
 
1496
1553
 
1497
1554
  def _display_dependency_summary_metrics(
1498
- direct_deps, transitive_deps, circular_deps, community_cookbooks
1499
- ):
1555
+ direct_deps: int, transitive_deps: int, circular_deps: int, community_cookbooks: int
1556
+ ) -> None:
1500
1557
  """Display dependency analysis summary metrics."""
1501
1558
  col1, col2, col3, col4 = st.columns(4)
1502
1559
 
@@ -1510,17 +1567,19 @@ def _display_dependency_summary_metrics(
1510
1567
  st.metric(
1511
1568
  SECTION_CIRCULAR_DEPENDENCIES,
1512
1569
  circular_deps,
1513
- delta="⚠️ Check" if circular_deps > 0 else "OK",
1570
+ delta="Check" if circular_deps > 0 else "OK",
1514
1571
  )
1515
1572
 
1516
1573
  with col4:
1517
1574
  st.metric(SECTION_COMMUNITY_COOKBOOKS, community_cookbooks)
1518
1575
 
1519
1576
 
1520
- def _calculate_migration_impact(dependencies, circular_deps, community_cookbooks):
1577
+ def _calculate_migration_impact(
1578
+ dependencies: Mapping[str, Sequence[str]],
1579
+ circular_deps: Sequence[tuple[str, str]],
1580
+ community_cookbooks: Sequence[str],
1581
+ ) -> dict[str, Any]:
1521
1582
  """Calculate migration impact analysis based on dependency structure."""
1522
- from typing import Any
1523
-
1524
1583
  impact: dict[str, Any] = {
1525
1584
  "risk_score": 0.0,
1526
1585
  "timeline_impact_weeks": 0,
@@ -1577,7 +1636,7 @@ def _calculate_migration_impact(dependencies, circular_deps, community_cookbooks
1577
1636
  return impact
1578
1637
 
1579
1638
 
1580
- def _calculate_max_dependency_chain(dependencies):
1639
+ def _calculate_max_dependency_chain(dependencies: Mapping[str, Sequence[str]]) -> int:
1581
1640
  """Calculate the maximum dependency chain length."""
1582
1641
  max_length = 0
1583
1642
 
@@ -1608,7 +1667,7 @@ def _calculate_max_dependency_chain(dependencies):
1608
1667
  return max_length
1609
1668
 
1610
1669
 
1611
- def _find_critical_path(dependencies):
1670
+ def _find_critical_path(dependencies: Mapping[str, Sequence[str]]) -> list[str]:
1612
1671
  """Find the critical path (longest dependency chain)."""
1613
1672
  longest_chain: list[str] = []
1614
1673
 
@@ -1641,7 +1700,9 @@ def _find_critical_path(dependencies):
1641
1700
  return longest_chain
1642
1701
 
1643
1702
 
1644
- def _identify_bottlenecks(dependencies: dict[str, list[str]]):
1703
+ def _identify_bottlenecks(
1704
+ dependencies: Mapping[str, Sequence[str]],
1705
+ ) -> list[dict[str, Any]]:
1645
1706
  """Identify bottleneck cookbooks (highly depended upon)."""
1646
1707
  # Count how many times each cookbook is depended upon
1647
1708
  dependency_counts: dict[str, int] = {}
@@ -1674,7 +1735,11 @@ def _identify_bottlenecks(dependencies: dict[str, list[str]]):
1674
1735
  return sorted(bottlenecks, key=lambda x: x["dependent_count"], reverse=True)
1675
1736
 
1676
1737
 
1677
- def _generate_impact_recommendations(impact, circular_deps, community_cookbooks):
1738
+ def _generate_impact_recommendations(
1739
+ impact: Mapping[str, Any],
1740
+ circular_deps: Sequence[tuple[str, str]],
1741
+ community_cookbooks: Sequence[str],
1742
+ ) -> list[dict[str, Any]]:
1678
1743
  """Generate recommendations based on impact analysis."""
1679
1744
  recommendations = []
1680
1745
 
@@ -1743,8 +1808,11 @@ def _generate_impact_recommendations(impact, circular_deps, community_cookbooks)
1743
1808
 
1744
1809
 
1745
1810
  def _display_detailed_impact_analysis(
1746
- impact_analysis, dependencies, circular_deps, community_cookbooks
1747
- ):
1811
+ impact_analysis: Mapping[str, Any],
1812
+ dependencies: Mapping[str, Sequence[str]],
1813
+ circular_deps: Sequence[tuple[str, str]],
1814
+ community_cookbooks: Sequence[str],
1815
+ ) -> None:
1748
1816
  """Display detailed impact analysis breakdown."""
1749
1817
  _display_risk_assessment_breakdown(dependencies, circular_deps, community_cookbooks)
1750
1818
  _display_critical_path_analysis(impact_analysis)
@@ -1753,8 +1821,10 @@ def _display_detailed_impact_analysis(
1753
1821
 
1754
1822
 
1755
1823
  def _display_risk_assessment_breakdown(
1756
- dependencies, circular_deps, community_cookbooks
1757
- ):
1824
+ dependencies: Mapping[str, Sequence[str]],
1825
+ circular_deps: Sequence[tuple[str, str]],
1826
+ community_cookbooks: Sequence[str],
1827
+ ) -> None:
1758
1828
  """Display risk assessment breakdown."""
1759
1829
  st.markdown("### Risk Assessment Breakdown")
1760
1830
 
@@ -1771,7 +1841,7 @@ def _display_risk_assessment_breakdown(
1771
1841
  st.write(f"• **{factor}**: {score:.1f} points")
1772
1842
 
1773
1843
 
1774
- def _display_critical_path_analysis(impact_analysis):
1844
+ def _display_critical_path_analysis(impact_analysis: Mapping[str, Any]) -> None:
1775
1845
  """Display critical path analysis."""
1776
1846
  st.markdown("### Critical Path Analysis")
1777
1847
  if impact_analysis["critical_path"]:
@@ -1781,45 +1851,45 @@ def _display_critical_path_analysis(impact_analysis):
1781
1851
  st.write("No dependency chains identified.")
1782
1852
 
1783
1853
 
1784
- def _display_migration_bottlenecks(impact_analysis):
1854
+ def _display_migration_bottlenecks(impact_analysis: Mapping[str, Any]) -> None:
1785
1855
  """Display migration bottlenecks."""
1786
1856
  st.markdown("### Migration Bottlenecks")
1787
1857
  if impact_analysis["bottlenecks"]:
1788
1858
  for bottleneck in impact_analysis["bottlenecks"]:
1789
1859
  risk_level = bottleneck["risk_level"]
1790
1860
  if risk_level == "High":
1791
- risk_icon = "🔴"
1861
+ risk_icon = "HIGH"
1792
1862
  elif risk_level == "Medium":
1793
- risk_icon = "🟡"
1863
+ risk_icon = "MEDIUM"
1794
1864
  else:
1795
- risk_icon = "🟢"
1865
+ risk_icon = "LOW"
1796
1866
  st.write(
1797
1867
  f"• {risk_icon} **{bottleneck['cookbook']}**: "
1798
1868
  f"{bottleneck['dependent_count']} dependents "
1799
1869
  f"({risk_level} risk)"
1800
1870
  )
1801
1871
  else:
1802
- st.write("No significant bottlenecks identified.")
1872
+ st.write("No significant bottlenecks identified.")
1803
1873
 
1804
1874
 
1805
- def _display_strategic_recommendations(impact_analysis):
1875
+ def _display_strategic_recommendations(impact_analysis: Mapping[str, Any]) -> None:
1806
1876
  """Display strategic recommendations."""
1807
1877
  st.markdown("### Strategic Recommendations")
1808
1878
  for rec in impact_analysis["recommendations"]:
1809
1879
  priority = rec["priority"]
1810
1880
  if priority == "Critical":
1811
- priority_icon = "🔴"
1881
+ priority_icon = "CRITICAL"
1812
1882
  elif priority == "High":
1813
- priority_icon = "🟡"
1883
+ priority_icon = "HIGH"
1814
1884
  else:
1815
- priority_icon = "🟢"
1885
+ priority_icon = "MEDIUM"
1816
1886
  st.write(f"• {priority_icon} **{priority}**: {rec['action']}")
1817
1887
  st.write(f" *Impact*: {rec['impact']}")
1818
1888
 
1819
1889
 
1820
- def _handle_graph_caching():
1890
+ def _handle_graph_caching() -> None:
1821
1891
  """Handle graph caching controls and cleanup."""
1822
- st.subheader("💾 Graph Cache Management")
1892
+ st.subheader("Graph Cache Management")
1823
1893
 
1824
1894
  col1, col2, col3 = st.columns([1, 1, 2])
1825
1895
 
@@ -1834,12 +1904,14 @@ def _handle_graph_caching():
1834
1904
 
1835
1905
  with col2:
1836
1906
  # Clear cache button
1837
- if st.button("🗑️ Clear Cache", help="Clear all cached graph data"):
1907
+ if st.button(
1908
+ "Clear Cache", help="Clear all cached graph data", key="clear_cache"
1909
+ ):
1838
1910
  # Find and remove all graph cache keys
1839
1911
  cache_keys = [key for key in st.session_state if key.startswith("graph_")]
1840
1912
  for key in cache_keys:
1841
1913
  del st.session_state[key]
1842
- st.success(f"Cleared {len(cache_keys)} cached graphs")
1914
+ st.success(f"Cleared {len(cache_keys)} cached graphs")
1843
1915
  st.rerun()
1844
1916
 
1845
1917
  with col3:
@@ -1861,23 +1933,23 @@ def _handle_graph_caching():
1861
1933
  # Cache status indicator
1862
1934
  if cache_enabled:
1863
1935
  st.success(
1864
- "Graph caching is enabled - visualizations will be "
1936
+ "Graph caching is enabled - visualizations will be "
1865
1937
  "cached for faster loading"
1866
1938
  )
1867
1939
  else:
1868
1940
  st.warning(
1869
- "⚠️ Graph caching is disabled - each visualization will be recalculated"
1941
+ "Graph caching is disabled - each visualization will be recalculated"
1870
1942
  )
1871
1943
 
1872
1944
 
1873
1945
  def _display_dependency_graph_visualization(
1874
- analysis_result,
1875
- viz_type,
1876
- selected_layout,
1877
- show_circular_only,
1878
- show_community_only,
1879
- min_connections,
1880
- ):
1946
+ analysis_result: str,
1947
+ viz_type: str,
1948
+ selected_layout: str,
1949
+ show_circular_only: bool,
1950
+ show_community_only: bool,
1951
+ min_connections: int,
1952
+ ) -> None:
1881
1953
  """Display the dependency graph visualization section with filtering."""
1882
1954
  try:
1883
1955
  # Parse dependencies for filtering
@@ -1917,7 +1989,12 @@ def _display_dependency_graph_visualization(
1917
1989
  _handle_graph_visualization_error(e, analysis_result)
1918
1990
 
1919
1991
 
1920
- def _get_cached_graph_data(analysis_result, viz_type, selected_layout, filters):
1992
+ def _get_cached_graph_data(
1993
+ analysis_result: str,
1994
+ viz_type: str,
1995
+ selected_layout: str,
1996
+ filters: Mapping[str, Any],
1997
+ ) -> Any | None:
1921
1998
  """Get cached graph data if available."""
1922
1999
  cache_key = (
1923
2000
  f"graph_{hash(analysis_result)}_{viz_type}_{selected_layout}_{str(filters)}"
@@ -1927,13 +2004,19 @@ def _get_cached_graph_data(analysis_result, viz_type, selected_layout, filters):
1927
2004
  "graph_cache_enabled", True
1928
2005
  ):
1929
2006
  graph_data = st.session_state[cache_key]
1930
- st.info("📋 Using cached graph data")
2007
+ st.info("Using cached graph data")
1931
2008
  return graph_data
1932
2009
 
1933
2010
  return None
1934
2011
 
1935
2012
 
1936
- def _cache_graph_data(analysis_result, viz_type, selected_layout, filters, graph_data):
2013
+ def _cache_graph_data(
2014
+ analysis_result: str,
2015
+ viz_type: str,
2016
+ selected_layout: str,
2017
+ filters: Mapping[str, Any],
2018
+ graph_data: Any,
2019
+ ) -> None:
1937
2020
  """Cache graph data if caching is enabled."""
1938
2021
  if graph_data is not None and st.session_state.get("graph_cache_enabled", True):
1939
2022
  cache_key = (
@@ -1942,7 +2025,7 @@ def _cache_graph_data(analysis_result, viz_type, selected_layout, filters, graph
1942
2025
  st.session_state[cache_key] = graph_data
1943
2026
 
1944
2027
 
1945
- def _display_graph_with_export_options(graph_data, viz_type):
2028
+ def _display_graph_with_export_options(graph_data: Any, viz_type: str) -> None:
1946
2029
  """Display graph and provide export options."""
1947
2030
  if viz_type == "interactive":
1948
2031
  # Interactive Plotly graph
@@ -1956,7 +2039,7 @@ def _display_graph_with_export_options(graph_data, viz_type):
1956
2039
  # Export as HTML
1957
2040
  html_content = graph_data.to_html(full_html=False, include_plotlyjs="cdn")
1958
2041
  st.download_button(
1959
- label="🌐 HTML",
2042
+ label="HTML",
1960
2043
  data=html_content,
1961
2044
  file_name="dependency_graph.html",
1962
2045
  mime="text/html",
@@ -1967,7 +2050,7 @@ def _display_graph_with_export_options(graph_data, viz_type):
1967
2050
  # Export as JSON
1968
2051
  json_data = graph_data.to_json()
1969
2052
  st.download_button(
1970
- label="📊 JSON",
2053
+ label="JSON",
1971
2054
  data=json_data,
1972
2055
  file_name="dependency_graph.json",
1973
2056
  mime=MIME_APPLICATION_JSON,
@@ -1977,11 +2060,11 @@ def _display_graph_with_export_options(graph_data, viz_type):
1977
2060
  with col3:
1978
2061
  # Export as PNG (requires kaleido)
1979
2062
  try:
1980
- import plotly.io as pio # type: ignore[import-untyped]
2063
+ import plotly.io as pio
1981
2064
 
1982
2065
  png_data = pio.to_image(graph_data, format="png", scale=2)
1983
2066
  st.download_button(
1984
- label="🖼️ PNG (High-res)",
2067
+ label="PNG (High-res)",
1985
2068
  data=png_data,
1986
2069
  file_name="dependency_graph.png",
1987
2070
  mime="image/png",
@@ -1997,7 +2080,7 @@ def _display_graph_with_export_options(graph_data, viz_type):
1997
2080
 
1998
2081
  pdf_data = pio.to_image(graph_data, format="pdf")
1999
2082
  st.download_button(
2000
- label="📄 PDF",
2083
+ label="PDF",
2001
2084
  data=pdf_data,
2002
2085
  file_name="dependency_graph.pdf",
2003
2086
  mime="application/pdf",
@@ -2022,7 +2105,7 @@ def _display_graph_with_export_options(graph_data, viz_type):
2022
2105
  graph_data.savefig(buf, format="png", dpi=300, bbox_inches="tight")
2023
2106
  buf.seek(0)
2024
2107
  st.download_button(
2025
- label="🖼️ PNG (High-res)",
2108
+ label="PNG (High-res)",
2026
2109
  data=buf.getvalue(),
2027
2110
  file_name="dependency_graph.png",
2028
2111
  mime="image/png",
@@ -2035,7 +2118,7 @@ def _display_graph_with_export_options(graph_data, viz_type):
2035
2118
  graph_data.savefig(buf_svg, format="svg", bbox_inches="tight")
2036
2119
  buf_svg.seek(0)
2037
2120
  st.download_button(
2038
- label="📈 SVG",
2121
+ label="SVG",
2039
2122
  data=buf_svg.getvalue(),
2040
2123
  file_name="dependency_graph.svg",
2041
2124
  mime="image/svg+xml",
@@ -2048,7 +2131,7 @@ def _display_graph_with_export_options(graph_data, viz_type):
2048
2131
  graph_data.savefig(buf_pdf, format="pdf", bbox_inches="tight")
2049
2132
  buf_pdf.seek(0)
2050
2133
  st.download_button(
2051
- label="📄 PDF",
2134
+ label="PDF",
2052
2135
  data=buf_pdf.getvalue(),
2053
2136
  file_name="dependency_graph.pdf",
2054
2137
  mime="application/pdf",
@@ -2061,7 +2144,7 @@ def _display_graph_with_export_options(graph_data, viz_type):
2061
2144
  graph_data.savefig(buf_eps, format="eps", bbox_inches="tight")
2062
2145
  buf_eps.seek(0)
2063
2146
  st.download_button(
2064
- label="🔧 EPS",
2147
+ label="EPS",
2065
2148
  data=buf_eps.getvalue(),
2066
2149
  file_name="dependency_graph.eps",
2067
2150
  mime="application/postscript",
@@ -2069,9 +2152,9 @@ def _display_graph_with_export_options(graph_data, viz_type):
2069
2152
  )
2070
2153
 
2071
2154
 
2072
- def _handle_graph_visualization_error(error, analysis_result):
2155
+ def _handle_graph_visualization_error(error: Exception, analysis_result: str) -> None:
2073
2156
  """Handle graph visualization errors with fallback display."""
2074
- st.error("**Graph Visualization Error**")
2157
+ st.error("**Graph Visualization Error**")
2075
2158
  with st.expander("Error Details"):
2076
2159
  st.code(str(error), language="text")
2077
2160
  st.markdown("""
@@ -2087,7 +2170,7 @@ def _handle_graph_visualization_error(error, analysis_result):
2087
2170
  """)
2088
2171
 
2089
2172
  # Fallback: show text summary
2090
- st.info("📄 Showing text-based dependency summary instead:")
2173
+ st.info("Showing text-based dependency summary instead:")
2091
2174
  st.text_area(
2092
2175
  "Dependency Analysis Text",
2093
2176
  analysis_result,
@@ -2096,7 +2179,7 @@ def _handle_graph_visualization_error(error, analysis_result):
2096
2179
  )
2097
2180
 
2098
2181
 
2099
- def _display_dependency_analysis_sections(analysis_result):
2182
+ def _display_dependency_analysis_sections(analysis_result: str) -> None:
2100
2183
  """Display dependency analysis results in expandable sections."""
2101
2184
  # Split analysis into sections
2102
2185
  sections = analysis_result.split("\n## ")
@@ -2108,35 +2191,38 @@ def _display_dependency_analysis_sections(analysis_result):
2108
2191
 
2109
2192
  # Add expanders for different sections
2110
2193
  if "Migration Order Recommendations" in section:
2111
- with st.expander("📋 Migration Order Recommendations"):
2194
+ with st.expander("Migration Order Recommendations"):
2112
2195
  st.markdown(
2113
2196
  section.replace("## Migration Order Recommendations", "")
2114
2197
  )
2115
2198
  elif "Dependency Graph" in section:
2116
- with st.expander("🔗 Dependency Graph"):
2199
+ with st.expander("Dependency Graph"):
2117
2200
  st.markdown(section.replace("## Dependency Graph", ""))
2118
- with st.expander(f"⚠️ {SECTION_CIRCULAR_DEPENDENCIES}"):
2201
+ with st.expander(f"{SECTION_CIRCULAR_DEPENDENCIES}"):
2119
2202
  st.markdown(
2120
2203
  section.replace(f"## {SECTION_CIRCULAR_DEPENDENCIES}", "")
2121
2204
  )
2122
- with st.expander(f"🌐 {SECTION_COMMUNITY_COOKBOOKS}"):
2205
+ with st.expander(f"{SECTION_COMMUNITY_COOKBOOKS}"):
2123
2206
  st.markdown(
2124
2207
  section.replace(f"## {SECTION_COMMUNITY_COOKBOOKS}", "")
2125
2208
  )
2126
- elif "Migration Impact Analysis" in section:
2127
- with st.expander("📊 Migration Impact Analysis"):
2128
- st.markdown(section.replace("## Migration Impact Analysis", ""))
2209
+ elif SECTION_MIGRATION_IMPACT_ANALYSIS in section:
2210
+ with st.expander(SECTION_MIGRATION_IMPACT_ANALYSIS):
2211
+ header_text = f"## {SECTION_MIGRATION_IMPACT_ANALYSIS}"
2212
+ st.markdown(section.replace(header_text, ""))
2129
2213
  else:
2130
2214
  st.markdown(section)
2131
2215
 
2132
2216
 
2133
- def _display_migration_recommendations(circular_deps, community_cookbooks, direct_deps):
2217
+ def _display_migration_recommendations(
2218
+ circular_deps: int, community_cookbooks: int, direct_deps: int
2219
+ ) -> None:
2134
2220
  """Display migration recommendations based on analysis results."""
2135
2221
  st.subheader("Migration Recommendations")
2136
2222
 
2137
2223
  if circular_deps > 0:
2138
2224
  st.error(
2139
- "⚠️ **Critical Issue**: Circular dependencies detected. "
2225
+ "**Critical Issue**: Circular dependencies detected. "
2140
2226
  "Resolve before migration."
2141
2227
  )
2142
2228
  st.markdown("""
@@ -2149,7 +2235,7 @@ def _display_migration_recommendations(circular_deps, community_cookbooks, direc
2149
2235
 
2150
2236
  if community_cookbooks > 0:
2151
2237
  st.success(
2152
- f"**Good News**: {community_cookbooks} community cookbooks identified."
2238
+ f"**Good News**: {community_cookbooks} community cookbooks identified."
2153
2239
  )
2154
2240
  st.markdown("""
2155
2241
  **Recommendations:**
@@ -2159,7 +2245,7 @@ def _display_migration_recommendations(circular_deps, community_cookbooks, direc
2159
2245
  """)
2160
2246
 
2161
2247
  if direct_deps > 10:
2162
- st.warning("⚠️ **Complex Dependencies**: High dependency count detected.")
2248
+ st.warning("**Complex Dependencies**: High dependency count detected.")
2163
2249
  st.markdown("""
2164
2250
  **Consider:**
2165
2251
  - Breaking down monolithic cookbooks
@@ -2168,15 +2254,26 @@ def _display_migration_recommendations(circular_deps, community_cookbooks, direc
2168
2254
  """)
2169
2255
 
2170
2256
 
2257
+ def health_check() -> dict[str, str]:
2258
+ """Return health check information for the SousChef UI service."""
2259
+ from souschef.core.constants import VERSION
2260
+
2261
+ return {
2262
+ "status": "healthy",
2263
+ "service": "souschef-ui",
2264
+ "version": VERSION,
2265
+ }
2266
+
2267
+
2171
2268
  def _display_dependency_export_options(
2172
- analysis_result,
2173
- cookbook_path,
2174
- depth,
2175
- direct_deps,
2176
- transitive_deps,
2177
- circular_deps,
2178
- community_cookbooks,
2179
- ):
2269
+ analysis_result: str,
2270
+ cookbook_path: str,
2271
+ depth: str,
2272
+ direct_deps: int,
2273
+ transitive_deps: int,
2274
+ circular_deps: int,
2275
+ community_cookbooks: int,
2276
+ ) -> None:
2180
2277
  """Display export options for dependency analysis."""
2181
2278
  st.subheader("Export Analysis")
2182
2279
 
@@ -2184,7 +2281,7 @@ def _display_dependency_export_options(

  with col1:
  st.download_button(
- label="📥 Download Full Analysis",
+ label="Download Full Analysis",
  data=analysis_result,
  file_name="dependency_analysis.md",
  mime=MIME_TEXT_MARKDOWN,
@@ -2208,7 +2305,7 @@ def _display_dependency_export_options(
  import json

  st.download_button(
- label="📊 Download JSON Summary",
+ label="Download JSON Summary",
  data=json.dumps(analysis_json, indent=2),
  file_name="dependency_analysis.json",
  mime=MIME_APPLICATION_JSON,
@@ -2216,7 +2313,9 @@ def _display_dependency_export_options(
  )


- def _display_dependency_analysis_summary(analysis_result, cookbook_path, depth):
+ def _display_dependency_analysis_summary(
+ analysis_result: str, cookbook_path: str, depth: str
+ ) -> None:
  """Display dependency analysis summary section."""
  # Summary metrics
  st.subheader("Dependency Analysis Summary")
@@ -2236,12 +2335,12 @@ def _display_dependency_analysis_summary(analysis_result, cookbook_path, depth):
  st.info(analysis_msg)


- def _display_graph_visualization_section(analysis_result, viz_type):
+ def _display_graph_visualization_section(analysis_result: str, viz_type: str) -> None:
  """Display graph visualization section."""
  if viz_type not in ["graph", "interactive"]:
  return

- st.subheader("📊 Dependency Graph Visualization")
+ st.subheader("Dependency Graph Visualization")

  # Parse dependencies for filtering and analysis
  _ = _parse_dependency_analysis(analysis_result)
@@ -2278,7 +2377,7 @@ def _display_graph_visualization_section(analysis_result, viz_type):
  _handle_graph_caching()

  # Graph Filtering Options
- st.subheader("🔍 Graph Filtering & Analysis")
+ st.subheader("Graph Filtering & Analysis")

  col1, col2, col3 = st.columns(3)

@@ -2315,7 +2414,7 @@ def _display_graph_visualization_section(analysis_result, viz_type):
  )


- def _display_impact_analysis_section(analysis_result):
+ def _display_impact_analysis_section(analysis_result: str) -> None:
  """Display migration impact analysis section."""
  # Parse dependencies for impact analysis
  dependencies, circular_deps, community_cookbooks = _parse_dependency_analysis(
@@ -2323,7 +2422,7 @@ def _display_impact_analysis_section(analysis_result):
  )

  # Impact Analysis Section
- st.subheader("📊 Migration Impact Analysis")
+ st.subheader("Migration Impact Analysis")

  if not dependencies:
  st.info("No dependencies found for impact analysis.")
@@ -2336,11 +2435,11 @@ def _display_impact_analysis_section(analysis_result):
  # Calculate risk score delta
  risk_score = impact_analysis["risk_score"]
  if risk_score > 7:
- risk_delta = "🔴 High"
+ risk_delta = "High"
  elif risk_score > 4:
- risk_delta = "🟡 Medium"
+ risk_delta = "Medium"
  else:
- risk_delta = "🟢 Low"
+ risk_delta = "Low"

  col1, col2, col3, col4 = st.columns(4)

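The risk banding in the hunk above maps `impact_analysis["risk_score"]` onto the High/Medium/Low labels shown next to the metric. A minimal sketch of that banding as a standalone function, using only the thresholds visible in the diff (the helper name and its use outside the UI are assumptions):

```python
# Sketch of the score-to-label banding used for the risk metric delta.
def risk_band(risk_score: float) -> str:
    """Map a numeric risk score to the label shown in the UI."""
    if risk_score > 7:
        return "High"
    if risk_score > 4:
        return "Medium"
    return "Low"


# Example: scores of 8.2, 5.0 and 3.1 map to High, Medium and Low.
assert [risk_band(s) for s in (8.2, 5.0, 3.1)] == ["High", "Medium", "Low"]
```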
@@ -2353,7 +2452,7 @@

  with col2:
  timeline_weeks = impact_analysis["timeline_impact_weeks"]
- timeline_delta = "↗️" if timeline_weeks > 0 else ""
+ timeline_delta = "Increase" if timeline_weeks > 0 else "Unchanged"
  st.metric(
  "Estimated Timeline Impact",
  f"{timeline_weeks} weeks",
@@ -2362,7 +2461,7 @@

  with col3:
  complexity_level = impact_analysis["complexity_level"]
- complexity_delta = "⚠️ High" if complexity_level == "High" else "Low"
+ complexity_delta = "High" if complexity_level == "High" else "Low"
  st.metric(
  "Dependency Complexity",
  complexity_level,
@@ -2371,7 +2470,7 @@

  with col4:
  parallel_streams = impact_analysis["parallel_streams"]
- parallel_delta = "🔀 Multiple" if parallel_streams > 1 else "➡️ Single"
+ parallel_delta = "Multiple" if parallel_streams > 1 else "Single"
  st.metric(
  "Parallel Migration Streams",
  parallel_streams,
@@ -2379,15 +2478,18 @@
  )

  # Detailed impact breakdown
- with st.expander("📈 Detailed Impact Analysis"):
+ with st.expander("Detailed Impact Analysis"):
  _display_detailed_impact_analysis(
  impact_analysis, dependencies, circular_deps, community_cookbooks
  )


  def _display_analysis_details_section(
- analysis_result, circular_deps, community_cookbooks, direct_deps
- ):
+ analysis_result: str,
+ circular_deps: list[tuple[str, str]],
+ community_cookbooks: list[str],
+ direct_deps: int,
+ ) -> None:
  """Display analysis details section."""
  # Display analysis results
  st.subheader("Dependency Analysis Details")
@@ -2395,10 +2497,12 @@ def _display_analysis_details_section(
  _display_dependency_analysis_sections(analysis_result)

  # Migration recommendations
- _display_migration_recommendations(circular_deps, community_cookbooks, direct_deps)
+ _display_migration_recommendations(
+ len(circular_deps), len(community_cookbooks), direct_deps
+ )


- def display_dependency_analysis_results():
+ def display_dependency_analysis_results() -> None:
  """Display dependency analysis results."""
  analysis_result = st.session_state.dep_analysis_result
  cookbook_path = st.session_state.dep_cookbook_path
@@ -2430,8 +2534,8 @@
  depth,
  direct_deps,
  len(dependencies) if dependencies else 0, # transitive_deps approximation
- circular_deps,
- community_cookbooks,
+ len(circular_deps),
+ len(community_cookbooks),
  )


@@ -2442,34 +2546,37 @@ def _collect_files_to_validate(input_path: str) -> list[Path]:
  # Error already reported by _normalize_and_validate_input_path
  return []

- path_obj = validated_path
- files_to_validate = []
+ # Path is normalized and validated to be within app root
+ path_obj: Path = validated_path
+ files_to_validate: list[Path] = []

- if not path_obj.exists():
+ # Check if path exists using safe function
+ if not safe_exists(path_obj, Path.cwd()):
  st.error(f"Path does not exist: {path_obj}")
  return []

- if path_obj.is_file():
+ # Determine if it's a file or directory
+ if safe_is_file(path_obj, Path.cwd()):
  if path_obj.suffix in [".yml", ".yaml"] and path_obj.name not in [
  ".kitchen.yml",
  "kitchen.yml",
  "docker-compose.yml",
  ]:
  files_to_validate.append(path_obj)
- elif path_obj.is_dir():
+ elif safe_is_dir(path_obj, Path.cwd()):
  # Filter out obvious non-playbook files
  excluded_files = {".kitchen.yml", "kitchen.yml", "docker-compose.yml"}

- yml_files = list(path_obj.glob("**/*.yml"))
- yaml_files = list(path_obj.glob("**/*.yaml"))
+ yml_files: list[Path] = safe_glob(path_obj, "**/*.yml", Path.cwd())
+ yaml_files: list[Path] = safe_glob(path_obj, "**/*.yaml", Path.cwd())

- raw_files = yml_files + yaml_files
+ raw_files: list[Path] = yml_files + yaml_files
  files_to_validate.extend([f for f in raw_files if f.name not in excluded_files])

  return files_to_validate


- def _run_validation_engine(files_to_validate):
+ def _run_validation_engine(files_to_validate: Sequence[Path]) -> list[Any]:
  """Run validation engine on a list of files."""
  from souschef.core.validation import (
  ValidationCategory,
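The hunk above swaps direct `Path.exists()`/`Path.glob()` calls for `safe_exists`/`safe_is_file`/`safe_is_dir`/`safe_glob` helpers that take a base directory. The real helpers live in the souschef package and may differ; a minimal sketch of the containment idea they suggest, under those assumptions:

```python
# Illustrative only: a glob that keeps results confined to a base directory.
from pathlib import Path


def contained_glob(root: Path, pattern: str, base: Path) -> list[Path]:
    """Glob under root, keeping only results that resolve inside base."""
    base = base.resolve()
    results: list[Path] = []
    for candidate in root.glob(pattern):
        resolved = candidate.resolve()
        # Path.is_relative_to() (Python 3.9+) rejects paths that escape base,
        # e.g. via symlinks pointing outside the project tree.
        if resolved.is_relative_to(base):
            results.append(candidate)
    return results


# Example: collect YAML files beneath the current directory only.
if __name__ == "__main__":
    here = Path.cwd()
    print(contained_glob(here, "**/*.yml", here))
```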
@@ -2510,7 +2617,7 @@
  return all_results


- def _get_default_validation_path():
+ def _get_default_validation_path() -> str:
  """Determine the default path for validation from session state."""
  default_path = ""
  if "converted_playbooks_path" in st.session_state:
@@ -2529,7 +2636,7 @@
  return default_path


- def _render_validation_options_ui():
+ def _render_validation_options_ui() -> tuple[str, str]:
  """Render validation scope and format options."""
  col1, col2 = st.columns(2)

@@ -2560,7 +2667,7 @@
  return sub_scope, sub_format


- def _render_validation_input_ui(default_path):
+ def _render_validation_input_ui(default_path: str) -> str:
  """Render input source selection UI."""
  st.subheader("Input Source")

@@ -2590,7 +2697,7 @@
  return input_path


- def _render_validation_settings_ui():
+ def _render_validation_settings_ui() -> tuple[bool, bool, bool]:
  """Render strict mode and other validation settings."""
  st.subheader("Validation Options")

@@ -2635,24 +2742,16 @@ def _normalize_and_validate_input_path(input_path: str) -> Path | None:
  return None

  try:
- # Expand user home and resolve to an absolute, normalized path
- path_obj = Path(raw).expanduser().resolve()
- except Exception:
- st.error(f"Invalid path: {raw}")
- return None
-
- # Optional safety: constrain to the application root directory
- try:
+ path_obj = _normalize_path(raw)
  app_root = Path(app_path).resolve()
- path_obj.relative_to(app_root)
- except Exception:
- st.error("Path must be within the SousChef project directory.")
+ # Use centralised containment validation
+ return _ensure_within_base_path(path_obj, app_root)
+ except (ValueError, OSError) as e:
+ st.error(f"Invalid path: {e}")
  return None

- return path_obj

-
- def _handle_validation_execution(input_path, options):
+ def _handle_validation_execution(input_path: str, options: Mapping[str, Any]) -> None:
  """Execute the validation process with progress tracking."""
  progress_tracker = ProgressTracker(
  total_steps=6, description="Running validation..."
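The `_normalize_and_validate_input_path` hunk above replaces an inline resolve-and-compare with a normalise step plus a centralised `_ensure_within_base_path` check. A minimal sketch of what such a containment check can look like, assuming the illustrative names below (the actual helper's signature and error handling in SousChef may differ):

```python
# Illustrative only: normalise a user-supplied path and reject anything
# outside a base directory.
from pathlib import Path


def ensure_within(path: Path, base: Path) -> Path:
    """Return the resolved path, or raise ValueError if it escapes base."""
    resolved = path.expanduser().resolve()
    base = base.resolve()
    if not resolved.is_relative_to(base):
        raise ValueError(f"{resolved} is outside {base}")
    return resolved


# Example: accept a project-relative input, reject an absolute escape.
if __name__ == "__main__":
    project_root = Path.cwd()
    print(ensure_within(Path("playbooks/site.yml"), project_root))
```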
@@ -2669,8 +2768,11 @@
  # Error is handled inside _collect_files_to_validate
  # if path doesn't exist or is invalid
  validated_path = _normalize_and_validate_input_path(input_path)
- if validated_path is not None and validated_path.exists():
- st.warning(f"No YAML files found in {validated_path}")
+ if validated_path is not None:
+ # Check if the validated path exists
+ path_exists: bool = safe_exists(validated_path, Path.cwd())
+ if path_exists:
+ st.warning(f"No YAML files found in {validated_path}")
  return

  progress_tracker.update(3, f"Validating {len(files_to_validate)} files...")
@@ -2708,7 +2810,7 @@
  st.error(f"Error during validation: {e}")


- def show_validation_reports():
+ def show_validation_reports() -> None:
  """Show validation reports and conversion validation."""
  st.header(NAV_VALIDATION_REPORTS)

@@ -2728,7 +2830,9 @@
  )

  # Validation button
- if st.button("Run Validation", type="primary", width="stretch"):
+ if st.button(
+ "Run Validation", type="primary", width="stretch", key="run_validation"
+ ):
  if not input_path or not input_path.strip():
  st.error("Please enter a path to validate.")
  return
@@ -2748,7 +2852,7 @@
  display_validation_results()


- def _filter_results_by_scope(results, scope):
+ def _filter_results_by_scope(results: list[Any], scope: str) -> list[Any]:
  """Filter validation results based on selected scope."""
  from souschef.core.validation import ValidationCategory

@@ -2769,7 +2873,7 @@
  return [r for r in results if r.category == target_category]


- def _parse_validation_metrics(validation_result):
+ def _parse_validation_metrics(validation_result: str) -> tuple[int, int, int, int]:
  """Parse validation result to extract key metrics."""
  lines = validation_result.split("\n")

@@ -2805,7 +2909,9 @@
  return errors, warnings, passed, total_checks


- def _display_validation_summary_metrics(errors, warnings, passed, total_checks):
+ def _display_validation_summary_metrics(
+ errors: int, warnings: int, passed: int, total_checks: int
+ ) -> None:
  """Display validation summary metrics."""
  col1, col2, col3, col4 = st.columns(4)

@@ -2813,28 +2919,28 @@ def _display_validation_summary_metrics(errors, warnings, passed, total_checks):
  st.metric("Total Checks", total_checks)

  with col2:
- st.metric("Passed", passed, delta="" if passed > 0 else "")
+ st.metric("Passed", passed, delta="Pass" if passed > 0 else "")

  with col3:
- st.metric("Warnings", warnings, delta="⚠️" if warnings > 0 else "")
+ st.metric("Warnings", warnings, delta="Warning" if warnings > 0 else "")

  with col4:
- st.metric("Errors", errors, delta="" if errors > 0 else "")
+ st.metric("Errors", errors, delta="Error" if errors > 0 else "")


- def _display_validation_status(errors, warnings):
+ def _display_validation_status(errors: int, warnings: int) -> None:
  """Display overall validation status."""
  if errors > 0:
- st.error("**Validation Failed**: Critical issues found that need attention.")
+ st.error("**Validation Failed**: Critical issues found that need attention.")
  elif warnings > 0:
  st.warning(
- "⚠️ **Validation Passed with Warnings**: Review warnings before proceeding."
+ "**Validation Passed with Warnings**: Review warnings before proceeding."
  )
  else:
- st.success("**Validation Passed**: All checks successful!")
+ st.success("**Validation Passed**: All checks successful!")


- def _display_validation_sections(validation_result):
+ def _display_validation_sections(validation_result: str) -> None:
  """Display validation results in expandable sections."""
  # Split results into sections
  sections = validation_result.split("\n## ")
@@ -2846,28 +2952,28 @@ def _display_validation_sections(validation_result):

  # Add expanders for different sections
  if "Syntax Validation" in section:
- with st.expander("🔍 Syntax Validation"):
+ with st.expander("Syntax Validation"):
  st.markdown(section.replace("## Syntax Validation", ""))
  elif "Logic Validation" in section:
- with st.expander("🧠 Logic Validation"):
+ with st.expander("Logic Validation"):
  st.markdown(section.replace("## Logic Validation", ""))
  elif "Security Validation" in section:
- with st.expander("🔒 Security Validation"):
+ with st.expander("Security Validation"):
  st.markdown(section.replace("## Security Validation", ""))
  elif "Performance Validation" in section:
- with st.expander("Performance Validation"):
+ with st.expander("Performance Validation"):
  st.markdown(section.replace("## Performance Validation", ""))
  elif SCOPE_BEST_PRACTICES in section:
- with st.expander(f"📋 {SCOPE_BEST_PRACTICES}"):
+ with st.expander(f"{SCOPE_BEST_PRACTICES}"):
  st.markdown(section.replace(f"## {SCOPE_BEST_PRACTICES}", ""))
  elif "Recommendations" in section:
- with st.expander("💡 Recommendations"):
+ with st.expander("Recommendations"):
  st.markdown(section.replace("## Recommendations", ""))
  else:
  st.markdown(section)


- def _display_validation_action_items(errors, warnings):
+ def _display_validation_action_items(errors: int, warnings: int) -> None:
  """Display action items based on validation results."""
  if errors > 0 or warnings > 0:
  st.subheader("Action Items")
@@ -2892,15 +2998,15 @@


  def _display_validation_export_options(
- validation_result,
- input_path,
- validation_type,
- options,
- errors,
- warnings,
- passed,
- total_checks,
- ):
+ validation_result: str,
+ input_path: str,
+ validation_type: str,
+ options: Mapping[str, Any],
+ errors: int,
+ warnings: int,
+ passed: int,
+ total_checks: int,
+ ) -> None:
  """Display export options for validation results."""
  st.subheader("Export Report")

@@ -2908,7 +3014,7 @@ def _display_validation_export_options(

  with col1:
  st.download_button(
- label="📥 Download Full Report",
+ label="Download Full Report",
  data=validation_result,
  file_name="validation_report.md",
  mime=MIME_TEXT_MARKDOWN,
@@ -2940,7 +3046,7 @@ def _display_validation_export_options(
  import json

  st.download_button(
- label="📊 Download JSON Summary",
+ label="Download JSON Summary",
  data=json.dumps(report_json, indent=2),
  file_name="validation_report.json",
  mime=MIME_APPLICATION_JSON,
@@ -2948,7 +3054,7 @@ def _display_validation_export_options(
  )


- def display_validation_results():
+ def display_validation_results() -> None:
  """Display validation results."""
  validation_result = st.session_state.validation_result
  input_path = st.session_state.validation_path
@@ -2994,5 +3100,11 @@ def display_validation_results():
  )


+ # UI code only when running under Streamlit
+ if not os.environ.get("STREAMLIT_SERVER_PORT") and not os.environ.get(
+ "STREAMLIT_SERVER_HEADLESS"
+ ):
+ main()
+
  if __name__ == "__main__":
  main()
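The final hunk guards the module-level `main()` call on two Streamlit server environment variables. A minimal sketch of that environment probe factored into a helper, assuming the helper name below (souschef itself inlines the check, and whether Streamlit sets these variables is the premise of the diff, not verified here):

```python
# Illustrative only: detect whether the process was launched by the
# Streamlit server, mirroring the check at the bottom of app.py.
import os


def is_running_under_streamlit() -> bool:
    """Best-effort check based on the variables used in the diff."""
    return bool(
        os.environ.get("STREAMLIT_SERVER_PORT")
        or os.environ.get("STREAMLIT_SERVER_HEADLESS")
    )


if __name__ == "__main__":
    # Outside `streamlit run`, this prints False; in that case the guard in
    # app.py falls through to calling main() directly at import time.
    print(is_running_under_streamlit())
```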