mcp-souschef 2.8.0__py3-none-any.whl → 3.0.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- {mcp_souschef-2.8.0.dist-info → mcp_souschef-3.0.0.dist-info}/METADATA +82 -10
- {mcp_souschef-2.8.0.dist-info → mcp_souschef-3.0.0.dist-info}/RECORD +23 -19
- {mcp_souschef-2.8.0.dist-info → mcp_souschef-3.0.0.dist-info}/WHEEL +1 -1
- souschef/__init__.py +37 -5
- souschef/assessment.py +1248 -57
- souschef/ci/common.py +126 -0
- souschef/ci/github_actions.py +3 -92
- souschef/ci/gitlab_ci.py +2 -52
- souschef/ci/jenkins_pipeline.py +2 -59
- souschef/cli.py +117 -8
- souschef/converters/playbook.py +259 -90
- souschef/converters/resource.py +12 -11
- souschef/converters/template.py +177 -0
- souschef/core/metrics.py +313 -0
- souschef/core/validation.py +53 -0
- souschef/deployment.py +61 -9
- souschef/server.py +680 -0
- souschef/ui/app.py +469 -351
- souschef/ui/pages/ai_settings.py +74 -8
- souschef/ui/pages/cookbook_analysis.py +2467 -298
- souschef/ui/pages/validation_reports.py +274 -0
- {mcp_souschef-2.8.0.dist-info → mcp_souschef-3.0.0.dist-info}/entry_points.txt +0 -0
- {mcp_souschef-2.8.0.dist-info → mcp_souschef-3.0.0.dist-info}/licenses/LICENSE +0 -0
souschef/ui/app.py
CHANGED
@@ -1,72 +1,66 @@
-"""
-Visual Migration Planning Interface for SousChef.
-
-A Streamlit-based web interface for Chef to Ansible migration planning,
-assessment, and visualization.
-"""
-
-import contextlib
-import logging
+# Add the parent directory to the path so we can import souschef modules
 import sys
 from pathlib import Path
 
-import streamlit as st
-
-# Configure logging to stdout for Docker visibility
-logging.basicConfig(
-    level=logging.INFO,
-    format="%(asctime)s - %(name)s - %(levelname)s - %(message)s",
-    stream=sys.stdout,
-    force=True,  # Override any existing configuration
-)
-
-logger = logging.getLogger(__name__)
-logger.info("Starting SousChef UI application")
-
-# Add the parent directory to the path so we can import souschef modules
 app_path = Path(__file__).parent.parent
 if str(app_path) not in sys.path:
    sys.path.insert(0, str(app_path))
 
-
-
-from
-
+import contextlib
+import os
+from collections.abc import Callable, Iterable, Mapping, Sequence
+from typing import (
+    TYPE_CHECKING,
+    Any,
+    Concatenate,
+    ParamSpec,
+    TypeVar,
 )
 
-
+import streamlit as st
+
+if TYPE_CHECKING:
+    import networkx as nx
+    import plotly.graph_objects as go
+    from matplotlib.figure import Figure
+
+P = ParamSpec("P")
+R = TypeVar("R")
+
+from souschef.ui.pages.ai_settings import show_ai_settings_page
+from souschef.ui.pages.cookbook_analysis import show_cookbook_analysis_page
+
+# Constants
+SECTION_COMMUNITY_COOKBOOKS_HEADER = "Community Cookbooks:"
+SECTION_COMMUNITY_COOKBOOKS = "Community Cookbooks"
+SECTION_CIRCULAR_DEPENDENCIES = "Circular Dependencies"
+SECTION_MIGRATION_IMPACT_ANALYSIS = "Migration Impact Analysis"
 NAV_MIGRATION_PLANNING = "Migration Planning"
 NAV_DEPENDENCY_MAPPING = "Dependency Mapping"
 NAV_VALIDATION_REPORTS = "Validation Reports"
-MIME_TEXT_MARKDOWN = "text/markdown"
-MIME_APPLICATION_JSON = "application/json"
-SECTION_CIRCULAR_DEPENDENCIES = "Circular Dependencies"
-NAV_COOKBOOK_ANALYSIS = "Cookbook Analysis"
 NAV_AI_SETTINGS = "AI Settings"
+NAV_COOKBOOK_ANALYSIS = "Cookbook Analysis"
 BUTTON_ANALYSE_DEPENDENCIES = "Analyse Dependencies"
-SECTION_COMMUNITY_COOKBOOKS = "Community Cookbooks"
-SECTION_COMMUNITY_COOKBOOKS_HEADER = "Community Cookbooks:"
 INPUT_METHOD_DIRECTORY_PATH = "Directory Path"
+MIME_TEXT_MARKDOWN = "text/markdown"
+MIME_APPLICATION_JSON = "application/json"
+ERROR_MSG_ENTER_PATH = "Please enter a path."
 SCOPE_BEST_PRACTICES = "Best Practices"
-ERROR_MSG_ENTER_PATH = "Please enter a path to validate."
-
-
-def health_check():
-    """Return simple health check endpoint for Docker."""
-    return {"status": "healthy", "service": "souschef-ui"}
 
 
 class ProgressTracker:
     """Track progress for long-running operations."""
 
-    def __init__(
-        self
-
-        self.
+    def __init__(
+        self, total_steps: int = 100, description: str = "Processing..."
+    ) -> None:
+        self.total_steps: int = total_steps
+        self.current_step: int = 0
+        self.description: str = description
         self.progress_bar = st.progress(0)
         self.status_text = st.empty()
 
-    def update(self, step=None, description=None):
+    def update(self, step: int | None = None, description: str | None = None) -> None:
        """Update progress."""
        if step is not None:
            self.current_step = min(step, self.total_steps)
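The rewritten module header defers the heavy visualization imports (`networkx`, `plotly`, `matplotlib`) behind a `typing.TYPE_CHECKING` guard, so they are only evaluated by static type checkers while the module refers to them through string annotations such as `"nx.DiGraph"` and imports them lazily inside the functions that need them. A minimal, self-contained sketch of that pattern (illustrative only, not code from the package):

```python
from typing import TYPE_CHECKING

if TYPE_CHECKING:
    # Evaluated by type checkers only; no runtime import cost or hard dependency.
    import networkx as nx


def node_count(graph: "nx.DiGraph") -> int:
    """The string annotation defers resolution, so networkx loads only when actually used."""
    return graph.number_of_nodes()
```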
@@ -82,7 +76,7 @@ class ProgressTracker:
             f"{self.description} ({self.current_step}/{self.total_steps})"
         )
 
-    def complete(self, message="Completed!"):
+    def complete(self, message: str = "Completed!") -> None:
         """Mark progress as complete."""
         self.progress_bar.progress(1.0)
         self.status_text.text(message)
@@ -90,24 +84,26 @@ class ProgressTracker:
 
         time.sleep(0.5)  # Brief pause to show completion
 
-    def close(self):
+    def close(self) -> None:
         """Clean up progress indicators."""
         self.progress_bar.empty()
         self.status_text.empty()
 
 
 def with_progress_tracking(
-    operation_func,
-
+    operation_func: Callable[Concatenate[ProgressTracker, P], R],
+    description: str = "Processing...",
+    total_steps: int = 100,
+) -> Callable[P, R]:
     """Add progress tracking to operations."""
 
-    def wrapper(*args, **kwargs):
+    def wrapper(*args: P.args, **kwargs: P.kwargs) -> R:
         tracker = ProgressTracker(total_steps, description)
         try:
-            result = operation_func(tracker, *args, **kwargs)
+            result: R = operation_func(tracker, *args, **kwargs)
             tracker.complete()
             return result
-        except Exception as e:
+        except Exception as e:  # noqa: BLE001
             tracker.close()
             raise e
         finally:
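`with_progress_tracking` is now typed with PEP 612 generics: `Callable[Concatenate[ProgressTracker, P], R]` says the wrapped callable takes a `ProgressTracker` as its first argument, while the returned wrapper exposes the remaining parameters unchanged. A simplified, runnable sketch of that decorator shape (the `Tracker` class and `with_tracking` name are illustrative stand-ins, not the packaged API):

```python
from collections.abc import Callable
from typing import Concatenate, ParamSpec, TypeVar

P = ParamSpec("P")
R = TypeVar("R")


class Tracker:
    """Stand-in for the Streamlit-backed ProgressTracker."""

    def complete(self) -> None:
        print("done")


def with_tracking(func: Callable[Concatenate[Tracker, P], R]) -> Callable[P, R]:
    # The wrapper exposes func's signature minus the leading Tracker argument.
    def wrapper(*args: P.args, **kwargs: P.kwargs) -> R:
        tracker = Tracker()
        result = func(tracker, *args, **kwargs)
        tracker.complete()
        return result

    return wrapper


@with_tracking
def convert(tracker: Tracker, name: str) -> str:
    return f"converted {name}"


print(convert("apache2"))  # the tracker is injected by the wrapper
```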
@@ -116,111 +112,104 @@ def with_progress_tracking(
     return wrapper
 
 
-def
-    """
-    st.
+def main() -> None:
+    """Run the main Streamlit application."""
+    st.set_page_config(
+        page_title="SousChef - Chef to Ansible Migration",
+        layout="wide",
+        initial_sidebar_state="collapsed",
+    )
 
-
-
-        "Dashboard",
-        help="View migration overview and quick actions",
-        width="stretch",
-    ):
-        st.session_state.current_page = "Dashboard"
-        st.rerun()
+    st.title("SousChef - Visual Migration Planning")
+    st.markdown("*AI-powered Chef to Ansible migration planning interface*")
 
-    #
-
-        NAV_COOKBOOK_ANALYSIS,
-        help="Analyse Chef cookbooks and assess migration complexity",
-        width="stretch",
-    ):
-        st.session_state.current_page = NAV_COOKBOOK_ANALYSIS
-        st.rerun()
+    # Main content area - default to dashboard
+    page = st.session_state.get("current_page", "Dashboard")
 
-    #
-
-        NAV_DEPENDENCY_MAPPING,
-        help="Visualise cookbook dependencies and migration order",
-        width="stretch",
-    ):
-        st.session_state.current_page = NAV_DEPENDENCY_MAPPING
-        st.rerun()
+    # Navigation section
+    st.subheader("Navigation")
 
-
-    if st.sidebar.button(
-        NAV_MIGRATION_PLANNING,
-        help="Plan your Chef to Ansible migration with detailed timelines",
-        width="stretch",
-    ):
-        st.session_state.current_page = NAV_MIGRATION_PLANNING
-        st.rerun()
+    col1, col2, col3, col4, col5 = st.columns(5)
 
-
-
-
-
-
-
-
-
+    with col1:
+        if st.button(
+            "Cookbook Analysis",
+            type="primary" if page == NAV_COOKBOOK_ANALYSIS else "secondary",
+            width="stretch",
+            key="nav_cookbook_analysis",
+        ):
+            st.session_state.current_page = NAV_COOKBOOK_ANALYSIS
+            st.rerun()
 
-
-
-
-
-
-
-
-
+    with col2:
+        if st.button(
+            "Migration Planning",
+            type="primary" if page == NAV_MIGRATION_PLANNING else "secondary",
+            width="stretch",
+            key="nav_migration_planning",
+        ):
+            st.session_state.current_page = NAV_MIGRATION_PLANNING
+            st.rerun()
 
+    with col3:
+        if st.button(
+            "Dependency Mapping",
+            type="primary" if page == NAV_DEPENDENCY_MAPPING else "secondary",
+            width="stretch",
+            key="nav_dependency_mapping",
+        ):
+            st.session_state.current_page = NAV_DEPENDENCY_MAPPING
+            st.rerun()
 
-
-
-
-
-
-
-
+    with col4:
+        if st.button(
+            "Validation Reports",
+            type="primary" if page == NAV_VALIDATION_REPORTS else "secondary",
+            width="stretch",
+            key="nav_validation_reports",
+        ):
+            st.session_state.current_page = NAV_VALIDATION_REPORTS
+            st.rerun()
 
-
-
-
-
-
-
-
-
-
-        unsafe_allow_html=True,
-    )
+    with col5:
+        if st.button(
+            "AI Settings",
+            type="primary" if page == NAV_AI_SETTINGS else "secondary",
+            width="stretch",
+            key="nav_ai_settings",
+        ):
+            st.session_state.current_page = NAV_AI_SETTINGS
+            st.rerun()
 
-
-    _setup_sidebar_navigation()
+    st.divider()
 
-    #
-    page
+    # Page routing
+    _route_to_page(page)
 
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
+
+def _route_to_page(page: str) -> None:
+    """Route to the appropriate page based on the current page state."""
+    page_routes = {
+        "Dashboard": show_dashboard,
+        NAV_COOKBOOK_ANALYSIS: show_cookbook_analysis_page,
+        NAV_MIGRATION_PLANNING: show_migration_planning,
+        NAV_DEPENDENCY_MAPPING: show_dependency_mapping,
+        NAV_VALIDATION_REPORTS: show_validation_reports,
+        NAV_AI_SETTINGS: show_ai_settings_page,
+    }
+
+    route_func = page_routes.get(page)
+    if route_func:
+        route_func()
+    else:
+        show_dashboard()  # Default fallback
+
+
+def _calculate_dashboard_metrics() -> tuple[int, str, int, int]:
     """Calculate and return dashboard metrics."""
-    cookbooks_analysed = 0
     complexity_counts = {"High": 0, "Medium": 0, "Low": 0}
     successful_analyses = 0
+    cookbooks_analysed = 0
 
     if "analysis_results" in st.session_state and st.session_state.analysis_results:
         results = st.session_state.analysis_results
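The new `main()` drives navigation through `st.session_state.current_page` and a plain dispatch dict in `_route_to_page()`, instead of the removed per-page `st.sidebar.button` handlers. A framework-free sketch of that dispatch-table pattern (the page functions below are placeholders, not the packaged ones):

```python
from collections.abc import Callable


def show_dashboard() -> None:
    print("dashboard")


def show_cookbook_analysis_page() -> None:
    print("cookbook analysis")


def route_to_page(page: str) -> None:
    # Dict dispatch with a dashboard fallback, mirroring _route_to_page().
    page_routes: dict[str, Callable[[], None]] = {
        "Dashboard": show_dashboard,
        "Cookbook Analysis": show_cookbook_analysis_page,
    }
    page_routes.get(page, show_dashboard)()


route_to_page("Cookbook Analysis")  # prints "cookbook analysis"
route_to_page("unknown")            # falls back to the dashboard
```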
@@ -246,12 +235,20 @@ def _calculate_dashboard_metrics():
     if cookbooks_analysed > 0:
         conversion_rate = int((successful_analyses / cookbooks_analysed) * 100)
 
-    return
+    return (
+        cookbooks_analysed,
+        overall_complexity,
+        conversion_rate,
+        successful_analyses,
+    )
 
 
 def _display_dashboard_metrics(
-    cookbooks_analysed
-
+    cookbooks_analysed: int,
+    overall_complexity: str,
+    conversion_rate: int,
+    successful_analyses: int,
+) -> None:
     """Display the dashboard metrics."""
     col1, col2, col3 = st.columns(3)
 
@@ -286,9 +283,9 @@ def _display_dashboard_metrics(
         st.caption("Successful analyses")
 
 
-def _display_quick_upload_section():
+def _display_quick_upload_section() -> None:
     """Display the quick upload section."""
-    st.subheader("Quick Start")
+    st.subheader("Quick Start - Upload Cookbooks")
 
     col1, col2 = st.columns([2, 1])
 
@@ -309,33 +306,23 @@ def _display_quick_upload_section():
 
             st.success(f"File {uploaded_file.name} uploaded successfully!")
             st.info(
-                "Navigate to Cookbook Analysis to process this file, "
+                "Navigate to Cookbook Analysis above to process this file, "
                 "or upload another file to replace it."
             )
 
     with col2:
-        st.markdown("**
-
-
-
-            st.session_state.current_page = "Cookbook Analysis"
-            st.rerun()
-
-        if st.button("Generate Migration Plan", width="stretch"):
-            st.session_state.current_page = NAV_MIGRATION_PLANNING
-            st.rerun()
-
-        if st.button(BUTTON_ANALYSE_DEPENDENCIES, width="stretch"):
-            st.session_state.current_page = NAV_DEPENDENCY_MAPPING
-            st.rerun()
+        st.markdown("**Upload Options:**")
+        st.markdown("- ZIP archives (.zip)")
+        st.markdown("- TAR archives (.tar, .tar.gz, .tgz)")
+        st.markdown("- Process uploaded files using the navigation buttons above")
 
 
-def _display_recent_activity():
+def _display_recent_activity() -> None:
     """Display the recent activity section."""
     st.subheader("Recent Activity")
     st.info(
         "No recent migration activity. Start by uploading cookbooks "
-        "above or using the
+        f"above or using the {NAV_COOKBOOK_ANALYSIS} page!"
     )
 
     # Getting started guide
@@ -343,7 +330,7 @@ def _display_recent_activity():
     st.markdown("""
     **New to SousChef? Here's how to begin:**
 
-    1. **Upload Cookbooks**: Use the uploader above or go to
+    1. **Upload Cookbooks**: Use the uploader above or go to {NAV_COOKBOOK_ANALYSIS}
     2. **Analyse Complexity**: Get detailed migration assessments
     3. **Plan Migration**: Generate timelines and resource requirements
     4. **Convert to Ansible**: Download converted playbooks
@@ -351,7 +338,7 @@ def _display_recent_activity():
     **Supported Formats:**
     - ZIP archives (.zip)
     - TAR archives (.tar, .tar.gz, .tgz)
-    - Directory paths (in
+    - Directory paths (in {NAV_COOKBOOK_ANALYSIS})
 
     **Expected Structure:**
     ```
@@ -366,7 +353,7 @@ def _display_recent_activity():
     """)
 
 
-def show_dashboard():
+def show_dashboard() -> None:
     """Show the main dashboard with migration overview."""
     st.header("Migration Dashboard")
 
@@ -389,7 +376,7 @@ def show_dashboard():
     _display_recent_activity()
 
 
-def show_migration_planning():
+def show_migration_planning() -> None:
     """Show migration planning interface."""
     st.header(NAV_MIGRATION_PLANNING)
 
@@ -510,7 +497,12 @@ def show_migration_planning():
     # Step 3: Generate Plan
     st.subheader("Step 3: Generate Migration Plan")
 
-    if st.button(
+    if st.button(
+        "Generate Migration Plan",
+        type="primary",
+        width="stretch",
+        key="migration_plan_generate",
+    ):
         if not cookbook_paths.strip():
             st.error("Please enter cookbook paths to generate a migration plan.")
             return
@@ -554,7 +546,9 @@
     display_migration_plan_results()
 
 
-def _display_migration_summary_metrics(
+def _display_migration_summary_metrics(
+    cookbook_paths: str, strategy: str, timeline: int
+) -> None:
     """Display migration overview summary metrics."""
     st.subheader("Migration Overview")
 
@@ -574,7 +568,7 @@ def _display_migration_summary_metrics(cookbook_paths, strategy, timeline):
         st.metric("Status", "Plan Generated")
 
 
-def _display_migration_plan_details(plan_result):
+def _display_migration_plan_details(plan_result: str) -> None:
     """Display the detailed migration plan sections."""
     st.subheader("Migration Plan Details")
 
@@ -595,14 +589,18 @@ def _display_migration_plan_details(plan_result):
             st.markdown(section)
 
 
-def _display_migration_action_buttons(cookbook_paths):
+def _display_migration_action_buttons(cookbook_paths: str) -> None:
     """Display action buttons for next steps."""
     st.subheader("Next Steps")
 
     col1, col2, col3 = st.columns(3)
 
     with col1:
-        if st.button(
+        if st.button(
+            "Generate Detailed Report",
+            width="stretch",
+            key="migration_detailed_report",
+        ):
             with st.spinner("Generating detailed migration report..."):
                 try:
                     from souschef.assessment import generate_migration_report
@@ -616,7 +614,11 @@ def _display_migration_action_buttons(cookbook_paths):
                     st.error(f"Error generating report: {e}")
 
     with col2:
-        if st.button(
+        if st.button(
+            "Analyse Dependencies",
+            width="stretch",
+            key="migration_analyse_dependencies",
+        ):
             if len(cookbook_paths.split(",")) == 1:
                 # Single cookbook dependency analysis
                 cookbook_path = cookbook_paths.split(",")[0].strip()
@@ -636,7 +638,7 @@ def _display_migration_action_buttons(cookbook_paths):
             )
 
     with col3:
-        if st.button("
+        if st.button("Export Plan", width="stretch", key="migration_export_plan"):
             # Create downloadable plan
             plan_content = f"""# Chef to Ansible Migration Plan
 Generated: {st.session_state.get("timestamp", "Unknown")}
@@ -659,20 +661,20 @@ Generated: {st.session_state.get("timestamp", "Unknown")}
             )
 
 
-def _display_additional_reports():
+def _display_additional_reports() -> None:
     """Display detailed report and dependency analysis if available."""
     # Display detailed report if generated
     if "detailed_report" in st.session_state:
-        with st.expander("
+        with st.expander("Detailed Migration Report"):
             st.markdown(st.session_state.detailed_report)
 
     # Display dependency analysis if generated
     if "dep_analysis" in st.session_state:
-        with st.expander("
+        with st.expander("Dependency Analysis"):
             st.markdown(st.session_state.dep_analysis)
 
 
-def display_migration_plan_results():
+def display_migration_plan_results() -> None:
     """Display the generated migration plan results."""
     plan_result = st.session_state.migration_plan
     cookbook_paths = st.session_state.cookbook_paths
@@ -685,7 +687,7 @@ def display_migration_plan_results():
     _display_additional_reports()
 
 
-def show_dependency_mapping():
+def show_dependency_mapping() -> None:
     """Show dependency mapping visualization."""
     st.header(NAV_DEPENDENCY_MAPPING)
 
@@ -762,7 +764,12 @@ def show_dependency_mapping():
     )
 
     # Analysis button
-    if st.button(
+    if st.button(
+        BUTTON_ANALYSE_DEPENDENCIES,
+        type="primary",
+        width="stretch",
+        key="dep_analyse_dependencies",
+    ):
         if not cookbook_path or not cookbook_path.strip():
             st.error("Please enter a cookbook directory path.")
             return
@@ -804,7 +811,7 @@ def show_dependency_mapping():
         display_dependency_analysis_results()
 
 
-def _setup_dependency_mapping_ui():
+def _setup_dependency_mapping_ui() -> None:
     """Set up the dependency mapping UI header and description."""
     st.header(NAV_DEPENDENCY_MAPPING)
 
@@ -814,7 +821,7 @@ def _setup_dependency_mapping_ui():
     """)
 
 
-def _get_dependency_mapping_inputs():
+def _get_dependency_mapping_inputs() -> tuple[str, str, str]:
     """Collect user inputs for dependency analysis."""
     # Cookbook path input
     cookbook_path = st.text_input(
@@ -854,11 +861,13 @@ def _get_dependency_mapping_inputs():
 
 
 def _handle_dependency_analysis_execution(
-    cookbook_path, dependency_depth, visualization_type
-):
+    cookbook_path: str, dependency_depth: str, visualization_type: str
+) -> None:
     """Handle the dependency analysis execution when button is clicked."""
     # Analysis button
-    if st.button(
+    if st.button(
+        BUTTON_ANALYSE_DEPENDENCIES, type="primary", width="stretch", key="dep_analyze"
+    ):
         if not cookbook_path or not cookbook_path.strip():
             st.error("Please enter a cookbook directory path.")
             return
@@ -868,7 +877,9 @@ def _handle_dependency_analysis_execution(
     )
 
 
-def _perform_dependency_analysis(
+def _perform_dependency_analysis(
+    cookbook_path: str, dependency_depth: str, visualization_type: str
+) -> None:
     """Perform the actual dependency analysis."""
     # Import assessment functions
     from souschef.assessment import analyse_cookbook_dependencies
@@ -903,16 +914,16 @@ def _perform_dependency_analysis(cookbook_path, dependency_depth, visualization_
         st.error(f"Error analyzing dependencies: {e}")
 
 
-def _display_dependency_analysis_results_if_available():
+def _display_dependency_analysis_results_if_available() -> None:
     """Display dependency analysis results if they exist in session state."""
     # Display results if available
     if "dep_analysis_result" in st.session_state:
         display_dependency_analysis_results()
 
 
-def _extract_dependency_relationships(lines):
+def _extract_dependency_relationships(lines: Iterable[str]) -> dict[str, list[str]]:
     """Extract dependency relationships from analysis lines."""
-    dependencies = {}
+    dependencies: dict[str, list[str]] = {}
     current_section = None
 
     for line in lines:
@@ -935,11 +946,13 @@ def _extract_dependency_relationships(lines):
     return dependencies
 
 
-def _extract_circular_and_community_deps(
+def _extract_circular_and_community_deps(
+    lines: Iterable[str],
+) -> tuple[list[tuple[str, str]], list[str]]:
     """Extract circular dependencies and community cookbooks."""
     circular_deps: list[tuple[str, str]] = []
     community_cookbooks: list[str] = []
-    current_section = None
+    current_section: str | None = None
 
     for line in lines:
         current_section = _update_current_section(line, current_section)
@@ -951,7 +964,7 @@ def _extract_circular_and_community_deps(lines):
     return circular_deps, community_cookbooks
 
 
-def _update_current_section(line, current_section):
+def _update_current_section(line: str, current_section: str | None) -> str | None:
     """Update the current section based on the line content."""
     line = line.strip()
     if "Circular Dependencies:" in line:
@@ -961,12 +974,17 @@ def _update_current_section(line, current_section):
     return current_section
 
 
-def _is_list_item(line):
+def _is_list_item(line: str) -> bool:
     """Check if the line is a list item."""
     return line.strip().startswith("- ")
 
 
-def _process_list_item(
+def _process_list_item(
+    line: str,
+    current_section: str | None,
+    circular_deps: list[tuple[str, str]],
+    community_cookbooks: list[str],
+) -> None:
     """Process a list item based on the current section."""
     if current_section == "circular":
         _process_circular_dependency_item(line, circular_deps)
@@ -974,7 +992,9 @@ def _process_list_item(line, current_section, circular_deps, community_cookbooks
         _process_community_cookbook_item(line, community_cookbooks)
 
 
-def _process_circular_dependency_item(
+def _process_circular_dependency_item(
+    line: str, circular_deps: list[tuple[str, str]]
+) -> None:
     """Process a circular dependency list item."""
     dep_text = line[2:].strip()
     if "->" in dep_text:
@@ -983,14 +1003,16 @@ def _process_circular_dependency_item(line, circular_deps):
             circular_deps.append((parts[0].strip(), parts[1].strip()))
 
 
-def _process_community_cookbook_item(line, community_cookbooks):
+def _process_community_cookbook_item(line: str, community_cookbooks: list[str]) -> None:
     """Process a community cookbook list item."""
     cookbook = line[2:].strip()
     if cookbook:
         community_cookbooks.append(cookbook)
 
 
-def _parse_dependency_analysis(
+def _parse_dependency_analysis(
+    analysis_result: str,
+) -> tuple[dict[str, list[str]], list[tuple[str, str]], list[str]]:
     """Parse dependency analysis result into structured data."""
     lines = analysis_result.split("\n")
 
@@ -1000,7 +1022,11 @@ def _parse_dependency_analysis(analysis_result):
     return dependencies, circular_deps, community_cookbooks
 
 
-def _create_networkx_graph(
+def _create_networkx_graph(
+    dependencies: Mapping[str, Sequence[str]],
+    circular_deps: Sequence[tuple[str, str]],
+    community_cookbooks: Sequence[str],
+) -> "nx.DiGraph":
     """Create NetworkX graph from dependency data."""
     import networkx as nx
 
@@ -1025,7 +1051,9 @@ def _create_networkx_graph(dependencies, circular_deps, community_cookbooks):
     return graph
 
 
-def _calculate_graph_positions(
+def _calculate_graph_positions(
+    graph: "nx.DiGraph", layout_algorithm: str
+) -> tuple[dict[Any, tuple[float, float]], str]:
     """
     Calculate positions for graph nodes using the specified layout algorithm.
 
@@ -1048,7 +1076,7 @@ def _calculate_graph_positions(graph, layout_algorithm):
     return pos, layout_algorithm
 
 
-def _choose_auto_layout_algorithm(num_nodes):
+def _choose_auto_layout_algorithm(num_nodes: int) -> str:
     """Choose the best layout algorithm based on graph size."""
     if num_nodes <= 10:
         return "circular"
@@ -1058,7 +1086,9 @@ def _choose_auto_layout_algorithm(num_nodes):
     return "kamada_kawai"
 
 
-def _calculate_positions_with_algorithm(
+def _calculate_positions_with_algorithm(
+    graph: "nx.DiGraph", layout_algorithm: str
+) -> Any:
     """Calculate node positions using the specified algorithm."""
     import networkx as nx
 
@@ -1087,7 +1117,9 @@ def _calculate_positions_with_algorithm(graph, layout_algorithm):
         return nx.spring_layout(graph, k=2, iterations=50, seed=42)
 
 
-def _calculate_shell_layout_positions(
+def _calculate_shell_layout_positions(
+    graph: "nx.DiGraph",
+) -> Any:
     """Calculate shell layout positions for hierarchical organization."""
     import networkx as nx
 
@@ -1114,9 +1146,11 @@ def _calculate_shell_layout_positions(graph):
     return nx.spring_layout(graph, k=2, iterations=50, seed=42)
 
 
-def _create_plotly_edge_traces(
+def _create_plotly_edge_traces(
+    graph: "nx.DiGraph", pos: Mapping[Any, tuple[float, float]]
+) -> list["go.Scatter"]:
     """Create edge traces for Plotly graph."""
-    import plotly.graph_objects as go
+    import plotly.graph_objects as go
 
     edge_traces = []
 
@@ -1167,7 +1201,9 @@ def _create_plotly_edge_traces(graph, pos):
     return edge_traces
 
 
-def _create_plotly_node_trace(
+def _create_plotly_node_trace(
+    graph: "nx.DiGraph", pos: Mapping[Any, tuple[float, float]]
+) -> "go.Scatter":
     """Create node trace for Plotly graph."""
     import plotly.graph_objects as go
 
@@ -1220,14 +1256,16 @@ def _create_plotly_node_trace(graph, pos):
     return node_trace
 
 
-def _create_plotly_figure_layout(num_nodes, layout_algorithm):
+def _create_plotly_figure_layout(num_nodes: int, layout_algorithm: str) -> "go.Layout":
     """Create Plotly figure layout."""
     import plotly.graph_objects as go
 
     layout: go.Layout = go.Layout(
-        title=
-
-
+        title={
+            "text": f"Cookbook Dependency Graph ({num_nodes} nodes, "
+            f"{layout_algorithm} layout)",
+            "font": {"size": 16},
+        },
         showlegend=True,
         hovermode="closest",
         margin={"b": 20, "l": 5, "r": 5, "t": 40},
@@ -1247,7 +1285,12 @@ def _create_plotly_figure_layout(num_nodes, layout_algorithm):
     return layout
 
 
-def _create_interactive_plotly_graph(
+def _create_interactive_plotly_graph(
+    graph: "nx.DiGraph",
+    pos: Mapping[Any, tuple[float, float]],
+    num_nodes: int,
+    layout_algorithm: str,
+) -> "go.Figure":
     """Create interactive Plotly graph visualization."""
     import plotly.graph_objects as go
 
@@ -1261,7 +1304,12 @@ def _create_interactive_plotly_graph(graph, pos, num_nodes, layout_algorithm):
     return fig
 
 
-def _create_static_matplotlib_graph(
+def _create_static_matplotlib_graph(
+    graph: "nx.DiGraph",
+    pos: Mapping[Any, tuple[float, float]],
+    num_nodes: int,
+    layout_algorithm: str,
+) -> "Figure":
     """Create static matplotlib graph visualization."""
     import matplotlib.pyplot as plt
 
@@ -1353,8 +1401,11 @@ def _create_static_matplotlib_graph(graph, pos, num_nodes, layout_algorithm):
 
 
 def create_dependency_graph(
-    analysis_result
-
+    analysis_result: str,
+    viz_type: str,
+    layout_algorithm: str = "auto",
+    filters: Mapping[str, Any] | None = None,
+) -> "go.Figure | Figure | None":
     """
     Create a dependency graph visualization with optional filtering.
 
@@ -1401,7 +1452,9 @@ def create_dependency_graph(
         return None
 
 
-def _apply_graph_filters(
+def _apply_graph_filters(
+    graph: "nx.DiGraph", filters: Mapping[str, Any]
+) -> "nx.DiGraph":
     """Apply filters to the NetworkX graph."""
     filtered_graph = graph.copy()
 
@@ -1413,7 +1466,9 @@ def _apply_graph_filters(graph, filters):
     return filtered_graph
 
 
-def _filter_circular_dependencies_only(
+def _filter_circular_dependencies_only(
+    graph: "nx.DiGraph", filters: Mapping[str, Any]
+) -> "nx.DiGraph":
     """Filter graph to show only nodes involved in circular dependencies."""
     if not filters.get("circular_only", False):
         return graph
@@ -1431,7 +1486,9 @@ def _filter_circular_dependencies_only(graph, filters):
     return graph
 
 
-def _filter_community_cookbooks_only(
+def _filter_community_cookbooks_only(
+    graph: "nx.DiGraph", filters: Mapping[str, Any]
+) -> "nx.DiGraph":
     """Filter graph to show only community cookbooks and their dependencies."""
     if not filters.get("community_only", False):
         return graph
@@ -1451,7 +1508,9 @@ def _filter_community_cookbooks_only(graph, filters):
     return graph
 
 
-def _filter_minimum_connections(
+def _filter_minimum_connections(
+    graph: "nx.DiGraph", filters: Mapping[str, Any]
+) -> "nx.DiGraph":
     """Filter graph to show only nodes with minimum connection count."""
     min_connections = filters.get("min_connections", 0)
     if min_connections <= 0:
@@ -1467,7 +1526,9 @@ def _filter_minimum_connections(graph, filters):
     return graph
 
 
-def _parse_dependency_metrics_from_result(
+def _parse_dependency_metrics_from_result(
+    analysis_result: str,
+) -> tuple[int, int, int, int]:
     """Parse dependency analysis result to extract key metrics."""
     lines = analysis_result.split("\n")
 
@@ -1495,8 +1556,8 @@ def _parse_dependency_metrics_from_result(analysis_result):
 
 
 def _display_dependency_summary_metrics(
-    direct_deps, transitive_deps, circular_deps, community_cookbooks
-):
+    direct_deps: int, transitive_deps: int, circular_deps: int, community_cookbooks: int
+) -> None:
     """Display dependency analysis summary metrics."""
     col1, col2, col3, col4 = st.columns(4)
 
@@ -1510,17 +1571,19 @@ def _display_dependency_summary_metrics(
         st.metric(
             SECTION_CIRCULAR_DEPENDENCIES,
             circular_deps,
-            delta="
+            delta="Check" if circular_deps > 0 else "OK",
         )
 
     with col4:
         st.metric(SECTION_COMMUNITY_COOKBOOKS, community_cookbooks)
 
 
-def _calculate_migration_impact(
+def _calculate_migration_impact(
+    dependencies: Mapping[str, Sequence[str]],
+    circular_deps: Sequence[tuple[str, str]],
+    community_cookbooks: Sequence[str],
+) -> dict[str, Any]:
     """Calculate migration impact analysis based on dependency structure."""
-    from typing import Any
-
     impact: dict[str, Any] = {
         "risk_score": 0.0,
         "timeline_impact_weeks": 0,
@@ -1577,7 +1640,7 @@ def _calculate_migration_impact(dependencies, circular_deps, community_cookbooks
     return impact
 
 
-def _calculate_max_dependency_chain(dependencies):
+def _calculate_max_dependency_chain(dependencies: Mapping[str, Sequence[str]]) -> int:
     """Calculate the maximum dependency chain length."""
     max_length = 0
 
@@ -1608,7 +1671,7 @@ def _calculate_max_dependency_chain(dependencies):
     return max_length
 
 
-def _find_critical_path(dependencies):
+def _find_critical_path(dependencies: Mapping[str, Sequence[str]]) -> list[str]:
     """Find the critical path (longest dependency chain)."""
     longest_chain: list[str] = []
 
@@ -1641,7 +1704,9 @@ def _find_critical_path(dependencies):
     return longest_chain
 
 
-def _identify_bottlenecks(
+def _identify_bottlenecks(
+    dependencies: Mapping[str, Sequence[str]],
+) -> list[dict[str, Any]]:
     """Identify bottleneck cookbooks (highly depended upon)."""
     # Count how many times each cookbook is depended upon
     dependency_counts: dict[str, int] = {}
@@ -1674,7 +1739,11 @@ def _identify_bottlenecks(dependencies: dict[str, list[str]]):
     return sorted(bottlenecks, key=lambda x: x["dependent_count"], reverse=True)
 
 
-def _generate_impact_recommendations(
+def _generate_impact_recommendations(
+    impact: Mapping[str, Any],
+    circular_deps: Sequence[tuple[str, str]],
+    community_cookbooks: Sequence[str],
+) -> list[dict[str, Any]]:
     """Generate recommendations based on impact analysis."""
     recommendations = []
 
@@ -1743,8 +1812,11 @@ def _generate_impact_recommendations(impact, circular_deps, community_cookbooks)
 
 
 def _display_detailed_impact_analysis(
-    impact_analysis
-
+    impact_analysis: Mapping[str, Any],
+    dependencies: Mapping[str, Sequence[str]],
+    circular_deps: Sequence[tuple[str, str]],
+    community_cookbooks: Sequence[str],
+) -> None:
     """Display detailed impact analysis breakdown."""
     _display_risk_assessment_breakdown(dependencies, circular_deps, community_cookbooks)
     _display_critical_path_analysis(impact_analysis)
@@ -1753,8 +1825,10 @@ def _display_detailed_impact_analysis(
 
 
 def _display_risk_assessment_breakdown(
-    dependencies,
-
+    dependencies: Mapping[str, Sequence[str]],
+    circular_deps: Sequence[tuple[str, str]],
+    community_cookbooks: Sequence[str],
+) -> None:
     """Display risk assessment breakdown."""
     st.markdown("### Risk Assessment Breakdown")
 
@@ -1771,7 +1845,7 @@ def _display_risk_assessment_breakdown(
         st.write(f"• **{factor}**: {score:.1f} points")
 
 
-def _display_critical_path_analysis(impact_analysis):
+def _display_critical_path_analysis(impact_analysis: Mapping[str, Any]) -> None:
     """Display critical path analysis."""
     st.markdown("### Critical Path Analysis")
     if impact_analysis["critical_path"]:
@@ -1781,45 +1855,45 @@ def _display_critical_path_analysis(impact_analysis):
         st.write("No dependency chains identified.")
 
 
-def _display_migration_bottlenecks(impact_analysis):
+def _display_migration_bottlenecks(impact_analysis: Mapping[str, Any]) -> None:
     """Display migration bottlenecks."""
     st.markdown("### Migration Bottlenecks")
     if impact_analysis["bottlenecks"]:
         for bottleneck in impact_analysis["bottlenecks"]:
             risk_level = bottleneck["risk_level"]
             if risk_level == "High":
-                risk_icon = "
+                risk_icon = "HIGH"
             elif risk_level == "Medium":
-                risk_icon = "
+                risk_icon = "MEDIUM"
             else:
-                risk_icon = "
+                risk_icon = "LOW"
             st.write(
                 f"• {risk_icon} **{bottleneck['cookbook']}**: "
                 f"{bottleneck['dependent_count']} dependents "
                 f"({risk_level} risk)"
             )
     else:
-        st.write("
+        st.write("No significant bottlenecks identified.")
 
 
-def _display_strategic_recommendations(impact_analysis):
+def _display_strategic_recommendations(impact_analysis: Mapping[str, Any]) -> None:
     """Display strategic recommendations."""
     st.markdown("### Strategic Recommendations")
     for rec in impact_analysis["recommendations"]:
         priority = rec["priority"]
         if priority == "Critical":
-            priority_icon = "
+            priority_icon = "CRITICAL"
         elif priority == "High":
-            priority_icon = "
+            priority_icon = "HIGH"
         else:
-            priority_icon = "
+            priority_icon = "MEDIUM"
         st.write(f"• {priority_icon} **{priority}**: {rec['action']}")
         st.write(f"  *Impact*: {rec['impact']}")
 
 
-def _handle_graph_caching():
+def _handle_graph_caching() -> None:
     """Handle graph caching controls and cleanup."""
-    st.subheader("
+    st.subheader("Graph Cache Management")
 
     col1, col2, col3 = st.columns([1, 1, 2])
 
@@ -1834,12 +1908,14 @@ def _handle_graph_caching():
 
     with col2:
         # Clear cache button
-        if st.button(
+        if st.button(
+            "Clear Cache", help="Clear all cached graph data", key="clear_cache"
+        ):
             # Find and remove all graph cache keys
             cache_keys = [key for key in st.session_state if key.startswith("graph_")]
             for key in cache_keys:
                 del st.session_state[key]
-            st.success(f"
+            st.success(f"Cleared {len(cache_keys)} cached graphs")
             st.rerun()
 
     with col3:
@@ -1861,23 +1937,23 @@ def _handle_graph_caching():
     # Cache status indicator
     if cache_enabled:
         st.success(
-            "
+            "Graph caching is enabled - visualizations will be "
             "cached for faster loading"
         )
     else:
         st.warning(
-            "
+            "Graph caching is disabled - each visualization will be recalculated"
         )
 
 
 def _display_dependency_graph_visualization(
-    analysis_result,
-    viz_type,
-    selected_layout,
-    show_circular_only,
-    show_community_only,
-    min_connections,
-):
+    analysis_result: str,
+    viz_type: str,
+    selected_layout: str,
+    show_circular_only: bool,
+    show_community_only: bool,
+    min_connections: int,
+) -> None:
     """Display the dependency graph visualization section with filtering."""
     try:
         # Parse dependencies for filtering
@@ -1917,7 +1993,12 @@ def _display_dependency_graph_visualization(
         _handle_graph_visualization_error(e, analysis_result)
 
 
-def _get_cached_graph_data(
+def _get_cached_graph_data(
+    analysis_result: str,
+    viz_type: str,
+    selected_layout: str,
+    filters: Mapping[str, Any],
+) -> Any | None:
     """Get cached graph data if available."""
     cache_key = (
         f"graph_{hash(analysis_result)}_{viz_type}_{selected_layout}_{str(filters)}"
@@ -1927,13 +2008,19 @@ def _get_cached_graph_data(analysis_result, viz_type, selected_layout, filters):
         "graph_cache_enabled", True
     ):
         graph_data = st.session_state[cache_key]
-        st.info("
+        st.info("Using cached graph data")
         return graph_data
 
     return None
 
 
-def _cache_graph_data(
+def _cache_graph_data(
+    analysis_result: str,
+    viz_type: str,
+    selected_layout: str,
+    filters: Mapping[str, Any],
+    graph_data: Any,
+) -> None:
     """Cache graph data if caching is enabled."""
     if graph_data is not None and st.session_state.get("graph_cache_enabled", True):
         cache_key = (
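`_get_cached_graph_data()` and `_cache_graph_data()` key cached figures on `hash(analysis_result)` plus the visualization options, and honour a `graph_cache_enabled` flag in `st.session_state`. A rough sketch of that keying scheme, with a plain dict standing in for `st.session_state` (the figure values and helper names here are illustrative assumptions):

```python
from typing import Any

# Stand-in for st.session_state in this sketch.
session_state: dict[str, Any] = {"graph_cache_enabled": True}


def _cache_key(analysis_result: str, viz_type: str, layout: str, filters: dict[str, Any]) -> str:
    # Note: hash() of a str is salted per process, so keys are only stable within one session.
    return f"graph_{hash(analysis_result)}_{viz_type}_{layout}_{filters}"


def get_cached(analysis_result: str, viz_type: str, layout: str, filters: dict[str, Any]) -> Any | None:
    key = _cache_key(analysis_result, viz_type, layout, filters)
    if key in session_state and session_state.get("graph_cache_enabled", True):
        return session_state[key]
    return None


def put_cached(
    analysis_result: str, viz_type: str, layout: str, filters: dict[str, Any], figure: Any
) -> None:
    if figure is not None and session_state.get("graph_cache_enabled", True):
        session_state[_cache_key(analysis_result, viz_type, layout, filters)] = figure
```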
@@ -1942,7 +2029,7 @@ def _cache_graph_data(analysis_result, viz_type, selected_layout, filters, graph
         st.session_state[cache_key] = graph_data
 
 
-def _display_graph_with_export_options(graph_data, viz_type):
+def _display_graph_with_export_options(graph_data: Any, viz_type: str) -> None:
     """Display graph and provide export options."""
     if viz_type == "interactive":
         # Interactive Plotly graph
@@ -1956,7 +2043,7 @@ def _display_graph_with_export_options(graph_data, viz_type):
             # Export as HTML
             html_content = graph_data.to_html(full_html=False, include_plotlyjs="cdn")
             st.download_button(
-                label="
+                label="HTML",
                 data=html_content,
                 file_name="dependency_graph.html",
                 mime="text/html",
@@ -1967,7 +2054,7 @@ def _display_graph_with_export_options(graph_data, viz_type):
             # Export as JSON
             json_data = graph_data.to_json()
             st.download_button(
-                label="
+                label="JSON",
                 data=json_data,
                 file_name="dependency_graph.json",
                 mime=MIME_APPLICATION_JSON,
@@ -1977,11 +2064,11 @@ def _display_graph_with_export_options(graph_data, viz_type):
         with col3:
             # Export as PNG (requires kaleido)
             try:
-                import plotly.io as pio
+                import plotly.io as pio
 
                 png_data = pio.to_image(graph_data, format="png", scale=2)
                 st.download_button(
-                    label="
+                    label="PNG (High-res)",
                     data=png_data,
                     file_name="dependency_graph.png",
                     mime="image/png",
@@ -1997,7 +2084,7 @@ def _display_graph_with_export_options(graph_data, viz_type):
 
                 pdf_data = pio.to_image(graph_data, format="pdf")
                 st.download_button(
-                    label="
+                    label="PDF",
                     data=pdf_data,
                     file_name="dependency_graph.pdf",
                     mime="application/pdf",
@@ -2022,7 +2109,7 @@ def _display_graph_with_export_options(graph_data, viz_type):
             graph_data.savefig(buf, format="png", dpi=300, bbox_inches="tight")
             buf.seek(0)
             st.download_button(
-                label="
+                label="PNG (High-res)",
                 data=buf.getvalue(),
                 file_name="dependency_graph.png",
                 mime="image/png",
@@ -2035,7 +2122,7 @@ def _display_graph_with_export_options(graph_data, viz_type):
             graph_data.savefig(buf_svg, format="svg", bbox_inches="tight")
             buf_svg.seek(0)
             st.download_button(
-                label="
+                label="SVG",
                 data=buf_svg.getvalue(),
                 file_name="dependency_graph.svg",
                 mime="image/svg+xml",
@@ -2048,7 +2135,7 @@ def _display_graph_with_export_options(graph_data, viz_type):
             graph_data.savefig(buf_pdf, format="pdf", bbox_inches="tight")
             buf_pdf.seek(0)
             st.download_button(
-                label="
+                label="PDF",
                 data=buf_pdf.getvalue(),
                 file_name="dependency_graph.pdf",
                 mime="application/pdf",
@@ -2061,7 +2148,7 @@ def _display_graph_with_export_options(graph_data, viz_type):
             graph_data.savefig(buf_eps, format="eps", bbox_inches="tight")
             buf_eps.seek(0)
             st.download_button(
-                label="
+                label="EPS",
                 data=buf_eps.getvalue(),
                 file_name="dependency_graph.eps",
                 mime="application/postscript",
@@ -2069,9 +2156,9 @@ def _display_graph_with_export_options(graph_data, viz_type):
             )
 
 
-def _handle_graph_visualization_error(error, analysis_result):
+def _handle_graph_visualization_error(error: Exception, analysis_result: str) -> None:
     """Handle graph visualization errors with fallback display."""
-    st.error("
+    st.error("**Graph Visualization Error**")
     with st.expander("Error Details"):
         st.code(str(error), language="text")
         st.markdown("""
@@ -2087,7 +2174,7 @@ def _handle_graph_visualization_error(error, analysis_result):
         """)
 
     # Fallback: show text summary
-    st.info("
+    st.info("Showing text-based dependency summary instead:")
     st.text_area(
         "Dependency Analysis Text",
         analysis_result,
@@ -2096,7 +2183,7 @@ def _handle_graph_visualization_error(error, analysis_result):
     )
 
 
-def _display_dependency_analysis_sections(analysis_result):
+def _display_dependency_analysis_sections(analysis_result: str) -> None:
     """Display dependency analysis results in expandable sections."""
     # Split analysis into sections
     sections = analysis_result.split("\n## ")
@@ -2108,35 +2195,38 @@ def _display_dependency_analysis_sections(analysis_result):
|
|
|
2108
2195
|
|
|
2109
2196
|
# Add expanders for different sections
|
|
2110
2197
|
if "Migration Order Recommendations" in section:
|
|
2111
|
-
with st.expander("
|
|
2198
|
+
with st.expander("Migration Order Recommendations"):
|
|
2112
2199
|
st.markdown(
|
|
2113
2200
|
section.replace("## Migration Order Recommendations", "")
|
|
2114
2201
|
)
|
|
2115
2202
|
elif "Dependency Graph" in section:
|
|
2116
|
-
with st.expander("
|
|
2203
|
+
with st.expander("Dependency Graph"):
|
|
2117
2204
|
st.markdown(section.replace("## Dependency Graph", ""))
|
|
2118
|
-
with st.expander(f"
|
|
2205
|
+
with st.expander(f"{SECTION_CIRCULAR_DEPENDENCIES}"):
|
|
2119
2206
|
st.markdown(
|
|
2120
2207
|
section.replace(f"## {SECTION_CIRCULAR_DEPENDENCIES}", "")
|
|
2121
2208
|
)
|
|
2122
|
-
with st.expander(f"
|
|
2209
|
+
with st.expander(f"{SECTION_COMMUNITY_COOKBOOKS}"):
|
|
2123
2210
|
st.markdown(
|
|
2124
2211
|
section.replace(f"## {SECTION_COMMUNITY_COOKBOOKS}", "")
|
|
2125
2212
|
)
|
|
2126
|
-
elif
|
|
2127
|
-
with st.expander(
|
|
2128
|
-
|
|
2213
|
+
elif SECTION_MIGRATION_IMPACT_ANALYSIS in section:
|
|
2214
|
+
with st.expander(SECTION_MIGRATION_IMPACT_ANALYSIS):
|
|
2215
|
+
header_text = f"## {SECTION_MIGRATION_IMPACT_ANALYSIS}"
|
|
2216
|
+
st.markdown(section.replace(header_text, ""))
|
|
2129
2217
|
else:
|
|
2130
2218
|
st.markdown(section)
|
|
2131
2219
|
|
|
2132
2220
|
|
|
2133
|
-
def _display_migration_recommendations(
|
|
2221
|
+
def _display_migration_recommendations(
|
|
2222
|
+
circular_deps: int, community_cookbooks: int, direct_deps: int
|
|
2223
|
+
) -> None:
|
|
2134
2224
|
"""Display migration recommendations based on analysis results."""
|
|
2135
2225
|
st.subheader("Migration Recommendations")
|
|
2136
2226
|
|
|
2137
2227
|
if circular_deps > 0:
|
|
2138
2228
|
st.error(
|
|
2139
|
-
"
|
|
2229
|
+
"**Critical Issue**: Circular dependencies detected. "
|
|
2140
2230
|
"Resolve before migration."
|
|
2141
2231
|
)
|
|
2142
2232
|
st.markdown("""
|
|
@@ -2149,7 +2239,7 @@ def _display_migration_recommendations(circular_deps, community_cookbooks, direc

     if community_cookbooks > 0:
         st.success(
-            f"
+            f"**Good News**: {community_cookbooks} community cookbooks identified."
         )
         st.markdown("""
         **Recommendations:**
@@ -2159,7 +2249,7 @@ def _display_migration_recommendations(circular_deps, community_cookbooks, direc
         """)

     if direct_deps > 10:
-        st.warning("
+        st.warning("**Complex Dependencies**: High dependency count detected.")
         st.markdown("""
         **Consider:**
         - Breaking down monolithic cookbooks
@@ -2168,15 +2258,26 @@ def _display_migration_recommendations(circular_deps, community_cookbooks, direc
         """)


+def health_check() -> dict[str, str]:
+    """Return health check information for the SousChef UI service."""
+    from souschef.core.constants import VERSION
+
+    return {
+        "status": "healthy",
+        "service": "souschef-ui",
+        "version": VERSION,
+    }
+
+
 def _display_dependency_export_options(
-    analysis_result,
-    cookbook_path,
-    depth,
-    direct_deps,
-    transitive_deps,
-    circular_deps,
-    community_cookbooks,
-):
+    analysis_result: str,
+    cookbook_path: str,
+    depth: str,
+    direct_deps: int,
+    transitive_deps: int,
+    circular_deps: int,
+    community_cookbooks: int,
+) -> None:
     """Display export options for dependency analysis."""
     st.subheader("Export Analysis")

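The new `health_check()` above returns a plain dictionary, so it can be exercised without starting Streamlit. A hedged, self-contained sketch of how a caller might consume it follows; the hard-coded fallback version and the probe logic are assumptions for illustration, whereas the real function reads `VERSION` from `souschef.core.constants`.

```python
# Standalone sketch only: mirrors the shape of the dict returned by the
# new health_check(), with a placeholder version so it runs without the
# souschef package installed.
def health_check(version: str = "3.0.0") -> dict[str, str]:
    return {
        "status": "healthy",
        "service": "souschef-ui",
        "version": version,
    }


if __name__ == "__main__":
    info = health_check()
    # A liveness probe would typically treat any status other than
    # "healthy" as a failure.
    assert info["status"] == "healthy"
    print(info)
```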
@@ -2184,7 +2285,7 @@ def _display_dependency_export_options(

     with col1:
         st.download_button(
-            label="
+            label="Download Full Analysis",
             data=analysis_result,
             file_name="dependency_analysis.md",
             mime=MIME_TEXT_MARKDOWN,
@@ -2208,7 +2309,7 @@ def _display_dependency_export_options(
         import json

         st.download_button(
-            label="
+            label="Download JSON Summary",
             data=json.dumps(analysis_json, indent=2),
             file_name="dependency_analysis.json",
             mime=MIME_APPLICATION_JSON,
@@ -2216,7 +2317,9 @@ def _display_dependency_export_options(
         )


-def _display_dependency_analysis_summary(
+def _display_dependency_analysis_summary(
+    analysis_result: str, cookbook_path: str, depth: str
+) -> None:
     """Display dependency analysis summary section."""
     # Summary metrics
     st.subheader("Dependency Analysis Summary")
@@ -2236,12 +2339,12 @@ def _display_dependency_analysis_summary(analysis_result, cookbook_path, depth):
     st.info(analysis_msg)


-def _display_graph_visualization_section(analysis_result, viz_type):
+def _display_graph_visualization_section(analysis_result: str, viz_type: str) -> None:
     """Display graph visualization section."""
     if viz_type not in ["graph", "interactive"]:
         return

-    st.subheader("
+    st.subheader("Dependency Graph Visualization")

     # Parse dependencies for filtering and analysis
     _ = _parse_dependency_analysis(analysis_result)
@@ -2278,7 +2381,7 @@ def _display_graph_visualization_section(analysis_result, viz_type):
     _handle_graph_caching()

     # Graph Filtering Options
-    st.subheader("
+    st.subheader("Graph Filtering & Analysis")

     col1, col2, col3 = st.columns(3)

@@ -2315,7 +2418,7 @@ def _display_graph_visualization_section(analysis_result, viz_type):
     )


-def _display_impact_analysis_section(analysis_result):
+def _display_impact_analysis_section(analysis_result: str) -> None:
     """Display migration impact analysis section."""
     # Parse dependencies for impact analysis
     dependencies, circular_deps, community_cookbooks = _parse_dependency_analysis(
@@ -2323,7 +2426,7 @@ def _display_impact_analysis_section(analysis_result):
     )

     # Impact Analysis Section
-    st.subheader("
+    st.subheader("Migration Impact Analysis")

     if not dependencies:
         st.info("No dependencies found for impact analysis.")
@@ -2336,11 +2439,11 @@ def _display_impact_analysis_section(analysis_result):
     # Calculate risk score delta
     risk_score = impact_analysis["risk_score"]
     if risk_score > 7:
-        risk_delta = "
+        risk_delta = "High"
     elif risk_score > 4:
-        risk_delta = "
+        risk_delta = "Medium"
     else:
-        risk_delta = "
+        risk_delta = "Low"

     col1, col2, col3, col4 = st.columns(4)

@@ -2353,7 +2456,7 @@ def _display_impact_analysis_section(analysis_result):

     with col2:
         timeline_weeks = impact_analysis["timeline_impact_weeks"]
-        timeline_delta = "
+        timeline_delta = "Increase" if timeline_weeks > 0 else "Unchanged"
         st.metric(
             "Estimated Timeline Impact",
             f"{timeline_weeks} weeks",
@@ -2362,7 +2465,7 @@ def _display_impact_analysis_section(analysis_result):

     with col3:
         complexity_level = impact_analysis["complexity_level"]
-        complexity_delta = "
+        complexity_delta = "High" if complexity_level == "High" else "Low"
         st.metric(
             "Dependency Complexity",
             complexity_level,
@@ -2371,7 +2474,7 @@ def _display_impact_analysis_section(analysis_result):

     with col4:
         parallel_streams = impact_analysis["parallel_streams"]
-        parallel_delta = "
+        parallel_delta = "Multiple" if parallel_streams > 1 else "Single"
         st.metric(
             "Parallel Migration Streams",
             parallel_streams,
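The four hunks above rewrite the delta labels shown next to the impact metrics. A small sketch of the same threshold logic as pure functions is given below; the helper names are illustrative, since in the diff the labels are assigned inline.

```python
# Threshold logic mirrored from the hunks above; helper names are
# illustrative, not part of the souschef API.
def risk_delta(risk_score: float) -> str:
    if risk_score > 7:
        return "High"
    if risk_score > 4:
        return "Medium"
    return "Low"


def timeline_delta(timeline_weeks: int) -> str:
    return "Increase" if timeline_weeks > 0 else "Unchanged"


def parallel_delta(parallel_streams: int) -> str:
    return "Multiple" if parallel_streams > 1 else "Single"


if __name__ == "__main__":
    print(risk_delta(8.2), timeline_delta(3), parallel_delta(1))
    # -> High Increase Single
```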
@@ -2379,15 +2482,18 @@ def _display_impact_analysis_section(analysis_result):
         )

     # Detailed impact breakdown
-    with st.expander("
+    with st.expander("Detailed Impact Analysis"):
         _display_detailed_impact_analysis(
             impact_analysis, dependencies, circular_deps, community_cookbooks
         )


 def _display_analysis_details_section(
-    analysis_result
-
+    analysis_result: str,
+    circular_deps: list[tuple[str, str]],
+    community_cookbooks: list[str],
+    direct_deps: int,
+) -> None:
     """Display analysis details section."""
     # Display analysis results
     st.subheader("Dependency Analysis Details")
@@ -2395,10 +2501,12 @@ def _display_analysis_details_section(
     _display_dependency_analysis_sections(analysis_result)

     # Migration recommendations
-    _display_migration_recommendations(
+    _display_migration_recommendations(
+        len(circular_deps), len(community_cookbooks), direct_deps
+    )


-def display_dependency_analysis_results():
+def display_dependency_analysis_results() -> None:
     """Display dependency analysis results."""
     analysis_result = st.session_state.dep_analysis_result
     cookbook_path = st.session_state.dep_cookbook_path
@@ -2430,8 +2538,8 @@ def display_dependency_analysis_results():
         depth,
         direct_deps,
         len(dependencies) if dependencies else 0,  # transitive_deps approximation
-        circular_deps,
-        community_cookbooks,
+        len(circular_deps),
+        len(community_cookbooks),
     )

@@ -2469,7 +2577,7 @@ def _collect_files_to_validate(input_path: str) -> list[Path]:
     return files_to_validate


-def _run_validation_engine(files_to_validate):
+def _run_validation_engine(files_to_validate: Sequence[Path]) -> list[Any]:
     """Run validation engine on a list of files."""
     from souschef.core.validation import (
         ValidationCategory,
@@ -2510,7 +2618,7 @@ def _run_validation_engine(files_to_validate):
     return all_results


-def _get_default_validation_path():
+def _get_default_validation_path() -> str:
     """Determine the default path for validation from session state."""
     default_path = ""
     if "converted_playbooks_path" in st.session_state:
@@ -2529,7 +2637,7 @@ def _get_default_validation_path():
     return default_path


-def _render_validation_options_ui():
+def _render_validation_options_ui() -> tuple[str, str]:
     """Render validation scope and format options."""
     col1, col2 = st.columns(2)

@@ -2560,7 +2668,7 @@ def _render_validation_options_ui():
     return sub_scope, sub_format


-def _render_validation_input_ui(default_path):
+def _render_validation_input_ui(default_path: str) -> str:
     """Render input source selection UI."""
     st.subheader("Input Source")

@@ -2590,7 +2698,7 @@ def _render_validation_input_ui(default_path):
     return input_path


-def _render_validation_settings_ui():
+def _render_validation_settings_ui() -> tuple[bool, bool, bool]:
     """Render strict mode and other validation settings."""
     st.subheader("Validation Options")

@@ -2652,7 +2760,7 @@ def _normalize_and_validate_input_path(input_path: str) -> Path | None:
     return path_obj


-def _handle_validation_execution(input_path, options):
+def _handle_validation_execution(input_path: str, options: Mapping[str, Any]) -> None:
     """Execute the validation process with progress tracking."""
     progress_tracker = ProgressTracker(
         total_steps=6, description="Running validation..."
@@ -2708,7 +2816,7 @@ def _handle_validation_execution(input_path, options):
         st.error(f"Error during validation: {e}")


-def show_validation_reports():
+def show_validation_reports() -> None:
     """Show validation reports and conversion validation."""
     st.header(NAV_VALIDATION_REPORTS)

@@ -2728,7 +2836,9 @@ def show_validation_reports():
     )

     # Validation button
-    if st.button(
+    if st.button(
+        "Run Validation", type="primary", width="stretch", key="run_validation"
+    ):
         if not input_path or not input_path.strip():
             st.error("Please enter a path to validate.")
             return
@@ -2748,7 +2858,7 @@ def show_validation_reports():
         display_validation_results()


-def _filter_results_by_scope(results, scope):
+def _filter_results_by_scope(results: list[Any], scope: str) -> list[Any]:
     """Filter validation results based on selected scope."""
     from souschef.core.validation import ValidationCategory

@@ -2769,7 +2879,7 @@ def _filter_results_by_scope(results, scope):
     return [r for r in results if r.category == target_category]


-def _parse_validation_metrics(validation_result):
+def _parse_validation_metrics(validation_result: str) -> tuple[int, int, int, int]:
     """Parse validation result to extract key metrics."""
     lines = validation_result.split("\n")

@@ -2805,7 +2915,9 @@ def _parse_validation_metrics(validation_result):
     return errors, warnings, passed, total_checks


-def _display_validation_summary_metrics(
+def _display_validation_summary_metrics(
+    errors: int, warnings: int, passed: int, total_checks: int
+) -> None:
     """Display validation summary metrics."""
     col1, col2, col3, col4 = st.columns(4)

@@ -2813,28 +2925,28 @@ def _display_validation_summary_metrics(errors, warnings, passed, total_checks):
         st.metric("Total Checks", total_checks)

     with col2:
-        st.metric("Passed", passed, delta="
+        st.metric("Passed", passed, delta="Pass" if passed > 0 else "")

     with col3:
-        st.metric("Warnings", warnings, delta="
+        st.metric("Warnings", warnings, delta="Warning" if warnings > 0 else "")

     with col4:
-        st.metric("Errors", errors, delta="
+        st.metric("Errors", errors, delta="Error" if errors > 0 else "")


-def _display_validation_status(errors, warnings):
+def _display_validation_status(errors: int, warnings: int) -> None:
     """Display overall validation status."""
     if errors > 0:
-        st.error("
+        st.error("**Validation Failed**: Critical issues found that need attention.")
     elif warnings > 0:
         st.warning(
-            "
+            "**Validation Passed with Warnings**: Review warnings before proceeding."
         )
     else:
-        st.success("
+        st.success("**Validation Passed**: All checks successful!")


-def _display_validation_sections(validation_result):
+def _display_validation_sections(validation_result: str) -> None:
     """Display validation results in expandable sections."""
     # Split results into sections
     sections = validation_result.split("\n## ")
@@ -2846,28 +2958,28 @@ def _display_validation_sections(validation_result):

         # Add expanders for different sections
         if "Syntax Validation" in section:
-            with st.expander("
+            with st.expander("Syntax Validation"):
                 st.markdown(section.replace("## Syntax Validation", ""))
         elif "Logic Validation" in section:
-            with st.expander("
+            with st.expander("Logic Validation"):
                 st.markdown(section.replace("## Logic Validation", ""))
         elif "Security Validation" in section:
-            with st.expander("
+            with st.expander("Security Validation"):
                 st.markdown(section.replace("## Security Validation", ""))
         elif "Performance Validation" in section:
-            with st.expander("
+            with st.expander("Performance Validation"):
                 st.markdown(section.replace("## Performance Validation", ""))
         elif SCOPE_BEST_PRACTICES in section:
-            with st.expander(f"
+            with st.expander(f"{SCOPE_BEST_PRACTICES}"):
                 st.markdown(section.replace(f"## {SCOPE_BEST_PRACTICES}", ""))
         elif "Recommendations" in section:
-            with st.expander("
+            with st.expander("Recommendations"):
                 st.markdown(section.replace("## Recommendations", ""))
         else:
            st.markdown(section)


-def _display_validation_action_items(errors, warnings):
+def _display_validation_action_items(errors: int, warnings: int) -> None:
     """Display action items based on validation results."""
     if errors > 0 or warnings > 0:
         st.subheader("Action Items")
@@ -2892,15 +3004,15 @@ def _display_validation_action_items(errors, warnings):


 def _display_validation_export_options(
-    validation_result,
-    input_path,
-    validation_type,
-    options,
-    errors,
-    warnings,
-    passed,
-    total_checks,
-):
+    validation_result: str,
+    input_path: str,
+    validation_type: str,
+    options: Mapping[str, Any],
+    errors: int,
+    warnings: int,
+    passed: int,
+    total_checks: int,
+) -> None:
     """Display export options for validation results."""
     st.subheader("Export Report")

@@ -2908,7 +3020,7 @@ def _display_validation_export_options(

     with col1:
         st.download_button(
-            label="
+            label="Download Full Report",
             data=validation_result,
             file_name="validation_report.md",
             mime=MIME_TEXT_MARKDOWN,
@@ -2940,7 +3052,7 @@ def _display_validation_export_options(
         import json

         st.download_button(
-            label="
+            label="Download JSON Summary",
             data=json.dumps(report_json, indent=2),
             file_name="validation_report.json",
             mime=MIME_APPLICATION_JSON,
@@ -2948,7 +3060,7 @@ def _display_validation_export_options(
         )


-def display_validation_results():
+def display_validation_results() -> None:
     """Display validation results."""
     validation_result = st.session_state.validation_result
     input_path = st.session_state.validation_path
@@ -2994,5 +3106,11 @@ def display_validation_results():
     )


+# UI code only when running under Streamlit
+if not os.environ.get("STREAMLIT_SERVER_PORT") and not os.environ.get(
+    "STREAMLIT_SERVER_HEADLESS"
+):
+    main()
+
 if __name__ == "__main__":
     main()
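The final hunk adds a module-level guard ahead of the usual `__main__` block: `main()` is also invoked at import time when neither `STREAMLIT_SERVER_PORT` nor `STREAMLIT_SERVER_HEADLESS` is present in the environment. A standalone sketch of that check follows; `is_streamlit_runtime` and the stub `main()` are illustrative names, not part of the package, and treating these variables as a deployment signal is the diff's own assumption.

```python
import os


def is_streamlit_runtime() -> bool:
    """Mirror the diff's check: either variable signals a Streamlit deployment."""
    return bool(
        os.environ.get("STREAMLIT_SERVER_PORT")
        or os.environ.get("STREAMLIT_SERVER_HEADLESS")
    )


def main() -> None:
    # Stub standing in for the real UI entry point.
    print("rendering SousChef UI")


# As in the hunk above, call main() only when the Streamlit server
# variables are absent.
if not is_streamlit_runtime():
    main()
```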