mcp-souschef 2.1.2__py3-none-any.whl → 2.5.3__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- {mcp_souschef-2.1.2.dist-info → mcp_souschef-2.5.3.dist-info}/METADATA +200 -19
- mcp_souschef-2.5.3.dist-info/RECORD +38 -0
- mcp_souschef-2.5.3.dist-info/entry_points.txt +4 -0
- souschef/assessment.py +531 -180
- souschef/ci/__init__.py +11 -0
- souschef/ci/github_actions.py +379 -0
- souschef/ci/gitlab_ci.py +299 -0
- souschef/ci/jenkins_pipeline.py +343 -0
- souschef/cli.py +691 -1
- souschef/converters/playbook.py +43 -5
- souschef/converters/resource.py +146 -49
- souschef/core/__init__.py +22 -0
- souschef/core/errors.py +275 -0
- souschef/core/validation.py +35 -2
- souschef/deployment.py +414 -100
- souschef/filesystem/operations.py +0 -7
- souschef/parsers/__init__.py +6 -1
- souschef/parsers/habitat.py +35 -6
- souschef/parsers/inspec.py +415 -52
- souschef/parsers/metadata.py +89 -23
- souschef/profiling.py +568 -0
- souschef/server.py +948 -255
- souschef/ui/__init__.py +8 -0
- souschef/ui/app.py +1837 -0
- souschef/ui/pages/cookbook_analysis.py +425 -0
- mcp_souschef-2.1.2.dist-info/RECORD +0 -29
- mcp_souschef-2.1.2.dist-info/entry_points.txt +0 -4
- {mcp_souschef-2.1.2.dist-info → mcp_souschef-2.5.3.dist-info}/WHEEL +0 -0
- {mcp_souschef-2.1.2.dist-info → mcp_souschef-2.5.3.dist-info}/licenses/LICENSE +0 -0
souschef/ui/app.py
ADDED
|
@@ -0,0 +1,1837 @@
|
|
|
1
|
+
"""
|
|
2
|
+
Visual Migration Planning Interface for SousChef.
|
|
3
|
+
|
|
4
|
+
A Streamlit-based web interface for Chef to Ansible migration planning,
|
|
5
|
+
assessment, and visualization.
|
|
6
|
+
"""
|
|
7
|
+
|
|
8
|
+
import contextlib
|
|
9
|
+
import sys
|
|
10
|
+
from pathlib import Path
|
|
11
|
+
|
|
12
|
+
import streamlit as st
|
|
13
|
+
|
|
14
|
+
# Add the parent directory to the path so we can import souschef modules
|
|
15
|
+
sys.path.insert(0, str(Path(__file__).parent.parent))
|
|
16
|
+
|
|
17
|
+
# Import page modules
|
|
18
|
+
from souschef.ui.pages.cookbook_analysis import show_cookbook_analysis_page
|
|
19
|
+
|
|
20
|
+
# Constants for repeated strings
NAV_MIGRATION_PLANNING = "Migration Planning"  # sidebar navigation entry and page header
NAV_DEPENDENCY_MAPPING = "Dependency Mapping"  # sidebar navigation entry and page header
NAV_VALIDATION_REPORTS = "Validation Reports"  # sidebar navigation entry
MIME_TEXT_MARKDOWN = "text/markdown"  # MIME type for markdown download buttons
MIME_APPLICATION_JSON = "application/json"  # MIME type for JSON downloads — presumably used later in this file; not referenced in this chunk
SECTION_CIRCULAR_DEPENDENCIES = "Circular Dependencies"  # report section title — presumably used later in this file; not referenced in this chunk
|
|
27
|
+
|
|
28
|
+
|
|
29
|
+
class ProgressTracker:
    """Drive a Streamlit progress bar plus a status line for long operations."""

    def __init__(self, total_steps=100, description="Processing..."):
        # Widgets are created eagerly so they appear as soon as the
        # tracker is constructed.
        self.total_steps = total_steps
        self.current_step = 0
        self.description = description
        self.progress_bar = st.progress(0)
        self.status_text = st.empty()

    def update(self, step=None, description=None):
        """Advance to *step* (or by one when omitted), optionally relabeling."""
        next_step = self.current_step + 1 if step is None else step
        self.current_step = min(next_step, self.total_steps)

        if description:
            self.description = description

        fraction = min(self.current_step / self.total_steps, 1.0)
        self.progress_bar.progress(fraction)
        label = f"{self.description} ({self.current_step}/{self.total_steps})"
        self.status_text.text(label)

    def complete(self, message="Completed!"):
        """Fill the bar, show *message*, and pause briefly so it is visible."""
        import time

        self.progress_bar.progress(1.0)
        self.status_text.text(message)
        time.sleep(0.5)  # Brief pause to show completion

    def close(self):
        """Remove the progress widgets from the page."""
        self.progress_bar.empty()
        self.status_text.empty()
|
|
67
|
+
|
|
68
|
+
|
|
69
|
+
def with_progress_tracking(
    operation_func, description="Processing...", total_steps=100
):
    """Wrap *operation_func* so it receives a ProgressTracker as its first argument.

    The tracker is created when the wrapper is called, marked complete on
    success, and always cleaned up (the widgets are removed) afterwards.

    Fixes over the previous version:
    - ``raise e`` replaced by a bare ``raise`` so the original traceback is
      re-raised unmodified;
    - the ``except`` branch's ``tracker.close()`` was redundant (``finally``
      already closes), so the branch is removed entirely;
    - ``functools.wraps`` preserves the wrapped function's metadata.
    """
    from functools import wraps

    @wraps(operation_func)
    def wrapper(*args, **kwargs):
        tracker = ProgressTracker(total_steps, description)
        try:
            result = operation_func(tracker, *args, **kwargs)
            tracker.complete()
            return result
        finally:
            # Runs on success and on error; exceptions propagate unchanged.
            tracker.close()

    return wrapper
|
|
87
|
+
|
|
88
|
+
|
|
89
|
+
def main():
    """Run the main Streamlit application."""
    st.set_page_config(
        page_title="SousChef - Chef to Ansible Migration",
        layout="wide",
        initial_sidebar_state="expanded",
    )

    st.title("SousChef - Visual Migration Planning")
    st.markdown("*AI-powered Chef to Ansible migration planning interface*")

    # Sidebar navigation
    page = st.sidebar.selectbox(
        "Navigation",
        [
            "Dashboard",
            "Cookbook Analysis",
            NAV_MIGRATION_PLANNING,
            NAV_DEPENDENCY_MAPPING,
            NAV_VALIDATION_REPORTS,
        ],
        help="Choose the section you want to work with. "
        "Use arrow keys to navigate options.",
        key="main_navigation",
    )

    # Dispatch the selected page to its renderer.
    page_renderers = {
        "Dashboard": show_dashboard,
        "Cookbook Analysis": show_cookbook_analysis_page,
        NAV_MIGRATION_PLANNING: show_migration_planning,
        NAV_DEPENDENCY_MAPPING: show_dependency_mapping,
        NAV_VALIDATION_REPORTS: show_validation_reports,
    }
    renderer = page_renderers.get(page)
    if renderer is not None:
        renderer()
|
|
126
|
+
|
|
127
|
+
|
|
128
|
+
def show_dashboard():
    """Show the main dashboard with migration overview."""
    st.header("Migration Dashboard")

    # Overview metrics: (label, value, delta, caption) per column.
    metric_specs = (
        ("Cookbooks Analyzed", "0", "Ready to analyze", "Total cookbooks processed"),
        (
            "Migration Complexity",
            "Unknown",
            "Assessment needed",
            "Overall migration effort",
        ),
        ("Conversion Rate", "0%", "Start migration", "Successful conversions"),
    )
    for column, (label, value, delta, caption) in zip(st.columns(3), metric_specs):
        with column:
            st.metric(label, value, delta)
            st.caption(caption)

    st.divider()

    # Quick actions
    st.subheader("Quick Actions")

    action_left, action_right = st.columns(2)

    with action_left:
        if st.button(
            "Analyze Cookbook Directory", type="primary", use_container_width=True
        ):
            st.rerun()  # Re-run the script; navigation state selects the page.

    with action_right:
        if st.button(
            "Generate Migration Plan", type="secondary", use_container_width=True
        ):
            st.rerun()  # Re-run the script; navigation state selects the page.

    # Recent activity
    st.subheader("Recent Activity")
    st.info("No recent migration activity. Start by analyzing your cookbooks!")
|
|
168
|
+
|
|
169
|
+
|
|
170
|
+
def show_migration_planning():
    """Render the interactive migration-planning wizard."""
    st.header(NAV_MIGRATION_PLANNING)

    # Imported lazily to keep app start-up light.
    from souschef.assessment import generate_migration_plan

    st.markdown("""
    Plan your Chef-to-Ansible migration with this interactive wizard.
    Get detailed timelines, effort estimates, and risk assessments.
    """)

    # --- Step 1: cookbook selection -------------------------------------
    st.subheader("Step 1: Cookbook Selection")

    paths_col, examples_col = st.columns([3, 1])

    with paths_col:
        cookbook_paths = st.text_area(
            "Cookbook Paths",
            placeholder="/path/to/cookbooks/nginx,/path/to/cookbooks/apache2,/path/to/cookbooks/mysql",
            help="Enter comma-separated paths to your Chef cookbooks",
            height=100,
        )

    with examples_col:
        quick_select = st.selectbox(
            "Quick Examples",
            ["", "Single Cookbook", "Multiple Cookbooks", "Full Migration"],
            help="Load example cookbook configurations",
        )

    # A selected example overrides whatever was typed in the text area.
    example_paths = {
        "Single Cookbook": "/path/to/cookbooks/nginx",
        "Multiple Cookbooks": (
            "/path/to/cookbooks/nginx,/path/to/cookbooks/apache2,"
            "/path/to/cookbooks/mysql"
        ),
        "Full Migration": (
            "/path/to/cookbooks/nginx,/path/to/cookbooks/apache2,"
            "/path/to/cookbooks/mysql,/path/to/cookbooks/postgresql,"
            "/path/to/cookbooks/redis"
        ),
    }
    if quick_select in example_paths:
        cookbook_paths = example_paths[quick_select]

    # --- Step 2: migration strategy -------------------------------------
    st.subheader("Step 2: Migration Strategy")

    strategy_col, timeline_col = st.columns(2)

    with strategy_col:
        migration_strategy = st.selectbox(
            "Migration Approach",
            ["phased", "big_bang", "parallel"],
            help="Choose your migration strategy",
            format_func=lambda x: {
                "phased": "Phased Migration (Recommended)",
                "big_bang": "Big Bang Migration",
                "parallel": "Parallel Migration",
            }.get(x, str(x)),
        )

    with timeline_col:
        timeline_weeks = st.slider(
            "Timeline (Weeks)",
            min_value=4,
            max_value=24,
            value=12,
            help="Target timeline for migration completion",
        )

    strategy_descriptions = {
        "phased": """
        **Phased Migration** - Migrate cookbooks in stages based on complexity
        and dependencies.
        - Lower risk with incremental progress
        - Easier rollback if issues occur
        - Longer timeline but more controlled
        - Recommended for most organizations
        """,
        "big_bang": """
        **Big Bang Migration** - Convert all cookbooks simultaneously and deploy
        at once.
        - Faster overall timeline
        - Higher risk and coordination required
        - Requires comprehensive testing
        - Best for small, well-understood environments
        """,
        "parallel": """
        **Parallel Migration** - Run Chef and Ansible side-by-side during transition.
        - Zero downtime possible
        - Most complex to manage
        - Requires dual maintenance
        - Best for critical production systems
        """,
    }

    with st.expander("Strategy Details"):
        st.markdown(strategy_descriptions.get(migration_strategy, ""))

    # --- Step 3: generate the plan --------------------------------------
    st.subheader("Step 3: Generate Migration Plan")

    if st.button("Generate Migration Plan", type="primary", use_container_width=True):
        if not cookbook_paths.strip():
            st.error("Please enter cookbook paths to generate a migration plan.")
            return

        progress_tracker = ProgressTracker(
            total_steps=7, description="Generating migration plan..."
        )

        try:
            progress_tracker.update(1, "Scanning cookbook directories...")

            plan_result = generate_migration_plan(
                cookbook_paths.strip(), migration_strategy, timeline_weeks
            )

            # Cosmetic progress steps shown after the plan call returns.
            for step, label in (
                (2, "Analyzing cookbook complexity..."),
                (3, "Assessing migration risks..."),
                (4, "Calculating resource requirements..."),
                (5, "Generating timeline estimates..."),
                (6, "Creating migration phases..."),
            ):
                progress_tracker.update(step, label)

            # Persist everything in session state so results survive reruns.
            st.session_state.migration_plan = plan_result
            st.session_state.cookbook_paths = cookbook_paths.strip()
            st.session_state.strategy = migration_strategy
            st.session_state.timeline = timeline_weeks

            progress_tracker.complete("Migration plan generated!")
            st.success("Migration plan generated successfully!")
            st.rerun()

        except Exception as e:
            progress_tracker.close()
            st.error(f"Error generating migration plan: {e}")
            return

    # Show previously generated results, if any.
    if "migration_plan" in st.session_state:
        display_migration_plan_results()
|
|
319
|
+
|
|
320
|
+
|
|
321
|
+
def _display_migration_summary_metrics(cookbook_paths, strategy, timeline):
    """Display migration overview summary metrics."""
    st.subheader("Migration Overview")

    # One (label, value) pair per column.
    summary = (
        ("Cookbooks", len(cookbook_paths.split(","))),
        ("Strategy", strategy.replace("_", " ").title()),
        ("Timeline", f"{timeline} weeks"),
        ("Status", "Plan Generated"),
    )
    for column, (label, value) in zip(st.columns(4), summary):
        with column:
            st.metric(label, value)
|
|
339
|
+
|
|
340
|
+
|
|
341
|
+
def _display_migration_plan_details(plan_result):
    """Render each section of the generated markdown plan."""
    st.subheader("Migration Plan Details")

    # Known level-2 headers are demoted one level so they nest under the
    # Streamlit subheader above.
    header_demotions = {
        "## Executive Summary": "### Executive Summary",
        "## Migration Phases": "### Migration Phases",
        "## Timeline": "### Timeline",
        "## Team Requirements": "### Team Requirements",
    }

    # The plan is a single markdown document; split on level-2 headers.
    for chunk in plan_result.split("\n## "):
        if not chunk.strip():
            continue
        if not chunk.startswith("#"):
            chunk = "## " + chunk
        for old_header, new_header in header_demotions.items():
            chunk = chunk.replace(old_header, new_header)
        st.markdown(chunk)
|
|
360
|
+
|
|
361
|
+
|
|
362
|
+
def _display_migration_action_buttons(cookbook_paths):
    """Display action buttons for next steps."""
    st.subheader("Next Steps")

    report_col, deps_col, export_col = st.columns(3)

    with report_col:
        if st.button("📊 Generate Detailed Report", use_container_width=True):
            with st.spinner("Generating detailed migration report..."):
                try:
                    from souschef.assessment import generate_migration_report

                    detailed = generate_migration_report(
                        "assessment_complete", "executive", "yes"
                    )
                    st.session_state.detailed_report = detailed
                    st.success("Detailed report generated!")
                except Exception as e:
                    st.error(f"Error generating report: {e}")

    with deps_col:
        if st.button("🔍 Analyze Dependencies", use_container_width=True):
            paths = cookbook_paths.split(",")
            if len(paths) != 1:
                st.info(
                    "Dependency analysis is optimized for single cookbooks. "
                    "Select one cookbook path for detailed analysis."
                )
            else:
                # Single cookbook: run the detailed dependency analysis.
                single_path = paths[0].strip()
                with st.spinner(f"Analyzing dependencies for {single_path}..."):
                    try:
                        from souschef.assessment import analyze_cookbook_dependencies

                        analysis = analyze_cookbook_dependencies(single_path)
                        st.session_state.dep_analysis = analysis
                        st.success("Dependency analysis complete!")
                    except Exception as e:
                        st.error(f"Error analyzing dependencies: {e}")

    with export_col:
        if st.button("📥 Export Plan", use_container_width=True):
            # Assemble a downloadable markdown document from session state.
            plan_content = f"""# Chef to Ansible Migration Plan
Generated: {st.session_state.get("timestamp", "Unknown")}

## Configuration
- Cookbook Paths: {cookbook_paths}
- Strategy: {st.session_state.strategy}
- Timeline: {st.session_state.timeline} weeks

## Migration Plan
{st.session_state.migration_plan}
"""

            st.download_button(
                label="Download Migration Plan",
                data=plan_content,
                file_name="migration_plan.md",
                mime=MIME_TEXT_MARKDOWN,
                help="Download the complete migration plan as Markdown",
            )
|
|
424
|
+
|
|
425
|
+
|
|
426
|
+
def _display_additional_reports():
    """Show the detailed report and dependency analysis expanders when present."""
    optional_sections = (
        ("detailed_report", "📊 Detailed Migration Report"),
        ("dep_analysis", "🔍 Dependency Analysis"),
    )
    for state_key, title in optional_sections:
        if state_key in st.session_state:
            with st.expander(title):
                st.markdown(st.session_state[state_key])
|
|
437
|
+
|
|
438
|
+
|
|
439
|
+
def display_migration_plan_results():
    """Display the generated migration plan results."""
    state = st.session_state

    _display_migration_summary_metrics(
        state.cookbook_paths, state.strategy, state.timeline
    )
    _display_migration_plan_details(state.migration_plan)
    _display_migration_action_buttons(state.cookbook_paths)
    _display_additional_reports()
|
|
450
|
+
|
|
451
|
+
|
|
452
|
+
def show_dependency_mapping():
    """Render the dependency-mapping page.

    NOTE(review): this inline implementation duplicates the helper chain
    (_setup_dependency_mapping_ui / _get_dependency_mapping_inputs /
    _perform_dependency_analysis) defined later in this file — consider
    consolidating on one of the two.
    """
    st.header(NAV_DEPENDENCY_MAPPING)

    # Imported lazily to keep app start-up light.
    from souschef.assessment import analyze_cookbook_dependencies

    st.markdown("""
    Visualize and analyze cookbook dependencies to understand migration order
    and identify potential circular dependencies.
    """)

    # Cookbook path input
    cookbook_path = st.text_input(
        "Cookbook Directory Path",
        placeholder="/path/to/your/cookbooks",
        help="Enter the path to your cookbooks directory for dependency analysis",
    )

    depth_labels = {
        "direct": "Direct Dependencies Only",
        "transitive": "Include Transitive Dependencies",
        "full": "Full Dependency Graph",
    }
    viz_labels = {
        "text": "Text Summary",
        "graph": "Static Graph View",
        "interactive": "Interactive Graph",
    }

    depth_col, viz_col = st.columns(2)

    with depth_col:
        dependency_depth = st.selectbox(
            "Analysis Depth",
            ["direct", "transitive", "full"],
            help="How deep to analyze dependencies",
            format_func=lambda value: depth_labels.get(value, str(value)),
        )

    with viz_col:
        visualization_type = st.selectbox(
            "Visualization",
            ["text", "graph", "interactive"],
            help="How to display dependency information",
            format_func=lambda value: viz_labels.get(value, str(value)),
        )

    if st.button("Analyze Dependencies", type="primary", use_container_width=True):
        if not cookbook_path.strip():
            st.error("Please enter a cookbook directory path.")
            return

        tracker = ProgressTracker(
            total_steps=5, description="Analyzing cookbook dependencies..."
        )

        try:
            tracker.update(1, "Scanning cookbook directory...")

            analysis_result = analyze_cookbook_dependencies(
                cookbook_path.strip(), dependency_depth
            )

            # Cosmetic progress steps shown after the analysis call returns.
            for step, label in (
                (2, "Parsing dependency relationships..."),
                (3, "Detecting circular dependencies..."),
                (4, "Generating migration recommendations..."),
            ):
                tracker.update(step, label)

            # Persist results in session state so they survive reruns.
            st.session_state.dep_analysis_result = analysis_result
            st.session_state.dep_cookbook_path = cookbook_path.strip()
            st.session_state.dep_depth = dependency_depth
            st.session_state.dep_viz_type = visualization_type

            tracker.complete("Dependency analysis completed!")
            st.success("Analysis completed successfully!")
            st.rerun()

        except Exception as e:
            tracker.close()
            st.error(f"Error analyzing dependencies: {e}")
            return

    # Show previously computed results, if any.
    if "dep_analysis_result" in st.session_state:
        display_dependency_analysis_results()
|
|
539
|
+
|
|
540
|
+
|
|
541
|
+
def _setup_dependency_mapping_ui():
    """Render the header and intro text for the dependency-mapping page."""
    st.header(NAV_DEPENDENCY_MAPPING)

    st.markdown("""
    Visualize and analyze cookbook dependencies to understand migration order
    and identify potential circular dependencies.
    """)
|
|
549
|
+
|
|
550
|
+
|
|
551
|
+
def _get_dependency_mapping_inputs():
    """Collect the path, analysis depth, and visualization choices from the user.

    Returns:
        Tuple of (cookbook_path, dependency_depth, visualization_type).
    """
    cookbook_path = st.text_input(
        "Cookbook Directory Path",
        placeholder="/path/to/your/cookbooks",
        help="Enter the path to your cookbooks directory for dependency analysis",
    )

    # Human-readable labels for the selectbox option keys.
    depth_labels = {
        "direct": "Direct Dependencies Only",
        "transitive": "Include Transitive Dependencies",
        "full": "Full Dependency Graph",
    }
    viz_labels = {
        "text": "Text Summary",
        "graph": "Static Graph View",
        "interactive": "Interactive Graph",
    }

    depth_col, viz_col = st.columns(2)

    with depth_col:
        dependency_depth = st.selectbox(
            "Analysis Depth",
            ["direct", "transitive", "full"],
            help="How deep to analyze dependencies",
            format_func=lambda value: depth_labels.get(value, str(value)),
        )

    with viz_col:
        visualization_type = st.selectbox(
            "Visualization",
            ["text", "graph", "interactive"],
            help="How to display dependency information",
            format_func=lambda value: viz_labels.get(value, str(value)),
        )

    return cookbook_path, dependency_depth, visualization_type
|
|
588
|
+
|
|
589
|
+
|
|
590
|
+
def _handle_dependency_analysis_execution(
    cookbook_path, dependency_depth, visualization_type
):
    """Run the dependency analysis when the user presses the button."""
    if not st.button("Analyze Dependencies", type="primary", use_container_width=True):
        return

    trimmed_path = cookbook_path.strip()
    if not trimmed_path:
        st.error("Please enter a cookbook directory path.")
        return

    _perform_dependency_analysis(trimmed_path, dependency_depth, visualization_type)
|
|
603
|
+
|
|
604
|
+
|
|
605
|
+
def _perform_dependency_analysis(cookbook_path, dependency_depth, visualization_type):
    """Run the dependency analysis and stash the results in session state."""
    # Imported lazily to keep app start-up light.
    from souschef.assessment import analyze_cookbook_dependencies

    tracker = ProgressTracker(
        total_steps=5, description="Analyzing cookbook dependencies..."
    )

    try:
        tracker.update(1, "Scanning cookbook directory...")

        result = analyze_cookbook_dependencies(cookbook_path, dependency_depth)

        # Cosmetic progress steps shown after the analysis call returns.
        for step, label in (
            (2, "Parsing dependency relationships..."),
            (3, "Detecting circular dependencies..."),
            (4, "Generating migration recommendations..."),
        ):
            tracker.update(step, label)

        # Persist results so they survive Streamlit reruns.
        st.session_state.dep_analysis_result = result
        st.session_state.dep_cookbook_path = cookbook_path
        st.session_state.dep_depth = dependency_depth
        st.session_state.dep_viz_type = visualization_type

        tracker.complete("Dependency analysis completed!")
        st.success("Analysis completed successfully!")
        st.rerun()

    except Exception as e:
        # NOTE(review): st.rerun() signals via an exception; a broad
        # `except Exception` here may intercept it — confirm against
        # Streamlit's rerun semantics.
        tracker.close()
        st.error(f"Error analyzing dependencies: {e}")
|
|
638
|
+
|
|
639
|
+
|
|
640
|
+
def _display_dependency_analysis_results_if_available():
    """Render stored dependency-analysis results, if any exist."""
    if "dep_analysis_result" not in st.session_state:
        return
    display_dependency_analysis_results()
|
|
645
|
+
|
|
646
|
+
|
|
647
|
+
def _extract_dependency_relationships(lines):
|
|
648
|
+
"""Extract dependency relationships from analysis lines."""
|
|
649
|
+
dependencies = {}
|
|
650
|
+
current_section = None
|
|
651
|
+
|
|
652
|
+
for line in lines:
|
|
653
|
+
line = line.strip()
|
|
654
|
+
if "Direct Dependencies:" in line:
|
|
655
|
+
current_section = "direct"
|
|
656
|
+
elif "Transitive Dependencies:" in line:
|
|
657
|
+
current_section = "transitive"
|
|
658
|
+
elif line.startswith("- ") and current_section in ["direct", "transitive"]:
|
|
659
|
+
# Regular dependencies
|
|
660
|
+
dep_text = line[2:].strip()
|
|
661
|
+
if ":" in dep_text:
|
|
662
|
+
parts = dep_text.split(":", 1)
|
|
663
|
+
cookbook = parts[0].strip()
|
|
664
|
+
deps = parts[1].strip()
|
|
665
|
+
if deps and deps != "None":
|
|
666
|
+
dep_list = [d.strip() for d in deps.split(",")]
|
|
667
|
+
dependencies[cookbook] = dep_list
|
|
668
|
+
|
|
669
|
+
return dependencies
|
|
670
|
+
|
|
671
|
+
|
|
672
|
+
def _extract_circular_and_community_deps(lines):
    """Collect circular-dependency pairs and community cookbook names."""
    circular_deps: list[tuple[str, str]] = []
    community_cookbooks: list[str] = []
    section = None

    for line in lines:
        section = _update_current_section(line, section)
        if section and _is_list_item(line):
            _process_list_item(line, section, circular_deps, community_cookbooks)

    return circular_deps, community_cookbooks
|
|
686
|
+
|
|
687
|
+
|
|
688
|
+
def _update_current_section(line, current_section):
|
|
689
|
+
"""Update the current section based on the line content."""
|
|
690
|
+
line = line.strip()
|
|
691
|
+
if "Circular Dependencies:" in line:
|
|
692
|
+
return "circular"
|
|
693
|
+
elif "Community Cookbooks:" in line:
|
|
694
|
+
return "community"
|
|
695
|
+
return current_section
|
|
696
|
+
|
|
697
|
+
|
|
698
|
+
def _is_list_item(line):
|
|
699
|
+
"""Check if the line is a list item."""
|
|
700
|
+
return line.strip().startswith("- ")
|
|
701
|
+
|
|
702
|
+
|
|
703
|
+
def _process_list_item(line, current_section, circular_deps, community_cookbooks):
    """Dispatch a bullet line to the handler for the active section."""
    handlers = {
        "circular": lambda: _process_circular_dependency_item(line, circular_deps),
        "community": lambda: _process_community_cookbook_item(
            line, community_cookbooks
        ),
    }
    handler = handlers.get(current_section)
    if handler is not None:
        handler()
|
|
709
|
+
|
|
710
|
+
|
|
711
|
+
def _process_circular_dependency_item(line, circular_deps):
|
|
712
|
+
"""Process a circular dependency list item."""
|
|
713
|
+
dep_text = line[2:].strip()
|
|
714
|
+
if "->" in dep_text:
|
|
715
|
+
parts = dep_text.split("->")
|
|
716
|
+
if len(parts) >= 2:
|
|
717
|
+
circular_deps.append((parts[0].strip(), parts[1].strip()))
|
|
718
|
+
|
|
719
|
+
|
|
720
|
+
def _process_community_cookbook_item(line, community_cookbooks):
|
|
721
|
+
"""Process a community cookbook list item."""
|
|
722
|
+
cookbook = line[2:].strip()
|
|
723
|
+
if cookbook:
|
|
724
|
+
community_cookbooks.append(cookbook)
|
|
725
|
+
|
|
726
|
+
|
|
727
|
+
def _parse_dependency_analysis(analysis_result):
    """Split the raw analysis text into structured dependency data.

    Returns:
        Tuple of (dependencies dict, circular-dependency pairs,
        community cookbook names).
    """
    analysis_lines = analysis_result.split("\n")

    deps = _extract_dependency_relationships(analysis_lines)
    circular, community = _extract_circular_and_community_deps(analysis_lines)

    return deps, circular, community
|
|
735
|
+
|
|
736
|
+
|
|
737
|
+
def _create_networkx_graph(dependencies, circular_deps, community_cookbooks):
    """Build a directed NetworkX graph from parsed dependency data."""
    import networkx as nx

    graph: nx.DiGraph = nx.DiGraph()

    # One node per cookbook and per dependency, with an edge between them.
    for cookbook, deps in dependencies.items():
        graph.add_node(cookbook, node_type="cookbook")
        for dependency in deps:
            graph.add_node(dependency, node_type="dependency")
            graph.add_edge(cookbook, dependency)

    # Circular edges carry a flag so rendering can style them differently.
    for source, target in circular_deps:
        graph.add_edge(source, target, circular=True)

    # Tag nodes that correspond to community cookbooks.
    for name in community_cookbooks:
        if name in graph.nodes:
            graph.nodes[name]["community"] = True

    return graph
|
|
760
|
+
|
|
761
|
+
|
|
762
|
+
def _calculate_graph_positions(graph, layout_algorithm):
    """Resolve 'auto' to a concrete layout and compute node positions.

    Returns:
        Tuple of (positions mapping, resolved layout algorithm name).
    """
    import networkx as nx

    # Resolve "auto" from the graph size: richer layouts for small graphs,
    # a cheap circular layout for large ones.
    node_count = len(graph.nodes)
    if layout_algorithm == "auto":
        if node_count < 10:
            layout_algorithm = "spring"
        elif node_count < 50:
            layout_algorithm = "kamada_kawai"
        else:
            layout_algorithm = "circular"

    if layout_algorithm == "circular":
        positions = nx.circular_layout(graph)
    elif layout_algorithm == "kamada_kawai":
        try:
            positions = nx.kamada_kawai_layout(graph)
        except Exception:
            # kamada_kawai can raise for some graphs; degrade to spring layout.
            positions = nx.spring_layout(graph, k=2, iterations=50)
    else:
        # "spring" and any unrecognized value both use the spring layout.
        positions = nx.spring_layout(graph, k=2, iterations=50)

    return positions, layout_algorithm
|
|
791
|
+
|
|
792
|
+
|
|
793
|
+
def _create_plotly_edge_traces(graph, pos):
    """Build Plotly line traces for regular and circular dependency edges.

    Regular edges are drawn as thin grey lines; edges flagged with the
    ``circular`` attribute become a separate thicker red trace.
    """
    import plotly.graph_objects as go  # type: ignore[import-untyped]

    def _edge_coords(circular_only):
        # Collect x/y coordinate runs; each segment is separated by None
        # so Plotly draws disconnected line pieces in one trace.
        xs = []
        ys = []
        for src, dst in graph.edges():
            if bool(graph.edges[src, dst].get("circular", False)) != circular_only:
                continue
            x0, y0 = pos[src]
            x1, y1 = pos[dst]
            xs.extend([x0, x1, None])
            ys.extend([y0, y1, None])
        return xs, ys

    traces = []

    # Plain dependency edges.
    plain_x, plain_y = _edge_coords(False)
    if plain_x:
        traces.append(
            go.Scatter(
                x=plain_x,
                y=plain_y,
                line={"width": 2, "color": "#888"},
                hoverinfo="none",
                mode="lines",
                name="Dependencies",
            )
        )

    # Circular dependency edges, highlighted in red.
    circ_x, circ_y = _edge_coords(True)
    if circ_x:
        traces.append(
            go.Scatter(
                x=circ_x,
                y=circ_y,
                line={"width": 3, "color": "red"},
                hoverinfo="none",
                mode="lines",
                name=SECTION_CIRCULAR_DEPENDENCIES,
            )
        )

    return traces
def _create_plotly_node_trace(graph, pos):
    """Build the Plotly marker/text trace for all graph nodes.

    Node size scales with degree (clamped to 15-30) and colour encodes
    the node's role: green for community cookbooks, red when it is the
    target of a circular edge, blue when it has incoming dependencies,
    grey otherwise.
    """
    import plotly.graph_objects as go

    xs = []
    ys = []
    labels = []
    colors = []
    sizes = []

    for node in graph.nodes():
        px, py = pos[node]
        xs.append(px)
        ys.append(py)
        labels.append(node)

        # Clamp degree-based sizing into the 15..30 range.
        sizes.append(max(15, min(30, 15 + graph.degree(node) * 2)))

        if graph.nodes[node].get("community", False):
            colors.append("lightgreen")  # Community cookbooks
        elif any(
            graph.edges[edge].get("circular", False)
            for edge in graph.in_edges(node)
            if edge[1] == node
        ):
            colors.append("red")  # Involved in circular deps
        elif graph.in_degree(node) > 0:
            colors.append("lightblue")  # Has dependencies
        else:
            colors.append("lightgray")  # Leaf dependencies

    return go.Scatter(
        x=xs,
        y=ys,
        mode="markers+text",
        hoverinfo="text",
        text=labels,
        textposition="top center",
        marker={
            "size": sizes,
            "color": colors,
            "line_width": 2,
            "line_color": "darkgray",
        },
        name="Cookbooks",
    )
def _create_plotly_figure_layout(num_nodes, layout_algorithm):
    """Create the Plotly layout for the dependency graph figure.

    Args:
        num_nodes: Number of nodes shown (included in the title).
        layout_algorithm: Name of the layout algorithm used (for the title).

    Returns:
        A ``go.Layout`` with axes hidden, white background, and a
        descriptive title.
    """
    import plotly.graph_objects as go

    return go.Layout(
        # ``titlefont_size`` is a deprecated alias in plotly; set the
        # title text and font size through the nested ``title`` object.
        title={
            "text": f"Cookbook Dependency Graph ({num_nodes} nodes, "
            f"{layout_algorithm} layout)",
            "font": {"size": 16},
        },
        showlegend=True,
        hovermode="closest",
        margin={"b": 20, "l": 5, "r": 5, "t": 40},
        xaxis={
            "showgrid": False,
            "zeroline": False,
            "showticklabels": False,
        },
        yaxis={
            "showgrid": False,
            "zeroline": False,
            "showticklabels": False,
        },
        plot_bgcolor="white",
    )
def _create_interactive_plotly_graph(graph, pos, num_nodes, layout_algorithm):
    """Assemble the interactive Plotly figure from edge and node traces."""
    import plotly.graph_objects as go

    # Edge traces are drawn first so node markers render on top.
    traces = [
        *_create_plotly_edge_traces(graph, pos),
        _create_plotly_node_trace(graph, pos),
    ]
    return go.Figure(
        data=traces,
        layout=_create_plotly_figure_layout(num_nodes, layout_algorithm),
    )
def _create_static_matplotlib_graph(graph, pos, num_nodes, layout_algorithm):
    """Create static matplotlib graph visualization.

    Args:
        graph: NetworkX DiGraph with optional ``circular`` edge attributes
            and ``community`` node attributes.
        pos: Mapping of node -> (x, y) coordinates.
        num_nodes: Number of nodes (used in the title).
        layout_algorithm: Layout name (used in the title).

    Returns:
        The current matplotlib figure containing the rendered graph.
    """
    import matplotlib.pyplot as plt
    import networkx as nx  # hoisted: was imported separately in each branch

    plt.figure(figsize=(12, 8))

    # Draw regular edges
    regular_edges = [
        (u, v) for u, v, d in graph.edges(data=True) if not d.get("circular", False)
    ]
    if regular_edges:
        nx.draw_networkx_edges(
            graph,
            pos,
            edgelist=regular_edges,
            edge_color="gray",
            arrows=True,
            arrowsize=20,
            width=2,
            alpha=0.7,
        )

    # Draw circular dependency edges (dashed red for visibility)
    circular_edges = [
        (u, v) for u, v, d in graph.edges(data=True) if d.get("circular", False)
    ]
    if circular_edges:
        nx.draw_networkx_edges(
            graph,
            pos,
            edgelist=circular_edges,
            edge_color="red",
            arrows=True,
            arrowsize=25,
            width=3,
            alpha=0.9,
            style="dashed",
        )

    # Colour nodes by role (same scheme as the interactive view).
    node_colors = []
    for node in graph.nodes():
        if graph.nodes[node].get("community", False):
            node_colors.append("lightgreen")  # Community cookbooks
        elif any(
            graph.edges[edge].get("circular", False)
            for edge in graph.in_edges(node)
            if edge[1] == node
        ):
            node_colors.append("red")  # Involved in circular deps
        elif graph.in_degree(node) > 0:
            node_colors.append("lightblue")  # Has dependencies
        else:
            node_colors.append("lightgray")  # Leaf dependencies

    # Node size scales with connectivity, clamped to 300..1200.
    node_sizes = [
        max(300, min(1200, 300 + graph.degree(node) * 100)) for node in graph.nodes()
    ]

    nx.draw_networkx_nodes(
        graph,
        pos,
        node_color=node_colors,
        node_size=node_sizes,
        alpha=0.8,
        linewidths=2,
        edgecolors="darkgray",
    )

    # Draw labels
    nx.draw_networkx_labels(graph, pos, font_size=8, font_weight="bold")

    plt.title(
        f"Cookbook Dependency Graph ({num_nodes} nodes, {layout_algorithm} layout)",
        fontsize=16,
        pad=20,
    )
    plt.axis("off")
    plt.tight_layout()

    return plt.gcf()
def create_dependency_graph(analysis_result, viz_type, layout_algorithm="auto"):
    """
    Create a dependency graph visualization.

    Args:
        analysis_result: Text analysis result from dependency analysis
        viz_type: Visualization type ("interactive" or "static")
        layout_algorithm: Layout algorithm to use ("auto", "spring",
            "circular", "kamada_kawai")

    Returns:
        Plotly figure for interactive graphs, matplotlib figure for
        static graphs, or ``None`` when there is nothing to draw or
        rendering fails.

    """
    try:
        # Extract structured dependency data from the analysis text.
        deps, circular, community = _parse_dependency_analysis(analysis_result)
        dep_graph = _create_networkx_graph(deps, circular, community)

        # An empty graph means the analysis contained no relationships.
        if not dep_graph.nodes:
            return None

        positions, chosen_layout = _calculate_graph_positions(
            dep_graph, layout_algorithm
        )

        # Pick the renderer matching the requested visualization type.
        builder = (
            _create_interactive_plotly_graph
            if viz_type == "interactive"
            else _create_static_matplotlib_graph
        )
        return builder(dep_graph, positions, len(dep_graph.nodes), chosen_layout)

    except Exception as e:
        st.error(f"Error creating dependency graph: {e}")
        return None
def _parse_dependency_metrics_from_result(analysis_result):
    """Parse dependency analysis text into four integer metrics.

    Scans for labelled count lines ("Direct Dependencies: N", etc.).
    Any metric that is missing or non-numeric stays at zero.

    Returns:
        Tuple of (direct, transitive, circular, community) counts.
    """
    labels = (
        "Direct Dependencies:",
        "Transitive Dependencies:",
        "Circular Dependencies:",
        "Community Cookbooks:",
    )
    counts = dict.fromkeys(labels, 0)

    for line in analysis_result.split("\n"):
        for label in labels:
            if label in line:
                # Non-numeric values are ignored, keeping the default of 0.
                with contextlib.suppress(ValueError):
                    counts[label] = int(line.split(":")[1].strip())
                break

    return tuple(counts[label] for label in labels)
def _display_dependency_summary_metrics(
    direct_deps, transitive_deps, circular_deps, community_cookbooks
):
    """Render the four dependency summary metrics in a single row."""
    cols = st.columns(4)

    with cols[0]:
        st.metric("Direct Dependencies", direct_deps)

    with cols[1]:
        st.metric("Transitive Dependencies", transitive_deps)

    with cols[2]:
        # Any circular dependency count is flagged for review.
        st.metric(
            SECTION_CIRCULAR_DEPENDENCIES,
            circular_deps,
            delta="⚠️ Check" if circular_deps > 0 else "✅ OK",
        )

    with cols[3]:
        st.metric("Community Cookbooks", community_cookbooks)
def _handle_graph_caching():
    """Render graph-cache controls inside a settings expander.

    Persists the cache toggle in ``st.session_state`` and offers a button
    that evicts every cached graph entry (session keys prefixed
    ``graph_``).
    """
    with st.expander("⚙️ Graph Settings"):
        st.session_state["graph_cache_enabled"] = st.checkbox(
            "Enable Graph Caching",
            value=st.session_state.get("graph_cache_enabled", True),
            help="Cache graph data to improve performance for repeated views",
        )

        clear_clicked = st.button(
            "🗑️ Clear Graph Cache", help="Clear cached graph data to free memory"
        )
        if clear_clicked:
            # Drop every cached graph entry from the session.
            stale_keys = [k for k in st.session_state if k.startswith("graph_")]
            for key in stale_keys:
                del st.session_state[key]
            st.success("Graph cache cleared!")
            st.rerun()
def _display_dependency_graph_visualization(analysis_result, viz_type, selected_layout):
    """Display the dependency graph visualization section.

    Uses a session-state cache keyed on the analysis text and render
    options; falls back to a text summary on any rendering error.
    """
    try:
        # Cache key is derived from the analysis text and render options.
        cache_key = f"graph_{hash(analysis_result)}_{viz_type}_{selected_layout}"
        cache_on = st.session_state.get("graph_cache_enabled", True)

        if cache_on and cache_key in st.session_state:
            graph_data = st.session_state[cache_key]
            st.info("📋 Using cached graph data")
        else:
            graph_data = create_dependency_graph(
                analysis_result, viz_type, selected_layout
            )
            # Only cache successful renders, and only when caching is on.
            if graph_data is not None and cache_on:
                st.session_state[cache_key] = graph_data

        _handle_graph_caching()

        if graph_data:
            _display_graph_with_export_options(graph_data, viz_type)
        else:
            st.info("No dependency relationships found to visualize.")

    except Exception as e:
        _handle_graph_visualization_error(e, analysis_result)
def _display_graph_with_export_options(graph_data, viz_type):
    """Display graph and provide export options.

    For ``viz_type == "interactive"`` *graph_data* is a Plotly figure and
    export buttons offer HTML/JSON/PNG; otherwise it is a matplotlib
    figure with PNG/SVG export.
    """
    if viz_type == "interactive":
        # Interactive Plotly graph
        st.plotly_chart(graph_data, use_container_width=True)

        # Export options for interactive graph
        st.subheader("Export Graph")
        col1, col2, col3 = st.columns(3)

        with col1:
            # Export as HTML (embeds Plotly from the CDN, not inline)
            html_content = graph_data.to_html(full_html=False, include_plotlyjs="cdn")
            st.download_button(
                label="📄 Export as HTML",
                data=html_content,
                file_name="dependency_graph.html",
                mime="text/html",
                help="Download interactive graph as HTML file",
            )

        with col2:
            # Export as JSON
            json_data = graph_data.to_json()
            st.download_button(
                label="📊 Export as JSON",
                data=json_data,
                file_name="dependency_graph.json",
                mime=MIME_APPLICATION_JSON,
                help="Download graph data as JSON",
            )

        with col3:
            # Export as PNG (requires kaleido); hidden when unavailable
            try:
                import plotly.io as pio  # type: ignore[import-untyped]

                png_data = pio.to_image(graph_data, format="png")
                st.download_button(
                    label="🖼️ Export as PNG",
                    data=png_data,
                    file_name="dependency_graph.png",
                    mime="image/png",
                    help="Download graph as PNG image",
                )
            except ImportError:
                st.info("PNG export requires additional dependencies")

    else:
        # Static matplotlib graph
        st.pyplot(graph_data)

        # Export options for static graph
        st.subheader("Export Graph")
        col1, col2 = st.columns(2)

        with col1:
            # Export as PNG (io is also used by the SVG branch below)
            import io

            buf = io.BytesIO()
            graph_data.savefig(buf, format="png", dpi=300, bbox_inches="tight")
            buf.seek(0)
            st.download_button(
                label="🖼️ Export as PNG",
                data=buf.getvalue(),
                file_name="dependency_graph.png",
                mime="image/png",
                help="Download graph as high-resolution PNG",
            )

        with col2:
            # Export as SVG
            buf_svg = io.BytesIO()
            graph_data.savefig(buf_svg, format="svg", bbox_inches="tight")
            buf_svg.seek(0)
            st.download_button(
                label="📈 Export as SVG",
                data=buf_svg.getvalue(),
                file_name="dependency_graph.svg",
                mime="image/svg+xml",
                help="Download graph as scalable SVG",
            )
def _handle_graph_visualization_error(error, analysis_result):
    """Handle graph visualization errors with fallback display.

    Shows the error details in an expander with troubleshooting tips,
    then renders the raw text analysis so the user still gets the
    dependency information.
    """
    st.error("❌ **Graph Visualization Error**")
    with st.expander("Error Details"):
        st.code(str(error), language="text")
        st.markdown("""
        **Possible causes:**
        - Invalid dependency analysis data
        - Graph layout algorithm failed for this data
        - Memory constraints for large graphs

        **Suggestions:**
        - Try a different layout algorithm
        - Reduce the scope of your dependency analysis
        - Check the dependency analysis output for issues
        """)

    # Fallback: show text summary
    st.info("📄 Showing text-based dependency summary instead:")
    st.text_area(
        "Dependency Analysis Text",
        analysis_result,
        height=300,
        help="Raw dependency analysis output",
    )
def _display_dependency_analysis_sections(analysis_result):
    """Display dependency analysis results in expandable sections.

    The analysis markdown is split on "\\n## " headings; known sections
    are placed inside titled expanders (with their own heading stripped),
    anything unrecognized is rendered inline.
    """
    # Split analysis into sections
    sections = analysis_result.split("\n## ")

    for section in sections:
        if section.strip():
            # Re-attach the "## " marker lost by the split (every section
            # after the first loses its prefix).
            if not section.startswith("#"):
                section = "## " + section

            # Add expanders for different sections
            if "Migration Order Recommendations" in section:
                with st.expander("📋 Migration Order Recommendations"):
                    st.markdown(
                        section.replace("## Migration Order Recommendations", "")
                    )
            elif "Dependency Graph" in section:
                with st.expander("🔗 Dependency Graph"):
                    st.markdown(section.replace("## Dependency Graph", ""))
            elif "Circular Dependencies" in section:
                with st.expander(f"⚠️ {SECTION_CIRCULAR_DEPENDENCIES}"):
                    st.markdown(section.replace("## Circular Dependencies", ""))
            elif "Community Cookbooks" in section:
                with st.expander("🌐 Community Cookbooks"):
                    st.markdown(section.replace("## Community Cookbooks", ""))
            elif "Migration Impact Analysis" in section:
                with st.expander("📊 Migration Impact Analysis"):
                    st.markdown(section.replace("## Migration Impact Analysis", ""))
            else:
                st.markdown(section)
def _display_migration_recommendations(circular_deps, community_cookbooks, direct_deps):
    """Display migration recommendations based on analysis results.

    Args:
        circular_deps: Count of circular dependency pairs (error banner
            when positive).
        community_cookbooks: Count of community cookbooks found (success
            banner when positive).
        direct_deps: Direct dependency count (warning banner above 10).
    """
    st.subheader("Migration Recommendations")

    # Circular dependencies block migration and must be fixed first.
    if circular_deps > 0:
        st.error(
            "⚠️ **Critical Issue**: Circular dependencies detected. "
            "Resolve before migration."
        )
        st.markdown("""
        **Resolution Steps:**
        1. Review the circular dependency pairs
        2. Refactor cookbooks to break circular references
        3. Consider combining tightly coupled cookbooks
        4. Update dependency declarations
        """)

    # Community cookbooks usually have Galaxy role equivalents.
    if community_cookbooks > 0:
        st.success(
            f"✅ **Good News**: {community_cookbooks} community cookbooks identified."
        )
        st.markdown("""
        **Recommendations:**
        - Replace with Ansible Galaxy roles where possible
        - Review community cookbook versions and security
        - Consider forking and maintaining custom versions if needed
        """)

    # More than 10 direct dependencies is treated as high complexity.
    if direct_deps > 10:
        st.warning("⚠️ **Complex Dependencies**: High dependency count detected.")
        st.markdown("""
        **Consider:**
        - Breaking down monolithic cookbooks
        - Implementing proper dependency injection
        - Planning migration in smaller phases
        """)
def _display_dependency_export_options(
    analysis_result,
    cookbook_path,
    depth,
    direct_deps,
    transitive_deps,
    circular_deps,
    community_cookbooks,
):
    """Display export options for dependency analysis.

    Offers the full markdown report and a JSON summary (path, depth,
    metric counts, plus the full analysis text) as download buttons.
    """
    st.subheader("Export Analysis")

    col1, col2 = st.columns(2)

    with col1:
        st.download_button(
            label="📥 Download Full Analysis",
            data=analysis_result,
            file_name="dependency_analysis.md",
            mime=MIME_TEXT_MARKDOWN,
            help="Download complete dependency analysis",
        )

    with col2:
        # Create a simplified JSON export
        analysis_json = {
            "cookbook_path": cookbook_path,
            "analysis_depth": depth,
            "metrics": {
                "direct_dependencies": direct_deps,
                "transitive_dependencies": transitive_deps,
                "circular_dependencies": circular_deps,
                "community_cookbooks": community_cookbooks,
            },
            "full_analysis": analysis_result,
        }

        import json

        st.download_button(
            label="📊 Download JSON Summary",
            data=json.dumps(analysis_json, indent=2),
            file_name="dependency_analysis.json",
            mime=MIME_APPLICATION_JSON,
            help="Download analysis summary as JSON",
        )
def display_dependency_analysis_results():
    """Display dependency analysis results.

    Reads the previously stored analysis from ``st.session_state``
    (``dep_analysis_result``, ``dep_cookbook_path``, ``dep_depth``,
    ``dep_viz_type``) and renders summary metrics, optional graph
    visualization, section details, recommendations, and export options.
    """
    analysis_result = st.session_state.dep_analysis_result
    cookbook_path = st.session_state.dep_cookbook_path
    depth = st.session_state.dep_depth
    viz_type = st.session_state.get("dep_viz_type", "text")

    # Summary metrics
    st.subheader("Dependency Analysis Summary")

    # Parse metrics from analysis result
    direct_deps, transitive_deps, circular_deps, community_cookbooks = (
        _parse_dependency_metrics_from_result(analysis_result)
    )

    # Display summary metrics
    _display_dependency_summary_metrics(
        direct_deps, transitive_deps, circular_deps, community_cookbooks
    )

    # Analysis depth indicator
    st.info(f"Analysis performed with **{depth}** depth on: `{cookbook_path}`")

    # Graph Visualization Section (only for graph-capable view modes)
    if viz_type in ["graph", "interactive"]:
        st.subheader("📊 Dependency Graph Visualization")

        # Layout algorithm selector
        layout_options = ["auto", "spring", "circular", "kamada_kawai"]
        selected_layout = st.selectbox(
            "Layout Algorithm",
            layout_options,
            help="Choose graph layout algorithm. 'auto' selects best "
            "algorithm based on graph size.",
            format_func=lambda x: {
                "auto": "Auto (recommended)",
                "spring": "Spring Layout",
                "circular": "Circular Layout",
                "kamada_kawai": "Kamada-Kawai Layout",
            }.get(x, str(x)),
        )

        _display_dependency_graph_visualization(
            analysis_result, viz_type, selected_layout
        )

    # Display analysis results
    st.subheader("Dependency Analysis Details")

    _display_dependency_analysis_sections(analysis_result)

    # Migration recommendations
    _display_migration_recommendations(circular_deps, community_cookbooks, direct_deps)

    # Export options
    _display_dependency_export_options(
        analysis_result,
        cookbook_path,
        depth,
        direct_deps,
        transitive_deps,
        circular_deps,
        community_cookbooks,
    )
def show_validation_reports():
    """Show validation reports and conversion validation.

    Renders the validation configuration form (type, format, input path,
    options), runs ``ValidationEngine.validate_conversion`` on submit
    with progress feedback, stores the formatted results in session
    state, and displays any previously stored results.
    """
    st.header("Validation Reports")

    # Import validation functions
    from souschef.core.validation import ValidationEngine

    st.markdown("""
    Validate Chef to Ansible conversions and generate comprehensive
    validation reports for migration quality assurance.
    """)

    # Validation options
    col1, col2 = st.columns(2)

    with col1:
        validation_type = st.selectbox(
            "Validation Type",
            ["syntax", "logic", "security", "performance", "full"],
            help="Type of validation to perform",
            format_func=lambda x: {
                "syntax": "Syntax Validation",
                "logic": "Logic & Structure Validation",
                "security": "Security Best Practices",
                "performance": "Performance Analysis",
                "full": "Complete Validation Suite",
            }.get(x, str(x)),
        )

    with col2:
        output_format = st.selectbox(
            "Output Format",
            ["text", "json", "html"],
            help="Format for validation reports",
            format_func=lambda x: {
                "text": "Text Report",
                "json": "JSON Data",
                "html": "HTML Report",
            }.get(x, str(x)),
        )

    # File/Directory input
    st.subheader("Input Source")

    input_type = st.radio(
        "Input Type",
        ["Directory", "Single File"],
        horizontal=True,
        help="Validate a directory of files or a single file",
    )

    if input_type == "Directory":
        input_path = st.text_input(
            "Directory Path",
            placeholder="/path/to/ansible/playbooks",
            help="Path to directory containing Ansible playbooks to validate",
        )
    else:
        input_path = st.text_input(
            "File Path",
            placeholder="/path/to/playbook.yml",
            help="Path to single Ansible playbook file to validate",
        )

    # Validation options
    st.subheader("Validation Options")

    col1, col2, col3 = st.columns(3)

    with col1:
        strict_mode = st.checkbox(
            "Strict Mode", help="Fail on warnings, not just errors"
        )

    with col2:
        include_best_practices = st.checkbox(
            "Include Best Practices",
            value=True,
            help="Check for Ansible best practices",
        )

    with col3:
        generate_recommendations = st.checkbox(
            "Generate Recommendations",
            value=True,
            help="Provide improvement suggestions",
        )

    # Validation button
    if st.button("Run Validation", type="primary", use_container_width=True):
        if not input_path.strip():
            st.error("Please enter a path to validate.")
            return

        # Create progress tracker
        progress_tracker = ProgressTracker(
            total_steps=6, description="Running validation..."
        )

        try:
            progress_tracker.update(1, "Preparing validation environment...")

            # Prepare validation options
            # NOTE(review): these options are stored in session state but
            # never passed to ValidationEngine.validate_conversion below —
            # confirm whether the engine is supposed to receive them.
            options = {
                "strict": strict_mode,
                "best_practices": include_best_practices,
                "recommendations": generate_recommendations,
                "format": output_format,
            }

            progress_tracker.update(2, "Scanning input files...")
            progress_tracker.update(3, "Running syntax validation...")
            progress_tracker.update(4, "Performing logic checks...")

            # Run validation
            engine = ValidationEngine()
            validation_results = engine.validate_conversion(
                validation_type, input_path.strip()
            )

            # Format the results as text ("LEVEL: message" per line)
            validation_result = "\n".join(
                [
                    f"{result.level.value.upper()}: {result.message}"
                    for result in validation_results
                ]
            )

            progress_tracker.update(5, "Generating validation report...")

            # Store results for display after the rerun
            st.session_state.validation_result = validation_result
            st.session_state.validation_path = input_path.strip()
            st.session_state.validation_type = validation_type
            st.session_state.validation_options = options

            progress_tracker.complete("Validation completed!")
            st.success("Validation completed successfully!")
            st.rerun()

        except Exception as e:
            progress_tracker.close()
            st.error(f"Error during validation: {e}")
            return

    # Display results if available
    if "validation_result" in st.session_state:
        display_validation_results()
def _parse_validation_metrics(validation_result):
|
|
1626
|
+
"""Parse validation result to extract key metrics."""
|
|
1627
|
+
lines = validation_result.split("\n")
|
|
1628
|
+
|
|
1629
|
+
errors = 0
|
|
1630
|
+
warnings = 0
|
|
1631
|
+
passed = 0
|
|
1632
|
+
total_checks = 0
|
|
1633
|
+
|
|
1634
|
+
for line in lines:
|
|
1635
|
+
if "ERROR:" in line.upper():
|
|
1636
|
+
errors += 1
|
|
1637
|
+
elif "WARNING:" in line.upper():
|
|
1638
|
+
warnings += 1
|
|
1639
|
+
elif "PASSED:" in line.upper() or "✓" in line:
|
|
1640
|
+
passed += 1
|
|
1641
|
+
if "Total checks:" in line.lower():
|
|
1642
|
+
with contextlib.suppress(ValueError):
|
|
1643
|
+
total_checks = int(line.split(":")[1].strip())
|
|
1644
|
+
|
|
1645
|
+
return errors, warnings, passed, total_checks
|
|
1646
|
+
|
|
1647
|
+
|
|
1648
|
+
def _display_validation_summary_metrics(errors, warnings, passed, total_checks):
    """Render the four headline validation figures in a single metric row."""
    total_col, passed_col, warn_col, err_col = st.columns(4)

    with total_col:
        st.metric("Total Checks", total_checks)

    with passed_col:
        st.metric("Passed", passed, delta="✅" if passed > 0 else "")

    with warn_col:
        st.metric("Warnings", warnings, delta="⚠️" if warnings > 0 else "")

    with err_col:
        st.metric("Errors", errors, delta="❌" if errors > 0 else "")
|
|
1663
|
+
|
|
1664
|
+
|
|
1665
|
+
def _display_validation_status(errors, warnings):
    """Show a single banner summarizing the overall validation outcome."""
    # Errors take precedence over warnings; warnings over a clean pass.
    if errors > 0:
        st.error("❌ **Validation Failed**: Critical issues found that need attention.")
        return
    if warnings > 0:
        st.warning(
            "⚠️ **Validation Passed with Warnings**: Review warnings before proceeding."
        )
        return
    st.success("✅ **Validation Passed**: All checks successful!")
|
|
1675
|
+
|
|
1676
|
+
|
|
1677
|
+
def _display_validation_sections(validation_result):
    """Display validation results in expandable sections.

    The report is split on second-level markdown headings; each known
    section is rendered inside its own expander, anything unrecognized
    is rendered inline.

    Args:
        validation_result: Full validation report as markdown text.
    """
    # (heading substring, expander label) pairs; first match wins, mirroring
    # the order of the original if/elif chain.
    known_sections = (
        ("Syntax Validation", "🔍 Syntax Validation"),
        ("Logic Validation", "🧠 Logic Validation"),
        ("Security Validation", "🔒 Security Validation"),
        ("Performance Validation", "⚡ Performance Validation"),
        ("Best Practices", "📋 Best Practices"),
        ("Recommendations", "💡 Recommendations"),
    )

    for raw_section in validation_result.split("\n## "):
        if not raw_section.strip():
            continue

        # Restore the heading marker stripped by the split above.
        section = raw_section if raw_section.startswith("#") else "## " + raw_section

        for heading, label in known_sections:
            if heading in section:
                with st.expander(label):
                    # Drop the heading itself; the expander title replaces it.
                    st.markdown(section.replace("## " + heading, ""))
                break
        else:
            st.markdown(section)
|
|
1708
|
+
|
|
1709
|
+
|
|
1710
|
+
def _display_validation_action_items(errors, warnings):
    """List follow-up actions when validation produced errors or warnings."""
    if errors == 0 and warnings == 0:
        return

    st.subheader("Action Items")

    if errors > 0:
        st.error("**Critical Issues to Fix:**")
        st.markdown("""
        - Review error messages above
        - Fix syntax and logic errors
        - Re-run validation after fixes
        - Consider impact on migration timeline
        """)

    if warnings > 0:
        st.warning("**Warnings to Review:**")
        st.markdown("""
        - Address security warnings
        - Review performance suggestions
        - Consider best practice recommendations
        - Document any intentional deviations
        """)
|
|
1732
|
+
|
|
1733
|
+
|
|
1734
|
+
def _display_validation_export_options(
    validation_result,
    input_path,
    validation_type,
    options,
    errors,
    warnings,
    passed,
    total_checks,
):
    """Offer download buttons for the validation report (markdown and JSON)."""
    import json

    st.subheader("Export Report")

    markdown_col, json_col = st.columns(2)

    with markdown_col:
        st.download_button(
            label="📥 Download Full Report",
            data=validation_result,
            file_name="validation_report.md",
            mime=MIME_TEXT_MARKDOWN,
            help="Download complete validation report",
        )

    with json_col:
        # Overall status: errors trump warnings, warnings trump a clean pass.
        if errors > 0:
            status = "failed"
        else:
            status = "warning" if warnings > 0 else "passed"

        summary = {
            "input_path": input_path,
            "validation_type": validation_type,
            "options": options,
            "metrics": {
                "total_checks": total_checks,
                "passed": passed,
                "warnings": warnings,
                "errors": errors,
            },
            "status": status,
            "full_report": validation_result,
        }

        st.download_button(
            label="📊 Download JSON Summary",
            data=json.dumps(summary, indent=2),
            file_name="validation_report.json",
            mime=MIME_APPLICATION_JSON,
            help="Download validation summary as JSON",
        )
|
|
1789
|
+
|
|
1790
|
+
|
|
1791
|
+
def display_validation_results():
    """Display validation results stored in the Streamlit session state."""
    state = st.session_state
    validation_result = state.validation_result
    input_path = state.validation_path
    validation_type = state.validation_type
    options = state.validation_options

    # Headline metrics parsed from the raw report text.
    st.subheader("Validation Summary")
    errors, warnings, passed, total_checks = _parse_validation_metrics(
        validation_result
    )
    _display_validation_summary_metrics(errors, warnings, passed, total_checks)

    # Overall pass/warn/fail banner.
    _display_validation_status(errors, warnings)

    st.info(f"Validation type: **{validation_type}** | Path: `{input_path}`")

    # Full report broken into expandable sections.
    st.subheader("Validation Details")
    _display_validation_sections(validation_result)

    # Follow-up guidance and export buttons.
    _display_validation_action_items(errors, warnings)
    _display_validation_export_options(
        validation_result,
        input_path,
        validation_type,
        options,
        errors,
        warnings,
        passed,
        total_checks,
    )
|
|
1834
|
+
|
|
1835
|
+
|
|
1836
|
+
# Allow running this Streamlit app module directly as a script.
if __name__ == "__main__":
    main()
|