mcp-souschef 3.0.0-py3-none-any.whl → 3.5.1-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information in this diff is provided for informational purposes only and reflects the changes between package versions as they appear in their respective public registries. Deleted lines whose content the registry viewer did not capture appear below as bare `-` markers.
- {mcp_souschef-3.0.0.dist-info → mcp_souschef-3.5.1.dist-info}/METADATA +241 -409
- mcp_souschef-3.5.1.dist-info/RECORD +52 -0
- {mcp_souschef-3.0.0.dist-info → mcp_souschef-3.5.1.dist-info}/WHEEL +1 -1
- souschef/__init__.py +2 -10
- souschef/assessment.py +417 -206
- souschef/ci/common.py +1 -1
- souschef/cli.py +302 -19
- souschef/converters/playbook.py +530 -202
- souschef/converters/template.py +122 -5
- souschef/core/__init__.py +6 -1
- souschef/core/ai_schemas.py +81 -0
- souschef/core/http_client.py +394 -0
- souschef/core/logging.py +344 -0
- souschef/core/metrics.py +73 -6
- souschef/core/path_utils.py +233 -19
- souschef/core/url_validation.py +230 -0
- souschef/deployment.py +10 -3
- souschef/generators/__init__.py +13 -0
- souschef/generators/repo.py +695 -0
- souschef/parsers/attributes.py +1 -1
- souschef/parsers/habitat.py +1 -1
- souschef/parsers/inspec.py +25 -2
- souschef/parsers/metadata.py +5 -3
- souschef/parsers/recipe.py +1 -1
- souschef/parsers/resource.py +1 -1
- souschef/parsers/template.py +1 -1
- souschef/server.py +556 -188
- souschef/ui/app.py +44 -36
- souschef/ui/pages/ai_settings.py +151 -30
- souschef/ui/pages/chef_server_settings.py +300 -0
- souschef/ui/pages/cookbook_analysis.py +903 -173
- mcp_souschef-3.0.0.dist-info/RECORD +0 -46
- souschef/converters/cookbook_specific.py.backup +0 -109
- {mcp_souschef-3.0.0.dist-info → mcp_souschef-3.5.1.dist-info}/entry_points.txt +0 -0
- {mcp_souschef-3.0.0.dist-info → mcp_souschef-3.5.1.dist-info}/licenses/LICENSE +0 -0
--- a/souschef/ui/pages/cookbook_analysis.py
+++ b/souschef/ui/pages/cookbook_analysis.py
@@ -1,10 +1,12 @@
 """Cookbook Analysis Page for SousChef UI."""
 
 import contextlib
+import inspect
 import io
 import json
 import os
 import shutil
+import subprocess
 import sys
 import tarfile
 import tempfile
@@ -33,6 +35,15 @@ from souschef.core.metrics import (
     get_timeline_weeks,
     validate_metrics_consistency,
 )
+from souschef.core.path_utils import (
+    _ensure_within_base_path,
+    _normalize_path,
+    _safe_join,
+)
+from souschef.generators.repo import (
+    analyse_conversion_output,
+    generate_ansible_repository,
+)
 from souschef.parsers.metadata import parse_cookbook_metadata
 
 # AI Settings
@@ -78,7 +89,12 @@ def _get_secure_ai_config_path() -> Path:
     if config_dir.is_symlink():
         raise ValueError("AI config directory cannot be a symlink")
 
-    return config_dir / "ai_config.json"
+    config_file = config_dir / "ai_config.json"
+    # Ensure config file has secure permissions if it exists
+    if config_file.exists():
+        with contextlib.suppress(OSError):
+            config_file.chmod(0o600)
+    return config_file
 
 
 def load_ai_settings() -> dict[str, str | float | int]:
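Note: the new `_get_secure_ai_config_path` body tightens permissions on an existing config file before returning it. A minimal standalone sketch of the same pattern (only the `ai_config.json` name comes from the diff; the helper name here is illustrative):

```python
import contextlib
from pathlib import Path


def secure_config_file(config_dir: Path) -> Path:
    """Return the config path, tightening it to owner-only access first."""
    config_file = config_dir / "ai_config.json"
    if config_file.exists():
        # chmod may fail on some filesystems; treat that as non-fatal,
        # mirroring the contextlib.suppress(OSError) used in the diff.
        with contextlib.suppress(OSError):
            config_file.chmod(0o600)  # rw------- : owner read/write only
    return config_file
```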
@@ -241,6 +257,7 @@ METADATA_STATUS_NO = "No"
 ANALYSIS_STATUS_ANALYSED = "Analysed"
 ANALYSIS_STATUS_FAILED = "Failed"
 METADATA_COLUMN_NAME = "Has Metadata"
+MIME_TYPE_ZIP = "application/zip"
 
 # Security limits for archive extraction
 MAX_ARCHIVE_SIZE = 100 * 1024 * 1024  # 100MB total
@@ -375,7 +392,12 @@ license 'All rights reserved'
 description 'Automatically extracted cookbook from archive'
 version '1.0.0'
 """
-
+    try:
+        metadata_file = synthetic_cookbook_dir / METADATA_FILENAME
+        metadata_file.parent.mkdir(parents=True, exist_ok=True)
+        metadata_file.write_text(metadata_content)
+    except OSError as e:
+        raise OSError(f"Failed to write metadata file: {e}") from e
 
     return extraction_dir
 
@@ -421,15 +443,14 @@ def _extract_zip_securely(archive_path: Path, extraction_dir: Path) -> None:
         # Safe extraction with manual path handling
         for info in zip_ref.filelist:
             # Construct safe relative path
+
             safe_path = _get_safe_extraction_path(info.filename, extraction_dir)
 
             if info.is_dir():
-                # Create directory
                 safe_path.mkdir(parents=True, exist_ok=True)
             else:
-                # Create parent directories if needed
                 safe_path.parent.mkdir(parents=True, exist_ok=True)
-
+
                 with zip_ref.open(info) as source, safe_path.open("wb") as target:
                     # Read in chunks to control memory usage
                     while True:
@@ -473,7 +494,16 @@ def _validate_zip_file_security(info, file_count: int, total_size: int) -> None:
 def _extract_tar_securely(
     archive_path: Path, extraction_dir: Path, gzipped: bool
 ) -> None:
-    """
+    """
+    Extract TAR archive with resource consumption controls (S5042).
+
+    Resource consumption is controlled via:
+    - Pre-scanning all members before extraction
+    - Validating file sizes, counts, and directory depth
+    - Using tarfile.filter='data' (Python 3.12+) to prevent symlink traversal
+    - Limiting extraction to validated safe paths
+
+    """
     mode = "r:gz" if gzipped else "r"
 
     if not archive_path.is_file():
@@ -483,11 +513,25 @@
         raise ValueError(f"Invalid or corrupted TAR archive: {archive_path.name}")
 
     try:
-
-
-        )
+        open_kwargs: dict[str, Any] = {"name": str(archive_path), "mode": mode}
+
+        # Apply safe filter if available (Python 3.12+) to prevent traversal attacks.
+        # For older Python versions, resource consumption is controlled via pre-scanning
+        # and member validation before extraction.
+        if "filter" in inspect.signature(tarfile.open).parameters:
+            # Use 'data' filter to prevent extraction of special files and symlinks
+            open_kwargs["filter"] = "data"
+
+        # Resource consumption controls (S5042): Pre-scan validates all members for
+        # size limits (MAX_ARCHIVE_SIZE, MAX_FILE_SIZE), file count (MAX_FILES),
+        # depth (MAX_DEPTH), and blocks malicious files before extraction.
+        with tarfile.open(**open_kwargs) as tar_ref:
             members = tar_ref.getmembers()
+            # Pre-validate all members before allowing extraction
+            # This controls resource consumption and prevents
+            # zip bombs/decompression bombs
             _pre_scan_tar_members(members)
+            # Extract only validated members to pre-validated safe paths
             _extract_tar_members(tar_ref, members, extraction_dir)
     except tarfile.TarError as e:
         raise ValueError(f"Invalid or corrupted TAR archive: {e}") from e
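Note: the hunk above feature-detects a `filter` keyword via `inspect.signature` before setting `filter="data"`. In CPython the extraction filter (PEP 706, Python 3.12+) is accepted by `TarFile.extract`/`extractall` rather than `tarfile.open`, so a sketch of the same version-gated idiom applied at extraction time looks like this (illustrative, not the package's exact code):

```python
import inspect
import tarfile
from pathlib import Path


def extract_with_data_filter(archive: Path, dest: Path) -> None:
    """Extract a tar archive, opting into the 'data' filter where supported."""
    with tarfile.open(archive, "r:*") as tar:
        extract_kwargs = {}
        if "filter" in inspect.signature(tar.extractall).parameters:
            # 'data' rejects absolute paths, links escaping the tree,
            # and special files such as device nodes (Python 3.12+).
            extract_kwargs["filter"] = "data"
        tar.extractall(dest, **extract_kwargs)
```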
@@ -496,10 +540,20 @@
 
 
 def _pre_scan_tar_members(members):
-    """
+    """
+    Pre-scan TAR members to control resource consumption (S5042).
+
+    Validates all members before extraction to prevent:
+    - Compression/decompression bombs (via size limits)
+    - Excessive memory consumption (via file count limits)
+    - Directory traversal attacks (via depth limits)
+    - Malicious file inclusion (via extension and type checks)
+
+    """
     total_size = 0
     for file_count, member in enumerate(members, start=1):
         total_size += member.size
+        # Validate member and accumulate size for bounds checking
         _validate_tar_file_security(member, file_count, total_size)
 
 
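Note: the pre-scan validates every member before a single byte is extracted, so oversized or over-deep archives fail fast. A compact sketch of such a validator; `MAX_ARCHIVE_SIZE` matches the constant visible earlier in the diff, while the `MAX_FILES` and `MAX_DEPTH` values here are illustrative:

```python
import tarfile

MAX_ARCHIVE_SIZE = 100 * 1024 * 1024  # 100MB total, as defined in the diff
MAX_FILES = 10_000                    # illustrative limit
MAX_DEPTH = 32                        # illustrative limit


def pre_scan_members(members: list[tarfile.TarInfo]) -> None:
    """Reject archives exceeding size, count, or nesting budgets."""
    total_size = 0
    for file_count, member in enumerate(members, start=1):
        total_size += member.size
        if file_count > MAX_FILES:
            raise ValueError("Archive contains too many files")
        if total_size > MAX_ARCHIVE_SIZE:
            raise ValueError("Archive exceeds the total size budget")
        if len(member.name.split("/")) > MAX_DEPTH:
            raise ValueError(f"Entry nested too deeply: {member.name}")
        if member.issym() or member.islnk() or member.isdev():
            raise ValueError(f"Special file rejected: {member.name}")
```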
@@ -593,29 +647,11 @@ def _get_safe_extraction_path(filename: str, extraction_dir: Path) -> Path:
     ):
         raise ValueError(f"Path traversal or absolute path detected: {filename}")
 
-    #
+    # Normalise separators and join using a containment-checked join
     normalized = filename.replace("\\", "/").strip("/")
-
-
-
-    for part in normalized.split("/"):
-        if part == "" or part == ".":
-            continue
-        elif part == "..":
-            # Remove parent directory if we have one
-            if parts:
-                parts.pop()
-        else:
-            parts.append(part)
-
-    # Join parts back and resolve against extraction_dir
-    safe_path = extraction_dir / "/".join(parts)
-
-    # Ensure the final path is still within extraction_dir
-    try:
-        safe_path.resolve().relative_to(extraction_dir.resolve())
-    except ValueError:
-        raise ValueError(f"Path traversal detected: {filename}") from None
+    safe_path = _ensure_within_base_path(
+        _safe_join(extraction_dir.resolve(), normalized), extraction_dir.resolve()
+    )
 
     return safe_path
 
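Note: the hand-rolled `..` stripping above is replaced by `_safe_join` plus `_ensure_within_base_path` from `souschef.core.path_utils`. Those helpers are not shown in this diff; a typical implementation of the combined join-and-contain check would be (an assumption, for illustration):

```python
from pathlib import Path


def safe_join(base: Path, relative: str) -> Path:
    """Join a user-supplied relative path and verify it stays inside base."""
    candidate = (base / relative).resolve()
    try:
        # relative_to raises ValueError when candidate escapes base.
        candidate.relative_to(base.resolve())
    except ValueError:
        raise ValueError(f"Path traversal detected: {relative}") from None
    return candidate
```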
@@ -632,6 +668,11 @@ def create_results_archive(results: list, cookbook_path: str) -> bytes:
         # Add individual cookbook reports
         for result in results:
             if result["status"] == ANALYSIS_STATUS_ANALYSED:
+                manual_hours = result["estimated_hours"]
+                souschef_hours = result.get(
+                    "estimated_hours_with_souschef", manual_hours * 0.5
+                )
+                time_saved = manual_hours - souschef_hours
                 report_content = f"""# Cookbook Analysis Report: {result["name"]}
 
 ## Metadata
@@ -639,7 +680,14 @@ def create_results_archive(results: list, cookbook_path: str) -> bytes:
 - **Maintainer**: {result["maintainer"]}
 - **Dependencies**: {result["dependencies"]}
 - **Complexity**: {result["complexity"]}
-
+
+## Effort Estimates
+### Manual Migration (Without SousChef):
+- **Estimated Hours**: {manual_hours:.1f}
+
+### AI-Assisted (With SousChef):
+- **Estimated Hours**: {souschef_hours:.1f}
+- **Time Saved**: {time_saved:.1f} hours (50% faster)
 
 ## Recommendations
 {result["recommendations"]}
@@ -653,16 +701,27 @@ def create_results_archive(results: list, cookbook_path: str) -> bytes:
     successful = len(
         [r for r in results if r["status"] == ANALYSIS_STATUS_ANALYSED]
     )
-
+    total_hours_manual = sum(r.get("estimated_hours", 0) for r in results)
+    total_hours_souschef = sum(
+        r.get("estimated_hours_with_souschef", r.get("estimated_hours", 0) * 0.5)
+        for r in results
+    )
+    time_saved_total = total_hours_manual - total_hours_souschef
 
     summary_content = f"""# SousChef Cookbook Analysis Summary
 
 ## Overview
 - **Cookbooks Analysed**: {len(results)}
-
 - **Successfully Analysed**: {successful}
 
-
+## Effort Estimates
+### Manual Migration (Without AI):
+- **Total Estimated Hours**: {total_hours_manual:.1f}
+
+### AI-Assisted (With SousChef):
+- **Total Estimated Hours**: {total_hours_souschef:.1f}
+- **Time Saved**: {time_saved_total:.1f} hours (50% faster)
+
 - **Source**: {cookbook_path}  # deepcode ignore PT: used for display only
 
 ## Results Summary
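Note: both the per-cookbook reports and the summary fall back to half the manual estimate whenever `estimated_hours_with_souschef` is absent. A small worked example of the aggregation:

```python
results = [
    {"estimated_hours": 12.0, "estimated_hours_with_souschef": 5.0},
    {"estimated_hours": 8.0},  # no AI estimate recorded: defaults to 50%
]

total_manual = sum(r.get("estimated_hours", 0) for r in results)  # 20.0
total_ai = sum(
    r.get("estimated_hours_with_souschef", r.get("estimated_hours", 0) * 0.5)
    for r in results
)  # 5.0 + 4.0 = 9.0

print(f"Time saved: {total_manual - total_ai:.1f}h")  # Time saved: 11.0h
```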
@@ -671,10 +730,15 @@ def create_results_archive(results: list, cookbook_path: str) -> bytes:
         status_icon = (
             "PASS" if result["status"] == ANALYSIS_STATUS_ANALYSED else "FAIL"
         )
+        manual_hours = result.get("estimated_hours", 0)
+        souschef_hours = result.get(
+            "estimated_hours_with_souschef", manual_hours * 0.5
+        )
         summary_content += f"- {status_icon} {result['name']}: {result['status']}"
         if result["status"] == ANALYSIS_STATUS_ANALYSED:
             summary_content += (
-                f" ({
+                f" (Manual: {manual_hours:.1f}h, "
+                f"With SousChef: {souschef_hours:.1f}h, "
                 f"{result['complexity']} complexity)"
             )
         summary_content += "\n"
@@ -742,7 +806,7 @@ def _show_analysis_input() -> None:
             # Store temp_dir in session state to prevent premature cleanup
             st.session_state.temp_dir = temp_dir
             st.success("Archive extracted successfully to temporary location")
-        except
+        except (OSError, zipfile.BadZipFile, tarfile.TarError) as e:
             st.error(f"Failed to extract archive: {e}")
             return
 
@@ -775,9 +839,19 @@ def _display_results_view() -> None:
         st.session_state.analysis_cookbook_path = None
         st.session_state.total_cookbooks = None
         st.session_state.analysis_info_messages = None
+        st.session_state.conversion_results = None
+        st.session_state.generated_playbook_repo = None
         st.session_state.analysis_page_key += 1
         st.rerun()
 
+    # Check if we have conversion results to display
+    if "conversion_results" in st.session_state and st.session_state.conversion_results:
+        # Display conversion results instead of analysis results
+        playbooks = st.session_state.conversion_results["playbooks"]
+        templates = st.session_state.conversion_results["templates"]
+        _handle_playbook_download(playbooks, templates)
+        return
+
     _display_analysis_results(
         st.session_state.analysis_results,
         st.session_state.total_cookbooks,
@@ -832,14 +906,31 @@ def _get_archive_upload_input() -> Any:
     return uploaded_file
 
 
+def _is_within_base(base: Path, candidate: Path) -> bool:
+    """Check whether candidate is contained within base after resolution."""
+    base_real = Path(os.path.realpath(str(base)))
+    candidate_real = Path(os.path.realpath(str(candidate)))
+    try:
+        candidate_real.relative_to(base_real)
+        return True
+    except ValueError:
+        return False
+
+
 def _validate_and_list_cookbooks(cookbook_path: str) -> None:
     """Validate the cookbook path and list available cookbooks."""
     safe_dir = _get_safe_cookbook_directory(cookbook_path)
     if safe_dir is None:
         return
 
-
-
+    # Validate the safe directory before use
+    dir_exists: bool = safe_dir.exists()
+    if dir_exists:
+        dir_is_dir: bool = safe_dir.is_dir()
+        if dir_is_dir:
+            _list_and_display_cookbooks(safe_dir)
+        else:
+            st.error(f"Directory not found: {safe_dir}")
     else:
         st.error(f"Directory not found: {safe_dir}")
 
@@ -853,54 +944,26 @@ def _get_safe_cookbook_directory(cookbook_path):
     """
     try:
         base_dir = Path.cwd().resolve()
-        temp_dir = Path(tempfile.gettempdir()).resolve()
 
         path_str = str(cookbook_path).strip()
-
-
-        if "\x00" in path_str or ":\\" in path_str or "\\" in path_str:
-            st.error(
-                "Invalid path: Path contains null bytes or backslashes, "
-                "which are not allowed."
-            )
+        if not path_str:
+            st.error("Invalid path: Path cannot be empty.")
             return None
 
-        #
-
-            st.error(
-                "Invalid path: Path contains '..' which is not allowed "
-                "for security reasons."
-            )
-            return None
-
-        user_path = Path(path_str)
-
-        # Resolve the path safely
-        if user_path.is_absolute():
-            resolved_path = user_path.resolve()
-        else:
-            resolved_path = (base_dir / user_path).resolve()
+        # Sanitise the candidate path using shared helper
+        candidate = _normalize_path(path_str)
 
-
-
-
-
-
-
+        trusted_bases = [base_dir, Path(tempfile.gettempdir()).resolve()]
+        for base in trusted_bases:
+            try:
+                return _ensure_within_base_path(candidate, base)
+            except ValueError:
+                continue
 
-
-            resolved_path.relative_to(temp_dir)
-            return resolved_path
-        except ValueError:
-            st.error(
-                "Invalid path: The resolved path is outside the allowed "
-                "directories (workspace or temporary directory). Paths cannot go above "
-                "the workspace root for security reasons."
-            )
-            return None
+        raise ValueError(f"Path traversal attempt: escapes {base_dir}")
 
-    except
-        st.error(f"Invalid path: {exc}
+    except ValueError as exc:
+        st.error(f"Invalid path: {exc}")
         return None
 
 
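Note: the rewritten `_get_safe_cookbook_directory` accepts a path only when it resolves beneath one of two trusted roots (the workspace and the system temp directory). A self-contained sketch of that loop; the real `_normalize_path`/`_ensure_within_base_path` internals are not part of this diff:

```python
import tempfile
from pathlib import Path


def resolve_against_trusted_bases(path_str: str) -> Path:
    """Return the resolved path only if it lives under a trusted root."""
    candidate = Path(path_str).expanduser().resolve()
    trusted_bases = [Path.cwd().resolve(), Path(tempfile.gettempdir()).resolve()]
    for base in trusted_bases:
        try:
            candidate.relative_to(base)  # raises ValueError if outside base
            return candidate
        except ValueError:
            continue
    raise ValueError(f"Path traversal attempt: escapes {trusted_bases[0]}")
```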
@@ -1093,7 +1156,7 @@ def _handle_cookbook_selection(cookbook_path: str, cookbook_data: list):
 
     with col3:
         if st.button(
-            f"
+            f"Select All ({len(cookbook_names)})",
             help=f"Select all {len(cookbook_names)} cookbooks",
             key="select_all",
         ):
@@ -1130,7 +1193,7 @@ def _show_cookbook_validation_warnings(cookbook_data: list):
     # Check for cookbooks without recipes
     cookbooks_without_recipes = []
     for cookbook in cookbook_data:
-        cookbook_dir =
+        cookbook_dir = _normalize_path(cookbook["Path"])
         recipes_dir = cookbook_dir / "recipes"
         if not recipes_dir.exists() or not list(recipes_dir.glob("*.rb")):
            cookbooks_without_recipes.append(cookbook["Name"])
@@ -1246,22 +1309,26 @@ def _analyze_with_ai(
     st.info(f"Using AI-enhanced analysis with {provider_name} ({model})")
 
     # Count total recipes across all cookbooks
-
-
-
-
-
-
+    def _safe_count_recipes(path_str: str) -> int:
+        """Count recipes safely with CodeQL-recognized containment checks."""
+        try:
+            normalized = _normalize_path(path_str)
+            recipes_dir = normalized / "recipes"
+
+            if recipes_dir.exists():
+                return len(list(recipes_dir.glob("*.rb")))
+            return 0
+        except (ValueError, OSError):
+            return 0
+
+    total_recipes = sum(_safe_count_recipes(cb["Path"]) for cb in cookbook_data)
 
     st.info(f"Detected {len(cookbook_data)} cookbook(s) with {total_recipes} recipe(s)")
 
     results = []
     for i, cb_data in enumerate(cookbook_data):
         # Count recipes in this cookbook
-
-        recipe_count = (
-            len(list(recipes_dir.glob("*.rb"))) if recipes_dir.exists() else 0
-        )
+        recipe_count = _safe_count_recipes(cb_data["Path"])
 
         st.info(
             f"Analyzing {cb_data['Name']} ({recipe_count} recipes)... "
@@ -1379,6 +1446,9 @@ def _build_assessment_result(
         "dependencies": int(cookbook_assessment.get("dependencies", 0) or 0),
         "complexity": cookbook_assessment.get("migration_priority", "Unknown").title(),
         "estimated_hours": effort_metrics.estimated_hours,
+        "estimated_hours_with_souschef": effort_metrics.estimated_hours_with_souschef,
+        "time_saved_hours": effort_metrics.time_saved * 8,
+        "efficiency_gain_percent": effort_metrics.efficiency_gain_percent,
         "recommendations": recommendations,
         "status": ANALYSIS_STATUS_ANALYSED,
     }
@@ -1582,20 +1652,26 @@ def _parse_summary_line(line: str, structured: dict):
         try:
             count = int(line.split(":")[-1].strip())
             structured["summary"]["total_cookbooks"] = count
-        except ValueError:
-
+        except ValueError as err:
+            structured.setdefault("parse_errors", []).append(
+                f"total_cookbooks_parse_failed: {err}"
+            )
     elif "Successfully converted:" in line:
         try:
             count = int(line.split(":")[-1].strip())
             structured["summary"]["cookbooks_converted"] = count
-        except ValueError:
-
+        except ValueError as err:
+            structured.setdefault("parse_errors", []).append(
+                f"cookbooks_converted_parse_failed: {err}"
+            )
     elif "Total files converted:" in line:
         try:
             count = int(line.split(":")[-1].strip())
             structured["summary"]["total_converted_files"] = count
-        except ValueError:
-
+        except ValueError as err:
+            structured.setdefault("parse_errors", []).append(
+                f"total_converted_files_parse_failed: {err}"
+            )
 
 
 def _parse_converted_cookbook(line: str, structured: dict):
@@ -1616,8 +1692,10 @@ def _parse_converted_cookbook(line: str, structured: dict):
                 "files_count": 0,
             }
         )
-    except (IndexError, ValueError):
-
+    except (IndexError, ValueError) as err:
+        structured.setdefault("parse_errors", []).append(
+            f"converted_cookbook_parse_failed: {err}"
+        )
 
 
 def _parse_failed_cookbook(line: str, structured: dict):
@@ -1634,8 +1712,10 @@ def _parse_failed_cookbook(line: str, structured: dict):
                 "error": error,
             }
         )
-    except (IndexError, ValueError):
-
+    except (IndexError, ValueError) as err:
+        structured.setdefault("parse_errors", []).append(
+            f"failed_cookbook_parse_failed: {err}"
+        )
 
 
 def _extract_warnings_from_text(result_text: str, structured: dict):
@@ -1697,7 +1777,7 @@ def _display_conversion_summary(structured_result: dict):
 def _display_conversion_warnings_errors(structured_result: dict):
     """Display conversion warnings and errors."""
     if "warnings" in structured_result and structured_result["warnings"]:
-        st.warning("
+        st.warning("Conversion Warnings")
         for warning in structured_result["warnings"]:
             st.write(f"• {warning}")
 
@@ -1714,7 +1794,8 @@ def _display_conversion_details(structured_result: dict):
 
     for cookbook_result in structured_result["cookbook_results"]:
         with st.expander(
-            f"
+            f"Cookbook {cookbook_result.get('cookbook_name', 'Unknown')}",
+            expanded=False,
         ):
             col1, col2 = st.columns(2)
 
@@ -1727,7 +1808,7 @@
                 st.metric("Files", cookbook_result.get("files_count", 0))
 
             if cookbook_result.get("status") == "success":
-                st.success("
+                st.success("Conversion successful")
             else:
                 error_msg = cookbook_result.get("error", "Unknown error")
                 st.error(f"❌ Conversion failed: {error_msg}")
@@ -1739,47 +1820,468 @@ def _display_conversion_report(result_text: str):
     st.code(result_text, language="markdown")
 
 
+def _validate_output_path(output_path: str) -> Path | None:
+    """
+    Validate and normalize output path.
+
+    Args:
+        output_path: Path string to validate.
+
+    Returns:
+        Normalized Path object or None if invalid.
+
+    """
+    try:
+        safe_output_path = _normalize_path(str(output_path))
+        base_dir = Path.cwd().resolve()
+        # Use centralised containment validation
+        validated = _ensure_within_base_path(safe_output_path, base_dir)
+        return validated if validated.exists() else None
+    except ValueError:
+        return None
+
+
+def _collect_role_files(safe_output_path: Path) -> list[tuple[Path, Path]]:
+    """
+    Collect all files from converted roles directory.
+
+    Args:
+        safe_output_path: Validated base path.
+
+    Returns:
+        List of (file_path, archive_name) tuples.
+
+    """
+    files_to_archive = []
+    # Path is already normalized; validate files within the output path are contained
+    base_path = safe_output_path
+
+    for root, _dirs, files in os.walk(base_path):
+        root_path = _ensure_within_base_path(Path(root), base_path)
+
+        for file in files:
+            safe_name = _sanitize_filename(file)
+            candidate_path = _ensure_within_base_path(root_path / safe_name, base_path)
+            try:
+                # Ensure each file is contained within base
+                arcname = candidate_path.relative_to(base_path)
+                files_to_archive.append((candidate_path, arcname))
+            except ValueError:
+                continue
+
+    return files_to_archive
+
+
+def _create_roles_zip_archive(safe_output_path: Path) -> bytes:
+    """
+    Create ZIP archive of converted roles.
+
+    Args:
+        safe_output_path: Validated path containing roles.
+
+    Returns:
+        ZIP archive as bytes.
+
+    """
+    zip_buffer = io.BytesIO()
+    with zipfile.ZipFile(zip_buffer, "w", zipfile.ZIP_DEFLATED) as zip_file:
+        files_to_archive = _collect_role_files(safe_output_path)
+        for file_path, arcname in files_to_archive:
+            zip_file.write(str(file_path), str(arcname))
+
+    zip_buffer.seek(0)
+    return zip_buffer.getvalue()
+
+
+def _get_git_path() -> str:
+    """
+    Find git executable in system PATH.
+
+    Returns:
+        The path to git executable.
+
+    Raises:
+        FileNotFoundError: If git is not found in PATH.
+
+    """
+    # Try common locations first
+    common_paths = [
+        "/usr/bin/git",
+        "/usr/local/bin/git",
+        "/opt/homebrew/bin/git",
+    ]
+
+    for path in common_paths:
+        if Path(path).exists():
+            return path
+
+    # Try to find git using 'which' command
+    try:
+        result = subprocess.run(
+            ["which", "git"],
+            capture_output=True,
+            text=True,
+            check=True,
+            timeout=5,
+        )
+        git_path = result.stdout.strip()
+        if git_path and Path(git_path).exists():
+            return git_path
+    except (
+        subprocess.CalledProcessError,
+        FileNotFoundError,
+        subprocess.TimeoutExpired,
+    ) as exc:
+        # Non-fatal: failure to use 'which' just means we fall back to other checks.
+        st.write(f"Debug: 'which git' probe failed: {exc}")
+
+    # Last resort: try the basic 'git' command
+    try:
+        result = subprocess.run(
+            ["git", "--version"],
+            capture_output=True,
+            text=True,
+            check=True,
+            timeout=5,
+        )
+        if result.returncode == 0:
+            return "git"
+    except (
+        subprocess.CalledProcessError,
+        FileNotFoundError,
+        subprocess.TimeoutExpired,
+    ) as exc:
+        # Non-fatal: failure to run 'git --version' just means git is not available.
+        st.write(f"Debug: 'git --version' probe failed: {exc}")
+
+    raise FileNotFoundError(
+        "git executable not found. Please ensure Git is installed and in your "
+        "PATH. Visit https://git-scm.com/downloads for installation instructions."
+    )
+
+
+def _determine_num_recipes(cookbook_path: str, num_roles: int) -> int:
+    """Determine the number of recipes from the cookbook path."""
+    if not cookbook_path:
+        return num_roles
+
+    recipes_dir = Path(cookbook_path) / "recipes"
+    return len(list(recipes_dir.glob("*.rb"))) if recipes_dir.exists() else 1
+
+
+def _get_roles_directory(temp_repo: Path) -> Path:
+    """Get or create the roles directory in the repository."""
+    roles_dir = temp_repo / "roles"
+    if not roles_dir.exists():
+        roles_dir = (
+            temp_repo / "ansible_collections" / "souschef" / "platform" / "roles"
+        )
+
+    roles_dir.mkdir(parents=True, exist_ok=True)
+    return roles_dir
+
+
+def _copy_roles_to_repository(output_path: str, roles_dir: Path) -> None:
+    """Copy roles from output_path to the repository roles directory."""
+    output_path_obj = Path(output_path)
+    if not output_path_obj.exists():
+        return
+
+    for role_dir in output_path_obj.iterdir():
+        if not role_dir.is_dir():
+            continue
+
+        dest_dir = roles_dir / role_dir.name
+        if dest_dir.exists():
+            shutil.rmtree(dest_dir)
+        shutil.copytree(role_dir, dest_dir)
+
+
+def _commit_repository_changes(temp_repo: Path, num_roles: int) -> None:
+    """Commit repository changes to git."""
+    try:
+        subprocess.run(
+            ["git", "add", "."],
+            cwd=temp_repo,
+            check=True,
+            capture_output=True,
+            text=True,
+        )
+        subprocess.run(
+            [
+                "git",
+                "commit",
+                "-m",
+                f"Add converted Ansible roles ({num_roles} role(s))",
+            ],
+            cwd=temp_repo,
+            check=True,
+            capture_output=True,
+            text=True,
+        )
+    except subprocess.CalledProcessError:
+        # Ignore if there's nothing to commit
+        pass
+
+
+def _create_ansible_repository(
+    output_path: str, cookbook_path: str = "", num_roles: int = 1
+) -> dict:
+    """Create a complete Ansible repository structure."""
+    try:
+        # Check that git is available early
+        _get_git_path()
+
+        # Create temp directory for the repo (parent directory)
+        temp_parent = tempfile.mkdtemp(prefix="ansible_repo_parent_")
+        temp_repo = Path(temp_parent) / "ansible_repository"
+
+        # Analyse and determine repo type
+        num_recipes = _determine_num_recipes(cookbook_path, num_roles)
+
+        repo_type = analyse_conversion_output(
+            cookbook_path=cookbook_path or output_path,
+            num_recipes=num_recipes,
+            num_roles=num_roles,
+            has_multiple_apps=num_roles > 3,
+            needs_multi_env=True,
+        )
+
+        # Generate the repository
+        result = generate_ansible_repository(
+            output_path=str(temp_repo),
+            repo_type=repo_type,
+            org_name="souschef",
+            init_git=True,
+        )
+
+        if result["success"]:
+            # Copy converted roles into the repository
+            roles_dir = _get_roles_directory(temp_repo)
+            _copy_roles_to_repository(output_path, roles_dir)
+            _commit_repository_changes(temp_repo, num_roles)
+            result["temp_path"] = str(temp_repo)
+
+        return result
+    except Exception as e:
+        return {"success": False, "error": str(e)}
+
+
+def _create_repository_zip(repo_path: str) -> bytes:
+    """Create a ZIP archive of the Ansible repository including git history."""
+    zip_buffer = io.BytesIO()
+    repo_path_obj = Path(repo_path)
+
+    # Files/directories to exclude from the archive
+    exclude_names = {".DS_Store", "Thumbs.db", "*.pyc", "__pycache__"}
+
+    # Important dotfiles to always include
+    include_dotfiles = {".gitignore", ".gitattributes", ".editorconfig"}
+
+    with zipfile.ZipFile(zip_buffer, "w", zipfile.ZIP_DEFLATED) as zip_file:
+        for file_path in repo_path_obj.rglob("*"):
+            if file_path.is_file():
+                # Skip excluded files
+                if file_path.name in exclude_names:
+                    continue
+                # Include .git directory, .gitignore, and other important dotfiles
+                # Skip hidden dotfiles unless they're in our include list or in .git
+                if (
+                    file_path.name.startswith(".")
+                    and ".git" not in str(file_path)
+                    and file_path.name not in include_dotfiles
+                ):
+                    continue
+
+                arcname = file_path.relative_to(repo_path_obj.parent)
+                zip_file.write(str(file_path), str(arcname))
+
+    zip_buffer.seek(0)
+    return zip_buffer.getvalue()
+
+
 def _display_conversion_download_options(conversion_result: dict):
     """Display download options for converted roles."""
-    if "output_path" in conversion_result:
-
+    if "output_path" not in conversion_result:
+        return
 
-
-
-        try:
-            from souschef.core.path_utils import _normalize_path
+    st.subheader("Download Converted Roles")
+    output_path = conversion_result["output_path"]
 
-
-
-
-
-            return
+    safe_output_path = _validate_output_path(output_path)
+    if safe_output_path is None:
+        st.error("Invalid output path")
+        return
 
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
+    if safe_output_path.exists():
+        _display_role_download_buttons(safe_output_path)
+        repo_placeholder = st.container()
+        _display_generated_repo_section(repo_placeholder)
+        st.info(f"Roles saved to: {output_path}")
+    else:
+        st.warning("Output directory not found for download")
+
+
+def _create_repo_callback(safe_output_path: Path) -> None:
+    """Handle repository creation callback."""
+    try:
+        num_roles = len(
+            [
+                d
+                for d in safe_output_path.iterdir()
+                if d.is_dir() and not d.name.startswith(".")
+            ]
+        )
+
+        repo_result = _create_ansible_repository(
+            output_path=str(safe_output_path),
+            cookbook_path="",
+            num_roles=num_roles,
+        )
 
-
+        if repo_result["success"]:
+            st.session_state.generated_repo = repo_result
+            st.session_state.repo_created_successfully = True
+            st.session_state.repo_creation_error = None
         else:
-
+            _handle_repo_creation_failure(repo_result.get("error", "Unknown error"))
+    except Exception as e:
+        _handle_repo_creation_failure(f"Exception: {str(e)}")
+
+
+def _handle_repo_creation_failure(error_msg: str) -> None:
+    """Handle repository creation failure."""
+    st.session_state.repo_creation_error = error_msg
+    st.session_state.generated_repo = None
+    st.session_state.repo_created_successfully = False
+
+
+def _display_role_download_buttons(safe_output_path: Path) -> None:
+    """Display download buttons for roles and repository creation."""
+    col1, col2 = st.columns([1, 1])
+
+    with col1:
+        archive_data = _create_roles_zip_archive(safe_output_path)
+        st.download_button(
+            label="Download All Ansible Roles",
+            data=archive_data,
+            file_name="ansible_roles_holistic.zip",
+            mime=MIME_TYPE_ZIP,
+            help="Download ZIP archive containing all converted Ansible roles",
+            key="download_holistic_roles",
+        )
+
+    with col2:
+        st.button(
+            "Create Ansible Repository",
+            help="Generate a complete Ansible repository structure with these roles",
+            key="create_repo_from_roles",
+            on_click=lambda: _create_repo_callback(safe_output_path),
+        )
+
+    if st.session_state.get("repo_creation_error"):
+        st.error(
+            f"Failed to create repository: {st.session_state.repo_creation_error}"
+        )
+
+
+def _display_generated_repo_section(placeholder) -> None:
+    """Display the generated repository section if it exists."""
+    if not _should_display_generated_repo():
+        return
+
+    repo_result = st.session_state.generated_repo
+
+    with placeholder:
+        st.markdown("---")
+        st.success("Ansible Repository Generated!")
+        _display_repo_info(repo_result)
+        _display_repo_structure(repo_result)
+        _display_repo_download(repo_result)
+        _display_repo_git_instructions()
+        _display_repo_clear_button(repo_result)
+
+
+def _should_display_generated_repo() -> bool:
+    """Check if generated repo should be displayed."""
+    return "generated_repo" in st.session_state and st.session_state.get(
+        "repo_created_successfully", False
+    )
+
+
+def _display_repo_info(repo_result: dict) -> None:
+    """Display repository information."""
+    repo_type = repo_result["repo_type"].replace("_", " ").title()
+    files_count = len(repo_result["files_created"])
+
+    st.info(
+        f"**Repository Type:** {repo_type}\n\n"
+        f"**Files Created:** {files_count}\n\n"
+        "Includes: ansible.cfg, requirements.yml, inventory, playbooks, roles"
+    )
+
+
+def _display_repo_structure(repo_result: dict) -> None:
+    """Display repository structure."""
+    with st.expander("Repository Structure", expanded=True):
+        files_sorted = sorted(repo_result["files_created"])
+        st.code("\n".join(files_sorted[:40]), language="text")
+        if len(files_sorted) > 40:
+            remaining = len(files_sorted) - 40
+            st.caption(f"... and {remaining} more files")
+
+
+def _display_repo_download(repo_result: dict) -> None:
+    """Display repository download button."""
+    repo_zip = _create_repository_zip(repo_result["temp_path"])
+    st.download_button(
+        label="Download Ansible Repository",
+        data=repo_zip,
+        file_name="ansible_repository.zip",
+        mime=MIME_TYPE_ZIP,
+        help="Download complete Ansible repository as ZIP archive",
+        key="download_generated_repo",
+    )
+
+
+def _display_repo_git_instructions() -> None:
+    """Display git clone instructions."""
+    with st.expander("Git Clone Instructions", expanded=True):
+        st.markdown("""
+        After downloading and extracting the repository:
+
+        ```bash
+        cd ansible_repository
+
+        # Repository is already initialized with git!
+        # Check commits:
+        git log --oneline
+
+        # Push to remote repository:
+        git remote add origin <your-git-url>
+        git push -u origin master
+        ```
+
+        **Repository includes:**
+        - ✅ All converted roles with tasks
+        - ✅ Ansible configuration (`ansible.cfg`)
+        - ✅ `.gitignore` for Ansible projects
+        - ✅ `.gitattributes` for consistent line endings
+        - ✅ `.editorconfig` for consistent coding styles
+        - ✅ README with usage instructions
+        - ✅ **Git repository initialized with all files committed**
+        """)
+
+
+def _display_repo_clear_button(repo_result: dict) -> None:
+    """Display repository clear button."""
+    if st.button("Clear Repository", key="clear_generated_repo"):
+        with contextlib.suppress(Exception):
+            shutil.rmtree(repo_result["temp_path"])
+        del st.session_state.generated_repo
+        if "repo_created_successfully" in st.session_state:
+            del st.session_state.repo_created_successfully
+        st.rerun()
 
 
 def _handle_dashboard_upload():
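Note: both `_create_roles_zip_archive` and `_create_repository_zip` build the archive entirely in memory so Streamlit can hand the bytes straight to `st.download_button`. The core pattern, reduced to its essentials (illustrative):

```python
import io
import zipfile
from pathlib import Path


def zip_directory_to_bytes(root: Path) -> bytes:
    """Zip a directory tree into an in-memory buffer and return the bytes."""
    buffer = io.BytesIO()
    with zipfile.ZipFile(buffer, "w", zipfile.ZIP_DEFLATED) as zf:
        for path in sorted(root.rglob("*")):
            if path.is_file():
                # Store entries relative to the parent so the ZIP keeps
                # the top-level directory name, as the diff's helpers do.
                zf.write(path, path.relative_to(root.parent))
    buffer.seek(0)
    return buffer.getvalue()
```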
@@ -1959,18 +2461,23 @@ def _update_progress(status_text, cookbook_name, current, total):
 
 def _find_cookbook_directory(cookbook_path, cookbook_name):
     """Find the directory for a specific cookbook by checking metadata."""
-
-
-
-
-
-
-
-
-
-
-
-
+    try:
+        normalized_path = _normalize_path(cookbook_path)
+        for d in normalized_path.iterdir():
+            if d.is_dir():
+                # Check if this directory contains a cookbook with the matching name
+                metadata_file = d / METADATA_FILENAME
+                if metadata_file.exists():
+                    try:
+                        metadata = parse_cookbook_metadata(str(metadata_file))
+                        if metadata.get("name") == cookbook_name:
+                            return d
+                    except (ValueError, OSError, KeyError):
+                        # If metadata parsing fails, skip this directory
+                        continue
+    except ValueError:
+        # Invalid path, return None
+        return None
     return None
 
 
@@ -2393,7 +2900,7 @@ def _build_dependency_graph(cookbook_path: str, selected_cookbooks: list[str]) -
             # Parse the markdown response to extract dependencies
             dependencies = _extract_dependencies_from_markdown(dep_analysis)
             dependency_graph[cookbook_name] = dependencies
-        except
+        except (ValueError, OSError, RuntimeError):
             # If dependency analysis fails, assume no dependencies
             dependency_graph[cookbook_name] = []
 
@@ -2931,8 +3438,18 @@ def _display_analysis_summary(results, total_cookbooks):
         st.metric("Successfully Analysed", f"{successful}/{total_cookbooks}")
 
     with col2:
-
-
+        total_hours_manual = sum(r.get("estimated_hours", 0) for r in results)
+        total_hours_souschef = sum(
+            r.get("estimated_hours_with_souschef", r.get("estimated_hours", 0) * 0.5)
+            for r in results
+        )
+        time_saved = total_hours_manual - total_hours_souschef
+        st.metric(
+            "Manual Effort (hrs)",
+            f"{total_hours_manual:.1f}",
+            delta=f"With AI: {total_hours_souschef:.1f}h (save {time_saved:.1f}h)",
+            delta_color="inverse",
+        )
 
     with col3:
         complexities = [r.get("complexity", "Unknown") for r in results]
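Note: the summary metric renders the AI-assisted figure as a Streamlit delta with `delta_color="inverse"`, which flips the usual red/green colouring so that a reduction in hours reads as good. A minimal usage sketch with illustrative numbers:

```python
import streamlit as st

manual_hours = 40.0    # illustrative totals
souschef_hours = 20.0

st.metric(
    "Manual Effort (hrs)",
    f"{manual_hours:.1f}",
    delta=f"With AI: {souschef_hours:.1f}h (save {manual_hours - souschef_hours:.1f}h)",
    delta_color="inverse",  # lower is better, so colour the delta accordingly
)
```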
@@ -3018,7 +3535,7 @@ def _display_single_cookbook_details(result):
         st.metric("Dependencies", result.get("dependencies", 0))
 
     # Complexity and effort
-    col1, col2 = st.columns(
+    col1, col2, col3 = st.columns(3)
     with col1:
         complexity = result.get("complexity", "Unknown")
         if complexity == "High":
@@ -3028,8 +3545,20 @@
         else:
             st.metric("Complexity", complexity, delta="Low")
     with col2:
-
-        st.metric("
+        hours_manual = result.get("estimated_hours", 0)
+        st.metric("Manual Effort (hrs)", f"{hours_manual:.1f}")
+    with col3:
+        hours_souschef = result.get(
+            "estimated_hours_with_souschef", hours_manual * 0.5
+        )
+        time_saved = hours_manual - hours_souschef
+        savings_pct = int((time_saved / hours_manual) * 100)
+        st.metric(
+            "With SousChef (hrs)",
+            f"{hours_souschef:.1f}",
+            delta=f"Save {time_saved:.1f}h ({savings_pct}%)",
+            delta_color="inverse",
+        )
 
     # Path
     st.write(f"**Cookbook Path:** {result['path']}")
@@ -3125,6 +3654,12 @@ def _convert_and_download_playbooks(results):
     except Exception as e:
         st.warning(f"Could not stage playbooks for validation: {e}")
 
+    # Store conversion results in session state to persist across reruns
+    st.session_state.conversion_results = {
+        "playbooks": playbooks,
+        "templates": templates,
+    }
+
     _handle_playbook_download(playbooks, templates)
 
 
@@ -3306,6 +3841,19 @@ def _handle_playbook_download(playbooks: list, templates: list | None = None) ->
         st.error("No playbooks were successfully generated.")
         return
 
+    # Add back to analysis button
+    col1, _ = st.columns([1, 4])
+    with col1:
+        if st.button(
+            "← Back to Analysis",
+            help="Return to analysis results",
+            key="back_to_analysis_from_conversion",
+        ):
+            # Clear conversion results to go back to analysis view
+            st.session_state.conversion_results = None
+            st.session_state.generated_playbook_repo = None
+            st.rerun()
+
     templates = templates or []
     playbook_archive = _create_playbook_archive(playbooks, templates)
 
@@ -3320,8 +3868,10 @@
     # Show summary
     _display_playbook_summary(len(playbooks), template_count)
 
-    # Provide download button
-    _display_download_button(
+    # Provide download button and repository creation
+    _display_download_button(
+        len(playbooks), template_count, playbook_archive, playbooks
+    )
 
     # Show previews
     _display_playbook_previews(playbooks)
@@ -3346,24 +3896,204 @@ def _display_playbook_summary(playbook_count: int, template_count: int) -> None:
     )
 
 
-def
-
-
-    """Display the download button for the archive."""
-    download_label = f"Download Ansible Playbooks ({playbook_count} playbooks"
+def _build_download_label(playbook_count: int, template_count: int) -> str:
+    """Build the download button label."""
+    label = f"Download Ansible Playbooks ({playbook_count} playbooks"
     if template_count > 0:
-
-
+        label += f", {template_count} templates"
+    label += ")"
+    return label
+
+
+def _write_playbooks_to_temp_dir(playbooks: list, temp_dir: str) -> None:
+    """Write playbooks to temporary directory."""
+    for playbook in playbooks:
+        cookbook_name = _sanitize_filename(playbook["cookbook_name"])
+        recipe_name = _sanitize_filename(playbook["recipe_file"].replace(".rb", ""))
+        playbook_file = Path(temp_dir) / f"{cookbook_name}_{recipe_name}.yml"
+        playbook_file.write_text(playbook["playbook_content"])
+
+
+def _get_playbooks_dir(repo_result: dict) -> Path:
+    """Get or create the playbooks directory in the repository."""
+    playbooks_dir = Path(repo_result["temp_path"]) / "playbooks"
+    if not playbooks_dir.exists():
+        playbooks_dir = (
+            Path(repo_result["temp_path"])
+            / "ansible_collections"
+            / "souschef"
+            / "platform"
+            / "playbooks"
+        )
+    playbooks_dir.mkdir(parents=True, exist_ok=True)
+    return playbooks_dir
+
+
+def _copy_playbooks_to_repo(temp_dir: str, playbooks_dir: Path) -> None:
+    """Copy playbooks from temp directory to repository."""
+    for playbook_file in Path(temp_dir).glob("*.yml"):
+        shutil.copy(playbook_file, playbooks_dir / playbook_file.name)
+
+
+def _commit_playbooks_to_git(temp_dir: str, repo_path: str) -> None:
+    """Commit playbooks to git repository."""
+    try:
+        subprocess.run(
+            ["git", "add", "."],
+            cwd=repo_path,
+            check=True,
+            capture_output=True,
+            text=True,
+        )
+        num_playbooks = len(list(Path(temp_dir).glob("*.yml")))
+        commit_msg = f"Add converted Ansible playbooks ({num_playbooks} playbook(s))"
+        subprocess.run(
+            ["git", "commit", "-m", commit_msg],
+            cwd=repo_path,
+            check=True,
+            capture_output=True,
+            text=True,
+        )
+    except subprocess.CalledProcessError:
+        # If there's nothing to commit, that's okay
+        pass
+
+
+def _handle_repo_creation(temp_dir: str, playbooks: list) -> None:
+    """Handle repository creation and setup."""
+    repo_result = _create_ansible_repository(
+        output_path=temp_dir,
+        cookbook_path="",
+        num_roles=len({p["cookbook_name"] for p in playbooks}),
+    )
+
+    if not repo_result["success"]:
+        st.error(
+            f"Failed to create repository: {repo_result.get('error', 'Unknown error')}"
+        )
+        return
 
+    playbooks_dir = _get_playbooks_dir(repo_result)
+    _copy_playbooks_to_repo(temp_dir, playbooks_dir)
+    _commit_playbooks_to_git(temp_dir, repo_result["temp_path"])
+    st.session_state.generated_playbook_repo = repo_result
+
+
+def _display_repo_structure_section(repo_result: dict) -> None:
+    """Display repository structure in an expander."""
+    with st.expander("Repository Structure", expanded=True):
+        files_sorted = sorted(repo_result["files_created"])
+        st.code("\n".join(files_sorted[:40]), language="text")
+        if len(files_sorted) > 40:
+            remaining = len(files_sorted) - 40
+            st.caption(f"... and {remaining} more files")
+
+
+def _display_repo_info_section(repo_result: dict) -> None:
+    """Display repository information."""
+    repo_type = repo_result["repo_type"].replace("_", " ").title()
+    st.info(
+        f"**Repository Type:** {repo_type}\n\n"
+        f"**Files Created:** {len(repo_result['files_created'])}\n\n"
+        "Includes: ansible.cfg, requirements.yml, inventory, playbooks"
+    )
+
+
+def _display_generated_repo_section_internal(repo_result: dict) -> None:
+    """Display the complete generated repository section."""
+    st.markdown("---")
+    st.success("Ansible Playbook Repository Generated!")
+    _display_repo_info_section(repo_result)
+    _display_repo_structure_section(repo_result)
+
+    repo_zip = _create_repository_zip(repo_result["temp_path"])
     st.download_button(
-        label=
-        data=
-        file_name="
-        mime=
-        help=
-
+        label="Download Ansible Repository",
+        data=repo_zip,
+        file_name="ansible_playbook_repository.zip",
+        mime=MIME_TYPE_ZIP,
+        help="Download complete Ansible repository as ZIP archive",
+        key="download_playbook_repo",
     )
 
+    with st.expander("Git Clone Instructions", expanded=True):
+        st.markdown("""
+        After downloading and extracting the repository:
+
+        ```bash
+        cd ansible_playbook_repository
+
+        # Repository is already initialized with git!
+        # Check commits:
+        git log --oneline
+
+        # Push to remote repository:
+        git remote add origin <your-git-url>
+        git push -u origin master
+        ```
+
+        **What's included:**
+        - ✅ Ansible configuration (`ansible.cfg`)
+        - ✅ Dependency management (`requirements.yml`)
+        - ✅ Inventory structure
+        - ✅ All converted playbooks
+        - ✅ `.gitignore` for Ansible projects
+        - ✅ `.gitattributes` for consistent line endings
+        - ✅ `.editorconfig` for consistent coding styles
+        - ✅ README with usage instructions
+        - ✅ **Git repository initialized with all files committed**
+        """)
+
+    if st.button("Clear Repository", key="clear_playbook_repo"):
+        if "generated_playbook_repo" in st.session_state:
+            with contextlib.suppress(Exception):
+                shutil.rmtree(repo_result["temp_path"])
+            del st.session_state.generated_playbook_repo
+        st.rerun()
+
+
+def _display_download_button(
+    playbook_count: int,
+    template_count: int,
+    archive_data: bytes,
+    playbooks: list | None = None,
+) -> None:
+    """Display the download button for the archive."""
+    download_label = _build_download_label(playbook_count, template_count)
+
+    col1, col2 = st.columns([1, 1])
+
+    with col1:
+        st.download_button(
+            label=download_label,
+            data=archive_data,
+            file_name="ansible_playbooks.zip",
+            mime=MIME_TYPE_ZIP,
+            help=f"Download ZIP archive containing {playbook_count} playbooks "
+            f"and {template_count} templates",
+            key="download_playbooks_archive",
+        )
+
+    with col2:
+        if st.button(
+            "Create Ansible Repository",
+            help=(
+                "Generate a complete Ansible repository structure with these playbooks"
+            ),
+            key="create_repo_from_playbooks",
+        ):
+            with st.spinner("Creating Ansible repository with playbooks..."):
+                temp_playbook_dir = tempfile.mkdtemp(prefix="playbooks_")
+                if playbooks:
+                    _write_playbooks_to_temp_dir(playbooks, temp_playbook_dir)
+                _handle_repo_creation(temp_playbook_dir, playbooks)
+
+    # Display generated repository options for playbooks
+    if "generated_playbook_repo" in st.session_state:
+        _display_generated_repo_section_internal(
+            st.session_state.generated_playbook_repo
+        )
+
 
 def _display_playbook_previews(playbooks: list) -> None:
     """Display preview of generated playbooks."""