mcp-souschef 3.0.0-py3-none-any.whl → 3.2.0-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- {mcp_souschef-3.0.0.dist-info → mcp_souschef-3.2.0.dist-info}/METADATA +83 -380
- mcp_souschef-3.2.0.dist-info/RECORD +47 -0
- souschef/__init__.py +2 -10
- souschef/assessment.py +336 -181
- souschef/ci/common.py +1 -1
- souschef/cli.py +37 -13
- souschef/converters/playbook.py +119 -48
- souschef/core/__init__.py +6 -1
- souschef/core/path_utils.py +233 -19
- souschef/deployment.py +10 -3
- souschef/generators/__init__.py +13 -0
- souschef/generators/repo.py +695 -0
- souschef/parsers/attributes.py +1 -1
- souschef/parsers/habitat.py +1 -1
- souschef/parsers/inspec.py +25 -2
- souschef/parsers/metadata.py +5 -3
- souschef/parsers/recipe.py +1 -1
- souschef/parsers/resource.py +1 -1
- souschef/parsers/template.py +1 -1
- souschef/server.py +426 -188
- souschef/ui/app.py +24 -30
- souschef/ui/pages/cookbook_analysis.py +837 -163
- mcp_souschef-3.0.0.dist-info/RECORD +0 -46
- souschef/converters/cookbook_specific.py.backup +0 -109
- {mcp_souschef-3.0.0.dist-info → mcp_souschef-3.2.0.dist-info}/WHEEL +0 -0
- {mcp_souschef-3.0.0.dist-info → mcp_souschef-3.2.0.dist-info}/entry_points.txt +0 -0
- {mcp_souschef-3.0.0.dist-info → mcp_souschef-3.2.0.dist-info}/licenses/LICENSE +0 -0
@@ -1,10 +1,12 @@
 """Cookbook Analysis Page for SousChef UI."""
 
 import contextlib
+import inspect
 import io
 import json
 import os
 import shutil
+import subprocess
 import sys
 import tarfile
 import tempfile
@@ -33,6 +35,15 @@ from souschef.core.metrics import (
     get_timeline_weeks,
     validate_metrics_consistency,
 )
+from souschef.core.path_utils import (
+    _ensure_within_base_path,
+    _normalize_path,
+    _safe_join,
+)
+from souschef.generators.repo import (
+    analyse_conversion_output,
+    generate_ansible_repository,
+)
 from souschef.parsers.metadata import parse_cookbook_metadata
 
 # AI Settings
@@ -78,7 +89,12 @@ def _get_secure_ai_config_path() -> Path:
     if config_dir.is_symlink():
         raise ValueError("AI config directory cannot be a symlink")
 
-
+    config_file = config_dir / "ai_config.json"
+    # Ensure config file has secure permissions if it exists
+    if config_file.exists():
+        with contextlib.suppress(OSError):
+            config_file.chmod(0o600)
+    return config_file
 
 
 def load_ai_settings() -> dict[str, str | float | int]:
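The hunk above tightens `_get_secure_ai_config_path` so the config file itself, not just its directory, is locked down to owner read/write. A standalone sketch of the same hardening pattern (function name and path are illustrative, not the package's API):

```python
import contextlib
from pathlib import Path

def ensure_private_file(path: Path) -> Path:
    """Restrict an existing file to owner read/write (0o600)."""
    if path.exists():
        # chmod may fail on exotic filesystems; treat that as non-fatal,
        # mirroring the contextlib.suppress(OSError) idiom in the diff.
        with contextlib.suppress(OSError):
            path.chmod(0o600)
    return path

# Hypothetical usage:
# config = ensure_private_file(Path.home() / ".config" / "app" / "ai_config.json")
```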
@@ -241,6 +257,7 @@ METADATA_STATUS_NO = "No"
 ANALYSIS_STATUS_ANALYSED = "Analysed"
 ANALYSIS_STATUS_FAILED = "Failed"
 METADATA_COLUMN_NAME = "Has Metadata"
+MIME_TYPE_ZIP = "application/zip"
 
 # Security limits for archive extraction
 MAX_ARCHIVE_SIZE = 100 * 1024 * 1024  # 100MB total
@@ -375,7 +392,12 @@ license 'All rights reserved'
 description 'Automatically extracted cookbook from archive'
 version '1.0.0'
 """
-
+    try:
+        metadata_file = synthetic_cookbook_dir / METADATA_FILENAME
+        metadata_file.parent.mkdir(parents=True, exist_ok=True)
+        metadata_file.write_text(metadata_content)
+    except OSError as e:
+        raise OSError(f"Failed to write metadata file: {e}") from e
 
     return extraction_dir
 
@@ -421,15 +443,14 @@ def _extract_zip_securely(archive_path: Path, extraction_dir: Path) -> None:
         # Safe extraction with manual path handling
         for info in zip_ref.filelist:
             # Construct safe relative path
+
             safe_path = _get_safe_extraction_path(info.filename, extraction_dir)
 
             if info.is_dir():
-                # Create directory
                 safe_path.mkdir(parents=True, exist_ok=True)
             else:
-                # Create parent directories if needed
                 safe_path.parent.mkdir(parents=True, exist_ok=True)
-
+
                 with zip_ref.open(info) as source, safe_path.open("wb") as target:
                     # Read in chunks to control memory usage
                     while True:
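The extraction loop above writes each member through `zip_ref.open(...)` rather than `extractall`, so member contents can be streamed in bounded chunks. A minimal sketch of the chunked-copy idiom; the size constants are assumptions (the module's real limits, like `MAX_ARCHIVE_SIZE`, live elsewhere in this file):

```python
import zipfile
from pathlib import Path

CHUNK_SIZE = 64 * 1024               # assumed chunk size
MAX_MEMBER_SIZE = 10 * 1024 * 1024   # assumed per-member cap

def copy_member_chunked(zip_ref: zipfile.ZipFile, info: zipfile.ZipInfo, dest: Path) -> None:
    """Stream one archive member to disk, enforcing a size cap while copying."""
    written = 0
    with zip_ref.open(info) as source, dest.open("wb") as target:
        while True:
            chunk = source.read(CHUNK_SIZE)
            if not chunk:
                break
            written += len(chunk)
            if written > MAX_MEMBER_SIZE:
                raise ValueError(f"Member too large: {info.filename}")
            target.write(chunk)
```

Streaming matters because the cap is enforced on bytes actually produced, so a member whose header understates its expanded size still trips the limit.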
@@ -473,7 +494,16 @@ def _validate_zip_file_security(info, file_count: int, total_size: int) -> None:
 def _extract_tar_securely(
     archive_path: Path, extraction_dir: Path, gzipped: bool
 ) -> None:
-    """
+    """
+    Extract TAR archive with resource consumption controls (S5042).
+
+    Resource consumption is controlled via:
+    - Pre-scanning all members before extraction
+    - Validating file sizes, counts, and directory depth
+    - Using tarfile.filter='data' (Python 3.12+) to prevent symlink traversal
+    - Limiting extraction to validated safe paths
+
+    """
     mode = "r:gz" if gzipped else "r"
 
     if not archive_path.is_file():
@@ -483,11 +513,22 @@ def _extract_tar_securely(
         raise ValueError(f"Invalid or corrupted TAR archive: {archive_path.name}")
 
     try:
-
-
-        )
+        open_kwargs: dict[str, Any] = {"name": str(archive_path), "mode": mode}
+
+        # Apply safe filter if available (Python 3.12+) to prevent traversal attacks.
+        # For older Python versions, resource consumption is controlled via pre-scanning
+        # and member validation before extraction.
+        if "filter" in inspect.signature(tarfile.open).parameters:
+            # Use 'data' filter to prevent extraction of special files and symlinks
+            open_kwargs["filter"] = "data"
+
+        with tarfile.open(**open_kwargs) as tar_ref:
             members = tar_ref.getmembers()
+            # Pre-validate all members before allowing extraction
+            # This controls resource consumption and prevents
+            # zip bombs/decompression bombs
             _pre_scan_tar_members(members)
+            # Extract only validated members to pre-validated safe paths
             _extract_tar_members(tar_ref, members, extraction_dir)
     except tarfile.TarError as e:
         raise ValueError(f"Invalid or corrupted TAR archive: {e}") from e
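The new code probes `tarfile.open` for a `filter` parameter before requesting the `data` filter. For reference, the stdlib's documented probe for PEP 706 support is `hasattr(tarfile, "data_filter")`, with the filter passed at extraction time; a sketch of that variant:

```python
import tarfile
from pathlib import Path

def extract_with_data_filter(archive: Path, dest: Path) -> None:
    """Extract a tar archive, applying the PEP 706 'data' filter when available."""
    with tarfile.open(archive, "r:*") as tar:
        if hasattr(tarfile, "data_filter"):
            # Python 3.12+ (and security backports): rejects absolute paths,
            # parent-directory escapes, and links pointing outside dest.
            tar.extractall(dest, filter="data")
        else:
            # Older interpreters: members must be validated by hand first,
            # e.g. with a pre-scan like _pre_scan_tar_members below.
            tar.extractall(dest)
```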
@@ -496,10 +537,20 @@ def _extract_tar_securely(
 
 
 def _pre_scan_tar_members(members):
-    """
+    """
+    Pre-scan TAR members to control resource consumption (S5042).
+
+    Validates all members before extraction to prevent:
+    - Compression/decompression bombs (via size limits)
+    - Excessive memory consumption (via file count limits)
+    - Directory traversal attacks (via depth limits)
+    - Malicious file inclusion (via extension and type checks)
+
+    """
     total_size = 0
     for file_count, member in enumerate(members, start=1):
         total_size += member.size
+        # Validate member and accumulate size for bounds checking
        _validate_tar_file_security(member, file_count, total_size)
 
 
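The docstring above lists the classes of attack the pre-scan defends against. A compact sketch of such a scan; the limits are assumptions, and the real per-member checks live in `_validate_tar_file_security`, which is outside this diff:

```python
import tarfile

MAX_TOTAL_SIZE = 100 * 1024 * 1024  # assumed, mirroring MAX_ARCHIVE_SIZE
MAX_FILE_COUNT = 10_000             # assumed

def pre_scan(members: list[tarfile.TarInfo]) -> None:
    """Reject archives that would blow size or count budgets before extracting."""
    total = 0
    for count, member in enumerate(members, start=1):
        if count > MAX_FILE_COUNT:
            raise ValueError("Archive contains too many files")
        total += member.size  # declared size of the member once decompressed
        if total > MAX_TOTAL_SIZE:
            raise ValueError("Archive expands beyond the size budget")
        if not (member.isfile() or member.isdir()):
            raise ValueError(f"Unsupported member type: {member.name}")
```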
@@ -593,29 +644,11 @@ def _get_safe_extraction_path(filename: str, extraction_dir: Path) -> Path:
     ):
         raise ValueError(f"Path traversal or absolute path detected: {filename}")
 
-    #
+    # Normalise separators and join using a containment-checked join
     normalized = filename.replace("\\", "/").strip("/")
-
-
-
-    for part in normalized.split("/"):
-        if part == "" or part == ".":
-            continue
-        elif part == "..":
-            # Remove parent directory if we have one
-            if parts:
-                parts.pop()
-        else:
-            parts.append(part)
-
-    # Join parts back and resolve against extraction_dir
-    safe_path = extraction_dir / "/".join(parts)
-
-    # Ensure the final path is still within extraction_dir
-    try:
-        safe_path.resolve().relative_to(extraction_dir.resolve())
-    except ValueError:
-        raise ValueError(f"Path traversal detected: {filename}") from None
+    safe_path = _ensure_within_base_path(
+        _safe_join(extraction_dir.resolve(), normalized), extraction_dir.resolve()
+    )
 
     return safe_path
 
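This hunk swaps a hand-rolled `..`-squashing loop for the shared `_safe_join`/`_ensure_within_base_path` helpers. The core idiom both rely on is `Path.resolve()` followed by `relative_to`, which raises `ValueError` on escape; a minimal sketch (helper name illustrative):

```python
from pathlib import Path

def ensure_within(candidate: Path, base: Path) -> Path:
    """Resolve candidate and verify it stays inside base."""
    resolved = candidate.resolve()
    resolved.relative_to(base.resolve())  # raises ValueError on traversal
    return resolved

# ensure_within(Path("/tmp/extract/a/../../etc/passwd"), Path("/tmp/extract"))
# -> ValueError: the resolved path /tmp/etc/passwd is outside /tmp/extract
```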
@@ -742,7 +775,7 @@ def _show_analysis_input() -> None:
             # Store temp_dir in session state to prevent premature cleanup
             st.session_state.temp_dir = temp_dir
             st.success("Archive extracted successfully to temporary location")
-        except
+        except (OSError, zipfile.BadZipFile, tarfile.TarError) as e:
             st.error(f"Failed to extract archive: {e}")
             return
 
@@ -775,9 +808,19 @@ def _display_results_view() -> None:
         st.session_state.analysis_cookbook_path = None
         st.session_state.total_cookbooks = None
         st.session_state.analysis_info_messages = None
+        st.session_state.conversion_results = None
+        st.session_state.generated_playbook_repo = None
         st.session_state.analysis_page_key += 1
         st.rerun()
 
+    # Check if we have conversion results to display
+    if "conversion_results" in st.session_state and st.session_state.conversion_results:
+        # Display conversion results instead of analysis results
+        playbooks = st.session_state.conversion_results["playbooks"]
+        templates = st.session_state.conversion_results["templates"]
+        _handle_playbook_download(playbooks, templates)
+        return
+
     _display_analysis_results(
         st.session_state.analysis_results,
         st.session_state.total_cookbooks,
@@ -832,14 +875,31 @@ def _get_archive_upload_input() -> Any:
     return uploaded_file
 
 
+def _is_within_base(base: Path, candidate: Path) -> bool:
+    """Check whether candidate is contained within base after resolution."""
+    base_real = Path(os.path.realpath(str(base)))
+    candidate_real = Path(os.path.realpath(str(candidate)))
+    try:
+        candidate_real.relative_to(base_real)
+        return True
+    except ValueError:
+        return False
+
+
 def _validate_and_list_cookbooks(cookbook_path: str) -> None:
     """Validate the cookbook path and list available cookbooks."""
     safe_dir = _get_safe_cookbook_directory(cookbook_path)
     if safe_dir is None:
         return
 
-
-
+    # Validate the safe directory before use
+    dir_exists: bool = safe_dir.exists()
+    if dir_exists:
+        dir_is_dir: bool = safe_dir.is_dir()
+        if dir_is_dir:
+            _list_and_display_cookbooks(safe_dir)
+        else:
+            st.error(f"Directory not found: {safe_dir}")
     else:
         st.error(f"Directory not found: {safe_dir}")
 
@@ -853,54 +913,26 @@ def _get_safe_cookbook_directory(cookbook_path):
     """
     try:
         base_dir = Path.cwd().resolve()
-        temp_dir = Path(tempfile.gettempdir()).resolve()
 
         path_str = str(cookbook_path).strip()
-
-
-        if "\x00" in path_str or ":\\" in path_str or "\\" in path_str:
-            st.error(
-                "Invalid path: Path contains null bytes or backslashes, "
-                "which are not allowed."
-            )
+        if not path_str:
+            st.error("Invalid path: Path cannot be empty.")
             return None
 
-        #
-
-            st.error(
-                "Invalid path: Path contains '..' which is not allowed "
-                "for security reasons."
-            )
-            return None
+        # Sanitise the candidate path using shared helper
+        candidate = _normalize_path(path_str)
 
-
-
-
-
-
-
-        resolved_path = (base_dir / user_path).resolve()
+        trusted_bases = [base_dir, Path(tempfile.gettempdir()).resolve()]
+        for base in trusted_bases:
+            try:
+                return _ensure_within_base_path(candidate, base)
+            except ValueError:
+                continue
 
-
-        try:
-            resolved_path.relative_to(base_dir)
-            return resolved_path
-        except ValueError:
-            pass
+        raise ValueError(f"Path traversal attempt: escapes {base_dir}")
 
-
-
-        return resolved_path
-    except ValueError:
-        st.error(
-            "Invalid path: The resolved path is outside the allowed "
-            "directories (workspace or temporary directory). Paths cannot go above "
-            "the workspace root for security reasons."
-        )
-        return None
-
-    except Exception as exc:
-        st.error(f"Invalid path: {exc}. Please enter a valid relative path.")
+    except ValueError as exc:
+        st.error(f"Invalid path: {exc}")
         return None
 
 
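The rewritten `_get_safe_cookbook_directory` replaces string blacklisting (null bytes, backslashes, `..`) with normalisation plus a containment check against a list of trusted bases. A sketch of the multi-base pattern in plain pathlib (names are illustrative, not the package's helpers):

```python
import tempfile
from pathlib import Path

def resolve_in_trusted_bases(user_path: str, bases: list[Path]) -> Path:
    """Accept a user-supplied path only if it resolves inside one trusted base."""
    candidate = Path(user_path)
    for base in bases:
        base_resolved = base.resolve()
        # An absolute candidate overrides the join; containment is still checked.
        resolved = (base_resolved / candidate).resolve()
        try:
            resolved.relative_to(base_resolved)
            return resolved
        except ValueError:
            continue
    raise ValueError(f"{user_path!r} escapes all trusted bases")

# Hypothetical usage:
# safe = resolve_in_trusted_bases("cookbooks", [Path.cwd(), Path(tempfile.gettempdir())])
```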
@@ -1093,7 +1125,7 @@ def _handle_cookbook_selection(cookbook_path: str, cookbook_data: list):
 
     with col3:
         if st.button(
-            f"
+            f"Select All ({len(cookbook_names)})",
             help=f"Select all {len(cookbook_names)} cookbooks",
             key="select_all",
         ):
@@ -1130,7 +1162,7 @@ def _show_cookbook_validation_warnings(cookbook_data: list):
     # Check for cookbooks without recipes
     cookbooks_without_recipes = []
     for cookbook in cookbook_data:
-        cookbook_dir =
+        cookbook_dir = _normalize_path(cookbook["Path"])
         recipes_dir = cookbook_dir / "recipes"
         if not recipes_dir.exists() or not list(recipes_dir.glob("*.rb")):
             cookbooks_without_recipes.append(cookbook["Name"])
@@ -1246,22 +1278,26 @@ def _analyze_with_ai(
     st.info(f"Using AI-enhanced analysis with {provider_name} ({model})")
 
     # Count total recipes across all cookbooks
-
-
-
-
-
-
+    def _safe_count_recipes(path_str: str) -> int:
+        """Count recipes safely with CodeQL-recognized containment checks."""
+        try:
+            normalized = _normalize_path(path_str)
+            recipes_dir = normalized / "recipes"
+
+            if recipes_dir.exists():
+                return len(list(recipes_dir.glob("*.rb")))
+            return 0
+        except (ValueError, OSError):
+            return 0
+
+    total_recipes = sum(_safe_count_recipes(cb["Path"]) for cb in cookbook_data)
 
     st.info(f"Detected {len(cookbook_data)} cookbook(s) with {total_recipes} recipe(s)")
 
     results = []
     for i, cb_data in enumerate(cookbook_data):
         # Count recipes in this cookbook
-
-        recipe_count = (
-            len(list(recipes_dir.glob("*.rb"))) if recipes_dir.exists() else 0
-        )
+        recipe_count = _safe_count_recipes(cb_data["Path"])
 
         st.info(
             f"Analyzing {cb_data['Name']} ({recipe_count} recipes)... "
@@ -1582,20 +1618,26 @@ def _parse_summary_line(line: str, structured: dict):
         try:
             count = int(line.split(":")[-1].strip())
             structured["summary"]["total_cookbooks"] = count
-        except ValueError:
-
+        except ValueError as err:
+            structured.setdefault("parse_errors", []).append(
+                f"total_cookbooks_parse_failed: {err}"
+            )
     elif "Successfully converted:" in line:
         try:
             count = int(line.split(":")[-1].strip())
             structured["summary"]["cookbooks_converted"] = count
-        except ValueError:
-
+        except ValueError as err:
+            structured.setdefault("parse_errors", []).append(
+                f"cookbooks_converted_parse_failed: {err}"
+            )
     elif "Total files converted:" in line:
         try:
             count = int(line.split(":")[-1].strip())
             structured["summary"]["total_converted_files"] = count
-        except ValueError:
-
+        except ValueError as err:
+            structured.setdefault("parse_errors", []).append(
+                f"total_converted_files_parse_failed: {err}"
+            )
 
 
 def _parse_converted_cookbook(line: str, structured: dict):
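Instead of silently swallowing `ValueError`, the parser now records failures under a `parse_errors` key via `setdefault`. The pattern in isolation, runnable as-is:

```python
def record_parse_error(structured: dict, key: str, err: Exception) -> None:
    """Accumulate non-fatal parse failures instead of discarding them."""
    structured.setdefault("parse_errors", []).append(f"{key}: {err}")

structured: dict = {"summary": {}}
line = "Total cookbooks: n/a"
try:
    structured["summary"]["total_cookbooks"] = int(line.split(":")[-1].strip())
except ValueError as err:
    record_parse_error(structured, "total_cookbooks_parse_failed", err)

print(structured["parse_errors"])  # ['total_cookbooks_parse_failed: invalid literal ...']
```

Callers can then surface `structured.get("parse_errors", [])` in the UI rather than losing the information.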
@@ -1616,8 +1658,10 @@ def _parse_converted_cookbook(line: str, structured: dict):
                 "files_count": 0,
             }
         )
-    except (IndexError, ValueError):
-
+    except (IndexError, ValueError) as err:
+        structured.setdefault("parse_errors", []).append(
+            f"converted_cookbook_parse_failed: {err}"
+        )
 
 
 def _parse_failed_cookbook(line: str, structured: dict):
@@ -1634,8 +1678,10 @@ def _parse_failed_cookbook(line: str, structured: dict):
                 "error": error,
             }
         )
-    except (IndexError, ValueError):
-
+    except (IndexError, ValueError) as err:
+        structured.setdefault("parse_errors", []).append(
+            f"failed_cookbook_parse_failed: {err}"
+        )
 
 
 def _extract_warnings_from_text(result_text: str, structured: dict):
@@ -1697,7 +1743,7 @@ def _display_conversion_summary(structured_result: dict):
 def _display_conversion_warnings_errors(structured_result: dict):
     """Display conversion warnings and errors."""
     if "warnings" in structured_result and structured_result["warnings"]:
-        st.warning("
+        st.warning("Conversion Warnings")
         for warning in structured_result["warnings"]:
             st.write(f"• {warning}")
 
@@ -1714,7 +1760,8 @@ def _display_conversion_details(structured_result: dict):
 
     for cookbook_result in structured_result["cookbook_results"]:
         with st.expander(
-            f"
+            f"Cookbook {cookbook_result.get('cookbook_name', 'Unknown')}",
+            expanded=False,
         ):
             col1, col2 = st.columns(2)
 
@@ -1727,7 +1774,7 @@ def _display_conversion_details(structured_result: dict):
                 st.metric("Files", cookbook_result.get("files_count", 0))
 
             if cookbook_result.get("status") == "success":
-                st.success("
+                st.success("Conversion successful")
             else:
                 error_msg = cookbook_result.get("error", "Unknown error")
                 st.error(f"❌ Conversion failed: {error_msg}")
@@ -1739,47 +1786,468 @@ def _display_conversion_report(result_text: str):
     st.code(result_text, language="markdown")
 
 
+def _validate_output_path(output_path: str) -> Path | None:
+    """
+    Validate and normalize output path.
+
+    Args:
+        output_path: Path string to validate.
+
+    Returns:
+        Normalized Path object or None if invalid.
+
+    """
+    try:
+        safe_output_path = _normalize_path(str(output_path))
+        base_dir = Path.cwd().resolve()
+        # Use centralised containment validation
+        validated = _ensure_within_base_path(safe_output_path, base_dir)
+        return validated if validated.exists() else None
+    except ValueError:
+        return None
+
+
+def _collect_role_files(safe_output_path: Path) -> list[tuple[Path, Path]]:
+    """
+    Collect all files from converted roles directory.
+
+    Args:
+        safe_output_path: Validated base path.
+
+    Returns:
+        List of (file_path, archive_name) tuples.
+
+    """
+    files_to_archive = []
+    # Path is already normalized; validate files within the output path are contained
+    base_path = safe_output_path
+
+    for root, _dirs, files in os.walk(base_path):
+        root_path = _ensure_within_base_path(Path(root), base_path)
+
+        for file in files:
+            safe_name = _sanitize_filename(file)
+            candidate_path = _ensure_within_base_path(root_path / safe_name, base_path)
+            try:
+                # Ensure each file is contained within base
+                arcname = candidate_path.relative_to(base_path)
+                files_to_archive.append((candidate_path, arcname))
+            except ValueError:
+                continue
+
+    return files_to_archive
+
+
+def _create_roles_zip_archive(safe_output_path: Path) -> bytes:
+    """
+    Create ZIP archive of converted roles.
+
+    Args:
+        safe_output_path: Validated path containing roles.
+
+    Returns:
+        ZIP archive as bytes.
+
+    """
+    zip_buffer = io.BytesIO()
+    with zipfile.ZipFile(zip_buffer, "w", zipfile.ZIP_DEFLATED) as zip_file:
+        files_to_archive = _collect_role_files(safe_output_path)
+        for file_path, arcname in files_to_archive:
+            zip_file.write(str(file_path), str(arcname))
+
+    zip_buffer.seek(0)
+    return zip_buffer.getvalue()
+
+
+def _get_git_path() -> str:
+    """
+    Find git executable in system PATH.
+
+    Returns:
+        The path to git executable.
+
+    Raises:
+        FileNotFoundError: If git is not found in PATH.
+
+    """
+    # Try common locations first
+    common_paths = [
+        "/usr/bin/git",
+        "/usr/local/bin/git",
+        "/opt/homebrew/bin/git",
+    ]
+
+    for path in common_paths:
+        if Path(path).exists():
+            return path
+
+    # Try to find git using 'which' command
+    try:
+        result = subprocess.run(
+            ["which", "git"],
+            capture_output=True,
+            text=True,
+            check=True,
+            timeout=5,
+        )
+        git_path = result.stdout.strip()
+        if git_path and Path(git_path).exists():
+            return git_path
+    except (
+        subprocess.CalledProcessError,
+        FileNotFoundError,
+        subprocess.TimeoutExpired,
+    ) as exc:
+        # Non-fatal: failure to use 'which' just means we fall back to other checks.
+        st.write(f"Debug: 'which git' probe failed: {exc}")
+
+    # Last resort: try the basic 'git' command
+    try:
+        result = subprocess.run(
+            ["git", "--version"],
+            capture_output=True,
+            text=True,
+            check=True,
+            timeout=5,
+        )
+        if result.returncode == 0:
+            return "git"
+    except (
+        subprocess.CalledProcessError,
+        FileNotFoundError,
+        subprocess.TimeoutExpired,
+    ) as exc:
+        # Non-fatal: failure to run 'git --version' just means git is not available.
+        st.write(f"Debug: 'git --version' probe failed: {exc}")
+
+    raise FileNotFoundError(
+        "git executable not found. Please ensure Git is installed and in your "
+        "PATH. Visit https://git-scm.com/downloads for installation instructions."
+    )
+
+
+def _determine_num_recipes(cookbook_path: str, num_roles: int) -> int:
+    """Determine the number of recipes from the cookbook path."""
+    if not cookbook_path:
+        return num_roles
+
+    recipes_dir = Path(cookbook_path) / "recipes"
+    return len(list(recipes_dir.glob("*.rb"))) if recipes_dir.exists() else 1
+
+
+def _get_roles_directory(temp_repo: Path) -> Path:
+    """Get or create the roles directory in the repository."""
+    roles_dir = temp_repo / "roles"
+    if not roles_dir.exists():
+        roles_dir = (
+            temp_repo / "ansible_collections" / "souschef" / "platform" / "roles"
+        )
+
+    roles_dir.mkdir(parents=True, exist_ok=True)
+    return roles_dir
+
+
+def _copy_roles_to_repository(output_path: str, roles_dir: Path) -> None:
+    """Copy roles from output_path to the repository roles directory."""
+    output_path_obj = Path(output_path)
+    if not output_path_obj.exists():
+        return
+
+    for role_dir in output_path_obj.iterdir():
+        if not role_dir.is_dir():
+            continue
+
+        dest_dir = roles_dir / role_dir.name
+        if dest_dir.exists():
+            shutil.rmtree(dest_dir)
+        shutil.copytree(role_dir, dest_dir)
+
+
+def _commit_repository_changes(temp_repo: Path, num_roles: int) -> None:
+    """Commit repository changes to git."""
+    try:
+        subprocess.run(
+            ["git", "add", "."],
+            cwd=temp_repo,
+            check=True,
+            capture_output=True,
+            text=True,
+        )
+        subprocess.run(
+            [
+                "git",
+                "commit",
+                "-m",
+                f"Add converted Ansible roles ({num_roles} role(s))",
+            ],
+            cwd=temp_repo,
+            check=True,
+            capture_output=True,
+            text=True,
+        )
+    except subprocess.CalledProcessError:
+        # Ignore if there's nothing to commit
+        pass
+
+
+def _create_ansible_repository(
+    output_path: str, cookbook_path: str = "", num_roles: int = 1
+) -> dict:
+    """Create a complete Ansible repository structure."""
+    try:
+        # Check that git is available early
+        _get_git_path()
+
+        # Create temp directory for the repo (parent directory)
+        temp_parent = tempfile.mkdtemp(prefix="ansible_repo_parent_")
+        temp_repo = Path(temp_parent) / "ansible_repository"
+
+        # Analyse and determine repo type
+        num_recipes = _determine_num_recipes(cookbook_path, num_roles)
+
+        repo_type = analyse_conversion_output(
+            cookbook_path=cookbook_path or output_path,
+            num_recipes=num_recipes,
+            num_roles=num_roles,
+            has_multiple_apps=num_roles > 3,
+            needs_multi_env=True,
+        )
+
+        # Generate the repository
+        result = generate_ansible_repository(
+            output_path=str(temp_repo),
+            repo_type=repo_type,
+            org_name="souschef",
+            init_git=True,
+        )
+
+        if result["success"]:
+            # Copy converted roles into the repository
+            roles_dir = _get_roles_directory(temp_repo)
+            _copy_roles_to_repository(output_path, roles_dir)
+            _commit_repository_changes(temp_repo, num_roles)
+            result["temp_path"] = str(temp_repo)
+
+        return result
+    except Exception as e:
+        return {"success": False, "error": str(e)}
+
+
+def _create_repository_zip(repo_path: str) -> bytes:
+    """Create a ZIP archive of the Ansible repository including git history."""
+    zip_buffer = io.BytesIO()
+    repo_path_obj = Path(repo_path)
+
+    # Files/directories to exclude from the archive
+    exclude_names = {".DS_Store", "Thumbs.db", "*.pyc", "__pycache__"}
+
+    # Important dotfiles to always include
+    include_dotfiles = {".gitignore", ".gitattributes", ".editorconfig"}
+
+    with zipfile.ZipFile(zip_buffer, "w", zipfile.ZIP_DEFLATED) as zip_file:
+        for file_path in repo_path_obj.rglob("*"):
+            if file_path.is_file():
+                # Skip excluded files
+                if file_path.name in exclude_names:
+                    continue
+                # Include .git directory, .gitignore, and other important dotfiles
+                # Skip hidden dotfiles unless they're in our include list or in .git
+                if (
+                    file_path.name.startswith(".")
+                    and ".git" not in str(file_path)
+                    and file_path.name not in include_dotfiles
+                ):
+                    continue
+
+                arcname = file_path.relative_to(repo_path_obj.parent)
+                zip_file.write(str(file_path), str(arcname))
+
+    zip_buffer.seek(0)
+    return zip_buffer.getvalue()
+
+
 def _display_conversion_download_options(conversion_result: dict):
     """Display download options for converted roles."""
-    if "output_path" in conversion_result:
-
+    if "output_path" not in conversion_result:
+        return
 
-
-
-    try:
-        from souschef.core.path_utils import _normalize_path
+    st.subheader("Download Converted Roles")
+    output_path = conversion_result["output_path"]
 
-
-
-
-
-        return
+    safe_output_path = _validate_output_path(output_path)
+    if safe_output_path is None:
+        st.error("Invalid output path")
+        return
 
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
+    if safe_output_path.exists():
+        _display_role_download_buttons(safe_output_path)
+        repo_placeholder = st.container()
+        _display_generated_repo_section(repo_placeholder)
+        st.info(f"Roles saved to: {output_path}")
+    else:
+        st.warning("Output directory not found for download")
+
+
+def _create_repo_callback(safe_output_path: Path) -> None:
+    """Handle repository creation callback."""
+    try:
+        num_roles = len(
+            [
+                d
+                for d in safe_output_path.iterdir()
+                if d.is_dir() and not d.name.startswith(".")
+            ]
+        )
+
+        repo_result = _create_ansible_repository(
+            output_path=str(safe_output_path),
+            cookbook_path="",
+            num_roles=num_roles,
+        )
 
-
+        if repo_result["success"]:
+            st.session_state.generated_repo = repo_result
+            st.session_state.repo_created_successfully = True
+            st.session_state.repo_creation_error = None
         else:
-
+            _handle_repo_creation_failure(repo_result.get("error", "Unknown error"))
+    except Exception as e:
+        _handle_repo_creation_failure(f"Exception: {str(e)}")
+
+
+def _handle_repo_creation_failure(error_msg: str) -> None:
+    """Handle repository creation failure."""
+    st.session_state.repo_creation_error = error_msg
+    st.session_state.generated_repo = None
+    st.session_state.repo_created_successfully = False
+
+
+def _display_role_download_buttons(safe_output_path: Path) -> None:
+    """Display download buttons for roles and repository creation."""
+    col1, col2 = st.columns([1, 1])
+
+    with col1:
+        archive_data = _create_roles_zip_archive(safe_output_path)
+        st.download_button(
+            label="Download All Ansible Roles",
+            data=archive_data,
+            file_name="ansible_roles_holistic.zip",
+            mime=MIME_TYPE_ZIP,
+            help="Download ZIP archive containing all converted Ansible roles",
+            key="download_holistic_roles",
+        )
+
+    with col2:
+        st.button(
+            "Create Ansible Repository",
+            help="Generate a complete Ansible repository structure with these roles",
+            key="create_repo_from_roles",
+            on_click=lambda: _create_repo_callback(safe_output_path),
+        )
+
+    if st.session_state.get("repo_creation_error"):
+        st.error(
+            f"Failed to create repository: {st.session_state.repo_creation_error}"
+        )
+
+
+def _display_generated_repo_section(placeholder) -> None:
+    """Display the generated repository section if it exists."""
+    if not _should_display_generated_repo():
+        return
+
+    repo_result = st.session_state.generated_repo
+
+    with placeholder:
+        st.markdown("---")
+        st.success("Ansible Repository Generated!")
+        _display_repo_info(repo_result)
+        _display_repo_structure(repo_result)
+        _display_repo_download(repo_result)
+        _display_repo_git_instructions()
+        _display_repo_clear_button(repo_result)
+
+
+def _should_display_generated_repo() -> bool:
+    """Check if generated repo should be displayed."""
+    return "generated_repo" in st.session_state and st.session_state.get(
+        "repo_created_successfully", False
+    )
+
+
+def _display_repo_info(repo_result: dict) -> None:
+    """Display repository information."""
+    repo_type = repo_result["repo_type"].replace("_", " ").title()
+    files_count = len(repo_result["files_created"])
+
+    st.info(
+        f"**Repository Type:** {repo_type}\n\n"
+        f"**Files Created:** {files_count}\n\n"
+        "Includes: ansible.cfg, requirements.yml, inventory, playbooks, roles"
+    )
+
+
+def _display_repo_structure(repo_result: dict) -> None:
+    """Display repository structure."""
+    with st.expander("Repository Structure", expanded=True):
+        files_sorted = sorted(repo_result["files_created"])
+        st.code("\n".join(files_sorted[:40]), language="text")
+        if len(files_sorted) > 40:
+            remaining = len(files_sorted) - 40
+            st.caption(f"... and {remaining} more files")
+
+
+def _display_repo_download(repo_result: dict) -> None:
+    """Display repository download button."""
+    repo_zip = _create_repository_zip(repo_result["temp_path"])
+    st.download_button(
+        label="Download Ansible Repository",
+        data=repo_zip,
+        file_name="ansible_repository.zip",
+        mime=MIME_TYPE_ZIP,
+        help="Download complete Ansible repository as ZIP archive",
+        key="download_generated_repo",
+    )
+
+
+def _display_repo_git_instructions() -> None:
+    """Display git clone instructions."""
+    with st.expander("Git Clone Instructions", expanded=True):
+        st.markdown("""
+        After downloading and extracting the repository:
+
+        ```bash
+        cd ansible_repository
+
+        # Repository is already initialized with git!
+        # Check commits:
+        git log --oneline
+
+        # Push to remote repository:
+        git remote add origin <your-git-url>
+        git push -u origin master
+        ```
+
+        **Repository includes:**
+        - ✅ All converted roles with tasks
+        - ✅ Ansible configuration (`ansible.cfg`)
+        - ✅ `.gitignore` for Ansible projects
+        - ✅ `.gitattributes` for consistent line endings
+        - ✅ `.editorconfig` for consistent coding styles
+        - ✅ README with usage instructions
+        - ✅ **Git repository initialized with all files committed**
+        """)
+
+
+def _display_repo_clear_button(repo_result: dict) -> None:
+    """Display repository clear button."""
+    if st.button("Clear Repository", key="clear_generated_repo"):
+        with contextlib.suppress(Exception):
+            shutil.rmtree(repo_result["temp_path"])
+        del st.session_state.generated_repo
+        if "repo_created_successfully" in st.session_state:
+            del st.session_state.repo_created_successfully
+        st.rerun()
 
 
 def _handle_dashboard_upload():
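`_get_git_path` above probes well-known install locations, then `which`, then `git --version`. Worth noting as a simpler alternative: the standard library's `shutil.which` performs the PATH lookup portably (including on Windows), avoiding the Unix-only `which` binary; a sketch:

```python
import shutil

def find_git() -> str:
    """Locate the git executable via a portable PATH lookup."""
    git = shutil.which("git")
    if git is None:
        raise FileNotFoundError(
            "git executable not found; install Git and ensure it is on PATH"
        )
    return git
```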
@@ -1959,18 +2427,23 @@ def _update_progress(status_text, cookbook_name, current, total):
 
 def _find_cookbook_directory(cookbook_path, cookbook_name):
     """Find the directory for a specific cookbook by checking metadata."""
-
-
-
-
-
-
-
-
-
-
-
-
+    try:
+        normalized_path = _normalize_path(cookbook_path)
+        for d in normalized_path.iterdir():
+            if d.is_dir():
+                # Check if this directory contains a cookbook with the matching name
+                metadata_file = d / METADATA_FILENAME
+                if metadata_file.exists():
+                    try:
+                        metadata = parse_cookbook_metadata(str(metadata_file))
+                        if metadata.get("name") == cookbook_name:
+                            return d
+                    except (ValueError, OSError, KeyError):
+                        # If metadata parsing fails, skip this directory
+                        continue
+    except ValueError:
+        # Invalid path, return None
+        return None
     return None
 
 
@@ -2393,7 +2866,7 @@ def _build_dependency_graph(cookbook_path: str, selected_cookbooks: list[str]) -
             # Parse the markdown response to extract dependencies
             dependencies = _extract_dependencies_from_markdown(dep_analysis)
             dependency_graph[cookbook_name] = dependencies
-        except
+        except (ValueError, OSError, RuntimeError):
             # If dependency analysis fails, assume no dependencies
             dependency_graph[cookbook_name] = []
 
@@ -3125,6 +3598,12 @@ def _convert_and_download_playbooks(results):
     except Exception as e:
         st.warning(f"Could not stage playbooks for validation: {e}")
 
+    # Store conversion results in session state to persist across reruns
+    st.session_state.conversion_results = {
+        "playbooks": playbooks,
+        "templates": templates,
+    }
+
     _handle_playbook_download(playbooks, templates)
 
 
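Storing the conversion payload in `st.session_state` is what lets it survive Streamlit's rerun model, where the whole script re-executes on every widget interaction. A self-contained sketch of that pattern (stand-in payload, not the package's data):

```python
import streamlit as st

# Anything not kept in st.session_state is recomputed and lost on each rerun.
if "conversion_results" not in st.session_state:
    st.session_state.conversion_results = None

if st.button("Convert"):
    st.session_state.conversion_results = {"playbooks": [], "templates": []}

if st.session_state.conversion_results is not None:
    st.write("Showing persisted conversion results")
    if st.button("Back"):
        st.session_state.conversion_results = None
        st.rerun()
```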
@@ -3306,6 +3785,19 @@ def _handle_playbook_download(playbooks: list, templates: list | None = None) ->
         st.error("No playbooks were successfully generated.")
         return
 
+    # Add back to analysis button
+    col1, _ = st.columns([1, 4])
+    with col1:
+        if st.button(
+            "← Back to Analysis",
+            help="Return to analysis results",
+            key="back_to_analysis_from_conversion",
+        ):
+            # Clear conversion results to go back to analysis view
+            st.session_state.conversion_results = None
+            st.session_state.generated_playbook_repo = None
+            st.rerun()
+
     templates = templates or []
     playbook_archive = _create_playbook_archive(playbooks, templates)
 
@@ -3320,8 +3812,10 @@ def _handle_playbook_download(playbooks: list, templates: list | None = None) ->
     # Show summary
     _display_playbook_summary(len(playbooks), template_count)
 
-    # Provide download button
-    _display_download_button(
+    # Provide download button and repository creation
+    _display_download_button(
+        len(playbooks), template_count, playbook_archive, playbooks
+    )
 
     # Show previews
     _display_playbook_previews(playbooks)
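The download paths in this file all build their archives in memory with `io.BytesIO`, so `st.download_button` can be handed raw bytes without touching disk. The idiom in miniature:

```python
import io
import zipfile

def zip_texts(files: dict[str, str]) -> bytes:
    """Bundle name -> content pairs into an in-memory ZIP for a download button."""
    buffer = io.BytesIO()
    with zipfile.ZipFile(buffer, "w", zipfile.ZIP_DEFLATED) as archive:
        for name, content in files.items():
            archive.writestr(name, content)
    buffer.seek(0)
    return buffer.getvalue()

# Hypothetical usage:
# data = zip_texts({"playbooks/site.yml": "- hosts: all\n"})
# st.download_button("Download", data=data, file_name="playbooks.zip", mime="application/zip")
```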
@@ -3346,24 +3840,204 @@ def _display_playbook_summary(playbook_count: int, template_count: int) -> None:
     )
 
 
-def
-
-
-    """Display the download button for the archive."""
-    download_label = f"Download Ansible Playbooks ({playbook_count} playbooks"
+def _build_download_label(playbook_count: int, template_count: int) -> str:
+    """Build the download button label."""
+    label = f"Download Ansible Playbooks ({playbook_count} playbooks"
     if template_count > 0:
-
-
+        label += f", {template_count} templates"
+    label += ")"
+    return label
+
+
+def _write_playbooks_to_temp_dir(playbooks: list, temp_dir: str) -> None:
+    """Write playbooks to temporary directory."""
+    for playbook in playbooks:
+        cookbook_name = _sanitize_filename(playbook["cookbook_name"])
+        recipe_name = _sanitize_filename(playbook["recipe_file"].replace(".rb", ""))
+        playbook_file = Path(temp_dir) / f"{cookbook_name}_{recipe_name}.yml"
+        playbook_file.write_text(playbook["playbook_content"])
+
+
+def _get_playbooks_dir(repo_result: dict) -> Path:
+    """Get or create the playbooks directory in the repository."""
+    playbooks_dir = Path(repo_result["temp_path"]) / "playbooks"
+    if not playbooks_dir.exists():
+        playbooks_dir = (
+            Path(repo_result["temp_path"])
+            / "ansible_collections"
+            / "souschef"
+            / "platform"
+            / "playbooks"
+        )
+    playbooks_dir.mkdir(parents=True, exist_ok=True)
+    return playbooks_dir
+
+
+def _copy_playbooks_to_repo(temp_dir: str, playbooks_dir: Path) -> None:
+    """Copy playbooks from temp directory to repository."""
+    for playbook_file in Path(temp_dir).glob("*.yml"):
+        shutil.copy(playbook_file, playbooks_dir / playbook_file.name)
+
 
+def _commit_playbooks_to_git(temp_dir: str, repo_path: str) -> None:
+    """Commit playbooks to git repository."""
+    try:
+        subprocess.run(
+            ["git", "add", "."],
+            cwd=repo_path,
+            check=True,
+            capture_output=True,
+            text=True,
+        )
+        num_playbooks = len(list(Path(temp_dir).glob("*.yml")))
+        commit_msg = f"Add converted Ansible playbooks ({num_playbooks} playbook(s))"
+        subprocess.run(
+            ["git", "commit", "-m", commit_msg],
+            cwd=repo_path,
+            check=True,
+            capture_output=True,
+            text=True,
+        )
+    except subprocess.CalledProcessError:
+        # If there's nothing to commit, that's okay
+        pass
+
+
+def _handle_repo_creation(temp_dir: str, playbooks: list) -> None:
+    """Handle repository creation and setup."""
+    repo_result = _create_ansible_repository(
+        output_path=temp_dir,
+        cookbook_path="",
+        num_roles=len({p["cookbook_name"] for p in playbooks}),
+    )
+
+    if not repo_result["success"]:
+        st.error(
+            f"Failed to create repository: {repo_result.get('error', 'Unknown error')}"
+        )
+        return
+
+    playbooks_dir = _get_playbooks_dir(repo_result)
+    _copy_playbooks_to_repo(temp_dir, playbooks_dir)
+    _commit_playbooks_to_git(temp_dir, repo_result["temp_path"])
+    st.session_state.generated_playbook_repo = repo_result
+
+
+def _display_repo_structure_section(repo_result: dict) -> None:
+    """Display repository structure in an expander."""
+    with st.expander("Repository Structure", expanded=True):
+        files_sorted = sorted(repo_result["files_created"])
+        st.code("\n".join(files_sorted[:40]), language="text")
+        if len(files_sorted) > 40:
+            remaining = len(files_sorted) - 40
+            st.caption(f"... and {remaining} more files")
+
+
+def _display_repo_info_section(repo_result: dict) -> None:
+    """Display repository information."""
+    repo_type = repo_result["repo_type"].replace("_", " ").title()
+    st.info(
+        f"**Repository Type:** {repo_type}\n\n"
+        f"**Files Created:** {len(repo_result['files_created'])}\n\n"
+        "Includes: ansible.cfg, requirements.yml, inventory, playbooks"
+    )
+
+
+def _display_generated_repo_section_internal(repo_result: dict) -> None:
+    """Display the complete generated repository section."""
+    st.markdown("---")
+    st.success("Ansible Playbook Repository Generated!")
+    _display_repo_info_section(repo_result)
+    _display_repo_structure_section(repo_result)
+
+    repo_zip = _create_repository_zip(repo_result["temp_path"])
     st.download_button(
-        label=
-        data=
-        file_name="
-        mime=
-        help=
-
+        label="Download Ansible Repository",
+        data=repo_zip,
+        file_name="ansible_playbook_repository.zip",
+        mime=MIME_TYPE_ZIP,
+        help="Download complete Ansible repository as ZIP archive",
+        key="download_playbook_repo",
     )
 
+    with st.expander("Git Clone Instructions", expanded=True):
+        st.markdown("""
+        After downloading and extracting the repository:
+
+        ```bash
+        cd ansible_playbook_repository
+
+        # Repository is already initialized with git!
+        # Check commits:
+        git log --oneline
+
+        # Push to remote repository:
+        git remote add origin <your-git-url>
+        git push -u origin master
+        ```
+
+        **What's included:**
+        - ✅ Ansible configuration (`ansible.cfg`)
+        - ✅ Dependency management (`requirements.yml`)
+        - ✅ Inventory structure
+        - ✅ All converted playbooks
+        - ✅ `.gitignore` for Ansible projects
+        - ✅ `.gitattributes` for consistent line endings
+        - ✅ `.editorconfig` for consistent coding styles
+        - ✅ README with usage instructions
+        - ✅ **Git repository initialized with all files committed**
+        """)
+
+    if st.button("Clear Repository", key="clear_playbook_repo"):
+        if "generated_playbook_repo" in st.session_state:
+            with contextlib.suppress(Exception):
+                shutil.rmtree(repo_result["temp_path"])
+            del st.session_state.generated_playbook_repo
+        st.rerun()
+
+
+def _display_download_button(
+    playbook_count: int,
+    template_count: int,
+    archive_data: bytes,
+    playbooks: list | None = None,
+) -> None:
+    """Display the download button for the archive."""
+    download_label = _build_download_label(playbook_count, template_count)
+
+    col1, col2 = st.columns([1, 1])
+
+    with col1:
+        st.download_button(
+            label=download_label,
+            data=archive_data,
+            file_name="ansible_playbooks.zip",
+            mime=MIME_TYPE_ZIP,
+            help=f"Download ZIP archive containing {playbook_count} playbooks "
+            f"and {template_count} templates",
+            key="download_playbooks_archive",
+        )
+
+    with col2:
+        if st.button(
+            "Create Ansible Repository",
+            help=(
+                "Generate a complete Ansible repository structure with these playbooks"
+            ),
+            key="create_repo_from_playbooks",
+        ):
+            with st.spinner("Creating Ansible repository with playbooks..."):
+                temp_playbook_dir = tempfile.mkdtemp(prefix="playbooks_")
+                if playbooks:
+                    _write_playbooks_to_temp_dir(playbooks, temp_playbook_dir)
+                _handle_repo_creation(temp_playbook_dir, playbooks)
+
+    # Display generated repository options for playbooks
+    if "generated_playbook_repo" in st.session_state:
+        _display_generated_repo_section_internal(
+            st.session_state.generated_playbook_repo
+        )
+
 
 def _display_playbook_previews(playbooks: list) -> None:
     """Display preview of generated playbooks."""