mcp-souschef 2.2.0-py3-none-any.whl → 2.5.3-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- {mcp_souschef-2.2.0.dist-info → mcp_souschef-2.5.3.dist-info}/METADATA +174 -21
- mcp_souschef-2.5.3.dist-info/RECORD +38 -0
- mcp_souschef-2.5.3.dist-info/entry_points.txt +4 -0
- souschef/assessment.py +100 -17
- souschef/ci/__init__.py +11 -0
- souschef/ci/github_actions.py +379 -0
- souschef/ci/gitlab_ci.py +299 -0
- souschef/ci/jenkins_pipeline.py +343 -0
- souschef/cli.py +601 -1
- souschef/core/validation.py +35 -2
- souschef/deployment.py +5 -3
- souschef/filesystem/operations.py +0 -7
- souschef/parsers/__init__.py +6 -1
- souschef/parsers/inspec.py +343 -18
- souschef/parsers/metadata.py +30 -0
- souschef/server.py +394 -141
- souschef/ui/__init__.py +8 -0
- souschef/ui/app.py +1837 -0
- souschef/ui/pages/cookbook_analysis.py +425 -0
- mcp_souschef-2.2.0.dist-info/RECORD +0 -31
- mcp_souschef-2.2.0.dist-info/entry_points.txt +0 -4
- {mcp_souschef-2.2.0.dist-info → mcp_souschef-2.5.3.dist-info}/WHEEL +0 -0
- {mcp_souschef-2.2.0.dist-info → mcp_souschef-2.5.3.dist-info}/licenses/LICENSE +0 -0
souschef/server.py
CHANGED
@@ -21,13 +21,16 @@ from souschef.assessment import (
 from souschef.assessment import (
     generate_migration_report as _generate_migration_report,
 )
+from souschef.assessment import (
+    parse_chef_migration_assessment as _parse_chef_migration_assessment,
+)
 from souschef.assessment import (
     validate_conversion as _validate_conversion,
 )

 # Import extracted modules
 # Import private helper functions still used in server.py
-#
+# codeql[py/unused-import]: Backward compatibility exports for test suite
 from souschef.converters.habitat import (  # noqa: F401
     _add_service_build,
     _add_service_dependencies,
@@ -49,7 +52,7 @@ from souschef.converters.habitat import (
 )

 # Re-exports of playbook internal functions for backward compatibility (tests)
-#
+# codeql[py/unused-import]: Backward compatibility exports for test suite
 from souschef.converters.playbook import (  # noqa: F401
     _add_general_recommendations,
     _convert_chef_block_to_ansible,
@@ -84,11 +87,8 @@ from souschef.converters.playbook import (
 from souschef.converters.playbook import (
     generate_dynamic_inventory_script as _generate_dynamic_inventory_script,
 )
-from souschef.converters.playbook import (
-    generate_playbook_from_recipe as _generate_playbook_from_recipe,
-)

-#
+# codeql[py/unused-import]: Backward compatibility exports for test suite
 from souschef.converters.resource import (  # noqa: F401
     _convert_chef_resource_to_ansible,
     _format_ansible_task,
@@ -101,7 +101,7 @@ from souschef.converters.resource import (

 # Re-exports for backward compatibility (used by tests) - DO NOT REMOVE
 # These imports are intentionally exposed for external test access
-#
+# codeql[py/unused-import]: Backward compatibility exports for test suite
 from souschef.core.constants import (  # noqa: F401
     ACTION_TO_STATE,
     ANSIBLE_SERVICE_MODULE,
@@ -113,19 +113,19 @@ from souschef.core.constants import (  # noqa: F401
 # Import core utilities
 from souschef.core.errors import format_error_with_context

-#
+# codeql[py/unused-import]: Backward compatibility exports for test suite
 from souschef.core.path_utils import _normalize_path, _safe_join  # noqa: F401

 # Re-exports for backward compatibility (used by tests) - DO NOT REMOVE
 # These imports are intentionally exposed for external test access
-#
+# codeql[py/unused-import]: Backward compatibility exports for test suite
 from souschef.core.ruby_utils import (  # noqa: F401
     _normalize_ruby_value,
 )

 # Re-exports for backward compatibility (used by tests) - DO NOT REMOVE
 # These imports are intentionally exposed for external test access
-#
+# codeql[py/unused-import]: Backward compatibility exports for test suite
 from souschef.core.validation import (  # noqa: F401
     ValidationCategory,
     ValidationEngine,
@@ -137,7 +137,7 @@ from souschef.core.validation import (  # noqa: F401
 # Re-exports of deployment internal functions for backward compatibility (tests)
 # Public re-exports of deployment functions for test backward compatibility
 # Note: MCP tool wrappers exist for some of these, but tests import directly
-#
+# codeql[py/unused-import]: Backward compatibility exports for test suite
 from souschef.deployment import (  # noqa: F401
     _analyze_cookbook_for_awx,
     _analyze_cookbooks_directory,
@@ -192,7 +192,7 @@ from souschef.deployment import (
 from souschef.filesystem import list_directory as _list_directory
 from souschef.filesystem import read_file as _read_file

-#
+# codeql[py/unused-import]: Backward compatibility exports for test suite
 from souschef.parsers.attributes import (  # noqa: F401
     _extract_attributes,
     _format_attributes,
@@ -204,7 +204,7 @@ from souschef.parsers.attributes import (  # noqa: F401
 # Import parser functions
 from souschef.parsers.attributes import parse_attributes as _parse_attributes

-#
+# codeql[py/unused-import]: Backward compatibility exports for test suite
 from souschef.parsers.habitat import (  # noqa: F401
     _extract_plan_array,
     _extract_plan_exports,
@@ -217,16 +217,24 @@ from souschef.parsers.habitat import (  # noqa: F401
 from souschef.parsers.habitat import parse_habitat_plan as _parse_habitat_plan

 # Re-export InSpec internal functions for backward compatibility (tests)
-#
+# codeql[py/unused-import]: Backward compatibility exports for test suite
 from souschef.parsers.inspec import (  # noqa: F401
     _convert_inspec_to_ansible_assert,
+    _convert_inspec_to_goss,
+    _convert_inspec_to_serverspec,
     _convert_inspec_to_testinfra,
     _extract_inspec_describe_blocks,
     _generate_inspec_from_resource,
     _parse_inspec_control,
 )
+from souschef.parsers.inspec import (
+    convert_inspec_to_test as _convert_inspec_test,
+)
+from souschef.parsers.inspec import (
+    parse_inspec_profile as _parse_inspec,
+)

-#
+# codeql[py/unused-import]: Backward compatibility exports for test suite
 from souschef.parsers.metadata import (  # noqa: F401
     _extract_metadata,
     _format_cookbook_structure,
@@ -235,9 +243,12 @@ from souschef.parsers.metadata import (  # noqa: F401
 from souschef.parsers.metadata import (
     list_cookbook_structure as _list_cookbook_structure,
 )
+from souschef.parsers.metadata import (
+    parse_cookbook_metadata as _parse_cookbook_metadata,
+)
 from souschef.parsers.metadata import read_cookbook_metadata as _read_cookbook_metadata

-#
+# codeql[py/unused-import]: Backward compatibility exports for test suite
 from souschef.parsers.recipe import (  # noqa: F401
     _extract_conditionals,
     _extract_resources,
@@ -245,14 +256,14 @@ from souschef.parsers.recipe import (  # noqa: F401
 )
 from souschef.parsers.recipe import parse_recipe as _parse_recipe

-#
+# codeql[py/unused-import]: Backward compatibility exports for test suite
 from souschef.parsers.resource import (  # noqa: F401
     _extract_resource_actions,
     _extract_resource_properties,
 )
 from souschef.parsers.resource import parse_custom_resource as _parse_custom_resource

-#
+# codeql[py/unused-import]: Backward compatibility exports for test suite
 from souschef.parsers.template import (  # noqa: F401
     _convert_erb_to_jinja2,
     _extract_code_block_variables,
@@ -354,6 +365,21 @@ def read_cookbook_metadata(path: str) -> str:
     return _read_cookbook_metadata(path)


+@mcp.tool()
+def parse_cookbook_metadata(path: str) -> dict[str, str | list[str]]:
+    """
+    Parse Chef cookbook metadata.rb file and return as dictionary.
+
+    Args:
+        path: Path to the metadata.rb file.
+
+    Returns:
+        Dictionary containing extracted metadata fields.
+
+    """
+    return _parse_cookbook_metadata(path)
+
+
 @mcp.tool()
 def parse_recipe(path: str) -> str:
     """
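The new parse_cookbook_metadata tool is a thin MCP wrapper around the parser that 2.5.3 re-exports from souschef/parsers/metadata.py (see the import hunk above). A minimal sketch of driving that parser directly; the cookbook path and the exact keys in the returned dictionary are assumptions, not part of this diff:

    # Sketch only: hypothetical metadata.rb path; returned keys depend on what
    # the file declares and what the parser extracts.
    from souschef.parsers.metadata import parse_cookbook_metadata

    metadata = parse_cookbook_metadata("cookbooks/nginx/metadata.rb")
    print(metadata)  # e.g. name, version, and dependency fields as a dict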
@@ -537,6 +563,7 @@ def _parse_controls_from_file(profile_path: Path) -> list[dict[str, Any]]:
         raise RuntimeError(f"Error reading file: {e}") from e


+@mcp.tool()
 def parse_inspec_profile(path: str) -> str:
     """
     Parse an InSpec profile and extract controls.
@@ -548,108 +575,23 @@ def parse_inspec_profile(path: str) -> str:
         JSON string with parsed controls, or error message.

     """
-    try:
-        # Validate input
-        if not path or not path.strip():
-            return (
-                "Error: Path cannot be empty\n\n"
-                "Suggestion: Provide a path to an InSpec profile directory or control file"
-            )
-
-        profile_path = _normalize_path(path)
-
-        if not profile_path.exists():
-            return (
-                f"Error: Path does not exist: {path}\n\n"
-                "Suggestion: Check that the path is correct and the InSpec profile exists"
-            )
-
-        if profile_path.is_dir():
-            controls = _parse_controls_from_directory(profile_path)
-        elif profile_path.is_file():
-            controls = _parse_controls_from_file(profile_path)
-        else:
-            return (
-                f"Error: Invalid path type: {path}\n\n"
-                "Suggestion: Provide a directory or file path, not a special file type"
-            )
-
-        return json.dumps(
-            {
-                "profile_path": str(profile_path),
-                "controls_count": len(controls),
-                "controls": controls,
-            },
-            indent=2,
-        )
-
-    except (FileNotFoundError, RuntimeError) as e:
-        return format_error_with_context(e, "parsing InSpec profile", path)
-    except Exception as e:
-        return format_error_with_context(e, "parsing InSpec profile", path)
+    return _parse_inspec(path)


 @mcp.tool()
 def convert_inspec_to_test(inspec_path: str, output_format: str = "testinfra") -> str:
     """
-    Convert InSpec controls to
+    Convert InSpec controls to test framework format.

     Args:
         inspec_path: Path to InSpec profile or control file.
-        output_format: Output format ('testinfra' or '
+        output_format: Output format ('testinfra', 'ansible_assert', 'serverspec', or 'goss').

     Returns:
         Converted test code or error message.

     """
-    try:
-        # First parse the InSpec profile
-        parse_result = parse_inspec_profile(inspec_path)
-
-        # Check if parsing failed
-        if parse_result.startswith(ERROR_PREFIX):
-            return parse_result
-
-        # Parse JSON result
-        profile_data = json.loads(parse_result)
-        controls = profile_data["controls"]
-
-        if not controls:
-            return "Error: No controls found in InSpec profile"
-
-        # Convert each control
-        converted_tests = []
-
-        if output_format == "testinfra":
-            converted_tests.append("import pytest")
-            converted_tests.append("")
-            converted_tests.append("")
-            for control in controls:
-                test_code = _convert_inspec_to_testinfra(control)
-                converted_tests.append(test_code)
-
-        elif output_format == "ansible_assert":
-            converted_tests.append("---")
-            converted_tests.append("# Validation tasks converted from InSpec")
-            converted_tests.append("")
-            for control in controls:
-                assert_code = _convert_inspec_to_ansible_assert(control)
-                converted_tests.append(assert_code)
-                converted_tests.append("")
-
-        else:
-            error_msg = (
-                f"Error: Unsupported format '{output_format}'. "
-                "Use 'testinfra' or 'ansible_assert'"
-            )
-            return error_msg
-
-        return "\n".join(converted_tests)
-
-    except Exception as e:
-        return format_error_with_context(
-            e, f"converting InSpec to {output_format}", inspec_path
-        )
+    return _convert_inspec_test(inspec_path, output_format)


 def _extract_resources_from_parse_result(parse_result: str) -> list[dict[str, Any]]:
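Both InSpec tools above are now one-line delegations to souschef/parsers/inspec.py, which also gains 'serverspec' and 'goss' converters alongside the existing 'testinfra' and 'ansible_assert' formats. A minimal sketch of calling the delegated functions directly; the profile path is an assumption:

    # Sketch only: hypothetical InSpec profile path; the format names are taken
    # from the updated docstring in the hunk above.
    from souschef.parsers.inspec import convert_inspec_to_test, parse_inspec_profile

    profile = "compliance/profiles/base"
    print(parse_inspec_profile(profile))            # JSON string with parsed controls
    print(convert_inspec_to_test(profile, "goss"))  # converted test code or error message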
@@ -1220,6 +1162,183 @@ def _parse_chef_environment_content(content: str) -> dict:
     return env_data


+def _convert_ruby_literal(value: str) -> Any:
+    """
+    Convert Ruby literal values to equivalent Python types.
+
+    This function handles the conversion of Ruby's basic literal values
+    to their Python equivalents during Chef environment parsing.
+
+    Args:
+        value: String representation of a Ruby literal value.
+
+    Returns:
+        The converted Python value:
+        - "true" -> True (bool)
+        - "false" -> False (bool)
+        - "nil" -> None
+        - Integer strings -> int (e.g., "42" -> 42)
+        - Float strings -> float (e.g., "3.14" -> 3.14, "1e10" -> 10000000000.0)
+        - Unrecognized values -> original string unchanged
+
+    Examples:
+        >>> _convert_ruby_literal("true")
+        True
+        >>> _convert_ruby_literal("42")
+        42
+        >>> _convert_ruby_literal("3.14")
+        3.14
+        >>> _convert_ruby_literal("nil")
+        None
+        >>> _convert_ruby_literal("some_string")
+        'some_string'
+
+    """
+    # Handle boolean and nil values
+    literal_map = {
+        "true": True,
+        "false": False,
+        "nil": None,
+    }
+
+    if value in literal_map:
+        return literal_map[value]
+
+    # Handle numeric values
+    try:
+        # Try integer first
+        if "." not in value and "e" not in value.lower():
+            return int(value)
+        else:
+            return float(value)
+    except ValueError:
+        pass
+
+    # Return as string if no conversion applies
+    return value
+
+
+def _parse_quoted_key(content: str, i: int) -> tuple[str, int]:
+    """Parse a quoted key and return (key, new_index)."""
+    if content[i] not in "'\"":
+        raise ValueError("Expected quote at start of key")
+
+    quote = content[i]
+    i += 1
+    key_start = i
+    while i < len(content) and content[i] != quote:
+        i += 1
+    key = content[key_start:i]
+    i += 1  # skip closing quote
+    return key, i
+
+
+def _parse_nested_hash(content: str, i: int) -> tuple[dict, int]:
+    """Parse a nested hash and return (parsed_dict, new_index)."""
+    if content[i] != "{":
+        raise ValueError("Expected opening brace for nested hash")
+
+    brace_count = 1
+    start = i
+    i += 1
+    while i < len(content) and brace_count > 0:
+        if content[i] == "{":
+            brace_count += 1
+        elif content[i] == "}":
+            brace_count -= 1
+        i += 1
+
+    nested_content = content[start + 1 : i - 1]  # exclude braces
+    return parse_ruby_hash(nested_content), i
+
+
+def _parse_simple_value(content: str, i: int) -> tuple[str, int]:
+    """Parse a simple value and return (value, new_index)."""
+    value_start = i
+    while i < len(content) and content[i] not in ",}":
+        i += 1
+    value = content[value_start:i].strip()
+    # Remove quotes if present
+    if (value.startswith("'") and value.endswith("'")) or (
+        value.startswith('"') and value.endswith('"')
+    ):
+        value = value[1:-1]
+    else:
+        # Convert Ruby literals to Python types
+        value = _convert_ruby_literal(value)
+    return value, i
+
+
+def _skip_to_next_item(content: str, i: int) -> int:
+    """Skip to the next item, handling delimiters."""
+    while i < len(content) and content[i] not in ",}":
+        i += 1
+    if i < len(content) and (content[i] == "," or content[i] == "}"):
+        i += 1
+    return i
+
+
+def parse_ruby_hash(content: str) -> dict:
+    """Parse Ruby hash syntax recursively."""
+    result = {}
+
+    # Simple recursive parser for Ruby hash syntax
+    # This handles nested braces by counting them
+    i = 0
+    while i < len(content):
+        # Skip whitespace
+        i = _skip_whitespace(content, i)
+        if i >= len(content):
+            break
+
+        # Parse key-value pair
+        key, value, i = _parse_key_value_pair(content, i)
+        if key is not None:
+            result[key] = value
+
+        # Skip to next item
+        i = _skip_to_next_item(content, i)
+
+    return result
+
+
+def _skip_whitespace(content: str, i: int) -> int:
+    """Skip whitespace characters and return new index."""
+    while i < len(content) and content[i].isspace():
+        i += 1
+    return i
+
+
+def _parse_key_value_pair(content: str, i: int) -> tuple[str | None, Any, int]:
+    """Parse a single key => value pair and return (key, value, new_index)."""
+    # Look for key => value patterns
+    if content[i] in "'\"":
+        # Parse quoted key
+        key, i = _parse_quoted_key(content, i)
+
+        # Skip whitespace and =>
+        i = _skip_whitespace_and_arrows(content, i)
+
+        value: Any
+        if i < len(content) and content[i] == "{":
+            # Nested hash
+            value, i = _parse_nested_hash(content, i)
+        else:
+            # Simple value
+            value, i = _parse_simple_value(content, i)
+
+        return key, value, i
+
+    return None, None, i
+
+
+def _skip_whitespace_and_arrows(content: str, i: int) -> int:
+    """Skip whitespace and => symbols."""
+    while i < len(content) and (content[i].isspace() or content[i] in "=>"):
+        i += 1
+    return i
+
+
 def _extract_attributes_block(content: str, block_type: str) -> dict:
     """Extract attribute blocks from Chef environment content."""
     # Find the block start
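parse_ruby_hash and its helpers replace the regex-based attribute extraction in the next hunk. A minimal sketch of what the recursive parser returns for a typical attribute hash body, following the literal-conversion rules documented in _convert_ruby_literal; the sample input string is an assumption:

    # Sketch only: the input is a made-up default_attributes body.
    from souschef.server import parse_ruby_hash

    body = "'nginx' => { 'port' => 8080, 'ssl' => true }, 'banner' => 'welcome'"
    print(parse_ruby_hash(body))
    # -> {'nginx': {'port': 8080, 'ssl': True}, 'banner': 'welcome'}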
@@ -1231,37 +1350,7 @@ def _extract_attributes_block(content: str, block_type: str) -> dict:

     block_content = match.group(1).strip()

-
-    # Ruby attribute hashes use => syntax, which we convert to Python dict
-    # This is intentionally simple - complex Chef DSL needs full Ruby parser
-    attributes = {}
-
-    # Parse simple key-value pairs like 'port' => '8080'
-    key_value_pattern = (
-        r"['\"]([^'\"]{0,100})['\"][\s:]*=>[\s:]*['\"]([^'\"]{0,200})['\"]"
-    )
-    for match in re.finditer(key_value_pattern, block_content):
-        attr_key = match.group(1)
-        attr_value = match.group(2)
-        attributes[attr_key] = attr_value
-
-    # Parse nested structures (basic support)
-    nested_pattern = (
-        r"['\"](([^'\"\n]{0,100}))['\"](\\s|:)*=>(\\s|:)*\\{([^}]{0,500})\\}"
-    )
-    for match in re.finditer(nested_pattern, block_content):
-        key = match.group(1)
-        nested_content = match.group(5)
-        nested_attrs = {}
-
-        for nested_match in re.finditer(key_value_pattern, nested_content):
-            nested_key = nested_match.group(1)
-            nested_value = nested_match.group(2)
-            nested_attrs[nested_key] = nested_value
-
-        if nested_attrs:
-            attributes[key] = nested_attrs
-
+    attributes = parse_ruby_hash(block_content)
     return attributes


@@ -1527,7 +1616,7 @@ def _find_environment_patterns_in_content(content: str, file_path: str) -> list:
     # Common Chef environment patterns
     environment_patterns = [
         (r"node\.chef_environment", "node.chef_environment"),
-        (r"node\[['\"]
+        (r"node\[['\"]environment['\"]\]", 'node["environment"]'),
         (r"environment\s+['\"]([^'\"\n]{0,100})['\"]", "environment declaration"),
         (
             r"if\s+node\.chef_environment\s*==\s*['\"]([^'\"\n]{0,100})['\"]",
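The removed pattern line is cut off in the diff view, but the replacement is shown in full. A quick sketch of what the new node["environment"] pattern matches; the sample recipe line is an assumption:

    import re

    pattern = r"node\[['\"]environment['\"]\]"  # new pattern from the hunk above
    line = 'env_name = node["environment"]'     # hypothetical recipe line
    print(bool(re.search(pattern, line)))       # True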
@@ -1888,7 +1977,7 @@ def _build_conversion_details_section(results: list) -> str:
         if "error" in result:
             section += f"❌ {result['databag']}/{result['item']}: {result['error']}\n"
         else:
-            status = "🔒 Encrypted" if result
+            status = "🔒 Encrypted" if result.get("encrypted", False) else "📄 Plain"
             databag_item = f"{result['databag']}/{result['item']}"
             target = result["target_file"]
             section += f"✅ {databag_item} → {target} ({status})\n"
@@ -2435,7 +2524,11 @@ def generate_playbook_from_recipe(recipe_path: str) -> str:
         Generated Ansible playbook content.

     """
-    return _generate_playbook_from_recipe(recipe_path)
+    from souschef.converters.playbook import (
+        generate_playbook_from_recipe as _generate_playbook,
+    )
+
+    return _generate_playbook(recipe_path)


 def convert_chef_search_to_inventory(search_query: str) -> str:
@@ -2558,6 +2651,166 @@ def profile_parsing_operation(
         return format_error_with_context(e, f"profiling {operation} parsing", file_path)


+# CI/CD Pipeline Generation Tools
+
+
+@mcp.tool()
+def generate_jenkinsfile_from_chef(
+    cookbook_path: str,
+    pipeline_name: str = "chef-to-ansible-pipeline",
+    pipeline_type: str = "declarative",
+    enable_parallel: str = "yes",
+) -> str:
+    """
+    Generate Jenkins pipeline from Chef cookbook CI/CD patterns.
+
+    Analyzes Chef testing tools (Test Kitchen, ChefSpec, InSpec, Foodcritic)
+    and generates equivalent Jenkins pipeline stages (Declarative or Scripted).
+
+    Args:
+        cookbook_path: Path to Chef cookbook directory.
+        pipeline_name: Name for the Jenkins pipeline.
+        pipeline_type: Pipeline type - 'declarative' (recommended) or 'scripted'.
+        enable_parallel: Enable parallel test execution - 'yes' or 'no'.
+
+    Returns:
+        Jenkinsfile content (Groovy DSL) for Jenkins pipeline.
+
+    """
+    from souschef.ci.jenkins_pipeline import generate_jenkinsfile_from_chef_ci
+
+    try:
+        # Convert string to boolean
+        enable_parallel_bool = enable_parallel.lower() in ("yes", "true", "1")
+
+        result = generate_jenkinsfile_from_chef_ci(
+            cookbook_path=cookbook_path,
+            pipeline_name=pipeline_name,
+            pipeline_type=pipeline_type,
+            enable_parallel=enable_parallel_bool,
+        )
+        return result
+    except FileNotFoundError as e:
+        return format_error_with_context(e, "generating Jenkinsfile", cookbook_path)
+    except Exception as e:
+        return format_error_with_context(e, "generating Jenkinsfile", cookbook_path)
+
+
+@mcp.tool()
+def generate_gitlab_ci_from_chef(
+    cookbook_path: str,
+    project_name: str = "chef-to-ansible",
+    enable_cache: str = "yes",
+    enable_artifacts: str = "yes",
+) -> str:
+    """
+    Generate GitLab CI configuration from Chef cookbook CI/CD patterns.
+
+    Analyzes Chef testing tools and generates equivalent GitLab CI stages
+    with caching, artifacts, and parallel execution support.
+
+    Args:
+        cookbook_path: Path to Chef cookbook directory.
+        project_name: GitLab project name.
+        enable_cache: Enable caching for dependencies - 'yes' or 'no'.
+        enable_artifacts: Enable artifacts for test results - 'yes' or 'no'.
+
+    Returns:
+        .gitlab-ci.yml content (YAML) for GitLab CI/CD.
+
+    """
+    from souschef.ci.gitlab_ci import generate_gitlab_ci_from_chef_ci
+
+    try:
+        enable_cache_bool = enable_cache.lower() in ("yes", "true", "1")
+        enable_artifacts_bool = enable_artifacts.lower() in ("yes", "true", "1")
+        result = generate_gitlab_ci_from_chef_ci(
+            cookbook_path=cookbook_path,
+            project_name=project_name,
+            enable_cache=enable_cache_bool,
+            enable_artifacts=enable_artifacts_bool,
+        )
+        return result
+    except FileNotFoundError as e:
+        return format_error_with_context(
+            e,
+            "generating .gitlab-ci.yml",
+            cookbook_path,
+        )
+    except Exception as e:
+        return format_error_with_context(e, "generating .gitlab-ci.yml", cookbook_path)
+
+
+@mcp.tool()
+def generate_github_workflow_from_chef(
+    cookbook_path: str,
+    workflow_name: str = "Chef Cookbook CI",
+    enable_cache: str = "yes",
+    enable_artifacts: str = "yes",
+) -> str:
+    """
+    Generate GitHub Actions workflow from Chef cookbook CI/CD patterns.
+
+    Analyzes Chef testing tools and generates equivalent GitHub Actions workflow
+    with caching, artifacts, and matrix strategy support.
+
+    Args:
+        cookbook_path: Path to Chef cookbook directory.
+        workflow_name: GitHub Actions workflow name.
+        enable_cache: Enable caching for dependencies - 'yes' or 'no'.
+        enable_artifacts: Enable artifacts for test results - 'yes' or 'no'.
+
+    Returns:
+        GitHub Actions workflow YAML content (.github/workflows/*.yml).
+
+    """
+    from souschef.ci.github_actions import generate_github_workflow_from_chef_ci
+
+    try:
+        enable_cache_bool = enable_cache.lower() in ("yes", "true", "1")
+        enable_artifacts_bool = enable_artifacts.lower() in ("yes", "true", "1")
+        result = generate_github_workflow_from_chef_ci(
+            cookbook_path=cookbook_path,
+            workflow_name=workflow_name,
+            enable_cache=enable_cache_bool,
+            enable_artifacts=enable_artifacts_bool,
+        )
+        return result
+    except FileNotFoundError as e:
+        return format_error_with_context(
+            e,
+            "generating GitHub Actions workflow",
+            cookbook_path,
+        )
+    except Exception as e:
+        return format_error_with_context(
+            e, "generating GitHub Actions workflow", cookbook_path
+        )
+
+
+@mcp.tool()
+def parse_chef_migration_assessment(
+    cookbook_paths: str,
+    migration_scope: str = "full",
+    target_platform: str = "ansible_awx",
+) -> dict[str, Any]:
+    """
+    Parse Chef cookbook migration assessment and return as dictionary.
+
+    Args:
+        cookbook_paths: Comma-separated paths to Chef cookbooks or cookbook directory
+        migration_scope: Scope of migration (full, recipes_only, infrastructure_only)
+        target_platform: Target platform (ansible_awx, ansible_core, ansible_tower)
+
+    Returns:
+        Dictionary containing assessment data with complexity, recommendations, etc.
+
+    """
+    return _parse_chef_migration_assessment(
+        cookbook_paths, migration_scope, target_platform
+    )
+
+
 # AWX/AAP deployment wrappers for backward compatibility
 def main() -> None:
     """