mcp-souschef 2.8.0__py3-none-any.whl → 3.2.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (36)
  1. {mcp_souschef-2.8.0.dist-info → mcp_souschef-3.2.0.dist-info}/METADATA +159 -384
  2. mcp_souschef-3.2.0.dist-info/RECORD +47 -0
  3. {mcp_souschef-2.8.0.dist-info → mcp_souschef-3.2.0.dist-info}/WHEEL +1 -1
  4. souschef/__init__.py +31 -7
  5. souschef/assessment.py +1451 -105
  6. souschef/ci/common.py +126 -0
  7. souschef/ci/github_actions.py +3 -92
  8. souschef/ci/gitlab_ci.py +2 -52
  9. souschef/ci/jenkins_pipeline.py +2 -59
  10. souschef/cli.py +149 -16
  11. souschef/converters/playbook.py +378 -138
  12. souschef/converters/resource.py +12 -11
  13. souschef/converters/template.py +177 -0
  14. souschef/core/__init__.py +6 -1
  15. souschef/core/metrics.py +313 -0
  16. souschef/core/path_utils.py +233 -19
  17. souschef/core/validation.py +53 -0
  18. souschef/deployment.py +71 -12
  19. souschef/generators/__init__.py +13 -0
  20. souschef/generators/repo.py +695 -0
  21. souschef/parsers/attributes.py +1 -1
  22. souschef/parsers/habitat.py +1 -1
  23. souschef/parsers/inspec.py +25 -2
  24. souschef/parsers/metadata.py +5 -3
  25. souschef/parsers/recipe.py +1 -1
  26. souschef/parsers/resource.py +1 -1
  27. souschef/parsers/template.py +1 -1
  28. souschef/server.py +1039 -121
  29. souschef/ui/app.py +486 -374
  30. souschef/ui/pages/ai_settings.py +74 -8
  31. souschef/ui/pages/cookbook_analysis.py +3216 -373
  32. souschef/ui/pages/validation_reports.py +274 -0
  33. mcp_souschef-2.8.0.dist-info/RECORD +0 -42
  34. souschef/converters/cookbook_specific.py.backup +0 -109
  35. {mcp_souschef-2.8.0.dist-info → mcp_souschef-3.2.0.dist-info}/entry_points.txt +0 -0
  36. {mcp_souschef-2.8.0.dist-info → mcp_souschef-3.2.0.dist-info}/licenses/LICENSE +0 -0
souschef/server.py CHANGED
@@ -1,12 +1,16 @@
 """SousChef MCP Server - Chef to Ansible conversion assistant."""
 
+# codeql[py/unused-import]: Intentional re-exports for MCP tools and test compatibility
+
 import ast
 import json
+import os
 import re
 from pathlib import Path
 from typing import Any
 
-from mcp.server.fastmcp import FastMCP
+import yaml
+from mcp.server import FastMCP
 
 # Import assessment functions with aliases to avoid name conflicts
 from souschef.assessment import (
@@ -15,23 +19,16 @@ from souschef.assessment import (
 from souschef.assessment import (
     assess_chef_migration_complexity as _assess_chef_migration_complexity,
 )
-from souschef.assessment import (
-    generate_migration_plan as _generate_migration_plan,
-)
-from souschef.assessment import (
-    generate_migration_report as _generate_migration_report,
-)
+from souschef.assessment import generate_migration_plan as _generate_migration_plan
+from souschef.assessment import generate_migration_report as _generate_migration_report
 from souschef.assessment import (
     parse_chef_migration_assessment as _parse_chef_migration_assessment,
 )
-from souschef.assessment import (
-    validate_conversion as _validate_conversion,
-)
+from souschef.assessment import validate_conversion as _validate_conversion
 
 # Import extracted modules
 # Import private helper functions still used in server.py
-# codeql[py/unused-import]: Backward compatibility exports for test suite
-from souschef.converters.habitat import (  # noqa: F401
+from souschef.converters.habitat import (  # noqa: F401, codeql[py/unused-import]
     _add_service_build,
     _add_service_dependencies,
     _add_service_environment,
@@ -51,9 +48,9 @@ from souschef.converters.habitat import (
     generate_compose_from_habitat as _generate_compose_from_habitat,
 )
 
+# Import playbook converter functions
 # Re-exports of playbook internal functions for backward compatibility (tests)
-# codeql[py/unused-import]: Backward compatibility exports for test suite
-from souschef.converters.playbook import (  # noqa: F401
+from souschef.converters.playbook import (  # noqa: F401, codeql[py/unused-import]
     _add_general_recommendations,
     _convert_chef_block_to_ansible,
     _convert_chef_condition_to_ansible,
@@ -76,8 +73,6 @@ from souschef.converters.playbook import (  # noqa: F401
     _parse_search_condition,
     _process_subscribes,
 )
-
-# Import playbook converter functions
 from souschef.converters.playbook import (
     analyse_chef_search_patterns as _analyse_chef_search_patterns,
 )
@@ -87,9 +82,7 @@ from souschef.converters.playbook import (
 from souschef.converters.playbook import (
     generate_dynamic_inventory_script as _generate_dynamic_inventory_script,
 )
-
-# codeql[py/unused-import]: Backward compatibility exports for test suite
-from souschef.converters.resource import (  # noqa: F401
+from souschef.converters.resource import (  # noqa: F401, codeql[py/unused-import]
     _convert_chef_resource_to_ansible,
     _format_ansible_task,
     _get_file_params,
@@ -101,8 +94,7 @@ from souschef.converters.resource import (
 
 # Re-exports for backward compatibility (used by tests) - DO NOT REMOVE
 # These imports are intentionally exposed for external test access
-# codeql[py/unused-import]: Backward compatibility exports for test suite
-from souschef.core.constants import (  # noqa: F401
+from souschef.core.constants import (  # noqa: F401, codeql[py/unused-import]
     ACTION_TO_STATE,
     ANSIBLE_SERVICE_MODULE,
     ERROR_PREFIX,
@@ -112,33 +104,47 @@ from souschef.core.constants import (  # noqa: F401
 
 # Import core utilities
 from souschef.core.errors import format_error_with_context
-
-# codeql[py/unused-import]: Backward compatibility exports for test suite
-from souschef.core.path_utils import _normalize_path, _safe_join  # noqa: F401
+from souschef.core.path_utils import (  # noqa: F401, codeql[py/unused-import]
+    _ensure_within_base_path,
+    _normalize_path,
+    _safe_join,
+    _validated_candidate,
+    safe_glob,
+    safe_read_text,
+    safe_write_text,
+)
 
 # Re-exports for backward compatibility (used by tests) - DO NOT REMOVE
 # These imports are intentionally exposed for external test access
-# codeql[py/unused-import]: Backward compatibility exports for test suite
-from souschef.core.ruby_utils import (  # noqa: F401
-    _normalize_ruby_value,
+from souschef.core.ruby_utils import (
+    _normalize_ruby_value,  # noqa: F401, codeql[py/unused-import]
 )
 
 # Re-exports for backward compatibility (used by tests) - DO NOT REMOVE
 # These imports are intentionally exposed for external test access
-# codeql[py/unused-import]: Backward compatibility exports for test suite
-from souschef.core.validation import (  # noqa: F401
+from souschef.core.validation import (  # noqa: F401, codeql[py/unused-import]
     ValidationCategory,
     ValidationEngine,
     ValidationLevel,
     ValidationResult,
 )
 
+# Explicit re-exports for language servers and type checkers
+# These names are intentionally available from souschef.server
+__all__ = [
+    "ValidationCategory",
+    "ValidationEngine",
+    "ValidationLevel",
+    "ValidationResult",
+]
+
+# Re-exports for backward compatibility (used by tests)
+# These are imported and re-exported intentionally
 # Import validation framework
 # Re-exports of deployment internal functions for backward compatibility (tests)
 # Public re-exports of deployment functions for test backward compatibility
 # Note: MCP tool wrappers exist for some of these, but tests import directly
-# codeql[py/unused-import]: Backward compatibility exports for test suite
-from souschef.deployment import (  # noqa: F401
+from souschef.deployment import (  # noqa: F401, codeql[py/unused-import]
     _analyse_cookbook_for_awx,
     _analyse_cookbooks_directory,
     _detect_deployment_patterns_in_recipe,
@@ -152,17 +158,7 @@ from souschef.deployment import (  # noqa: F401
     _parse_chef_runlist,
     _recommend_ansible_strategies,
     analyse_chef_application_patterns,
-    convert_chef_deployment_to_ansible_strategy,
-    generate_awx_inventory_source_from_chef,
-    generate_awx_job_template_from_cookbook,
-    generate_awx_project_from_cookbooks,
-    generate_awx_workflow_from_chef_runlist,
-    generate_blue_green_deployment_playbook,
-    generate_canary_deployment_strategy,
 )
-
-# Re-exports for backward compatibility (used by tests)
-# These are imported and re-exported intentionally
 from souschef.deployment import (
     convert_chef_deployment_to_ansible_strategy as _convert_chef_deployment_to_ansible_strategy,
 )
@@ -189,33 +185,28 @@ from souschef.deployment import (
 from souschef.filesystem import list_directory as _list_directory
 from souschef.filesystem import read_file as _read_file
 
-# codeql[py/unused-import]: Backward compatibility exports for test suite
-from souschef.parsers.attributes import (  # noqa: F401
+# Import parser functions
+from souschef.parsers.attributes import (  # noqa: F401, codeql[py/unused-import]
     _extract_attributes,
     _format_attributes,
     _format_resolved_attributes,
     _get_precedence_level,
     _resolve_attribute_precedence,
 )
-
-# Import parser functions
 from souschef.parsers.attributes import parse_attributes as _parse_attributes
 
-# codeql[py/unused-import]: Backward compatibility exports for test suite
-from souschef.parsers.habitat import (  # noqa: F401
+# Import Habitat parser internal functions for backward compatibility
+from souschef.parsers.habitat import (  # noqa: F401, codeql[py/unused-import]
     _extract_plan_array,
     _extract_plan_exports,
     _extract_plan_function,
     _extract_plan_var,
     _update_quote_state,
 )
-
-# Import Habitat parser internal functions for backward compatibility
 from souschef.parsers.habitat import parse_habitat_plan as _parse_habitat_plan
 
 # Re-export InSpec internal functions for backward compatibility (tests)
-# codeql[py/unused-import]: Backward compatibility exports for test suite
-from souschef.parsers.inspec import (  # noqa: F401
+from souschef.parsers.inspec import (  # noqa: F401, codeql[py/unused-import]
     _convert_inspec_to_ansible_assert,
     _convert_inspec_to_goss,
     _convert_inspec_to_serverspec,
@@ -224,18 +215,12 @@ from souschef.parsers.inspec import (  # noqa: F401
     _generate_inspec_from_resource,
     _parse_inspec_control,
 )
-from souschef.parsers.inspec import (
-    convert_inspec_to_test as _convert_inspec_test,
-)
-from souschef.parsers.inspec import (
-    parse_inspec_profile as _parse_inspec,
-)
-
-# codeql[py/unused-import]: Backward compatibility exports for test suite
-from souschef.parsers.metadata import (  # noqa: F401
-    _extract_metadata,
-    _format_cookbook_structure,
-    _format_metadata,
+from souschef.parsers.inspec import convert_inspec_to_test as _convert_inspec_test
+from souschef.parsers.inspec import parse_inspec_profile as _parse_inspec
+from souschef.parsers.metadata import (
+    _extract_metadata,  # noqa: F401, codeql[py/unused-import]
+    _format_cookbook_structure,  # noqa: F401, codeql[py/unused-import]
+    _format_metadata,  # noqa: F401, codeql[py/unused-import]
 )
 from souschef.parsers.metadata import (
     list_cookbook_structure as _list_cookbook_structure,
@@ -244,24 +229,23 @@ from souschef.parsers.metadata import (
     parse_cookbook_metadata as _parse_cookbook_metadata,
 )
 from souschef.parsers.metadata import read_cookbook_metadata as _read_cookbook_metadata
-
-# codeql[py/unused-import]: Backward compatibility exports for test suite
-from souschef.parsers.recipe import (  # noqa: F401
-    _extract_conditionals,
-    _extract_resources,
-    _format_resources,
+from souschef.parsers.recipe import (
    _extract_conditionals,  # noqa: F401, codeql[py/unused-import]
    _extract_resources,  # noqa: F401, codeql[py/unused-import]
    _format_resources,  # noqa: F401, codeql[py/unused-import]
 )
 from souschef.parsers.recipe import parse_recipe as _parse_recipe
 
-# codeql[py/unused-import]: Backward compatibility exports for test suite
-from souschef.parsers.resource import (  # noqa: F401
-    _extract_resource_actions,
-    _extract_resource_properties,
+# Re-exports for backward compatibility (used by tests) - DO NOT REMOVE
+# These imports are intentionally exposed for external test access
+from souschef.parsers.resource import (
+    _extract_resource_actions,  # noqa: F401, codeql[py/unused-import]
+    _extract_resource_properties,  # noqa: F401, codeql[py/unused-import]
 )
 from souschef.parsers.resource import parse_custom_resource as _parse_custom_resource
 
-# codeql[py/unused-import]: Backward compatibility exports for test suite
-from souschef.parsers.template import (  # noqa: F401
+# Import internal functions for backward compatibility (used by tests)
+from souschef.parsers.template import (  # noqa: F401, codeql[py/unused-import]
     _convert_erb_to_jinja2,
     _extract_code_block_variables,
     _extract_heredoc_strings,
@@ -270,17 +254,38 @@ from souschef.parsers.template import (  # noqa: F401
     _extract_template_variables,
     _strip_ruby_comments,
 )
-
-# Import internal functions for backward compatibility (used by tests)
 from souschef.parsers.template import parse_template as _parse_template
 
+# Backward compatibility re-exports without underscore prefix (for tests)
+# noinspection PyUnusedLocal
+convert_chef_deployment_to_ansible_strategy = (  # noqa: F401
+    _convert_chef_deployment_to_ansible_strategy
+)
+generate_awx_inventory_source_from_chef = (  # noqa: F401
+    _generate_awx_inventory_source_from_chef
+)
+generate_awx_job_template_from_cookbook = (  # noqa: F401
+    _generate_awx_job_template_from_cookbook
+)
+generate_awx_project_from_cookbooks = _generate_awx_project_from_cookbooks  # noqa: F401
+generate_awx_workflow_from_chef_runlist = (  # noqa: F401
+    _generate_awx_workflow_from_chef_runlist
+)
+generate_blue_green_deployment_playbook = (  # noqa: F401
+    _generate_blue_green_deployment_playbook
+)
+generate_canary_deployment_strategy = (  # noqa: F401
+    _generate_canary_deployment_strategy
+)
+
 # Create a new FastMCP server
 mcp = FastMCP("souschef")
 
-# Error message templates
-ERROR_FILE_NOT_FOUND = "Error: File not found at {path}"
-ERROR_IS_DIRECTORY = "Error: {path} is a directory, not a file"
-ERROR_PERMISSION_DENIED = "Error: Permission denied for {path}"
+# File constants
+METADATA_RB = "metadata.rb"
+
+# File constants
+METADATA_RB = "metadata.rb"
 
 # Validation Framework Classes
 
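The aliasing block above restores the underscore-free public names on souschef.server after the bulk import from souschef.deployment dropped them. A minimal sketch of the import style these bindings keep working (the test module below is hypothetical, not part of the package):

    # Hypothetical consumer: the module-level alias makes the public name importable.
    from souschef.server import generate_canary_deployment_strategy

    def test_public_reexport_is_callable() -> None:
        assert callable(generate_canary_deployment_strategy)
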
@@ -297,6 +302,10 @@ def parse_template(path: str) -> str:
         JSON string with extracted variables and Jinja2-converted template.
 
     """
+    try:
+        path = str(_normalize_path(path))
+    except ValueError as e:
+        return format_error_with_context(e, "validating template path", path)
     return _parse_template(path)
 
 
@@ -312,6 +321,10 @@ def parse_custom_resource(path: str) -> str:
         JSON string with extracted properties, actions, and metadata.
 
     """
+    try:
+        path = str(_normalize_path(path))
+    except ValueError as e:
+        return format_error_with_context(e, "validating resource path", path)
     return _parse_custom_resource(path)
 
 
@@ -327,6 +340,10 @@ def list_directory(path: str) -> list[str] | str:
         A list of filenames in the directory, or an error message.
 
     """
+    try:
+        path = str(_normalize_path(path))
+    except ValueError as e:
+        return format_error_with_context(e, "validating directory path", path)
     result: list[str] | str = _list_directory(path)
     return result
 
@@ -343,6 +360,10 @@ def read_file(path: str) -> str:
         The contents of the file, or an error message.
 
     """
+    try:
+        path = str(_normalize_path(path))
+    except ValueError as e:
+        return format_error_with_context(e, "validating file path", path)
     result: str = _read_file(path)
     return result
 
@@ -359,6 +380,10 @@ def read_cookbook_metadata(path: str) -> str:
         Formatted string with extracted metadata.
 
     """
+    try:
+        path = str(_normalize_path(path))
+    except ValueError as e:
+        return format_error_with_context(e, "validating metadata path", path)
     return _read_cookbook_metadata(path)
 
 
@@ -374,6 +399,10 @@ def parse_cookbook_metadata(path: str) -> dict[str, str | list[str]]:
         Dictionary containing extracted metadata fields.
 
     """
+    try:
+        path = str(_normalize_path(path))
+    except ValueError as e:
+        return {"error": str(e)}
     return _parse_cookbook_metadata(path)
 
 
@@ -389,6 +418,10 @@ def parse_recipe(path: str) -> str:
         Formatted string with extracted Chef resources and their properties.
 
     """
+    try:
+        path = str(_normalize_path(path))
+    except ValueError as e:
+        return format_error_with_context(e, "validating recipe path", path)
     return _parse_recipe(path)
 
 
@@ -418,6 +451,10 @@ def parse_attributes(path: str, resolve_precedence: bool = True) -> str:
         Formatted string with extracted attributes.
 
     """
+    try:
+        path = str(_normalize_path(path))
+    except ValueError as e:
+        return format_error_with_context(e, "validating attributes path", path)
     return _parse_attributes(path, resolve_precedence)
 
 
@@ -433,6 +470,10 @@ def list_cookbook_structure(path: str) -> str:
         Formatted string showing the cookbook structure.
 
     """
+    try:
+        path = str(_normalize_path(path))
+    except ValueError as e:
+        return format_error_with_context(e, "validating cookbook path", path)
     return _list_cookbook_structure(path)
 
 
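Each MCP wrapper above repeats the same normalise-then-delegate guard. Should the repetition ever become a burden, it factors naturally into a decorator; a minimal sketch under that assumption (the helper below is hypothetical, not in the package):

    from collections.abc import Callable

    def with_validated_path(context: str) -> Callable:
        """Hypothetical: normalise the path argument, or return a formatted error."""
        def decorator(func: Callable[[str], str]) -> Callable[[str], str]:
            def wrapper(path: str) -> str:
                try:
                    path = str(_normalize_path(path))
                except ValueError as e:
                    return format_error_with_context(e, context, path)
                return func(path)
            return wrapper
        return decorator
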
@@ -502,7 +543,6 @@ def _extract_resource_subscriptions(
     return subscriptions
 
 
-@mcp.tool()
 def _parse_controls_from_directory(profile_path: Path) -> list[dict[str, Any]]:
     """
     Parse all control files from an InSpec profile directory.
@@ -523,9 +563,9 @@ def _parse_controls_from_directory(profile_path: Path) -> list[dict[str, Any]]:
         raise FileNotFoundError(f"No controls directory found in {profile_path}")
 
     controls = []
-    for control_file in controls_dir.glob("*.rb"):
+    for control_file in safe_glob(controls_dir, "*.rb", profile_path):
         try:
-            content = control_file.read_text()
+            content = safe_read_text(control_file, profile_path)
             file_controls = _parse_inspec_control(content)
             for ctrl in file_controls:
                 ctrl["file"] = str(control_file.relative_to(profile_path))
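From here on, 3.2.0 consistently swaps bare Path.glob and read_text calls for safe_glob and safe_read_text from souschef.core.path_utils, each of which takes the base directory the target must stay inside. Their implementations are not shown in this diff; a minimal sketch of the containment idea, assuming a Path.resolve-based check:

    from pathlib import Path

    def safe_read_text_sketch(candidate: Path, base: Path) -> str:
        """Illustrative only: read a file only if it resolves inside base."""
        resolved = candidate.resolve()
        if not resolved.is_relative_to(base.resolve()):  # Python >= 3.9
            raise ValueError(f"{candidate} escapes {base}")
        return resolved.read_text()
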
@@ -551,7 +591,7 @@ def _parse_controls_from_file(profile_path: Path) -> list[dict[str, Any]]:
 
     """
     try:
-        content = profile_path.read_text()
+        content = safe_read_text(profile_path, profile_path.parent)
         controls = _parse_inspec_control(content)
         for ctrl in controls:
             ctrl["file"] = profile_path.name
@@ -572,6 +612,10 @@ def parse_inspec_profile(path: str) -> str:
         JSON string with parsed controls, or error message.
 
     """
+    try:
+        path = str(_normalize_path(path))
+    except ValueError as e:
+        return format_error_with_context(e, "validating InSpec path", path)
     return _parse_inspec(path)
 
 
@@ -588,6 +632,10 @@ def convert_inspec_to_test(inspec_path: str, output_format: str = "testinfra") -
         Converted test code or error message.
 
     """
+    try:
+        inspec_path = str(_normalize_path(inspec_path))
+    except ValueError as e:
+        return format_error_with_context(e, "validating InSpec path", inspec_path)
     return _convert_inspec_test(inspec_path, output_format)
 
 
@@ -643,6 +691,9 @@ def generate_inspec_from_recipe(recipe_path: str) -> str:
 
     """
     try:
+        # Validate and normalize path
+        recipe_path = str(_normalize_path(recipe_path))
+
         # First parse the recipe
         recipe_result: str = parse_recipe(recipe_path)
 
@@ -702,8 +753,6 @@ def convert_chef_databag_to_vars(
 
     """
     try:
-        import yaml
-
         # Validate inputs
         if not databag_content or not databag_content.strip():
             return (
@@ -809,13 +858,14 @@ def _validate_databags_directory(
     return databags_path, None
 
 
-def _convert_databag_item(item_file, databag_name: str, output_directory: str) -> dict:
+def _convert_databag_item(
+    item_file, databag_name: str, output_directory: str, base_path: Path
+) -> dict:
     """Convert a single databag item file to Ansible format."""
     item_name = item_file.stem
 
     try:
-        with item_file.open() as f:
-            content = f.read()
+        content = safe_read_text(item_file, base_path)
 
         # Detect if encrypted
         is_encrypted = _detect_encrypted_databag(content)
@@ -840,13 +890,17 @@ def _convert_databag_item(item_file, databag_name: str, output_directory: str) -
         return {"databag": databag_name, "item": item_name, "error": str(e)}
 
 
-def _process_databag_directory(databag_dir, output_directory: str) -> list[dict]:
+def _process_databag_directory(
+    databag_dir, output_directory: str, base_path: Path
+) -> list[dict]:
     """Process all items in a single databag directory."""
     results = []
     databag_name = databag_dir.name
 
-    for item_file in databag_dir.glob("*.json"):
-        result = _convert_databag_item(item_file, databag_name, output_directory)
+    for item_file in safe_glob(databag_dir, "*.json", base_path):
+        result = _convert_databag_item(
+            item_file, databag_name, output_directory, base_path
+        )
        results.append(result)
 
     return results
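Note how base_path is threaded through _process_databag_directory into _convert_databag_item, so every nested read is contained by the original data-bags root rather than by the per-databag subdirectory; the caller's side of this appears in the next hunk. A hedged sketch of the resulting call flow (paths hypothetical):

    databags_path = Path("/workspace/chef/data_bags")
    for databag_dir in databags_path.iterdir():
        if databag_dir.is_dir():
            # The data-bags root, not databag_dir, is the containment base.
            _process_databag_directory(databag_dir, "/workspace/out", databags_path)
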
@@ -881,11 +935,13 @@ def generate_ansible_vault_from_databags(
     conversion_results = []
 
     # Process each data bag directory
-    for databag_dir in databags_path.iterdir():
+    for databag_dir in databags_path.iterdir():  # nosonar
         if not databag_dir.is_dir():
             continue
 
-        results = _process_databag_directory(databag_dir, output_directory)
+        results = _process_databag_directory(
+            databag_dir, output_directory, databags_path
+        )
         conversion_results.extend(results)
 
     # Generate summary and file structure
@@ -912,6 +968,9 @@ def analyse_chef_databag_usage(cookbook_path: str, databags_path: str = "") -> s
         Analysis of data bag usage and migration recommendations
 
     """
+    cookbook_path = str(_normalize_path(cookbook_path))
+    if databags_path:
+        databags_path = str(_normalize_path(databags_path))
     try:
         cookbook = _normalize_path(cookbook_path)
         if not cookbook.exists():
@@ -1024,12 +1083,11 @@ def generate_inventory_from_chef_environments(
     environments = {}
     processing_results = []
 
-    for env_file in env_path.glob("*.rb"):
+    for env_file in safe_glob(env_path, "*.rb", env_path):
         env_name = env_file.stem
 
         try:
-            with env_file.open("r") as f:
-                content = f.read()
+            content = safe_read_text(env_file, env_path)
 
             env_data = _parse_chef_environment_content(content)
             environments[env_name] = env_data
@@ -1371,8 +1429,6 @@ def _generate_inventory_group_from_environment(
     env_data: dict, env_name: str, include_constraints: bool
 ) -> str:
     """Generate Ansible inventory group configuration from environment data."""
-    import yaml
-
     group_vars: dict[str, Any] = {}
 
     # Add environment metadata
@@ -1405,7 +1461,7 @@ def _generate_inventory_group_from_environment(
         ),
     }
 
-    return yaml.dump(group_vars, default_flow_style=False, indent=2)
+    return str(yaml.dump(group_vars, default_flow_style=False, indent=2))
 
 
 def _build_conversion_summary(results: list) -> str:
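Two related cleanups recur through these hunks: the function-local import yaml statements disappear in favour of the new module-level import, and yaml.dump(...) results gain a str(...) wrapper, presumably because the PyYAML stubs type the no-stream return value loosely and the wrapper pins it to str for strict checkers. The idiom in isolation:

    import yaml

    group_vars = {"env": "staging"}
    # yaml.dump with no stream already returns the document text; str() just
    # pins the declared type for static analysis.
    text: str = str(yaml.dump(group_vars, default_flow_style=False, indent=2))
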
@@ -1458,8 +1514,6 @@ def _generate_yaml_inventory(environments: dict) -> str:
         YAML inventory string
 
     """
-    import yaml
-
     inventory: dict[str, Any] = {"all": {"children": {}}}
 
     for env_name, env_data in environments.items():
@@ -1589,8 +1643,7 @@ def _extract_environment_usage_from_cookbook(cookbook_path) -> list:
     # Search for environment usage in Ruby files
     for ruby_file in cookbook_path.rglob("*.rb"):
         try:
-            with ruby_file.open("r") as f:
-                content = f.read()
+            content = safe_read_text(ruby_file, cookbook_path)
 
             # Find environment usage patterns
             found_patterns = _find_environment_patterns_in_content(
@@ -1647,13 +1700,12 @@ def _analyse_environments_structure(environments_path) -> dict:
     """Analyse the structure of Chef environments directory."""
     structure: dict[str, Any] = {"total_environments": 0, "environments": {}}
 
-    for env_file in environments_path.glob("*.rb"):
+    for env_file in safe_glob(environments_path, "*.rb", environments_path):
         structure["total_environments"] += 1
         env_name = env_file.stem
 
         try:
-            with env_file.open("r") as f:
-                content = f.read()
+            content = safe_read_text(env_file, environments_path)
 
             env_data = _parse_chef_environment_content(content)
 
@@ -1849,12 +1901,10 @@ def _convert_databag_to_ansible_vars(
 
 def _generate_vault_content(vars_dict: dict, databag_name: str) -> str:
     """Generate Ansible Vault YAML content from variables dictionary."""
-    import yaml
-
     # Structure for vault file
     vault_vars = {f"{databag_name}_vault": vars_dict}
 
-    return yaml.dump(vault_vars, default_flow_style=False, indent=2)
+    return str(yaml.dump(vault_vars, default_flow_style=False, indent=2))
 
 
 def _detect_encrypted_databag(content: str) -> bool:
@@ -2033,8 +2083,7 @@ def _extract_databag_usage_from_cookbook(cookbook_path) -> list:
     # Search for data bag usage in Ruby files
     for ruby_file in cookbook_path.rglob("*.rb"):
         try:
-            with ruby_file.open() as f:
-                content = f.read()
+            content = safe_read_text(ruby_file, cookbook_path)
 
             # Find data bag usage patterns
             found_patterns = _find_databag_patterns_in_content(content, str(ruby_file))
@@ -2096,7 +2145,7 @@ def _analyse_databag_structure(databags_path) -> dict:
         "databags": {},
     }
 
-    for databag_dir in databags_path.iterdir():
+    for databag_dir in databags_path.iterdir():  # nosonar
         if not databag_dir.is_dir():
             continue
 
@@ -2104,13 +2153,12 @@ def _analyse_databag_structure(databags_path) -> dict:
         structure["total_databags"] += 1
 
         items = []
-        for item_file in databag_dir.glob("*.json"):
+        for item_file in safe_glob(databag_dir, "*.json", databags_path):
             structure["total_items"] += 1
             item_name = item_file.stem
 
             try:
-                with item_file.open() as f:
-                    content = f.read()
+                content = safe_read_text(item_file, databags_path)
 
                 is_encrypted = _detect_encrypted_databag(content)
                 if is_encrypted:
@@ -2356,8 +2404,10 @@ def assess_chef_migration_complexity(
         Detailed assessment report in markdown format.
 
     """
+    # Sanitise and contain user-provided cookbook paths before processing
+    sanitized = _sanitize_cookbook_paths_input(cookbook_paths)
     return _assess_chef_migration_complexity(
-        cookbook_paths, migration_scope, target_platform
+        sanitized, migration_scope, target_platform
     )
 
 
@@ -2382,7 +2432,9 @@ def generate_migration_plan(
         Detailed migration plan in markdown format.
 
     """
-    return _generate_migration_plan(cookbook_paths, migration_strategy, timeline_weeks)
+    # Sanitise and contain user-provided cookbook paths before processing
+    sanitized = _sanitize_cookbook_paths_input(cookbook_paths)
+    return _generate_migration_plan(sanitized, migration_strategy, timeline_weeks)
 
 
 @mcp.tool()
@@ -2400,7 +2452,9 @@ def analyse_cookbook_dependencies(cookbook_paths: str) -> str:
         Dependency analysis report in markdown format.
 
     """
-    return _analyse_cookbook_dependencies(cookbook_paths)
+    # Sanitise and contain user-provided cookbook paths before processing
+    sanitized = _sanitize_cookbook_paths_input(cookbook_paths)
+    return _analyse_cookbook_dependencies(sanitized)
 
 
 @mcp.tool()
@@ -2424,11 +2478,47 @@ def generate_migration_report(
         Comprehensive migration report in markdown format.
 
     """
+    # Sanitise and contain user-provided cookbook paths before processing
+    sanitized = _sanitize_cookbook_paths_input(cookbook_paths)
     return _generate_migration_report(
-        cookbook_paths, report_format, include_technical_details
+        sanitized, report_format, include_technical_details
     )
 
 
+def _sanitize_cookbook_paths_input(cookbook_paths: str) -> str:
+    """
+    Sanitise a comma-separated list of cookbook paths.
+
+    Args:
+        cookbook_paths: Comma-separated paths provided by the user.
+
+    Returns:
+        A comma-separated string of normalised paths.
+
+    Raises:
+        ValueError: If any path is invalid.
+
+    """
+    sanitized_paths: list[str] = []
+    for raw in cookbook_paths.split(","):
+        candidate = raw.strip()
+        if not candidate:
+            continue
+        # Normalize the path (resolves ., .., symlinks)
+
+        # prevents traversal attacks; file access is further contained by per-operation checks
+        normalised = _normalize_path(candidate)
+
+        # Validate it's an absolute path after normalization
+        if not normalised.is_absolute():
+            msg = f"Path must be absolute after normalization: {candidate}"
+            raise ValueError(msg)
+
+        # Use the normalized absolute path (temp dirs, workspace dirs all allowed)
+        sanitized_paths.append(str(normalised))
+    return ",".join(sanitized_paths)
+
+
 @mcp.tool()
 def validate_conversion(
     conversion_type: str,
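_sanitize_cookbook_paths_input is shared by the four assessment tools above. Its contract in one illustration (paths hypothetical, and assumed to normalise to themselves):

    raw = "/workspace/cookbooks/nginx, ,/workspace/cookbooks/postgres"
    print(_sanitize_cookbook_paths_input(raw))
    # "/workspace/cookbooks/nginx,/workspace/cookbooks/postgres"
    # Blank entries are dropped; a non-absolute result raises ValueError.
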
@@ -2471,6 +2561,7 @@ def parse_habitat_plan(plan_path: str) -> str:
         JSON string with parsed plan metadata
 
     """
+    plan_path = str(_normalize_path(plan_path))
     return _parse_habitat_plan(plan_path)
 
 
@@ -2568,6 +2659,7 @@ def analyse_chef_search_patterns(recipe_or_cookbook_path: str) -> str:
         Analysis of search patterns found.
 
     """
+    recipe_or_cookbook_path = str(_normalize_path(recipe_or_cookbook_path))
     return _analyse_chef_search_patterns(recipe_or_cookbook_path)
 
 
@@ -2590,6 +2682,7 @@ def profile_cookbook_performance(cookbook_path: str) -> str:
     from souschef.profiling import generate_cookbook_performance_report
 
     try:
+        cookbook_path = str(_normalize_path(cookbook_path))
         report = generate_cookbook_performance_report(cookbook_path)
         return str(report)
     except Exception as e:
@@ -2635,6 +2728,7 @@ def profile_parsing_operation(
     func = operation_map[operation]
 
     try:
+        file_path = str(_normalize_path(file_path))
         if detailed:
             _, profile_result = detailed_profile_function(func, file_path)
             result = str(profile_result)
@@ -2678,6 +2772,7 @@ def generate_jenkinsfile_from_chef(
     from souschef.ci.jenkins_pipeline import generate_jenkinsfile_from_chef_ci
 
     try:
+        cookbook_path = str(_normalize_path(cookbook_path))
         # Convert string to boolean
         enable_parallel_bool = enable_parallel.lower() in ("yes", "true", "1")
 
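The CI generators take their feature flags as strings, presumably because MCP tool arguments arrive as text, and coerce them with the same in ("yes", "true", "1") idiom each time. The idiom in isolation (helper name hypothetical):

    def str_to_bool(flag: str) -> bool:
        """Treat common affirmative spellings as True; everything else is False."""
        return flag.strip().lower() in ("yes", "true", "1")

    # str_to_bool("Yes") -> True; str_to_bool("no") -> False; str_to_bool("") -> False
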
@@ -2720,6 +2815,7 @@ def generate_gitlab_ci_from_chef(
     from souschef.ci.gitlab_ci import generate_gitlab_ci_from_chef_ci
 
     try:
+        cookbook_path = str(_normalize_path(cookbook_path))
         enable_cache_bool = enable_cache.lower() in ("yes", "true", "1")
         enable_artifacts_bool = enable_artifacts.lower() in ("yes", "true", "1")
         result = generate_gitlab_ci_from_chef_ci(
@@ -2765,6 +2861,7 @@ def generate_github_workflow_from_chef(
     from souschef.ci.github_actions import generate_github_workflow_from_chef_ci
 
     try:
+        cookbook_path = str(_normalize_path(cookbook_path))
         enable_cache_bool = enable_cache.lower() in ("yes", "true", "1")
         enable_artifacts_bool = enable_artifacts.lower() in ("yes", "true", "1")
         result = generate_github_workflow_from_chef_ci(
@@ -2786,6 +2883,105 @@ def generate_github_workflow_from_chef(
     )
 
 
+@mcp.tool()
+def generate_ansible_repository(
+    output_path: str,
+    repo_type: str = "auto",
+    cookbook_path: str = "",
+    org_name: str = "myorg",
+    init_git: str = "yes",
+) -> str:
+    """
+    Generate a complete Ansible repository structure.
+
+    Analyses converted Chef cookbooks and creates an appropriate Ansible
+    repository structure with proper organisation, configuration files,
+    and git initialisation.
+
+    Repo Types:
+    - auto: Auto-detect based on conversion analysis (recommended)
+    - inventory_first: Classic inventory-first (best for infra management)
+    - playbooks_roles: Simple playbooks + roles (best for small projects)
+    - collection: Ansible Collection layout (best for reusable automation)
+    - mono_repo: Multi-project mono-repo (best for platform teams)
+
+    Args:
+        output_path: Path where the repository should be created
+        repo_type: Type of repository structure (auto/inventory_first/playbooks_roles/collection/mono_repo)
+        cookbook_path: Optional path to Chef cookbook for analysis (used with repo_type='auto')
+        org_name: Organisation name for the repository
+        init_git: Whether to initialise a git repository ('yes' or 'no')
+
+    Returns:
+        JSON string with generation results including success status, files created, and git status
+
+    """
+    from souschef.generators.repo import (
+        analyse_conversion_output,
+    )
+    from souschef.generators.repo import (
+        generate_ansible_repository as gen_repo,
+    )
+
+    try:
+        output_path = str(_normalize_path(output_path))
+        init_git_bool = init_git.lower() in ("yes", "true", "1")
+
+        # Determine repo type
+        if repo_type == "auto":
+            if not cookbook_path:
+                return json.dumps(
+                    {
+                        "success": False,
+                        "error": "cookbook_path required when repo_type='auto'",
+                    }
+                )
+
+            cookbook_path = str(_normalize_path(cookbook_path))
+
+            # Validate cookbook path exists
+            if not Path(cookbook_path).exists():
+                return json.dumps(
+                    {
+                        "success": False,
+                        "error": f"Cookbook path does not exist: {cookbook_path}",
+                    }
+                )
+
+            # Analyse the cookbook to determine best repo type
+            # Count recipes
+            recipes_dir = Path(cookbook_path) / "recipes"
+            num_recipes = (
+                len(list(recipes_dir.glob("*.rb"))) if recipes_dir.exists() else 0
+            )
+
+            # Basic heuristics for repo type selection
+            has_multiple_apps = num_recipes > 5
+            num_roles = max(1, num_recipes // 2)  # Estimate roles from recipes
+
+            determined_type = analyse_conversion_output(
+                cookbook_path=cookbook_path,
+                num_recipes=num_recipes,
+                num_roles=num_roles,
+                has_multiple_apps=has_multiple_apps,
+                needs_multi_env=True,
+            )
+            result = gen_repo(output_path, determined_type, org_name, init_git_bool)
+        else:
+            # Use specified repo type
+            result = gen_repo(output_path, repo_type, org_name, init_git_bool)
+
+        return json.dumps(result, indent=2)
+
+    except Exception as e:
+        return json.dumps(
+            {
+                "success": False,
+                "error": f"Failed to generate repository: {e}",
+            }
+        )
+
+
 @mcp.tool()
 def parse_chef_migration_assessment(
     cookbook_paths: str,
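generate_ansible_repository always returns JSON, with success/error keys on the failure paths and the generator's result otherwise. A hedged usage sketch (paths hypothetical; the success-side shape follows the docstring, not a verified schema):

    import json

    raw = generate_ansible_repository(
        output_path="/workspace/ansible-repo",
        repo_type="playbooks_roles",
        org_name="acme",
        init_git="no",
    )
    result = json.loads(raw)
    if result.get("success") is False:
        print(result["error"])
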
@@ -2809,6 +3005,728 @@ def parse_chef_migration_assessment(
2809
3005
  )
2810
3006
 
2811
3007
 
3008
+ @mcp.tool()
3009
+ def convert_cookbook_comprehensive(
3010
+ cookbook_path: str,
3011
+ output_path: str,
3012
+ assessment_data: str = "",
3013
+ include_templates: bool = True,
3014
+ include_attributes: bool = True,
3015
+ include_recipes: bool = True,
3016
+ role_name: str = "",
3017
+ ) -> str:
3018
+ """
3019
+ Convert an entire Chef cookbook to a complete Ansible role.
3020
+
3021
+ This function performs comprehensive conversion of a Chef cookbook to an Ansible role,
3022
+ including recipes, templates, attributes, and proper role structure. It can use
3023
+ assessment data to optimize the conversion process.
3024
+
3025
+ Args:
3026
+ cookbook_path: Path to the Chef cookbook directory
3027
+ output_path: Directory where the Ansible role will be created
3028
+ assessment_data: Optional JSON string with assessment results for optimization
3029
+ include_templates: Whether to convert ERB templates to Jinja2 (default: True)
3030
+ include_attributes: Whether to convert attributes to Ansible variables (default: True)
3031
+ include_recipes: Whether to convert recipes to Ansible tasks (default: True)
3032
+ role_name: Name for the Ansible role (defaults to cookbook name)
3033
+
3034
+ Returns:
3035
+ Summary of the conversion process and created files
3036
+
3037
+ """
3038
+ try:
3039
+ from souschef.core.path_utils import _normalize_path
3040
+
3041
+ cookbook_dir = _normalize_path(cookbook_path)
3042
+ output_dir = _normalize_path(output_path)
3043
+
3044
+ if not cookbook_dir.exists():
3045
+ return f"Error: Cookbook path does not exist: {cookbook_path}"
3046
+
3047
+ # Parse assessment data if provided
3048
+ assessment = {}
3049
+ if assessment_data:
3050
+ try:
3051
+ assessment = json.loads(assessment_data)
3052
+ except json.JSONDecodeError:
3053
+ return f"Error: Invalid assessment data JSON: {assessment_data}"
3054
+
3055
+ # Get cookbook metadata and setup
3056
+ cookbook_name, role_name = _setup_conversion_metadata(cookbook_dir, role_name)
3057
+
3058
+ # Create role directory structure
3059
+ role_dir = _create_role_structure(output_dir, role_name)
3060
+
3061
+ # Initialize conversion summary
3062
+ conversion_summary = {
3063
+ "cookbook_name": cookbook_name,
3064
+ "role_name": role_name,
3065
+ "converted_files": [],
3066
+ "errors": [],
3067
+ "warnings": [],
3068
+ }
3069
+
3070
+ # Convert components
3071
+ if include_recipes:
3072
+ _convert_recipes(cookbook_dir, role_dir, conversion_summary)
3073
+
3074
+ if include_templates:
3075
+ _convert_templates(cookbook_dir, role_dir, conversion_summary)
3076
+
3077
+ if include_attributes:
3078
+ _convert_attributes(cookbook_dir, role_dir, conversion_summary)
3079
+
3080
+ # Create main task file and metadata
3081
+ _create_main_task_file(
3082
+ cookbook_dir, role_dir, conversion_summary, include_recipes
3083
+ )
3084
+ _create_role_metadata(
3085
+ role_dir, role_name, cookbook_name, assessment, conversion_summary
3086
+ )
3087
+
3088
+ # Generate and return summary report
3089
+ return _generate_conversion_report(conversion_summary, role_dir)
3090
+
3091
+ except Exception as e:
3092
+ return format_error_with_context(
3093
+ e, "converting cookbook comprehensively", cookbook_path
3094
+ )
3095
+
3096
+
3097
+ def _setup_conversion_metadata(cookbook_dir: Path, role_name: str) -> tuple[str, str]:
3098
+ """Get cookbook metadata and determine role name."""
3099
+ metadata_file = cookbook_dir / METADATA_RB
3100
+ cookbook_name = cookbook_dir.name
3101
+ if metadata_file.exists():
3102
+ metadata = _parse_cookbook_metadata(str(metadata_file))
3103
+ name_from_metadata = metadata.get("name")
3104
+ if name_from_metadata is not None:
3105
+ cookbook_name = str(name_from_metadata)
3106
+
3107
+ if not role_name:
3108
+ role_name = cookbook_name
3109
+
3110
+ return cookbook_name, role_name
3111
+
3112
+
3113
+ def _validate_role_name(role_name: str) -> None:
3114
+ """
3115
+ Validate that role_name is safe for filesystem operations.
3116
+
3117
+ Args:
3118
+ role_name: The role name to validate.
3119
+
3120
+ Raises:
3121
+ ValueError: If the role name contains unsafe characters.
3122
+
3123
+ """
3124
+ if not role_name:
3125
+ raise ValueError("Role name cannot be empty")
3126
+ if ".." in role_name or "/" in role_name or "\\" in role_name:
3127
+ raise ValueError(f"Role name contains unsafe characters: {role_name}")
3128
+
3129
+
3130
+ def _create_role_structure(output_dir: Path, role_name: str) -> Path:
3131
+ """Create the standard Ansible role directory structure."""
3132
+ # Validate role_name to ensure it's safe for filesystem operations
3133
+ _validate_role_name(role_name)
3134
+
3135
+ base = os.path.realpath(str(output_dir))
3136
+ role_dir_str = os.path.realpath(os.path.join(base, role_name)) # noqa: PTH111, PTH118
3137
+ if os.path.commonpath([base, role_dir_str]) != base:
3138
+ raise RuntimeError("Unsafe role path outside output directory")
3139
+ role_dir = Path(role_dir_str)
3140
+ role_tasks_dir = role_dir / "tasks"
3141
+ role_templates_dir = role_dir / "templates"
3142
+ role_vars_dir = role_dir / "vars"
3143
+ role_defaults_dir = role_dir / "defaults"
3144
+
3145
+ # All paths are validated via os.path.commonpath containment check above
3146
+ for directory in [
3147
+ role_tasks_dir,
3148
+ role_templates_dir,
3149
+ role_vars_dir,
3150
+ role_defaults_dir,
3151
+ ]:
3152
+ directory.mkdir(parents=True, exist_ok=True) # nosonar: S2083
3153
+
3154
+ return role_dir
3155
+
3156
+
3157
+ def _convert_recipes(
3158
+ cookbook_dir: Path, role_dir: Path, conversion_summary: dict
3159
+ ) -> None:
3160
+ """Convert Chef recipes to Ansible tasks."""
3161
+ cookbook_base = os.path.realpath(str(cookbook_dir))
3162
+ recipes_dir_str = os.path.realpath(os.path.join(cookbook_base, "recipes")) # noqa: PTH111, PTH118
3163
+ if os.path.commonpath([cookbook_base, recipes_dir_str]) != cookbook_base:
3164
+ raise RuntimeError("Unsafe recipes path outside cookbook directory")
3165
+ recipes_dir = Path(recipes_dir_str)
3166
+ if not recipes_dir.exists():
3167
+ conversion_summary["warnings"].append(
3168
+ f"No recipes directory found in {cookbook_dir.name}. "
3169
+ "Cookbook cannot be converted to Ansible tasks."
3170
+ )
3171
+ return
3172
+
3173
+ from souschef.converters.playbook import generate_playbook_from_recipe
3174
+
3175
+ recipe_files = safe_glob(recipes_dir, "*.rb", cookbook_dir)
3176
+ if not recipe_files:
3177
+ conversion_summary["warnings"].append(
3178
+ f"No recipe files (*.rb) found in {cookbook_dir.name}/recipes/. "
3179
+ "Cookbook has no recipes to convert."
3180
+ )
3181
+ return
3182
+
3183
+ for recipe_file in recipe_files:
3184
+ try:
3185
+ validated_recipe = _validated_candidate(recipe_file, recipes_dir)
3186
+ recipe_name = validated_recipe.stem
3187
+
3188
+ # Parse recipe to validate it can be processed
3189
+ parse_result = _parse_recipe(str(validated_recipe))
3190
+ if parse_result.startswith("Error:"):
3191
+ conversion_summary["errors"].append(
3192
+ f"Failed to parse recipe {recipe_name}: {parse_result}"
3193
+ )
3194
+ continue
3195
+
3196
+ # Convert to Ansible tasks
3197
+ playbook_yaml = generate_playbook_from_recipe(str(validated_recipe))
3198
+
3199
+ # Write as task file; _safe_join already enforces containment within role_dir
3200
+ task_file = _safe_join(role_dir, "tasks", f"{recipe_name}.yml")
3201
+ try:
3202
+ task_file.parent.mkdir(parents=True, exist_ok=True) # nosonar
3203
+ safe_write_text(task_file, role_dir, playbook_yaml)
3204
+ except OSError as write_err:
3205
+ conversion_summary["errors"].append(
3206
+ f"Failed to write task file {task_file.name}: {write_err}"
3207
+ )
3208
+ continue
3209
+
3210
+ conversion_summary["converted_files"].append(
3211
+ {
3212
+ "type": "task",
3213
+ "source": f"recipes/{recipe_name}.rb",
3214
+ "target": f"{role_dir.name}/tasks/{recipe_name}.yml",
3215
+ }
3216
+ )
3217
+
3218
+ except Exception as e:
3219
+ conversion_summary["errors"].append(
3220
+ f"Error converting recipe {recipe_file.name}: {str(e)}"
3221
+ )
3222
+
3223
+
3224
+ def _convert_templates(
3225
+ cookbook_dir: Path, role_dir: Path, conversion_summary: dict
3226
+ ) -> None:
3227
+ """Convert ERB templates to Jinja2 templates."""
3228
+ templates_dir = _safe_join(cookbook_dir, "templates")
3229
+
3230
+ if not templates_dir.exists():
3231
+ return
3232
+
3233
+ for template_file in safe_glob(templates_dir, "**/*.erb", cookbook_dir):
3234
+ validated_template = template_file
3235
+ try:
3236
+ # Convert ERB to Jinja2
3237
+ validated_template = _validated_candidate(template_file, templates_dir)
3238
+
3239
+ conversion_result = _parse_template(str(validated_template))
3240
+ if conversion_result.startswith("Error:"):
3241
+ conversion_summary["errors"].append(
3242
+ f"Failed to convert template {validated_template.name}: {conversion_result}"
3243
+ )
3244
+ continue
3245
+
3246
+ # Parse the JSON result
3247
+ try:
3248
+ template_data = json.loads(conversion_result)
3249
+ jinja2_content = template_data.get("jinja2_template", "")
3250
+
3251
+ # Determine relative path for role templates using _safe_join
3252
+
3253
+ rel_path = validated_template.relative_to(templates_dir)
3254
+ # Build target file path with inline containment guard
3255
+ target_file = _safe_join(
3256
+ role_dir, "templates", str(rel_path.with_suffix(""))
3257
+ )
3258
+ target_file.parent.mkdir(parents=True, exist_ok=True)
3259
+ safe_write_text(target_file, role_dir, jinja2_content)
3260
+
3261
+ conversion_summary["converted_files"].append(
3262
+ {
3263
+ "type": "template",
3264
+ "source": f"templates/{rel_path}",
3265
+ "target": f"{role_dir.name}/templates/{rel_path.with_suffix('')}",
3266
+ }
3267
+ )
3268
+
3269
+ except json.JSONDecodeError:
3270
+ conversion_summary["errors"].append(
3271
+ f"Invalid JSON result for template {validated_template.name}"
3272
+ )
3273
+
3274
+ except Exception as e:
3275
+ conversion_summary["errors"].append(
3276
+ f"Error converting template {validated_template.name}: {str(e)}"
3277
+ )
3278
+
3279
+
3280
+ def _convert_attributes(
3281
+ cookbook_dir: Path, role_dir: Path, conversion_summary: dict
3282
+ ) -> None:
3283
+ """Convert Chef attributes to Ansible variables."""
3284
+ attributes_dir = _safe_join(cookbook_dir, "attributes")
3285
+ role_defaults_dir = _safe_join(role_dir, "defaults")
3286
+
3287
+ if not attributes_dir.exists():
3288
+ return
3289
+
3290
+ for attr_file in safe_glob(attributes_dir, "*.rb", cookbook_dir):
3291
+ validated_attr = attr_file
3292
+ try:
3293
+ validated_attr = _validated_candidate(attr_file, attributes_dir)
3294
+ # Read the file content
3295
+ content = safe_read_text(validated_attr, cookbook_dir)
3296
+
3297
+ # Extract attributes (already imported at top of file)
3298
+ raw_attributes = _extract_attributes(content)
3299
+
3300
+ if not raw_attributes:
3301
+ conversion_summary["warnings"].append(
3302
+ f"No attributes found in {validated_attr.name}"
3303
+ )
3304
+ continue
3305
+
3306
+ # Resolve precedence to get final values
3307
+ resolved_attributes = _resolve_attribute_precedence(raw_attributes)
3308
+
3309
+ # Convert to Ansible variable format (flatten nested keys)
3310
+ ansible_vars = {}
3311
+ for attr_path, attr_info in resolved_attributes.items():
3312
+ # Convert Chef attribute paths to Ansible variable names
3313
+ # e.g., "nginx.port" becomes "nginx_port"
3314
+ ansible_key = attr_path.replace(".", "_")
3315
+ ansible_vars[ansible_key] = attr_info["value"]
3316
+
3317
+ # Write as defaults using _safe_join to prevent path injection
3318
+ # All paths are validated via _safe_join to ensure containment within role_defaults_dir
3319
+ defaults_filename: str = f"{validated_attr.stem}.yml"
3320
+ defaults_file: Path = _safe_join(role_defaults_dir, defaults_filename)
3321
+ defaults_yaml = yaml.dump(ansible_vars, default_flow_style=False, indent=2)
3322
+ defaults_file.parent.mkdir(parents=True, exist_ok=True)
3323
+ safe_write_text(defaults_file, role_dir, defaults_yaml)
3324
+
3325
+ conversion_summary["converted_files"].append(
3326
+ {
3327
+ "type": "defaults",
3328
+ "source": f"attributes/{validated_attr.name}",
3329
+ "target": f"{role_dir.name}/defaults/{validated_attr.stem}.yml",
3330
+ }
3331
+ )
3332
+
3333
+ except Exception as e:
3334
+ conversion_summary["errors"].append(
3335
+ f"Error converting attributes {validated_attr.name}: {str(e)}"
3336
+ )
3337
+
3338
+
3339
+ def _create_main_task_file(
3340
+ cookbook_dir: Path, role_dir: Path, conversion_summary: dict, include_recipes: bool
3341
+ ) -> None:
3342
+ """Create main.yml task file from default recipe if it exists."""
3343
+ if not include_recipes:
3344
+ return
3345
+
3346
+ # Build path to tasks directory safely
3347
+ tasks_dir: Path = _safe_join(role_dir, "tasks")
3348
+ # Build path to main.yml within tasks directory
3349
+ default_task_file: Path = _safe_join(tasks_dir, "main.yml")
3350
+ if default_task_file.exists():
3351
+ return # Already exists
3352
+
3353
+ # Build path to default recipe safely
3354
+ recipes_dir: Path = _safe_join(cookbook_dir, "recipes")
3355
+ default_recipe: Path = _safe_join(recipes_dir, "default.rb")
3356
+ if not default_recipe.exists():
3357
+ return
3358
+
3359
+ try:
3360
+ from souschef.converters.playbook import generate_playbook_from_recipe
3361
+
3362
+ playbook_yaml = generate_playbook_from_recipe(str(default_recipe))
3363
+ default_task_file.parent.mkdir(parents=True, exist_ok=True)
3364
+ safe_write_text(default_task_file, role_dir, playbook_yaml)
3365
+ conversion_summary["converted_files"].append(
3366
+ {
3367
+ "type": "task",
3368
+ "source": "recipes/default.rb",
3369
+ "target": f"{role_dir.name}/tasks/main.yml",
3370
+ }
3371
+ )
3372
+ except Exception as e:
3373
+ conversion_summary["warnings"].append(
3374
+ f"Could not create main.yml from default recipe: {str(e)}"
3375
+ )
3376
+
3377
+
3378
+ def _create_role_metadata(
3379
+ role_dir: Path,
3380
+ role_name: str,
3381
+ cookbook_name: str,
3382
+ assessment: dict,
3383
+ conversion_summary: dict,
3384
+ ) -> None:
3385
+ """Create Ansible role metadata file."""
3386
+ # Use _safe_join to construct metadata file path
3387
+
3388
+ meta_dir = _safe_join(role_dir, "meta")
3389
+ meta_dir.mkdir(parents=True, exist_ok=True)
3390
+ meta_file = _safe_join(meta_dir, "main.yml")
3391
+
3392
+ meta_content: dict[str, Any] = {
3393
+ "galaxy_info": {
3394
+ "role_name": role_name,
3395
+ "author": "SousChef Migration",
3396
+ "description": f"Converted from Chef cookbook {cookbook_name}",
3397
+ "license": "MIT",
3398
+ "min_ansible_version": "2.9",
3399
+ "platforms": ["ubuntu", "centos", "redhat"],
3400
+ "galaxy_tags": ["chef", "migration", "converted"],
3401
+ },
3402
+ "dependencies": [],
3403
+ }
3404
+
3405
+ # Add dependencies from assessment data if available
3406
+ if assessment and "dependencies" in assessment:
3407
+ deps = assessment["dependencies"]
3408
+ if isinstance(deps, list):
3409
+ meta_content["dependencies"] = [{"role": dep} for dep in deps]
3410
+
3411
+ meta_yaml = yaml.dump(meta_content, default_flow_style=False, indent=2)
3412
+ safe_write_text(meta_file, role_dir, meta_yaml)
3413
+
3414
+ conversion_summary["converted_files"].append(
3415
+ {
3416
+ "type": "meta",
3417
+ "source": METADATA_RB,
3418
+ "target": f"{role_name}/meta/main.yml",
3419
+ }
3420
+ )
3421
+
3422
+
3423
+ def _generate_conversion_report(conversion_summary: dict, role_dir: Path) -> str:
3424
+ """Generate a comprehensive conversion report."""
3425
+ summary_lines = [
3426
+ f"# Cookbook Conversion Summary: {conversion_summary['cookbook_name']} → {conversion_summary['role_name']}",
3427
+ "",
3428
+ "## Files Converted:",
3429
+ ]
3430
+
3431
+ for file_info in conversion_summary["converted_files"]:
3432
+ summary_lines.append(
3433
+ f"- {file_info['type'].title()}: {file_info['source']} → {file_info['target']}"
3434
+ )
3435
+
3436
+ if conversion_summary["errors"]:
3437
+ summary_lines.append("")
3438
+ summary_lines.append("## Errors:")
3439
+ for error in conversion_summary["errors"]:
3440
+ summary_lines.append(f"- ERROR: {error}")
3441
+
3442
+ if conversion_summary["warnings"]:
3443
+ summary_lines.append("")
3444
+ summary_lines.append("## Warnings:")
3445
+ for warning in conversion_summary["warnings"]:
3446
+ summary_lines.append(f"- WARNING: {warning}")
3447
+
3448
+ summary_lines.append("")
3449
+ summary_lines.append(f"## Role Location: {role_dir}")
3450
+ summary_lines.append("")
3451
+ summary_lines.append("## Next Steps:")
3452
+ summary_lines.append("1. Review converted files for accuracy")
3453
+ summary_lines.append("2. Test the role with ansible-playbook --check")
3454
+ summary_lines.append("3. Update variable references and dependencies")
3455
+ summary_lines.append("4. Run integration tests")
3456
+
3457
+ return "\n".join(summary_lines)
3458
+
3459
+
3460
+@mcp.tool()
+def convert_all_cookbooks_comprehensive(
+    cookbooks_path: str,
+    output_path: str,
+    assessment_data: str = "",
+    include_templates: bool = True,
+    include_attributes: bool = True,
+    include_recipes: bool = True,
+) -> str:
+    """
+    Convert all Chef cookbooks in a directory to complete Ansible roles.
+
+    Converts every Chef cookbook found in the specified directory into an
+    Ansible role, including recipes, templates, attributes, and the standard
+    role structure. Assessment data, when supplied, is used to optimize the
+    conversion (for example, to populate role dependencies).
+
+    Args:
+        cookbooks_path: Path to the directory containing Chef cookbooks
+        output_path: Directory where the Ansible roles will be created
+        assessment_data: Optional JSON string with assessment results for optimization
+        include_templates: Whether to convert ERB templates to Jinja2 (default: True)
+        include_attributes: Whether to convert attributes to Ansible variables (default: True)
+        include_recipes: Whether to convert recipes to Ansible tasks (default: True)
+
+    Returns:
+        Summary of the conversion process and created files for all cookbooks
+
+    """
+    try:
+        cookbooks_dir, output_dir = _validate_conversion_paths(
+            cookbooks_path, output_path
+        )
+        assessment = _parse_assessment_data(assessment_data)
+        cookbook_dirs = _find_cookbook_directories(cookbooks_dir)
+
+        if not cookbook_dirs:
+            return f"Error: No Chef cookbooks found in {cookbooks_path}. Cookbooks must contain a {METADATA_RB} file."
+
+        overall_summary = _initialize_conversion_summary(len(cookbook_dirs))
+        _convert_all_cookbooks(
+            cookbook_dirs,
+            output_dir,
+            assessment,
+            include_templates,
+            include_attributes,
+            include_recipes,
+            overall_summary,
+        )
+
+        return _generate_batch_conversion_report(overall_summary, output_dir)
+
+    except Exception as e:
+        return format_error_with_context(
+            e, "converting all cookbooks comprehensively", cookbooks_path
+        )
+
+
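# Editor's note (not part of the upstream diff): a minimal usage sketch,
# assuming FastMCP's tool() decorator leaves the function callable as plain
# Python (treat that as an assumption); both paths must resolve inside the
# current working directory, per _validate_conversion_paths below:
#
#     report = convert_all_cookbooks_comprehensive(
#         cookbooks_path="cookbooks",
#         output_path="roles",
#         assessment_data='{"dependencies": ["common"]}',
#     )
#     print(report)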
+def _validate_conversion_paths(
+    cookbooks_path: str, output_path: str
+) -> tuple[Path, Path]:
+    """Validate and return Path objects for conversion paths."""
+    base_dir = Path.cwd().resolve()
+
+    cookbooks_candidate = _normalize_path(cookbooks_path)
+    try:
+        cookbooks_dir = _ensure_within_base_path(cookbooks_candidate, base_dir)
+    except ValueError as e:
+        raise ValueError(f"Cookbooks path is invalid or outside workspace: {e}") from e
+
+    from souschef.core.path_utils import safe_exists
+
+    if not safe_exists(cookbooks_dir, base_dir):
+        raise ValueError(f"Cookbooks path does not exist: {cookbooks_path}")
+
+    output_candidate = _normalize_path(output_path)
+    try:
+        output_dir = _ensure_within_base_path(output_candidate, base_dir)
+    except ValueError as e:
+        raise ValueError(f"Output path is invalid or outside workspace: {e}") from e
+
+    return cookbooks_dir, output_dir
+
+
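# Editor's note (not part of the upstream diff): _normalize_path and
# _ensure_within_base_path are defined elsewhere and not shown in this hunk.
# A typical containment check of this shape -- an illustration, not
# necessarily SousChef's implementation -- looks like:
#
#     from pathlib import Path
#
#     def ensure_within_base(candidate: Path, base: Path) -> Path:
#         resolved = candidate.resolve()
#         if not resolved.is_relative_to(base):  # Path.is_relative_to: 3.9+
#             raise ValueError(f"{resolved} is outside {base}")
#         return resolved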
+def _parse_assessment_data(assessment_data: str) -> dict[Any, Any]:
+    """Parse assessment data JSON if provided."""
+    if not assessment_data:
+        return {}
+
+    try:
+        parsed = json.loads(assessment_data)
+        return parsed if isinstance(parsed, dict) else {}
+    except json.JSONDecodeError as e:
+        raise ValueError(f"Invalid assessment data JSON: {assessment_data}") from e
+
+
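# Editor's note (not part of the upstream diff): the parser's contract, traced
# from the code above:
#
#     _parse_assessment_data("")                         # -> {} (empty input)
#     _parse_assessment_data('{"dependencies": ["a"]}')  # -> {"dependencies": ["a"]}
#     _parse_assessment_data('["not", "a", "dict"]')     # -> {} (non-dict JSON dropped)
#     _parse_assessment_data("{bad json")                # raises ValueError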
+def _find_cookbook_directories(cookbooks_dir: Path) -> list[Path]:
+    """Find all cookbook directories containing metadata.rb files."""
+    return [
+        item
+        for item in cookbooks_dir.iterdir()
+        if item.is_dir() and (item / METADATA_RB).exists()
+    ]
+
+
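# Editor's note (not part of the upstream diff): given a hypothetical tree
#
#     cookbooks/
#         nginx/metadata.rb       <- detected
#         postgres/metadata.rb    <- detected
#         README.md               <- skipped (not a directory)
#         scratch/                <- skipped (no metadata.rb)
#
# the helper returns the nginx and postgres directories. Path.iterdir() yields
# entries in arbitrary order, so callers should not rely on sorting.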
+def _initialize_conversion_summary(total_cookbooks: int) -> dict:
+    """Initialize the overall conversion summary structure."""
+    return {
+        "total_cookbooks": total_cookbooks,
+        "converted_cookbooks": [],
+        "failed_cookbooks": [],
+        "total_converted_files": 0,
+        "total_errors": 0,
+        "total_warnings": 0,
+    }
+
+
+def _convert_all_cookbooks(
+    cookbook_dirs: list[Path],
+    output_dir: Path,
+    assessment: dict,
+    include_templates: bool,
+    include_attributes: bool,
+    include_recipes: bool,
+    overall_summary: dict,
+) -> None:
+    """Convert all cookbooks and update the overall summary."""
+    for cookbook_dir in cookbook_dirs:
+        try:
+            _convert_single_cookbook_comprehensive(
+                cookbook_dir,
+                output_dir,
+                assessment,
+                include_templates,
+                include_attributes,
+                include_recipes,
+                overall_summary,
+            )
+        except Exception as e:
+            overall_summary["failed_cookbooks"].append(
+                {
+                    "cookbook_name": cookbook_dir.name,
+                    "error": str(e),
+                }
+            )
+
+
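# Editor's note (not part of the upstream diff): the broad `except Exception`
# above is a batch-isolation choice -- a failing cookbook is recorded in
# failed_cookbooks and the loop continues, so one bad cookbook cannot abort
# the whole run. The trade-off is that programming errors are folded into the
# report rather than raised.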
+def _convert_single_cookbook_comprehensive(
+    cookbook_dir: Path,
+    output_dir: Path,
+    assessment: dict,
+    include_templates: bool,
+    include_attributes: bool,
+    include_recipes: bool,
+    overall_summary: dict,
+) -> None:
+    """Convert a single cookbook comprehensively."""
+    cookbook_name = cookbook_dir.name
+
+    # Get role name from metadata or directory name
+    role_name = _get_role_name(cookbook_dir, cookbook_name)
+
+    # Create role directory structure
+    role_dir = _create_role_structure(output_dir, role_name)
+
+    # Initialize conversion summary for this cookbook
+    conversion_summary = {
+        "cookbook_name": cookbook_name,
+        "role_name": role_name,
+        "converted_files": [],
+        "errors": [],
+        "warnings": [],
+    }
+
+    # Convert components
+    if include_recipes:
+        _convert_recipes(cookbook_dir, role_dir, conversion_summary)
+
+    if include_templates:
+        _convert_templates(cookbook_dir, role_dir, conversion_summary)
+
+    if include_attributes:
+        _convert_attributes(cookbook_dir, role_dir, conversion_summary)
+
+    # Create main task file and metadata
+    _create_main_task_file(cookbook_dir, role_dir, conversion_summary, include_recipes)
+    _create_role_metadata(
+        role_dir, role_name, cookbook_name, assessment, conversion_summary
+    )
+
+    # Add to overall summary
+    overall_summary["converted_cookbooks"].append(
+        {
+            "cookbook_name": cookbook_name,
+            "role_name": role_name,
+            "role_path": str(role_dir),
+            "converted_files": len(conversion_summary["converted_files"]),
+            "errors": len(conversion_summary["errors"]),
+            "warnings": len(conversion_summary["warnings"]),
+        }
+    )
+
+    overall_summary["total_converted_files"] += len(
+        conversion_summary["converted_files"]
+    )
+    overall_summary["total_errors"] += len(conversion_summary["errors"])
+    overall_summary["total_warnings"] += len(conversion_summary["warnings"])
+
+
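# Editor's note (not part of the upstream diff): for a cookbook named "nginx"
# with every include_* flag enabled, the helpers above would produce a role
# skeleton roughly like the following (the exact layout depends on
# _create_role_structure and the converters, which sit outside this hunk):
#
#     roles/nginx/
#         tasks/main.yml      <- _create_main_task_file
#         tasks/*.yml         <- _convert_recipes
#         templates/*.j2      <- _convert_templates
#         defaults/main.yml   <- _convert_attributes (location assumed)
#         meta/main.yml       <- _create_role_metadata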
+def _get_role_name(cookbook_dir: Path, default_name: str) -> str:
+    """Get the role name from metadata or return the default."""
+    metadata_file = cookbook_dir / METADATA_RB
+    if metadata_file.exists():
+        metadata = _parse_cookbook_metadata(str(metadata_file))
+        name = metadata.get("name")
+        # Ensure we return a string, handling potential list values
+        if name is None:
+            return default_name
+        if isinstance(name, list):
+            return str(name[0]) if name else default_name
+        return str(name)
+    return default_name
+
+
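# Editor's note (not part of the upstream diff): resolution order implemented
# above, assuming _parse_cookbook_metadata yields {"name": ...} for a
# metadata.rb containing `name 'nginx'`:
#
#     {"name": "nginx"}      -> "nginx"
#     {"name": ["nginx"]}    -> "nginx"        (first element of a list value)
#     {} or {"name": None}   -> default_name   (the cookbook directory name)
#     no metadata.rb present -> default_name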
+def _generate_batch_conversion_report(overall_summary: dict, output_dir: Path) -> str:
+    """Generate a comprehensive batch conversion report."""
+    summary_lines = [
+        "# Batch Cookbook Conversion Summary",
+        "",
+        "## Overview:",
+        f"- Total cookbooks found: {overall_summary['total_cookbooks']}",
+        f"- Successfully converted: {len(overall_summary['converted_cookbooks'])}",
+        f"- Failed conversions: {len(overall_summary['failed_cookbooks'])}",
+        f"- Total files converted: {overall_summary['total_converted_files']}",
+        f"- Total errors: {overall_summary['total_errors']}",
+        f"- Total warnings: {overall_summary['total_warnings']}",
+        "",
+    ]
+
+    if overall_summary["converted_cookbooks"]:
+        summary_lines.append("## Successfully Converted Cookbooks:")
+        for cookbook in overall_summary["converted_cookbooks"]:
+            summary_lines.append(
+                f"- **{cookbook['cookbook_name']}** → `{cookbook['role_name']}`"
+            )
+            summary_lines.append(f"  - Files converted: {cookbook['converted_files']}")
+            summary_lines.append(f"  - Errors: {cookbook['errors']}")
+            summary_lines.append(f"  - Warnings: {cookbook['warnings']}")
+            summary_lines.append(f"  - Location: {cookbook['role_path']}")
+        summary_lines.append("")
+
+    if overall_summary["failed_cookbooks"]:
+        summary_lines.append("## Failed Conversions:")
+        for failed in overall_summary["failed_cookbooks"]:
+            summary_lines.append(
+                f"- ❌ **{failed['cookbook_name']}**: {failed['error']}"
+            )
+        summary_lines.append("")
+
+    summary_lines.append(f"## Output Directory: {output_dir}")
+    summary_lines.append("")
+    summary_lines.append("## Next Steps:")
+    summary_lines.append("1. Review converted roles for accuracy")
+    summary_lines.append("2. Test roles with ansible-playbook --check")
+    summary_lines.append("3. Update variable references and cross-role dependencies")
+    summary_lines.append("4. Run integration tests across all converted roles")
+    summary_lines.append("5. Consider using Ansible Galaxy for role distribution")
+
+    return "\n".join(summary_lines)
+
+
  # AWX/AAP deployment wrappers for backward compatibility
  def main() -> None:
      """