mcp-souschef 3.0.0-py3-none-any.whl → 3.5.1-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
souschef/server.py CHANGED
@@ -1,12 +1,16 @@
 """SousChef MCP Server - Chef to Ansible conversion assistant."""
 
+# codeql[py/unused-import]: Intentional re-exports for MCP tools and test compatibility
+
 import ast
 import json
+import os
 import re
 from pathlib import Path
 from typing import Any
 
-from mcp.server.fastmcp import FastMCP
+import yaml
+from mcp.server import FastMCP
 
 # Import assessment functions with aliases to avoid name conflicts
 from souschef.assessment import (
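
Note: the import block above swaps the deep import path mcp.server.fastmcp for the package-level mcp.server; current releases of the mcp Python SDK expose FastMCP at both locations. A minimal sketch of the server shape this module builds on (server name and tool are illustrative):

    from mcp.server import FastMCP

    mcp = FastMCP("example")

    @mcp.tool()
    def echo(text: str) -> str:
        """Return the input unchanged."""
        return text

    if __name__ == "__main__":
        mcp.run()
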
@@ -15,23 +19,16 @@ from souschef.assessment import (
 from souschef.assessment import (
     assess_chef_migration_complexity as _assess_chef_migration_complexity,
 )
-from souschef.assessment import (
-    generate_migration_plan as _generate_migration_plan,
-)
-from souschef.assessment import (
-    generate_migration_report as _generate_migration_report,
-)
+from souschef.assessment import generate_migration_plan as _generate_migration_plan
+from souschef.assessment import generate_migration_report as _generate_migration_report
 from souschef.assessment import (
     parse_chef_migration_assessment as _parse_chef_migration_assessment,
 )
-from souschef.assessment import (
-    validate_conversion as _validate_conversion,
-)
+from souschef.assessment import validate_conversion as _validate_conversion
 
 # Import extracted modules
 # Import private helper functions still used in server.py
-# codeql[py/unused-import]: Backward compatibility exports for test suite
-from souschef.converters.habitat import (  # noqa: F401
+from souschef.converters.habitat import (  # noqa: F401, codeql[py/unused-import]
     _add_service_build,
     _add_service_dependencies,
     _add_service_environment,
@@ -51,9 +48,9 @@ from souschef.converters.habitat import (
     generate_compose_from_habitat as _generate_compose_from_habitat,
 )
 
+# Import playbook converter functions
 # Re-exports of playbook internal functions for backward compatibility (tests)
-# codeql[py/unused-import]: Backward compatibility exports for test suite
-from souschef.converters.playbook import (  # noqa: F401
+from souschef.converters.playbook import (  # noqa: F401, codeql[py/unused-import]
     _add_general_recommendations,
     _convert_chef_block_to_ansible,
     _convert_chef_condition_to_ansible,
@@ -76,8 +73,6 @@ from souschef.converters.playbook import (  # noqa: F401
     _parse_search_condition,
     _process_subscribes,
 )
-
-# Import playbook converter functions
 from souschef.converters.playbook import (
     analyse_chef_search_patterns as _analyse_chef_search_patterns,
 )
@@ -87,9 +82,8 @@ from souschef.converters.playbook import (
 from souschef.converters.playbook import (
     generate_dynamic_inventory_script as _generate_dynamic_inventory_script,
 )
-
-# codeql[py/unused-import]: Backward compatibility exports for test suite
-from souschef.converters.resource import (  # noqa: F401
+from souschef.converters.playbook import get_chef_nodes as _get_chef_nodes
+from souschef.converters.resource import (  # noqa: F401, codeql[py/unused-import]
     _convert_chef_resource_to_ansible,
     _format_ansible_task,
     _get_file_params,
@@ -101,8 +95,7 @@ from souschef.converters.resource import (
 
 # Re-exports for backward compatibility (used by tests) - DO NOT REMOVE
 # These imports are intentionally exposed for external test access
-# codeql[py/unused-import]: Backward compatibility exports for test suite
-from souschef.core.constants import (  # noqa: F401
+from souschef.core.constants import (  # noqa: F401, codeql[py/unused-import]
     ACTION_TO_STATE,
     ANSIBLE_SERVICE_MODULE,
     ERROR_PREFIX,
@@ -112,33 +105,52 @@ from souschef.core.constants import (  # noqa: F401
 
 # Import core utilities
 from souschef.core.errors import format_error_with_context
-
-# codeql[py/unused-import]: Backward compatibility exports for test suite
-from souschef.core.path_utils import _normalize_path, _safe_join  # noqa: F401
+from souschef.core.logging import configure_logging
+from souschef.core.path_utils import (  # noqa: F401, codeql[py/unused-import]
+    _ensure_within_base_path,
+    _normalize_path,
+    _safe_join,
+    _validated_candidate,
+    safe_glob,
+    safe_read_text,
+    safe_write_text,
+)
 
 # Re-exports for backward compatibility (used by tests) - DO NOT REMOVE
 # These imports are intentionally exposed for external test access
-# codeql[py/unused-import]: Backward compatibility exports for test suite
-from souschef.core.ruby_utils import (  # noqa: F401
-    _normalize_ruby_value,
+from souschef.core.ruby_utils import (
+    _normalize_ruby_value,  # noqa: F401, codeql[py/unused-import]
 )
 
 # Re-exports for backward compatibility (used by tests) - DO NOT REMOVE
 # These imports are intentionally exposed for external test access
-# codeql[py/unused-import]: Backward compatibility exports for test suite
-from souschef.core.validation import (  # noqa: F401
+from souschef.core.validation import (  # noqa: F401, codeql[py/unused-import]
     ValidationCategory,
     ValidationEngine,
     ValidationLevel,
     ValidationResult,
 )
 
+# Explicit re-exports for language servers and type checkers
+# These names are intentionally available from souschef.server
+__all__ = [
+    "ValidationCategory",
+    "ValidationEngine",
+    "ValidationLevel",
+    "ValidationResult",
+]
+
+# Re-exports for backward compatibility (used by tests)
+# These are imported and re-exported intentionally
 # Import validation framework
 # Re-exports of deployment internal functions for backward compatibility (tests)
 # Public re-exports of deployment functions for test backward compatibility
 # Note: MCP tool wrappers exist for some of these, but tests import directly
-# codeql[py/unused-import]: Backward compatibility exports for test suite
-from souschef.deployment import (  # noqa: F401
+# Import converters.template functions
+from souschef.converters.template import (
+    convert_template_with_ai as _convert_template_with_ai,
+)
+from souschef.deployment import (  # noqa: F401, codeql[py/unused-import]
     _analyse_cookbook_for_awx,
     _analyse_cookbooks_directory,
     _detect_deployment_patterns_in_recipe,
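
Note: the new __all__ list complements the noqa-marked imports: the linter suppressions keep flake8 and CodeQL quiet, while __all__ tells type checkers and language servers that the re-exported validation names are deliberately public. The pattern in isolation (one name shown; assumes the aliased import above):

    from souschef.core.validation import ValidationResult  # noqa: F401

    __all__ = ["ValidationResult"]
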
@@ -152,17 +164,7 @@ from souschef.deployment import (  # noqa: F401
     _parse_chef_runlist,
     _recommend_ansible_strategies,
     analyse_chef_application_patterns,
-    convert_chef_deployment_to_ansible_strategy,
-    generate_awx_inventory_source_from_chef,
-    generate_awx_job_template_from_cookbook,
-    generate_awx_project_from_cookbooks,
-    generate_awx_workflow_from_chef_runlist,
-    generate_blue_green_deployment_playbook,
-    generate_canary_deployment_strategy,
 )
-
-# Re-exports for backward compatibility (used by tests)
-# These are imported and re-exported intentionally
 from souschef.deployment import (
     convert_chef_deployment_to_ansible_strategy as _convert_chef_deployment_to_ansible_strategy,
 )
@@ -189,33 +191,28 @@ from souschef.deployment import (
 from souschef.filesystem import list_directory as _list_directory
 from souschef.filesystem import read_file as _read_file
 
-# codeql[py/unused-import]: Backward compatibility exports for test suite
-from souschef.parsers.attributes import (  # noqa: F401
+# Import parser functions
+from souschef.parsers.attributes import (  # noqa: F401, codeql[py/unused-import]
     _extract_attributes,
     _format_attributes,
     _format_resolved_attributes,
     _get_precedence_level,
     _resolve_attribute_precedence,
 )
-
-# Import parser functions
 from souschef.parsers.attributes import parse_attributes as _parse_attributes
 
-# codeql[py/unused-import]: Backward compatibility exports for test suite
-from souschef.parsers.habitat import (  # noqa: F401
+# Import Habitat parser internal functions for backward compatibility
+from souschef.parsers.habitat import (  # noqa: F401, codeql[py/unused-import]
     _extract_plan_array,
     _extract_plan_exports,
     _extract_plan_function,
     _extract_plan_var,
     _update_quote_state,
 )
-
-# Import Habitat parser internal functions for backward compatibility
 from souschef.parsers.habitat import parse_habitat_plan as _parse_habitat_plan
 
 # Re-export InSpec internal functions for backward compatibility (tests)
-# codeql[py/unused-import]: Backward compatibility exports for test suite
-from souschef.parsers.inspec import (  # noqa: F401
+from souschef.parsers.inspec import (  # noqa: F401, codeql[py/unused-import]
     _convert_inspec_to_ansible_assert,
     _convert_inspec_to_goss,
     _convert_inspec_to_serverspec,
@@ -224,18 +221,12 @@ from souschef.parsers.inspec import (  # noqa: F401
     _generate_inspec_from_resource,
     _parse_inspec_control,
 )
-from souschef.parsers.inspec import (
-    convert_inspec_to_test as _convert_inspec_test,
-)
-from souschef.parsers.inspec import (
-    parse_inspec_profile as _parse_inspec,
-)
-
-# codeql[py/unused-import]: Backward compatibility exports for test suite
-from souschef.parsers.metadata import (  # noqa: F401
-    _extract_metadata,
-    _format_cookbook_structure,
-    _format_metadata,
+from souschef.parsers.inspec import convert_inspec_to_test as _convert_inspec_test
+from souschef.parsers.inspec import parse_inspec_profile as _parse_inspec
+from souschef.parsers.metadata import (
+    _extract_metadata,  # noqa: F401, codeql[py/unused-import]
+    _format_cookbook_structure,  # noqa: F401, codeql[py/unused-import]
+    _format_metadata,  # noqa: F401, codeql[py/unused-import]
 )
 from souschef.parsers.metadata import (
     list_cookbook_structure as _list_cookbook_structure,
@@ -244,24 +235,23 @@ from souschef.parsers.metadata import (
     parse_cookbook_metadata as _parse_cookbook_metadata,
 )
 from souschef.parsers.metadata import read_cookbook_metadata as _read_cookbook_metadata
-
-# codeql[py/unused-import]: Backward compatibility exports for test suite
-from souschef.parsers.recipe import (  # noqa: F401
-    _extract_conditionals,
-    _extract_resources,
-    _format_resources,
+from souschef.parsers.recipe import (
+    _extract_conditionals,  # noqa: F401, codeql[py/unused-import]
+    _extract_resources,  # noqa: F401, codeql[py/unused-import]
+    _format_resources,  # noqa: F401, codeql[py/unused-import]
 )
 from souschef.parsers.recipe import parse_recipe as _parse_recipe
 
-# codeql[py/unused-import]: Backward compatibility exports for test suite
-from souschef.parsers.resource import (  # noqa: F401
-    _extract_resource_actions,
-    _extract_resource_properties,
+# Re-exports for backward compatibility (used by tests) - DO NOT REMOVE
+# These imports are intentionally exposed for external test access
+from souschef.parsers.resource import (
+    _extract_resource_actions,  # noqa: F401, codeql[py/unused-import]
+    _extract_resource_properties,  # noqa: F401, codeql[py/unused-import]
 )
 from souschef.parsers.resource import parse_custom_resource as _parse_custom_resource
 
-# codeql[py/unused-import]: Backward compatibility exports for test suite
-from souschef.parsers.template import (  # noqa: F401
+# Import internal functions for backward compatibility (used by tests)
+from souschef.parsers.template import (  # noqa: F401, codeql[py/unused-import]
     _convert_erb_to_jinja2,
     _extract_code_block_variables,
     _extract_heredoc_strings,
@@ -270,17 +260,40 @@ from souschef.parsers.template import (  # noqa: F401
     _extract_template_variables,
     _strip_ruby_comments,
 )
-
-# Import internal functions for backward compatibility (used by tests)
 from souschef.parsers.template import parse_template as _parse_template
 
+# Import UI helper functions for MCP exposure
+from souschef.ui.pages.chef_server_settings import (
+    _validate_chef_server_connection,
+)
+
+# Backward compatibility re-exports without underscore prefix (for tests)
+# noinspection PyUnusedLocal
+convert_chef_deployment_to_ansible_strategy = (  # noqa: F401
+    _convert_chef_deployment_to_ansible_strategy
+)
+generate_awx_inventory_source_from_chef = (  # noqa: F401
+    _generate_awx_inventory_source_from_chef
+)
+generate_awx_job_template_from_cookbook = (  # noqa: F401
+    _generate_awx_job_template_from_cookbook
+)
+generate_awx_project_from_cookbooks = _generate_awx_project_from_cookbooks  # noqa: F401
+generate_awx_workflow_from_chef_runlist = (  # noqa: F401
+    _generate_awx_workflow_from_chef_runlist
+)
+generate_blue_green_deployment_playbook = (  # noqa: F401
+    _generate_blue_green_deployment_playbook
+)
+generate_canary_deployment_strategy = (  # noqa: F401
+    _generate_canary_deployment_strategy
+)
+
 # Create a new FastMCP server
 mcp = FastMCP("souschef")
 
-# Error message templates
-ERROR_FILE_NOT_FOUND = "Error: File not found at {path}"
-ERROR_IS_DIRECTORY = "Error: {path} is a directory, not a file"
-ERROR_PERMISSION_DENIED = "Error: Permission denied for {path}"
+# File constants
+METADATA_RB = "metadata.rb"
 
 # File constants
 METADATA_RB = "metadata.rb"
@@ -300,6 +313,10 @@ def parse_template(path: str) -> str:
         JSON string with extracted variables and Jinja2-converted template.
 
     """
+    try:
+        path = str(_normalize_path(path))
+    except ValueError as e:
+        return format_error_with_context(e, "validating template path", path)
     return _parse_template(path)
 
 
@@ -315,6 +332,10 @@ def parse_custom_resource(path: str) -> str:
         JSON string with extracted properties, actions, and metadata.
 
     """
+    try:
+        path = str(_normalize_path(path))
+    except ValueError as e:
+        return format_error_with_context(e, "validating resource path", path)
     return _parse_custom_resource(path)
 
 
@@ -330,6 +351,10 @@ def list_directory(path: str) -> list[str] | str:
         A list of filenames in the directory, or an error message.
 
     """
+    try:
+        path = str(_normalize_path(path))
+    except ValueError as e:
+        return format_error_with_context(e, "validating directory path", path)
     result: list[str] | str = _list_directory(path)
     return result
 
@@ -346,6 +371,10 @@ def read_file(path: str) -> str:
         The contents of the file, or an error message.
 
    """
+    try:
+        path = str(_normalize_path(path))
+    except ValueError as e:
+        return format_error_with_context(e, "validating file path", path)
     result: str = _read_file(path)
     return result
 
@@ -362,6 +391,10 @@ def read_cookbook_metadata(path: str) -> str:
         Formatted string with extracted metadata.
 
     """
+    try:
+        path = str(_normalize_path(path))
+    except ValueError as e:
+        return format_error_with_context(e, "validating metadata path", path)
     return _read_cookbook_metadata(path)
 
 
@@ -377,6 +410,10 @@ def parse_cookbook_metadata(path: str) -> dict[str, str | list[str]]:
         Dictionary containing extracted metadata fields.
 
     """
+    try:
+        path = str(_normalize_path(path))
+    except ValueError as e:
+        return {"error": str(e)}
     return _parse_cookbook_metadata(path)
 
 
@@ -392,6 +429,10 @@ def parse_recipe(path: str) -> str:
         Formatted string with extracted Chef resources and their properties.
 
     """
+    try:
+        path = str(_normalize_path(path))
+    except ValueError as e:
+        return format_error_with_context(e, "validating recipe path", path)
     return _parse_recipe(path)
 
 
@@ -421,6 +462,10 @@ def parse_attributes(path: str, resolve_precedence: bool = True) -> str:
         Formatted string with extracted attributes.
 
     """
+    try:
+        path = str(_normalize_path(path))
+    except ValueError as e:
+        return format_error_with_context(e, "validating attributes path", path)
     return _parse_attributes(path, resolve_precedence)
 
 
@@ -436,6 +481,10 @@ def list_cookbook_structure(path: str) -> str:
         Formatted string showing the cookbook structure.
 
     """
+    try:
+        path = str(_normalize_path(path))
+    except ValueError as e:
+        return format_error_with_context(e, "validating cookbook path", path)
     return _list_cookbook_structure(path)
 
 
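Note: every path-taking tool above now opens with the same guard: normalise the argument, and convert a ValueError into the tool's normal error-string return rather than letting the exception escape to the MCP client (parse_cookbook_metadata, which returns a dict, reports {"error": ...} instead). One hypothetical way to factor that repetition, assuming the module's _normalize_path and format_error_with_context helpers; the diff itself inlines the guard in each tool:

    from collections.abc import Callable
    from functools import wraps

    def with_validated_path(context: str) -> Callable:
        def decorator(func: Callable[[str], str]) -> Callable[[str], str]:
            @wraps(func)
            def wrapper(path: str) -> str:
                try:
                    path = str(_normalize_path(path))  # reject bad paths early
                except ValueError as e:
                    return format_error_with_context(e, context, path)
                return func(path)
            return wrapper
        return decorator
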
@@ -505,7 +554,6 @@ def _extract_resource_subscriptions(
     return subscriptions
 
 
-@mcp.tool()
 def _parse_controls_from_directory(profile_path: Path) -> list[dict[str, Any]]:
     """
     Parse all control files from an InSpec profile directory.
@@ -526,9 +574,9 @@ def _parse_controls_from_directory(profile_path: Path) -> list[dict[str, Any]]:
         raise FileNotFoundError(f"No controls directory found in {profile_path}")
 
     controls = []
-    for control_file in controls_dir.glob("*.rb"):
+    for control_file in safe_glob(controls_dir, "*.rb", profile_path):
         try:
-            content = control_file.read_text()
+            content = safe_read_text(control_file, profile_path)
             file_controls = _parse_inspec_control(content)
             for ctrl in file_controls:
                 ctrl["file"] = str(control_file.relative_to(profile_path))
@@ -554,7 +602,7 @@ def _parse_controls_from_file(profile_path: Path) -> list[dict[str, Any]]:
 
     """
     try:
-        content = profile_path.read_text()
+        content = safe_read_text(profile_path, profile_path.parent)
         controls = _parse_inspec_control(content)
         for ctrl in controls:
             ctrl["file"] = profile_path.name
@@ -575,6 +623,10 @@ def parse_inspec_profile(path: str) -> str:
         JSON string with parsed controls, or error message.
 
     """
+    try:
+        path = str(_normalize_path(path))
+    except ValueError as e:
+        return format_error_with_context(e, "validating InSpec path", path)
     return _parse_inspec(path)
 
 
@@ -591,6 +643,10 @@ def convert_inspec_to_test(inspec_path: str, output_format: str = "testinfra") -
         Converted test code or error message.
 
     """
+    try:
+        inspec_path = str(_normalize_path(inspec_path))
+    except ValueError as e:
+        return format_error_with_context(e, "validating InSpec path", inspec_path)
     return _convert_inspec_test(inspec_path, output_format)
 
 
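Note: safe_glob and safe_read_text replace bare glob()/read_text() calls and take an extra base path, so each file touched is containment-checked against a known root. Their implementations live in souschef.core.path_utils and are not shown in this diff; a plausible sketch of the read helper's contract:

    from pathlib import Path

    def safe_read_text(candidate: Path, base: Path) -> str:
        # Sketch only: resolve symlinks first, then refuse anything that
        # escapes the base directory before reading.
        resolved = candidate.resolve()
        if not resolved.is_relative_to(base.resolve()):  # Python 3.9+
            raise ValueError(f"{candidate} escapes {base}")
        return resolved.read_text()
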
@@ -646,6 +702,9 @@ def generate_inspec_from_recipe(recipe_path: str) -> str:
 
     """
     try:
+        # Validate and normalize path
+        recipe_path = str(_normalize_path(recipe_path))
+
         # First parse the recipe
         recipe_result: str = parse_recipe(recipe_path)
 
@@ -705,8 +764,6 @@ def convert_chef_databag_to_vars(
 
     """
     try:
-        import yaml
-
         # Validate inputs
         if not databag_content or not databag_content.strip():
             return (
@@ -812,13 +869,14 @@ def _validate_databags_directory(
     return databags_path, None
 
 
-def _convert_databag_item(item_file, databag_name: str, output_directory: str) -> dict:
+def _convert_databag_item(
+    item_file, databag_name: str, output_directory: str, base_path: Path
+) -> dict:
     """Convert a single databag item file to Ansible format."""
     item_name = item_file.stem
 
     try:
-        with item_file.open() as f:
-            content = f.read()
+        content = safe_read_text(item_file, base_path)
 
         # Detect if encrypted
         is_encrypted = _detect_encrypted_databag(content)
@@ -843,13 +901,17 @@ def _convert_databag_item(item_file, databag_name: str, output_directory: str) -
         return {"databag": databag_name, "item": item_name, "error": str(e)}
 
 
-def _process_databag_directory(databag_dir, output_directory: str) -> list[dict]:
+def _process_databag_directory(
+    databag_dir, output_directory: str, base_path: Path
+) -> list[dict]:
     """Process all items in a single databag directory."""
     results = []
     databag_name = databag_dir.name
 
-    for item_file in databag_dir.glob("*.json"):
-        result = _convert_databag_item(item_file, databag_name, output_directory)
+    for item_file in safe_glob(databag_dir, "*.json", base_path):
+        result = _convert_databag_item(
+            item_file, databag_name, output_directory, base_path
+        )
         results.append(result)
 
     return results
@@ -884,11 +946,13 @@ def generate_ansible_vault_from_databags(
     conversion_results = []
 
     # Process each data bag directory
-    for databag_dir in databags_path.iterdir():
+    for databag_dir in databags_path.iterdir():  # nosonar
         if not databag_dir.is_dir():
             continue
 
-        results = _process_databag_directory(databag_dir, output_directory)
+        results = _process_databag_directory(
+            databag_dir, output_directory, databags_path
+        )
         conversion_results.extend(results)
 
     # Generate summary and file structure
@@ -915,6 +979,9 @@ def analyse_chef_databag_usage(cookbook_path: str, databags_path: str = "") -> s
         Analysis of data bag usage and migration recommendations
 
     """
+    cookbook_path = str(_normalize_path(cookbook_path))
+    if databags_path:
+        databags_path = str(_normalize_path(databags_path))
     try:
         cookbook = _normalize_path(cookbook_path)
         if not cookbook.exists():
@@ -1027,12 +1094,11 @@ def generate_inventory_from_chef_environments(
     environments = {}
     processing_results = []
 
-    for env_file in env_path.glob("*.rb"):
+    for env_file in safe_glob(env_path, "*.rb", env_path):
         env_name = env_file.stem
 
         try:
-            with env_file.open("r") as f:
-                content = f.read()
+            content = safe_read_text(env_file, env_path)
 
             env_data = _parse_chef_environment_content(content)
             environments[env_name] = env_data
@@ -1374,8 +1440,6 @@ def _generate_inventory_group_from_environment(
     env_data: dict, env_name: str, include_constraints: bool
 ) -> str:
     """Generate Ansible inventory group configuration from environment data."""
-    import yaml
-
     group_vars: dict[str, Any] = {}
 
     # Add environment metadata
@@ -1408,7 +1472,7 @@
         ),
     }
 
-    return yaml.dump(group_vars, default_flow_style=False, indent=2)
+    return str(yaml.dump(group_vars, default_flow_style=False, indent=2))
 
 
 def _build_conversion_summary(results: list) -> str:
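
Note: with yaml imported once at module level, the function-local import yaml statements disappear, and the dump helpers now wrap yaml.dump(...) in str(...). At runtime yaml.dump already returns a str when no stream is passed; the wrapper presumably just pins the return type for strict type checkers, whose PyYAML stubs are looser. In isolation:

    import yaml

    def dump_vars(group_vars: dict) -> str:
        # yaml.dump(obj) with no stream returns str; str() satisfies a
        # strict checker against the declared -> str return type.
        return str(yaml.dump(group_vars, default_flow_style=False, indent=2))
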
@@ -1461,8 +1525,6 @@ def _generate_yaml_inventory(environments: dict) -> str:
         YAML inventory string
 
     """
-    import yaml
-
     inventory: dict[str, Any] = {"all": {"children": {}}}
 
     for env_name, env_data in environments.items():
@@ -1592,8 +1654,7 @@ def _extract_environment_usage_from_cookbook(cookbook_path) -> list:
     # Search for environment usage in Ruby files
     for ruby_file in cookbook_path.rglob("*.rb"):
         try:
-            with ruby_file.open("r") as f:
-                content = f.read()
+            content = safe_read_text(ruby_file, cookbook_path)
 
             # Find environment usage patterns
             found_patterns = _find_environment_patterns_in_content(
@@ -1650,13 +1711,12 @@ def _analyse_environments_structure(environments_path) -> dict:
     """Analyse the structure of Chef environments directory."""
     structure: dict[str, Any] = {"total_environments": 0, "environments": {}}
 
-    for env_file in environments_path.glob("*.rb"):
+    for env_file in safe_glob(environments_path, "*.rb", environments_path):
         structure["total_environments"] += 1
         env_name = env_file.stem
 
         try:
-            with env_file.open("r") as f:
-                content = f.read()
+            content = safe_read_text(env_file, environments_path)
 
             env_data = _parse_chef_environment_content(content)
 
@@ -1852,12 +1912,10 @@ def _convert_databag_to_ansible_vars(
 
 
 def _generate_vault_content(vars_dict: dict, databag_name: str) -> str:
-    import yaml
-
     """Generate Ansible Vault YAML content from variables dictionary."""
     # Structure for vault file
     vault_vars = {f"{databag_name}_vault": vars_dict}
-    return yaml.dump(vault_vars, default_flow_style=False, indent=2)
+    return str(yaml.dump(vault_vars, default_flow_style=False, indent=2))
 
 
 def _detect_encrypted_databag(content: str) -> bool:
@@ -2036,8 +2094,7 @@ def _extract_databag_usage_from_cookbook(cookbook_path) -> list:
     # Search for data bag usage in Ruby files
     for ruby_file in cookbook_path.rglob("*.rb"):
         try:
-            with ruby_file.open() as f:
-                content = f.read()
+            content = safe_read_text(ruby_file, cookbook_path)
 
             # Find data bag usage patterns
             found_patterns = _find_databag_patterns_in_content(content, str(ruby_file))
@@ -2099,7 +2156,7 @@ def _analyse_databag_structure(databags_path) -> dict:
         "databags": {},
     }
 
-    for databag_dir in databags_path.iterdir():
+    for databag_dir in databags_path.iterdir():  # nosonar
         if not databag_dir.is_dir():
             continue
 
@@ -2107,13 +2164,12 @@
         structure["total_databags"] += 1
 
         items = []
-        for item_file in databag_dir.glob("*.json"):
+        for item_file in safe_glob(databag_dir, "*.json", databags_path):
             structure["total_items"] += 1
             item_name = item_file.stem
 
             try:
-                with item_file.open() as f:
-                    content = f.read()
+                content = safe_read_text(item_file, databags_path)
 
                 is_encrypted = _detect_encrypted_databag(content)
                 if is_encrypted:
@@ -2359,8 +2415,10 @@ def assess_chef_migration_complexity(
         Detailed assessment report in markdown format.
 
     """
+    # Sanitise and contain user-provided cookbook paths before processing
+    sanitized = _sanitize_cookbook_paths_input(cookbook_paths)
     return _assess_chef_migration_complexity(
-        cookbook_paths, migration_scope, target_platform
+        sanitized, migration_scope, target_platform
    )
 
 
@@ -2385,7 +2443,9 @@ def generate_migration_plan(
         Detailed migration plan in markdown format.
 
     """
-    return _generate_migration_plan(cookbook_paths, migration_strategy, timeline_weeks)
+    # Sanitise and contain user-provided cookbook paths before processing
+    sanitized = _sanitize_cookbook_paths_input(cookbook_paths)
+    return _generate_migration_plan(sanitized, migration_strategy, timeline_weeks)
 
 
 @mcp.tool()
@@ -2403,7 +2463,9 @@ def analyse_cookbook_dependencies(cookbook_paths: str) -> str:
         Dependency analysis report in markdown format.
 
     """
-    return _analyse_cookbook_dependencies(cookbook_paths)
+    # Sanitise and contain user-provided cookbook paths before processing
+    sanitized = _sanitize_cookbook_paths_input(cookbook_paths)
+    return _analyse_cookbook_dependencies(sanitized)
 
 
 @mcp.tool()
@@ -2427,11 +2489,47 @@ def generate_migration_report(
         Comprehensive migration report in markdown format.
 
     """
+    # Sanitise and contain user-provided cookbook paths before processing
+    sanitized = _sanitize_cookbook_paths_input(cookbook_paths)
     return _generate_migration_report(
-        cookbook_paths, report_format, include_technical_details
+        sanitized, report_format, include_technical_details
     )
 
 
+def _sanitize_cookbook_paths_input(cookbook_paths: str) -> str:
+    """
+    Sanitise a comma-separated list of cookbook paths.
+
+    Args:
+        cookbook_paths: Comma-separated paths provided by the user.
+
+    Returns:
+        A comma-separated string of normalised paths.
+
+    Raises:
+        ValueError: If any path is invalid.
+
+    """
+    sanitized_paths: list[str] = []
+    for raw in cookbook_paths.split(","):
+        candidate = raw.strip()
+        if not candidate:
+            continue
+        # Normalize the path (resolves ., .., symlinks)
+
+        # prevents traversal attacks; file access is further contained by per-operation checks
+        normalised = _normalize_path(candidate)
+
+        # Validate it's an absolute path after normalization
+        if not normalised.is_absolute():
+            msg = f"Path must be absolute after normalization: {candidate}"
+            raise ValueError(msg)
+
+        # Use the normalized absolute path (temp dirs, workspace dirs all allowed)
+        sanitized_paths.append(str(normalised))
+    return ",".join(sanitized_paths)
+
+
 @mcp.tool()
 def validate_conversion(
     conversion_type: str,
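
Note: the assessment tools take one comma-separated string of cookbook paths, so _sanitize_cookbook_paths_input splits, trims, normalises, and re-joins before any of them touch the filesystem. A hypothetical call (paths illustrative):

    # " /srv/cookbooks/app, ,/srv/cookbooks/db" -> "/srv/cookbooks/app,/srv/cookbooks/db"
    # Empty segments are dropped; a segment that is not absolute after
    # normalisation raises ValueError.
    sanitized = _sanitize_cookbook_paths_input(" /srv/cookbooks/app, ,/srv/cookbooks/db")
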
@@ -2456,6 +2554,124 @@ def validate_conversion(
     return _validate_conversion(conversion_type, result_content, output_format)
 
 
+# Chef Server Integration Tools
+
+
+@mcp.tool()
+def validate_chef_server_connection(
+    server_url: str,
+    node_name: str,
+) -> str:
+    """
+    Validate Chef Server connectivity and configuration.
+
+    Tests the Chef Server REST API connection to ensure the server is
+    reachable and properly configured.
+
+    Args:
+        server_url: Base URL of the Chef Server (e.g., https://chef.example.com).
+        node_name: Chef node name for authentication.
+
+    Returns:
+        Success/failure message indicating the connection status.
+
+    """
+    try:
+        success, message = _validate_chef_server_connection(server_url, node_name)
+        result = "✅ Success" if success else "❌ Failed"
+        return f"{result}: {message}"
+    except Exception as e:
+        return f"❌ Error validating Chef Server connection: {e}"
+
+
+@mcp.tool()
+def get_chef_nodes(search_query: str = "*:*") -> str:
+    """
+    Query Chef Server for nodes matching search criteria.
+
+    Retrieves nodes from Chef Server that match the provided search query,
+    extracting role assignments, environment, platform, and IP address
+    information for dynamic inventory generation.
+
+    Args:
+        search_query: Chef search query (default: '*:*' for all nodes).
+
+    Returns:
+        JSON string containing list of matching nodes with their attributes.
+
+    """
+    try:
+        nodes = _get_chef_nodes(search_query)
+        if not nodes:
+            return json.dumps(
+                {
+                    "status": "no_nodes",
+                    "message": "No nodes found matching the search query",
+                    "nodes": [],
+                }
+            )
+        return json.dumps(
+            {
+                "status": "success",
+                "count": len(nodes),
+                "nodes": nodes,
+            }
+        )
+    except Exception as e:
+        return json.dumps(
+            {
+                "status": "error",
+                "message": f"Error querying Chef Server: {str(e)}",
+                "nodes": [],
+            }
+        )
+
+
+# Template Conversion Tools
+
+
+@mcp.tool()
+def convert_template_with_ai(
+    erb_path: str,
+    use_ai_enhancement: bool = True,
+) -> str:
+    """
+    Convert an ERB template to Jinja2 with optional AI assistance.
+
+    Converts Chef ERB templates to Ansible Jinja2 format with optional
+    AI-based validation and improvement for complex Ruby logic that cannot
+    be automatically converted.
+
+    Args:
+        erb_path: Path to the ERB template file.
+        use_ai_enhancement: Whether to use AI for validation (default: True).
+
+    Returns:
+        JSON string with conversion results including success status,
+        Jinja2 output, warnings, and conversion method used.
+
+    """
+    try:
+        if use_ai_enhancement:
+            result = _convert_template_with_ai(erb_path, ai_service=None)
+        else:
+            # Fall back to rule-based conversion
+            from souschef.converters.template import convert_template_file
+
+            result = convert_template_file(erb_path)
+            result["conversion_method"] = "rule-based"
+        return json.dumps(result, indent=2)
+    except Exception as e:
+        return json.dumps(
+            {
+                "success": False,
+                "error": f"Error converting template: {str(e)}",
+                "template": erb_path,
+                "jinja2_output": "",
+            }
+        )
+
+
 # Habitat Parsing Tool
 
 
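Note: get_chef_nodes never raises to the MCP client; it always returns a JSON envelope whose status field is one of success, no_nodes, or error. A hypothetical consumer (the query string and the name key are illustrative; the node schema is not shown in this diff):

    payload = json.loads(get_chef_nodes("role:web"))
    if payload["status"] == "success":
        names = [node.get("name") for node in payload["nodes"]]
    else:
        print(payload["message"])
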
@@ -2474,6 +2690,7 @@ def parse_habitat_plan(plan_path: str) -> str:
         JSON string with parsed plan metadata
 
     """
+    plan_path = str(_normalize_path(plan_path))
     return _parse_habitat_plan(plan_path)
 
 
@@ -2571,6 +2788,7 @@ def analyse_chef_search_patterns(recipe_or_cookbook_path: str) -> str:
         Analysis of search patterns found.
 
     """
+    recipe_or_cookbook_path = str(_normalize_path(recipe_or_cookbook_path))
     return _analyse_chef_search_patterns(recipe_or_cookbook_path)
 
 
@@ -2593,6 +2811,7 @@ def profile_cookbook_performance(cookbook_path: str) -> str:
     from souschef.profiling import generate_cookbook_performance_report
 
     try:
+        cookbook_path = str(_normalize_path(cookbook_path))
         report = generate_cookbook_performance_report(cookbook_path)
         return str(report)
     except Exception as e:
@@ -2638,6 +2857,7 @@ def profile_parsing_operation(
     func = operation_map[operation]
 
     try:
+        file_path = str(_normalize_path(file_path))
         if detailed:
             _, profile_result = detailed_profile_function(func, file_path)
             result = str(profile_result)
@@ -2681,6 +2901,7 @@ def generate_jenkinsfile_from_chef(
     from souschef.ci.jenkins_pipeline import generate_jenkinsfile_from_chef_ci
 
     try:
+        cookbook_path = str(_normalize_path(cookbook_path))
         # Convert string to boolean
         enable_parallel_bool = enable_parallel.lower() in ("yes", "true", "1")
 
@@ -2723,6 +2944,7 @@ def generate_gitlab_ci_from_chef(
     from souschef.ci.gitlab_ci import generate_gitlab_ci_from_chef_ci
 
     try:
+        cookbook_path = str(_normalize_path(cookbook_path))
         enable_cache_bool = enable_cache.lower() in ("yes", "true", "1")
         enable_artifacts_bool = enable_artifacts.lower() in ("yes", "true", "1")
         result = generate_gitlab_ci_from_chef_ci(
@@ -2768,6 +2990,7 @@ def generate_github_workflow_from_chef(
     from souschef.ci.github_actions import generate_github_workflow_from_chef_ci
 
     try:
+        cookbook_path = str(_normalize_path(cookbook_path))
         enable_cache_bool = enable_cache.lower() in ("yes", "true", "1")
         enable_artifacts_bool = enable_artifacts.lower() in ("yes", "true", "1")
         result = generate_github_workflow_from_chef_ci(
@@ -2789,6 +3012,105 @@
     )
 
 
+@mcp.tool()
+def generate_ansible_repository(
+    output_path: str,
+    repo_type: str = "auto",
+    cookbook_path: str = "",
+    org_name: str = "myorg",
+    init_git: str = "yes",
+) -> str:
+    """
+    Generate a complete Ansible repository structure.
+
+    Analyses converted Chef cookbooks and creates an appropriate Ansible
+    repository structure with proper organisation, configuration files,
+    and git initialisation.
+
+    Repo Types:
+    - auto: Auto-detect based on conversion analysis (recommended)
+    - inventory_first: Classic inventory-first (best for infra management)
+    - playbooks_roles: Simple playbooks + roles (best for small projects)
+    - collection: Ansible Collection layout (best for reusable automation)
+    - mono_repo: Multi-project mono-repo (best for platform teams)
+
+    Args:
+        output_path: Path where the repository should be created
+        repo_type: Type of repository structure (auto/inventory_first/playbooks_roles/collection/mono_repo)
+        cookbook_path: Optional path to Chef cookbook for analysis (used with repo_type='auto')
+        org_name: Organisation name for the repository
+        init_git: Whether to initialise a git repository ('yes' or 'no')
+
+    Returns:
+        JSON string with generation results including success status, files created, and git status
+
+    """
+    from souschef.generators.repo import (
+        analyse_conversion_output,
+    )
+    from souschef.generators.repo import (
+        generate_ansible_repository as gen_repo,
+    )
+
+    try:
+        output_path = str(_normalize_path(output_path))
+        init_git_bool = init_git.lower() in ("yes", "true", "1")
+
+        # Determine repo type
+        if repo_type == "auto":
+            if not cookbook_path:
+                return json.dumps(
+                    {
+                        "success": False,
+                        "error": "cookbook_path required when repo_type='auto'",
+                    }
+                )
+
+            cookbook_path = str(_normalize_path(cookbook_path))
+
+            # Validate cookbook path exists
+            if not Path(cookbook_path).exists():
+                return json.dumps(
+                    {
+                        "success": False,
+                        "error": f"Cookbook path does not exist: {cookbook_path}",
+                    }
+                )
+
+            # Analyse the cookbook to determine best repo type
+            # Count recipes
+            recipes_dir = Path(cookbook_path) / "recipes"
+            num_recipes = (
+                len(list(recipes_dir.glob("*.rb"))) if recipes_dir.exists() else 0
+            )
+
+            # Basic heuristics for repo type selection
+            has_multiple_apps = num_recipes > 5
+            num_roles = max(1, num_recipes // 2)  # Estimate roles from recipes
+
+            determined_type = analyse_conversion_output(
+                cookbook_path=cookbook_path,
+                num_recipes=num_recipes,
+                num_roles=num_roles,
+                has_multiple_apps=has_multiple_apps,
+                needs_multi_env=True,
+            )
+            result = gen_repo(output_path, determined_type, org_name, init_git_bool)
+        else:
+            # Use specified repo type
+            result = gen_repo(output_path, repo_type, org_name, init_git_bool)
+
+        return json.dumps(result, indent=2)
+
+    except Exception as e:
+        return json.dumps(
+            {
+                "success": False,
+                "error": f"Failed to generate repository: {e}",
+            }
+        )
+
+
 @mcp.tool()
 def parse_chef_migration_assessment(
     cookbook_paths: str,
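
Note: in auto mode the repository-type heuristic is deliberately coarse: it counts recipes/*.rb, flags a multi-application cookbook above five recipes, and estimates one role per two recipes (never fewer than one) before delegating to analyse_conversion_output. Worked through for a cookbook with 8 recipes:

    num_recipes = 8
    has_multiple_apps = num_recipes > 5   # True
    num_roles = max(1, num_recipes // 2)  # 4
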
@@ -2917,21 +3239,46 @@ def _setup_conversion_metadata(cookbook_dir: Path, role_name: str) -> tuple[str,
     return cookbook_name, role_name
 
 
+def _validate_role_name(role_name: str) -> None:
+    """
+    Validate that role_name is safe for filesystem operations.
+
+    Args:
+        role_name: The role name to validate.
+
+    Raises:
+        ValueError: If the role name contains unsafe characters.
+
+    """
+    if not role_name:
+        raise ValueError("Role name cannot be empty")
+    if ".." in role_name or "/" in role_name or "\\" in role_name:
+        raise ValueError(f"Role name contains unsafe characters: {role_name}")
+
+
 def _create_role_structure(output_dir: Path, role_name: str) -> Path:
     """Create the standard Ansible role directory structure."""
-    role_dir = output_dir / role_name
+    # Validate role_name to ensure it's safe for filesystem operations
+    _validate_role_name(role_name)
+
+    base = os.path.realpath(str(output_dir))
+    role_dir_str = os.path.realpath(os.path.join(base, role_name))  # noqa: PTH111, PTH118
+    if os.path.commonpath([base, role_dir_str]) != base:
+        raise RuntimeError("Unsafe role path outside output directory")
+    role_dir = Path(role_dir_str)
     role_tasks_dir = role_dir / "tasks"
     role_templates_dir = role_dir / "templates"
     role_vars_dir = role_dir / "vars"
     role_defaults_dir = role_dir / "defaults"
 
+    # All paths are validated via os.path.commonpath containment check above
     for directory in [
         role_tasks_dir,
         role_templates_dir,
         role_vars_dir,
         role_defaults_dir,
     ]:
-        directory.mkdir(parents=True, exist_ok=True)
+        directory.mkdir(parents=True, exist_ok=True)  # nosonar: S2083
 
     return role_dir
 
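Note: the containment idiom above realpath-resolves the base and the joined candidate, then requires os.path.commonpath to return the base itself; _validate_role_name already rejects separators and .., so the commonpath test is a second, symlink-aware line of defence. The check in isolation:

    import os

    base = os.path.realpath("/srv/roles")
    bad = os.path.realpath(os.path.join(base, "..", "etc"))
    assert os.path.commonpath([base, bad]) != base  # escapes base: rejected
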
@@ -2940,9 +3287,11 @@ def _convert_recipes(
     cookbook_dir: Path, role_dir: Path, conversion_summary: dict
 ) -> None:
     """Convert Chef recipes to Ansible tasks."""
-    recipes_dir = cookbook_dir / "recipes"
-    role_tasks_dir = role_dir / "tasks"
-
+    cookbook_base = os.path.realpath(str(cookbook_dir))
+    recipes_dir_str = os.path.realpath(os.path.join(cookbook_base, "recipes"))  # noqa: PTH111, PTH118
+    if os.path.commonpath([cookbook_base, recipes_dir_str]) != cookbook_base:
+        raise RuntimeError("Unsafe recipes path outside cookbook directory")
+    recipes_dir = Path(recipes_dir_str)
     if not recipes_dir.exists():
         conversion_summary["warnings"].append(
             f"No recipes directory found in {cookbook_dir.name}. "
@@ -2952,7 +3301,7 @@ def _convert_recipes(
 
     from souschef.converters.playbook import generate_playbook_from_recipe
 
-    recipe_files = list(recipes_dir.glob("*.rb"))
+    recipe_files = safe_glob(recipes_dir, "*.rb", cookbook_dir)
     if not recipe_files:
         conversion_summary["warnings"].append(
             f"No recipe files (*.rb) found in {cookbook_dir.name}/recipes/. "
@@ -2962,10 +3311,11 @@ def _convert_recipes(
 
     for recipe_file in recipe_files:
         try:
-            recipe_name = recipe_file.stem
+            validated_recipe = _validated_candidate(recipe_file, recipes_dir)
+            recipe_name = validated_recipe.stem
 
             # Parse recipe to validate it can be processed
-            parse_result = _parse_recipe(str(recipe_file))
+            parse_result = _parse_recipe(str(validated_recipe))
             if parse_result.startswith("Error:"):
                 conversion_summary["errors"].append(
                     f"Failed to parse recipe {recipe_name}: {parse_result}"
@@ -2973,13 +3323,18 @@ def _convert_recipes(
                 continue
 
             # Convert to Ansible tasks
-            playbook_yaml = generate_playbook_from_recipe(str(recipe_file))
+            playbook_yaml = generate_playbook_from_recipe(str(validated_recipe))
 
-            # Write as task file (paths normalized at function entry)
-            task_file = (
-                role_tasks_dir / f"{recipe_name}.yml"
-            )  # nosemgrep: python.lang.security.audit.dynamic-urllib-use-detected
-            task_file.write_text(playbook_yaml)
+            # Write as task file; _safe_join already enforces containment within role_dir
+            task_file = _safe_join(role_dir, "tasks", f"{recipe_name}.yml")
+            try:
+                task_file.parent.mkdir(parents=True, exist_ok=True)  # nosonar
+                safe_write_text(task_file, role_dir, playbook_yaml)
+            except OSError as write_err:
+                conversion_summary["errors"].append(
+                    f"Failed to write task file {task_file.name}: {write_err}"
+                )
+                continue
 
             conversion_summary["converted_files"].append(
                 {
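
Note: _safe_join replaces bare Path division (role_dir / "tasks" / ...) for every write target, and safe_write_text re-checks containment at write time. _safe_join's implementation is in souschef.core.path_utils, outside this diff; the contract it appears to provide, sketched under that assumption:

    from pathlib import Path

    def _safe_join(base: Path, *parts: str) -> Path:
        # Sketch only: join, resolve, and refuse results that escape base.
        joined = base.joinpath(*parts).resolve()
        if not joined.is_relative_to(base.resolve()):
            raise ValueError(f"{joined} escapes {base}")
        return joined
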
@@ -2999,19 +3354,21 @@ def _convert_templates(
     cookbook_dir: Path, role_dir: Path, conversion_summary: dict
 ) -> None:
     """Convert ERB templates to Jinja2 templates."""
-    templates_dir = cookbook_dir / "templates"
-    role_templates_dir = role_dir / "templates"
+    templates_dir = _safe_join(cookbook_dir, "templates")
 
     if not templates_dir.exists():
         return
 
-    for template_file in templates_dir.rglob("*.erb"):
+    for template_file in safe_glob(templates_dir, "**/*.erb", cookbook_dir):
+        validated_template = template_file
         try:
             # Convert ERB to Jinja2
-            conversion_result = _parse_template(str(template_file))
+            validated_template = _validated_candidate(template_file, templates_dir)
+
+            conversion_result = _parse_template(str(validated_template))
             if conversion_result.startswith("Error:"):
                 conversion_summary["errors"].append(
-                    f"Failed to convert template {template_file.name}: {conversion_result}"
+                    f"Failed to convert template {validated_template.name}: {conversion_result}"
                 )
                 continue
 
@@ -3020,15 +3377,15 @@ def _convert_templates(
             template_data = json.loads(conversion_result)
             jinja2_content = template_data.get("jinja2_template", "")
 
-            # Determine relative path for role templates (paths normalized at function entry)
-            rel_path = template_file.relative_to(
-                templates_dir
-            )  # nosemgrep: python.lang.security.audit.dynamic-urllib-use-detected
-            target_file = (
-                role_templates_dir / rel_path.with_suffix("")
-            )  # Remove .erb extension  # nosemgrep: python.lang.security.audit.dynamic-urllib-use-detected
+            # Determine relative path for role templates using _safe_join
+
+            rel_path = validated_template.relative_to(templates_dir)
+            # Build target file path with inline containment guard
+            target_file = _safe_join(
+                role_dir, "templates", str(rel_path.with_suffix(""))
+            )
             target_file.parent.mkdir(parents=True, exist_ok=True)
-            target_file.write_text(jinja2_content)
+            safe_write_text(target_file, role_dir, jinja2_content)
 
             conversion_summary["converted_files"].append(
                 {
@@ -3040,12 +3397,12 @@ def _convert_templates(
 
         except json.JSONDecodeError:
             conversion_summary["errors"].append(
-                f"Invalid JSON result for template {template_file.name}"
+                f"Invalid JSON result for template {validated_template.name}"
             )
 
         except Exception as e:
             conversion_summary["errors"].append(
-                f"Error converting template {template_file.name}: {str(e)}"
+                f"Error converting template {validated_template.name}: {str(e)}"
             )
 
 
@@ -3053,30 +3410,25 @@ def _convert_attributes(
     cookbook_dir: Path, role_dir: Path, conversion_summary: dict
 ) -> None:
     """Convert Chef attributes to Ansible variables."""
-    import yaml
-
-    attributes_dir = cookbook_dir / "attributes"
-    role_defaults_dir = role_dir / "defaults"
+    attributes_dir = _safe_join(cookbook_dir, "attributes")
+    role_defaults_dir = _safe_join(role_dir, "defaults")
 
     if not attributes_dir.exists():
         return
 
-    for attr_file in attributes_dir.glob("*.rb"):
+    for attr_file in safe_glob(attributes_dir, "*.rb", cookbook_dir):
+        validated_attr = attr_file
         try:
+            validated_attr = _validated_candidate(attr_file, attributes_dir)
             # Read the file content
-            content = attr_file.read_text()
-
-            # Extract attributes using internal function
-            from souschef.parsers.attributes import (
-                _extract_attributes,
-                _resolve_attribute_precedence,
-            )
+            content = safe_read_text(validated_attr, cookbook_dir)
 
+            # Extract attributes (already imported at top of file)
             raw_attributes = _extract_attributes(content)
 
             if not raw_attributes:
                 conversion_summary["warnings"].append(
-                    f"No attributes found in {attr_file.name}"
+                    f"No attributes found in {validated_attr.name}"
                 )
                 continue
 
@@ -3091,24 +3443,25 @@ def _convert_attributes(
                 ansible_key = attr_path.replace(".", "_")
                 ansible_vars[ansible_key] = attr_info["value"]
 
-            # Write as defaults (paths normalized at function entry)
-            defaults_file = (
-                role_defaults_dir / f"{attr_file.stem}.yml"
-            )  # nosemgrep: python.lang.security.audit.dynamic-urllib-use-detected
+            # Write as defaults using _safe_join to prevent path injection
+            # All paths are validated via _safe_join to ensure containment within role_defaults_dir
+            defaults_filename: str = f"{validated_attr.stem}.yml"
+            defaults_file: Path = _safe_join(role_defaults_dir, defaults_filename)
             defaults_yaml = yaml.dump(ansible_vars, default_flow_style=False, indent=2)
-            defaults_file.write_text(defaults_yaml)
+            defaults_file.parent.mkdir(parents=True, exist_ok=True)
+            safe_write_text(defaults_file, role_dir, defaults_yaml)
 
             conversion_summary["converted_files"].append(
                 {
                     "type": "defaults",
-                    "source": f"attributes/{attr_file.name}",
-                    "target": f"{role_dir.name}/defaults/{attr_file.stem}.yml",
+                    "source": f"attributes/{validated_attr.name}",
+                    "target": f"{role_dir.name}/defaults/{validated_attr.stem}.yml",
                 }
             )
 
         except Exception as e:
             conversion_summary["errors"].append(
-                f"Error converting attributes {attr_file.name}: {str(e)}"
+                f"Error converting attributes {validated_attr.name}: {str(e)}"
             )
 
 
@@ -3119,11 +3472,16 @@ def _create_main_task_file(
     if not include_recipes:
         return
 
-    default_task_file = role_dir / "tasks" / "main.yml"
+    # Build path to tasks directory safely
+    tasks_dir: Path = _safe_join(role_dir, "tasks")
+    # Build path to main.yml within tasks directory
+    default_task_file: Path = _safe_join(tasks_dir, "main.yml")
     if default_task_file.exists():
         return  # Already exists
 
-    default_recipe = cookbook_dir / "recipes" / "default.rb"
+    # Build path to default recipe safely
+    recipes_dir: Path = _safe_join(cookbook_dir, "recipes")
+    default_recipe: Path = _safe_join(recipes_dir, "default.rb")
     if not default_recipe.exists():
         return
 
@@ -3131,7 +3489,8 @@
     from souschef.converters.playbook import generate_playbook_from_recipe
 
     playbook_yaml = generate_playbook_from_recipe(str(default_recipe))
-    default_task_file.write_text(playbook_yaml)
+    default_task_file.parent.mkdir(parents=True, exist_ok=True)
+    safe_write_text(default_task_file, role_dir, playbook_yaml)
     conversion_summary["converted_files"].append(
         {
             "type": "task",
@@ -3153,14 +3512,11 @@ def _create_role_metadata(
     conversion_summary: dict,
 ) -> None:
     """Create Ansible role metadata file."""
-    import yaml
+    # Use _safe_join to construct metadata file path
 
-    # role_dir created from normalized paths
-    meta_dir = role_dir / "meta"
-    meta_dir.mkdir(exist_ok=True)
-    meta_file = (
-        meta_dir / "main.yml"
-    )  # nosemgrep: python.lang.security.audit.dynamic-urllib-use-detected
+    meta_dir = _safe_join(role_dir, "meta")
+    meta_dir.mkdir(parents=True, exist_ok=True)
+    meta_file = _safe_join(meta_dir, "main.yml")
 
     meta_content: dict[str, Any] = {
         "galaxy_info": {
@@ -3182,7 +3538,7 @@
         meta_content["dependencies"] = [{"role": dep} for dep in deps]
 
     meta_yaml = yaml.dump(meta_content, default_flow_style=False, indent=2)
-    meta_file.write_text(meta_yaml)
+    safe_write_text(meta_file, role_dir, meta_yaml)
 
     conversion_summary["converted_files"].append(
         {
@@ -3210,13 +3566,13 @@ def _generate_conversion_report(conversion_summary: dict, role_dir: Path) -> str
         summary_lines.append("")
         summary_lines.append("## Errors:")
         for error in conversion_summary["errors"]:
-            summary_lines.append(f"- {error}")
+            summary_lines.append(f"- ERROR: {error}")
 
     if conversion_summary["warnings"]:
         summary_lines.append("")
         summary_lines.append("## Warnings:")
         for warning in conversion_summary["warnings"]:
-            summary_lines.append(f"- ⚠️ {warning}")
+            summary_lines.append(f"- WARNING: {warning}")
 
     summary_lines.append("")
     summary_lines.append(f"## Role Location: {role_dir}")
@@ -3291,14 +3647,25 @@ def _validate_conversion_paths(
     cookbooks_path: str, output_path: str
 ) -> tuple[Path, Path]:
     """Validate and return Path objects for conversion paths."""
-    from souschef.core.path_utils import _normalize_path
+    base_dir = Path.cwd().resolve()
+
+    cookbooks_candidate = _normalize_path(cookbooks_path)
+    try:
+        cookbooks_dir = _ensure_within_base_path(cookbooks_candidate, base_dir)
+    except ValueError as e:
+        raise ValueError(f"Cookbooks path is invalid or outside workspace: {e}") from e
 
-    cookbooks_dir = _normalize_path(cookbooks_path)
-    output_dir = _normalize_path(output_path)
+    from souschef.core.path_utils import safe_exists
 
-    if not cookbooks_dir.exists():
+    if not safe_exists(cookbooks_dir, base_dir):
         raise ValueError(f"Cookbooks path does not exist: {cookbooks_path}")
 
+    output_candidate = _normalize_path(output_path)
+    try:
+        output_dir = _ensure_within_base_path(output_candidate, base_dir)
+    except ValueError as e:
+        raise ValueError(f"Output path is invalid or outside workspace: {e}") from e
+
     return cookbooks_dir, output_dir
 
 
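Note: _validate_conversion_paths now anchors both arguments to the current working directory, so bulk conversions can only read cookbooks and write roles inside the workspace the server was started in. A sketch of the _ensure_within_base_path contract assumed here (its real implementation is in souschef.core.path_utils, not in this diff):

    from pathlib import Path

    def _ensure_within_base_path(candidate: Path, base: Path) -> Path:
        resolved = candidate.resolve()
        if not resolved.is_relative_to(base):
            raise ValueError(f"{candidate} is outside {base}")
        return resolved
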
@@ -3496,6 +3863,7 @@ def main() -> None:
 
     This is the main entry point for running the server.
     """
+    configure_logging()
     mcp.run()
 
 
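
Note: main() now configures logging before starting the server. For an MCP server on the stdio transport this ordering matters: stdout carries the JSON-RPC stream, so logging must be routed to stderr before the first protocol message. A plausible sketch of configure_logging (the real implementation lives in souschef.core.logging and is not shown in this diff):

    import logging
    import sys

    def configure_logging() -> None:
        # stdio MCP servers must keep stdout clean for JSON-RPC frames.
        logging.basicConfig(stream=sys.stderr, level=logging.INFO)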