mcp-souschef 3.0.0__py3-none-any.whl → 3.2.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- {mcp_souschef-3.0.0.dist-info → mcp_souschef-3.2.0.dist-info}/METADATA +83 -380
- mcp_souschef-3.2.0.dist-info/RECORD +47 -0
- souschef/__init__.py +2 -10
- souschef/assessment.py +336 -181
- souschef/ci/common.py +1 -1
- souschef/cli.py +37 -13
- souschef/converters/playbook.py +119 -48
- souschef/core/__init__.py +6 -1
- souschef/core/path_utils.py +233 -19
- souschef/deployment.py +10 -3
- souschef/generators/__init__.py +13 -0
- souschef/generators/repo.py +695 -0
- souschef/parsers/attributes.py +1 -1
- souschef/parsers/habitat.py +1 -1
- souschef/parsers/inspec.py +25 -2
- souschef/parsers/metadata.py +5 -3
- souschef/parsers/recipe.py +1 -1
- souschef/parsers/resource.py +1 -1
- souschef/parsers/template.py +1 -1
- souschef/server.py +426 -188
- souschef/ui/app.py +24 -30
- souschef/ui/pages/cookbook_analysis.py +837 -163
- mcp_souschef-3.0.0.dist-info/RECORD +0 -46
- souschef/converters/cookbook_specific.py.backup +0 -109
- {mcp_souschef-3.0.0.dist-info → mcp_souschef-3.2.0.dist-info}/WHEEL +0 -0
- {mcp_souschef-3.0.0.dist-info → mcp_souschef-3.2.0.dist-info}/entry_points.txt +0 -0
- {mcp_souschef-3.0.0.dist-info → mcp_souschef-3.2.0.dist-info}/licenses/LICENSE +0 -0
souschef/server.py
CHANGED
@@ -1,12 +1,16 @@
 """SousChef MCP Server - Chef to Ansible conversion assistant."""
 
+# codeql[py/unused-import]: Intentional re-exports for MCP tools and test compatibility
+
 import ast
 import json
+import os
 import re
 from pathlib import Path
 from typing import Any
 
-from mcp.server import FastMCP
+import yaml
+from mcp.server import FastMCP
 
 # Import assessment functions with aliases to avoid name conflicts
 from souschef.assessment import (
@@ -15,23 +19,16 @@ from souschef.assessment import (
 from souschef.assessment import (
     assess_chef_migration_complexity as _assess_chef_migration_complexity,
 )
-from souschef.assessment import (
-    generate_migration_plan as _generate_migration_plan,
-)
-from souschef.assessment import (
-    generate_migration_report as _generate_migration_report,
-)
+from souschef.assessment import generate_migration_plan as _generate_migration_plan
+from souschef.assessment import generate_migration_report as _generate_migration_report
 from souschef.assessment import (
     parse_chef_migration_assessment as _parse_chef_migration_assessment,
 )
-from souschef.assessment import (
-    validate_conversion as _validate_conversion,
-)
+from souschef.assessment import validate_conversion as _validate_conversion
 
 # Import extracted modules
 # Import private helper functions still used in server.py
-# codeql[py/unused-import]
-from souschef.converters.habitat import (  # noqa: F401
+from souschef.converters.habitat import (  # noqa: F401, codeql[py/unused-import]
     _add_service_build,
     _add_service_dependencies,
     _add_service_environment,
@@ -51,9 +48,9 @@ from souschef.converters.habitat import (
     generate_compose_from_habitat as _generate_compose_from_habitat,
 )
 
+# Import playbook converter functions
 # Re-exports of playbook internal functions for backward compatibility (tests)
-# codeql[py/unused-import]
-from souschef.converters.playbook import (  # noqa: F401
+from souschef.converters.playbook import (  # noqa: F401, codeql[py/unused-import]
     _add_general_recommendations,
     _convert_chef_block_to_ansible,
     _convert_chef_condition_to_ansible,
@@ -76,8 +73,6 @@ from souschef.converters.playbook import (  # noqa: F401
     _parse_search_condition,
     _process_subscribes,
 )
-
-# Import playbook converter functions
 from souschef.converters.playbook import (
     analyse_chef_search_patterns as _analyse_chef_search_patterns,
 )
@@ -87,9 +82,7 @@ from souschef.converters.playbook import (
 from souschef.converters.playbook import (
     generate_dynamic_inventory_script as _generate_dynamic_inventory_script,
 )
-
-# codeql[py/unused-import]: Backward compatibility exports for test suite
-from souschef.converters.resource import (  # noqa: F401
+from souschef.converters.resource import (  # noqa: F401, codeql[py/unused-import]
     _convert_chef_resource_to_ansible,
     _format_ansible_task,
     _get_file_params,
@@ -101,8 +94,7 @@ from souschef.converters.resource import (
 
 # Re-exports for backward compatibility (used by tests) - DO NOT REMOVE
 # These imports are intentionally exposed for external test access
-# codeql[py/unused-import]
-from souschef.core.constants import (  # noqa: F401
+from souschef.core.constants import (  # noqa: F401, codeql[py/unused-import]
     ACTION_TO_STATE,
     ANSIBLE_SERVICE_MODULE,
     ERROR_PREFIX,
@@ -112,33 +104,47 @@ from souschef.core.constants import (  # noqa: F401
 
 # Import core utilities
 from souschef.core.errors import format_error_with_context
-
-
-
+from souschef.core.path_utils import (  # noqa: F401, codeql[py/unused-import]
+    _ensure_within_base_path,
+    _normalize_path,
+    _safe_join,
+    _validated_candidate,
+    safe_glob,
+    safe_read_text,
+    safe_write_text,
+)
 
 # Re-exports for backward compatibility (used by tests) - DO NOT REMOVE
 # These imports are intentionally exposed for external test access
-# codeql[py/unused-import]
-from souschef.core.ruby_utils import (  # noqa: F401
-    _normalize_ruby_value,
+from souschef.core.ruby_utils import (
+    _normalize_ruby_value,  # noqa: F401, codeql[py/unused-import]
 )
 
 # Re-exports for backward compatibility (used by tests) - DO NOT REMOVE
 # These imports are intentionally exposed for external test access
-# codeql[py/unused-import]
-from souschef.core.validation import (  # noqa: F401
+from souschef.core.validation import (  # noqa: F401, codeql[py/unused-import]
     ValidationCategory,
     ValidationEngine,
     ValidationLevel,
     ValidationResult,
 )
 
+# Explicit re-exports for language servers and type checkers
+# These names are intentionally available from souschef.server
+__all__ = [
+    "ValidationCategory",
+    "ValidationEngine",
+    "ValidationLevel",
+    "ValidationResult",
+]
+
+# Re-exports for backward compatibility (used by tests)
+# These are imported and re-exported intentionally
 # Import validation framework
 # Re-exports of deployment internal functions for backward compatibility (tests)
 # Public re-exports of deployment functions for test backward compatibility
 # Note: MCP tool wrappers exist for some of these, but tests import directly
-# codeql[py/unused-import]
-from souschef.deployment import (  # noqa: F401
+from souschef.deployment import (  # noqa: F401, codeql[py/unused-import]
     _analyse_cookbook_for_awx,
     _analyse_cookbooks_directory,
     _detect_deployment_patterns_in_recipe,
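Note: the `souschef.core.path_utils` helpers imported above are the backbone of this release's path-containment hardening (the file list shows `path_utils.py` growing by 233 lines). Their implementations are not part of this diff; the sketch below shows plausible shapes inferred purely from the call sites in `server.py`, so treat the names' bodies as assumptions rather than the real code.

```python
# Illustrative sketch only -- inferred from call sites, not the real path_utils code.
from pathlib import Path


def _ensure_within_base_path(candidate: Path, base: Path) -> Path:
    """Return the resolved candidate if it stays inside base, else raise ValueError."""
    resolved = candidate.resolve()
    if not resolved.is_relative_to(base.resolve()):  # Python >= 3.9
        raise ValueError(f"{candidate} escapes {base}")
    return resolved


def safe_glob(directory: Path, pattern: str, base: Path) -> list[Path]:
    """Glob inside directory, keeping only matches still contained in base."""
    return [
        p for p in directory.glob(pattern) if p.resolve().is_relative_to(base.resolve())
    ]


def safe_read_text(path: Path, base: Path) -> str:
    """Read a file only after confirming containment in base."""
    return _ensure_within_base_path(path, base).read_text()


def safe_write_text(path: Path, base: Path, content: str) -> None:
    """Write a file only after confirming containment in base."""
    _ensure_within_base_path(path, base).write_text(content)
```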
@@ -152,17 +158,7 @@ from souschef.deployment import (  # noqa: F401
     _parse_chef_runlist,
     _recommend_ansible_strategies,
     analyse_chef_application_patterns,
-    convert_chef_deployment_to_ansible_strategy,
-    generate_awx_inventory_source_from_chef,
-    generate_awx_job_template_from_cookbook,
-    generate_awx_project_from_cookbooks,
-    generate_awx_workflow_from_chef_runlist,
-    generate_blue_green_deployment_playbook,
-    generate_canary_deployment_strategy,
 )
-
-# Re-exports for backward compatibility (used by tests)
-# These are imported and re-exported intentionally
 from souschef.deployment import (
     convert_chef_deployment_to_ansible_strategy as _convert_chef_deployment_to_ansible_strategy,
 )
@@ -189,33 +185,28 @@ from souschef.deployment import (
 from souschef.filesystem import list_directory as _list_directory
 from souschef.filesystem import read_file as _read_file
 
-#
-from souschef.parsers.attributes import (  # noqa: F401
+# Import parser functions
+from souschef.parsers.attributes import (  # noqa: F401, codeql[py/unused-import]
     _extract_attributes,
     _format_attributes,
     _format_resolved_attributes,
     _get_precedence_level,
     _resolve_attribute_precedence,
 )
-
-# Import parser functions
 from souschef.parsers.attributes import parse_attributes as _parse_attributes
 
-#
-from souschef.parsers.habitat import (  # noqa: F401
+# Import Habitat parser internal functions for backward compatibility
+from souschef.parsers.habitat import (  # noqa: F401, codeql[py/unused-import]
     _extract_plan_array,
     _extract_plan_exports,
     _extract_plan_function,
     _extract_plan_var,
     _update_quote_state,
 )
-
-# Import Habitat parser internal functions for backward compatibility
 from souschef.parsers.habitat import parse_habitat_plan as _parse_habitat_plan
 
 # Re-export InSpec internal functions for backward compatibility (tests)
-# codeql[py/unused-import]
-from souschef.parsers.inspec import (  # noqa: F401
+from souschef.parsers.inspec import (  # noqa: F401, codeql[py/unused-import]
     _convert_inspec_to_ansible_assert,
     _convert_inspec_to_goss,
     _convert_inspec_to_serverspec,
@@ -224,18 +215,12 @@ from souschef.parsers.inspec import (  # noqa: F401
     _generate_inspec_from_resource,
     _parse_inspec_control,
 )
-from souschef.parsers.inspec import (
-    convert_inspec_to_test as _convert_inspec_test,
-)
-from souschef.parsers.inspec import (
-    parse_inspec_profile as _parse_inspec,
-)
-
-# codeql[py/unused-import]: Backward compatibility exports for test suite
-from souschef.parsers.metadata import (  # noqa: F401
-    _extract_metadata,
-    _format_cookbook_structure,
-    _format_metadata,
+from souschef.parsers.inspec import convert_inspec_to_test as _convert_inspec_test
+from souschef.parsers.inspec import parse_inspec_profile as _parse_inspec
+from souschef.parsers.metadata import (
+    _extract_metadata,  # noqa: F401, codeql[py/unused-import]
+    _format_cookbook_structure,  # noqa: F401, codeql[py/unused-import]
+    _format_metadata,  # noqa: F401, codeql[py/unused-import]
 )
 from souschef.parsers.metadata import (
     list_cookbook_structure as _list_cookbook_structure,
@@ -244,24 +229,23 @@ from souschef.parsers.metadata import (
     parse_cookbook_metadata as _parse_cookbook_metadata,
 )
 from souschef.parsers.metadata import read_cookbook_metadata as _read_cookbook_metadata
-
-# codeql[py/unused-import]
-from souschef.parsers.recipe import (  # noqa: F401
-    _extract_conditionals,
-    _extract_resources,
-    _format_resources,
+from souschef.parsers.recipe import (
+    _extract_conditionals,  # noqa: F401, codeql[py/unused-import]
+    _extract_resources,  # noqa: F401, codeql[py/unused-import]
+    _format_resources,  # noqa: F401, codeql[py/unused-import]
 )
 from souschef.parsers.recipe import parse_recipe as _parse_recipe
 
-#
-from souschef.parsers.resource import (  # noqa: F401
-    _extract_resource_actions,
-    _extract_resource_properties,
+# Re-exports for backward compatibility (used by tests) - DO NOT REMOVE
+# These imports are intentionally exposed for external test access
+from souschef.parsers.resource import (
+    _extract_resource_actions,  # noqa: F401, codeql[py/unused-import]
+    _extract_resource_properties,  # noqa: F401, codeql[py/unused-import]
 )
 from souschef.parsers.resource import parse_custom_resource as _parse_custom_resource
 
-#
-from souschef.parsers.template import (  # noqa: F401
+# Import internal functions for backward compatibility (used by tests)
+from souschef.parsers.template import (  # noqa: F401, codeql[py/unused-import]
     _convert_erb_to_jinja2,
     _extract_code_block_variables,
     _extract_heredoc_strings,
@@ -270,17 +254,35 @@ from souschef.parsers.template import (  # noqa: F401
     _extract_template_variables,
     _strip_ruby_comments,
 )
-
-# Import internal functions for backward compatibility (used by tests)
 from souschef.parsers.template import parse_template as _parse_template
 
+# Backward compatibility re-exports without underscore prefix (for tests)
+# noinspection PyUnusedLocal
+convert_chef_deployment_to_ansible_strategy = (  # noqa: F401
+    _convert_chef_deployment_to_ansible_strategy
+)
+generate_awx_inventory_source_from_chef = (  # noqa: F401
+    _generate_awx_inventory_source_from_chef
+)
+generate_awx_job_template_from_cookbook = (  # noqa: F401
+    _generate_awx_job_template_from_cookbook
+)
+generate_awx_project_from_cookbooks = _generate_awx_project_from_cookbooks  # noqa: F401
+generate_awx_workflow_from_chef_runlist = (  # noqa: F401
+    _generate_awx_workflow_from_chef_runlist
+)
+generate_blue_green_deployment_playbook = (  # noqa: F401
+    _generate_blue_green_deployment_playbook
+)
+generate_canary_deployment_strategy = (  # noqa: F401
+    _generate_canary_deployment_strategy
+)
+
 # Create a new FastMCP server
 mcp = FastMCP("souschef")
 
-#
-
-ERROR_IS_DIRECTORY = "Error: {path} is a directory, not a file"
-ERROR_PERMISSION_DENIED = "Error: Permission denied for {path}"
+# File constants
+METADATA_RB = "metadata.rb"
 
 # File constants
 METADATA_RB = "metadata.rb"
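Note: the block above re-binds the privately imported `_generate_*` deployment functions back to their public names, so existing callers of `from souschef.server import generate_canary_deployment_strategy` keep working after the refactor. A minimal self-contained illustration of the pattern (names hypothetical):

```python
# A module-level assignment turns a privately aliased import back into a public,
# importable attribute of this module.
from json import dumps as _dumps  # imported under a private alias

dumps = _dumps  # noqa: F401 -- callers can now do `from thismodule import dumps`
```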
@@ -300,6 +302,10 @@ def parse_template(path: str) -> str:
     JSON string with extracted variables and Jinja2-converted template.
 
     """
+    try:
+        path = str(_normalize_path(path))
+    except ValueError as e:
+        return format_error_with_context(e, "validating template path", path)
     return _parse_template(path)
 
 
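Note: the same four-line guard is added to every filesystem-facing MCP tool below (`parse_custom_resource`, `list_directory`, `read_file`, the metadata, recipe, attribute, and InSpec tools). The general shape, as a self-contained sketch with stand-in helpers (assumed behaviour, not the real souschef code):

```python
from pathlib import Path


def _normalize_path(raw: str) -> Path:
    """Stand-in for souschef.core.path_utils._normalize_path (assumed behaviour)."""
    resolved = Path(raw).expanduser().resolve()
    if not resolved.is_absolute():
        raise ValueError(f"could not resolve {raw!r} to an absolute path")
    return resolved


def read_tool(path: str) -> str:
    """Validate first, then delegate; failures return error strings, not exceptions."""
    try:
        path = str(_normalize_path(path))
    except ValueError as e:
        return f"Error validating file path {path!r}: {e}"
    return Path(path).read_text()
```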
@@ -315,6 +321,10 @@ def parse_custom_resource(path: str) -> str:
     JSON string with extracted properties, actions, and metadata.
 
     """
+    try:
+        path = str(_normalize_path(path))
+    except ValueError as e:
+        return format_error_with_context(e, "validating resource path", path)
     return _parse_custom_resource(path)
 
 
@@ -330,6 +340,10 @@ def list_directory(path: str) -> list[str] | str:
     A list of filenames in the directory, or an error message.
 
     """
+    try:
+        path = str(_normalize_path(path))
+    except ValueError as e:
+        return format_error_with_context(e, "validating directory path", path)
     result: list[str] | str = _list_directory(path)
     return result
 
@@ -346,6 +360,10 @@ def read_file(path: str) -> str:
     The contents of the file, or an error message.
 
     """
+    try:
+        path = str(_normalize_path(path))
+    except ValueError as e:
+        return format_error_with_context(e, "validating file path", path)
     result: str = _read_file(path)
     return result
 
@@ -362,6 +380,10 @@ def read_cookbook_metadata(path: str) -> str:
     Formatted string with extracted metadata.
 
     """
+    try:
+        path = str(_normalize_path(path))
+    except ValueError as e:
+        return format_error_with_context(e, "validating metadata path", path)
     return _read_cookbook_metadata(path)
 
 
@@ -377,6 +399,10 @@ def parse_cookbook_metadata(path: str) -> dict[str, str | list[str]]:
     Dictionary containing extracted metadata fields.
 
     """
+    try:
+        path = str(_normalize_path(path))
+    except ValueError as e:
+        return {"error": str(e)}
     return _parse_cookbook_metadata(path)
 
 
@@ -392,6 +418,10 @@ def parse_recipe(path: str) -> str:
     Formatted string with extracted Chef resources and their properties.
 
     """
+    try:
+        path = str(_normalize_path(path))
+    except ValueError as e:
+        return format_error_with_context(e, "validating recipe path", path)
     return _parse_recipe(path)
 
 
@@ -421,6 +451,10 @@ def parse_attributes(path: str, resolve_precedence: bool = True) -> str:
     Formatted string with extracted attributes.
 
     """
+    try:
+        path = str(_normalize_path(path))
+    except ValueError as e:
+        return format_error_with_context(e, "validating attributes path", path)
     return _parse_attributes(path, resolve_precedence)
 
 
@@ -436,6 +470,10 @@ def list_cookbook_structure(path: str) -> str:
     Formatted string showing the cookbook structure.
 
     """
+    try:
+        path = str(_normalize_path(path))
+    except ValueError as e:
+        return format_error_with_context(e, "validating cookbook path", path)
     return _list_cookbook_structure(path)
 
 
@@ -505,7 +543,6 @@ def _extract_resource_subscriptions(
     return subscriptions
 
 
-@mcp.tool()
 def _parse_controls_from_directory(profile_path: Path) -> list[dict[str, Any]]:
     """
     Parse all control files from an InSpec profile directory.
@@ -526,9 +563,9 @@ def _parse_controls_from_directory(profile_path: Path) -> list[dict[str, Any]]:
         raise FileNotFoundError(f"No controls directory found in {profile_path}")
 
     controls = []
-    for control_file in controls_dir.glob("*.rb"):
+    for control_file in safe_glob(controls_dir, "*.rb", profile_path):
         try:
-            content = control_file.read_text()
+            content = safe_read_text(control_file, profile_path)
             file_controls = _parse_inspec_control(content)
             for ctrl in file_controls:
                 ctrl["file"] = str(control_file.relative_to(profile_path))
@@ -554,7 +591,7 @@ def _parse_controls_from_file(profile_path: Path) -> list[dict[str, Any]]:
 
     """
     try:
-        content = profile_path.read_text()
+        content = safe_read_text(profile_path, profile_path.parent)
         controls = _parse_inspec_control(content)
         for ctrl in controls:
             ctrl["file"] = profile_path.name
@@ -575,6 +612,10 @@ def parse_inspec_profile(path: str) -> str:
     JSON string with parsed controls, or error message.
 
     """
+    try:
+        path = str(_normalize_path(path))
+    except ValueError as e:
+        return format_error_with_context(e, "validating InSpec path", path)
     return _parse_inspec(path)
 
 
@@ -591,6 +632,10 @@ def convert_inspec_to_test(inspec_path: str, output_format: str = "testinfra") -> str:
     Converted test code or error message.
 
     """
+    try:
+        inspec_path = str(_normalize_path(inspec_path))
+    except ValueError as e:
+        return format_error_with_context(e, "validating InSpec path", inspec_path)
     return _convert_inspec_test(inspec_path, output_format)
 
 
@@ -646,6 +691,9 @@ def generate_inspec_from_recipe(recipe_path: str) -> str:
 
     """
     try:
+        # Validate and normalize path
+        recipe_path = str(_normalize_path(recipe_path))
+
         # First parse the recipe
         recipe_result: str = parse_recipe(recipe_path)
 
@@ -705,8 +753,6 @@ def convert_chef_databag_to_vars(
 
     """
     try:
-        import yaml
-
         # Validate inputs
         if not databag_content or not databag_content.strip():
             return (
@@ -812,13 +858,14 @@ def _validate_databags_directory(
     return databags_path, None
 
 
-def _convert_databag_item(item_file, databag_name: str, output_directory: str) -> dict:
+def _convert_databag_item(
+    item_file, databag_name: str, output_directory: str, base_path: Path
+) -> dict:
     """Convert a single databag item file to Ansible format."""
     item_name = item_file.stem
 
     try:
-        with open(item_file) as f:
-            content = f.read()
+        content = safe_read_text(item_file, base_path)
 
         # Detect if encrypted
         is_encrypted = _detect_encrypted_databag(content)
@@ -843,13 +890,17 @@ def _convert_databag_item(item_file, databag_name: str, output_directory: str) -> dict:
         return {"databag": databag_name, "item": item_name, "error": str(e)}
 
 
-def _process_databag_directory(databag_dir, output_directory: str) -> list[dict]:
+def _process_databag_directory(
+    databag_dir, output_directory: str, base_path: Path
+) -> list[dict]:
     """Process all items in a single databag directory."""
     results = []
     databag_name = databag_dir.name
 
-    for item_file in databag_dir.glob("*.json"):
-        result = _convert_databag_item(item_file, databag_name, output_directory)
+    for item_file in safe_glob(databag_dir, "*.json", base_path):
+        result = _convert_databag_item(
+            item_file, databag_name, output_directory, base_path
+        )
         results.append(result)
 
     return results
@@ -884,11 +935,13 @@ def generate_ansible_vault_from_databags(
     conversion_results = []
 
     # Process each data bag directory
-    for databag_dir in databags_path.iterdir():
+    for databag_dir in databags_path.iterdir():  # nosonar
         if not databag_dir.is_dir():
             continue
 
-        results = _process_databag_directory(databag_dir, output_directory)
+        results = _process_databag_directory(
+            databag_dir, output_directory, databags_path
+        )
         conversion_results.extend(results)
 
     # Generate summary and file structure
@@ -915,6 +968,9 @@ def analyse_chef_databag_usage(cookbook_path: str, databags_path: str = "") -> str:
     Analysis of data bag usage and migration recommendations
 
     """
+    cookbook_path = str(_normalize_path(cookbook_path))
+    if databags_path:
+        databags_path = str(_normalize_path(databags_path))
     try:
         cookbook = _normalize_path(cookbook_path)
         if not cookbook.exists():
@@ -1027,12 +1083,11 @@ def generate_inventory_from_chef_environments(
     environments = {}
     processing_results = []
 
-    for env_file in env_path.glob("*.rb"):
+    for env_file in safe_glob(env_path, "*.rb", env_path):
         env_name = env_file.stem
 
         try:
-            with open(env_file) as f:
-                content = f.read()
+            content = safe_read_text(env_file, env_path)
 
             env_data = _parse_chef_environment_content(content)
             environments[env_name] = env_data
@@ -1374,8 +1429,6 @@ def _generate_inventory_group_from_environment(
     env_data: dict, env_name: str, include_constraints: bool
 ) -> str:
     """Generate Ansible inventory group configuration from environment data."""
-    import yaml
-
     group_vars: dict[str, Any] = {}
 
     # Add environment metadata
@@ -1408,7 +1461,7 @@
         ),
     }
 
-    return yaml.dump(group_vars, default_flow_style=False, indent=2)
+    return str(yaml.dump(group_vars, default_flow_style=False, indent=2))
 
 
 def _build_conversion_summary(results: list) -> str:
@@ -1461,8 +1514,6 @@ def _generate_yaml_inventory(environments: dict) -> str:
     YAML inventory string
 
     """
-    import yaml
-
     inventory: dict[str, Any] = {"all": {"children": {}}}
 
     for env_name, env_data in environments.items():
@@ -1592,8 +1643,7 @@ def _extract_environment_usage_from_cookbook(cookbook_path) -> list:
     # Search for environment usage in Ruby files
     for ruby_file in cookbook_path.rglob("*.rb"):
         try:
-            with open(ruby_file) as f:
-                content = f.read()
+            content = safe_read_text(ruby_file, cookbook_path)
 
             # Find environment usage patterns
             found_patterns = _find_environment_patterns_in_content(
@@ -1650,13 +1700,12 @@ def _analyse_environments_structure(environments_path) -> dict:
     """Analyse the structure of Chef environments directory."""
    structure: dict[str, Any] = {"total_environments": 0, "environments": {}}
 
-    for env_file in environments_path.glob("*.rb"):
+    for env_file in safe_glob(environments_path, "*.rb", environments_path):
         structure["total_environments"] += 1
         env_name = env_file.stem
 
         try:
-            with open(env_file) as f:
-                content = f.read()
+            content = safe_read_text(env_file, environments_path)
 
             env_data = _parse_chef_environment_content(content)
 
@@ -1852,12 +1901,10 @@ def _convert_databag_to_ansible_vars(
 
 def _generate_vault_content(vars_dict: dict, databag_name: str) -> str:
     """Generate Ansible Vault YAML content from variables dictionary."""
-    import yaml
-
     # Structure for vault file
     vault_vars = {f"{databag_name}_vault": vars_dict}
 
-    return yaml.dump(vault_vars, default_flow_style=False, indent=2)
+    return str(yaml.dump(vault_vars, default_flow_style=False, indent=2))
 
 
 def _detect_encrypted_databag(content: str) -> bool:
@@ -2036,8 +2083,7 @@ def _extract_databag_usage_from_cookbook(cookbook_path) -> list:
     # Search for data bag usage in Ruby files
     for ruby_file in cookbook_path.rglob("*.rb"):
         try:
-            with open(ruby_file) as f:
-                content = f.read()
+            content = safe_read_text(ruby_file, cookbook_path)
 
             # Find data bag usage patterns
             found_patterns = _find_databag_patterns_in_content(content, str(ruby_file))
@@ -2099,7 +2145,7 @@ def _analyse_databag_structure(databags_path) -> dict:
         "databags": {},
     }
 
-    for databag_dir in databags_path.iterdir():
+    for databag_dir in databags_path.iterdir():  # nosonar
         if not databag_dir.is_dir():
             continue
 
@@ -2107,13 +2153,12 @@
         structure["total_databags"] += 1
 
         items = []
-        for item_file in databag_dir.glob("*.json"):
+        for item_file in safe_glob(databag_dir, "*.json", databags_path):
             structure["total_items"] += 1
             item_name = item_file.stem
 
             try:
-                with open(item_file) as f:
-                    content = f.read()
+                content = safe_read_text(item_file, databags_path)
 
                 is_encrypted = _detect_encrypted_databag(content)
                 if is_encrypted:
@@ -2359,8 +2404,10 @@ def assess_chef_migration_complexity(
     Detailed assessment report in markdown format.
 
     """
+    # Sanitise and contain user-provided cookbook paths before processing
+    sanitized = _sanitize_cookbook_paths_input(cookbook_paths)
     return _assess_chef_migration_complexity(
-        cookbook_paths, migration_scope, target_platform
+        sanitized, migration_scope, target_platform
     )
 
 
@@ -2385,7 +2432,9 @@ def generate_migration_plan(
     Detailed migration plan in markdown format.
 
     """
-    return _generate_migration_plan(cookbook_paths, migration_strategy, timeline_weeks)
+    # Sanitise and contain user-provided cookbook paths before processing
+    sanitized = _sanitize_cookbook_paths_input(cookbook_paths)
+    return _generate_migration_plan(sanitized, migration_strategy, timeline_weeks)
 
 
 @mcp.tool()
@@ -2403,7 +2452,9 @@ def analyse_cookbook_dependencies(cookbook_paths: str) -> str:
     Dependency analysis report in markdown format.
 
     """
-    return _analyse_cookbook_dependencies(cookbook_paths)
+    # Sanitise and contain user-provided cookbook paths before processing
+    sanitized = _sanitize_cookbook_paths_input(cookbook_paths)
+    return _analyse_cookbook_dependencies(sanitized)
 
 
 @mcp.tool()
@@ -2427,11 +2478,47 @@ def generate_migration_report(
     Comprehensive migration report in markdown format.
 
     """
+    # Sanitise and contain user-provided cookbook paths before processing
+    sanitized = _sanitize_cookbook_paths_input(cookbook_paths)
     return _generate_migration_report(
-        cookbook_paths, report_format, include_technical_details
+        sanitized, report_format, include_technical_details
     )
 
 
+def _sanitize_cookbook_paths_input(cookbook_paths: str) -> str:
+    """
+    Sanitise a comma-separated list of cookbook paths.
+
+    Args:
+        cookbook_paths: Comma-separated paths provided by the user.
+
+    Returns:
+        A comma-separated string of normalised paths.
+
+    Raises:
+        ValueError: If any path is invalid.
+
+    """
+    sanitized_paths: list[str] = []
+    for raw in cookbook_paths.split(","):
+        candidate = raw.strip()
+        if not candidate:
+            continue
+        # Normalize the path (resolves ., .., symlinks)
+
+        # prevents traversal attacks; file access is further contained by per-operation checks
+        normalised = _normalize_path(candidate)
+
+        # Validate it's an absolute path after normalization
+        if not normalised.is_absolute():
+            msg = f"Path must be absolute after normalization: {candidate}"
+            raise ValueError(msg)
+
+        # Use the normalized absolute path (temp dirs, workspace dirs all allowed)
+        sanitized_paths.append(str(normalised))
+    return ",".join(sanitized_paths)
+
+
 @mcp.tool()
 def validate_conversion(
     conversion_type: str,
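Note: illustrative behaviour of the sanitiser above (paths hypothetical):

```python
# Whitespace is trimmed and empty entries dropped; each surviving entry is
# normalised by _normalize_path, which must yield an absolute path.
_sanitize_cookbook_paths_input("/srv/cookbooks/app, ,/srv/cookbooks/db")
# -> "/srv/cookbooks/app,/srv/cookbooks/db"
# An entry that fails to normalise raises ValueError, which propagates out of the
# calling MCP tool rather than being swallowed.
```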
@@ -2474,6 +2561,7 @@ def parse_habitat_plan(plan_path: str) -> str:
     JSON string with parsed plan metadata
 
     """
+    plan_path = str(_normalize_path(plan_path))
     return _parse_habitat_plan(plan_path)
 
 
@@ -2571,6 +2659,7 @@ def analyse_chef_search_patterns(recipe_or_cookbook_path: str) -> str:
     Analysis of search patterns found.
 
     """
+    recipe_or_cookbook_path = str(_normalize_path(recipe_or_cookbook_path))
     return _analyse_chef_search_patterns(recipe_or_cookbook_path)
 
 
@@ -2593,6 +2682,7 @@ def profile_cookbook_performance(cookbook_path: str) -> str:
     from souschef.profiling import generate_cookbook_performance_report
 
     try:
+        cookbook_path = str(_normalize_path(cookbook_path))
         report = generate_cookbook_performance_report(cookbook_path)
         return str(report)
     except Exception as e:
@@ -2638,6 +2728,7 @@ def profile_parsing_operation(
     func = operation_map[operation]
 
     try:
+        file_path = str(_normalize_path(file_path))
         if detailed:
             _, profile_result = detailed_profile_function(func, file_path)
             result = str(profile_result)
@@ -2681,6 +2772,7 @@ def generate_jenkinsfile_from_chef(
     from souschef.ci.jenkins_pipeline import generate_jenkinsfile_from_chef_ci
 
     try:
+        cookbook_path = str(_normalize_path(cookbook_path))
         # Convert string to boolean
         enable_parallel_bool = enable_parallel.lower() in ("yes", "true", "1")
 
@@ -2723,6 +2815,7 @@ def generate_gitlab_ci_from_chef(
     from souschef.ci.gitlab_ci import generate_gitlab_ci_from_chef_ci
 
     try:
+        cookbook_path = str(_normalize_path(cookbook_path))
         enable_cache_bool = enable_cache.lower() in ("yes", "true", "1")
         enable_artifacts_bool = enable_artifacts.lower() in ("yes", "true", "1")
         result = generate_gitlab_ci_from_chef_ci(
@@ -2768,6 +2861,7 @@ def generate_github_workflow_from_chef(
     from souschef.ci.github_actions import generate_github_workflow_from_chef_ci
 
     try:
+        cookbook_path = str(_normalize_path(cookbook_path))
         enable_cache_bool = enable_cache.lower() in ("yes", "true", "1")
         enable_artifacts_bool = enable_artifacts.lower() in ("yes", "true", "1")
         result = generate_github_workflow_from_chef_ci(
@@ -2789,6 +2883,105 @@
 )
 
 
+@mcp.tool()
+def generate_ansible_repository(
+    output_path: str,
+    repo_type: str = "auto",
+    cookbook_path: str = "",
+    org_name: str = "myorg",
+    init_git: str = "yes",
+) -> str:
+    """
+    Generate a complete Ansible repository structure.
+
+    Analyses converted Chef cookbooks and creates an appropriate Ansible
+    repository structure with proper organisation, configuration files,
+    and git initialisation.
+
+    Repo Types:
+        - auto: Auto-detect based on conversion analysis (recommended)
+        - inventory_first: Classic inventory-first (best for infra management)
+        - playbooks_roles: Simple playbooks + roles (best for small projects)
+        - collection: Ansible Collection layout (best for reusable automation)
+        - mono_repo: Multi-project mono-repo (best for platform teams)
+
+    Args:
+        output_path: Path where the repository should be created
+        repo_type: Type of repository structure (auto/inventory_first/playbooks_roles/collection/mono_repo)
+        cookbook_path: Optional path to Chef cookbook for analysis (used with repo_type='auto')
+        org_name: Organisation name for the repository
+        init_git: Whether to initialise a git repository ('yes' or 'no')
+
+    Returns:
+        JSON string with generation results including success status, files created, and git status
+
+    """
+    from souschef.generators.repo import (
+        analyse_conversion_output,
+    )
+    from souschef.generators.repo import (
+        generate_ansible_repository as gen_repo,
+    )
+
+    try:
+        output_path = str(_normalize_path(output_path))
+        init_git_bool = init_git.lower() in ("yes", "true", "1")
+
+        # Determine repo type
+        if repo_type == "auto":
+            if not cookbook_path:
+                return json.dumps(
+                    {
+                        "success": False,
+                        "error": "cookbook_path required when repo_type='auto'",
+                    }
+                )
+
+            cookbook_path = str(_normalize_path(cookbook_path))
+
+            # Validate cookbook path exists
+            if not Path(cookbook_path).exists():
+                return json.dumps(
+                    {
+                        "success": False,
+                        "error": f"Cookbook path does not exist: {cookbook_path}",
+                    }
+                )
+
+            # Analyse the cookbook to determine best repo type
+            # Count recipes
+            recipes_dir = Path(cookbook_path) / "recipes"
+            num_recipes = (
+                len(list(recipes_dir.glob("*.rb"))) if recipes_dir.exists() else 0
+            )
+
+            # Basic heuristics for repo type selection
+            has_multiple_apps = num_recipes > 5
+            num_roles = max(1, num_recipes // 2)  # Estimate roles from recipes
+
+            determined_type = analyse_conversion_output(
+                cookbook_path=cookbook_path,
+                num_recipes=num_recipes,
+                num_roles=num_roles,
+                has_multiple_apps=has_multiple_apps,
+                needs_multi_env=True,
+            )
+            result = gen_repo(output_path, determined_type, org_name, init_git_bool)
+        else:
+            # Use specified repo type
+            result = gen_repo(output_path, repo_type, org_name, init_git_bool)
+
+        return json.dumps(result, indent=2)
+
+    except Exception as e:
+        return json.dumps(
+            {
+                "success": False,
+                "error": f"Failed to generate repository: {e}",
+            }
+        )
+
+
 @mcp.tool()
 def parse_chef_migration_assessment(
     cookbook_paths: str,
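Note: a worked example of the auto-detection heuristics in the tool above (values illustrative):

```python
# A cookbook with recipes/{default,install,config}.rb:
num_recipes = 3
has_multiple_apps = num_recipes > 5   # False -- small, single-app cookbook
num_roles = max(1, num_recipes // 2)  # 1 estimated role
# These values, plus needs_multi_env=True, feed analyse_conversion_output(), which
# maps them onto one of the documented layouts; a small cookbook like this would
# plausibly land on "playbooks_roles" rather than "mono_repo".
```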
@@ -2917,21 +3110,46 @@ def _setup_conversion_metadata(cookbook_dir: Path, role_name: str) -> tuple[str, str]:
     return cookbook_name, role_name
 
 
+def _validate_role_name(role_name: str) -> None:
+    """
+    Validate that role_name is safe for filesystem operations.
+
+    Args:
+        role_name: The role name to validate.
+
+    Raises:
+        ValueError: If the role name contains unsafe characters.
+
+    """
+    if not role_name:
+        raise ValueError("Role name cannot be empty")
+    if ".." in role_name or "/" in role_name or "\\" in role_name:
+        raise ValueError(f"Role name contains unsafe characters: {role_name}")
+
+
 def _create_role_structure(output_dir: Path, role_name: str) -> Path:
     """Create the standard Ansible role directory structure."""
-    role_dir = output_dir / role_name
+    # Validate role_name to ensure it's safe for filesystem operations
+    _validate_role_name(role_name)
+
+    base = os.path.realpath(str(output_dir))
+    role_dir_str = os.path.realpath(os.path.join(base, role_name))  # noqa: PTH111, PTH118
+    if os.path.commonpath([base, role_dir_str]) != base:
+        raise RuntimeError("Unsafe role path outside output directory")
+    role_dir = Path(role_dir_str)
     role_tasks_dir = role_dir / "tasks"
     role_templates_dir = role_dir / "templates"
     role_vars_dir = role_dir / "vars"
     role_defaults_dir = role_dir / "defaults"
 
+    # All paths are validated via os.path.commonpath containment check above
     for directory in [
         role_tasks_dir,
         role_templates_dir,
         role_vars_dir,
         role_defaults_dir,
     ]:
-        directory.mkdir(parents=True, exist_ok=True)
+        directory.mkdir(parents=True, exist_ok=True)  # nosonar: S2083
 
     return role_dir
 
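Note: `_validate_role_name` already rejects `..`, `/`, and `\`, so the `realpath` + `commonpath` pair in `_create_role_structure` is a second, independent containment check. A self-contained demonstration of why it rejects traversal:

```python
import os

base = os.path.realpath("/srv/roles")
good = os.path.realpath(os.path.join(base, "webserver"))  # /srv/roles/webserver
bad = os.path.realpath(os.path.join(base, "../../etc"))   # /etc after resolution

print(os.path.commonpath([base, good]) == base)  # True  -> directory creation proceeds
print(os.path.commonpath([base, bad]) == base)   # False -> RuntimeError raised above
```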
@@ -2940,9 +3158,11 @@ def _convert_recipes(
     cookbook_dir: Path, role_dir: Path, conversion_summary: dict
 ) -> None:
     """Convert Chef recipes to Ansible tasks."""
-    recipes_dir = cookbook_dir / "recipes"
-
-
+    cookbook_base = os.path.realpath(str(cookbook_dir))
+    recipes_dir_str = os.path.realpath(os.path.join(cookbook_base, "recipes"))  # noqa: PTH111, PTH118
+    if os.path.commonpath([cookbook_base, recipes_dir_str]) != cookbook_base:
+        raise RuntimeError("Unsafe recipes path outside cookbook directory")
+    recipes_dir = Path(recipes_dir_str)
     if not recipes_dir.exists():
         conversion_summary["warnings"].append(
             f"No recipes directory found in {cookbook_dir.name}. "
|
|
|
2952
3172
|
|
|
2953
3173
|
from souschef.converters.playbook import generate_playbook_from_recipe
|
|
2954
3174
|
|
|
2955
|
-
recipe_files =
|
|
3175
|
+
recipe_files = safe_glob(recipes_dir, "*.rb", cookbook_dir)
|
|
2956
3176
|
if not recipe_files:
|
|
2957
3177
|
conversion_summary["warnings"].append(
|
|
2958
3178
|
f"No recipe files (*.rb) found in {cookbook_dir.name}/recipes/. "
|
|
@@ -2962,10 +3182,11 @@ def _convert_recipes(
 
     for recipe_file in recipe_files:
         try:
-            recipe_name = recipe_file.stem
+            validated_recipe = _validated_candidate(recipe_file, recipes_dir)
+            recipe_name = validated_recipe.stem
 
             # Parse recipe to validate it can be processed
-            parse_result = _parse_recipe(str(recipe_file))
+            parse_result = _parse_recipe(str(validated_recipe))
             if parse_result.startswith("Error:"):
                 conversion_summary["errors"].append(
                     f"Failed to parse recipe {recipe_name}: {parse_result}"
@@ -2973,13 +3194,18 @@ def _convert_recipes(
             continue
 
             # Convert to Ansible tasks
-            playbook_yaml = generate_playbook_from_recipe(str(recipe_file))
+            playbook_yaml = generate_playbook_from_recipe(str(validated_recipe))
 
-            # Write as task file
-            task_file = (
-                role_dir / "tasks" / f"{recipe_name}.yml"
-            )
-            task_file.write_text(playbook_yaml)
+            # Write as task file; _safe_join already enforces containment within role_dir
+            task_file = _safe_join(role_dir, "tasks", f"{recipe_name}.yml")
+            try:
+                task_file.parent.mkdir(parents=True, exist_ok=True)  # nosonar
+                safe_write_text(task_file, role_dir, playbook_yaml)
+            except OSError as write_err:
+                conversion_summary["errors"].append(
+                    f"Failed to write task file {task_file.name}: {write_err}"
+                )
+                continue
 
             conversion_summary["converted_files"].append(
                 {
@@ -2999,19 +3225,21 @@ def _convert_templates(
     cookbook_dir: Path, role_dir: Path, conversion_summary: dict
 ) -> None:
     """Convert ERB templates to Jinja2 templates."""
-    templates_dir = cookbook_dir / "templates"
-    role_templates_dir = role_dir / "templates"
+    templates_dir = _safe_join(cookbook_dir, "templates")
 
     if not templates_dir.exists():
         return
 
-    for template_file in templates_dir.glob("**/*.erb"):
+    for template_file in safe_glob(templates_dir, "**/*.erb", cookbook_dir):
+        validated_template = template_file
         try:
             # Convert ERB to Jinja2
-            conversion_result = _parse_template(str(template_file))
+            validated_template = _validated_candidate(template_file, templates_dir)
+
+            conversion_result = _parse_template(str(validated_template))
             if conversion_result.startswith("Error:"):
                 conversion_summary["errors"].append(
-                    f"Failed to convert template {template_file.name}: {conversion_result}"
+                    f"Failed to convert template {validated_template.name}: {conversion_result}"
                 )
                 continue
 
@@ -3020,15 +3248,15 @@ def _convert_templates(
             template_data = json.loads(conversion_result)
             jinja2_content = template_data.get("jinja2_template", "")
 
-            # Determine relative path for role templates
-            rel_path = template_file.relative_to(templates_dir)
-
-
-            target_file = (
-                role_templates_dir / rel_path.with_suffix("")
-            )
+            # Determine relative path for role templates using _safe_join
+
+            rel_path = validated_template.relative_to(templates_dir)
+            # Build target file path with inline containment guard
+            target_file = _safe_join(
+                role_dir, "templates", str(rel_path.with_suffix(""))
+            )
             target_file.parent.mkdir(parents=True, exist_ok=True)
-            target_file.write_text(jinja2_content)
+            safe_write_text(target_file, role_dir, jinja2_content)
 
             conversion_summary["converted_files"].append(
                 {
@@ -3040,12 +3268,12 @@ def _convert_templates(
 
             except json.JSONDecodeError:
                 conversion_summary["errors"].append(
-                    f"Invalid JSON result for template {template_file.name}"
+                    f"Invalid JSON result for template {validated_template.name}"
                 )
 
         except Exception as e:
             conversion_summary["errors"].append(
-                f"Error converting template {template_file.name}: {str(e)}"
+                f"Error converting template {validated_template.name}: {str(e)}"
             )
 
 
@@ -3053,30 +3281,25 @@ def _convert_attributes(
     cookbook_dir: Path, role_dir: Path, conversion_summary: dict
 ) -> None:
     """Convert Chef attributes to Ansible variables."""
-    import yaml
-
-    attributes_dir = cookbook_dir / "attributes"
-    role_defaults_dir = role_dir / "defaults"
+    attributes_dir = _safe_join(cookbook_dir, "attributes")
+    role_defaults_dir = _safe_join(role_dir, "defaults")
 
     if not attributes_dir.exists():
         return
 
-    for attr_file in attributes_dir.glob("*.rb"):
+    for attr_file in safe_glob(attributes_dir, "*.rb", cookbook_dir):
+        validated_attr = attr_file
         try:
+            validated_attr = _validated_candidate(attr_file, attributes_dir)
             # Read the file content
-            content = attr_file.read_text()
-
-            # Extract attributes using internal function
-            from souschef.parsers.attributes import (
-                _extract_attributes,
-                _resolve_attribute_precedence,
-            )
+            content = safe_read_text(validated_attr, cookbook_dir)
 
+            # Extract attributes (already imported at top of file)
             raw_attributes = _extract_attributes(content)
 
             if not raw_attributes:
                 conversion_summary["warnings"].append(
-                    f"No attributes found in {attr_file.name}"
+                    f"No attributes found in {validated_attr.name}"
                )
                 continue
@@ -3091,24 +3314,25 @@ def _convert_attributes(
                 ansible_key = attr_path.replace(".", "_")
                 ansible_vars[ansible_key] = attr_info["value"]
 
-            # Write as defaults
-            defaults_file = (
-                role_defaults_dir / f"{attr_file.stem}.yml"
-            )
+            # Write as defaults using _safe_join to prevent path injection
+            # All paths are validated via _safe_join to ensure containment within role_defaults_dir
+            defaults_filename: str = f"{validated_attr.stem}.yml"
+            defaults_file: Path = _safe_join(role_defaults_dir, defaults_filename)
             defaults_yaml = yaml.dump(ansible_vars, default_flow_style=False, indent=2)
-            defaults_file.write_text(defaults_yaml)
+            defaults_file.parent.mkdir(parents=True, exist_ok=True)
+            safe_write_text(defaults_file, role_dir, defaults_yaml)
 
             conversion_summary["converted_files"].append(
                 {
                     "type": "defaults",
-                    "source": f"attributes/{attr_file.name}",
-                    "target": f"{role_dir.name}/defaults/{attr_file.stem}.yml",
+                    "source": f"attributes/{validated_attr.name}",
+                    "target": f"{role_dir.name}/defaults/{validated_attr.stem}.yml",
                 }
             )
 
         except Exception as e:
             conversion_summary["errors"].append(
-                f"Error converting attributes {attr_file.name}: {str(e)}"
+                f"Error converting attributes {validated_attr.name}: {str(e)}"
             )
 
 
@@ -3119,11 +3343,16 @@ def _create_main_task_file(
     if not include_recipes:
         return
 
-    default_task_file = role_dir / "tasks" / "main.yml"
+    # Build path to tasks directory safely
+    tasks_dir: Path = _safe_join(role_dir, "tasks")
+    # Build path to main.yml within tasks directory
+    default_task_file: Path = _safe_join(tasks_dir, "main.yml")
     if default_task_file.exists():
         return  # Already exists
 
-    default_recipe = cookbook_dir / "recipes" / "default.rb"
+    # Build path to default recipe safely
+    recipes_dir: Path = _safe_join(cookbook_dir, "recipes")
+    default_recipe: Path = _safe_join(recipes_dir, "default.rb")
     if not default_recipe.exists():
         return
@@ -3131,7 +3360,8 @@ def _create_main_task_file(
     from souschef.converters.playbook import generate_playbook_from_recipe
 
     playbook_yaml = generate_playbook_from_recipe(str(default_recipe))
-    default_task_file.write_text(playbook_yaml)
+    default_task_file.parent.mkdir(parents=True, exist_ok=True)
+    safe_write_text(default_task_file, role_dir, playbook_yaml)
     conversion_summary["converted_files"].append(
         {
             "type": "task",
@@ -3153,14 +3383,11 @@ def _create_role_metadata(
     conversion_summary: dict,
 ) -> None:
     """Create Ansible role metadata file."""
-
+    # Use _safe_join to construct metadata file path
 
-
-    meta_dir = role_dir / "meta"
-    meta_dir.mkdir(parents=True, exist_ok=True)
-    meta_file = (
-        meta_dir / "main.yml"
-    )  # nosemgrep: python.lang.security.audit.dynamic-urllib-use-detected
+    meta_dir = _safe_join(role_dir, "meta")
+    meta_dir.mkdir(parents=True, exist_ok=True)
+    meta_file = _safe_join(meta_dir, "main.yml")
 
     meta_content: dict[str, Any] = {
         "galaxy_info": {
@@ -3182,7 +3409,7 @@ def _create_role_metadata(
     meta_content["dependencies"] = [{"role": dep} for dep in deps]
 
     meta_yaml = yaml.dump(meta_content, default_flow_style=False, indent=2)
-    meta_file.write_text(meta_yaml)
+    safe_write_text(meta_file, role_dir, meta_yaml)
 
     conversion_summary["converted_files"].append(
         {
@@ -3210,13 +3437,13 @@ def _generate_conversion_report(conversion_summary: dict, role_dir: Path) -> str:
     summary_lines.append("")
     summary_lines.append("## Errors:")
     for error in conversion_summary["errors"]:
-        summary_lines.append(f"-
+        summary_lines.append(f"- ERROR: {error}")
 
     if conversion_summary["warnings"]:
         summary_lines.append("")
         summary_lines.append("## Warnings:")
         for warning in conversion_summary["warnings"]:
-            summary_lines.append(f"-
+            summary_lines.append(f"- WARNING: {warning}")
 
     summary_lines.append("")
     summary_lines.append(f"## Role Location: {role_dir}")
@@ -3291,14 +3518,25 @@ def _validate_conversion_paths(
     cookbooks_path: str, output_path: str
 ) -> tuple[Path, Path]:
     """Validate and return Path objects for conversion paths."""
-    cookbooks_dir = Path(cookbooks_path)
+    base_dir = Path.cwd().resolve()
 
-    output_dir = Path(output_path)
-
+    cookbooks_candidate = _normalize_path(cookbooks_path)
+    try:
+        cookbooks_dir = _ensure_within_base_path(cookbooks_candidate, base_dir)
+    except ValueError as e:
+        raise ValueError(f"Cookbooks path is invalid or outside workspace: {e}") from e
+
+    from souschef.core.path_utils import safe_exists
 
-    if not cookbooks_dir.exists():
+    if not safe_exists(cookbooks_dir, base_dir):
         raise ValueError(f"Cookbooks path does not exist: {cookbooks_path}")
 
+    output_candidate = _normalize_path(output_path)
+    try:
+        output_dir = _ensure_within_base_path(output_candidate, base_dir)
+    except ValueError as e:
+        raise ValueError(f"Output path is invalid or outside workspace: {e}") from e
+
     return cookbooks_dir, output_dir
 
 
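Note: unlike `_sanitize_cookbook_paths_input`, which accepts any absolute path, `_validate_conversion_paths` now anchors both arguments to the process working directory via `_ensure_within_base_path`. A self-contained sketch of the containment rule it appears to enforce (inferred from the error messages above, not shown in this diff):

```python
from pathlib import Path

base_dir = Path.cwd().resolve()
inside = (base_dir / "cookbooks").resolve()
outside = Path("/etc").resolve()

print(inside.is_relative_to(base_dir))   # True  -> accepted as cookbooks_dir
print(outside.is_relative_to(base_dir))  # False -> raises "Cookbooks path is
                                         #          invalid or outside workspace"
```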