mcp-souschef 2.0.1__py3-none-any.whl → 2.1.2__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- {mcp_souschef-2.0.1.dist-info → mcp_souschef-2.1.2.dist-info}/METADATA +427 -79
- mcp_souschef-2.1.2.dist-info/RECORD +29 -0
- souschef/__init__.py +17 -0
- souschef/assessment.py +1230 -0
- souschef/converters/__init__.py +23 -0
- souschef/converters/habitat.py +674 -0
- souschef/converters/playbook.py +1698 -0
- souschef/converters/resource.py +228 -0
- souschef/core/__init__.py +58 -0
- souschef/core/constants.py +145 -0
- souschef/core/path_utils.py +58 -0
- souschef/core/ruby_utils.py +39 -0
- souschef/core/validation.py +555 -0
- souschef/deployment.py +1594 -0
- souschef/filesystem/__init__.py +5 -0
- souschef/filesystem/operations.py +67 -0
- souschef/parsers/__init__.py +36 -0
- souschef/parsers/attributes.py +257 -0
- souschef/parsers/habitat.py +288 -0
- souschef/parsers/inspec.py +771 -0
- souschef/parsers/metadata.py +175 -0
- souschef/parsers/recipe.py +200 -0
- souschef/parsers/resource.py +170 -0
- souschef/parsers/template.py +342 -0
- souschef/server.py +1532 -7599
- mcp_souschef-2.0.1.dist-info/RECORD +0 -8
- {mcp_souschef-2.0.1.dist-info → mcp_souschef-2.1.2.dist-info}/WHEEL +0 -0
- {mcp_souschef-2.0.1.dist-info → mcp_souschef-2.1.2.dist-info}/entry_points.txt +0 -0
- {mcp_souschef-2.0.1.dist-info → mcp_souschef-2.1.2.dist-info}/licenses/LICENSE +0 -0

souschef/converters/playbook.py (new file):

@@ -0,0 +1,1698 @@
"""
Chef recipe to Ansible playbook and inventory conversion.

This module provides tools to convert Chef recipes to complete Ansible playbooks,
convert Chef search queries to Ansible inventory structures, and generate dynamic
inventory scripts.
"""

import json
import re
from datetime import datetime
from pathlib import Path
from typing import Any

from souschef.converters.resource import (
    _convert_chef_resource_to_ansible,
    _format_ansible_task,
)
from souschef.core.constants import (
    ANSIBLE_SERVICE_MODULE,
    ERROR_PREFIX,
    JINJA2_VAR_REPLACEMENT,
    NODE_PREFIX,
    REGEX_QUOTE_DO_END,
    REGEX_RESOURCE_BRACKET,
    REGEX_RUBY_INTERPOLATION,
    REGEX_WHITESPACE_QUOTE,
)
from souschef.core.path_utils import _normalize_path, _safe_join
from souschef.parsers.recipe import parse_recipe

# Maximum length for guard condition patterns in regex matching
MAX_GUARD_LENGTH = 500


def generate_playbook_from_recipe(recipe_path: str) -> str:
    """
    Generate a complete Ansible playbook from a Chef recipe.

    Args:
        recipe_path: Path to the Chef recipe (.rb) file.

    Returns:
        Complete Ansible playbook in YAML format with tasks, handlers, and
        variables.

    """
    try:
        # First, parse the recipe to extract resources
        recipe_content: str = parse_recipe(recipe_path)

        if recipe_content.startswith(ERROR_PREFIX):
            return recipe_content

        # Parse the raw recipe file for advanced features
        recipe_file = _normalize_path(recipe_path)
        if not recipe_file.exists():
            return f"{ERROR_PREFIX} Recipe file does not exist: {recipe_path}"

        raw_content = recipe_file.read_text()

        # Generate playbook structure
        playbook: str = _generate_playbook_structure(
            recipe_content, raw_content, recipe_file.name
        )

        return playbook

    except Exception as e:
        return f"Error generating playbook: {e}"


def convert_chef_search_to_inventory(search_query: str) -> str:
    """
    Convert a Chef search query to Ansible inventory patterns and groups.

    Args:
        search_query: Chef search query (e.g.,
            "role:web AND environment:production").

    Returns:
        JSON string with Ansible inventory patterns and group definitions.

    """
    try:
        # Parse the Chef search query
        search_info = _parse_chef_search_query(search_query)

        # Convert to Ansible inventory patterns
        inventory_config = _generate_ansible_inventory_from_search(search_info)

        return json.dumps(inventory_config, indent=2)

    except Exception as e:
        return f"Error converting Chef search: {e}"


def generate_dynamic_inventory_script(search_queries: str) -> str:
    """
    Generate a Python dynamic inventory script from Chef search queries.

    Args:
        search_queries: JSON string containing Chef search queries and group
            names.

    Returns:
        Complete Python script for Ansible dynamic inventory.

    """
    try:
        queries_data = json.loads(search_queries)

        # Generate dynamic inventory script
        script_content = _generate_inventory_script_content(queries_data)

        return script_content

    except json.JSONDecodeError:
        return "Error: Invalid JSON format for search queries"
    except Exception as e:
        return f"Error generating dynamic inventory script: {e}"


def analyze_chef_search_patterns(recipe_or_cookbook_path: str) -> str:
    """
    Analyze recipes/cookbooks to extract search patterns for inventory planning.

    Args:
        recipe_or_cookbook_path: Path to Chef recipe file or cookbook directory.

    Returns:
        JSON string with discovered search patterns and recommended inventory
        structure.

    """
    try:
        path_obj = _normalize_path(recipe_or_cookbook_path)

        if path_obj.is_file():
            # Single recipe file
            search_patterns = _extract_search_patterns_from_file(path_obj)
        elif path_obj.is_dir():
            # Cookbook directory
            search_patterns = _extract_search_patterns_from_cookbook(path_obj)
        else:
            return f"Error: Path {recipe_or_cookbook_path} does not exist"

        # Generate inventory recommendations
        recommendations = _generate_inventory_recommendations(search_patterns)

        return json.dumps(
            {
                "discovered_searches": search_patterns,
                "inventory_recommendations": recommendations,
            },
            indent=2,
        )

    except Exception as e:
        return f"Error analyzing Chef search patterns: {e}"
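
The four functions above are the module's public surface. A minimal sketch of driving them (the recipe path and query are illustrative; assumes the wheel is installed):

```python
from souschef.converters.playbook import (
    convert_chef_search_to_inventory,
    generate_playbook_from_recipe,
)

# Illustrative inputs; any recipe file and Chef search query work the same way.
print(generate_playbook_from_recipe("cookbooks/nginx/recipes/default.rb"))
print(convert_chef_search_to_inventory("role:web AND environment:production"))
```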


# Chef search query parsing


def _determine_search_index(normalized_query: str) -> str:
    """Determine the search index from the query."""
    index_match = re.match(r"^(\w+):", normalized_query)
    if index_match:
        potential_index = index_match.group(1)
        if potential_index in ["role", "environment", "tag", "platform"]:
            return "node"  # These are node attributes
        return potential_index
    return "node"


def _extract_query_parts(
    normalized_query: str,
) -> tuple[list[dict[str, str]], list[str]]:
    """Extract conditions and operators from query."""
    operator_pattern = r"\s{1,50}(AND|OR|NOT)\s{1,50}"
    parts = re.split(operator_pattern, normalized_query, flags=re.IGNORECASE)

    conditions: list[dict[str, str]] = []
    operators: list[str] = []

    for part in parts:
        part = part.strip()
        if part.upper() in ["AND", "OR", "NOT"]:
            operators.append(part.upper())
        elif part:  # Non-empty condition
            condition = _parse_search_condition(part)
            if condition:
                conditions.append(condition)

    return conditions, operators


def _determine_query_complexity(
    conditions: list[dict[str, str]], operators: list[str]
) -> str:
    """Determine query complexity level."""
    if len(conditions) > 1 or operators:
        return "complex"
    elif any(cond.get("operator") in ["~", "!="] for cond in conditions):
        return "intermediate"
    return "simple"


def _parse_chef_search_query(query: str) -> dict[str, Any]:
    """Parse a Chef search query into structured components."""
    normalized_query = query.strip()

    search_info: dict[str, Any] = {
        "original_query": query,
        "index": _determine_search_index(normalized_query),
        "conditions": [],
        "logical_operators": [],
        "complexity": "simple",
    }

    conditions, operators = _extract_query_parts(normalized_query)

    search_info["conditions"] = conditions
    search_info["logical_operators"] = operators
    search_info["complexity"] = _determine_query_complexity(conditions, operators)

    return search_info


def _parse_search_condition(condition: str) -> dict[str, str]:
    """Parse a single search condition."""
    # Handle different condition patterns
    patterns = [
        # Wildcard search: role:web*
        (r"^(\w+):([^:]*\*)$", "wildcard"),
        # Regex search: role:~web.*
        (r"^(\w+):~(.+)$", "regex"),
        # Not equal: role:!web
        (r"^(\w+):!(.+)$", "not_equal"),
        # Range: memory:(>1024 AND <4096)
        (r"^(\w+):\(([^)]+)\)$", "range"),
        # Simple key:value
        (r"^(\w+):(.+)$", "equal"),
        # Tag search: tags:web
        (r"^tags?:(.+)$", "tag"),
    ]

    for pattern, condition_type in patterns:
        match = re.match(pattern, condition.strip())
        if match:
            if condition_type == "tag":
                return {
                    "type": condition_type,
                    "key": "tags",
                    "value": match.group(1),
                    "operator": "contains",
                }
            elif condition_type in ["wildcard", "regex", "not_equal", "range"]:
                return {
                    "type": condition_type,
                    "key": match.group(1),
                    "value": match.group(2),
                    "operator": condition_type,
                }
            else:  # equal
                return {
                    "type": condition_type,
                    "key": match.group(1),
                    "value": match.group(2),
                    "operator": "equal",
                }

    # Fallback for unrecognized patterns
    return {
        "type": "unknown",
        "key": "unknown",
        "value": condition,
        "operator": "equal",
    }
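
The patterns list is tried in order, so the generic key:value pattern claims `tags:web` before the dedicated tag pattern below it can ever fire. A standalone sketch of the first-match dispatch (mirrors the regexes above rather than importing the private helper):

```python
import re

patterns = [
    (r"^(\w+):([^:]*\*)$", "wildcard"),
    (r"^(\w+):~(.+)$", "regex"),
    (r"^(\w+):!(.+)$", "not_equal"),
    (r"^(\w+):\(([^)]+)\)$", "range"),
    (r"^(\w+):(.+)$", "equal"),
]

for condition in ["role:web*", "role:~web.*", "role:!web",
                  "memory:(>1024 AND <4096)", "tags:web"]:
    kind = next(k for p, k in patterns if re.match(p, condition))
    print(f"{condition} -> {kind}")
# wildcard, regex, not_equal, range, equal (the tag pattern is shadowed)
```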


# Ansible inventory generation


def _should_use_dynamic_inventory(search_info: dict[str, Any]) -> bool:
    """Determine if dynamic inventory is needed based on search complexity."""
    return (
        search_info["complexity"] != "simple"
        or len(search_info["conditions"]) > 1
        or any(
            cond.get("operator") in ["regex", "wildcard", "range"]
            for cond in search_info["conditions"]
        )
    )


def _create_group_config_for_equal_condition(
    condition: dict[str, str],
) -> dict[str, Any]:
    """Create group configuration for equal operator conditions."""
    group_config: dict[str, Any] = {"hosts": [], "vars": {}, "children": []}
    key = condition["key"]
    value = condition["value"]

    if key == "role":
        group_config["hosts"] = [f"# Hosts with role: {value}"]
        return group_config
    elif key == "environment":
        group_config["vars"]["environment"] = value
        group_config["hosts"] = [f"# Hosts in environment: {value}"]
        return group_config
    elif key == "platform":
        group_config["vars"]["ansible_os_family"] = value.capitalize()
        group_config["hosts"] = [f"# {value} hosts"]
        return group_config
    elif key == "tags":
        group_config["vars"]["tags"] = [value]
        group_config["hosts"] = [f"# Hosts tagged with: {value}"]
        return group_config

    return group_config


def _create_group_config_for_pattern_condition(
    condition: dict[str, str],
) -> dict[str, Any]:
    """Create group configuration for wildcard/regex conditions."""
    operator = condition["operator"]
    pattern_type = "pattern" if operator == "wildcard" else "regex"
    return {
        "hosts": [
            (
                f"# Hosts matching {pattern_type}: "
                f"{condition['key']}:{condition['value']}"
            )
        ],
        "vars": {},
        "children": [],
    }


def _generate_group_name_from_condition(condition: dict[str, str], index: int) -> str:
    """Generate an Ansible group name from a search condition."""
    # Sanitize values for group names
    key = condition.get("key", "unknown").lower()
    value = condition.get("value", "unknown").lower()

    # Remove special characters and replace with underscores
    key = re.sub(r"[^a-z0-9_]", "_", key)
    value = re.sub(r"[^a-z0-9_]", "_", value)

    # Create meaningful group name
    if condition.get("operator") == "equal":
        return f"{key}_{value}"
    elif condition.get("operator") == "wildcard":
        return f"{key}_wildcard_{index}"
    elif condition.get("operator") == "regex":
        return f"{key}_regex_{index}"
    elif condition.get("operator") == "not_equal":
        return f"not_{key}_{value}"
    else:
        return f"search_condition_{index}"


def _process_search_condition(
    condition: dict[str, str], index: int, inventory_config: dict[str, Any]
) -> None:
    """Process a single search condition and update inventory config."""
    group_name = _generate_group_name_from_condition(condition, index)

    if condition["operator"] == "equal":
        group_config = _create_group_config_for_equal_condition(condition)
        # Add role variable if it's a role condition
        if condition["key"] == "role":
            inventory_config["variables"][f"{group_name}_role"] = condition["value"]
    elif condition["operator"] in ["wildcard", "regex"]:
        group_config = _create_group_config_for_pattern_condition(condition)
        inventory_config["dynamic_script_needed"] = True
    else:
        group_config = {"hosts": [], "vars": {}, "children": []}

    inventory_config["groups"][group_name] = group_config


def _generate_ansible_inventory_from_search(
    search_info: dict[str, Any],
) -> dict[str, Any]:
    """Generate Ansible inventory structure from parsed Chef search."""
    inventory_config: dict[str, Any] = {
        "inventory_type": "static",
        "groups": {},
        "host_patterns": [],
        "variables": {},
        "dynamic_script_needed": False,
    }

    # Determine if we need dynamic inventory
    if _should_use_dynamic_inventory(search_info):
        inventory_config["inventory_type"] = "dynamic"
        inventory_config["dynamic_script_needed"] = True

    # Process each condition
    for i, condition in enumerate(search_info["conditions"]):
        _process_search_condition(condition, i, inventory_config)

    # Handle logical operators by creating combined groups
    if search_info["logical_operators"]:
        combined_group_name = "combined_search_results"
        inventory_config["groups"][combined_group_name] = {
            "children": list(inventory_config["groups"].keys()),
            "vars": {"chef_search_query": search_info["original_query"]},
        }

    return inventory_config
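
For a simple query such as "role:web", the structure returned above comes out as follows (hand-derived from the code, not captured from a run):

```python
inventory_config = {
    "inventory_type": "static",
    "groups": {
        "role_web": {
            "hosts": ["# Hosts with role: web"],
            "vars": {},
            "children": [],
        },
    },
    "host_patterns": [],
    "variables": {"role_web_role": "web"},
    "dynamic_script_needed": False,
}
```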


def _generate_inventory_script_content(queries_data: list[dict[str, str]]) -> str:
    """Generate Python dynamic inventory script content."""
    script_template = '''#!/usr/bin/env python3
"""Dynamic Ansible Inventory Script.

Generated from Chef search queries by SousChef

This script converts Chef search queries to Ansible inventory groups.
Requires: python-requests (for Chef server API)
"""
import json
import sys
import argparse
from typing import Dict, List, Any

# Chef server configuration
CHEF_SERVER_URL = "https://your-chef-server"
CLIENT_NAME = "your-client-name"
CLIENT_KEY_PATH = "/path/to/client.pem"

# Search query to group mappings
SEARCH_QUERIES = {search_queries_json}


def get_chef_nodes(search_query: str) -> List[Dict[str, Any]]:
    """Query Chef server for nodes matching search criteria.

    Args:
        search_query: Chef search query string

    Returns:
        List of node objects from Chef server
    """
    # TODO: Implement Chef server API client
    # This is a placeholder - implement Chef server communication
    # using python-chef library or direct API calls

    # Example structure of what this should return:
    return [
        {
            "name": "web01.example.com",
            "roles": ["web"],
            "environment": "production",
            "platform": "ubuntu",
            "ipaddress": "10.0.1.10"
        }
    ]


def build_inventory() -> Dict[str, Any]:
    """Build Ansible inventory from Chef searches.

    Returns:
        Ansible inventory dictionary
    """
    inventory = {
        "_meta": {
            "hostvars": {}
        }
    }

    for group_name, search_query in SEARCH_QUERIES.items():
        inventory[group_name] = {
            "hosts": [],
            "vars": {
                "chef_search_query": search_query
            }
        }

        try:
            nodes = get_chef_nodes(search_query)

            for node in nodes:
                hostname = node.get("name", node.get("fqdn", "unknown"))
                inventory[group_name]["hosts"].append(hostname)

                # Add host variables
                inventory["_meta"]["hostvars"][hostname] = {
                    "chef_roles": node.get("roles", []),
                    "chef_environment": node.get("environment", ""),
                    "chef_platform": node.get("platform", ""),
                    "ansible_host": node.get("ipaddress", hostname)
                }

        except Exception as e:
            print(
                f"Error querying Chef server for group {group_name}: {e}",
                file=sys.stderr,
            )

    return inventory


def main():
    """Main entry point for dynamic inventory script."""
    parser = argparse.ArgumentParser(
        description="Dynamic Ansible Inventory from Chef"
    )
    parser.add_argument(
        "--list", action="store_true", help="List all groups and hosts"
    )
    parser.add_argument("--host", help="Get variables for specific host")

    args = parser.parse_args()

    if args.list:
        inventory = build_inventory()
        print(json.dumps(inventory, indent=2))
    elif args.host:
        # Return empty dict for host-specific queries
        # All host vars are included in _meta/hostvars
        print(json.dumps({}))
    else:
        parser.print_help()


if __name__ == "__main__":
    main()
'''

    # Convert queries_data to JSON string for embedding
    queries_json = json.dumps(
        {
            item.get("group_name", f"group_{i}"): item.get("search_query", "")
            for i, item in enumerate(queries_data)
        },
        indent=4,
    )

    return script_template.replace("{search_queries_json}", queries_json)
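
A sketch of rendering the script and pointing Ansible at it (the group names, file name, and playbook are illustrative, and the generated script still needs its Chef API stub filled in):

```python
import json

from souschef.converters.playbook import generate_dynamic_inventory_script

queries = json.dumps(
    [
        {"group_name": "web_servers", "search_query": "role:web"},
        {"group_name": "production", "search_query": "environment:production"},
    ]
)

with open("chef_inventory.py", "w") as handle:
    handle.write(generate_dynamic_inventory_script(queries))

# Then, for example:  ansible-playbook -i chef_inventory.py site.yml
```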


# Search pattern extraction


def _extract_search_patterns_from_file(file_path: Path) -> list[dict[str, str]]:
    """Extract Chef search patterns from a single recipe file."""
    try:
        content = file_path.read_text()
        return _find_search_patterns_in_content(content, str(file_path))
    except Exception:
        return []


def _extract_search_patterns_from_cookbook(cookbook_path: Path) -> list[dict[str, str]]:
    """Extract Chef search patterns from all files in a cookbook."""
    patterns = []

    # Search in recipes directory
    recipes_dir = _safe_join(cookbook_path, "recipes")
    if recipes_dir.exists():
        for recipe_file in recipes_dir.glob("*.rb"):
            file_patterns = _extract_search_patterns_from_file(recipe_file)
            patterns.extend(file_patterns)

    # Search in libraries directory
    libraries_dir = _safe_join(cookbook_path, "libraries")
    if libraries_dir.exists():
        for library_file in libraries_dir.glob("*.rb"):
            file_patterns = _extract_search_patterns_from_file(library_file)
            patterns.extend(file_patterns)

    # Search in resources directory
    resources_dir = _safe_join(cookbook_path, "resources")
    if resources_dir.exists():
        for resource_file in resources_dir.glob("*.rb"):
            file_patterns = _extract_search_patterns_from_file(resource_file)
            patterns.extend(file_patterns)

    return patterns


def _find_search_patterns_in_content(
    content: str, file_path: str
) -> list[dict[str, str]]:
    """Find Chef search patterns in file content."""
    patterns = []

    # Common Chef search patterns
    search_patterns = [
        # search(:node, "role:web")
        r'search\s*\(\s*:?(\w+)\s*,\s*[\'"]([^\'"]+)[\'"]\s*\)',
        # partial_search(:node, "environment:production")
        r'partial_search\s*\(\s*:?(\w+)\s*,\s*[\'"]([^\'"]+)[\'"]\s*\)',
        # data_bag_item with search-like queries
        r'data_bag_item\s*\(\s*[\'"](\w+)[\'"]\s*,\s*[\'"]([^\'"]+)[\'"]\s*\)',
        # Node attribute queries that imply searches
        r'node\[[\'"](\w+)[\'"]\]\[[\'"]([^\'"]+)[\'"]\]',
    ]

    for pattern in search_patterns:
        matches = re.finditer(pattern, content, re.IGNORECASE)
        for match in matches:
            if "search" in pattern:
                # Full search patterns
                search_type = match.group(1)
                query = match.group(2)
                patterns.append(
                    {
                        "type": "search",
                        "index": search_type,
                        "query": query,
                        "file": file_path,
                        "context": _extract_context(content, match),
                    }
                )
            elif "data_bag_item" in pattern:
                # Data bag patterns (related to search)
                bag_name = match.group(1)
                item_name = match.group(2)
                patterns.append(
                    {
                        "type": "data_bag_access",
                        "bag": bag_name,
                        "item": item_name,
                        "file": file_path,
                        "context": _extract_context(content, match),
                    }
                )
            else:
                # Node attribute patterns
                attr_key = match.group(1)
                attr_value = match.group(2)
                patterns.append(
                    {
                        "type": "node_attribute",
                        "key": attr_key,
                        "value": attr_value,
                        "file": file_path,
                        "context": _extract_context(content, match),
                    }
                )

    return patterns


def _extract_context(content: str, match: re.Match[str]) -> str:
    """Extract context around a regex match."""
    start = max(0, match.start() - 50)
    end = min(len(content), match.end() + 50)
    context = content[start:end].strip()

    # Clean up context
    lines = context.split("\n")
    if len(lines) > 3:
        # Keep middle line and one line before/after
        mid = len(lines) // 2
        lines = lines[mid - 1 : mid + 2]

    return "...".join(lines)
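
A standalone sketch of the 50-character context window the helper above cuts around each match:

```python
import re

content = "a" * 80 + 'search(:node, "role:web")' + "b" * 80
match = re.search(r"search\([^)]*\)", content)
start = max(0, match.start() - 50)
end = min(len(content), match.end() + 50)
print(content[start:end].strip())  # 50 a's, the search call, 50 b's
```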


# Inventory recommendations


def _count_pattern_types(patterns: list[dict[str, str]]) -> dict[str, int]:
    """Count pattern types from list of patterns."""
    pattern_types: dict[str, int] = {}
    for pattern in patterns:
        ptype = pattern.get("type", "unknown")
        pattern_types[ptype] = pattern_types.get(ptype, 0) + 1
    return pattern_types


def _extract_groups_from_query(query: str) -> tuple[str | None, str | None]:
    """Extract role and environment from a single query."""
    role = None
    env = None
    if "role:" in query:
        role_match = re.search(r"role:([^\s]+)", query)
        if role_match:
            role = role_match.group(1)
    if "environment:" in query:
        env_match = re.search(r"environment:([^\s]+)", query)
        if env_match:
            env = env_match.group(1)
    return role, env


def _extract_role_and_environment_groups(
    patterns: list[dict[str, str]],
) -> tuple[set[str], set[str]]:
    """Extract role and environment groups from patterns."""
    role_groups: set[str] = set()
    environment_groups: set[str] = set()

    for pattern in patterns:
        if pattern.get("type") != "search":
            continue
        role, env = _extract_groups_from_query(pattern.get("query", ""))
        if role:
            role_groups.add(role)
        if env:
            environment_groups.add(env)

    return role_groups, environment_groups


def _add_group_recommendations(
    recommendations: dict[str, Any],
    role_groups: set[str],
    environment_groups: set[str],
) -> None:
    """Add group recommendations based on discovered groups."""
    for role in role_groups:
        recommendations["groups"][f"role_{role}"] = {
            "description": f"Hosts with Chef role: {role}",
            "vars": {"chef_role": role},
        }

    for env in environment_groups:
        recommendations["groups"][f"env_{env}"] = {
            "description": f"Hosts in Chef environment: {env}",
            "vars": {"chef_environment": env},
        }


def _add_general_recommendations(
    recommendations: dict[str, Any], patterns: list[dict[str, str]]
) -> None:
    """Add general migration recommendations based on patterns."""
    if len(patterns) > 5:
        recommendations["notes"].append(
            "Complex search patterns - consider Chef server integration"
        )

    if any(p.get("type") == "data_bag_access" for p in patterns):
        recommendations["notes"].append(
            "Data bag access detected - consider Ansible Vault migration"
        )


def _generate_inventory_recommendations(
    patterns: list[dict[str, str]],
) -> dict[str, Any]:
    """Generate inventory structure recommendations from search patterns."""
    recommendations: dict[str, Any] = {
        "groups": {},
        "structure": "static",  # vs dynamic
        "variables": {},
        "notes": [],
    }

    # Count pattern types and recommend structure
    pattern_types = _count_pattern_types(patterns)
    if pattern_types.get("search", 0) > 2:
        recommendations["structure"] = "dynamic"
        recommendations["notes"].append(
            "Multiple search patterns detected - dynamic inventory recommended"
        )

    # Extract and add group recommendations
    role_groups, environment_groups = _extract_role_and_environment_groups(patterns)
    _add_group_recommendations(recommendations, role_groups, environment_groups)

    # Add general recommendations
    _add_general_recommendations(recommendations, patterns)

    return recommendations
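
A hand-derived sketch of the recommendations for a cookbook whose recipes call search(:node, "role:web AND environment:production") in three places:

```python
recommendations = {
    "groups": {
        "role_web": {
            "description": "Hosts with Chef role: web",
            "vars": {"chef_role": "web"},
        },
        "env_production": {
            "description": "Hosts in Chef environment: production",
            "vars": {"chef_environment": "production"},
        },
    },
    "structure": "dynamic",
    "variables": {},
    "notes": ["Multiple search patterns detected - dynamic inventory recommended"],
}
```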


# Playbook generation


def _build_playbook_header(recipe_name: str) -> list[str]:
    """Build playbook header with metadata."""
    return [
        "---",
        f"# Ansible playbook generated from Chef recipe: {recipe_name}",
        f"# Generated by SousChef on {_get_current_timestamp()}",
        "",
        "- name: Configure system using converted Chef recipe",
        "  hosts: all",
        "  become: true",
        "  gather_facts: true",
        "",
        "  vars:",
        "    # Variables extracted from Chef recipe",
    ]


def _add_playbook_variables(playbook_lines: list[str], raw_content: str) -> None:
    """Extract and add variables section to playbook."""
    variables = _extract_recipe_variables(raw_content)
    for var_name, var_value in variables.items():
        playbook_lines.append(f"    {var_name}: {var_value}")

    if not variables:
        playbook_lines.append("    # No variables found")

    playbook_lines.extend(["", "  tasks:"])


def _convert_and_collect_resources(
    parsed_content: str, raw_content: str
) -> tuple[list[dict[str, Any]], list[dict[str, Any]]]:
    """Convert Chef resources to Ansible tasks and collect handlers."""
    resources = _extract_resources_from_parsed_content(parsed_content)
    tasks = []
    handlers = []

    for resource in resources:
        task_result = _convert_resource_to_task_dict(resource, raw_content)
        tasks.append(task_result["task"])
        if task_result["handlers"]:
            handlers.extend(task_result["handlers"])

    return tasks, handlers


def _format_item_lines(item_yaml: str) -> list[str]:
    """Format a single task/handler's YAML lines with proper indentation."""
    formatted = []
    for i, line in enumerate(item_yaml.split("\n")):
        if i == 0:  # First line gets 4-space indent
            formatted.append(f"    {line}")
        elif line.strip():  # Non-empty property lines get 6-space indent
            formatted.append(f"      {line}")
        else:  # Empty lines preserved as-is
            formatted.append(line)
    return formatted


def _add_formatted_items(
    playbook_lines: list[str],
    items: list[dict[str, Any]],
    default_message: str,
) -> None:
    """Add formatted tasks or handlers to playbook."""
    if not items:
        playbook_lines.append(f"    {default_message}")
        return

    for i, item in enumerate(items):
        if i > 0:
            playbook_lines.append("")
        playbook_lines.extend(_format_item_lines(_format_ansible_task(item)))


def _generate_playbook_structure(
    parsed_content: str, raw_content: str, recipe_name: str
) -> str:
    """Generate complete playbook structure from parsed recipe content."""
    playbook_lines = _build_playbook_header(recipe_name)
    _add_playbook_variables(playbook_lines, raw_content)

    # Convert resources to tasks and handlers
    tasks, handlers = _convert_and_collect_resources(parsed_content, raw_content)

    # Add tasks section
    _add_formatted_items(playbook_lines, tasks, "# No tasks found")

    # Add handlers section if any
    if handlers:
        playbook_lines.extend(["", "  handlers:"])
        _add_formatted_items(playbook_lines, handlers, "")

    return "\n".join(playbook_lines)


def _get_current_timestamp() -> str:
    """Get current timestamp for playbook generation."""
    return datetime.now().strftime("%Y-%m-%d %H:%M:%S")
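
Putting the header, variable, task, and handler helpers together, the emitted playbook has roughly this shape (hand-written illustration; the timestamp and task bodies vary with the recipe and with _format_ansible_task):

```python
example_playbook = """\
---
# Ansible playbook generated from Chef recipe: default.rb
# Generated by SousChef on 2025-01-01 12:00:00

- name: Configure system using converted Chef recipe
  hosts: all
  become: true
  gather_facts: true

  vars:
    # Variables extracted from Chef recipe
    package_version: "1.2.3"

  tasks:
    - name: Install nginx
      ansible.builtin.package:
        name: nginx

  handlers:
    - name: Restart nginx
      ansible.builtin.service:
        name: nginx
        state: restarted
"""
```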


# Variable extraction


def _extract_version_variable(raw_content: str) -> dict[str, str]:
    """Extract version specification from recipe content."""
    version_pattern = re.compile(r"version\s+['\"]([^'\"]+)['\"]")
    versions = version_pattern.findall(raw_content)
    if versions:
        return {"package_version": f'"{versions[0]}"'}
    return {}


def _extract_content_variables(raw_content: str) -> dict[str, str]:
    """Extract content and source specifications from recipe content."""
    variables = {}

    # Extract content specifications
    content_pattern = re.compile(r"content\s+['\"]([^'\"]*)['\"]", re.DOTALL)
    contents = content_pattern.findall(raw_content)
    if contents:
        variables["file_content"] = f'"{contents[0]}"'

    # Extract source specifications for templates
    source_pattern = re.compile(r"source\s+['\"]([^'\"]+)['\"]")
    sources = source_pattern.findall(raw_content)
    if sources:
        variables["template_source"] = f'"{sources[0]}"'

    return variables


def _extract_ownership_variables(raw_content: str) -> dict[str, str]:
    """Extract owner and group specifications from recipe content."""
    variables = {}

    # Extract owner specifications
    owner_pattern = re.compile(r"owner\s+['\"]([^'\"]+)['\"]")
    owners = owner_pattern.findall(raw_content)
    if owners and owners[0] not in ["root"]:  # Skip default root
        variables["file_owner"] = f'"{owners[0]}"'

    # Extract group specifications
    group_pattern = re.compile(r"group\s+['\"]([^'\"]+)['\"]")
    groups = group_pattern.findall(raw_content)
    if groups and groups[0] not in ["root"]:  # Skip default root
        variables["file_group"] = f'"{groups[0]}"'

    return variables


def _extract_mode_variables(raw_content: str) -> dict[str, str]:
    """Extract mode specifications from recipe content."""
    # Extract mode specifications
    mode_pattern = re.compile(r"mode\s+['\"]([^'\"]+)['\"]")
    modes = mode_pattern.findall(raw_content)
    unique_modes = list(set(modes))

    if len(unique_modes) == 1:
        return {"file_mode": f'"{unique_modes[0]}"'}
    elif len(unique_modes) > 1:
        return {"directory_mode": '"0755"', "file_mode": '"0644"'}
    return {}


def _extract_recipe_variables(raw_content: str) -> dict[str, str]:
    """Extract variables from Chef recipe content."""
    variables = {}

    # Combine all extracted variables
    variables.update(_extract_version_variable(raw_content))
    variables.update(_extract_content_variables(raw_content))
    variables.update(_extract_ownership_variables(raw_content))
    variables.update(_extract_mode_variables(raw_content))

    return variables
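
A standalone sketch of the extraction regexes above run against a small recipe fragment:

```python
import re

recipe = """
template '/etc/app.conf' do
  source 'app.conf.erb'
  owner 'appuser'
  group 'appgroup'
  mode '0644'
end
"""

print(re.compile(r"source\s+['\"]([^'\"]+)['\"]").findall(recipe))  # ['app.conf.erb']
print(re.compile(r"owner\s+['\"]([^'\"]+)['\"]").findall(recipe))   # ['appuser']
print(re.compile(r"group\s+['\"]([^'\"]+)['\"]").findall(recipe))   # ['appgroup']
print(re.compile(r"mode\s+['\"]([^'\"]+)['\"]").findall(recipe))    # ['0644']
```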


# Resource extraction and conversion


def _parse_resource_block(block: str) -> dict[str, str] | None:
    """Parse a single resource block into a dictionary."""
    if not block.strip() or not block.startswith("Resource"):
        return None

    resource: dict[str, str] = {}

    # Extract resource type
    type_match = re.search(r"Type:\s*(\w+)", block)
    if type_match:
        resource["type"] = type_match.group(1)

    # Extract resource name
    name_match = re.search(r"Name:\s*([^\n]+)", block)
    if name_match:
        resource["name"] = name_match.group(1).strip()

    # Extract action (default to "create")
    action_match = re.search(r"Action:\s*([^\n]+)", block)
    resource["action"] = action_match.group(1).strip() if action_match else "create"

    # Extract properties
    props_match = re.search(r"Properties:\n?((?:(?!\n\n).)*)", block, re.DOTALL)
    resource["properties"] = props_match.group(1).strip() if props_match else ""

    # Return None if missing required fields
    if not resource.get("type") or not resource.get("name"):
        return None

    return resource


def _extract_resources_from_parsed_content(parsed_content: str) -> list[dict[str, str]]:
    """Extract resource information from parsed recipe content."""
    resource_blocks = re.split(r"\n(?=Resource \d+:)", parsed_content)
    resources = []
    for block in resource_blocks:
        resource = _parse_resource_block(block)
        if resource:
            resources.append(resource)
    return resources


# Notification handling


def _extract_notify_declarations(
    resource: dict[str, str], raw_content: str
) -> list[tuple[str, str, str]]:
    """Extract notifies declarations from a resource block."""
    resource_type_escaped = resource["type"]
    resource_name_escaped = re.escape(resource["name"])
    resource_pattern = (
        resource_type_escaped
        + REGEX_WHITESPACE_QUOTE
        + resource_name_escaped
        + REGEX_QUOTE_DO_END
    )
    resource_match = re.search(resource_pattern, raw_content, re.DOTALL | re.MULTILINE)

    if not resource_match:
        return []

    resource_block = resource_match.group(1)
    notify_pattern = re.compile(
        r'notifies\s+:(\w+),\s*[\'"]([^\'\"]+)[\'"]\s*,?\s*:?(\w+)?'
    )
    return notify_pattern.findall(resource_block)


def _extract_subscribe_declarations(raw_content: str) -> list[tuple[str, str, str]]:
    """Extract subscribes declarations from raw content."""
    subscribes_pattern = re.compile(
        r'subscribes\s+:(\w+),\s*[\'"]([^\'\"]+)[\'"]\s*,?\s*:?(\w+)?'
    )
    return subscribes_pattern.findall(raw_content)


def _process_notifications(
    notifications: list[tuple[str, str, str]],
    task: dict[str, Any],
) -> list[dict[str, Any]]:
    """Process notification declarations and create handlers."""
    handlers = []
    for notify_action, notify_target, _notify_timing in notifications:
        target_match = re.match(REGEX_RESOURCE_BRACKET, notify_target)
        if target_match:
            target_type = target_match.group(1)
            target_name = target_match.group(2)

            handler = _create_handler(notify_action, target_type, target_name)
            if handler:
                if "notify" not in task:
                    task["notify"] = []
                task["notify"].append(handler["name"])
                handlers.append(handler)

    return handlers


def _process_subscribes(
    resource: dict[str, str],
    subscribes: list[tuple[str, str, str]],
    raw_content: str,
    task: dict[str, Any],
) -> list[dict[str, Any]]:
    """Process subscribes declarations and create handlers."""
    handlers = []
    for sub_action, sub_target, _sub_timing in subscribes:
        target_match = re.match(REGEX_RESOURCE_BRACKET, sub_target)
        if not target_match:
            continue

        target_type = target_match.group(1)
        target_name = target_match.group(2)

        if resource["type"] != target_type or resource["name"] != target_name:
            continue

        subscriber_pattern = (
            rf"(\w+)\s+['\"]?[^'\"]*['\"]?\s+do\s*.{{0,1000}}?"
            rf"subscribes\s+:{sub_action}"
        )
        subscriber_match = re.search(subscriber_pattern, raw_content, re.DOTALL)

        if subscriber_match:
            subscriber_type = subscriber_match.group(1)
            handler = _create_handler(sub_action, subscriber_type, resource["name"])
            if handler:
                if "notify" not in task:
                    task["notify"] = []
                task["notify"].append(handler["name"])
                handlers.append(handler)

    return handlers


def _convert_resource_to_task_dict(
    resource: dict[str, str], raw_content: str
) -> dict[str, Any]:
    """Convert a Chef resource to an Ansible task dictionary with handlers."""
    # Convert basic resource to task
    task = _convert_chef_resource_to_ansible(
        resource["type"], resource["name"], resource["action"], resource["properties"]
    )

    # Extract and convert Chef guards to Ansible when conditions
    guards = _extract_chef_guards(resource, raw_content)
    if guards:
        task.update(guards)

    # Process all handlers
    handlers = []

    # Handle enhanced notifications with timing
    notifications = _extract_enhanced_notifications(resource, raw_content)
    for notification in notifications:
        handler = _create_handler_with_timing(
            notification["action"],
            notification["target_type"],
            notification["target_name"],
            notification["timing"],
        )
        if handler:
            if "notify" not in task:
                task["notify"] = []
            task["notify"].append(handler["name"])
            handlers.append(handler)

    # Handle basic notifies declarations
    notifies = _extract_notify_declarations(resource, raw_content)
    handlers.extend(_process_notifications(notifies, task))

    # Handle subscribes (reverse notifications)
    subscribes = _extract_subscribe_declarations(raw_content)
    handlers.extend(_process_subscribes(resource, subscribes, raw_content, task))

    return {"task": task, "handlers": handlers}


def _create_handler(
    action: str, resource_type: str, resource_name: str
) -> dict[str, Any]:
    """Create an Ansible handler from Chef notification."""
    # Map Chef actions to Ansible states
    action_mappings = {
        "reload": "reloaded",
        "restart": "restarted",
        "start": "started",
        "stop": "stopped",
        "enable": "started",  # enabling usually means start too
        "run": "run",
    }

    if resource_type == "service":
        ansible_state = action_mappings.get(action, action)

        handler: dict[str, Any] = {
            "name": f"{action.capitalize()} {resource_name}",
            ANSIBLE_SERVICE_MODULE: {"name": resource_name, "state": ansible_state},
        }

        if action == "enable":
            handler[ANSIBLE_SERVICE_MODULE]["enabled"] = True

        return handler

    elif resource_type == "execute":
        handler = {
            "name": f"Run {resource_name}",
            "ansible.builtin.command": {"cmd": resource_name},
        }
        return handler

    return {}
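
For notifies :restart, 'service[nginx]' the factory above yields a handler of this shape, assuming the ANSIBLE_SERVICE_MODULE constant resolves to the fully qualified module name:

```python
handler = {
    "name": "Restart nginx",
    "ansible.builtin.service": {"name": "nginx", "state": "restarted"},
}
```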
|
|
1178
|
+
|
|
1179
|
+
|
|
1180
|
+
def _extract_enhanced_notifications(
|
|
1181
|
+
resource: dict[str, str], raw_content: str
|
|
1182
|
+
) -> list[dict[str, str]]:
|
|
1183
|
+
"""Extract notification information with timing constraints for a resource."""
|
|
1184
|
+
notifications = []
|
|
1185
|
+
|
|
1186
|
+
# Find the resource block in raw content
|
|
1187
|
+
resource_type_escaped = resource["type"]
|
|
1188
|
+
resource_name_escaped = re.escape(resource["name"])
|
|
1189
|
+
resource_pattern = (
|
|
1190
|
+
resource_type_escaped
|
|
1191
|
+
+ REGEX_WHITESPACE_QUOTE
|
|
1192
|
+
+ resource_name_escaped
|
|
1193
|
+
+ REGEX_QUOTE_DO_END
|
|
1194
|
+
)
|
|
1195
|
+
resource_match = re.search(resource_pattern, raw_content, re.DOTALL | re.MULTILINE)
|
|
1196
|
+
|
|
1197
|
+
if resource_match:
|
|
1198
|
+
resource_block = resource_match.group(1)
|
|
1199
|
+
|
|
1200
|
+
# Enhanced notifies pattern that captures timing
|
|
1201
|
+
notify_pattern = re.compile(
|
|
1202
|
+
r'notifies\s+:(\w+),\s*[\'"]([^\'"]+)[\'"]\s*(?:,\s*:(\w+))?'
|
|
1203
|
+
)
|
|
1204
|
+
notifies = notify_pattern.findall(resource_block)
|
|
1205
|
+
|
|
1206
|
+
for notify_action, notify_target, notify_timing in notifies:
|
|
1207
|
+
# Parse target like 'service[nginx]'
|
|
1208
|
+
target_match = re.match(REGEX_RESOURCE_BRACKET, notify_target)
|
|
1209
|
+
if target_match:
|
|
1210
|
+
target_type = target_match.group(1)
|
|
1211
|
+
target_name = target_match.group(2)
|
|
1212
|
+
|
|
1213
|
+
notifications.append(
|
|
1214
|
+
{
|
|
1215
|
+
"action": notify_action,
|
|
1216
|
+
"target_type": target_type,
|
|
1217
|
+
"target_name": target_name,
|
|
1218
|
+
"timing": notify_timing or "delayed", # Default to delayed
|
|
1219
|
+
}
|
|
1220
|
+
)
|
|
1221
|
+
|
|
1222
|
+
return notifications
|
|
1223
|
+
|
|
1224
|
+
|
|
1225
|
+
def _create_handler_with_timing(
|
|
1226
|
+
action: str, resource_type: str, resource_name: str, timing: str
|
|
1227
|
+
) -> dict[str, Any]:
|
|
1228
|
+
"""Create an Ansible handler with timing considerations."""
|
|
1229
|
+
handler = _create_handler(action, resource_type, resource_name)
|
|
1230
|
+
if handler:
|
|
1231
|
+
# Add timing metadata (can be used by Ansible playbook optimization)
|
|
1232
|
+
handler["_chef_timing"] = timing
|
|
1233
|
+
|
|
1234
|
+
# For immediate timing, we could add listen/notify optimization
|
|
1235
|
+
if timing == "immediate":
|
|
1236
|
+
handler["_priority"] = "immediate"
|
|
1237
|
+
# Note: Ansible handlers always run at the end, but we can document
|
|
1238
|
+
# the original Chef timing intention for migration planning
|
|
1239
|
+
handler["# NOTE"] = "Chef immediate timing - consider task ordering"
|
|
1240
|
+
|
|
1241
|
+
return handler
|
|
1242
|
+
|
|
1243
|
+
|
|
1244
|
+
# Chef guard conversion
|
|
1245
|
+
|
|
1246
|
+
|
|
1247
|
+
def _find_resource_block(resource: dict[str, str], raw_content: str) -> str | None:
|
|
1248
|
+
"""Find the resource block in raw content."""
|
|
1249
|
+
resource_type_escaped = resource["type"]
|
|
1250
|
+
resource_name_escaped = re.escape(resource["name"])
|
|
1251
|
+
resource_pattern = (
|
|
1252
|
+
resource_type_escaped
|
|
1253
|
+
+ REGEX_WHITESPACE_QUOTE
|
|
1254
|
+
+ resource_name_escaped
|
|
1255
|
+
+ REGEX_QUOTE_DO_END
|
|
1256
|
+
)
|
|
1257
|
+
resource_match = re.search(resource_pattern, raw_content, re.DOTALL | re.MULTILINE)
|
|
1258
|
+
|
|
1259
|
+
if resource_match:
|
|
1260
|
+
return resource_match.group(1)
|
|
1261
|
+
return None
|
|
1262
|
+
|
|
1263
|
+
|
|
1264
|
+
def _extract_guard_patterns(
|
|
1265
|
+
resource_block: str,
|
|
1266
|
+
) -> tuple[list[str], list[str], list[str], list[str], list[str], list[str]]:
|
|
1267
|
+
"""Extract all guard patterns from resource block including enhanced support."""
|
|
1268
|
+
# Extract only_if conditions
|
|
1269
|
+
only_if_pattern = re.compile(
|
|
1270
|
+
rf'only_if\s+[\'"]([^\'"]{{{1},{MAX_GUARD_LENGTH}}})[\'"]'
|
|
1271
|
+
)
|
|
1272
|
+
only_if_matches = only_if_pattern.findall(resource_block)
|
|
1273
|
+
|
|
1274
|
+
# Extract not_if conditions
|
|
1275
|
+
not_if_pattern = re.compile(
|
|
1276
|
+
rf'not_if\s+[\'"]([^\'"]{{{1},{MAX_GUARD_LENGTH}}})[\'"]'
|
|
1277
|
+
)
|
|
1278
|
+
not_if_matches = not_if_pattern.findall(resource_block)
|
|
1279
|
+
|
|
1280
|
+
# Extract only_if blocks (Ruby code blocks)
|
|
1281
|
+
only_if_block_pattern = re.compile(r"only_if\s+do\b(.*?)\bend", re.DOTALL)
|
|
1282
|
+
only_if_block_matches = only_if_block_pattern.findall(resource_block)
|
|
1283
|
+
|
|
1284
|
+
# Extract not_if blocks (Ruby code blocks)
|
|
1285
|
+
not_if_block_pattern = re.compile(r"not_if\s+do\b(.*?)\bend", re.DOTALL)
|
|
1286
|
+
not_if_block_matches = not_if_block_pattern.findall(resource_block)
|
|
1287
|
+
|
|
1288
|
+
# Extract only_if with curly brace blocks (lambda/proc syntax)
|
|
1289
|
+
only_if_lambda_pattern = re.compile(
|
|
1290
|
+
rf"only_if\s+\{{([^}}]{{{1},{MAX_GUARD_LENGTH}}})\}}", re.DOTALL
|
|
1291
|
+
)
|
|
1292
|
+
only_if_lambda_matches = only_if_lambda_pattern.findall(resource_block)
|
|
1293
|
+
only_if_block_matches.extend(only_if_lambda_matches)
|
|
1294
|
+
|
|
1295
|
+
# Extract not_if with curly brace blocks (lambda/proc syntax)
|
|
1296
|
+
not_if_lambda_pattern = re.compile(
|
|
1297
|
+
rf"not_if\s+\{{([^}}]{{{1},{MAX_GUARD_LENGTH}}})\}}", re.DOTALL
|
|
1298
|
+
)
|
|
1299
|
+
not_if_lambda_matches = not_if_lambda_pattern.findall(resource_block)
|
|
1300
|
+
not_if_block_matches.extend(not_if_lambda_matches)
|
|
1301
|
+
|
|
1302
|
+
# Extract only_if arrays [condition1, condition2]
|
|
1303
|
+
only_if_array_pattern = re.compile(
|
|
1304
|
+
rf"only_if\s+\[([^\]]{{{1},{MAX_GUARD_LENGTH}}})\]", re.DOTALL
|
|
1305
|
+
)
|
|
1306
|
+
only_if_array_matches = only_if_array_pattern.findall(resource_block)
|
|
1307
|
+
|
|
1308
|
+
# Extract not_if arrays [condition1, condition2]
|
|
1309
|
+
not_if_array_pattern = re.compile(
|
|
1310
|
+
rf"not_if\s+\[([^\]]{{{1},{MAX_GUARD_LENGTH}}})\]", re.DOTALL
|
|
1311
|
+
)
|
|
1312
|
+
not_if_array_matches = not_if_array_pattern.findall(resource_block)
|
|
1313
|
+
|
|
1314
|
+
return (
|
|
1315
|
+
only_if_matches,
|
|
1316
|
+
not_if_matches,
|
|
1317
|
+
only_if_block_matches,
|
|
1318
|
+
not_if_block_matches,
|
|
1319
|
+
only_if_array_matches,
|
|
1320
|
+
not_if_array_matches,
|
|
1321
|
+
)
|
|
1322
|
+
|
|
1323
|
+
|
|
1324
|
+
def _process_only_if_guards(
|
|
1325
|
+
only_if_conditions: list[str],
|
|
1326
|
+
only_if_blocks: list[str],
|
|
1327
|
+
only_if_arrays: list[str],
|
|
1328
|
+
) -> list[str]:
|
|
1329
|
+
"""Process only_if guards and convert to Ansible when conditions."""
|
|
1330
|
+
when_conditions = []
|
|
1331
|
+
|
|
1332
|
+
# Process only_if conditions
|
|
1333
|
+
for condition in only_if_conditions:
|
|
1334
|
+
ansible_condition = _convert_chef_condition_to_ansible(condition)
|
|
1335
|
+
if ansible_condition:
|
|
1336
|
+
when_conditions.append(ansible_condition)
|
|
1337
|
+
|
|
1338
|
+
# Process only_if blocks
|
|
1339
|
+
for block in only_if_blocks:
|
|
1340
|
+
ansible_condition = _convert_chef_block_to_ansible(block, positive=True)
|
|
1341
|
+
if ansible_condition:
|
|
1342
|
+
when_conditions.append(ansible_condition)
|
|
1343
|
+
|
|
1344
|
+
# Process only_if arrays (multiple conditions with AND logic)
|
|
1345
|
+
for array_content in only_if_arrays:
|
|
1346
|
+
array_conditions = _parse_guard_array(array_content, negate=False)
|
|
1347
|
+
when_conditions.extend(array_conditions)
|
|
1348
|
+
|
|
1349
|
+
return when_conditions
|
|
1350
|
+
|
|
1351
|
+
|
|
1352
|
+
def _process_not_if_guards(
|
|
1353
|
+
not_if_conditions: list[str],
|
|
1354
|
+
not_if_blocks: list[str],
|
|
1355
|
+
not_if_arrays: list[str],
|
|
1356
|
+
) -> list[str]:
|
|
1357
|
+
"""Process not_if guards and convert to Ansible when conditions."""
|
|
1358
|
+
when_conditions = []
|
|
1359
|
+
|
|
1360
|
+
# Process not_if conditions (these become when conditions with negation)
|
|
1361
|
+
for condition in not_if_conditions:
|
|
1362
|
+
ansible_condition = _convert_chef_condition_to_ansible(condition, negate=True)
|
|
1363
|
+
if ansible_condition:
|
|
1364
|
+
when_conditions.append(ansible_condition)
|
|
1365
|
+
|
|
1366
|
+
# Process not_if blocks
|
|
1367
|
+
for block in not_if_blocks:
|
|
1368
|
+
ansible_condition = _convert_chef_block_to_ansible(block, positive=False)
|
|
1369
|
+
if ansible_condition:
|
|
1370
|
+
when_conditions.append(ansible_condition)
|
|
1371
|
+
|
|
1372
|
+
# Process not_if arrays (multiple conditions with AND logic, negated)
|
|
1373
|
+
for array_content in not_if_arrays:
|
|
1374
|
+
array_conditions = _parse_guard_array(array_content, negate=True)
|
|
1375
|
+
when_conditions.extend(array_conditions)
|
|
1376
|
+
|
|
1377
|
+
return when_conditions
|
|
1378
|
+
|
|
1379
|
+
|
|
+def _convert_guards_to_when_conditions(
+    only_if_conditions: list[str],
+    not_if_conditions: list[str],
+    only_if_blocks: list[str],
+    not_if_blocks: list[str],
+    only_if_arrays: list[str],
+    not_if_arrays: list[str],
+) -> list[str]:
+    """Convert Chef guards to Ansible when conditions with enhanced support."""
+    when_conditions = []
+
+    # Process only_if guards
+    when_conditions.extend(
+        _process_only_if_guards(only_if_conditions, only_if_blocks, only_if_arrays)
+    )
+
+    # Process not_if guards
+    when_conditions.extend(
+        _process_not_if_guards(not_if_conditions, not_if_blocks, not_if_arrays)
+    )
+
+    return when_conditions
+
+
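For orientation, a minimal usage sketch of the converter above (illustrative only, assuming the helpers are imported from souschef.converters.playbook; the guard strings are hypothetical):

    when = _convert_guards_to_when_conditions(
        only_if_conditions=["File.exist?('/etc/nginx/nginx.conf')"],
        not_if_conditions=[],
        only_if_blocks=[],
        not_if_blocks=[],
        only_if_arrays=[],
        not_if_arrays=["'test -d /var/www'"],
    )
    # when[0] == 'ansible_check_mode or {{ "/etc/nginx/nginx.conf" is file }}'
    # when[1] is the negated shell-command fallback for 'test -d /var/www'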
+def _extract_chef_guards(resource: dict[str, str], raw_content: str) -> dict[str, Any]:
+    """Extract Chef guards (only_if, not_if) and convert to Ansible when conditions."""
+    guards: dict[str, Any] = {}
+
+    # Find the resource block in raw content
+    resource_block = _find_resource_block(resource, raw_content)
+    if not resource_block:
+        return guards
+
+    # Extract all guard patterns
+    (
+        only_if_conditions,
+        not_if_conditions,
+        only_if_blocks,
+        not_if_blocks,
+        only_if_arrays,
+        not_if_arrays,
+    ) = _extract_guard_patterns(resource_block)
+
+    # Convert to Ansible when conditions
+    when_conditions = _convert_guards_to_when_conditions(
+        only_if_conditions,
+        not_if_conditions,
+        only_if_blocks,
+        not_if_blocks,
+        only_if_arrays,
+        not_if_arrays,
+    )
+
+    # Format the when clause
+    if when_conditions:
+        if len(when_conditions) == 1:
+            guards["when"] = when_conditions[0]
+        else:
+            # Multiple conditions - combine with 'and'
+            guards["when"] = when_conditions
+
+    return guards
+
+
+def _split_guard_array_parts(array_content: str) -> list[str]:
+    """Split array content by commas, respecting quotes and blocks."""
+    parts = []
+    current_part = ""
+    in_quotes = False
+    in_block = 0
+    quote_char = None
+
+    for char in array_content:
+        if char in ['"', "'"] and not in_block:
+            if not in_quotes:
+                in_quotes = True
+                quote_char = char
+            elif char == quote_char:
+                in_quotes = False
+                quote_char = None
+        elif char == "{" and not in_quotes:
+            in_block += 1
+        elif char == "}" and not in_quotes:
+            in_block -= 1
+        elif char == "," and not in_quotes and in_block == 0:
+            parts.append(current_part.strip())
+            current_part = ""
+            continue
+
+        current_part += char
+
+    if current_part.strip():
+        parts.append(current_part.strip())
+
+    return parts
+
+
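A doctest-style sketch (hypothetical guard text) showing that commas inside { ... } blocks and quoted strings do not split:

    parts = _split_guard_array_parts("{ foo(1, 2) }, 'test -f /etc/app'")
    # parts == ["{ foo(1, 2) }", "'test -f /etc/app'"]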
+def _extract_lambda_body(part: str) -> str:
+    """Extract lambda body from lambda syntax."""
+    if "->" in part:
+        return part.split("->", 1)[1].strip()
+    if "lambda" in part and "{" in part:
+        return part.split("{", 1)[1].rsplit("}", 1)[0].strip()
+    return ""
+
+
+def _process_guard_array_part(part: str, negate: bool) -> str | None:
+    """Process a single guard array part and convert to Ansible condition."""
+    part = part.strip()
+    if not part:
+        return None
+
+    # Handle string conditions
+    if part.startswith(("'", '"')) and part.endswith(("'", '"')):
+        condition_str = part[1:-1]
+        return _convert_chef_condition_to_ansible(condition_str, negate=negate)
+
+    # Handle block conditions
+    if part.startswith("{") and part.endswith("}"):
+        block_content = part[1:-1].strip()
+        return _convert_chef_block_to_ansible(block_content, positive=not negate)
+
+    # Handle lambda syntax
+    if part.startswith("lambda") or part.startswith("->"):
+        lambda_body = _extract_lambda_body(part)
+        if lambda_body:
+            return _convert_chef_block_to_ansible(lambda_body, positive=not negate)
+
+    return None
+
+
+def _parse_guard_array(array_content: str, negate: bool = False) -> list[str]:
+    """Parse Chef guard array content and convert to Ansible conditions."""
+    parts = _split_guard_array_parts(array_content)
+    conditions = []
+
+    for part in parts:
+        condition = _process_guard_array_part(part, negate)
+        if condition:
+            conditions.append(condition)
+
+    return conditions
+
+
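A short sketch (hypothetical guard array) combining a quoted command with a lambda; the command falls through to the shell fallback, while the lambda is routed through _extract_lambda_body and _convert_chef_block_to_ansible:

    conds = _parse_guard_array("'which nginx', -> { File.exist?('/etc/nginx') }")
    # conds[0] is the shell-command fallback for 'which nginx'
    # conds[1] == 'ansible_check_mode or "/etc/nginx" is file'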
+def _convert_chef_condition_to_ansible(condition: str, negate: bool = False) -> str:
+    """Convert a Chef condition string to Ansible when condition."""
+    # Common Chef to Ansible condition mappings
+    condition_mappings = {
+        # File existence checks
+        r'File\.exist\?\([\'"]([^\'"]+)[\'"]\)': (
+            r'ansible_check_mode or {{ "\1" is file }}'
+        ),
+        r'File\.directory\?\([\'"]([^\'"]+)[\'"]\)': (
+            r'ansible_check_mode or {{ "\1" is directory }}'
+        ),
+        r'File\.executable\?\([\'"]([^\'"]+)[\'"]\)': (
+            r'ansible_check_mode or {{ "\1" is executable }}'
+        ),
+        # Package checks
+        r'system\([\'"]which\s+(\w+)[\'"]\)': (
+            r'ansible_check_mode or {{ ansible_facts.packages["\1"] is defined }}'
+        ),
+        # Service checks
+        r'system\([\'"]systemctl\s+is-active\s+(\w+)[\'"]\)': (
+            r"ansible_check_mode or "
+            r'{{ ansible_facts.services["\1"].state == "running" }}'
+        ),
+        r'system\([\'"]service\s+(\w+)\s+status[\'"]\)': (
+            r"ansible_check_mode or "
+            r'{{ ansible_facts.services["\1"].state == "running" }}'
+        ),
+        # Platform checks
+        r"platform\?": r"ansible_facts.os_family",
+        r"platform_family\?": r"ansible_facts.os_family",
+        # Node attribute checks
+        r'node\[[\'"]([^\'"]+)[\'"]\]': r'hostvars[inventory_hostname]["\1"]',
+        r"node\.([a-zA-Z_][a-zA-Z0-9_.]*)": r'hostvars[inventory_hostname]["\1"]',
+    }
+
+    # Apply mappings
+    converted = condition
+    for chef_pattern, ansible_replacement in condition_mappings.items():
+        converted = re.sub(
+            chef_pattern, ansible_replacement, converted, flags=re.IGNORECASE
+        )
+
+    # Handle simple command checks
+    if converted == condition:  # No mapping found, treat as shell command
+        # Quadrupled braces emit a literal Jinja2 "{{ ... }}" pair from the
+        # f-string, consistent with the mapped conditions above.
+        converted = (
+            f"ansible_check_mode or {{{{ ansible_facts.env.PATH is defined "
+            f'and "{condition}" | length > 0 }}}}'
+        )
+
+    if negate:
+        converted = f"not ({converted})"
+
+    return converted
+
+
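Two worked examples of the mapping table above (hypothetical path and package name):

    print(_convert_chef_condition_to_ansible("File.exist?('/etc/app.conf')"))
    # ansible_check_mode or {{ "/etc/app.conf" is file }}

    print(_convert_chef_condition_to_ansible('system("which git")', negate=True))
    # not (ansible_check_mode or {{ ansible_facts.packages["git"] is defined }})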
+def _handle_file_existence_block(block: str, positive: bool) -> str | None:
+    """Handle File.exist? patterns in Chef blocks."""
+    file_exist_patterns = [
+        r'File\.exist\?\([\'"]([^\'"]+)[\'"]\)',
+        r'File\.exists\?\([\'"]([^\'"]+)[\'"]\)',
+        r'File\.exist\?\("([^"]+)"\)',
+        r'File\.exist\?\((["\'])?#\{([^}]+)\}\1\)',
+    ]
+
+    for pattern in file_exist_patterns:
+        file_match = re.search(pattern, block)
+        if file_match:
+            path = file_match.group(1) if len(file_match.groups()) >= 1 else ""
+            if "#{" in path:
+                path = re.sub(REGEX_RUBY_INTERPOLATION, JINJA2_VAR_REPLACEMENT, path)
+            # Use Ansible's native Jinja2 file test for better performance
+            condition = f'ansible_check_mode or "{path}" is file'
+            return condition if positive else f"not ({condition})"
+
+    return None
+
+
+def _handle_directory_existence_block(block: str, positive: bool) -> str | None:
+    """Handle File.directory? patterns in Chef blocks."""
+    dir_patterns = [
+        r'File\.directory\?\([\'"]([^\'"]+)[\'"]\)',
+        r'File\.directory\?\("([^"]+)"\)',
+    ]
+
+    for pattern in dir_patterns:
+        dir_match = re.search(pattern, block)
+        if dir_match:
+            path = dir_match.group(1)
+            if "#{" in path:
+                path = re.sub(REGEX_RUBY_INTERPOLATION, JINJA2_VAR_REPLACEMENT, path)
+            # Use Ansible's native Jinja2 directory test for better performance
+            condition = f'ansible_check_mode or "{path}" is directory'
+            return condition if positive else f"not ({condition})"
+
+    return None
+
+
+def _handle_command_execution_block(block: str, positive: bool) -> str | None:
+    """Handle system() and backtick command execution patterns."""
+    system_patterns = [
+        r'system\([\'"]([^\'"]+)[\'"]\)',
+        r"`([^`]+)`",
+    ]
+
+    for pattern in system_patterns:
+        system_match = re.search(pattern, block)
+        if system_match:
+            cmd = system_match.group(1)
+            if cmd.startswith("which "):
+                pkg = cmd.split()[1]
+                condition = (
+                    f"ansible_check_mode or ansible_facts.packages['{pkg}'] is defined"
+                )
+            else:
+                condition = "ansible_check_mode or true # TODO: Review shell command"
+            return condition if positive else f"not ({condition})"
+
+    return None
+
+
+def _handle_node_attribute_block(block: str, positive: bool) -> str | None:
+    """Handle node attribute checks in Chef blocks."""
+    if NODE_PREFIX in block or "node." in block:
+        converted = re.sub(
+            r"node\[['\"]([^'\"]+)['\"]\]",
+            r"hostvars[inventory_hostname]['\1']",
+            block,
+        )
+        converted = re.sub(
+            r"node\.([a-zA-Z_]\w*)",
+            r"hostvars[inventory_hostname]['\1']",
+            converted,
+        )
+        return converted if positive else f"not ({converted})"
+
+    return None
+
+
+def _handle_platform_check_block(block: str, positive: bool) -> str | None:
+    """Handle platform? and platform_family? checks."""
+    if "platform?" in block.lower() or "platform_family?" in block.lower():
+        condition = "ansible_facts.os_family is defined"
+        return condition if positive else f"not ({condition})"
+
+    return None
+
+
+def _convert_chef_block_to_ansible(block: str, positive: bool = True) -> str:
+    """Convert a Chef condition block to Ansible when condition."""
+    block = block.strip()
+
+    # Handle simple boolean returns
+    if block.lower() in ["true", "false"]:
+        is_true = block.lower() == "true"
+        return str(is_true if positive else not is_true).lower()
+
+    # Handle ::File prefix (Chef's scope resolution)
+    block = block.replace("::File.", "File.")
+
+    # Try each handler in sequence
+    handlers = [
+        _handle_file_existence_block,
+        _handle_directory_existence_block,
+        _handle_command_execution_block,
+        _handle_node_attribute_block,
+        _handle_platform_check_block,
+    ]
+
+    for handler in handlers:
+        condition = handler(block, positive)
+        if condition is not None:
+            return condition
+
+    # For complex blocks, create a comment indicating manual review needed
+    return f"# TODO: Review Chef block condition: {block[:50]}..."
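Finally, a sketch of the handler chain end to end (hypothetical inputs):

    print(_convert_chef_block_to_ansible("::File.directory?('/var/lib/pgsql')", positive=False))
    # not (ansible_check_mode or "/var/lib/pgsql" is directory)

    print(_convert_chef_block_to_ansible("node.chef_environment == 'prod'"))
    # hostvars[inventory_hostname]['chef_environment'] == 'prod'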