mcp-souschef 2.0.1__py3-none-any.whl → 2.2.0__py3-none-any.whl

This diff shows the content of publicly available package versions released to a supported registry. It is provided for informational purposes only and reflects the changes between the two versions as they appear in the public registry.
@@ -0,0 +1,674 @@
+ """
+ Habitat plan to Docker/Compose conversion.
+
+ This module provides tools to convert Chef Habitat plans to Dockerfiles
+ and Docker Compose configurations.
+ """
+
+ import json
+ import re
+ import shlex
+ from pathlib import Path
+ from typing import Any
+
+ from souschef.core.constants import ERROR_PREFIX
+ from souschef.core.path_utils import _normalize_path
+ from souschef.parsers.habitat import parse_habitat_plan
+
+
+ def convert_habitat_to_dockerfile(
+     plan_path: str, base_image: str = "ubuntu:22.04"
+ ) -> str:
+     """
+     Convert a Chef Habitat plan to a Dockerfile.
+
+     Creates a Dockerfile that replicates Habitat plan configuration.
+
+     Security Warning: This tool processes shell commands from Habitat plans
+     and includes them in the generated Dockerfile. Only use with trusted
+     Habitat plans from known sources. Review generated Dockerfiles before
+     building images, especially if the plan source is untrusted.
+
+     Args:
+         plan_path: Path to the plan.sh file.
+         base_image: Base Docker image (default: ubuntu:22.04).
+
+     Returns:
+         Dockerfile content as a string.
+
+     """
+     try:
+         # Validate and normalize path to prevent path traversal
+         try:
+             normalized_path = _normalize_path(plan_path)
+             validated_path = str(normalized_path)
+         except ValueError as e:
+             return f"Invalid path {plan_path}: {e}"
+
+         plan_json: str = parse_habitat_plan(validated_path)
+         if plan_json.startswith(ERROR_PREFIX):
+             return plan_json
+         plan: dict[str, Any] = json.loads(plan_json)
+         lines = _build_dockerfile_header(plan, validated_path, base_image)
+         _add_dockerfile_deps(lines, plan)
+         _add_dockerfile_build(lines, plan)
+         _add_dockerfile_runtime(lines, plan)
+         return "\n".join(lines)
+     except Exception as e:
+         return f"Error converting Habitat plan to Dockerfile: {e}"
+
+
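For orientation, a minimal usage sketch of the entry point above (not part of the diff; the import path is an assumption, only the function name, signature, and string-return behaviour come from the code):

# Illustrative only; the import path below is a guess based on the package layout.
from souschef.converters.habitat_docker import convert_habitat_to_dockerfile

dockerfile = convert_habitat_to_dockerfile("plans/myapp/plan.sh", base_image="ubuntu:22.04")
# The function does not raise: on failure it returns an error message string,
# so check the result before writing it out.
print(dockerfile)
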
+ def generate_compose_from_habitat(
+     plan_paths: str, network_name: str = "habitat_net"
+ ) -> str:
+     """
+     Generate docker-compose.yml from Habitat plans.
+
+     Creates Docker Compose configuration for multiple services.
+
+     Args:
+         plan_paths: Comma-separated paths to plan.sh files.
+         network_name: Docker network name.
+
+     Returns:
+         docker-compose.yml content.
+
+     """
+     try:
+         # Validate network_name to prevent YAML injection
+         if not _validate_docker_network_name(network_name):
+             return (
+                 f"Invalid Docker network name: {network_name}. "
+                 "Expected format: alphanumeric with hyphens, underscores, or dots"
+             )
+
+         paths = [p.strip() for p in plan_paths.split(",")]
+         # Validate and normalize all paths to prevent path traversal
+         validated_paths = []
+         for path_str in paths:
+             try:
+                 normalized = _normalize_path(path_str)
+                 validated_paths.append(str(normalized))
+             except ValueError as e:
+                 return f"Invalid path {path_str}: {e}"
+
+         services: dict[str, Any] = {}
+         for plan_path in validated_paths:
+             plan_json = parse_habitat_plan(plan_path)
+             if plan_json.startswith(ERROR_PREFIX):
+                 return f"Error parsing {plan_path}: {plan_json}"
+             plan: dict[str, Any] = json.loads(plan_json)
+             pkg_name = plan["package"].get("name", "unknown")
+             service = _build_compose_service(plan, pkg_name)
+             service["networks"] = [network_name]
+             services[pkg_name] = service
+         return _format_compose_yaml(services, network_name)
+     except Exception as e:
+         return f"Error generating docker-compose.yml: {e}"
+
+
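A matching sketch for the compose generator (again, the import path and plan locations are assumptions):

# Illustrative only; import path and plan locations are placeholders.
from souschef.converters.habitat_docker import generate_compose_from_habitat

compose_yaml = generate_compose_from_habitat(
    "plans/web/plan.sh, plans/postgresql/plan.sh",  # comma-separated; whitespace is stripped
    network_name="habitat_net",
)
with open("docker-compose.yml", "w") as fh:
    fh.write(compose_yaml)
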
+ # Dependency mapping
+
+
+ def _map_habitat_deps_to_apt(habitat_deps: list[str]) -> list[str]:
+     """
+     Map Habitat package dependencies to apt package names.
+
+     Args:
+         habitat_deps: List of Habitat package identifiers
+             (e.g., 'core/gcc', 'custom/org/package').
+
+     Returns:
+         List of apt package names. Unknown dependencies are included with
+         basic validation.
+
+     """
+     dep_mapping = {
+         "core/gcc": "gcc",
+         "core/make": "make",
+         "core/openssl": "libssl-dev",
+         "core/pcre": "libpcre3-dev",
+         "core/zlib": "zlib1g-dev",
+         "core/glibc": "libc6-dev",
+         "core/readline": "libreadline-dev",
+         "core/curl": "curl",
+         "core/wget": "wget",
+         "core/git": "git",
+         "core/python": "python3",
+         "core/ruby": "ruby",
+         "core/perl": "perl",
+     }
+     apt_packages = []
+     for dep in habitat_deps:
+         if not dep or not dep.strip():
+             continue
+
+         dep = dep.strip()
+
+         # Check known mappings first
+         if dep in dep_mapping:
+             apt_packages.append(dep_mapping[dep])
+         elif "/" in dep:
+             # Extract package name from Habitat identifier
+             # (e.g., 'core/gcc' -> 'gcc')
+             # For multi-segment paths like 'custom/org/package',
+             # take the last component
+             pkg_name = dep.split("/")[-1]
+
+             # Basic validation: package name should be alphanumeric with
+             # hyphens/underscores
+             if pkg_name and re.match(
+                 r"^[a-z0-9][a-z0-9._+-]*$", pkg_name, re.IGNORECASE
+             ):
+                 apt_packages.append(pkg_name)
+             # If invalid, skip but don't fail - let apt handle the error later
+         else:
+             # Dependency without slash - might be a direct apt package name
+             # Validate it looks like a package name before including
+             if re.match(r"^[a-z0-9][a-z0-9._+-]*$", dep, re.IGNORECASE):
+                 apt_packages.append(dep)
+
+     return apt_packages
+
+
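To make the mapping concrete, a small illustrative call; the expected result is read off the lookup table and fallback rules above, and the import path is an assumption:

# Hypothetical import path; only the function name comes from the diff.
from souschef.converters.habitat_docker import _map_habitat_deps_to_apt

deps = ["core/gcc", "core/openssl", "custom/acme/libwidget", "htop", "bad name!"]
print(_map_habitat_deps_to_apt(deps))
# Expected: ['gcc', 'libssl-dev', 'libwidget', 'htop']
# - known core/* identifiers use the lookup table
# - unknown slash identifiers fall back to their last path component
# - 'bad name!' fails the package-name regex and is silently skipped
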
+ def _extract_default_port(port_name: str) -> str:
+     """Extract default port number based on common port names."""
+     port_defaults = {
+         "http": "80",
+         "https": "443",
+         "port": "8080",
+         "ssl-port": "443",
+         "postgresql": "5432",
+         "mysql": "3306",
+         "redis": "6379",
+         "mongodb": "27017",
+     }
+     if port_name in port_defaults:
+         return port_defaults[port_name]
+     for key, value in port_defaults.items():
+         if key in port_name.lower():
+             return value
+     return ""
+
+
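A few illustrative lookups (import path assumed; results follow from the table and the substring fallback):

# Hypothetical import path.
from souschef.converters.habitat_docker import _extract_default_port

_extract_default_port("ssl-port")    # '443' (exact key match)
_extract_default_port("admin-http")  # '80'  (substring fallback: 'http' in 'admin-http')
_extract_default_port("gossip")      # ''    (unknown; callers skip EXPOSE/ports entries)
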
+ # Validation functions
+
+
+ def _validate_docker_network_name(network_name: str) -> bool:
+     """
+     Validate Docker network name format.
+
+     Validates that network_name matches expected Docker naming patterns to
+     prevent YAML injection or malformed compose files.
+
+     Args:
+         network_name: Docker network name to validate.
+
+     Returns:
+         True if valid, False otherwise.
+
+     """
+     if not network_name or not isinstance(network_name, str):
+         return False
+
+     # Docker network names must:
+     # - Start with alphanumeric character
+     # - Contain only alphanumeric, hyphens, underscores, or dots
+     # - Not contain spaces or special characters that could break YAML
+
+     # Pattern: starts with alphanumeric, followed by
+     # alphanumeric/hyphen/underscore/dot
+     pattern = r"^[a-zA-Z0-9][a-zA-Z0-9._-]*$"
+
+     if not re.match(pattern, network_name):
+         return False
+
+     # Reject dangerous characters that could break YAML structure
+     dangerous_chars = [
+         "\n",
+         "\r",
+         ":",
+         "[",
+         "]",
+         "{",
+         "}",
+         "#",
+         "|",
+         ">",
+         "&",
+         "*",
+         "!",
+         "%",
+         "@",
+     ]
+     if any(char in network_name for char in dangerous_chars):
+         return False
+
+     # Validate reasonable length (Docker network names should be < 64 chars)
+     return len(network_name) <= 63
+
+
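Examples of what the validator accepts and rejects, inferred from the pattern, blocklist, and length cap above (import path assumed):

# Hypothetical import path.
from souschef.converters.habitat_docker import _validate_docker_network_name

_validate_docker_network_name("habitat_net")     # True
_validate_docker_network_name("backend.prod-1")  # True  (dots, hyphens, underscores allowed)
_validate_docker_network_name("net: {evil}")     # False (fails the pattern)
_validate_docker_network_name("x" * 64)          # False (longer than 63 characters)
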
+ def _validate_docker_image_name(base_image: str) -> bool:
+     """
+     Validate Docker image name format.
+
+     Validates that base_image matches expected Docker image format to prevent
+     Dockerfile injection or malformed content.
+
+     Args:
+         base_image: Docker image name to validate.
+
+     Returns:
+         True if valid, False otherwise.
+
+     """
+     if not base_image or not isinstance(base_image, str):
+         return False
+
+     # Docker image format: [registry/][namespace/]repository[:tag|@digest]
+     # Examples: ubuntu:22.04, docker.io/library/nginx:latest,
+     # myregistry.com:5000/myimage:v1
+     # Allow alphanumeric, hyphens, underscores, dots, colons
+     # (only in specific positions), slashes, and @ for digests.
+     # Reject shell metacharacters.
+
+     # Pattern breakdown:
+     # - Optional registry (with optional port), must be followed by a slash:
+     #   [hostname[:port]/]
+     #   - Hostname allows dots: "myregistry.com", "registry.local"
+     # - One or more repository path components (may include namespaces):
+     #   [namespace/]*repository
+     #   - Path components do not contain colons
+     # - Optional tag or digest at the end: [:tag] or [@sha256:digest]
+     pattern = (
+         r"^"
+         # Optional registry (with optional port), must be followed by a slash.
+         r"(?:(?:[a-zA-Z0-9](?:[a-zA-Z0-9_-]*[a-zA-Z0-9])?"
+         r"(?:\.[a-zA-Z0-9](?:[a-zA-Z0-9_-]*[a-zA-Z0-9])?)*"
+         r"(?::\d+)?)/)?"
+         # Repository path components (one or more), no colons here.
+         r"(?:[a-zA-Z0-9](?:[a-zA-Z0-9._-]*[a-zA-Z0-9])?"
+         r"(?:/[a-zA-Z0-9](?:[a-zA-Z0-9._-]*[a-zA-Z0-9])?)*)"
+         # Optional tag or digest at the end.
+         r"(?::[a-zA-Z0-9._-]+|@sha256:[a-fA-F0-9]{64})?"
+         r"$"
+     )
+
+     if not re.match(pattern, base_image):
+         return False
+
+     # Additional safety checks: reject dangerous characters
+     dangerous_chars = ["\n", "\r", ";", "|", "&", "$", "`", "(", ")", "<", ">", "\\"]
+     if any(char in base_image for char in dangerous_chars):
+         return False
+
+     # Validate reasonable length (Docker image names are typically < 256 chars)
+     return len(base_image) <= 256
+
+
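Likewise for image names; the examples below are inferred from the regex and the metacharacter blocklist (import path assumed):

# Hypothetical import path.
from souschef.converters.habitat_docker import _validate_docker_image_name

_validate_docker_image_name("ubuntu:22.04")                       # True
_validate_docker_image_name("myregistry.com:5000/team/app:v1.2")  # True  (registry with port)
_validate_docker_image_name("ubuntu:22.04 && rm -rf /")           # False (spaces/metacharacters)
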
+ # Dockerfile generation
+
+
+ def _add_dockerfile_label(
+     lines: list[str], label_name: str, label_value: str | None
+ ) -> None:
+     """
+     Add a LABEL to Dockerfile lines with validation.
+
+     Args:
+         lines: List of Dockerfile lines to append to.
+         label_name: Name of the label (e.g., 'maintainer', 'version').
+         label_value: Value for the label, or None if not present.
+
+     """
+     if not label_value:
+         return
+
+     # Validate no newlines that would break Dockerfile syntax
+     if "\n" in label_value or "\r" in label_value:
+         lines.append(
+             f"# WARNING: {label_name.title()} field contains newlines, omitting LABEL"
+         )
+     else:
+         # Use json.dumps to properly escape quotes and special characters
+         escaped_value = json.dumps(label_value)
+         lines.append(f"LABEL {label_name}={escaped_value}")
+
+
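A short sketch of the two code paths, label emission and the newline warning (import path assumed; the escaping comes from json.dumps):

# Hypothetical import path.
from souschef.converters.habitat_docker import _add_dockerfile_label

lines: list[str] = []
_add_dockerfile_label(lines, "maintainer", 'Jane "Ops" Doe <jane@example.com>')
print(lines[0])  # LABEL maintainer="Jane \"Ops\" Doe <jane@example.com>"
_add_dockerfile_label(lines, "description", "multi\nline")
print(lines[1])  # # WARNING: Description field contains newlines, omitting LABEL
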
+ def _build_dockerfile_header(
+     plan: dict[str, Any], plan_path: str, base_image: str
+ ) -> list[str]:
+     """
+     Build Dockerfile header with metadata.
+
+     Args:
+         plan: Parsed Habitat plan dictionary.
+         plan_path: Path to the plan.sh file.
+         base_image: Base Docker image name (validated).
+
+     Returns:
+         List of Dockerfile header lines.
+
+     Raises:
+         ValueError: If base_image format is invalid.
+
+     """
+     # Validate base_image to prevent Dockerfile injection
+     if not _validate_docker_image_name(base_image):
+         raise ValueError(
+             f"Invalid Docker image name: {base_image}. "
+             "Expected format: [registry/]repository[:tag]"
+         )
+
+     lines = [
+         "# Dockerfile generated from Habitat plan",
+         f"# Original plan: {Path(plan_path).name}",
+         (
+             f"# Package: {plan['package'].get('origin', 'unknown')}/"
+             f"{plan['package'].get('name', 'unknown')}"
+         ),
+         f"# Version: {plan['package'].get('version', 'unknown')}",
+         "",
+         f"FROM {base_image}",
+         "",
+     ]
+     _add_dockerfile_label(lines, "maintainer", plan["package"].get("maintainer"))
+     _add_dockerfile_label(lines, "version", plan["package"].get("version"))
+     _add_dockerfile_label(lines, "description", plan["package"].get("description"))
+     if lines[-1].startswith("LABEL"):
+         lines.append("")
+     return lines
+
+
+ def _add_dockerfile_deps(lines: list[str], plan: dict[str, Any]) -> None:
+     """Add dependency installation to Dockerfile."""
+     if plan["dependencies"]["build"] or plan["dependencies"]["runtime"]:
+         lines.append("# Install dependencies")
+         all_deps = set(plan["dependencies"]["build"] + plan["dependencies"]["runtime"])
+         apt_packages = _map_habitat_deps_to_apt(list(all_deps))
+         if apt_packages:
+             safe_apt_packages = [shlex.quote(pkg) for pkg in apt_packages]
+             lines.append("RUN apt-get update && \\")
+             lines.append(f"    apt-get install -y {' '.join(safe_apt_packages)} && \\")
+             lines.append("    rm -rf /var/lib/apt/lists/*")
+         lines.append("")
+
+
+ def _process_callback_lines(
+     callback_content: str, replace_vars: bool = False
+ ) -> list[str]:
+     """
+     Process callback lines for Dockerfile.
+
+     Security Note: This function processes shell commands from Habitat plans
+     and embeds them directly into Dockerfile RUN commands. Only use this
+     with trusted Habitat plans from known sources. Malicious commands in
+     untrusted plans will be executed during Docker image builds.
+
+     Args:
+         callback_content: Raw callback content to process.
+         replace_vars: Whether to replace Habitat variables with paths.
+
+     Returns:
+         List of processed RUN commands.
+
+     """
+     processed = []
+     # Patterns that might indicate malicious or dangerous commands
+     dangerous_patterns = [
+         r"curl.*\|.*sh",  # Piping curl to shell
+         r"wget.*\|.*sh",  # Piping wget to shell
+         r"eval",  # eval commands
+         r"\$\(curl",  # Command substitution with curl
+         r"\$\(wget",  # Command substitution with wget
+     ]
+
+     for line in callback_content.split("\n"):
+         line = line.strip()
+         if line and not line.startswith("#"):
+             # Perform variable replacement BEFORE validation
+             if replace_vars:
+                 line = (
+                     line.replace("$pkg_prefix", "/usr/local")
+                     .replace("$pkg_svc_config_path", "/etc/app")
+                     .replace("$pkg_svc_data_path", "/var/lib/app")
+                     .replace("$pkg_svc_var_path", "/var/run/app")
+                 )
+
+             # Check for potentially dangerous patterns AFTER replacement
+             for pattern in dangerous_patterns:
+                 if re.search(pattern, line, re.IGNORECASE):
+                     # Add a warning comment but still include the command
+                     # Users should review their Dockerfiles before building
+                     processed.append(
+                         "# WARNING: Potentially dangerous command pattern detected"
+                     )
+                     break
+
+             processed.append(f"RUN {line}")
+     return processed
+
+
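A sketch of how a do_build callback is turned into RUN instructions under the rules above (import path assumed; expected output shown as a comment):

# Hypothetical import path.
from souschef.converters.habitat_docker import _process_callback_lines

do_build = """\
# build the app
./configure --prefix=$pkg_prefix
make
"""
print(_process_callback_lines(do_build, replace_vars=True))
# ['RUN ./configure --prefix=/usr/local', 'RUN make']
# Comment and blank lines are dropped; $pkg_* variables are rewritten to fixed
# container paths before the dangerous-pattern scan runs.
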
+ def _add_dockerfile_build(lines: list[str], plan: dict[str, Any]) -> None:
+     """Add build and install steps to Dockerfile."""
+     if "do_build" in plan["callbacks"]:
+         lines.append("# Build steps")
+         lines.extend(
+             _process_callback_lines(plan["callbacks"]["do_build"], replace_vars=True)
+         )
+         lines.append("")
+     if "do_install" in plan["callbacks"]:
+         lines.append("# Install steps")
+         lines.extend(
+             _process_callback_lines(plan["callbacks"]["do_install"], replace_vars=True)
+         )
+         lines.append("")
+     if "do_init" in plan["callbacks"]:
+         lines.append("# Initialization steps")
+         lines.extend(
+             _process_callback_lines(plan["callbacks"]["do_init"], replace_vars=True)
+         )
+         lines.append("")
+
+
+ def _add_dockerfile_runtime(lines: list[str], plan: dict[str, Any]) -> None:
+     """Add runtime configuration to Dockerfile."""
+     if plan["ports"]:
+         lines.append("# Expose ports")
+         for port in plan["ports"]:
+             port_num = _extract_default_port(port["name"])
+             if port_num:
+                 lines.append(f"EXPOSE {port_num}")
+         lines.append("")
+     if plan["service"].get("user") and plan["service"]["user"] != "root":
+         lines.append(f"USER {plan['service']['user']}")
+         lines.append("")
+     lines.append("WORKDIR /usr/local")
+     lines.append("")
+     if plan["service"].get("run"):
+         run_cmd = plan["service"]["run"]
+         cmd_parts = shlex.split(run_cmd)
+         if cmd_parts:
+             lines.append(f"CMD {json.dumps(cmd_parts)}")
+
+
+ # Docker Compose generation
+
+
+ def _needs_data_volume(plan: dict[str, Any]) -> bool:
+     """
+     Detect if a service needs a data volume.
+
+     Checks for data-related patterns in callbacks (like do_init creating
+     directories with mkdir) rather than relying on fragile keyword matching
+     in run commands.
+
+     Args:
+         plan: Parsed Habitat plan dictionary.
+
+     Returns:
+         True if the service needs a data volume.
+
+     """
+     # Check if do_init callback creates data directories
+     if "do_init" in plan["callbacks"]:
+         init_code = plan["callbacks"]["do_init"]
+         # Look for mkdir commands creating data directories
+         if "mkdir" in init_code and ("data" in init_code or "pgdata" in init_code):
+             return True
+
+     # Check for database-related package names (common use case)
+     pkg_name = plan["package"].get("name", "")
+     return pkg_name in ["postgresql", "mysql", "mongodb", "redis"]
+
+
+ def _build_compose_service(plan: dict[str, Any], pkg_name: str) -> dict[str, Any]:
+     """Build a docker-compose service definition."""
+     service: dict[str, Any] = {
+         "build": {"context": ".", "dockerfile": f"Dockerfile.{pkg_name}"},
+         "container_name": pkg_name,
+         "networks": [],
+     }
+     if plan["ports"]:
+         service["ports"] = []
+         for port in plan["ports"]:
+             port_num = _extract_default_port(port["name"])
+             if port_num:
+                 service["ports"].append(f"{port_num}:{port_num}")
+     if _needs_data_volume(plan):
+         service["volumes"] = [f"{pkg_name}_data:/var/lib/app"]
+     service["environment"] = []
+     for port in plan["ports"]:
+         port_num = _extract_default_port(port["name"])
+         if port_num:
+             service["environment"].append(f"{port['name'].upper()}={port_num}")
+     if plan["binds"]:
+         service["depends_on"] = [bind["name"] for bind in plan["binds"]]
+     return service
+
+
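An illustrative service definition for a database-style plan; the plan dictionary shape is inferred from the fields the code reads (package, ports, binds, callbacks), and the import path is an assumption:

# Hypothetical import path; plan structure inferred from the code above.
from souschef.converters.habitat_docker import _build_compose_service

plan = {
    "package": {"name": "postgresql"},
    "ports": [{"name": "postgresql"}],
    "binds": [],
    "callbacks": {},
}
service = _build_compose_service(plan, "postgresql")
# {'build': {'context': '.', 'dockerfile': 'Dockerfile.postgresql'},
#  'container_name': 'postgresql',
#  'networks': [],
#  'ports': ['5432:5432'],
#  'volumes': ['postgresql_data:/var/lib/app'],   # _needs_data_volume matched the package name
#  'environment': ['POSTGRESQL=5432']}
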
+ def _add_service_build(lines: list[str], service: dict[str, Any]) -> None:
+     """
+     Add build configuration to service lines.
+
+     Args:
+         lines: List of YAML lines to append to.
+         service: Service dictionary containing optional 'build' configuration.
+
+     """
+     if "build" in service:
+         lines.append("    build:")
+         lines.append(f"      context: {service['build']['context']}")
+         lines.append(f"      dockerfile: {service['build']['dockerfile']}")
+
+
+ def _add_service_ports(lines: list[str], service: dict[str, Any]) -> None:
+     """
+     Add ports configuration to service lines.
+
+     Args:
+         lines: List of YAML lines to append to.
+         service: Service dictionary containing optional 'ports' configuration.
+
+     """
+     if "ports" in service:
+         lines.append("    ports:")
+         for port in service["ports"]:
+             lines.append(f'      - "{port}"')
+
+
+ def _add_service_volumes(
+     lines: list[str], service: dict[str, Any], volumes_used: set[str]
+ ) -> None:
+     """
+     Add volumes configuration to service lines.
+
+     Args:
+         lines: List of YAML lines to append to.
+         service: Service dictionary containing optional 'volumes' configuration.
+         volumes_used: Set to track volume names for top-level volumes section.
+
+     """
+     if "volumes" in service:
+         lines.append("    volumes:")
+         for volume in service["volumes"]:
+             lines.append(f"      - {volume}")
+             volumes_used.add(volume.split(":")[0])
+
+
+ def _add_service_environment(lines: list[str], service: dict[str, Any]) -> None:
+     """
+     Add environment configuration to service lines.
+
+     Args:
+         lines: List of YAML lines to append to.
+         service: Service dictionary containing optional 'environment'
+             configuration.
+
+     """
+     if "environment" in service:
+         lines.append("    environment:")
+         for env in service["environment"]:
+             lines.append(f"      - {env}")
+
+
+ def _add_service_dependencies(lines: list[str], service: dict[str, Any]) -> None:
+     """
+     Add depends_on and networks configuration to service lines.
+
+     Args:
+         lines: List of YAML lines to append to.
+         service: Service dictionary containing optional 'depends_on' and
+             'networks' configuration.
+
+     """
+     if "depends_on" in service:
+         lines.append("    depends_on:")
+         for dep in service["depends_on"]:
+             lines.append(f"      - {dep}")
+     if "networks" in service:
+         lines.append("    networks:")
+         for net in service["networks"]:
+             lines.append(f"      - {net}")
+
+
+ def _format_compose_yaml(services: dict[str, Any], network_name: str) -> str:
+     """
+     Format services as docker-compose YAML.
+
+     Args:
+         services: Dictionary of service configurations.
+         network_name: Docker network name (validated).
+
+     Returns:
+         docker-compose.yml content as string.
+
+     Raises:
+         ValueError: If network_name format is invalid.
+
+     """
+     # Validate network_name to prevent YAML injection
+     if not _validate_docker_network_name(network_name):
+         raise ValueError(
+             f"Invalid Docker network name: {network_name}. "
+             "Expected format: alphanumeric with hyphens, underscores, or dots"
+         )
+
+     lines = ["version: '3.8'", "", "services:"]
+     volumes_used: set[str] = set()
+     for name, service in services.items():
+         lines.append(f"  {name}:")
+         if "container_name" in service:
+             lines.append(f"    container_name: {service['container_name']}")
+         _add_service_build(lines, service)
+         _add_service_ports(lines, service)
+         _add_service_volumes(lines, service, volumes_used)
+         _add_service_environment(lines, service)
+         _add_service_dependencies(lines, service)
+         lines.append("")
+     lines.extend(["networks:", f"  {network_name}:", "    driver: bridge"])
+     if volumes_used:
+         lines.extend(["", "volumes:"])
+         for vol in sorted(volumes_used):
+             lines.append(f"  {vol}:")
+     return "\n".join(lines)
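Continuing the previous sketch, the YAML emitted by _format_compose_yaml after attaching the network the way generate_compose_from_habitat does; the expected output (shown as comments) follows the fixed indentation prefixes in the helpers above:

# Hypothetical import path; 'service' is the dict built in the earlier sketch.
from souschef.converters.habitat_docker import _format_compose_yaml

service["networks"] = ["habitat_net"]
print(_format_compose_yaml({"postgresql": service}, "habitat_net"))
# version: '3.8'
#
# services:
#   postgresql:
#     container_name: postgresql
#     build:
#       context: .
#       dockerfile: Dockerfile.postgresql
#     ports:
#       - "5432:5432"
#     volumes:
#       - postgresql_data:/var/lib/app
#     environment:
#       - POSTGRESQL=5432
#     networks:
#       - habitat_net
#
# networks:
#   habitat_net:
#     driver: bridge
#
# volumes:
#   postgresql_data: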