pactown 0.1.4__py3-none-any.whl → 0.1.47__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- pactown/__init__.py +178 -4
- pactown/cli.py +539 -37
- pactown/config.py +12 -11
- pactown/deploy/__init__.py +17 -3
- pactown/deploy/base.py +35 -33
- pactown/deploy/compose.py +59 -58
- pactown/deploy/docker.py +40 -41
- pactown/deploy/kubernetes.py +43 -42
- pactown/deploy/podman.py +55 -56
- pactown/deploy/quadlet.py +1021 -0
- pactown/deploy/quadlet_api.py +533 -0
- pactown/deploy/quadlet_shell.py +557 -0
- pactown/events.py +1066 -0
- pactown/fast_start.py +514 -0
- pactown/generator.py +31 -30
- pactown/llm.py +450 -0
- pactown/markpact_blocks.py +50 -0
- pactown/network.py +59 -38
- pactown/orchestrator.py +90 -93
- pactown/parallel.py +40 -40
- pactown/platform.py +146 -0
- pactown/registry/__init__.py +1 -1
- pactown/registry/client.py +45 -46
- pactown/registry/models.py +25 -25
- pactown/registry/server.py +24 -24
- pactown/resolver.py +30 -30
- pactown/runner_api.py +458 -0
- pactown/sandbox_manager.py +480 -79
- pactown/security.py +682 -0
- pactown/service_runner.py +1201 -0
- pactown/user_isolation.py +458 -0
- {pactown-0.1.4.dist-info → pactown-0.1.47.dist-info}/METADATA +65 -9
- pactown-0.1.47.dist-info/RECORD +36 -0
- pactown-0.1.47.dist-info/entry_points.txt +5 -0
- pactown-0.1.4.dist-info/RECORD +0 -24
- pactown-0.1.4.dist-info/entry_points.txt +0 -3
- {pactown-0.1.4.dist-info → pactown-0.1.47.dist-info}/WHEEL +0 -0
- {pactown-0.1.4.dist-info → pactown-0.1.47.dist-info}/licenses/LICENSE +0 -0
pactown/config.py
CHANGED
@@ -2,7 +2,8 @@
 
 from dataclasses import dataclass, field
 from pathlib import Path
-from typing import Optional
+from typing import Optional
+
 import yaml
 
 
@@ -14,7 +15,7 @@ class DependencyConfig:
     registry: str = "local"
     endpoint: Optional[str] = None
     env_var: Optional[str] = None
-
+
     @classmethod
     def from_dict(cls, data: dict | str) -> "DependencyConfig":
         if isinstance(data, str):
@@ -38,13 +39,13 @@ class ServiceConfig:
     sandbox_path: Optional[str] = None
     auto_restart: bool = True
     timeout: int = 60
-
+
     @classmethod
     def from_dict(cls, name: str, data: dict) -> "ServiceConfig":
         deps = []
         for dep in data.get("depends_on", []):
             deps.append(DependencyConfig.from_dict(dep))
-
+
         return cls(
             name=name,
             readme=data.get("readme", f"{name}/README.md"),
@@ -78,32 +79,32 @@ class EcosystemConfig:
     base_port: int = 8000
     sandbox_root: str = "./.pactown-sandboxes"
     network: str = "pactown-net"
-
+
     @classmethod
     def from_yaml(cls, path: Path) -> "EcosystemConfig":
         """Load ecosystem configuration from YAML file."""
         with open(path) as f:
             data = yaml.safe_load(f)
         return cls.from_dict(data, base_path=path.parent)
-
+
     @classmethod
     def from_dict(cls, data: dict, base_path: Optional[Path] = None) -> "EcosystemConfig":
         """Create configuration from dictionary."""
         services = {}
         base_port = data.get("base_port", 8000)
-
+
         for i, (name, svc_data) in enumerate(data.get("services", {}).items()):
             if svc_data.get("port") is None:
                 svc_data["port"] = base_port + i
             services[name] = ServiceConfig.from_dict(name, svc_data)
-
+
         registry_data = data.get("registry", {})
         registry = RegistryConfig(
             url=registry_data.get("url", "http://localhost:8800"),
             auth_token=registry_data.get("auth_token"),
             namespace=registry_data.get("namespace", "default"),
         )
-
+
         return cls(
             name=data.get("name", "unnamed-ecosystem"),
             version=data.get("version", "0.1.0"),
@@ -114,7 +115,7 @@ class EcosystemConfig:
             sandbox_root=data.get("sandbox_root", "./.pactown-sandboxes"),
             network=data.get("network", "pactown-net"),
         )
-
+
     def to_dict(self) -> dict:
         """Convert configuration to dictionary."""
         return {
@@ -143,7 +144,7 @@ class EcosystemConfig:
                 for name, svc in self.services.items()
             },
         }
-
+
     def to_yaml(self, path: Path) -> None:
         """Save configuration to YAML file."""
         with open(path, "w") as f:
pactown/deploy/__init__.py
CHANGED
@@ -1,10 +1,18 @@
-"""Deployment backends for pactown - Docker, Podman, Kubernetes, etc."""
+"""Deployment backends for pactown - Docker, Podman, Kubernetes, Quadlet, etc."""
 
 from .base import DeploymentBackend, DeploymentConfig, DeploymentResult
+from .compose import ComposeGenerator
 from .docker import DockerBackend
-from .podman import PodmanBackend
 from .kubernetes import KubernetesBackend
-from .
+from .podman import PodmanBackend
+from .quadlet import (
+    QuadletBackend,
+    QuadletConfig,
+    QuadletTemplates,
+    QuadletUnit,
+    generate_markdown_service_quadlet,
+    generate_traefik_quadlet,
+)
 
 __all__ = [
     "DeploymentBackend",
@@ -14,4 +22,10 @@ __all__ = [
     "PodmanBackend",
     "KubernetesBackend",
     "ComposeGenerator",
+    "QuadletBackend",
+    "QuadletConfig",
+    "QuadletTemplates",
+    "QuadletUnit",
+    "generate_traefik_quadlet",
+    "generate_markdown_service_quadlet",
 ]
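Based only on the names re-exported above, a minimal sketch of how the widened `pactown.deploy` API might be consumed. Only the import names and the `for_production()` factory appear in this diff; the `QuadletBackend(config)` constructor call is an assumption inferred from `DeploymentBackend.__init__` in base.py below.

```python
# Hypothetical usage of the new re-exports; constructor signature is assumed,
# not confirmed by this diff.
from pactown.deploy import DeploymentConfig, QuadletBackend

config = DeploymentConfig.for_production()   # factory shown in base.py
backend = QuadletBackend(config)             # assumes the DeploymentBackend(config) pattern

if backend.is_available():                   # abstract method on DeploymentBackend
    print(backend.runtime_type)
```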
pactown/deploy/base.py
CHANGED
@@ -6,7 +6,7 @@ from abc import ABC, abstractmethod
 from dataclasses import dataclass, field
 from enum import Enum
 from pathlib import Path
-from typing import
+from typing import Any, Optional
 
 
 class RuntimeType(Enum):
@@ -28,44 +28,44 @@ class DeploymentMode(Enum):
 @dataclass
 class DeploymentConfig:
     """Configuration for deployment."""
-
+
     # Runtime settings
     runtime: RuntimeType = RuntimeType.LOCAL
     mode: DeploymentMode = DeploymentMode.DEVELOPMENT
-
+
     # Container settings
     registry: str = ""  # Container registry URL
     namespace: str = "default"  # K8s namespace or project name
     image_prefix: str = "pactown"  # Image name prefix
-
+
     # Network settings
     network_name: str = "pactown-net"  # Container network name
     expose_ports: bool = True  # Expose ports to host
     use_internal_dns: bool = True  # Use container DNS for service discovery
-
+
     # Security settings
     rootless: bool = True  # Use rootless containers (Podman)
     read_only_fs: bool = False  # Read-only filesystem
     no_new_privileges: bool = True  # No new privileges
     drop_capabilities: list[str] = field(default_factory=lambda: ["ALL"])
     add_capabilities: list[str] = field(default_factory=list)
-
+
     # Resource limits
     memory_limit: str = "512m"  # Memory limit per service
     cpu_limit: str = "0.5"  # CPU limit per service
-
+
     # Health check settings
     health_check_interval: str = "30s"
     health_check_timeout: str = "10s"
     health_check_retries: int = 3
-
+
     # Persistence
     volumes_path: Optional[Path] = None  # Path for persistent volumes
-
+
     # Labels and annotations
     labels: dict[str, str] = field(default_factory=dict)
     annotations: dict[str, str] = field(default_factory=dict)
-
+
     @classmethod
     def for_production(cls) -> "DeploymentConfig":
         """Create production-ready configuration."""
@@ -80,7 +80,7 @@ class DeploymentConfig:
             health_check_interval="10s",
             health_check_retries=5,
         )
-
+
     @classmethod
     def for_development(cls) -> "DeploymentConfig":
         """Create development configuration."""
@@ -107,21 +107,21 @@ class DeploymentResult:
 
 class DeploymentBackend(ABC):
     """Abstract base class for deployment backends."""
-
+
     def __init__(self, config: DeploymentConfig):
         self.config = config
-
+
     @property
     @abstractmethod
     def runtime_type(self) -> RuntimeType:
         """Return the runtime type."""
         pass
-
+
     @abstractmethod
     def is_available(self) -> bool:
         """Check if the runtime is available."""
         pass
-
+
     @abstractmethod
     def build_image(
         self,
@@ -132,7 +132,7 @@ class DeploymentBackend(ABC):
     ) -> DeploymentResult:
         """Build a container image."""
         pass
-
+
     @abstractmethod
     def push_image(
         self,
@@ -141,7 +141,7 @@ class DeploymentBackend(ABC):
     ) -> DeploymentResult:
         """Push image to registry."""
         pass
-
+
     @abstractmethod
     def deploy(
         self,
@@ -153,22 +153,22 @@ class DeploymentBackend(ABC):
     ) -> DeploymentResult:
         """Deploy a service."""
         pass
-
+
     @abstractmethod
     def stop(self, service_name: str) -> DeploymentResult:
         """Stop a deployed service."""
         pass
-
+
     @abstractmethod
     def logs(self, service_name: str, tail: int = 100) -> str:
         """Get logs from a service."""
         pass
-
+
     @abstractmethod
     def status(self, service_name: str) -> dict[str, Any]:
         """Get status of a service."""
         pass
-
+
     def generate_dockerfile(
         self,
         service_name: str,
@@ -179,11 +179,11 @@ class DeploymentBackend(ABC):
         dockerfile_content = self._create_dockerfile(
             sandbox_path, base_image
         )
-
+
         dockerfile_path = sandbox_path / "Dockerfile"
         dockerfile_path.write_text(dockerfile_content)
         return dockerfile_path
-
+
     def _create_dockerfile(
         self,
         sandbox_path: Path,
@@ -192,13 +192,13 @@ class DeploymentBackend(ABC):
         """Create Dockerfile content."""
         # Check for requirements.txt
         has_requirements = (sandbox_path / "requirements.txt").exists()
-
+
         # Check for package.json (Node.js)
         has_package_json = (sandbox_path / "package.json").exists()
-
+
         if has_package_json:
             return self._create_node_dockerfile(sandbox_path)
-
+
         # Default Python Dockerfile
         lines = [
             f"FROM {base_image}",
@@ -209,7 +209,7 @@ class DeploymentBackend(ABC):
             "RUN useradd -m -u 1000 appuser",
             "",
         ]
-
+
         if has_requirements:
             lines.extend([
                 "# Install dependencies",
@@ -217,7 +217,7 @@ class DeploymentBackend(ABC):
                 "RUN pip install --no-cache-dir -r requirements.txt",
                 "",
             ])
-
+
         lines.extend([
             "# Copy application",
             "COPY . .",
@@ -227,14 +227,16 @@ class DeploymentBackend(ABC):
             "",
             "# Health check",
             'HEALTHCHECK --interval=30s --timeout=10s --retries=3 \\',
-            ' CMD
+            ' CMD python -c "import os,urllib.request; ' +
+            "port=os.environ.get('MARKPACT_PORT') or os.environ.get('PORT','8000'); " +
+            "urllib.request.urlopen('http://localhost:%s/health' % port, timeout=5)\"",
             "",
             "# Default command",
             'CMD ["python", "main.py"]',
         ])
-
+
         return "\n".join(lines)
-
+
     def _create_node_dockerfile(self, sandbox_path: Path) -> str:
         """Create Dockerfile for Node.js service."""
         return """FROM node:20-slim
@@ -246,7 +248,7 @@ RUN useradd -m -u 1000 appuser
 
 # Install dependencies
 COPY package*.json ./
-RUN npm ci --only=production
+RUN if [ -f package-lock.json ]; then npm ci --only=production; else npm install --only=production; fi
 
 # Copy application
 COPY . .
@@ -256,7 +258,7 @@ USER appuser
 
 # Health check
 HEALTHCHECK --interval=30s --timeout=10s --retries=3 \\
-  CMD
+  CMD node -e "require('http').get('http://localhost:'+(process.env.MARKPACT_PORT||3000)+'/health',res=>process.exit(res.statusCode===200?0:1)).on('error',()=>process.exit(1));"
 
 # Default command
 CMD ["node", "server.js"]
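For reference, the Python health check that the generated Dockerfile now embeds is equivalent to the following standalone script (the port fallback order comes straight from the diff; the /health path is whatever endpoint the service itself exposes):

```python
# Equivalent of the HEALTHCHECK one-liner added above: probe /health on the
# service port, preferring MARKPACT_PORT, then PORT, then 8000.
import os
import urllib.request

port = os.environ.get("MARKPACT_PORT") or os.environ.get("PORT", "8000")
# Any exception (connection refused, timeout, HTTP error) propagates, so the
# process exits non-zero and the container is marked unhealthy.
urllib.request.urlopen("http://localhost:%s/health" % port, timeout=5)
```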
pactown/deploy/compose.py
CHANGED
@@ -2,10 +2,11 @@
 
 from __future__ import annotations
 
-import
+from dataclasses import dataclass
 from pathlib import Path
 from typing import Optional
-
+
+import yaml
 
 from ..config import EcosystemConfig, ServiceConfig
 from .base import DeploymentConfig, DeploymentMode
@@ -30,13 +31,13 @@ class ComposeService:
 class ComposeGenerator:
     """
     Generate Docker Compose / Podman Compose files for pactown ecosystems.
-
+
     Supports:
     - Docker Compose v3.8+
    - Podman Compose
    - Docker Swarm mode
    """
-
+
    def __init__(
        self,
        ecosystem: EcosystemConfig,
@@ -46,7 +47,7 @@ class ComposeGenerator:
        self.ecosystem = ecosystem
        self.deploy_config = deploy_config
        self.base_path = Path(base_path)
-
+
    def generate(
        self,
        output_path: Optional[Path] = None,
@@ -54,11 +55,11 @@ class ComposeGenerator:
    ) -> dict:
        """
        Generate docker-compose.yaml content.
-
+
        Args:
            output_path: Optional path to write the file
            include_registry: Include pactown registry service
-
+
        Returns:
            Compose file as dict
        """
@@ -72,39 +73,39 @@ class ComposeGenerator:
                },
            },
        }
-
+
        # Add volumes section if needed
        volumes = {}
-
+
        for name, service in self.ecosystem.services.items():
            compose_service = self._create_service(name, service)
            compose["services"][name] = compose_service
-
+
            # Check for volume mounts
            if self.deploy_config.volumes_path:
                volume_name = f"{name}-data"
                volumes[volume_name] = {"driver": "local"}
-
+
        if volumes:
            compose["volumes"] = volumes
-
+
        # Add registry service if requested
        if include_registry:
            compose["services"]["registry"] = self._create_registry_service()
-
+
        # Write to file if path provided
        if output_path:
            output_path = Path(output_path)
            with open(output_path, "w") as f:
                yaml.dump(compose, f, default_flow_style=False, sort_keys=False)
-
+
        return compose
-
+
    def _create_service(self, name: str, service: ServiceConfig) -> dict:
        """Create compose service definition."""
-
+        self.base_path / service.readme
        sandbox_path = self.base_path / self.ecosystem.sandbox_root / name
-
+
        svc = {
            "build": {
                "context": str(sandbox_path),
@@ -115,16 +116,16 @@ class ComposeGenerator:
            "restart": "unless-stopped",
            "networks": [self.deploy_config.network_name],
        }
-
+
        # Ports
        if service.port and self.deploy_config.expose_ports:
            svc["ports"] = [f"{service.port}:{service.port}"]
-
+
        # Environment
        env = {"SERVICE_NAME": name}
        if service.port:
            env["MARKPACT_PORT"] = str(service.port)
-
+
        # Add dependency URLs
        for dep in service.depends_on:
            dep_service = self.ecosystem.services.get(dep.name)
@@ -132,19 +133,19 @@ class ComposeGenerator:
                env_key = dep.name.upper().replace("-", "_")
                # Use container DNS name
                env[f"{env_key}_URL"] = f"http://{dep.name}:{dep_service.port}"
-
+
        env.update(service.env)
        svc["environment"] = env
-
+
        # Dependencies
        if service.depends_on:
            svc["depends_on"] = {}
            for dep in service.depends_on:
                if dep.name in self.ecosystem.services:
-
-
-                }
-
+                    dep_svc = self.ecosystem.services[dep.name]
+                    condition = "service_healthy" if dep_svc.health_check else "service_started"
+                    svc["depends_on"][dep.name] = {"condition": condition}
+
        # Health check
        if service.health_check:
            svc["healthcheck"] = {
@@ -154,7 +155,7 @@ class ComposeGenerator:
                "retries": self.deploy_config.health_check_retries,
                "start_period": "10s",
            }
-
+
        # Production settings
        if self.deploy_config.mode == DeploymentMode.PRODUCTION:
            svc["deploy"] = {
@@ -175,28 +176,28 @@ class ComposeGenerator:
                    "window": "120s",
                },
            }
-
+
        # Security options
        svc["security_opt"] = ["no-new-privileges:true"]
-
+
        if self.deploy_config.read_only_fs:
            svc["read_only"] = True
            svc["tmpfs"] = ["/tmp"]
-
+
        svc["cap_drop"] = self.deploy_config.drop_capabilities
-
+
        if self.deploy_config.add_capabilities:
            svc["cap_add"] = self.deploy_config.add_capabilities
-
+
        # Labels
        svc["labels"] = {
            "pactown.ecosystem": self.ecosystem.name,
            "pactown.service": name,
            **self.deploy_config.labels,
        }
-
+
        return svc
-
+
    def _create_registry_service(self) -> dict:
        """Create pactown registry service."""
        return {
@@ -217,7 +218,7 @@ class ComposeGenerator:
                "retries": 3,
            },
        }
-
+
    def generate_override(
        self,
        output_path: Optional[Path] = None,
@@ -225,7 +226,7 @@ class ComposeGenerator:
    ) -> dict:
        """
        Generate docker-compose.override.yaml for development.
-
+
        This file is automatically merged with docker-compose.yaml
        and provides development-specific settings.
        """
@@ -233,37 +234,37 @@ class ComposeGenerator:
            "version": "3.8",
            "services": {},
        }
-
+
        for name, service in self.ecosystem.services.items():
            sandbox_path = self.base_path / self.ecosystem.sandbox_root / name
-
+
            svc = {}
-
+
            if dev_mode:
                # Mount source code for hot reload
                svc["volumes"] = [
                    f"{sandbox_path}:/app:z",
                ]
-
+
                # Enable debug mode
                svc["environment"] = {
                    "DEBUG": "true",
                    "LOG_LEVEL": "debug",
                }
-
+
                # Remove resource limits for development
                svc["deploy"] = None
-
+
            if svc:
                override["services"][name] = svc
-
+
        if output_path:
            output_path = Path(output_path)
            with open(output_path, "w") as f:
                yaml.dump(override, f, default_flow_style=False, sort_keys=False)
-
+
        return override
-
+
    def generate_production(
        self,
        output_path: Optional[Path] = None,
@@ -273,10 +274,10 @@ class ComposeGenerator:
        Generate docker-compose.prod.yaml for production/swarm deployment.
        """
        compose = self.generate()
-
+
        for name in compose["services"]:
            svc = compose["services"][name]
-
+
            # Add swarm-specific deploy config
            svc["deploy"] = {
                "mode": "replicated",
@@ -298,12 +299,12 @@ class ComposeGenerator:
                },
            },
        }
-
+
        if output_path:
            output_path = Path(output_path)
            with open(output_path, "w") as f:
                yaml.dump(compose, f, default_flow_style=False, sort_keys=False)
-
+
        return compose
 
 
@@ -314,38 +315,38 @@ def generate_compose_from_config(
 ) -> dict:
    """
    Convenience function to generate compose files from pactown config.
-
+
    Args:
        config_path: Path to saas.pactown.yaml
        output_dir: Directory to write compose files
        production: Generate production configuration
-
+
    Returns:
        Generated compose dict
    """
    from ..config import load_config
-
+
    config_path = Path(config_path)
    ecosystem = load_config(config_path)
-
+
    deploy_config = (
        DeploymentConfig.for_production() if production
        else DeploymentConfig.for_development()
    )
-
+
    generator = ComposeGenerator(
        ecosystem=ecosystem,
        deploy_config=deploy_config,
        base_path=config_path.parent,
    )
-
+
    output_dir = output_dir or config_path.parent
-
+
    # Generate main compose file
    compose = generator.generate(
        output_path=output_dir / "docker-compose.yaml"
    )
-
+
    # Generate override for development
    if not production:
        generator.generate_override(
@@ -355,5 +356,5 @@ def generate_compose_from_config(
        generator.generate_production(
            output_path=output_dir / "docker-compose.prod.yaml"
        )
-
+
    return compose
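A sketch of how the module-level helper shown above might be invoked. The function name, its parameters (config_path, output_dir, production), and the output file names (docker-compose.yaml, docker-compose.override.yaml, docker-compose.prod.yaml) come from the diff; the example paths are placeholders and an actual saas.pactown.yaml must exist for the call to run.

```python
# Hypothetical invocation of generate_compose_from_config from this diff.
# It loads the ecosystem config, picks a production or development
# DeploymentConfig, and writes compose files into output_dir.
from pathlib import Path

from pactown.deploy.compose import generate_compose_from_config

compose = generate_compose_from_config(
    config_path=Path("saas.pactown.yaml"),  # placeholder path
    output_dir=Path("."),
    production=True,  # also writes docker-compose.prod.yaml
)
print(sorted(compose["services"]))
```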