hopx-ai 0.1.10__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of hopx-ai might be problematic.
- hopx_ai/__init__.py +114 -0
- hopx_ai/_agent_client.py +373 -0
- hopx_ai/_async_client.py +230 -0
- hopx_ai/_client.py +230 -0
- hopx_ai/_generated/__init__.py +22 -0
- hopx_ai/_generated/models.py +502 -0
- hopx_ai/_utils.py +9 -0
- hopx_ai/_ws_client.py +141 -0
- hopx_ai/async_sandbox.py +427 -0
- hopx_ai/cache.py +97 -0
- hopx_ai/commands.py +174 -0
- hopx_ai/desktop.py +1227 -0
- hopx_ai/env_vars.py +242 -0
- hopx_ai/errors.py +249 -0
- hopx_ai/files.py +489 -0
- hopx_ai/models.py +274 -0
- hopx_ai/models_updated.py +270 -0
- hopx_ai/sandbox.py +1439 -0
- hopx_ai/template/__init__.py +47 -0
- hopx_ai/template/build_flow.py +540 -0
- hopx_ai/template/builder.py +300 -0
- hopx_ai/template/file_hasher.py +81 -0
- hopx_ai/template/ready_checks.py +106 -0
- hopx_ai/template/tar_creator.py +122 -0
- hopx_ai/template/types.py +199 -0
- hopx_ai/terminal.py +164 -0
- hopx_ai-0.1.10.dist-info/METADATA +460 -0
- hopx_ai-0.1.10.dist-info/RECORD +29 -0
- hopx_ai-0.1.10.dist-info/WHEEL +4 -0
hopx_ai/template/builder.py
@@ -0,0 +1,300 @@
"""
Template Builder - Fluent API for building templates
"""

import json
from pathlib import Path
from typing import List, Dict, Optional, Union, Any
from .types import Step, StepType, CopyOptions, ReadyCheck, BuildOptions, BuildResult, RegistryAuth, GCPRegistryAuth, AWSRegistryAuth
from .build_flow import build_template


class Template:
    """Fluent API for building templates"""

    def __init__(self):
        self.steps: List[Step] = []
        self.start_cmd: Optional[str] = None
        self.ready_check: Optional[ReadyCheck] = None

    # ==================== Base Images ====================

    def from_ubuntu_image(self, version: str) -> 'Template':
        """Start from Ubuntu base image"""
        self.steps.append(Step(
            type=StepType.FROM,
            args=[f"ubuntu:{version}"]
        ))
        return self

    def from_python_image(self, version: str) -> 'Template':
        """Start from Python base image"""
        self.steps.append(Step(
            type=StepType.FROM,
            args=[f"python:{version}"]
        ))
        return self

    def from_node_image(self, version: str) -> 'Template':
        """Start from Node.js base image"""
        self.steps.append(Step(
            type=StepType.FROM,
            args=[f"node:{version}"]
        ))
        return self

    def from_image(self, image: str, auth: Optional[RegistryAuth] = None) -> 'Template':
        """Start from any Docker image (with optional authentication)"""
        self.steps.append(Step(
            type=StepType.FROM,
            args=[image],
            registry_auth=auth
        ))
        return self

    def from_gcp_registry(self, image: str, auth: GCPRegistryAuth) -> 'Template':
        """Start from GCP Container Registry image"""
        # Parse service account JSON
        if isinstance(auth.service_account_json, str):
            # It's a file path
            with open(auth.service_account_json, 'r') as f:
                service_account = json.load(f)
        else:
            # It's already a dict
            service_account = auth.service_account_json

        # GCP uses _json_key as username
        registry_auth = RegistryAuth(
            username='_json_key',
            password=json.dumps(service_account)
        )

        return self.from_image(image, registry_auth)

    def from_aws_registry(self, image: str, auth: AWSRegistryAuth) -> 'Template':
        """Start from AWS ECR image"""
        self.steps.append(Step(
            type=StepType.FROM,
            args=[image],
            aws_auth=auth
        ))
        return self

    # ==================== File Operations ====================

    def copy(
        self,
        src: Union[str, List[str]],
        dest: str,
        options: Optional[CopyOptions] = None
    ) -> 'Template':
        """Copy files to the template"""
        sources = src if isinstance(src, list) else [src]

        self.steps.append(Step(
            type=StepType.COPY,
            args=[','.join(sources), dest, str(options or {})]
        ))
        return self

    # ==================== Commands ====================

    def run_cmd(self, cmd: str) -> 'Template':
        """Run a command during build"""
        self.steps.append(Step(
            type=StepType.RUN,
            args=[cmd]
        ))
        return self

    # ==================== Environment ====================

    def set_env(self, key: str, value: str) -> 'Template':
        """Set an environment variable"""
        self.steps.append(Step(
            type=StepType.ENV,
            args=[key, value]
        ))
        return self

    def set_envs(self, vars: Dict[str, str]) -> 'Template':
        """Set multiple environment variables"""
        for key, value in vars.items():
            self.set_env(key, value)
        return self

    # ==================== Working Directory ====================

    def set_workdir(self, directory: str) -> 'Template':
        """Set working directory"""
        self.steps.append(Step(
            type=StepType.WORKDIR,
            args=[directory]
        ))
        return self

    # ==================== User ====================

    def set_user(self, user: str) -> 'Template':
        """Set user"""
        self.steps.append(Step(
            type=StepType.USER,
            args=[user]
        ))
        return self

    # ==================== Smart Helpers ====================

    def apt_install(self, *packages: Union[str, List[str]]) -> 'Template':
        """
        Install packages with apt

        Examples:
            .apt_install("curl", "git", "vim")       # Multiple args
            .apt_install(["curl", "git", "vim"])     # List
            .apt_install("curl").apt_install("git")  # Chained
        """
        # Flatten args
        pkg_list = []
        for pkg in packages:
            if isinstance(pkg, list):
                pkg_list.extend(pkg)
            else:
                pkg_list.append(pkg)

        if not pkg_list:
            raise ValueError("apt_install requires at least one package")

        pkgs = ' '.join(pkg_list)
        self.run_cmd(
            f"apt-get update -qq && DEBIAN_FRONTEND=noninteractive apt-get install -y {pkgs}"
        )
        return self

    def pip_install(self, *packages: Union[str, List[str], None]) -> 'Template':
        """
        Install Python packages with pip

        Examples:
            .pip_install("numpy", "pandas")              # Multiple args
            .pip_install(["numpy", "pandas"])            # List
            .pip_install("numpy").pip_install("pandas")  # Chained
            .pip_install()                               # Install from requirements.txt
        """
        # Handle no args (requirements.txt)
        if not packages:
            self.run_cmd("/usr/local/bin/pip3 install --no-cache-dir -r requirements.txt")
            return self

        # Flatten args
        pkg_list = []
        for pkg in packages:
            if pkg is None:
                continue
            if isinstance(pkg, list):
                pkg_list.extend(pkg)
            else:
                pkg_list.append(pkg)

        if not pkg_list:
            raise ValueError("pip_install requires at least one package or no args for requirements.txt")

        pkgs = ' '.join(pkg_list)
        # Use full path for pip (works after systemd restart)
        self.run_cmd(f"/usr/local/bin/pip3 install --no-cache-dir {pkgs}")
        return self

    def npm_install(self, *packages: Union[str, List[str], None]) -> 'Template':
        """
        Install Node packages with npm

        Examples:
            .npm_install("typescript", "tsx")              # Multiple args
            .npm_install(["typescript", "tsx"])            # List
            .npm_install("typescript").npm_install("tsx")  # Chained
            .npm_install()                                 # Install from package.json
        """
        # Handle no args (package.json)
        if not packages:
            self.run_cmd("/usr/bin/npm install")
            return self

        # Flatten args
        pkg_list = []
        for pkg in packages:
            if pkg is None:
                continue
            if isinstance(pkg, list):
                pkg_list.extend(pkg)
            else:
                pkg_list.append(pkg)

        if not pkg_list:
            raise ValueError("npm_install requires at least one package or no args for package.json")

        pkgs = ' '.join(pkg_list)
        # Use full path for npm (works after systemd restart)
        self.run_cmd(f"/usr/bin/npm install -g {pkgs}")
        return self

    def go_install(self, packages: List[str]) -> 'Template':
        """Install Go packages"""
        for pkg in packages:
            self.run_cmd(f"go install {pkg}")
        return self

    def cargo_install(self, packages: List[str]) -> 'Template':
        """Install Rust packages with cargo"""
        for pkg in packages:
            self.run_cmd(f"cargo install {pkg}")
        return self

    def git_clone(self, url: str, dest: str) -> 'Template':
        """Clone a git repository"""
        self.run_cmd(f"git clone {url} {dest}")
        return self

    # ==================== Caching ====================

    def skip_cache(self) -> 'Template':
        """Skip cache for the last step"""
        if self.steps:
            self.steps[-1].skip_cache = True
        return self

    # ==================== Start Command ====================

    def set_start_cmd(
        self,
        cmd: str,
        ready: Optional[ReadyCheck] = None
    ) -> 'Template':
        """Set the start command and ready check"""
        self.start_cmd = cmd
        self.ready_check = ready
        return self

    # ==================== Build ====================

    def get_steps(self) -> List[Step]:
        """Get all steps"""
        return self.steps

    def get_start_cmd(self) -> Optional[str]:
        """Get start command"""
        return self.start_cmd

    def get_ready_check(self) -> Optional[ReadyCheck]:
        """Get ready check"""
        return self.ready_check

    @staticmethod
    async def build(template: 'Template', options: BuildOptions) -> BuildResult:
        """Build the template"""
        return await build_template(template, options)


def create_template() -> Template:
    """Factory function to create a new template"""
    return Template()
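Taken together, the fluent methods above chain into a single Template definition that is then passed to Template.build(). A minimal usage sketch follows, assuming the module paths mirror the file layout in this wheel; the BuildOptions keyword arguments (name, context_path), the start command, and the port are illustrative guesses, since BuildOptions is defined in types.py, which is not part of this hunk.

import asyncio

# Module paths follow the file layout above; BuildOptions kwargs are hypothetical.
from hopx_ai.template.builder import Template
from hopx_ai.template.ready_checks import wait_for_port
from hopx_ai.template.types import BuildOptions


async def main() -> None:
    template = (
        Template()
        .from_python_image("3.12")
        .apt_install("curl", "git")
        .copy("app/", "/app/")
        .set_workdir("/app")
        .pip_install()  # no args: install from requirements.txt
        .set_start_cmd(
            "python server.py",
            ready=wait_for_port(8000),  # helper defined in ready_checks.py below
        )
    )

    # Hypothetical fields -- consult hopx_ai/template/types.py for the real signature.
    options = BuildOptions(name="my-template", context_path=".")
    result = await Template.build(template, options)
    print(result)


asyncio.run(main())
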
hopx_ai/template/file_hasher.py
@@ -0,0 +1,81 @@
"""
File Hasher - Calculate SHA256 hash for COPY steps
"""

import hashlib
import os
from pathlib import Path
from typing import List, Tuple
import glob as glob_module


class FileHasher:
    """Calculate SHA256 hashes for files"""

    async def calculate_hash(self, src: str, dest: str, context_path: str) -> str:
        """
        Calculate SHA256 hash of files and metadata

        Args:
            src: Source pattern (e.g., "app/")
            dest: Destination path (e.g., "/app/")
            context_path: Base path for resolving src

        Returns:
            SHA256 hash string
        """
        hasher = hashlib.sha256()

        # Hash the COPY command
        hasher.update(f"COPY {src} {dest}".encode('utf-8'))

        # Get all files matching the pattern
        pattern = os.path.join(context_path, src)
        files = glob_module.glob(pattern, recursive=True)

        # Sort files for consistent hashing
        files = sorted(files)

        # Hash each file
        for file_path in files:
            if os.path.isfile(file_path):
                # Relative path from context
                relative_path = os.path.relpath(file_path, context_path)
                hasher.update(relative_path.encode('utf-8'))

                # File stats
                stat = os.stat(file_path)
                hasher.update(str(stat.st_mode).encode('utf-8'))
                hasher.update(str(stat.st_size).encode('utf-8'))
                hasher.update(str(int(stat.st_mtime * 1000)).encode('utf-8'))

                # File content
                with open(file_path, 'rb') as f:
                    while chunk := f.read(8192):
                        hasher.update(chunk)

        return hasher.hexdigest()

    async def calculate_multi_hash(
        self,
        sources: List[Tuple[str, str]],
        context_path: str
    ) -> str:
        """
        Calculate hash for multiple sources

        Args:
            sources: List of (src, dest) tuples
            context_path: Base path

        Returns:
            Combined SHA256 hash
        """
        hasher = hashlib.sha256()

        for src, dest in sources:
            file_hash = await self.calculate_hash(src, dest, context_path)
            hasher.update(file_hash.encode('utf-8'))

        return hasher.hexdigest()
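The digest covers the COPY command string plus each matched file's relative path, mode, size, mtime, and contents, so it changes whenever any of those change. A short driving sketch, assuming a local app/ directory and a requirements.txt under the current working directory (both are placeholder paths):

import asyncio

from hopx_ai.template.file_hasher import FileHasher


async def main() -> None:
    hasher = FileHasher()

    # Digest for one COPY source; unchanged files (content + metadata) give the
    # same digest, which is what makes it usable as a cache key for COPY steps.
    single = await hasher.calculate_hash("app/*", "/app/", context_path=".")

    # Combined digest over several (src, dest) pairs.
    combined = await hasher.calculate_multi_hash(
        [("app/*", "/app/"), ("requirements.txt", "/app/requirements.txt")],
        context_path=".",
    )
    print(single)
    print(combined)


asyncio.run(main())
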
hopx_ai/template/ready_checks.py
@@ -0,0 +1,106 @@
"""
Ready Check Helpers
"""

from .types import ReadyCheck, ReadyCheckType


def wait_for_port(port: int, timeout: int = 30000, interval: int = 2000) -> ReadyCheck:
    """
    Wait for TCP port to be open

    Args:
        port: Port number to check
        timeout: Timeout in milliseconds (default: 30000)
        interval: Check interval in milliseconds (default: 2000)

    Returns:
        ReadyCheck configuration
    """
    return ReadyCheck(
        type=ReadyCheckType.PORT,
        port=port,
        timeout=timeout,
        interval=interval,
    )


def wait_for_url(url: str, timeout: int = 30000, interval: int = 2000) -> ReadyCheck:
    """
    Wait for HTTP URL to return 200

    Args:
        url: URL to check
        timeout: Timeout in milliseconds (default: 30000)
        interval: Check interval in milliseconds (default: 2000)

    Returns:
        ReadyCheck configuration
    """
    return ReadyCheck(
        type=ReadyCheckType.URL,
        url=url,
        timeout=timeout,
        interval=interval,
    )


def wait_for_file(path: str, timeout: int = 30000, interval: int = 2000) -> ReadyCheck:
    """
    Wait for file to exist

    Args:
        path: File path to check
        timeout: Timeout in milliseconds (default: 30000)
        interval: Check interval in milliseconds (default: 2000)

    Returns:
        ReadyCheck configuration
    """
    return ReadyCheck(
        type=ReadyCheckType.FILE,
        path=path,
        timeout=timeout,
        interval=interval,
    )


def wait_for_process(process_name: str, timeout: int = 30000, interval: int = 2000) -> ReadyCheck:
    """
    Wait for process to be running

    Args:
        process_name: Process name to check
        timeout: Timeout in milliseconds (default: 30000)
        interval: Check interval in milliseconds (default: 2000)

    Returns:
        ReadyCheck configuration
    """
    return ReadyCheck(
        type=ReadyCheckType.PROCESS,
        process_name=process_name,
        timeout=timeout,
        interval=interval,
    )


def wait_for_command(command: str, timeout: int = 30000, interval: int = 2000) -> ReadyCheck:
    """
    Wait for command to exit with code 0

    Args:
        command: Command to execute
        timeout: Timeout in milliseconds (default: 30000)
        interval: Check interval in milliseconds (default: 2000)

    Returns:
        ReadyCheck configuration
    """
    return ReadyCheck(
        type=ReadyCheckType.COMMAND,
        command=command,
        timeout=timeout,
        interval=interval,
    )
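These helpers construct the ReadyCheck that Template.set_start_cmd() in builder.py accepts via its ready parameter. A sketch of wiring one in; the uvicorn command, port 8080, and the /health URL are placeholder values, not anything prescribed by the package:

from hopx_ai.template.builder import Template
from hopx_ai.template.ready_checks import wait_for_port, wait_for_url

# Consider the template ready once TCP port 8080 accepts connections,
# polling every 2s for up to 60s (all values are in milliseconds).
template = (
    Template()
    .from_python_image("3.12")
    .pip_install("fastapi", "uvicorn")
    .set_start_cmd(
        "uvicorn app:app --host 0.0.0.0 --port 8080",
        ready=wait_for_port(8080, timeout=60000, interval=2000),
    )
)

# Alternative: wait for an HTTP 200 from a health endpoint instead.
health_ready = wait_for_url("http://localhost:8080/health")
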
hopx_ai/template/tar_creator.py
@@ -0,0 +1,122 @@
"""
Tar Creator - Create tar.gz archives
"""

import os
import tarfile
import tempfile
import glob as glob_module
from typing import List, BinaryIO
from dataclasses import dataclass


@dataclass
class TarResult:
    """Result of tar creation"""
    file_path: str
    size: int

    def open(self, mode: str = 'rb') -> BinaryIO:
        """Open the tar file"""
        return open(self.file_path, mode)

    def cleanup(self):
        """Delete the temporary file"""
        if os.path.exists(self.file_path):
            os.unlink(self.file_path)


class TarCreator:
    """Create tar.gz archives"""

    async def create_tar_gz(self, src: str, context_path: str) -> TarResult:
        """
        Create tar.gz from files

        Args:
            src: Source pattern (e.g., "app/")
            context_path: Base path for resolving src

        Returns:
            TarResult with file path and size
        """
        # Get all files matching the pattern
        pattern = os.path.join(context_path, src)
        files = glob_module.glob(pattern, recursive=True)

        # Convert to relative paths
        relative_paths = []
        for file_path in files:
            if os.path.exists(file_path):
                relative_path = os.path.relpath(file_path, context_path)
                relative_paths.append(relative_path)

        # Create temporary tar.gz file
        fd, tmp_file = tempfile.mkstemp(suffix='.tar.gz', prefix='tar-')
        os.close(fd)

        try:
            # Create tar.gz
            with tarfile.open(tmp_file, 'w:gz') as tar:
                for relative_path in relative_paths:
                    full_path = os.path.join(context_path, relative_path)
                    tar.add(full_path, arcname=relative_path)

            # Get file size
            size = os.path.getsize(tmp_file)

            return TarResult(file_path=tmp_file, size=size)
        except Exception as e:
            # Cleanup on error
            if os.path.exists(tmp_file):
                os.unlink(tmp_file)
            raise e

    async def create_multi_tar_gz(
        self,
        sources: List[str],
        context_path: str
    ) -> TarResult:
        """
        Create tar.gz from multiple sources

        Args:
            sources: List of source patterns
            context_path: Base path

        Returns:
            TarResult with file path and size
        """
        all_files = set()

        # Collect all files from all sources
        for src in sources:
            pattern = os.path.join(context_path, src)
            files = glob_module.glob(pattern, recursive=True)

            for file_path in files:
                if os.path.exists(file_path):
                    relative_path = os.path.relpath(file_path, context_path)
                    all_files.add(relative_path)

        # Create temporary tar.gz file
        fd, tmp_file = tempfile.mkstemp(suffix='.tar.gz', prefix='tar-')
        os.close(fd)

        try:
            # Create tar.gz
            with tarfile.open(tmp_file, 'w:gz') as tar:
                for relative_path in sorted(all_files):
                    full_path = os.path.join(context_path, relative_path)
                    tar.add(full_path, arcname=relative_path)

            # Get file size
            size = os.path.getsize(tmp_file)

            return TarResult(file_path=tmp_file, size=size)
        except Exception as e:
            # Cleanup on error
            if os.path.exists(tmp_file):
                os.unlink(tmp_file)
            raise e
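TarResult hands back a temporary archive that the caller is responsible for deleting. A lifecycle sketch, with the "app/*" pattern and the plain read() standing in for whatever upload the build flow actually performs:

import asyncio

from hopx_ai.template.tar_creator import TarCreator


async def main() -> None:
    creator = TarCreator()
    result = await creator.create_tar_gz("app/*", context_path=".")
    try:
        print(f"created {result.file_path} ({result.size} bytes)")
        with result.open() as archive:
            payload = archive.read()  # stand-in for uploading the archive
        print(f"read {len(payload)} bytes")
    finally:
        result.cleanup()  # the temp file persists until cleanup() is called


asyncio.run(main())
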