mdify-cli 2.11.9__py3-none-any.whl → 2.11.10__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- mdify/__init__.py +1 -1
- mdify/cli.py +542 -0
- mdify/container.py +0 -4
- mdify/ssh/__init__.py +11 -0
- mdify/ssh/client.py +408 -0
- mdify/ssh/models.py +470 -0
- mdify/ssh/remote_container.py +237 -0
- mdify/ssh/transfer.py +297 -0
- {mdify_cli-2.11.9.dist-info → mdify_cli-2.11.10.dist-info}/METADATA +192 -4
- mdify_cli-2.11.10.dist-info/RECORD +17 -0
- mdify_cli-2.11.9.dist-info/RECORD +0 -12
- {mdify_cli-2.11.9.dist-info → mdify_cli-2.11.10.dist-info}/WHEEL +0 -0
- {mdify_cli-2.11.9.dist-info → mdify_cli-2.11.10.dist-info}/entry_points.txt +0 -0
- {mdify_cli-2.11.9.dist-info → mdify_cli-2.11.10.dist-info}/licenses/LICENSE +0 -0
- {mdify_cli-2.11.9.dist-info → mdify_cli-2.11.10.dist-info}/top_level.txt +0 -0
mdify/ssh/models.py
ADDED
|
@@ -0,0 +1,470 @@
|
|
|
1
|
+
"""Data models for SSH remote server support."""
|
|
2
|
+
|
|
3
|
+
from dataclasses import dataclass, field
|
|
4
|
+
from datetime import datetime
|
|
5
|
+
from typing import Literal
|
|
6
|
+
from pathlib import Path
|
|
7
|
+
import uuid
|
|
8
|
+
|
|
9
|
+
|
|
10
|
+
class SSHError(Exception):
    """Root of the SSH exception hierarchy for this package."""
|
|
12
|
+
|
|
13
|
+
|
|
14
|
+
class SSHConnectionError(SSHError):
    """Raised when establishing or maintaining an SSH connection fails.

    The message shown to callers includes the target endpoint as
    ``message (host:port)``; the raw parts stay available as attributes.
    """

    def __init__(self, message: str, host: str, port: int):
        super().__init__(f"{message} ({host}:{port})")
        self.message = message
        self.host = host
        self.port = port
|
|
21
|
+
|
|
22
|
+
|
|
23
|
+
class SSHAuthError(SSHConnectionError):
    """Raised when SSH authentication fails (bad password, key, or permissions)."""
|
|
25
|
+
|
|
26
|
+
|
|
27
|
+
class ConfigError(SSHError):
    """Raised when the configuration is invalid or incomplete."""
|
|
29
|
+
|
|
30
|
+
|
|
31
|
+
class ValidationError(SSHError):
    """Raised when a resource validation check fails."""
|
|
33
|
+
|
|
34
|
+
|
|
35
|
+
@dataclass
|
|
36
|
+
class SSHConfig:
|
|
37
|
+
"""SSH connection configuration with precedence-aware merging."""
|
|
38
|
+
|
|
39
|
+
# Required fields
|
|
40
|
+
host: str
|
|
41
|
+
port: int = 22
|
|
42
|
+
username: str = ""
|
|
43
|
+
|
|
44
|
+
# Authentication
|
|
45
|
+
password: str | None = None
|
|
46
|
+
key_file: str | None = None
|
|
47
|
+
key_passphrase: str | None = None
|
|
48
|
+
|
|
49
|
+
# Connection behavior
|
|
50
|
+
timeout: int = 30
|
|
51
|
+
keepalive: int = 60
|
|
52
|
+
compression: bool = False
|
|
53
|
+
|
|
54
|
+
# Remote environment
|
|
55
|
+
work_dir: str = "/tmp/mdify"
|
|
56
|
+
container_runtime: str | None = None
|
|
57
|
+
|
|
58
|
+
# Metadata
|
|
59
|
+
source: str = "cli"
|
|
60
|
+
config_file: str | None = None
|
|
61
|
+
created_at: datetime = field(default_factory=datetime.now)
|
|
62
|
+
|
|
63
|
+
# Validation metadata
|
|
64
|
+
validated: bool = False
|
|
65
|
+
validation_errors: list[str] = field(default_factory=list)
|
|
66
|
+
|
|
67
|
+
def __post_init__(self):
|
|
68
|
+
"""Validate config after initialization."""
|
|
69
|
+
if self.port is None:
|
|
70
|
+
self.port = 22
|
|
71
|
+
if self.timeout is None:
|
|
72
|
+
self.timeout = 30
|
|
73
|
+
if self.keepalive is None:
|
|
74
|
+
self.keepalive = 60
|
|
75
|
+
if self.compression is None:
|
|
76
|
+
self.compression = False
|
|
77
|
+
if self.work_dir is None:
|
|
78
|
+
self.work_dir = "/tmp/mdify"
|
|
79
|
+
if self.username is None:
|
|
80
|
+
self.username = ""
|
|
81
|
+
if not self.host:
|
|
82
|
+
raise ConfigError("host is required")
|
|
83
|
+
if not 1 <= self.port <= 65535:
|
|
84
|
+
raise ConfigError(f"port must be 1-65535, got {self.port}")
|
|
85
|
+
if self.timeout < 1:
|
|
86
|
+
raise ConfigError(f"timeout must be positive, got {self.timeout}")
|
|
87
|
+
|
|
88
|
+
@classmethod
|
|
89
|
+
def from_cli_args(cls, args) -> "SSHConfig":
|
|
90
|
+
"""Create SSHConfig from CLI argument namespace.
|
|
91
|
+
|
|
92
|
+
Parameters:
|
|
93
|
+
args: Parsed CLI arguments (argparse.Namespace)
|
|
94
|
+
|
|
95
|
+
Returns:
|
|
96
|
+
SSHConfig instance with source='cli'
|
|
97
|
+
|
|
98
|
+
Raises:
|
|
99
|
+
ConfigError: Invalid configuration
|
|
100
|
+
"""
|
|
101
|
+
# Extract only CLI-provided values (not defaults)
|
|
102
|
+
kwargs = {"source": "cli"}
|
|
103
|
+
|
|
104
|
+
if hasattr(args, "remote_host") and args.remote_host:
|
|
105
|
+
kwargs["host"] = args.remote_host
|
|
106
|
+
if hasattr(args, "remote_port") and args.remote_port:
|
|
107
|
+
kwargs["port"] = args.remote_port
|
|
108
|
+
if hasattr(args, "remote_user") and args.remote_user:
|
|
109
|
+
kwargs["username"] = args.remote_user
|
|
110
|
+
if hasattr(args, "remote_key") and args.remote_key:
|
|
111
|
+
kwargs["key_file"] = args.remote_key
|
|
112
|
+
if hasattr(args, "remote_key_pass_phrase") and args.remote_key_pass_phrase:
|
|
113
|
+
kwargs["key_passphrase"] = args.remote_key_pass_phrase
|
|
114
|
+
if hasattr(args, "remote_timeout") and args.remote_timeout:
|
|
115
|
+
kwargs["timeout"] = args.remote_timeout
|
|
116
|
+
if hasattr(args, "remote_keepalive") and args.remote_keepalive:
|
|
117
|
+
kwargs["keepalive"] = args.remote_keepalive
|
|
118
|
+
if hasattr(args, "remote_work_dir") and args.remote_work_dir:
|
|
119
|
+
kwargs["work_dir"] = args.remote_work_dir
|
|
120
|
+
if hasattr(args, "remote_runtime") and args.remote_runtime:
|
|
121
|
+
kwargs["container_runtime"] = args.remote_runtime
|
|
122
|
+
if hasattr(args, "remote_compression") and args.remote_compression:
|
|
123
|
+
kwargs["compression"] = args.remote_compression
|
|
124
|
+
|
|
125
|
+
# For partial configs (CLI may only override one or two fields),
|
|
126
|
+
# we need a default host to create the object
|
|
127
|
+
if "host" not in kwargs:
|
|
128
|
+
kwargs["host"] = "localhost" # Temporary, will be overridden by merge
|
|
129
|
+
|
|
130
|
+
return cls(**kwargs)
|
|
131
|
+
|
|
132
|
+
@classmethod
|
|
133
|
+
def from_ssh_config(cls, host: str, ssh_config_path: str | None = None) -> "SSHConfig":
|
|
134
|
+
"""Load SSH config for host from ~/.ssh/config.
|
|
135
|
+
|
|
136
|
+
Parameters:
|
|
137
|
+
host: Host alias or hostname to look up
|
|
138
|
+
ssh_config_path: Path to SSH config file (defaults to ~/.ssh/config)
|
|
139
|
+
|
|
140
|
+
Returns:
|
|
141
|
+
SSHConfig instance with source='ssh_config'
|
|
142
|
+
|
|
143
|
+
Raises:
|
|
144
|
+
ConfigError: SSH config file not found or invalid
|
|
145
|
+
"""
|
|
146
|
+
import os
|
|
147
|
+
from pathlib import Path
|
|
148
|
+
|
|
149
|
+
if ssh_config_path is None:
|
|
150
|
+
ssh_config_path = "~/.ssh/config"
|
|
151
|
+
|
|
152
|
+
ssh_config_path = os.path.expanduser(ssh_config_path)
|
|
153
|
+
|
|
154
|
+
if not Path(ssh_config_path).exists():
|
|
155
|
+
# SSH config is optional; return defaults if not found
|
|
156
|
+
return cls(
|
|
157
|
+
host=host,
|
|
158
|
+
source="ssh_config",
|
|
159
|
+
config_file=ssh_config_path,
|
|
160
|
+
)
|
|
161
|
+
|
|
162
|
+
try:
|
|
163
|
+
# For SSH config loading, we'll use a simple approach:
|
|
164
|
+
# Parse the SSH config file ourselves to extract host configuration
|
|
165
|
+
config_data = cls._parse_ssh_config_file(ssh_config_path, host)
|
|
166
|
+
|
|
167
|
+
kwargs = {
|
|
168
|
+
"source": "ssh_config",
|
|
169
|
+
"config_file": ssh_config_path,
|
|
170
|
+
"host": config_data.get("hostname", host),
|
|
171
|
+
}
|
|
172
|
+
|
|
173
|
+
if "port" in config_data:
|
|
174
|
+
kwargs["port"] = int(config_data["port"])
|
|
175
|
+
if "user" in config_data:
|
|
176
|
+
kwargs["username"] = config_data["user"]
|
|
177
|
+
if "identityfile" in config_data:
|
|
178
|
+
# Use first identity file, expand ~ if present
|
|
179
|
+
identity_files = config_data["identityfile"]
|
|
180
|
+
if isinstance(identity_files, list):
|
|
181
|
+
kwargs["key_file"] = os.path.expanduser(identity_files[0])
|
|
182
|
+
else:
|
|
183
|
+
kwargs["key_file"] = os.path.expanduser(identity_files)
|
|
184
|
+
if "connecttimeout" in config_data:
|
|
185
|
+
kwargs["timeout"] = int(config_data["connecttimeout"])
|
|
186
|
+
if "serveraliveinterval" in config_data:
|
|
187
|
+
kwargs["keepalive"] = int(config_data["serveraliveinterval"])
|
|
188
|
+
if "compression" in config_data:
|
|
189
|
+
compression_str = config_data["compression"]
|
|
190
|
+
kwargs["compression"] = compression_str.lower() in ("yes", "true", "1")
|
|
191
|
+
|
|
192
|
+
return cls(**kwargs)
|
|
193
|
+
|
|
194
|
+
except Exception as e:
|
|
195
|
+
raise ConfigError(f"Failed to load SSH config: {e}")
|
|
196
|
+
|
|
197
|
+
@staticmethod
|
|
198
|
+
def _parse_ssh_config_file(config_path: str, target_host: str) -> dict:
|
|
199
|
+
"""Parse SSH config file for a specific host.
|
|
200
|
+
|
|
201
|
+
Parameters:
|
|
202
|
+
config_path: Path to SSH config file
|
|
203
|
+
target_host: Host alias to look for
|
|
204
|
+
|
|
205
|
+
Returns:
|
|
206
|
+
Dictionary of configuration values for the host
|
|
207
|
+
"""
|
|
208
|
+
config_data = {}
|
|
209
|
+
current_hosts = []
|
|
210
|
+
in_target_block = False
|
|
211
|
+
|
|
212
|
+
try:
|
|
213
|
+
with open(config_path, 'r') as f:
|
|
214
|
+
for line in f:
|
|
215
|
+
line = line.strip()
|
|
216
|
+
|
|
217
|
+
# Skip comments and empty lines
|
|
218
|
+
if not line or line.startswith('#'):
|
|
219
|
+
continue
|
|
220
|
+
|
|
221
|
+
# Check for Host directive
|
|
222
|
+
if line.lower().startswith('host '):
|
|
223
|
+
parts = line.split(None, 1)
|
|
224
|
+
if len(parts) == 2:
|
|
225
|
+
hosts = parts[1].split()
|
|
226
|
+
in_target_block = target_host in hosts or '*' in hosts
|
|
227
|
+
if not in_target_block:
|
|
228
|
+
config_data = {}
|
|
229
|
+
continue
|
|
230
|
+
|
|
231
|
+
# Parse config directives
|
|
232
|
+
if in_target_block:
|
|
233
|
+
parts = line.split(None, 1)
|
|
234
|
+
if len(parts) == 2:
|
|
235
|
+
key = parts[0].lower()
|
|
236
|
+
value = parts[1]
|
|
237
|
+
|
|
238
|
+
# Handle multi-value options (identity files)
|
|
239
|
+
if key == 'identityfile':
|
|
240
|
+
if key not in config_data:
|
|
241
|
+
config_data[key] = []
|
|
242
|
+
if isinstance(config_data[key], list):
|
|
243
|
+
config_data[key].append(value)
|
|
244
|
+
else:
|
|
245
|
+
config_data[key] = [config_data[key], value]
|
|
246
|
+
else:
|
|
247
|
+
# For single-value options, use the first occurrence
|
|
248
|
+
if key not in config_data:
|
|
249
|
+
config_data[key] = value
|
|
250
|
+
except Exception as e:
|
|
251
|
+
raise ConfigError(f"Failed to parse SSH config file {config_path}: {e}")
|
|
252
|
+
|
|
253
|
+
return config_data
|
|
254
|
+
|
|
255
|
+
|
|
256
|
+
@classmethod
|
|
257
|
+
def from_remote_conf(cls, remote_conf_path: str | None = None) -> "SSHConfig":
|
|
258
|
+
"""Load SSH config from ~/.mdify/remote.conf.
|
|
259
|
+
|
|
260
|
+
Parameters:
|
|
261
|
+
remote_conf_path: Path to remote config file
|
|
262
|
+
|
|
263
|
+
Returns:
|
|
264
|
+
SSHConfig instance with source='remote_conf'
|
|
265
|
+
|
|
266
|
+
Raises:
|
|
267
|
+
ConfigError: Config file not found or invalid
|
|
268
|
+
"""
|
|
269
|
+
import yaml
|
|
270
|
+
import os
|
|
271
|
+
|
|
272
|
+
if remote_conf_path is None:
|
|
273
|
+
remote_conf_path = "~/.mdify/remote.conf"
|
|
274
|
+
|
|
275
|
+
remote_conf_path = os.path.expanduser(remote_conf_path)
|
|
276
|
+
|
|
277
|
+
if not Path(remote_conf_path).exists():
|
|
278
|
+
raise ConfigError(f"Remote config file not found: {remote_conf_path}")
|
|
279
|
+
|
|
280
|
+
try:
|
|
281
|
+
with open(remote_conf_path, "r") as f:
|
|
282
|
+
config_data = yaml.safe_load(f) or {}
|
|
283
|
+
|
|
284
|
+
# Extract defaults
|
|
285
|
+
defaults = config_data.get("defaults", {})
|
|
286
|
+
servers = config_data.get("servers", {})
|
|
287
|
+
|
|
288
|
+
if not servers:
|
|
289
|
+
raise ConfigError("No servers defined in remote config")
|
|
290
|
+
|
|
291
|
+
# Use first server or named one
|
|
292
|
+
first_server_name = next(iter(servers.keys()))
|
|
293
|
+
server_config = servers[first_server_name]
|
|
294
|
+
|
|
295
|
+
# Merge defaults with server config (server overrides defaults)
|
|
296
|
+
merged = {**defaults, **server_config}
|
|
297
|
+
|
|
298
|
+
# Build SSHConfig
|
|
299
|
+
kwargs = {
|
|
300
|
+
"source": "remote_conf",
|
|
301
|
+
"config_file": remote_conf_path,
|
|
302
|
+
}
|
|
303
|
+
|
|
304
|
+
if "host" in merged:
|
|
305
|
+
kwargs["host"] = merged["host"]
|
|
306
|
+
else:
|
|
307
|
+
raise ConfigError(f"Server '{first_server_name}' missing 'host' field")
|
|
308
|
+
|
|
309
|
+
if "port" in merged:
|
|
310
|
+
kwargs["port"] = int(merged["port"])
|
|
311
|
+
if "username" in merged:
|
|
312
|
+
kwargs["username"] = merged["username"]
|
|
313
|
+
if "key_file" in merged:
|
|
314
|
+
kwargs["key_file"] = merged["key_file"]
|
|
315
|
+
if "timeout" in merged:
|
|
316
|
+
kwargs["timeout"] = int(merged["timeout"])
|
|
317
|
+
if "keepalive" in merged:
|
|
318
|
+
kwargs["keepalive"] = int(merged["keepalive"])
|
|
319
|
+
if "compression" in merged:
|
|
320
|
+
kwargs["compression"] = bool(merged["compression"])
|
|
321
|
+
if "work_dir" in merged:
|
|
322
|
+
kwargs["work_dir"] = merged["work_dir"]
|
|
323
|
+
if "container_runtime" in merged:
|
|
324
|
+
kwargs["container_runtime"] = merged["container_runtime"]
|
|
325
|
+
|
|
326
|
+
return cls(**kwargs)
|
|
327
|
+
|
|
328
|
+
except yaml.YAMLError as e:
|
|
329
|
+
raise ConfigError(f"Invalid YAML in remote config: {e}")
|
|
330
|
+
except Exception as e:
|
|
331
|
+
raise ConfigError(f"Failed to load remote config: {e}")
|
|
332
|
+
|
|
333
|
+
def merge(self, higher_precedence: "SSHConfig") -> "SSHConfig":
|
|
334
|
+
"""Merge with higher precedence config.
|
|
335
|
+
|
|
336
|
+
Parameters:
|
|
337
|
+
higher_precedence: Config with higher precedence (e.g., CLI args)
|
|
338
|
+
|
|
339
|
+
Returns:
|
|
340
|
+
Merged SSHConfig with higher precedence values
|
|
341
|
+
"""
|
|
342
|
+
# Use higher precedence value if provided, otherwise use self
|
|
343
|
+
def pick_value(self_val, higher_val, is_string=True):
|
|
344
|
+
if is_string:
|
|
345
|
+
return higher_val if higher_val else self_val
|
|
346
|
+
else:
|
|
347
|
+
return higher_val if higher_val is not None else self_val
|
|
348
|
+
|
|
349
|
+
return SSHConfig(
|
|
350
|
+
host=pick_value(self.host, higher_precedence.host),
|
|
351
|
+
port=pick_value(self.port, higher_precedence.port, is_string=False),
|
|
352
|
+
username=pick_value(self.username, higher_precedence.username),
|
|
353
|
+
password=pick_value(self.password, higher_precedence.password),
|
|
354
|
+
key_file=pick_value(self.key_file, higher_precedence.key_file),
|
|
355
|
+
key_passphrase=pick_value(self.key_passphrase, higher_precedence.key_passphrase),
|
|
356
|
+
timeout=pick_value(self.timeout, higher_precedence.timeout, is_string=False),
|
|
357
|
+
keepalive=pick_value(self.keepalive, higher_precedence.keepalive, is_string=False),
|
|
358
|
+
compression=pick_value(self.compression, higher_precedence.compression, is_string=False),
|
|
359
|
+
work_dir=pick_value(self.work_dir, higher_precedence.work_dir),
|
|
360
|
+
container_runtime=pick_value(self.container_runtime, higher_precedence.container_runtime),
|
|
361
|
+
source=higher_precedence.source, # Track higher precedence source
|
|
362
|
+
)
|
|
363
|
+
|
|
364
|
+
def to_dict(self) -> dict:
|
|
365
|
+
"""Convert to dictionary for serialization (excludes secrets)."""
|
|
366
|
+
return {
|
|
367
|
+
"host": self.host,
|
|
368
|
+
"port": self.port,
|
|
369
|
+
"username": self.username or "default",
|
|
370
|
+
"timeout": self.timeout,
|
|
371
|
+
"keepalive": self.keepalive,
|
|
372
|
+
"compression": self.compression,
|
|
373
|
+
"work_dir": self.work_dir,
|
|
374
|
+
"container_runtime": self.container_runtime or "auto-detect",
|
|
375
|
+
"source": self.source,
|
|
376
|
+
}
|
|
377
|
+
|
|
378
|
+
|
|
379
|
+
@dataclass
|
|
380
|
+
class TransferSession:
|
|
381
|
+
"""Active file transfer session with progress tracking."""
|
|
382
|
+
|
|
383
|
+
session_id: str = field(default_factory=lambda: str(uuid.uuid4()))
|
|
384
|
+
local_path: str = ""
|
|
385
|
+
remote_path: str = ""
|
|
386
|
+
direction: Literal["upload", "download"] = "upload"
|
|
387
|
+
|
|
388
|
+
# Progress tracking
|
|
389
|
+
total_bytes: int = 0
|
|
390
|
+
transferred_bytes: int = 0
|
|
391
|
+
start_time: datetime = field(default_factory=datetime.now)
|
|
392
|
+
end_time: datetime | None = None
|
|
393
|
+
|
|
394
|
+
# Status
|
|
395
|
+
status: Literal["pending", "in_progress", "completed", "failed", "cancelled"] = "pending"
|
|
396
|
+
error_message: str | None = None
|
|
397
|
+
|
|
398
|
+
# Performance metrics
|
|
399
|
+
avg_speed_mbps: float = 0.0
|
|
400
|
+
current_speed_mbps: float = 0.0
|
|
401
|
+
eta_seconds: int | None = None
|
|
402
|
+
|
|
403
|
+
# Debugging
|
|
404
|
+
debug_mode: bool = False
|
|
405
|
+
chunk_log: list[str] = field(default_factory=list)
|
|
406
|
+
|
|
407
|
+
def update_progress(self, transferred_bytes: int) -> None:
|
|
408
|
+
"""Update transfer progress and recalculate speed/ETA."""
|
|
409
|
+
elapsed = (datetime.now() - self.start_time).total_seconds()
|
|
410
|
+
if elapsed <= 0:
|
|
411
|
+
return
|
|
412
|
+
|
|
413
|
+
self.transferred_bytes = transferred_bytes
|
|
414
|
+
self.avg_speed_mbps = (transferred_bytes / elapsed) / (1024 * 1024)
|
|
415
|
+
|
|
416
|
+
if self.avg_speed_mbps > 0 and self.transferred_bytes < self.total_bytes:
|
|
417
|
+
remaining_bytes = self.total_bytes - self.transferred_bytes
|
|
418
|
+
self.eta_seconds = int(remaining_bytes / (self.avg_speed_mbps * 1024 * 1024))
|
|
419
|
+
else:
|
|
420
|
+
self.eta_seconds = None
|
|
421
|
+
|
|
422
|
+
def complete(self) -> None:
|
|
423
|
+
"""Mark transfer as completed."""
|
|
424
|
+
self.end_time = datetime.now()
|
|
425
|
+
self.status = "completed"
|
|
426
|
+
|
|
427
|
+
elapsed = (self.end_time - self.start_time).total_seconds()
|
|
428
|
+
if elapsed > 0:
|
|
429
|
+
self.avg_speed_mbps = (self.total_bytes / elapsed) / (1024 * 1024)
|
|
430
|
+
|
|
431
|
+
def fail(self, error: Exception) -> None:
|
|
432
|
+
"""Mark transfer as failed."""
|
|
433
|
+
self.end_time = datetime.now()
|
|
434
|
+
self.status = "failed"
|
|
435
|
+
self.error_message = str(error)
|
|
436
|
+
|
|
437
|
+
|
|
438
|
+
@dataclass
|
|
439
|
+
class RemoteContainerState:
|
|
440
|
+
"""State of a container running on a remote server."""
|
|
441
|
+
|
|
442
|
+
container_id: str = ""
|
|
443
|
+
container_name: str = ""
|
|
444
|
+
host: str = ""
|
|
445
|
+
port: int = 8000
|
|
446
|
+
|
|
447
|
+
# Runtime state
|
|
448
|
+
runtime: Literal["docker", "podman"] = "docker"
|
|
449
|
+
is_running: bool = False
|
|
450
|
+
health_status: Literal["healthy", "unhealthy", "unknown"] = "unknown"
|
|
451
|
+
|
|
452
|
+
# Lifecycle timestamps
|
|
453
|
+
created_at: datetime | None = None
|
|
454
|
+
started_at: datetime | None = None
|
|
455
|
+
stopped_at: datetime | None = None
|
|
456
|
+
|
|
457
|
+
# Status details
|
|
458
|
+
exit_code: int | None = None
|
|
459
|
+
error_message: str | None = None
|
|
460
|
+
|
|
461
|
+
# Network info
|
|
462
|
+
base_url: str = ""
|
|
463
|
+
|
|
464
|
+
# Metadata
|
|
465
|
+
created_by: str = ""
|
|
466
|
+
tags: dict[str, str] = field(default_factory=dict)
|
|
467
|
+
|
|
468
|
+
def is_accessible(self) -> bool:
|
|
469
|
+
"""Check if container is running and healthy."""
|
|
470
|
+
return self.is_running and self.health_status == "healthy"
|