claude-mpm 4.3.22__py3-none-any.whl → 4.4.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- claude_mpm/VERSION +1 -1
- claude_mpm/cli/commands/doctor.py +2 -2
- claude_mpm/hooks/memory_integration_hook.py +1 -1
- claude_mpm/services/agents/memory/content_manager.py +5 -2
- claude_mpm/services/agents/memory/memory_file_service.py +1 -0
- claude_mpm/services/agents/memory/memory_limits_service.py +1 -0
- claude_mpm/services/unified/__init__.py +65 -0
- claude_mpm/services/unified/analyzer_strategies/__init__.py +44 -0
- claude_mpm/services/unified/analyzer_strategies/code_analyzer.py +473 -0
- claude_mpm/services/unified/analyzer_strategies/dependency_analyzer.py +643 -0
- claude_mpm/services/unified/analyzer_strategies/performance_analyzer.py +804 -0
- claude_mpm/services/unified/analyzer_strategies/security_analyzer.py +661 -0
- claude_mpm/services/unified/analyzer_strategies/structure_analyzer.py +696 -0
- claude_mpm/services/unified/deployment_strategies/__init__.py +97 -0
- claude_mpm/services/unified/deployment_strategies/base.py +557 -0
- claude_mpm/services/unified/deployment_strategies/cloud_strategies.py +486 -0
- claude_mpm/services/unified/deployment_strategies/local.py +594 -0
- claude_mpm/services/unified/deployment_strategies/utils.py +672 -0
- claude_mpm/services/unified/deployment_strategies/vercel.py +471 -0
- claude_mpm/services/unified/interfaces.py +499 -0
- claude_mpm/services/unified/migration.py +532 -0
- claude_mpm/services/unified/strategies.py +551 -0
- claude_mpm/services/unified/unified_analyzer.py +534 -0
- claude_mpm/services/unified/unified_config.py +688 -0
- claude_mpm/services/unified/unified_deployment.py +470 -0
- {claude_mpm-4.3.22.dist-info → claude_mpm-4.4.0.dist-info}/METADATA +1 -1
- {claude_mpm-4.3.22.dist-info → claude_mpm-4.4.0.dist-info}/RECORD +31 -12
- {claude_mpm-4.3.22.dist-info → claude_mpm-4.4.0.dist-info}/WHEEL +0 -0
- {claude_mpm-4.3.22.dist-info → claude_mpm-4.4.0.dist-info}/entry_points.txt +0 -0
- {claude_mpm-4.3.22.dist-info → claude_mpm-4.4.0.dist-info}/licenses/LICENSE +0 -0
- {claude_mpm-4.3.22.dist-info → claude_mpm-4.4.0.dist-info}/top_level.txt +0 -0
@@ -0,0 +1,594 @@
|
|
1
|
+
"""
|
2
|
+
Local Deployment Strategy
|
3
|
+
=========================
|
4
|
+
|
5
|
+
Handles deployment to local filesystem and project directories.
|
6
|
+
Consolidates functionality from:
|
7
|
+
- agent_deployment.py
|
8
|
+
- local_template_deployment.py
|
9
|
+
- agent_filesystem_manager.py
|
10
|
+
- system_instructions_deployer.py
|
11
|
+
"""
|
12
|
+
|
13
|
+
import json
|
14
|
+
import shutil
|
15
|
+
import tempfile
|
16
|
+
from datetime import datetime
|
17
|
+
from pathlib import Path
|
18
|
+
from typing import Any, Dict, List, Optional
|
19
|
+
|
20
|
+
import yaml
|
21
|
+
|
22
|
+
from claude_mpm.core.logging_utils import get_logger
|
23
|
+
from claude_mpm.services.unified.strategies import StrategyMetadata, StrategyPriority
|
24
|
+
|
25
|
+
from .base import DeploymentContext, DeploymentResult, DeploymentStrategy, DeploymentType
|
26
|
+
|
27
|
+
|
28
|
+
class LocalDeploymentStrategy(DeploymentStrategy):
    """
    Strategy for local filesystem deployments.

    Handles deployment of agents, configs, templates, and resources to
    local project directories. Consolidates multiple local deployment
    patterns from various services.

    Features:
    - Agent deployment to .claude/agents
    - Configuration file deployment
    - Template instantiation and deployment
    - Resource copying and linking
    - Version management and rollback
    - Backup and restore capabilities
    """

    def __init__(self):
        """Initialize local deployment strategy."""
        metadata = StrategyMetadata(
            name="LocalDeploymentStrategy",
            description="Deploy to local filesystem and project directories",
            supported_types=["agent", "config", "template", "resource", "*"],
            supported_operations=["deploy", "rollback", "verify", "backup"],
            priority=StrategyPriority.HIGH,
            tags={"local", "filesystem", "project"},
        )
        super().__init__(metadata)
        self._logger = get_logger(f"{__name__}.LocalDeploymentStrategy")
        # Maps str(target path) -> backup location; consulted by rollback().
        self._backups: Dict[str, Path] = {}

    def validate(self, context: DeploymentContext) -> List[str]:
        """
        Validate local deployment configuration.

        Args:
            context: Deployment context

        Returns:
            List of validation errors (empty when valid)
        """
        errors: List[str] = []

        # Check source exists
        source_path = Path(context.source)
        if not source_path.exists():
            errors.append(f"Source does not exist: {source_path}")

        # Check target directory permissions. A target with a suffix is
        # treated as a file path, so its parent is the directory to check.
        target_path = Path(context.target)
        target_parent = target_path.parent if target_path.suffix else target_path

        if not target_parent.exists():
            # BUGFIX: the previous implementation created the directory with
            # mkdir(parents=True) and then rmdir()'d only the leaf, leaving
            # any newly created ancestors behind as a validation side effect.
            # Instead, walk up to the nearest existing ancestor and verify we
            # could create children under it — no filesystem mutation.
            ancestor = target_parent
            while not ancestor.exists() and ancestor != ancestor.parent:
                ancestor = ancestor.parent
            if not ancestor.is_dir() or not self._check_write_permission(ancestor):
                errors.append(
                    f"No permission to create target directory: {target_parent}"
                )
        else:
            # Target parent exists: it must be a writable directory.
            if not target_parent.is_dir():
                errors.append(f"Target parent is not a directory: {target_parent}")
            elif not self._check_write_permission(target_parent):
                errors.append(f"No write permission for target: {target_parent}")

        # Validate deployment type specific requirements
        if context.deployment_type == DeploymentType.AGENT:
            errors.extend(self._validate_agent_deployment(context))
        elif context.deployment_type == DeploymentType.CONFIG:
            errors.extend(self._validate_config_deployment(context))
        elif context.deployment_type == DeploymentType.TEMPLATE:
            errors.extend(self._validate_template_deployment(context))

        return errors

    def prepare(self, context: DeploymentContext) -> List[Path]:
        """
        Prepare deployment artifacts.

        Creates a backup of the current target (when enabled) and produces
        the list of files that execute() will copy into place.

        Args:
            context: Deployment context

        Returns:
            List of prepared artifact paths
        """
        artifacts: List[Path] = []
        source_path = Path(context.source)

        # Create backup if enabled; remember it so rollback() can restore.
        if context.backup_enabled:
            backup_path = self._create_backup(context)
            if backup_path:
                self._backups[str(context.target)] = backup_path
                artifacts.append(backup_path)

        # Prepare based on deployment type
        if context.deployment_type == DeploymentType.AGENT:
            artifacts.extend(self._prepare_agent_artifacts(context))
        elif context.deployment_type == DeploymentType.CONFIG:
            artifacts.extend(self._prepare_config_artifacts(context))
        elif context.deployment_type == DeploymentType.TEMPLATE:
            artifacts.extend(self._prepare_template_artifacts(context))
        else:
            # Default: deploy the source as-is.
            artifacts.append(source_path)

        return artifacts

    def execute(
        self, context: DeploymentContext, artifacts: List[Path]
    ) -> Dict[str, Any]:
        """
        Execute local deployment.

        Args:
            context: Deployment context
            artifacts: Prepared artifacts

        Returns:
            Deployment information dict with keys: deployment_id,
            deployed_path, deployed_files, artifacts, timestamp.
        """
        target_path = Path(context.target)
        deployment_id = self._generate_deployment_id()

        # Ensure target directory exists. A suffix means the target is a
        # file path, so only its parent directory is created.
        if not target_path.suffix:
            target_path.mkdir(parents=True, exist_ok=True)
        else:
            target_path.parent.mkdir(parents=True, exist_ok=True)

        deployed_files: List[Path] = []

        # Deploy artifacts based on type
        if context.deployment_type == DeploymentType.AGENT:
            deployed_files = self._deploy_agent(context, artifacts, target_path)
        elif context.deployment_type == DeploymentType.CONFIG:
            deployed_files = self._deploy_config(context, artifacts, target_path)
        elif context.deployment_type == DeploymentType.TEMPLATE:
            deployed_files = self._deploy_template(context, artifacts, target_path)
        else:
            deployed_files = self._deploy_resources(context, artifacts, target_path)

        # Update version file if versioned
        if context.version:
            self._write_version_file(target_path, context.version)

        return {
            "deployment_id": deployment_id,
            "deployed_path": target_path,
            "deployed_files": deployed_files,
            "artifacts": [str(a) for a in artifacts],
            "timestamp": datetime.now().isoformat(),
        }

    def verify(
        self, context: DeploymentContext, deployment_info: Dict[str, Any]
    ) -> bool:
        """
        Verify local deployment success.

        Args:
            context: Deployment context
            deployment_info: Deployment information returned by execute()

        Returns:
            True if deployment verified
        """
        deployed_path = Path(deployment_info["deployed_path"])

        # Check deployed path exists
        if not deployed_path.exists():
            self._logger.error(f"Deployed path does not exist: {deployed_path}")
            return False

        # Check all deployed files exist
        for file_path in deployment_info.get("deployed_files", []):
            if not Path(file_path).exists():
                self._logger.error(f"Deployed file missing: {file_path}")
                return False

        # Type-specific verification
        if context.deployment_type == DeploymentType.AGENT:
            return self._verify_agent_deployment(deployed_path, context)
        elif context.deployment_type == DeploymentType.CONFIG:
            return self._verify_config_deployment(deployed_path, context)
        elif context.deployment_type == DeploymentType.TEMPLATE:
            return self._verify_template_deployment(deployed_path, context)

        return True

    def rollback(
        self, context: DeploymentContext, result: DeploymentResult
    ) -> bool:
        """
        Rollback local deployment.

        Removes the deployed files and, when a backup was taken during
        prepare(), restores the previous target contents from it.

        Args:
            context: Deployment context
            result: Current deployment result

        Returns:
            True if rollback successful
        """
        target_path = Path(context.target)

        try:
            # Remove deployed files
            if result.deployed_path and result.deployed_path.exists():
                if result.deployed_path.is_file():
                    result.deployed_path.unlink()
                elif result.deployed_path.is_dir():
                    shutil.rmtree(result.deployed_path)

            # Restore from backup if available
            backup_path = self._backups.get(str(target_path))
            if backup_path and backup_path.exists():
                if backup_path.is_file():
                    shutil.copy2(backup_path, target_path)
                else:
                    shutil.copytree(backup_path, target_path, dirs_exist_ok=True)

                self._logger.info(f"Restored from backup: {backup_path}")

            return True

        except Exception as e:
            # Rollback is best-effort: report failure instead of raising so
            # callers can surface the original deployment error as well.
            self._logger.error(f"Rollback failed: {str(e)}")
            return False

    def get_health_status(
        self, deployment_info: Dict[str, Any]
    ) -> Dict[str, Any]:
        """
        Get health status of local deployment.

        Args:
            deployment_info: Deployment information returned by execute()

        Returns:
            Health status dict with keys: status ("healthy" | "degraded" |
            "unhealthy" | "unknown"), deployed_path, exists, checks.
        """
        # BUGFIX: Path("") normalizes to Path("."), which exists — the
        # previous code therefore reported exists=True against the CWD when
        # "deployed_path" was missing. Treat an absent/empty value as None.
        raw_path = deployment_info.get("deployed_path", "")
        deployed_path = Path(raw_path) if raw_path else None

        health: Dict[str, Any] = {
            "status": "unknown",
            "deployed_path": str(deployed_path) if deployed_path is not None else "",
            "exists": deployed_path.exists() if deployed_path is not None else False,
            "checks": {},
        }

        if deployed_path is not None and deployed_path.exists():
            health["status"] = "healthy"

            # Check file integrity: each deployed file must still exist.
            for file_path in deployment_info.get("deployed_files", []):
                path = Path(file_path)
                health["checks"][str(path)] = path.exists()

            # Any missing file downgrades the status.
            if any(not check for check in health["checks"].values()):
                health["status"] = "degraded"

        else:
            health["status"] = "unhealthy"

        return health

    # Private helper methods

    def _check_write_permission(self, path: Path) -> bool:
        """Check if we have write permission to path by touching a probe file."""
        try:
            test_file = path / f".write_test_{datetime.now().timestamp()}"
            test_file.touch()
            test_file.unlink()
            return True
        except Exception:
            return False

    def _generate_deployment_id(self) -> str:
        """Generate unique deployment ID (timestamp + per-instance suffix)."""
        return f"local_{datetime.now().strftime('%Y%m%d_%H%M%S')}_{id(self) % 10000:04d}"

    def _create_backup(self, context: DeploymentContext) -> Optional[Path]:
        """Create backup of target before deployment.

        Returns the backup path, or None when the target does not exist or
        the backup fails (backup failure is non-fatal by design).
        """
        target_path = Path(context.target)

        if not target_path.exists():
            return None

        try:
            backup_dir = Path(tempfile.gettempdir()) / "claude_mpm_backups"
            backup_dir.mkdir(parents=True, exist_ok=True)

            timestamp = datetime.now().strftime("%Y%m%d_%H%M%S")
            backup_name = f"{target_path.name}.backup_{timestamp}"
            backup_path = backup_dir / backup_name

            if target_path.is_file():
                shutil.copy2(target_path, backup_path)
            else:
                shutil.copytree(target_path, backup_path)

            self._logger.info(f"Created backup: {backup_path}")
            return backup_path

        except Exception as e:
            # Deployment proceeds without a backup; rollback will simply
            # have nothing to restore.
            self._logger.warning(f"Failed to create backup: {str(e)}")
            return None

    def _write_version_file(self, target_path: Path, version: str) -> None:
        """Write a .version marker file (version + timestamp) into the deployment."""
        version_file = target_path / ".version"
        version_file.write_text(f"{version}\n{datetime.now().isoformat()}\n")

    # Agent deployment methods (consolidating agent_deployment.py patterns)

    def _validate_agent_deployment(self, context: DeploymentContext) -> List[str]:
        """Validate agent deployment specifics (definition files present and typed)."""
        errors: List[str] = []
        source_path = Path(context.source)

        # Check for required agent files
        if source_path.is_file():
            if source_path.suffix not in [".json", ".yaml", ".yml"]:
                errors.append(f"Invalid agent file format: {source_path.suffix}")
        elif source_path.is_dir():
            # Check for agent definition files
            agent_files = (
                list(source_path.glob("*.json"))
                + list(source_path.glob("*.yaml"))
                + list(source_path.glob("*.yml"))
            )
            if not agent_files:
                errors.append(f"No agent definition files found in: {source_path}")

        return errors

    def _prepare_agent_artifacts(self, context: DeploymentContext) -> List[Path]:
        """Prepare agent deployment artifacts, converting JSON definitions to YAML."""
        source_path = Path(context.source)
        artifacts: List[Path] = []

        # Agents are deployed as YAML; convert JSON sources first.
        if source_path.suffix == ".json":
            yaml_path = self._convert_json_to_yaml(source_path)
            artifacts.append(yaml_path)
        else:
            artifacts.append(source_path)

        return artifacts

    def _deploy_agent(
        self, context: DeploymentContext, artifacts: List[Path], target_path: Path
    ) -> List[Path]:
        """Deploy agent YAML files into the target directory."""
        deployed: List[Path] = []

        for artifact in artifacts:
            # Only YAML definitions are deployed; other artifacts (e.g. the
            # backup recorded in prepare()) are skipped.
            if artifact.suffix in [".yaml", ".yml"]:
                dest = target_path / artifact.name
                shutil.copy2(artifact, dest)
                deployed.append(dest)

                self._logger.info(f"Deployed agent: {dest}")

        return deployed

    def _verify_agent_deployment(
        self, deployed_path: Path, context: DeploymentContext
    ) -> bool:
        """Verify agent deployment: every deployed YAML parses and has a 'name'."""
        # Check for valid YAML structure
        yaml_files = list(deployed_path.glob("*.yaml")) + list(
            deployed_path.glob("*.yml")
        )

        for yaml_file in yaml_files:
            try:
                with open(yaml_file) as f:
                    data = yaml.safe_load(f)
                # Basic agent structure validation
                if not isinstance(data, dict):
                    return False
                if "name" not in data:
                    self._logger.error(f"Agent missing 'name' field: {yaml_file}")
                    return False
            except Exception as e:
                self._logger.error(f"Invalid agent YAML: {yaml_file}: {str(e)}")
                return False

        return True

    def _convert_json_to_yaml(self, json_path: Path) -> Path:
        """Convert a JSON agent definition to YAML; returns the temp YAML path."""
        with open(json_path) as f:
            data = json.load(f)

        yaml_path = Path(tempfile.gettempdir()) / f"{json_path.stem}.yaml"
        with open(yaml_path, "w") as f:
            yaml.dump(data, f, default_flow_style=False, sort_keys=False)

        return yaml_path

    # Config deployment methods

    def _validate_config_deployment(self, context: DeploymentContext) -> List[str]:
        """Validate config deployment (supported file extension)."""
        errors: List[str] = []
        source_path = Path(context.source)

        if source_path.is_file():
            # Validate config file format
            if source_path.suffix not in [".json", ".yaml", ".yml", ".toml", ".ini", ".env"]:
                errors.append(f"Unsupported config format: {source_path.suffix}")

        return errors

    def _prepare_config_artifacts(self, context: DeploymentContext) -> List[Path]:
        """Prepare config artifacts (deployed verbatim, no processing)."""
        return [Path(context.source)]

    def _deploy_config(
        self, context: DeploymentContext, artifacts: List[Path], target_path: Path
    ) -> List[Path]:
        """Deploy configuration files into (or as) the target path."""
        deployed: List[Path] = []

        for artifact in artifacts:
            # Directory target: keep the artifact's name; file target: replace it.
            if target_path.is_dir():
                dest = target_path / artifact.name
            else:
                dest = target_path

            shutil.copy2(artifact, dest)
            deployed.append(dest)

            self._logger.info(f"Deployed config: {dest}")

        return deployed

    def _verify_config_deployment(
        self, deployed_path: Path, context: DeploymentContext
    ) -> bool:
        """Verify config deployment: file exists and is readable as text."""
        # Basic validation - file exists and is readable
        if deployed_path.is_file():
            try:
                deployed_path.read_text()
                return True
            except Exception:
                return False
        return deployed_path.exists()

    # Template deployment methods

    def _validate_template_deployment(self, context: DeploymentContext) -> List[str]:
        """Validate template deployment (missing variables is a warning, not an error)."""
        errors: List[str] = []

        # Check for template variables in config
        if "variables" not in context.config:
            self._logger.warning("No template variables provided")

        return errors

    def _prepare_template_artifacts(self, context: DeploymentContext) -> List[Path]:
        """Prepare template artifacts by substituting variables into temp copies."""
        source_path = Path(context.source)
        artifacts: List[Path] = []

        # Process template with variables
        if source_path.is_file():
            processed = self._process_template(
                source_path, context.config.get("variables", {})
            )
            artifacts.append(processed)
        else:
            # Process all template files in directory
            for template_file in source_path.rglob("*"):
                if template_file.is_file():
                    processed = self._process_template(
                        template_file,
                        context.config.get("variables", {}),
                    )
                    artifacts.append(processed)

        return artifacts

    def _deploy_template(
        self, context: DeploymentContext, artifacts: List[Path], target_path: Path
    ) -> List[Path]:
        """Deploy processed template files into the target."""
        deployed: List[Path] = []

        for artifact in artifacts:
            if target_path.is_dir():
                # Maintain relative structure.
                # NOTE(review): _process_template writes flat into the temp
                # dir, so relative_to(gettempdir()) yields only the file
                # name — nested template directories collapse and same-named
                # files can collide. Confirm intended behavior before
                # relying on directory templates.
                source_base = Path(context.source)
                if source_base.is_dir():
                    rel_path = artifact.relative_to(Path(tempfile.gettempdir()))
                    dest = target_path / rel_path
                else:
                    dest = target_path / artifact.name
            else:
                dest = target_path

            dest.parent.mkdir(parents=True, exist_ok=True)
            shutil.copy2(artifact, dest)
            deployed.append(dest)

            self._logger.info(f"Deployed template: {dest}")

        return deployed

    def _verify_template_deployment(
        self, deployed_path: Path, context: DeploymentContext
    ) -> bool:
        """Verify template deployment: no unresolved template markers remain."""
        # Check that template variables were replaced
        variables = context.config.get("variables", {})

        if deployed_path.is_file():
            content = deployed_path.read_text()
            # Check no template markers remain
            if "{{" in content or "{%" in content:
                self._logger.warning("Template markers still present in deployed file")
                return False

        return True

    def _process_template(
        self, template_path: Path, variables: Dict[str, Any]
    ) -> Path:
        """Process template file with variables; returns the temp output path."""
        content = template_path.read_text()

        # Simple {{key}} replacement (can be enhanced with Jinja2).
        for key, value in variables.items():
            content = content.replace(f"{{{{{key}}}}}", str(value))

        # Write to temp file
        processed_path = Path(tempfile.gettempdir()) / template_path.name
        processed_path.write_text(content)

        return processed_path

    # Resource deployment methods

    def _deploy_resources(
        self, context: DeploymentContext, artifacts: List[Path], target_path: Path
    ) -> List[Path]:
        """Deploy generic resources (files copied, directories merged)."""
        deployed: List[Path] = []

        for artifact in artifacts:
            if artifact.is_file():
                dest = target_path / artifact.name if target_path.is_dir() else target_path
                shutil.copy2(artifact, dest)
                deployed.append(dest)
            elif artifact.is_dir():
                if target_path.is_dir():
                    dest = target_path / artifact.name
                else:
                    dest = target_path
                shutil.copytree(artifact, dest, dirs_exist_ok=True)
                deployed.append(dest)
            else:
                # BUGFIX: previously this loop fell through to log
                # deployed[-1] even when nothing had been deployed, raising
                # IndexError for artifacts that are neither file nor dir
                # (e.g. removed between prepare and execute).
                self._logger.warning(f"Skipping missing artifact: {artifact}")
                continue

            self._logger.info(f"Deployed resource: {dest}")

        return deployed