adamops 0.1.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (42) hide show
  1. adamops/__init__.py +40 -0
  2. adamops/cli.py +163 -0
  3. adamops/data/__init__.py +24 -0
  4. adamops/data/feature_engineering.py +284 -0
  5. adamops/data/loaders.py +922 -0
  6. adamops/data/preprocessors.py +227 -0
  7. adamops/data/splitters.py +218 -0
  8. adamops/data/validators.py +148 -0
  9. adamops/deployment/__init__.py +21 -0
  10. adamops/deployment/api.py +237 -0
  11. adamops/deployment/cloud.py +191 -0
  12. adamops/deployment/containerize.py +262 -0
  13. adamops/deployment/exporters.py +148 -0
  14. adamops/evaluation/__init__.py +24 -0
  15. adamops/evaluation/comparison.py +133 -0
  16. adamops/evaluation/explainability.py +143 -0
  17. adamops/evaluation/metrics.py +233 -0
  18. adamops/evaluation/reports.py +165 -0
  19. adamops/evaluation/visualization.py +238 -0
  20. adamops/models/__init__.py +21 -0
  21. adamops/models/automl.py +277 -0
  22. adamops/models/ensembles.py +228 -0
  23. adamops/models/modelops.py +308 -0
  24. adamops/models/registry.py +250 -0
  25. adamops/monitoring/__init__.py +21 -0
  26. adamops/monitoring/alerts.py +200 -0
  27. adamops/monitoring/dashboard.py +117 -0
  28. adamops/monitoring/drift.py +212 -0
  29. adamops/monitoring/performance.py +195 -0
  30. adamops/pipelines/__init__.py +15 -0
  31. adamops/pipelines/orchestrators.py +183 -0
  32. adamops/pipelines/workflows.py +212 -0
  33. adamops/utils/__init__.py +18 -0
  34. adamops/utils/config.py +457 -0
  35. adamops/utils/helpers.py +663 -0
  36. adamops/utils/logging.py +412 -0
  37. adamops-0.1.0.dist-info/METADATA +310 -0
  38. adamops-0.1.0.dist-info/RECORD +42 -0
  39. adamops-0.1.0.dist-info/WHEEL +5 -0
  40. adamops-0.1.0.dist-info/entry_points.txt +2 -0
  41. adamops-0.1.0.dist-info/licenses/LICENSE +21 -0
  42. adamops-0.1.0.dist-info/top_level.txt +1 -0
@@ -0,0 +1,237 @@
1
+ """
2
+ AdamOps API Module
3
+
4
+ Create REST APIs for model serving with FastAPI/Flask.
5
+ """
6
+
7
+ from typing import Any, Callable, Dict, List, Optional
8
+ import json
9
+ from pathlib import Path
10
+
11
+ from adamops.utils.logging import get_logger
12
+ from adamops.deployment.exporters import load_model
13
+
14
+ logger = get_logger(__name__)
15
+
16
+
17
def create_fastapi_app(
    model: Any, model_name: str = "model",
    input_schema: Optional[Dict] = None,
    preprocess_fn: Optional[Callable] = None,
    postprocess_fn: Optional[Callable] = None
):
    """
    Create a FastAPI application for model serving.

    Exposes ``/`` and ``/health`` probes, ``/predict``, and — when the model
    implements ``predict_proba`` — ``/predict_proba``.

    Args:
        model: Trained model exposing ``predict`` (and optionally
            ``predict_proba``) that returns an array-like.
        model_name: Name used in the API title and in responses.
        input_schema: Reserved for custom input validation; currently unused
            (requests are always validated as a 2-D list of floats).
        preprocess_fn: Optional function applied to the feature array before
            prediction.
        postprocess_fn: Optional function applied to the prediction list.

    Returns:
        A configured ``fastapi.FastAPI`` application.

    Raises:
        ImportError: If FastAPI/pydantic are not installed.
    """
    try:
        from fastapi import FastAPI, HTTPException
        from pydantic import BaseModel
    except ImportError as exc:
        raise ImportError("FastAPI required. Install with: pip install fastapi uvicorn") from exc

    import numpy as np

    app = FastAPI(title=f"{model_name} API", version="1.0.0")

    # Fixed request/response schemas; a batch is a 2-D list of floats.
    class PredictionInput(BaseModel):
        features: List[List[float]]

    class PredictionOutput(BaseModel):
        predictions: List
        model_name: str

    @app.get("/")
    def root():
        return {"message": f"Welcome to {model_name} API", "status": "healthy"}

    @app.get("/health")
    def health():
        return {"status": "healthy", "model": model_name}

    @app.post("/predict", response_model=PredictionOutput)
    def predict(input_data: PredictionInput):
        try:
            features = np.array(input_data.features)

            if preprocess_fn:
                features = preprocess_fn(features)

            predictions = model.predict(features).tolist()

            if postprocess_fn:
                predictions = postprocess_fn(predictions)

            return PredictionOutput(predictions=predictions, model_name=model_name)
        except Exception as e:
            # Surface model/preprocessing failures as a 500 with the message.
            raise HTTPException(status_code=500, detail=str(e))

    @app.post("/predict_proba")
    def predict_proba(input_data: PredictionInput):
        if not hasattr(model, 'predict_proba'):
            raise HTTPException(status_code=400, detail="Model does not support probability predictions")

        try:
            features = np.array(input_data.features)
            probas = model.predict_proba(features).tolist()
            return {"probabilities": probas, "model_name": model_name}
        except Exception as e:
            raise HTTPException(status_code=500, detail=str(e))

    return app
92
+
93
+
94
def create_flask_app(
    model: Any, model_name: str = "model",
    preprocess_fn: Optional[Callable] = None,
    postprocess_fn: Optional[Callable] = None
):
    """Build a Flask app exposing health probes and a /predict endpoint."""
    try:
        from flask import Flask, request, jsonify
    except ImportError:
        raise ImportError("Flask required. Install with: pip install flask")

    import numpy as np

    app = Flask(model_name)

    @app.route("/")
    def root():
        return jsonify({"message": f"Welcome to {model_name} API", "status": "healthy"})

    @app.route("/health")
    def health():
        return jsonify({"status": "healthy", "model": model_name})

    @app.route("/predict", methods=["POST"])
    def predict():
        try:
            payload = request.get_json()
            batch = np.array(payload["features"])
            if preprocess_fn:
                batch = preprocess_fn(batch)
            outputs = model.predict(batch).tolist()
            if postprocess_fn:
                outputs = postprocess_fn(outputs)
            return jsonify({"predictions": outputs, "model_name": model_name})
        except Exception as e:
            # Any failure (bad payload, model error) becomes a JSON 500.
            return jsonify({"error": str(e)}), 500

    return app
136
+
137
+
138
def run_api(
    model: Any, framework: str = "fastapi",
    host: str = "0.0.0.0", port: int = 8000, **kwargs
):
    """
    Run a model serving API (blocking call).

    Args:
        model: Trained model.
        framework: 'fastapi' or 'flask'.
        host: Host address to bind.
        port: Port number to bind.
        **kwargs: Forwarded to ``create_fastapi_app`` / ``create_flask_app``.

    Raises:
        ValueError: If ``framework`` is not 'fastapi' or 'flask'.
        ImportError: If the serving framework (or uvicorn) is not installed.
    """
    # Validate up front so a typo fails fast, before any imports or app
    # construction happen.
    if framework not in ("fastapi", "flask"):
        raise ValueError(f"Unknown framework: {framework}")

    if framework == "fastapi":
        try:
            import uvicorn
        except ImportError as exc:
            raise ImportError("uvicorn required. Install with: pip install uvicorn") from exc
        app = create_fastapi_app(model, **kwargs)
        uvicorn.run(app, host=host, port=port)
    else:
        app = create_flask_app(model, **kwargs)
        app.run(host=host, port=port)
160
+
161
+
162
def generate_api_code(
    model_path: str, output_path: str, framework: str = "fastapi", model_name: str = "model"
) -> str:
    """
    Generate standalone API serving code for a saved model.

    Args:
        model_path: Path to the saved (joblib) model; embedded verbatim in
            the generated source.
        output_path: Output file path for the generated code.
        framework: 'fastapi' or 'flask'.
        model_name: Name used in the generated API's title/messages.

    Returns:
        Path to the generated file.

    Raises:
        ValueError: If ``framework`` is not 'fastapi' or 'flask'.
    """
    # Fail fast on unknown frameworks instead of silently falling back to
    # Flask — keeps behavior consistent with run_api().
    if framework not in ("fastapi", "flask"):
        raise ValueError(f"Unknown framework: {framework}")

    if framework == "fastapi":
        code = f'''"""Auto-generated FastAPI model serving code."""
from fastapi import FastAPI, HTTPException
from pydantic import BaseModel
from typing import List
import numpy as np
import joblib

app = FastAPI(title="{model_name} API")
model = joblib.load("{model_path}")

class PredictionInput(BaseModel):
    features: List[List[float]]

@app.get("/")
def root():
    return {{"message": "Welcome to {model_name} API"}}

@app.get("/health")
def health():
    return {{"status": "healthy"}}

@app.post("/predict")
def predict(input_data: PredictionInput):
    features = np.array(input_data.features)
    predictions = model.predict(features).tolist()
    return {{"predictions": predictions}}

if __name__ == "__main__":
    import uvicorn
    uvicorn.run(app, host="0.0.0.0", port=8000)
'''
    else:
        code = f'''"""Auto-generated Flask model serving code."""
from flask import Flask, request, jsonify
import numpy as np
import joblib

app = Flask(__name__)
model = joblib.load("{model_path}")

@app.route("/")
def root():
    return jsonify({{"message": "Welcome to {model_name} API"}})

@app.route("/predict", methods=["POST"])
def predict():
    data = request.get_json()
    features = np.array(data["features"])
    predictions = model.predict(features).tolist()
    return jsonify({{"predictions": predictions}})

if __name__ == "__main__":
    app.run(host="0.0.0.0", port=8000)
'''

    with open(output_path, 'w') as f:
        f.write(code)

    logger.info(f"Generated API code at {output_path}")
    return output_path
@@ -0,0 +1,191 @@
1
+ """
2
+ AdamOps Cloud Deployment Module
3
+
4
+ AWS, GCP, and Azure deployment helpers.
5
+ """
6
+
7
+ from typing import Any, Dict, Optional
8
+ from pathlib import Path
9
+
10
+ from adamops.utils.logging import get_logger
11
+ from adamops.utils.helpers import ensure_dir
12
+
13
+ logger = get_logger(__name__)
14
+
15
+
16
class CloudDeployer:
    """Common interface for provider-specific cloud deployers."""

    def __init__(self, config: Optional[Dict] = None):
        # Normalize a missing/empty config to a fresh dict so attribute
        # lookups on self.config are always safe.
        self.config = config or {}

    def deploy(self, model_path: str, name: str) -> Dict:
        # Concrete providers (AWS/GCP/Azure) must override this.
        raise NotImplementedError
24
+
25
+
26
class AWSDeployer(CloudDeployer):
    """AWS helpers: S3 upload, SageMaker deployment (stub), Lambda codegen."""

    def upload_to_s3(self, local_path: str, bucket: str, key: str) -> str:
        """Upload a local file to S3 and return its s3:// URI.

        Raises:
            ImportError: If boto3 is not installed.
        """
        try:
            import boto3
        except ImportError:
            raise ImportError("boto3 required. Install with: pip install boto3")

        # Uses the default boto3 credential chain (env vars, config, IAM role).
        s3 = boto3.client('s3')
        s3.upload_file(local_path, bucket, key)
        return f"s3://{bucket}/{key}"

    def deploy_sagemaker(
        self, model_path: str, name: str,
        instance_type: str = "ml.t2.medium",
        role_arn: Optional[str] = None
    ) -> Dict:
        """Deploy to SageMaker endpoint.

        NOTE(review): this is a stub — it only checks that the AWS SDKs are
        importable and returns a "pending" status dict; no endpoint is
        actually created. ``instance_type`` and ``role_arn`` are currently
        unused.
        """
        try:
            import boto3
            import sagemaker
        except ImportError:
            raise ImportError("boto3 and sagemaker required")

        logger.info(f"Deploying {name} to SageMaker...")

        # This is a simplified example - full implementation would need
        # proper model packaging for SageMaker
        return {
            "status": "pending",
            "message": "SageMaker deployment requires additional setup",
            "model_path": model_path,
            "endpoint_name": name,
        }

    def generate_lambda_handler(self, output_path: str, model_s3_path: str) -> str:
        """Write AWS Lambda handler code that serves the model from S3.

        The bucket name is baked in at generation time (split of the s3://
        URI); the object key is recomputed from the embedded URI when the
        handler runs. The model is loaded once per cold start.

        Args:
            output_path: File to write the handler code to.
            model_s3_path: Full ``s3://bucket/key`` URI of the model artifact.

        Returns:
            The output path.
        """
        code = f'''"""AWS Lambda handler for model inference."""
import json
import boto3
import joblib
from io import BytesIO

# Download model from S3 on cold start
s3 = boto3.client('s3')
bucket = "{model_s3_path.split('/')[2]}"
key = "/".join("{model_s3_path}".split('/')[3:])

response = s3.get_object(Bucket=bucket, Key=key)
model = joblib.load(BytesIO(response['Body'].read()))

def lambda_handler(event, context):
    try:
        body = json.loads(event.get('body', '{{}}'))
        features = body.get('features', [])

        import numpy as np
        predictions = model.predict(np.array(features)).tolist()

        return {{
            'statusCode': 200,
            'body': json.dumps({{'predictions': predictions}})
        }}
    except Exception as e:
        return {{
            'statusCode': 500,
            'body': json.dumps({{'error': str(e)}})
        }}
'''
        with open(output_path, 'w') as f:
            f.write(code)

        return output_path
101
+
102
+
103
class GCPDeployer(CloudDeployer):
    """GCP helpers: Cloud Storage upload and Cloud Run build config codegen."""

    def upload_to_gcs(self, local_path: str, bucket: str, blob_name: str) -> str:
        """Upload a local file to Google Cloud Storage and return its gs:// URI.

        Raises:
            ImportError: If google-cloud-storage is not installed.
        """
        try:
            from google.cloud import storage
        except ImportError:
            raise ImportError("google-cloud-storage required")

        # Uses Application Default Credentials.
        client = storage.Client()
        bucket_obj = client.bucket(bucket)
        blob = bucket_obj.blob(blob_name)
        blob.upload_from_filename(local_path)

        return f"gs://{bucket}/{blob_name}"

    def generate_cloud_run_config(self, output_dir: str, name: str, port: int = 8080) -> str:
        """Write a Cloud Build config that builds, pushes, and deploys to
        Cloud Run.

        Args:
            output_dir: Directory to write cloudbuild.yaml into — assumed to
                exist already (this method does not create it; confirm callers).
            name: Service/image name.
            port: Container port passed to Cloud Run.

        Returns:
            Path to the written cloudbuild.yaml as a string.
        """
        # NOTE(review): region is hard-coded to us-central1 and the platform
        # to 'managed'; consider parameterizing.
        config = f'''# Cloud Run service configuration
steps:
  - name: 'gcr.io/cloud-builders/docker'
    args: ['build', '-t', 'gcr.io/$PROJECT_ID/{name}', '.']
  - name: 'gcr.io/cloud-builders/docker'
    args: ['push', 'gcr.io/$PROJECT_ID/{name}']
  - name: 'gcr.io/google.com/cloudsdktool/cloud-sdk'
    entrypoint: gcloud
    args:
      - 'run'
      - 'deploy'
      - '{name}'
      - '--image'
      - 'gcr.io/$PROJECT_ID/{name}'
      - '--region'
      - 'us-central1'
      - '--platform'
      - 'managed'
      - '--port'
      - '{port}'
images:
  - 'gcr.io/$PROJECT_ID/{name}'
'''
        output_path = Path(output_dir) / "cloudbuild.yaml"
        with open(output_path, 'w') as f:
            f.write(config)

        return str(output_path)
150
+
151
+
152
class AzureDeployer(CloudDeployer):
    """Azure helpers (Blob Storage upload)."""

    def upload_to_blob(self, local_path: str, container: str, blob_name: str,
                       connection_string: str) -> str:
        """Upload a local file to Azure Blob Storage and return its HTTPS URL."""
        try:
            from azure.storage.blob import BlobServiceClient
        except ImportError:
            raise ImportError("azure-storage-blob required")

        service = BlobServiceClient.from_connection_string(connection_string)
        target = service.get_blob_client(container=container, blob=blob_name)

        # Stream the file; overwrite replaces any existing blob of that name.
        with open(local_path, 'rb') as data:
            target.upload_blob(data, overwrite=True)

        return f"https://{service.account_name}.blob.core.windows.net/{container}/{blob_name}"
170
+
171
+
172
def get_deployer(cloud: str, config: Optional[Dict] = None) -> CloudDeployer:
    """Instantiate the deployer registered for *cloud* ('aws', 'gcp', 'azure')."""
    registry = {
        "aws": AWSDeployer,
        "gcp": GCPDeployer,
        "azure": AzureDeployer,
    }

    if cloud not in registry:
        raise ValueError(f"Unknown cloud: {cloud}. Available: {list(registry.keys())}")

    return registry[cloud](config)
184
+
185
+
186
def deploy_to_cloud(
    model_path: str, cloud: str, name: str, config: Optional[Dict] = None
) -> Dict:
    """Resolve the deployer for *cloud* and deploy the model through it."""
    return get_deployer(cloud, config).deploy(model_path, name)
@@ -0,0 +1,262 @@
1
+ """
2
+ AdamOps Containerization Module
3
+
4
+ Docker and Kubernetes deployment support.
5
+ """
6
+
7
+ from typing import Dict, List, Optional
8
+ from pathlib import Path
9
+
10
+ from adamops.utils.logging import get_logger
11
+ from adamops.utils.helpers import ensure_dir
12
+
13
+ logger = get_logger(__name__)
14
+
15
+
16
# str.format template for a minimal serving image.
# Placeholders: {python_version}, {port}, {entrypoint}.
DOCKERFILE_TEMPLATE = '''# Auto-generated Dockerfile for AdamOps model
FROM python:{python_version}-slim

WORKDIR /app

# Install system dependencies
RUN apt-get update && apt-get install -y --no-install-recommends \\
    gcc \\
    && rm -rf /var/lib/apt/lists/*

# Copy requirements
COPY requirements.txt .
RUN pip install --no-cache-dir -r requirements.txt

# Copy application
COPY . .

# Expose port
EXPOSE {port}

# Run application
CMD ["python", "{entrypoint}"]
'''

# str.format template for the serving requirements file.
# Placeholder: {framework_deps} — framework-specific pins appended to the
# fixed scientific-Python baseline.
REQUIREMENTS_TEMPLATE = '''# Auto-generated requirements for model serving
numpy>=1.23.0
pandas>=1.5.0
scikit-learn>=1.2.0
joblib>=1.2.0
{framework_deps}
'''

# str.format template emitting a Deployment and a Service in one YAML file
# (separated by `---`). Placeholders: {name}, {replicas}, {image}, {port},
# {memory}/{cpu} (requests), {memory_limit}/{cpu_limit} (limits),
# {service_type}.
K8S_DEPLOYMENT_TEMPLATE = '''apiVersion: apps/v1
kind: Deployment
metadata:
  name: {name}-deployment
  labels:
    app: {name}
spec:
  replicas: {replicas}
  selector:
    matchLabels:
      app: {name}
  template:
    metadata:
      labels:
        app: {name}
    spec:
      containers:
      - name: {name}
        image: {image}
        ports:
        - containerPort: {port}
        resources:
          requests:
            memory: "{memory}"
            cpu: "{cpu}"
          limits:
            memory: "{memory_limit}"
            cpu: "{cpu_limit}"
---
apiVersion: v1
kind: Service
metadata:
  name: {name}-service
spec:
  selector:
    app: {name}
  ports:
  - port: {port}
    targetPort: {port}
  type: {service_type}
'''
89
+
90
+
91
def generate_dockerfile(
    output_dir: str, entrypoint: str = "app.py",
    python_version: str = "3.10", port: int = 8000,
    framework: str = "fastapi"
) -> str:
    """
    Generate a Dockerfile plus a matching requirements.txt for model serving.

    Args:
        output_dir: Output directory (created if missing).
        entrypoint: Python entrypoint file baked into the image CMD.
        python_version: Base image Python version.
        port: Port exposed by the container.
        framework: 'fastapi' (adds uvicorn) or 'flask'.

    Returns:
        Path to the generated Dockerfile as a string.
    """
    target = Path(output_dir)
    ensure_dir(target)

    dockerfile_path = target / "Dockerfile"
    with open(dockerfile_path, "w") as fh:
        fh.write(DOCKERFILE_TEMPLATE.format(
            python_version=python_version,
            port=port,
            entrypoint=entrypoint,
        ))

    # Serving dependencies differ by framework; anything non-fastapi gets
    # the Flask pin.
    if framework == "fastapi":
        extra_deps = "fastapi>=0.100.0\nuvicorn>=0.22.0"
    else:
        extra_deps = "flask>=2.3.0"

    with open(target / "requirements.txt", "w") as fh:
        fh.write(REQUIREMENTS_TEMPLATE.format(framework_deps=extra_deps))

    logger.info(f"Generated Dockerfile at {dockerfile_path}")
    return str(dockerfile_path)
133
+
134
+
135
def generate_docker_compose(
    output_dir: str, service_name: str = "model-api",
    port: int = 8000, image: Optional[str] = None
) -> str:
    """Generate docker-compose.yml for the serving container.

    Args:
        output_dir: Output directory (created if missing).
        service_name: Compose service name.
        port: Host and container port (published 1:1).
        image: If given, reference this prebuilt image instead of building
            from the local Dockerfile.

    Returns:
        Path to the generated docker-compose.yml as a string.
    """
    output_dir = Path(output_dir)
    ensure_dir(output_dir)

    content = f'''version: "3.8"

services:
  {service_name}:
    build: .
    ports:
      - "{port}:{port}"
    environment:
      - PORT={port}
    restart: unless-stopped
'''

    # Textual swap of the build directive for a prebuilt image reference;
    # relies on "build: ." appearing exactly once in the template above.
    if image:
        content = content.replace("build: .", f"image: {image}")

    filepath = output_dir / "docker-compose.yml"
    with open(filepath, "w") as f:
        f.write(content)

    logger.info(f"Generated docker-compose.yml at {filepath}")
    return str(filepath)
164
+
165
+
166
def generate_k8s_manifests(
    output_dir: str, name: str = "model-api",
    image: str = "model-api:latest", port: int = 8000,
    replicas: int = 2, memory: str = "512Mi", cpu: str = "250m",
    service_type: str = "LoadBalancer"
) -> str:
    """
    Generate Kubernetes Deployment + Service manifests in one file.

    Args:
        output_dir: Output directory (created if missing).
        name: Base name for the Deployment and Service.
        image: Container image reference.
        port: Container port and Service port.
        replicas: Desired replica count.
        memory: Memory request (also reused as the limit).
        cpu: CPU request (also reused as the limit).
        service_type: Kubernetes Service type.

    Returns:
        Path to the generated manifest file as a string.
    """
    dest = Path(output_dir)
    ensure_dir(dest)

    # Limits mirror requests — there are no separate limit parameters.
    manifest = K8S_DEPLOYMENT_TEMPLATE.format(
        name=name, image=image, port=port, replicas=replicas,
        memory=memory, cpu=cpu, memory_limit=memory, cpu_limit=cpu,
        service_type=service_type
    )

    manifest_path = dest / "k8s-deployment.yaml"
    with open(manifest_path, "w") as out:
        out.write(manifest)

    logger.info(f"Generated K8s manifests at {manifest_path}")
    return str(manifest_path)
203
+
204
+
205
def build_docker_image(
    context_dir: str, image_name: str, tag: str = "latest"
) -> bool:
    """Build a Docker image from *context_dir* (requires the Docker CLI).

    Args:
        context_dir: Docker build context (must contain a Dockerfile).
        image_name: Image repository name.
        tag: Image tag.

    Returns:
        True on success, False on failure — including when the ``docker``
        executable is not installed or not on PATH.
    """
    import subprocess

    full_tag = f"{image_name}:{tag}"
    cmd = ["docker", "build", "-t", full_tag, context_dir]

    try:
        subprocess.run(cmd, check=True, capture_output=True, text=True)
        logger.info(f"Built Docker image: {full_tag}")
        return True
    except subprocess.CalledProcessError as e:
        logger.error(f"Docker build failed: {e.stderr}")
        return False
    except FileNotFoundError:
        # subprocess raises FileNotFoundError (not CalledProcessError) when
        # the docker binary itself is missing — report failure, don't crash.
        logger.error("Docker CLI not found on PATH")
        return False
221
+
222
+
223
def containerize(
    model_path: str, output_dir: str, name: str = "model-api",
    framework: str = "fastapi", port: int = 8000, build: bool = False
) -> Dict[str, str]:
    """
    Create complete containerization package.

    Generates serving code (app.py), copies the model file next to it, and
    emits a Dockerfile, docker-compose.yml, and Kubernetes manifests;
    optionally runs ``docker build`` on the result.

    Args:
        model_path: Path to the saved model file.
        output_dir: Package output directory (created if missing).
        name: Service/image name used across the generated artifacts.
        framework: 'fastapi' or 'flask' serving framework.
        port: Port used by the API, container, compose service, and K8s
            Service.
        build: If True, also build the Docker image from the output dir.

    Returns:
        Dict with paths to generated files: keys 'api', 'dockerfile',
        'docker_compose', 'k8s', 'model'.
    """
    # Imported inside the function — presumably to avoid a circular import
    # with adamops.deployment.api; confirm before hoisting to module level.
    from adamops.deployment.api import generate_api_code

    output_dir = Path(output_dir)
    ensure_dir(output_dir)

    # Generate API code.
    # NOTE(review): generate_api_code annotates output_path as str but
    # receives a Path here.
    api_path = generate_api_code(model_path, output_dir / "app.py", framework, name)

    # Copy model next to the serving code so the Docker COPY picks it up.
    import shutil
    model_dest = output_dir / Path(model_path).name
    shutil.copy(model_path, model_dest)

    # Generate Docker/Kubernetes artifacts, all wired to the same port.
    dockerfile = generate_dockerfile(output_dir, "app.py", port=port, framework=framework)
    compose = generate_docker_compose(output_dir, name, port)
    k8s = generate_k8s_manifests(output_dir, name, f"{name}:latest", port)

    result = {
        "api": api_path,
        "dockerfile": dockerfile,
        "docker_compose": compose,
        "k8s": k8s,
        "model": str(model_dest),
    }

    # Best effort: the build's success/failure does not affect the returned
    # paths.
    if build:
        build_docker_image(str(output_dir), name)

    return result