kailash 0.8.4__py3-none-any.whl → 0.8.5__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (79)
  1. kailash/__init__.py +1 -7
  2. kailash/cli/__init__.py +11 -1
  3. kailash/cli/validation_audit.py +570 -0
  4. kailash/core/actors/supervisor.py +1 -1
  5. kailash/core/resilience/circuit_breaker.py +71 -1
  6. kailash/core/resilience/health_monitor.py +172 -0
  7. kailash/edge/compliance.py +33 -0
  8. kailash/edge/consistency.py +609 -0
  9. kailash/edge/coordination/__init__.py +30 -0
  10. kailash/edge/coordination/global_ordering.py +355 -0
  11. kailash/edge/coordination/leader_election.py +217 -0
  12. kailash/edge/coordination/partition_detector.py +296 -0
  13. kailash/edge/coordination/raft.py +485 -0
  14. kailash/edge/discovery.py +63 -1
  15. kailash/edge/migration/__init__.py +19 -0
  16. kailash/edge/migration/edge_migrator.py +832 -0
  17. kailash/edge/monitoring/__init__.py +21 -0
  18. kailash/edge/monitoring/edge_monitor.py +736 -0
  19. kailash/edge/prediction/__init__.py +10 -0
  20. kailash/edge/prediction/predictive_warmer.py +591 -0
  21. kailash/edge/resource/__init__.py +102 -0
  22. kailash/edge/resource/cloud_integration.py +796 -0
  23. kailash/edge/resource/cost_optimizer.py +949 -0
  24. kailash/edge/resource/docker_integration.py +919 -0
  25. kailash/edge/resource/kubernetes_integration.py +893 -0
  26. kailash/edge/resource/platform_integration.py +913 -0
  27. kailash/edge/resource/predictive_scaler.py +959 -0
  28. kailash/edge/resource/resource_analyzer.py +824 -0
  29. kailash/edge/resource/resource_pools.py +610 -0
  30. kailash/integrations/dataflow_edge.py +261 -0
  31. kailash/mcp_server/registry_integration.py +1 -1
  32. kailash/monitoring/__init__.py +18 -0
  33. kailash/monitoring/alerts.py +646 -0
  34. kailash/monitoring/metrics.py +677 -0
  35. kailash/nodes/__init__.py +2 -0
  36. kailash/nodes/ai/semantic_memory.py +2 -2
  37. kailash/nodes/base.py +545 -0
  38. kailash/nodes/edge/__init__.py +36 -0
  39. kailash/nodes/edge/base.py +240 -0
  40. kailash/nodes/edge/cloud_node.py +710 -0
  41. kailash/nodes/edge/coordination.py +239 -0
  42. kailash/nodes/edge/docker_node.py +825 -0
  43. kailash/nodes/edge/edge_data.py +582 -0
  44. kailash/nodes/edge/edge_migration_node.py +392 -0
  45. kailash/nodes/edge/edge_monitoring_node.py +421 -0
  46. kailash/nodes/edge/edge_state.py +673 -0
  47. kailash/nodes/edge/edge_warming_node.py +393 -0
  48. kailash/nodes/edge/kubernetes_node.py +652 -0
  49. kailash/nodes/edge/platform_node.py +766 -0
  50. kailash/nodes/edge/resource_analyzer_node.py +378 -0
  51. kailash/nodes/edge/resource_optimizer_node.py +501 -0
  52. kailash/nodes/edge/resource_scaler_node.py +397 -0
  53. kailash/nodes/ports.py +676 -0
  54. kailash/runtime/local.py +344 -1
  55. kailash/runtime/validation/__init__.py +20 -0
  56. kailash/runtime/validation/connection_context.py +119 -0
  57. kailash/runtime/validation/enhanced_error_formatter.py +202 -0
  58. kailash/runtime/validation/error_categorizer.py +164 -0
  59. kailash/runtime/validation/metrics.py +380 -0
  60. kailash/runtime/validation/performance.py +615 -0
  61. kailash/runtime/validation/suggestion_engine.py +212 -0
  62. kailash/testing/fixtures.py +2 -2
  63. kailash/workflow/builder.py +230 -4
  64. kailash/workflow/contracts.py +418 -0
  65. kailash/workflow/edge_infrastructure.py +369 -0
  66. kailash/workflow/migration.py +3 -3
  67. kailash/workflow/type_inference.py +669 -0
  68. {kailash-0.8.4.dist-info → kailash-0.8.5.dist-info}/METADATA +43 -27
  69. {kailash-0.8.4.dist-info → kailash-0.8.5.dist-info}/RECORD +73 -27
  70. kailash/nexus/__init__.py +0 -21
  71. kailash/nexus/cli/__init__.py +0 -5
  72. kailash/nexus/cli/__main__.py +0 -6
  73. kailash/nexus/cli/main.py +0 -176
  74. kailash/nexus/factory.py +0 -413
  75. kailash/nexus/gateway.py +0 -545
  76. {kailash-0.8.4.dist-info → kailash-0.8.5.dist-info}/WHEEL +0 -0
  77. {kailash-0.8.4.dist-info → kailash-0.8.5.dist-info}/entry_points.txt +0 -0
  78. {kailash-0.8.4.dist-info → kailash-0.8.5.dist-info}/licenses/LICENSE +0 -0
  79. {kailash-0.8.4.dist-info → kailash-0.8.5.dist-info}/top_level.txt +0 -0
kailash/nodes/edge/resource_scaler_node.py
@@ -0,0 +1,397 @@
+"""Resource scaler node for predictive edge resource scaling.
+
+This node integrates predictive scaling capabilities into workflows,
+enabling ML-based resource demand prediction and proactive scaling.
+"""
+
+import asyncio
+from datetime import datetime
+from typing import Any, Dict, List, Optional
+
+from kailash.edge.resource.predictive_scaler import (
+    PredictionHorizon,
+    PredictiveScaler,
+    ScalingDecision,
+    ScalingPrediction,
+    ScalingStrategy,
+)
+from kailash.nodes.base import NodeParameter, register_node
+from kailash.nodes.base_async import AsyncNode
+
+
+@register_node()
+class ResourceScalerNode(AsyncNode):
+    """Node for predictive resource scaling operations.
+
+    This node provides ML-based scaling predictions and automated
+    scaling decisions for edge resources.
+
+    Example:
+        >>> # Record usage for predictions
+        >>> result = await scaler_node.execute_async(
+        ...     operation="record_usage",
+        ...     edge_node="edge-west-1",
+        ...     resource_type="cpu",
+        ...     usage=3.2,
+        ...     capacity=4.0
+        ... )
+
+        >>> # Get scaling predictions
+        >>> result = await scaler_node.execute_async(
+        ...     operation="predict_scaling",
+        ...     strategy="hybrid",
+        ...     horizons=["immediate", "short_term"]
+        ... )
+
+        >>> # Get resource forecast
+        >>> result = await scaler_node.execute_async(
+        ...     operation="get_forecast",
+        ...     edge_node="edge-west-1",
+        ...     resource_type="cpu",
+        ...     forecast_minutes=60
+        ... )
+
+        >>> # Evaluate past decision
+        >>> result = await scaler_node.execute_async(
+        ...     operation="evaluate_decision",
+        ...     decision_id="edge-1_12345",
+        ...     actual_usage={"edge-1:cpu": 85.5}
+        ... )
+    """
+
+    def __init__(self, **kwargs):
+        """Initialize resource scaler node."""
+        super().__init__(**kwargs)
+
+        # Extract configuration
+        prediction_window = kwargs.get("prediction_window", 3600)
+        update_interval = kwargs.get("update_interval", 60)
+        confidence_threshold = kwargs.get("confidence_threshold", 0.7)
+        scale_up_threshold = kwargs.get("scale_up_threshold", 0.8)
+        scale_down_threshold = kwargs.get("scale_down_threshold", 0.3)
+        min_data_points = kwargs.get("min_data_points", 30)
+
+        # Initialize scaler
+        self.scaler = PredictiveScaler(
+            prediction_window=prediction_window,
+            update_interval=update_interval,
+            confidence_threshold=confidence_threshold,
+            scale_up_threshold=scale_up_threshold,
+            scale_down_threshold=scale_down_threshold,
+            min_data_points=min_data_points,
+        )
+
+        self._scaler_started = False
+
+    @property
+    def input_parameters(self) -> Dict[str, NodeParameter]:
+        """Define input parameters."""
+        return {
+            "operation": NodeParameter(
+                name="operation",
+                type=str,
+                required=True,
+                description="Operation to perform (record_usage, predict_scaling, get_forecast, evaluate_decision, start_scaler, stop_scaler)",
+            ),
+            # For record_usage
+            "edge_node": NodeParameter(
+                name="edge_node",
+                type=str,
+                required=False,
+                description="Edge node identifier",
+            ),
+            "resource_type": NodeParameter(
+                name="resource_type",
+                type=str,
+                required=False,
+                description="Type of resource",
+            ),
+            "usage": NodeParameter(
+                name="usage",
+                type=float,
+                required=False,
+                description="Current resource usage",
+            ),
+            "capacity": NodeParameter(
+                name="capacity",
+                type=float,
+                required=False,
+                description="Total resource capacity",
+            ),
+            # For predict_scaling
+            "strategy": NodeParameter(
+                name="strategy",
+                type=str,
+                required=False,
+                default="hybrid",
+                description="Scaling strategy (reactive, predictive, scheduled, hybrid, aggressive, conservative)",
+            ),
+            "horizons": NodeParameter(
+                name="horizons",
+                type=list,
+                required=False,
+                description="Prediction horizons (immediate, short_term, medium_term, long_term)",
+            ),
+            # For get_forecast
+            "forecast_minutes": NodeParameter(
+                name="forecast_minutes",
+                type=int,
+                required=False,
+                default=60,
+                description="Minutes to forecast ahead",
+            ),
+            # For evaluate_decision
+            "decision_id": NodeParameter(
+                name="decision_id",
+                type=str,
+                required=False,
+                description="Decision ID to evaluate",
+            ),
+            "actual_usage": NodeParameter(
+                name="actual_usage",
+                type=dict,
+                required=False,
+                description="Actual usage that occurred",
+            ),
+            "feedback": NodeParameter(
+                name="feedback",
+                type=str,
+                required=False,
+                description="Optional feedback on decision",
+            ),
+            # Configuration
+            "prediction_window": NodeParameter(
+                name="prediction_window",
+                type=int,
+                required=False,
+                default=3600,
+                description="Historical data window for predictions (seconds)",
+            ),
+            "update_interval": NodeParameter(
+                name="update_interval",
+                type=int,
+                required=False,
+                default=60,
+                description="How often to update predictions (seconds)",
+            ),
+            "confidence_threshold": NodeParameter(
+                name="confidence_threshold",
+                type=float,
+                required=False,
+                default=0.7,
+                description="Minimum confidence for scaling actions",
+            ),
+            "scale_up_threshold": NodeParameter(
+                name="scale_up_threshold",
+                type=float,
+                required=False,
+                default=0.8,
+                description="Utilization threshold for scaling up (0-1)",
+            ),
+            "scale_down_threshold": NodeParameter(
+                name="scale_down_threshold",
+                type=float,
+                required=False,
+                default=0.3,
+                description="Utilization threshold for scaling down (0-1)",
+            ),
+            "min_data_points": NodeParameter(
+                name="min_data_points",
+                type=int,
+                required=False,
+                default=30,
+                description="Minimum data points for predictions",
+            ),
+        }
+
+    @property
+    def output_parameters(self) -> Dict[str, NodeParameter]:
+        """Define output parameters."""
+        return {
+            "status": NodeParameter(
+                name="status", type=str, description="Operation status"
+            ),
+            "predictions": NodeParameter(
+                name="predictions",
+                type=list,
+                required=False,
+                description="Scaling predictions",
+            ),
+            "decisions": NodeParameter(
+                name="decisions",
+                type=list,
+                required=False,
+                description="Scaling decisions",
+            ),
+            "forecast": NodeParameter(
+                name="forecast",
+                type=dict,
+                required=False,
+                description="Resource usage forecast",
+            ),
+            "usage_recorded": NodeParameter(
+                name="usage_recorded",
+                type=bool,
+                required=False,
+                description="Whether usage was recorded",
+            ),
+            "evaluation_result": NodeParameter(
+                name="evaluation_result",
+                type=dict,
+                required=False,
+                description="Decision evaluation result",
+            ),
+            "scaler_active": NodeParameter(
+                name="scaler_active",
+                type=bool,
+                required=False,
+                description="Whether scaler is active",
+            ),
+        }
+
+    def get_parameters(self) -> Dict[str, NodeParameter]:
+        """Get all node parameters for compatibility."""
+        return self.input_parameters
+
+    async def async_run(self, **kwargs) -> Dict[str, Any]:
+        """Execute scaling operation."""
+        operation = kwargs["operation"]
+
+        try:
+            if operation == "record_usage":
+                return await self._record_usage(kwargs)
+            elif operation == "predict_scaling":
+                return await self._predict_scaling(kwargs)
+            elif operation == "get_forecast":
+                return await self._get_forecast(kwargs)
+            elif operation == "evaluate_decision":
+                return await self._evaluate_decision(kwargs)
+            elif operation == "start_scaler":
+                return await self._start_scaler()
+            elif operation == "stop_scaler":
+                return await self._stop_scaler()
+            else:
+                raise ValueError(f"Unknown operation: {operation}")
+
+        except Exception as e:
+            self.logger.error(f"Resource scaling operation failed: {str(e)}")
+            return {"status": "error", "error": str(e)}
+
+    async def _record_usage(self, kwargs: Dict[str, Any]) -> Dict[str, Any]:
+        """Record resource usage."""
+        await self.scaler.record_usage(
+            edge_node=kwargs.get("edge_node", "unknown"),
+            resource_type=kwargs.get("resource_type", "unknown"),
+            usage=kwargs.get("usage", 0.0),
+            capacity=kwargs.get("capacity", 1.0),
+            timestamp=datetime.now(),
+        )
+
+        return {
+            "status": "success",
+            "usage_recorded": True,
+            "edge_node": kwargs.get("edge_node"),
+            "resource_type": kwargs.get("resource_type"),
+            "utilization": (kwargs.get("usage", 0) / kwargs.get("capacity", 1) * 100),
+        }
+
+    async def _predict_scaling(self, kwargs: Dict[str, Any]) -> Dict[str, Any]:
+        """Generate scaling predictions."""
+        # Parse strategy
+        strategy_str = kwargs.get("strategy", "hybrid")
+        try:
+            strategy = ScalingStrategy(strategy_str)
+        except ValueError:
+            strategy = ScalingStrategy.HYBRID
+
+        # Parse horizons
+        horizon_strs = kwargs.get("horizons", ["immediate", "short_term"])
+        horizons = []
+
+        horizon_map = {
+            "immediate": PredictionHorizon.IMMEDIATE,
+            "short_term": PredictionHorizon.SHORT_TERM,
+            "medium_term": PredictionHorizon.MEDIUM_TERM,
+            "long_term": PredictionHorizon.LONG_TERM,
+        }
+
+        for h_str in horizon_strs:
+            if h_str in horizon_map:
+                horizons.append(horizon_map[h_str])
+
+        if not horizons:
+            horizons = [PredictionHorizon.IMMEDIATE, PredictionHorizon.SHORT_TERM]
+
+        # Get predictions
+        decisions = await self.scaler.predict_scaling_needs(
+            strategy=strategy, horizons=horizons
+        )
+
+        # Extract predictions from decisions
+        all_predictions = []
+        for decision in decisions:
+            all_predictions.extend(decision.predictions)
+
+        return {
+            "status": "success",
+            "decisions": [d.to_dict() for d in decisions],
+            "predictions": [p.to_dict() for p in all_predictions],
+            "decision_count": len(decisions),
+            "prediction_count": len(all_predictions),
+            "actions_required": len(
+                [d for d in decisions if d.action_plan.get("actions")]
+            ),
+        }

+    async def _get_forecast(self, kwargs: Dict[str, Any]) -> Dict[str, Any]:
+        """Get resource forecast."""
+        forecast = await self.scaler.get_resource_forecast(
+            edge_node=kwargs.get("edge_node", "unknown"),
+            resource_type=kwargs.get("resource_type", "unknown"),
+            forecast_minutes=kwargs.get("forecast_minutes", 60),
+        )
+
+        return {"status": "success", "forecast": forecast}
+
+    async def _evaluate_decision(self, kwargs: Dict[str, Any]) -> Dict[str, Any]:
+        """Evaluate a scaling decision."""
+        decision_id = kwargs.get("decision_id")
+        actual_usage = kwargs.get("actual_usage", {})
+        feedback = kwargs.get("feedback")
+
+        if not decision_id:
+            return {"status": "error", "error": "decision_id is required"}
+
+        await self.scaler.evaluate_scaling_decision(
+            decision_id=decision_id, actual_usage=actual_usage, feedback=feedback
+        )
+
+        return {
+            "status": "success",
+            "evaluation_result": {
+                "decision_id": decision_id,
+                "evaluated": True,
+                "feedback_provided": feedback is not None,
+            },
+        }
+
+    async def _start_scaler(self) -> Dict[str, Any]:
+        """Start background scaler."""
+        if not self._scaler_started:
+            await self.scaler.start()
+            self._scaler_started = True
+
+        return {"status": "success", "scaler_active": True}
+
+    async def _stop_scaler(self) -> Dict[str, Any]:
+        """Stop background scaler."""
+        if self._scaler_started:
+            await self.scaler.stop()
+            self._scaler_started = False
+
+        return {"status": "success", "scaler_active": False}
+
+    async def cleanup(self):
+        """Clean up resources."""
+        if self._scaler_started:
+            await self.scaler.stop()
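
For reference, below is a minimal sketch of how the new ResourceScalerNode might be exercised end to end, adapted from the docstring examples in the diff above. The class name, import path, operation names, parameters, and result keys are taken from the added file; the standalone asyncio driver and the assumption that the node can be constructed with only configuration keyword arguments (no extra base-class arguments) are illustrative assumptions, not part of this release's documentation. Note that meaningful predictions require at least min_data_points (default 30) recorded usage samples.

    import asyncio

    from kailash.nodes.edge.resource_scaler_node import ResourceScalerNode


    async def main():
        # Assumption: construction with only config kwargs; adjust if the
        # AsyncNode base class requires additional arguments.
        scaler_node = ResourceScalerNode(confidence_threshold=0.7, min_data_points=30)

        # Record a usage sample; repeat until enough samples exist before
        # expecting non-empty predictions.
        await scaler_node.execute_async(
            operation="record_usage",
            edge_node="edge-west-1",
            resource_type="cpu",
            usage=3.2,
            capacity=4.0,
        )

        # Ask for hybrid-strategy predictions over the near-term horizons.
        result = await scaler_node.execute_async(
            operation="predict_scaling",
            strategy="hybrid",
            horizons=["immediate", "short_term"],
        )
        print(result["status"], result.get("prediction_count"))

        # Fetch a 60-minute forecast for the same node and resource.
        forecast = await scaler_node.execute_async(
            operation="get_forecast",
            edge_node="edge-west-1",
            resource_type="cpu",
            forecast_minutes=60,
        )
        print(forecast["status"])


    asyncio.run(main())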