kailash 0.8.3__py3-none-any.whl → 0.8.5__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (84)
  1. kailash/__init__.py +1 -7
  2. kailash/cli/__init__.py +11 -1
  3. kailash/cli/validation_audit.py +570 -0
  4. kailash/core/actors/supervisor.py +1 -1
  5. kailash/core/resilience/circuit_breaker.py +71 -1
  6. kailash/core/resilience/health_monitor.py +172 -0
  7. kailash/edge/compliance.py +33 -0
  8. kailash/edge/consistency.py +609 -0
  9. kailash/edge/coordination/__init__.py +30 -0
  10. kailash/edge/coordination/global_ordering.py +355 -0
  11. kailash/edge/coordination/leader_election.py +217 -0
  12. kailash/edge/coordination/partition_detector.py +296 -0
  13. kailash/edge/coordination/raft.py +485 -0
  14. kailash/edge/discovery.py +63 -1
  15. kailash/edge/migration/__init__.py +19 -0
  16. kailash/edge/migration/edge_migrator.py +832 -0
  17. kailash/edge/monitoring/__init__.py +21 -0
  18. kailash/edge/monitoring/edge_monitor.py +736 -0
  19. kailash/edge/prediction/__init__.py +10 -0
  20. kailash/edge/prediction/predictive_warmer.py +591 -0
  21. kailash/edge/resource/__init__.py +102 -0
  22. kailash/edge/resource/cloud_integration.py +796 -0
  23. kailash/edge/resource/cost_optimizer.py +949 -0
  24. kailash/edge/resource/docker_integration.py +919 -0
  25. kailash/edge/resource/kubernetes_integration.py +893 -0
  26. kailash/edge/resource/platform_integration.py +913 -0
  27. kailash/edge/resource/predictive_scaler.py +959 -0
  28. kailash/edge/resource/resource_analyzer.py +824 -0
  29. kailash/edge/resource/resource_pools.py +610 -0
  30. kailash/integrations/dataflow_edge.py +261 -0
  31. kailash/mcp_server/registry_integration.py +1 -1
  32. kailash/monitoring/__init__.py +18 -0
  33. kailash/monitoring/alerts.py +646 -0
  34. kailash/monitoring/metrics.py +677 -0
  35. kailash/nodes/__init__.py +2 -0
  36. kailash/nodes/ai/__init__.py +17 -0
  37. kailash/nodes/ai/a2a.py +1914 -43
  38. kailash/nodes/ai/a2a_backup.py +1807 -0
  39. kailash/nodes/ai/hybrid_search.py +972 -0
  40. kailash/nodes/ai/semantic_memory.py +558 -0
  41. kailash/nodes/ai/streaming_analytics.py +947 -0
  42. kailash/nodes/base.py +545 -0
  43. kailash/nodes/edge/__init__.py +36 -0
  44. kailash/nodes/edge/base.py +240 -0
  45. kailash/nodes/edge/cloud_node.py +710 -0
  46. kailash/nodes/edge/coordination.py +239 -0
  47. kailash/nodes/edge/docker_node.py +825 -0
  48. kailash/nodes/edge/edge_data.py +582 -0
  49. kailash/nodes/edge/edge_migration_node.py +392 -0
  50. kailash/nodes/edge/edge_monitoring_node.py +421 -0
  51. kailash/nodes/edge/edge_state.py +673 -0
  52. kailash/nodes/edge/edge_warming_node.py +393 -0
  53. kailash/nodes/edge/kubernetes_node.py +652 -0
  54. kailash/nodes/edge/platform_node.py +766 -0
  55. kailash/nodes/edge/resource_analyzer_node.py +378 -0
  56. kailash/nodes/edge/resource_optimizer_node.py +501 -0
  57. kailash/nodes/edge/resource_scaler_node.py +397 -0
  58. kailash/nodes/ports.py +676 -0
  59. kailash/runtime/local.py +344 -1
  60. kailash/runtime/validation/__init__.py +20 -0
  61. kailash/runtime/validation/connection_context.py +119 -0
  62. kailash/runtime/validation/enhanced_error_formatter.py +202 -0
  63. kailash/runtime/validation/error_categorizer.py +164 -0
  64. kailash/runtime/validation/metrics.py +380 -0
  65. kailash/runtime/validation/performance.py +615 -0
  66. kailash/runtime/validation/suggestion_engine.py +212 -0
  67. kailash/testing/fixtures.py +2 -2
  68. kailash/workflow/builder.py +234 -8
  69. kailash/workflow/contracts.py +418 -0
  70. kailash/workflow/edge_infrastructure.py +369 -0
  71. kailash/workflow/migration.py +3 -3
  72. kailash/workflow/type_inference.py +669 -0
  73. {kailash-0.8.3.dist-info → kailash-0.8.5.dist-info}/METADATA +44 -27
  74. {kailash-0.8.3.dist-info → kailash-0.8.5.dist-info}/RECORD +78 -28
  75. kailash/nexus/__init__.py +0 -21
  76. kailash/nexus/cli/__init__.py +0 -5
  77. kailash/nexus/cli/__main__.py +0 -6
  78. kailash/nexus/cli/main.py +0 -176
  79. kailash/nexus/factory.py +0 -413
  80. kailash/nexus/gateway.py +0 -545
  81. {kailash-0.8.3.dist-info → kailash-0.8.5.dist-info}/WHEEL +0 -0
  82. {kailash-0.8.3.dist-info → kailash-0.8.5.dist-info}/entry_points.txt +0 -0
  83. {kailash-0.8.3.dist-info → kailash-0.8.5.dist-info}/licenses/LICENSE +0 -0
  84. {kailash-0.8.3.dist-info → kailash-0.8.5.dist-info}/top_level.txt +0 -0
kailash/nodes/base.py CHANGED
@@ -31,6 +31,7 @@ from typing import Any
31
31
 
32
32
  from pydantic import BaseModel, Field, ValidationError
33
33
 
34
+ from kailash.nodes.ports import InputPort, OutputPort, get_port_registry
34
35
  from kailash.sdk_exceptions import (
35
36
  NodeConfigurationError,
36
37
  NodeExecutionError,
@@ -1224,6 +1225,550 @@ class Node(ABC):
1224
1225
  ) from e
1225
1226
 
1226
1227
 
1228
class TypedNode(Node):
    """Enhanced node base class with a type-safe port system.

    Extends the base ``Node`` with declarative input/output ports that provide:

    1. Type-safe input/output declarations using descriptors
    2. Automatic parameter schema generation from ports
    3. IDE support with full autocomplete and type checking
    4. Runtime type validation and constraint enforcement
    5. Backward compatibility with existing ``Node`` patterns

    Usage pattern::

        class MyTypedNode(TypedNode):
            # Input ports with type safety
            text_input = InputPort[str]("text_input", description="Text to process")
            count = InputPort[int]("count", default=1, description="Number of iterations")

            # Output ports
            result = OutputPort[str]("result", description="Processed text")

            def run(self, **kwargs) -> dict[str, Any]:
                text = self.text_input.get()    # type-safe input access
                count = self.count.get()
                processed = text * count
                self.result.set(processed)      # type-validated output
                return {self.result.name: processed}

    Migration benefits:
        - Existing ``Node.run()`` signature unchanged
        - ``get_parameters()`` automatically generated from ports
        - ``execute()`` handles port-to-parameter conversion
        - Full backward compatibility maintained

    Advanced features:
        - Port constraints (min/max length, value ranges, patterns)
        - Complex type support (Union, Optional, List[T], Dict[K, V])
        - Port metadata for documentation and UI generation
        - Connection compatibility checking
    """

    def __init__(self, **kwargs):
        """Initialize typed node with port system integration.

        Performs the same initialization as ``Node``, plus port-registry setup
        and seeding of input ports from configuration values.

        Args:
            **kwargs: Node configuration including port defaults.
        """
        # The registry must exist BEFORE super().__init__() because the base
        # class calls get_parameters() (which reads the registry) during
        # its own configuration validation.
        self._port_registry = get_port_registry(self.__class__)

        super().__init__(**kwargs)

        # Seed input ports with any default values supplied via config.
        for port_name, port in self._port_registry.input_ports.items():
            if hasattr(self, port_name):
                bound_port = getattr(self, port_name)
                if port_name in self.config and hasattr(bound_port, "set"):
                    try:
                        bound_port.set(self.config[port_name])
                    except (TypeError, ValueError):
                        # If type validation fails here, defer to the normal
                        # validate_inputs() path for a proper error message.
                        pass

    @staticmethod
    def _resolve_port_type(type_hint) -> type:
        """Map a port's (possibly generic) type hint to a plain runtime type.

        ``NodeParameter`` expects concrete types, so generics are reduced to
        their origin (``List[str]`` -> ``list``) and Union/Optional types fall
        back to ``object`` as a safe catch-all.

        Args:
            type_hint: The port's declared type hint, or a falsy value.

        Returns:
            A plain type usable as ``NodeParameter.type``.
        """
        from typing import Union, get_origin

        param_type = type_hint if type_hint else Any
        if hasattr(param_type, "__origin__"):
            origin = get_origin(param_type)
            if origin is Union:
                # Union types (including Optional) have no single origin type;
                # object accepts anything and lets port validation do the work.
                return object
            return origin or param_type
        return param_type

    def get_parameters(self) -> dict[str, NodeParameter]:
        """Generate parameter schema from port definitions.

        Automatically creates ``NodeParameter`` definitions from ``InputPort``
        declarations, providing seamless integration with existing Node
        validation systems.

        Returns:
            Dictionary mapping parameter names to ``NodeParameter`` instances
            generated from port definitions.
        """
        parameters = {}
        for port_name, port in self._port_registry.input_ports.items():
            parameters[port_name] = NodeParameter(
                name=port_name,
                type=self._resolve_port_type(port.type_hint),
                required=port.metadata.required,
                default=port.metadata.default,
                description=port.metadata.description,
            )
        return parameters

    def get_output_schema(self) -> dict[str, NodeParameter]:
        """Generate output schema from port definitions.

        Creates output parameter definitions from ``OutputPort`` declarations,
        enabling output validation and documentation generation.

        Returns:
            Dictionary mapping output names to ``NodeParameter`` instances.
        """
        outputs = {}
        for port_name, port in self._port_registry.output_ports.items():
            outputs[port_name] = NodeParameter(
                name=port_name,
                type=self._resolve_port_type(port.type_hint),
                required=False,  # Output ports are generally not "required"
                default=None,
                description=port.metadata.description,
            )
        return outputs

    def validate_inputs(self, **kwargs) -> dict[str, Any]:
        """Enhanced input validation using the port system.

        Performs validation in two phases:

        1. Standard Node validation for backward compatibility
        2. Port-specific validation for enhanced type checking

        After validation, the validated values are stored in the bound ports
        so ``port.get()`` works during ``run()`` execution.

        Args:
            **kwargs: Runtime inputs to validate.

        Returns:
            Validated inputs with type conversions applied.

        Raises:
            NodeValidationError: If port validation fails, with port context
                in the error message.
        """
        # Phase 1: standard Node validation (backward compatible).
        validated = super().validate_inputs(**kwargs)

        # Phase 2: port-specific type/constraint validation.
        port_errors = self._port_registry.validate_input_types(validated)
        if port_errors:
            error_details = "; ".join(port_errors)
            raise NodeValidationError(
                f"Port validation failed for node '{self.id}': {error_details}"
            )

        # Store validated values in bound ports for type-safe access
        # via port.get() inside run().
        for port_name, port in self._port_registry.input_ports.items():
            if port_name in validated:
                bound_port = getattr(self, port_name, None)
                if bound_port and hasattr(bound_port, "set"):
                    try:
                        bound_port.set(validated[port_name])
                    except (TypeError, ValueError):
                        # Port validation above should have caught this;
                        # swallow defensively so a late failure cannot
                        # break an already-validated execution.
                        pass
                elif hasattr(self, port_name):
                    # Bound object lacks set(); write the value directly.
                    port_instance = getattr(self, port_name)
                    if hasattr(port_instance, "_value"):
                        port_instance._value = validated[port_name]

        return validated

    def validate_outputs(self, outputs: dict[str, Any]) -> dict[str, Any]:
        """Enhanced output validation using the port system.

        Validates outputs using both standard Node validation (JSON
        serializability) and port definitions (types and constraints).

        Args:
            outputs: Output dictionary from ``run()``.

        Returns:
            Validated outputs.

        Raises:
            NodeValidationError: If either validation phase fails.
        """
        # Phase 1: standard Node validation.
        validated = super().validate_outputs(outputs)

        # Phase 2: port-specific validation.
        port_errors = self._port_registry.validate_output_types(validated)
        if port_errors:
            error_details = "; ".join(port_errors)
            raise NodeValidationError(
                f"Output port validation failed for node '{self.id}': {error_details}"
            )

        return validated

    def get_port_schema(self) -> dict[str, Any]:
        """Get the complete port schema for documentation and tooling.

        Used by documentation generators, UI form builders, workflow
        validation tools, and type inference systems.

        Returns:
            Complete port schema with input and output definitions.
        """
        return self._port_registry.get_port_schema()

    def to_dict(self) -> dict[str, Any]:
        """Enhanced serialization including port information.

        Extends base Node serialization with the port schema for complete
        node documentation and reconstruction.

        Returns:
            Node dictionary with ``"port_schema"`` included.
        """
        base_dict = super().to_dict()
        base_dict["port_schema"] = self.get_port_schema()
        return base_dict
+
1501
+
1502
class AsyncTypedNode(TypedNode):
    """Async version of ``TypedNode`` with full async support.

    Combines the type-safe port system from ``TypedNode`` with async
    execution, providing:

    1. Type-safe input/output ports with async execution
    2. Async-first execution via ``execute_async()`` and ``async_run()``
    3. All port validation and type checking in async context
    4. Full backward compatibility with ``TypedNode`` patterns
    5. Optimal performance for I/O-bound async operations

    Usage pattern::

        class MyAsyncTypedNode(AsyncTypedNode):
            text_input = InputPort[str]("text_input", description="Text to process")
            count = InputPort[int]("count", default=1, description="Number of iterations")
            result = OutputPort[str]("result", description="Processed text")

            async def async_run(self, **kwargs) -> dict[str, Any]:
                text = self.text_input.get()
                count = self.count.get()
                processed = await self.process_async(text, count)
                self.result.set(processed)
                return {self.result.name: processed}

    Migration from TypedNode:
        - Change inheritance from ``TypedNode`` to ``AsyncTypedNode``
        - Change ``run()`` to ``async def async_run()``
        - Add ``await`` to any async operations
        - Use ``execute_async()`` for execution instead of ``execute()``
    """

    def run(self, **kwargs) -> dict[str, Any]:
        """Override ``run()`` to require ``async_run()`` implementation.

        This method should not be called directly — use ``execute_async()``.

        Raises:
            NotImplementedError: Always; async typed nodes must use async_run().
        """
        raise NotImplementedError(
            f"AsyncTypedNode '{self.__class__.__name__}' should implement async_run() method, not run()"
        )

    async def async_run(self, **kwargs) -> dict[str, Any]:
        """Execute the async node's logic with type-safe port access.

        This is the core method implementing the node's async data-processing
        logic. It receives validated inputs and must return a dictionary of
        outputs.

        Design requirements:
            - Must be async and stateless — no side effects between runs
            - All inputs are provided as keyword arguments
            - Must return a JSON-serializable dictionary
            - Can use ``self.port.get()`` / ``self.port.set()`` for type-safe
              input access and output setting
            - Can perform async I/O (API calls, DB queries, etc.)

        Args:
            **kwargs: Validated input parameters matching ``get_parameters()``.

        Returns:
            Dictionary of outputs validated and passed to downstream nodes.

        Raises:
            NodeExecutionError: If execution fails (caught and re-raised by
                ``execute_async()``).
        """
        raise NotImplementedError(
            f"AsyncTypedNode '{self.__class__.__name__}' must implement async_run() method"
        )

    async def execute_async(self, **runtime_inputs) -> dict[str, Any]:
        """Execute the async node with validation and error handling.

        Orchestrates the complete async execution lifecycle:

        1. Input validation (``validate_inputs``, including port validation)
        2. Async execution (``async_run``)
        3. Output validation (``validate_outputs``, including port validation)
        4. Error handling, logging, and timing

        Args:
            **runtime_inputs: Runtime inputs for async node execution.

        Returns:
            Dictionary of validated outputs from ``async_run()``.

        Raises:
            NodeExecutionError: If async execution fails in ``async_run()``.
            NodeValidationError: If input/output validation fails.
        """
        from datetime import UTC, datetime

        start_time = datetime.now(UTC)
        try:
            self.logger.info(f"Executing async node {self.id}")

            # Merge runtime inputs with config (runtime inputs take precedence).
            merged_inputs = {**self.config, **runtime_inputs}

            # Flatten a nested "config" dict (same behavior as base Node),
            # without overriding explicit runtime inputs.
            if "config" in merged_inputs and isinstance(merged_inputs["config"], dict):
                nested_config = merged_inputs["config"]
                for key, value in nested_config.items():
                    if key not in runtime_inputs:
                        merged_inputs[key] = value

            # Validate inputs (also stores values in ports for port.get()).
            validated_inputs = self.validate_inputs(**merged_inputs)
            self.logger.debug(
                f"Validated inputs for async node {self.id}: {validated_inputs}"
            )

            # Execute the node's async logic.
            outputs = await self.async_run(**validated_inputs)

            # Validate outputs (includes port validation).
            validated_outputs = self.validate_outputs(outputs)

            execution_time = (datetime.now(UTC) - start_time).total_seconds()
            self.logger.info(
                f"Async node {self.id} executed successfully in {execution_time:.3f}s"
            )
            return validated_outputs

        except NodeValidationError:
            # Re-raise validation errors as-is.
            raise
        except NodeExecutionError:
            # Re-raise execution errors as-is.
            raise
        except Exception as e:
            # Wrap any other exception in NodeExecutionError.
            self.logger.error(
                f"Async node {self.id} execution failed: {e}", exc_info=True
            )
            raise NodeExecutionError(
                f"Async node '{self.id}' execution failed: {type(e).__name__}: {e}"
            ) from e

    def execute(self, **runtime_inputs) -> dict[str, Any]:
        """Execute the async node synchronously by running async code.

        Provides backward compatibility by running the async execution in a
        synchronous context, handling event-loop management automatically.
        For optimal performance in async workflows, use ``execute_async()``.

        Args:
            **runtime_inputs: Runtime inputs for node execution.

        Returns:
            Dictionary of validated outputs.
        """
        import asyncio
        import sys
        import threading

        # Windows needs the proactor policy for subprocess/socket support.
        if sys.platform == "win32":
            asyncio.set_event_loop_policy(asyncio.WindowsProactorEventLoopPolicy())

        is_main_thread = threading.current_thread() is threading.main_thread()

        try:
            # Probe only — do not execute inside this try, otherwise a
            # RuntimeError raised by the node itself would be mistaken for
            # "no running loop" and trigger a second execution.
            asyncio.get_running_loop()
        except RuntimeError:
            # No event loop running in this thread.
            if is_main_thread:
                # Main thread without a loop — safe to use asyncio.run().
                return asyncio.run(self.execute_async(**runtime_inputs))
            # Non-main thread without a loop — create a dedicated loop.
            return self._execute_in_new_loop(**runtime_inputs)

        # A loop is already running in this thread — run in a separate
        # thread with its own loop to avoid "loop already running" errors.
        return self._execute_in_thread(**runtime_inputs)

    def _execute_in_thread(self, **runtime_inputs) -> dict[str, Any]:
        """Execute async code in a separate thread with its own event loop."""
        import asyncio
        import concurrent.futures

        def run_in_thread():
            loop = asyncio.new_event_loop()
            asyncio.set_event_loop(loop)
            try:
                return loop.run_until_complete(self.execute_async(**runtime_inputs))
            finally:
                loop.close()

        with concurrent.futures.ThreadPoolExecutor() as executor:
            future = executor.submit(run_in_thread)
            return future.result()

    def _execute_in_new_loop(self, **runtime_inputs) -> dict[str, Any]:
        """Execute async code in a new event loop on the current thread."""
        import asyncio

        new_loop = asyncio.new_event_loop()
        asyncio.set_event_loop(new_loop)
        try:
            return new_loop.run_until_complete(self.execute_async(**runtime_inputs))
        finally:
            new_loop.close()
+
1771
+
1227
1772
  # Node Registry
1228
1773
  class NodeRegistry:
1229
1774
  """Registry for discovering and managing available nodes.
@@ -0,0 +1,36 @@
1
"""Edge computing nodes for distributed processing and data management."""

from .base import EdgeNode
from .cloud_node import CloudNode
from .coordination import EdgeCoordinationNode
from .docker_node import DockerNode
from .edge_data import EdgeDataNode
from .edge_migration_node import EdgeMigrationNode
from .edge_monitoring_node import EdgeMonitoringNode
from .edge_state import EdgeStateMachine
from .edge_warming_node import EdgeWarmingNode

# Phase 4.4 Integration & Testing nodes
from .kubernetes_node import KubernetesNode
from .platform_node import PlatformNode
from .resource_analyzer_node import ResourceAnalyzerNode
from .resource_optimizer_node import ResourceOptimizerNode
from .resource_scaler_node import ResourceScalerNode

# Public API of the edge node package; consumed by `from ... import *`
# and by documentation tooling.
__all__ = [
    "EdgeNode",
    "EdgeDataNode",
    "EdgeStateMachine",
    "EdgeCoordinationNode",
    "EdgeWarmingNode",
    "EdgeMonitoringNode",
    "EdgeMigrationNode",
    "ResourceAnalyzerNode",
    "ResourceScalerNode",
    "ResourceOptimizerNode",
    # Phase 4.4 nodes
    "KubernetesNode",
    "DockerNode",
    "CloudNode",
    "PlatformNode",
]