kailash 0.8.4__py3-none-any.whl → 0.8.5__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (79)
  1. kailash/__init__.py +1 -7
  2. kailash/cli/__init__.py +11 -1
  3. kailash/cli/validation_audit.py +570 -0
  4. kailash/core/actors/supervisor.py +1 -1
  5. kailash/core/resilience/circuit_breaker.py +71 -1
  6. kailash/core/resilience/health_monitor.py +172 -0
  7. kailash/edge/compliance.py +33 -0
  8. kailash/edge/consistency.py +609 -0
  9. kailash/edge/coordination/__init__.py +30 -0
  10. kailash/edge/coordination/global_ordering.py +355 -0
  11. kailash/edge/coordination/leader_election.py +217 -0
  12. kailash/edge/coordination/partition_detector.py +296 -0
  13. kailash/edge/coordination/raft.py +485 -0
  14. kailash/edge/discovery.py +63 -1
  15. kailash/edge/migration/__init__.py +19 -0
  16. kailash/edge/migration/edge_migrator.py +832 -0
  17. kailash/edge/monitoring/__init__.py +21 -0
  18. kailash/edge/monitoring/edge_monitor.py +736 -0
  19. kailash/edge/prediction/__init__.py +10 -0
  20. kailash/edge/prediction/predictive_warmer.py +591 -0
  21. kailash/edge/resource/__init__.py +102 -0
  22. kailash/edge/resource/cloud_integration.py +796 -0
  23. kailash/edge/resource/cost_optimizer.py +949 -0
  24. kailash/edge/resource/docker_integration.py +919 -0
  25. kailash/edge/resource/kubernetes_integration.py +893 -0
  26. kailash/edge/resource/platform_integration.py +913 -0
  27. kailash/edge/resource/predictive_scaler.py +959 -0
  28. kailash/edge/resource/resource_analyzer.py +824 -0
  29. kailash/edge/resource/resource_pools.py +610 -0
  30. kailash/integrations/dataflow_edge.py +261 -0
  31. kailash/mcp_server/registry_integration.py +1 -1
  32. kailash/monitoring/__init__.py +18 -0
  33. kailash/monitoring/alerts.py +646 -0
  34. kailash/monitoring/metrics.py +677 -0
  35. kailash/nodes/__init__.py +2 -0
  36. kailash/nodes/ai/semantic_memory.py +2 -2
  37. kailash/nodes/base.py +545 -0
  38. kailash/nodes/edge/__init__.py +36 -0
  39. kailash/nodes/edge/base.py +240 -0
  40. kailash/nodes/edge/cloud_node.py +710 -0
  41. kailash/nodes/edge/coordination.py +239 -0
  42. kailash/nodes/edge/docker_node.py +825 -0
  43. kailash/nodes/edge/edge_data.py +582 -0
  44. kailash/nodes/edge/edge_migration_node.py +392 -0
  45. kailash/nodes/edge/edge_monitoring_node.py +421 -0
  46. kailash/nodes/edge/edge_state.py +673 -0
  47. kailash/nodes/edge/edge_warming_node.py +393 -0
  48. kailash/nodes/edge/kubernetes_node.py +652 -0
  49. kailash/nodes/edge/platform_node.py +766 -0
  50. kailash/nodes/edge/resource_analyzer_node.py +378 -0
  51. kailash/nodes/edge/resource_optimizer_node.py +501 -0
  52. kailash/nodes/edge/resource_scaler_node.py +397 -0
  53. kailash/nodes/ports.py +676 -0
  54. kailash/runtime/local.py +344 -1
  55. kailash/runtime/validation/__init__.py +20 -0
  56. kailash/runtime/validation/connection_context.py +119 -0
  57. kailash/runtime/validation/enhanced_error_formatter.py +202 -0
  58. kailash/runtime/validation/error_categorizer.py +164 -0
  59. kailash/runtime/validation/metrics.py +380 -0
  60. kailash/runtime/validation/performance.py +615 -0
  61. kailash/runtime/validation/suggestion_engine.py +212 -0
  62. kailash/testing/fixtures.py +2 -2
  63. kailash/workflow/builder.py +230 -4
  64. kailash/workflow/contracts.py +418 -0
  65. kailash/workflow/edge_infrastructure.py +369 -0
  66. kailash/workflow/migration.py +3 -3
  67. kailash/workflow/type_inference.py +669 -0
  68. {kailash-0.8.4.dist-info → kailash-0.8.5.dist-info}/METADATA +43 -27
  69. {kailash-0.8.4.dist-info → kailash-0.8.5.dist-info}/RECORD +73 -27
  70. kailash/nexus/__init__.py +0 -21
  71. kailash/nexus/cli/__init__.py +0 -5
  72. kailash/nexus/cli/__main__.py +0 -6
  73. kailash/nexus/cli/main.py +0 -176
  74. kailash/nexus/factory.py +0 -413
  75. kailash/nexus/gateway.py +0 -545
  76. {kailash-0.8.4.dist-info → kailash-0.8.5.dist-info}/WHEEL +0 -0
  77. {kailash-0.8.4.dist-info → kailash-0.8.5.dist-info}/entry_points.txt +0 -0
  78. {kailash-0.8.4.dist-info → kailash-0.8.5.dist-info}/licenses/LICENSE +0 -0
  79. {kailash-0.8.4.dist-info → kailash-0.8.5.dist-info}/top_level.txt +0 -0
kailash/runtime/validation/suggestion_engine.py (new file)
@@ -0,0 +1,212 @@
+"""
+Suggestion engine for generating actionable guidance from validation errors.
+
+Provides specific, actionable suggestions for fixing connection validation
+errors based on error category, node type, and connection context.
+"""
+
+from dataclasses import dataclass
+from typing import Dict, List, Optional
+
+from .connection_context import ConnectionContext
+from .error_categorizer import ErrorCategory
+
+
+@dataclass
+class ValidationSuggestion:
+    """Actionable suggestion for fixing a validation error."""
+
+    message: str
+    """Human-readable suggestion message"""
+
+    code_example: Optional[str] = None
+    """Code example showing how to fix the issue"""
+
+    documentation_link: Optional[str] = None
+    """Link to relevant documentation"""
+
+    alternative_approaches: Optional[List[str]] = None
+    """List of alternative approaches to consider"""
+
+
+class ValidationSuggestionEngine:
+    """Generates actionable suggestions for connection validation errors."""
+
+    def __init__(self):
+        self._suggestion_templates = self._initialize_suggestion_templates()
+
+    def generate_suggestion(
+        self,
+        error_category: ErrorCategory,
+        node_type: str,
+        connection_context: ConnectionContext,
+        original_error: str,
+    ) -> Optional[ValidationSuggestion]:
+        """Generate actionable suggestion for a validation error.
+
+        Args:
+            error_category: Categorized error type
+            node_type: Type of target node (e.g., 'CSVWriterNode')
+            connection_context: Context about the failing connection
+            original_error: Original error message
+
+        Returns:
+            ValidationSuggestion with actionable guidance, or None if no suggestion available
+        """
+        # Get base suggestion template for error category
+        template = self._suggestion_templates.get(error_category)
+        if not template:
+            return None
+
+        # Customize suggestion based on node type and context
+        return self._customize_suggestion(
+            template, node_type, connection_context, original_error
+        )
+
+    def _initialize_suggestion_templates(self) -> Dict[ErrorCategory, Dict]:
+        """Initialize suggestion templates for each error category."""
+        return {
+            ErrorCategory.TYPE_MISMATCH: {
+                "message": "The parameter type doesn't match what the node expects. Check the data types being passed through the connection.",
+                "code_template": '# Check the output type from source node\n# Expected: {expected_type}\n# Got: {actual_type}\nworkflow.add_connection("{source}", "{source_port}", "{target}", "{target_port}")',
+                "doc_link": "sdk-users/validation/common-mistakes.md#type-mismatch",
+                "alternatives": [
+                    "Add a data transformation node between source and target",
+                    "Check if you're connecting to the correct output port",
+                    "Verify the source node produces the expected data type",
+                ],
+            },
+            ErrorCategory.MISSING_PARAMETER: {
+                "message": "A required parameter is missing. Make sure all required parameters are provided via connections or node configuration.",
+                "code_template": '# Add the missing parameter connection:\nworkflow.add_connection("{source}", "{source_port}", "{target}", "{missing_param}")\n\n# Or provide it directly in node configuration:\nworkflow.add_node("{node_type}", "{target}", {{"{missing_param}": "value"}})',
+                "doc_link": "sdk-users/validation/common-mistakes.md#missing-parameters",
+                "alternatives": [
+                    "Provide the parameter directly in node configuration",
+                    "Create a PythonCodeNode to generate the required parameter",
+                    "Check if another node output can provide this parameter",
+                ],
+            },
+            ErrorCategory.CONSTRAINT_VIOLATION: {
+                "message": "The parameter value violates validation constraints. Check the parameter requirements for this node type.",
+                "code_template": '# Ensure parameter meets requirements:\n# {constraint_details}\nworkflow.add_connection("{source}", "{source_port}", "{target}", "{target_port}")\n\n# Or add validation in source node:\nworkflow.add_node("PythonCodeNode", "validator", {{"code": "result = max(0, input_value)"}})',
+                "doc_link": "sdk-users/nodes/node-selection-guide.md#parameter-validation",
+                "alternatives": [
+                    "Add data validation/transformation before the target node",
+                    "Check the node documentation for parameter requirements",
+                    "Use a different node that accepts your data format",
+                ],
+            },
+            ErrorCategory.SECURITY_VIOLATION: {
+                "message": "Potential security issue detected in parameter value. This could indicate SQL injection, script injection, or other security vulnerabilities.",
+                "code_template": '# Use parameterized/sanitized approach:\n# For SQL operations:\nworkflow.add_node("SQLDatabaseNode", "safe_query", {{\n    "query": "SELECT * FROM table WHERE id = $1",\n    "params": ["user_input"]\n}})\n\n# For user input, add validation:\nworkflow.add_node("PythonCodeNode", "sanitizer", {{"code": "result = sanitize_input(user_data)"}})',
                "doc_link": "sdk-users/enterprise/security-patterns.md#input-validation",
+                "alternatives": [
+                    "Use parameterized queries instead of string concatenation",
+                    "Add input sanitization/validation nodes",
+                    "Review the data source for potential security issues",
+                    "Use whitelisting instead of blacklisting for allowed values",
+                ],
+            },
+            ErrorCategory.UNKNOWN: {
+                "message": "An unexpected validation error occurred. Check the error details and node documentation.",
+                "code_template": '# General troubleshooting:\n# 1. Check node documentation for parameter requirements\n# 2. Verify data types and formats\n# 3. Test with simpler data first\nworkflow.add_connection("{source}", "{source_port}", "{target}", "{target_port}")',
+                "doc_link": "sdk-users/developer/05-troubleshooting.md",
+                "alternatives": [
+                    "Check the node documentation for specific requirements",
+                    "Test with simplified data to isolate the issue",
+                    "Add debug logging to inspect the data flow",
+                    "Review similar examples in the documentation",
+                ],
+            },
+        }
+
+    def _customize_suggestion(
+        self,
+        template: Dict,
+        node_type: str,
+        connection_context: ConnectionContext,
+        original_error: str,
+    ) -> ValidationSuggestion:
+        """Customize suggestion template with specific context."""
+        # Extract context information
+        source = connection_context.source_node
+        source_port = connection_context.source_port or "result"
+        target = connection_context.target_node
+        target_port = connection_context.target_port
+
+        # Customize code example
+        code_example = template["code_template"].format(
+            source=source,
+            source_port=source_port,
+            target=target,
+            target_port=target_port,
+            node_type=node_type,
+            expected_type="<check_node_docs>",
+            actual_type="<check_source_output>",
+            missing_param=target_port,
+            constraint_details="See node documentation for valid ranges/formats",
+        )
+
+        # Add node-specific customizations
+        message = template["message"]
+        if (
+            "DataFlow" in node_type
+            or node_type.endswith("CreateNode")
+            or node_type.endswith("UpdateNode")
+        ):
+            message += " For DataFlow nodes, ensure the data matches your model schema."
+
+        if "SQL" in node_type or "Database" in node_type:
+            message += " For database nodes, verify connection strings and SQL syntax."
+
+        if "LLM" in node_type or "Agent" in node_type:
+            message += " For AI nodes, check prompt formatting and model parameters."
+
+        return ValidationSuggestion(
+            message=message,
+            code_example=code_example,
+            documentation_link=template["doc_link"],
+            alternative_approaches=template["alternatives"],
+        )
+
+    def get_dataflow_specific_suggestion(
+        self, error_category: ErrorCategory, connection_context: ConnectionContext
+    ) -> Optional[str]:
+        """Get DataFlow-specific suggestions for common issues."""
+        dataflow_suggestions = {
+            ErrorCategory.TYPE_MISMATCH: (
+                "For DataFlow models, ensure the connected data matches your model field types. "
+                "Check your @db.model class definition for expected types."
+            ),
+            ErrorCategory.SECURITY_VIOLATION: (
+                "DataFlow automatically sanitizes SQL parameters, but connection-level validation "
+                "caught a potential issue. Review the data source for SQL injection attempts."
+            ),
+            ErrorCategory.MISSING_PARAMETER: (
+                "DataFlow nodes require all model fields to be provided. Check your model definition "
+                "and ensure all required fields have connections or default values."
+            ),
+        }
+        return dataflow_suggestions.get(error_category)
+
+    def get_common_connection_patterns(self, node_type: str) -> List[str]:
+        """Get common connection patterns for specific node types."""
+        patterns = {
+            "CSVReaderNode": [
+                "workflow.add_connection('reader', 'data', 'processor', 'input_data')",
+                "workflow.add_connection('reader', 'metadata.rows', 'counter', 'count')",
+            ],
+            "HTTPRequestNode": [
+                "workflow.add_connection('api', 'response.data', 'processor', 'json_data')",
+                "workflow.add_connection('api', 'status_code', 'checker', 'status')",
+            ],
+            "LLMAgentNode": [
+                "workflow.add_connection('input', 'text', 'llm', 'prompt')",
+                "workflow.add_connection('llm', 'result', 'output', 'analysis')",
+            ],
+            "SQLDatabaseNode": [
+                "workflow.add_connection('data', 'records', 'db', 'data')",
+                "workflow.add_connection('config', 'table_name', 'db', 'table')",
+            ],
+        }
+        return patterns.get(node_type, [])
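Usage sketch (not part of the diff): the engine is driven by an ErrorCategory plus a ConnectionContext. The ConnectionContext constructor below assumes keyword fields matching the four attributes _customize_suggestion reads (source_node, source_port, target_node, target_port); the actual signature lives in the new connection_context.py, which is not shown here.

from kailash.runtime.validation.connection_context import ConnectionContext
from kailash.runtime.validation.error_categorizer import ErrorCategory
from kailash.runtime.validation.suggestion_engine import ValidationSuggestionEngine

engine = ValidationSuggestionEngine()

# Hypothetical failing connection between two workflow nodes
context = ConnectionContext(
    source_node="reader",
    source_port="data",
    target_node="writer",
    target_port="input_data",
)

suggestion = engine.generate_suggestion(
    error_category=ErrorCategory.TYPE_MISMATCH,
    node_type="CSVWriterNode",
    connection_context=context,
    original_error="expected list, got str",
)
if suggestion:
    print(suggestion.message)                  # category + node-specific text
    print(suggestion.code_example)             # filled-in code_template
    print(suggestion.alternative_approaches)   # list of fallback strategies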
kailash/testing/fixtures.py
@@ -183,8 +183,8 @@ class AsyncWorkflowFixtures:
             f"mysql://{user}:{password}@localhost:{actual_port}/{database}"
         )
 
-        # Wait for MySQL to be ready (takes longer than PostgreSQL)
-        await asyncio.sleep(10)
+        # Wait for MySQL to be ready (shorter wait for tests)
+        await asyncio.sleep(0.1)
 
         return DatabaseFixture(
             container=container,
kailash/workflow/builder.py
@@ -2,10 +2,11 @@
 
 import logging
 import uuid
-from typing import TYPE_CHECKING, Any
+from typing import TYPE_CHECKING, Any, Optional, Union
 
 from kailash.nodes.base import Node
 from kailash.sdk_exceptions import ConnectionError, WorkflowValidationError
+from kailash.workflow.contracts import ConnectionContract, get_contract_registry
 from kailash.workflow.graph import Workflow
 
 logger = logging.getLogger(__name__)
@@ -14,8 +15,12 @@ logger = logging.getLogger(__name__)
 
 class WorkflowBuilder:
     """Builder pattern for creating Workflow instances."""
 
-    def __init__(self):
-        """Initialize an empty workflow builder."""
+    def __init__(self, edge_config: dict[str, Any] | None = None):
+        """Initialize an empty workflow builder.
+
+        Args:
+            edge_config: Optional edge infrastructure configuration
+        """
         self.nodes: dict[str, dict[str, Any]] = {}
         self.connections: list[dict[str, str]] = []
         self._metadata: dict[str, Any] = {}
@@ -23,6 +28,15 @@ class WorkflowBuilder:
         self.workflow_parameters: dict[str, Any] = {}
         self.parameter_mappings: dict[str, dict[str, str]] = {}
 
+        # Edge infrastructure support
+        self.edge_config = edge_config
+        self._has_edge_nodes = False
+        self._edge_infrastructure = None
+
+        # Connection contracts support
+        self.connection_contracts: dict[str, ConnectionContract] = {}
+        self._contract_registry = get_contract_registry()
+
     def add_node(self, *args, **kwargs) -> str:
         """
         Unified add_node method supporting multiple API patterns.
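The new constructor parameter is backward compatible. A minimal sketch, assuming nothing about the accepted keys (this diff never enumerates what edge_config may contain, so the dict below is purely illustrative):

from kailash.workflow.builder import WorkflowBuilder

builder = WorkflowBuilder()  # unchanged behavior: edge_config defaults to None

# Held on the builder and only consumed at build() time, where it is passed
# to EdgeInfrastructure (see the later hunks); the key is hypothetical.
edge_builder = WorkflowBuilder(edge_config={"default_region": "us-east-1"})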
@@ -287,8 +301,52 @@ class WorkflowBuilder:
         )
 
         logger.info(f"Added node '{node_id}' of type '{type_name}'")
+
+        # Detect edge nodes
+        if self._is_edge_node(type_name):
+            self._has_edge_nodes = True
+            logger.debug(f"Detected edge node: {type_name}")
+
         return node_id
 
+    def _is_edge_node(self, node_type: str) -> bool:
+        """Check if a node type is an edge node.
+
+        Args:
+            node_type: The node type to check
+
+        Returns:
+            True if the node is an edge node
+        """
+        # Use the same logic as EdgeInfrastructure if available
+        if self._edge_infrastructure:
+            return self._edge_infrastructure.is_edge_node(node_type)
+
+        # Otherwise use local logic
+        # Check exact matches and subclasses
+        edge_prefixes = ["Edge", "edge"]
+        edge_suffixes = [
+            "EdgeNode",
+            "EdgeDataNode",
+            "EdgeStateMachine",
+            "EdgeCacheNode",
+        ]
+
+        # Exact match
+        if node_type in edge_suffixes:
+            return True
+
+        # Check if it starts with Edge/edge
+        for prefix in edge_prefixes:
+            if node_type.startswith(prefix):
+                return True
+
+        # Check if it ends with EdgeNode (for custom edge nodes)
+        if node_type.endswith("EdgeNode"):
+            return True
+
+        return False
+
     # Fluent API methods for backward compatibility
     def add_node_fluent(
         self, node_id: str, node_class_or_type: Any, **config
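The heuristic above is purely name-based; a short sketch of what it accepts (EdgeDataNode and EdgeMonitoringNode ship in this release per the file list, MyCustomEdgeNode is hypothetical):

builder = WorkflowBuilder()
assert builder._is_edge_node("EdgeDataNode")        # exact match in suffix list
assert builder._is_edge_node("EdgeMonitoringNode")  # "Edge" prefix
assert builder._is_edge_node("MyCustomEdgeNode")    # "EdgeNode" suffix
assert not builder._is_edge_node("CSVReaderNode")   # ordinary node type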
@@ -539,6 +597,142 @@ class WorkflowBuilder:
         self._metadata.update(kwargs)
         return self
 
+    def add_typed_connection(
+        self,
+        from_node: str,
+        from_output: str,
+        to_node: str,
+        to_input: str,
+        contract: Union[str, ConnectionContract],
+        validate_immediately: bool = False,
+    ) -> "WorkflowBuilder":
+        """
+        Add a typed connection with contract validation.
+
+        This is the new contract-based connection method that enforces
+        validation contracts on data flowing between nodes.
+
+        Args:
+            from_node: Source node ID
+            from_output: Output field from source
+            to_node: Target node ID
+            to_input: Input field on target
+            contract: Contract name (string) or ConnectionContract instance
+            validate_immediately: Whether to validate contract definitions now
+
+        Returns:
+            Self for chaining
+
+        Raises:
+            WorkflowValidationError: If contract is invalid or nodes don't exist
+            ConnectionError: If connection setup fails
+
+        Example:
+            # Using predefined contract
+            workflow.add_typed_connection(
+                "csv_reader", "data", "processor", "input_data",
+                contract="string_data"
+            )
+
+            # Using custom contract
+            custom_contract = ConnectionContract(
+                name="user_data_flow",
+                source_schema={"type": "object", "properties": {"id": {"type": "string"}}},
+                target_schema={"type": "object", "properties": {"id": {"type": "string"}}},
+                security_policies=[SecurityPolicy.NO_PII]
+            )
+            workflow.add_typed_connection(
+                "user_source", "user", "user_processor", "user_data",
+                contract=custom_contract
+            )
+        """
+        # Resolve contract
+        if isinstance(contract, str):
+            contract_obj = self._contract_registry.get(contract)
+            if not contract_obj:
+                available_contracts = self._contract_registry.list_contracts()
+                raise WorkflowValidationError(
+                    f"Contract '{contract}' not found. Available contracts: {available_contracts}"
+                )
+            contract = contract_obj
+
+        # Add the standard connection first
+        self.add_connection(from_node, from_output, to_node, to_input)
+
+        # Store the contract for this connection
+        connection_id = f"{from_node}.{from_output} → {to_node}.{to_input}"
+        self.connection_contracts[connection_id] = contract
+
+        # Immediate validation if requested
+        if validate_immediately:
+            # Validate that contract schemas are valid
+            try:
+                if contract.source_schema:
+                    from jsonschema import Draft7Validator
+
+                    Draft7Validator.check_schema(contract.source_schema)
+                if contract.target_schema:
+                    Draft7Validator.check_schema(contract.target_schema)
+            except Exception as e:
+                raise WorkflowValidationError(
+                    f"Invalid contract schema for connection {connection_id}: {e}"
+                )
+
+        logger.info(
+            f"Added typed connection '{connection_id}' with contract '{contract.name}'"
+        )
+
+        return self
+
+    def get_connection_contract(
+        self, connection_id: str
+    ) -> Optional[ConnectionContract]:
+        """
+        Get the contract for a specific connection.
+
+        Args:
+            connection_id: Connection identifier in format "from.output → to.input"
+
+        Returns:
+            ConnectionContract if found, None otherwise
+        """
+        return self.connection_contracts.get(connection_id)
+
+    def list_connection_contracts(self) -> dict[str, str]:
+        """
+        List all connection contracts in this workflow.
+
+        Returns:
+            Dict mapping connection IDs to contract names
+        """
+        return {
+            conn_id: contract.name
+            for conn_id, contract in self.connection_contracts.items()
+        }
+
+    def validate_all_contracts(self) -> tuple[bool, list[str]]:
+        """
+        Validate all connection contracts in the workflow.
+
+        Returns:
+            Tuple of (all_valid, list_of_errors)
+        """
+        errors = []
+
+        for connection_id, contract in self.connection_contracts.items():
+            try:
+                # Validate contract schemas
+                if contract.source_schema:
+                    from jsonschema import Draft7Validator
+
+                    Draft7Validator.check_schema(contract.source_schema)
+                if contract.target_schema:
+                    Draft7Validator.check_schema(contract.target_schema)
+            except Exception as e:
+                errors.append(f"Contract '{contract.name}' for {connection_id}: {e}")
+
+        return len(errors) == 0, errors
+
     def add_workflow_inputs(
         self, input_node_id: str, input_mappings: dict
     ) -> "WorkflowBuilder":
@@ -623,6 +817,13 @@ class WorkflowBuilder:
         version = metadata.pop("version", "1.0.0")
         author = metadata.pop("author", "")
 
+        # Initialize edge infrastructure if needed
+        if self._has_edge_nodes and not self._edge_infrastructure:
+            from kailash.workflow.edge_infrastructure import EdgeInfrastructure
+
+            self._edge_infrastructure = EdgeInfrastructure(self.edge_config)
+            logger.info("Initialized edge infrastructure for workflow")
+
         # Create workflow
         workflow = Workflow(
             workflow_id=workflow_id,
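Build-time effect, as a sketch: the infrastructure is created lazily, only when at least one edge node was added. This assumes build() can be called with no arguments, which this diff does not confirm; the edge_config key is again hypothetical.

b = WorkflowBuilder(edge_config={"default_region": "us-east-1"})
b.add_node("CSVReaderNode", "reader", {})
wf = b.build()  # no edge nodes: EdgeInfrastructure is never instantiated

b2 = WorkflowBuilder(edge_config={"default_region": "us-east-1"})
b2.add_node("EdgeDataNode", "edge_store", {})  # sets _has_edge_nodes
wf2 = b2.build()  # EdgeInfrastructure(self.edge_config) is created here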
@@ -633,6 +834,10 @@ class WorkflowBuilder:
             metadata=metadata,
         )
 
+        # Store edge infrastructure reference in workflow metadata if present
+        if self._edge_infrastructure:
+            workflow.metadata["_edge_infrastructure"] = self._edge_infrastructure
+
         # Add nodes to workflow
         for node_id, node_info in self.nodes.items():
             try:
@@ -645,6 +850,16 @@ class WorkflowBuilder:
                 # Node class was provided
                 node_class = node_info["class"]
                 node_config = node_info.get("config", {})
+
+                # Inject edge infrastructure if this is an edge node
+                if self._edge_infrastructure and self._is_edge_node(
+                    node_class.__name__
+                ):
+                    node_config["_edge_infrastructure"] = self._edge_infrastructure
+                    logger.debug(
+                        f"Injected edge infrastructure into {node_class.__name__}"
+                    )
+
                 workflow.add_node(
                     node_id=node_id, node_or_type=node_class, **node_config
                 )
@@ -652,6 +867,12 @@ class WorkflowBuilder:
                 # String node type
                 node_type = node_info["type"]
                 node_config = node_info.get("config", {})
+
+                # Inject edge infrastructure if this is an edge node
+                if self._edge_infrastructure and self._is_edge_node(node_type):
+                    node_config["_edge_infrastructure"] = self._edge_infrastructure
+                    logger.debug(f"Injected edge infrastructure into {node_type}")
+
                 workflow.add_node(
                     node_id=node_id, node_or_type=node_type, **node_config
                 )
@@ -711,9 +932,13 @@ class WorkflowBuilder:
                     self.workflow_parameters[workflow_param]
                 )
 
-        # Store workflow parameters in metadata for runtime reference
+        # Store workflow parameters and contracts in metadata for runtime reference
         workflow.metadata["workflow_parameters"] = self.workflow_parameters
         workflow.metadata["parameter_mappings"] = self.parameter_mappings
+        workflow.metadata["connection_contracts"] = {
+            conn_id: contract.to_dict()
+            for conn_id, contract in self.connection_contracts.items()
+        }
 
         logger.info(
             f"Built workflow '{workflow_id}' with "
@@ -789,6 +1014,7 @@ class WorkflowBuilder:
         self._metadata = {}
         self.workflow_parameters = {}
         self.parameter_mappings = {}
+        self.connection_contracts = {}
         return self
 
     @classmethod