kailash 0.8.4__py3-none-any.whl → 0.8.6__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- kailash/__init__.py +5 -11
- kailash/channels/__init__.py +2 -1
- kailash/channels/mcp_channel.py +23 -4
- kailash/cli/__init__.py +11 -1
- kailash/cli/validate_imports.py +202 -0
- kailash/cli/validation_audit.py +570 -0
- kailash/core/actors/supervisor.py +1 -1
- kailash/core/resilience/bulkhead.py +15 -5
- kailash/core/resilience/circuit_breaker.py +74 -1
- kailash/core/resilience/health_monitor.py +433 -33
- kailash/edge/compliance.py +33 -0
- kailash/edge/consistency.py +609 -0
- kailash/edge/coordination/__init__.py +30 -0
- kailash/edge/coordination/global_ordering.py +355 -0
- kailash/edge/coordination/leader_election.py +217 -0
- kailash/edge/coordination/partition_detector.py +296 -0
- kailash/edge/coordination/raft.py +485 -0
- kailash/edge/discovery.py +63 -1
- kailash/edge/migration/__init__.py +19 -0
- kailash/edge/migration/edge_migration_service.py +384 -0
- kailash/edge/migration/edge_migrator.py +832 -0
- kailash/edge/monitoring/__init__.py +21 -0
- kailash/edge/monitoring/edge_monitor.py +736 -0
- kailash/edge/prediction/__init__.py +10 -0
- kailash/edge/prediction/predictive_warmer.py +591 -0
- kailash/edge/resource/__init__.py +102 -0
- kailash/edge/resource/cloud_integration.py +796 -0
- kailash/edge/resource/cost_optimizer.py +949 -0
- kailash/edge/resource/docker_integration.py +919 -0
- kailash/edge/resource/kubernetes_integration.py +893 -0
- kailash/edge/resource/platform_integration.py +913 -0
- kailash/edge/resource/predictive_scaler.py +959 -0
- kailash/edge/resource/resource_analyzer.py +824 -0
- kailash/edge/resource/resource_pools.py +610 -0
- kailash/integrations/dataflow_edge.py +261 -0
- kailash/mcp_server/registry_integration.py +1 -1
- kailash/mcp_server/server.py +351 -8
- kailash/mcp_server/transports.py +305 -0
- kailash/middleware/gateway/event_store.py +1 -0
- kailash/monitoring/__init__.py +18 -0
- kailash/monitoring/alerts.py +646 -0
- kailash/monitoring/metrics.py +677 -0
- kailash/nodes/__init__.py +2 -0
- kailash/nodes/ai/semantic_memory.py +2 -2
- kailash/nodes/base.py +622 -1
- kailash/nodes/code/python.py +44 -3
- kailash/nodes/data/async_sql.py +42 -20
- kailash/nodes/edge/__init__.py +36 -0
- kailash/nodes/edge/base.py +240 -0
- kailash/nodes/edge/cloud_node.py +710 -0
- kailash/nodes/edge/coordination.py +239 -0
- kailash/nodes/edge/docker_node.py +825 -0
- kailash/nodes/edge/edge_data.py +582 -0
- kailash/nodes/edge/edge_migration_node.py +396 -0
- kailash/nodes/edge/edge_monitoring_node.py +421 -0
- kailash/nodes/edge/edge_state.py +673 -0
- kailash/nodes/edge/edge_warming_node.py +393 -0
- kailash/nodes/edge/kubernetes_node.py +652 -0
- kailash/nodes/edge/platform_node.py +766 -0
- kailash/nodes/edge/resource_analyzer_node.py +378 -0
- kailash/nodes/edge/resource_optimizer_node.py +501 -0
- kailash/nodes/edge/resource_scaler_node.py +397 -0
- kailash/nodes/governance.py +410 -0
- kailash/nodes/ports.py +676 -0
- kailash/nodes/rag/registry.py +1 -1
- kailash/nodes/transaction/distributed_transaction_manager.py +48 -1
- kailash/nodes/transaction/saga_state_storage.py +2 -1
- kailash/nodes/validation.py +8 -8
- kailash/runtime/local.py +374 -1
- kailash/runtime/validation/__init__.py +12 -0
- kailash/runtime/validation/connection_context.py +119 -0
- kailash/runtime/validation/enhanced_error_formatter.py +202 -0
- kailash/runtime/validation/error_categorizer.py +164 -0
- kailash/runtime/validation/import_validator.py +446 -0
- kailash/runtime/validation/metrics.py +380 -0
- kailash/runtime/validation/performance.py +615 -0
- kailash/runtime/validation/suggestion_engine.py +212 -0
- kailash/testing/fixtures.py +2 -2
- kailash/utils/data_paths.py +74 -0
- kailash/workflow/builder.py +413 -8
- kailash/workflow/contracts.py +418 -0
- kailash/workflow/edge_infrastructure.py +369 -0
- kailash/workflow/mermaid_visualizer.py +3 -1
- kailash/workflow/migration.py +3 -3
- kailash/workflow/templates.py +6 -6
- kailash/workflow/type_inference.py +669 -0
- kailash/workflow/validation.py +134 -3
- {kailash-0.8.4.dist-info → kailash-0.8.6.dist-info}/METADATA +52 -34
- {kailash-0.8.4.dist-info → kailash-0.8.6.dist-info}/RECORD +93 -42
- kailash/nexus/__init__.py +0 -21
- kailash/nexus/cli/__init__.py +0 -5
- kailash/nexus/cli/__main__.py +0 -6
- kailash/nexus/cli/main.py +0 -176
- kailash/nexus/factory.py +0 -413
- kailash/nexus/gateway.py +0 -545
- {kailash-0.8.4.dist-info → kailash-0.8.6.dist-info}/WHEEL +0 -0
- {kailash-0.8.4.dist-info → kailash-0.8.6.dist-info}/entry_points.txt +0 -0
- {kailash-0.8.4.dist-info → kailash-0.8.6.dist-info}/licenses/LICENSE +0 -0
- {kailash-0.8.4.dist-info → kailash-0.8.6.dist-info}/top_level.txt +0 -0
kailash/workflow/builder.py
CHANGED
@@ -2,11 +2,17 @@

 import logging
 import uuid
-from typing import TYPE_CHECKING, Any
+from typing import TYPE_CHECKING, Any, Optional, Union

-from kailash.nodes.base import Node
+from kailash.nodes.base import Node, NodeRegistry
 from kailash.sdk_exceptions import ConnectionError, WorkflowValidationError
+from kailash.workflow.contracts import ConnectionContract, get_contract_registry
 from kailash.workflow.graph import Workflow
+from kailash.workflow.validation import (
+    IssueSeverity,
+    ParameterDeclarationValidator,
+    ValidationIssue,
+)

 logger = logging.getLogger(__name__)

@@ -14,8 +20,12 @@ logger = logging.getLogger(__name__)
 class WorkflowBuilder:
     """Builder pattern for creating Workflow instances."""

-    def __init__(self):
-        """Initialize an empty workflow builder."""
+    def __init__(self, edge_config: dict[str, Any] | None = None):
+        """Initialize an empty workflow builder.
+
+        Args:
+            edge_config: Optional edge infrastructure configuration
+        """
         self.nodes: dict[str, dict[str, Any]] = {}
         self.connections: list[dict[str, str]] = []
         self._metadata: dict[str, Any] = {}
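The only signature change in this hunk is the optional edge_config argument. A minimal usage sketch, assuming nothing about the accepted keys (the dictionary shown is a hypothetical placeholder):

from kailash.workflow.builder import WorkflowBuilder

builder = WorkflowBuilder()  # unchanged default behaviour
edge_builder = WorkflowBuilder(edge_config={"region": "us-east-1"})  # keys are illustrative only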
@@ -23,6 +33,169 @@ class WorkflowBuilder:
         self.workflow_parameters: dict[str, Any] = {}
         self.parameter_mappings: dict[str, dict[str, str]] = {}

+        # Edge infrastructure support
+        self.edge_config = edge_config
+        self._has_edge_nodes = False
+        self._edge_infrastructure = None
+
+        # Connection contracts support
+        self.connection_contracts: dict[str, ConnectionContract] = {}
+        self._contract_registry = get_contract_registry()
+
+        # Parameter validation support
+        self._param_validator = ParameterDeclarationValidator()
+
+    def _is_sdk_node(self, node_class: type) -> bool:
+        """Detect if node is SDK-provided vs custom implementation.
+
+        SDK nodes are registered in the NodeRegistry via @register_node decorator.
+        Custom nodes are not registered and require class reference usage.
+
+        Args:
+            node_class: The node class to check
+
+        Returns:
+            True if node is registered in SDK (can use string reference),
+            False if custom node (must use class reference)
+        """
+        if not hasattr(node_class, "__name__"):
+            return False
+
+        # Check if the node class is registered in the NodeRegistry
+        try:
+            registered_class = NodeRegistry.get(node_class.__name__)
+            # Check if it's the same class (identity check)
+            return registered_class is node_class
+        except Exception:
+            # Node not found in registry = custom node
+            return False
+
+    def _generate_intelligent_node_warning(self, node_class: type, node_id: str) -> str:
+        """Generate context-aware warnings based on node type.
+
+        Args:
+            node_class: The node class being added
+            node_id: The node ID
+
+        Returns:
+            Appropriate warning message for the node type
+        """
+        if self._is_sdk_node(node_class):
+            # SDK node using class reference - suggest string pattern
+            return (
+                f"SDK node detected. Consider using string reference for better compatibility:\n"
+                f" CURRENT: add_node({node_class.__name__}, '{node_id}', {{...}})\n"
+                f" PREFERRED: add_node('{node_class.__name__}', '{node_id}', {{...}})\n"
+                f"String references work for all @register_node() decorated SDK nodes."
+            )
+        else:
+            # Custom node using class reference - this is CORRECT
+            return (
+                f"✅ CUSTOM NODE USAGE CORRECT\n"
+                f"\n"
+                f"Pattern: add_node({node_class.__name__}, '{node_id}', {{...}})\n"
+                f"Status: This is the CORRECT pattern for custom nodes\n"
+                f"\n"
+                f'⚠️ IGNORE "preferred pattern" suggestions for custom nodes\n'
+                f"String references only work for @register_node() decorated SDK nodes.\n"
+                f"Custom nodes MUST use class references as shown above.\n"
+                f"\n"
+                f"📚 Guide: sdk-users/7-gold-standards/GOLD-STANDARD-custom-node-development-guide.md"
+            )
+
+    def validate_parameter_declarations(
+        self, warn_on_issues: bool = True
+    ) -> list[ValidationIssue]:
+        """Validate parameter declarations for all nodes in the workflow.
+
+        This method detects common parameter declaration issues that lead to
+        silent parameter dropping and debugging difficulties.
+
+        Args:
+            warn_on_issues: Whether to log warnings for detected issues
+
+        Returns:
+            List of ValidationIssue objects for any problems found
+        """
+        all_issues = []
+
+        for node_id, node_info in self.nodes.items():
+            try:
+                # Create a temporary instance to validate parameter declarations
+                if "instance" in node_info:
+                    # Use existing instance
+                    node_instance = node_info["instance"]
+                    workflow_params = {}  # Instance already has config
+                elif "class" in node_info:
+                    # Create temporary instance of custom node
+                    node_class = node_info["class"]
+                    node_config = node_info.get("config", {})
+                    # Create minimal instance just for parameter validation
+                    try:
+                        node_instance = node_class(**node_config)
+                        workflow_params = node_config
+                    except Exception as e:
+                        # If we can't create instance, skip detailed validation
+                        all_issues.append(
+                            ValidationIssue(
+                                severity=IssueSeverity.WARNING,
+                                category="parameter_declaration",
+                                code="PAR005",
+                                message=f"Could not validate parameters for custom node '{node_id}': {e}",
+                                suggestion="Ensure node constructor accepts provided configuration parameters",
+                                node_id=node_id,
+                            )
+                        )
+                        continue
+                else:
+                    # SDK node - validate if we can create it
+                    node_type = node_info["type"]
+                    node_config = node_info.get("config", {})
+                    try:
+                        # Try to get the class from registry
+                        node_class = NodeRegistry.get(node_type)
+                        node_instance = node_class(**node_config)
+                        workflow_params = node_config
+                    except Exception:
+                        # Skip validation for nodes we can't instantiate
+                        continue
+
+                # Validate parameter declarations
+                issues = self._param_validator.validate_node_parameters(
+                    node_instance, workflow_params
+                )
+
+                # Add node_id to issues
+                for issue in issues:
+                    issue.node_id = node_id
+                    all_issues.append(issue)
+
+                    # Log warnings if requested
+                    if warn_on_issues:
+                        if issue.severity == IssueSeverity.ERROR:
+                            logger.error(
+                                f"Parameter validation error in node '{node_id}': {issue.message}"
+                            )
+                        elif issue.severity == IssueSeverity.WARNING:
+                            logger.warning(
+                                f"Parameter validation warning in node '{node_id}': {issue.message}"
+                            )
+
+            except Exception as e:
+                # General validation error
+                all_issues.append(
+                    ValidationIssue(
+                        severity=IssueSeverity.WARNING,
+                        category="parameter_declaration",
+                        code="PAR006",
+                        message=f"Parameter validation failed for node '{node_id}': {e}",
+                        suggestion="Check node configuration and parameter declarations",
+                        node_id=node_id,
+                    )
+                )
+
+        return all_issues
+
     def add_node(self, *args, **kwargs) -> str:
         """
         Unified add_node method supporting multiple API patterns.
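A hedged sketch of how the new validator might be invoked on its own, before building; "PythonCodeNode" and its "code" parameter are assumed here to be a registered SDK node, and the printed fields mirror the ValidationIssue constructor calls above:

builder = WorkflowBuilder()
builder.add_node("PythonCodeNode", "transform", {"code": "result = {'value': 1}"})

issues = builder.validate_parameter_declarations(warn_on_issues=False)
for issue in issues:
    print(issue.severity, issue.code, issue.node_id, issue.message)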
@@ -91,6 +264,9 @@ class WorkflowBuilder:
             # Two strings - assume current API: add_node("NodeType", "node_id")
             config = kwargs if kwargs else (args[2] if len(args) > 2 else {})
             return self._add_node_current(args[0], args[1], config)
+        elif isinstance(args[1], dict):
+            # Pattern: add_node("NodeType", {config}) - treat as add_node("NodeType", None, {config})
+            return self._add_node_current(args[0], None, args[1])
         else:
             # Invalid second argument
             raise WorkflowValidationError(
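After this change the string-based API accepts three call shapes; a short sketch (node type and config values are illustrative):

builder.add_node("PythonCodeNode", "step_1", {"code": "result = 1"})  # type, id, config
builder.add_node("PythonCodeNode", "step_2")                          # type, id (config via kwargs or empty)
builder.add_node("PythonCodeNode", {"code": "result = 2"})            # type, config; node id is auto-generated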
@@ -188,10 +364,10 @@ class WorkflowBuilder:
         if node_id is None:
             node_id = f"node_{uuid.uuid4().hex[:8]}"

+        # Generate context-aware warning based on node type
+        warning_message = self._generate_intelligent_node_warning(node_class, node_id)
         warnings.warn(
-
-            f" CURRENT: add_node({node_class.__name__}, '{node_id}', {list(config.keys())})\n"
-            f" PREFERRED: add_node('{node_class.__name__}', '{node_id}', {config})",
+            warning_message,
             UserWarning,
             stacklevel=3,
         )
@@ -287,8 +463,52 @@ class WorkflowBuilder:
         )

         logger.info(f"Added node '{node_id}' of type '{type_name}'")
+
+        # Detect edge nodes
+        if self._is_edge_node(type_name):
+            self._has_edge_nodes = True
+            logger.debug(f"Detected edge node: {type_name}")
+
         return node_id

+    def _is_edge_node(self, node_type: str) -> bool:
+        """Check if a node type is an edge node.
+
+        Args:
+            node_type: The node type to check
+
+        Returns:
+            True if the node is an edge node
+        """
+        # Use the same logic as EdgeInfrastructure if available
+        if self._edge_infrastructure:
+            return self._edge_infrastructure.is_edge_node(node_type)
+
+        # Otherwise use local logic
+        # Check exact matches and subclasses
+        edge_prefixes = ["Edge", "edge"]
+        edge_suffixes = [
+            "EdgeNode",
+            "EdgeDataNode",
+            "EdgeStateMachine",
+            "EdgeCacheNode",
+        ]
+
+        # Exact match
+        if node_type in edge_suffixes:
+            return True
+
+        # Check if it starts with Edge/edge
+        for prefix in edge_prefixes:
+            if node_type.startswith(prefix):
+                return True
+
+        # Check if it ends with EdgeNode (for custom edge nodes)
+        if node_type.endswith("EdgeNode"):
+            return True
+
+        return False
+
     # Fluent API methods for backward compatibility
     def add_node_fluent(
         self, node_id: str, node_class_or_type: Any, **config
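To illustrate the name-based fallback heuristic above (used only when no EdgeInfrastructure is attached), a quick sketch against the private helper; names other than the listed suffixes are made-up examples:

builder = WorkflowBuilder()
builder._is_edge_node("EdgeDataNode")        # True  - exact match in edge_suffixes
builder._is_edge_node("EdgeMonitoringNode")  # True  - starts with "Edge"
builder._is_edge_node("MyCustomEdgeNode")    # True  - ends with "EdgeNode"
builder._is_edge_node("MyCustomNode")        # False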
@@ -539,6 +759,142 @@ class WorkflowBuilder:
         self._metadata.update(kwargs)
         return self

+    def add_typed_connection(
+        self,
+        from_node: str,
+        from_output: str,
+        to_node: str,
+        to_input: str,
+        contract: Union[str, ConnectionContract],
+        validate_immediately: bool = False,
+    ) -> "WorkflowBuilder":
+        """
+        Add a typed connection with contract validation.
+
+        This is the new contract-based connection method that enforces
+        validation contracts on data flowing between nodes.
+
+        Args:
+            from_node: Source node ID
+            from_output: Output field from source
+            to_node: Target node ID
+            to_input: Input field on target
+            contract: Contract name (string) or ConnectionContract instance
+            validate_immediately: Whether to validate contract definitions now
+
+        Returns:
+            Self for chaining
+
+        Raises:
+            WorkflowValidationError: If contract is invalid or nodes don't exist
+            ConnectionError: If connection setup fails
+
+        Example:
+            # Using predefined contract
+            workflow.add_typed_connection(
+                "csv_reader", "data", "processor", "input_data",
+                contract="string_data"
+            )
+
+            # Using custom contract
+            custom_contract = ConnectionContract(
+                name="user_data_flow",
+                source_schema={"type": "object", "properties": {"id": {"type": "string"}}},
+                target_schema={"type": "object", "properties": {"id": {"type": "string"}}},
+                security_policies=[SecurityPolicy.NO_PII]
+            )
+            workflow.add_typed_connection(
+                "user_source", "user", "user_processor", "user_data",
+                contract=custom_contract
+            )
+        """
+        # Resolve contract
+        if isinstance(contract, str):
+            contract_obj = self._contract_registry.get(contract)
+            if not contract_obj:
+                available_contracts = self._contract_registry.list_contracts()
+                raise WorkflowValidationError(
+                    f"Contract '{contract}' not found. Available contracts: {available_contracts}"
+                )
+            contract = contract_obj
+
+        # Add the standard connection first
+        self.add_connection(from_node, from_output, to_node, to_input)
+
+        # Store the contract for this connection
+        connection_id = f"{from_node}.{from_output} → {to_node}.{to_input}"
+        self.connection_contracts[connection_id] = contract
+
+        # Immediate validation if requested
+        if validate_immediately:
+            # Validate that contract schemas are valid
+            try:
+                if contract.source_schema:
+                    from jsonschema import Draft7Validator
+
+                    Draft7Validator.check_schema(contract.source_schema)
+                if contract.target_schema:
+                    Draft7Validator.check_schema(contract.target_schema)
+            except Exception as e:
+                raise WorkflowValidationError(
+                    f"Invalid contract schema for connection {connection_id}: {e}"
+                )
+
+        logger.info(
+            f"Added typed connection '{connection_id}' with contract '{contract.name}'"
+        )
+
+        return self
+
+    def get_connection_contract(
+        self, connection_id: str
+    ) -> Optional[ConnectionContract]:
+        """
+        Get the contract for a specific connection.
+
+        Args:
+            connection_id: Connection identifier in format "from.output → to.input"
+
+        Returns:
+            ConnectionContract if found, None otherwise
+        """
+        return self.connection_contracts.get(connection_id)
+
+    def list_connection_contracts(self) -> dict[str, str]:
+        """
+        List all connection contracts in this workflow.
+
+        Returns:
+            Dict mapping connection IDs to contract names
+        """
+        return {
+            conn_id: contract.name
+            for conn_id, contract in self.connection_contracts.items()
+        }
+
+    def validate_all_contracts(self) -> tuple[bool, list[str]]:
+        """
+        Validate all connection contracts in the workflow.
+
+        Returns:
+            Tuple of (all_valid, list_of_errors)
+        """
+        errors = []
+
+        for connection_id, contract in self.connection_contracts.items():
+            try:
+                # Validate contract schemas
+                if contract.source_schema:
+                    from jsonschema import Draft7Validator
+
+                    Draft7Validator.check_schema(contract.source_schema)
+                if contract.target_schema:
+                    Draft7Validator.check_schema(contract.target_schema)
+            except Exception as e:
+                errors.append(f"Contract '{contract.name}' for {connection_id}: {e}")
+
+        return len(errors) == 0, errors
+
     def add_workflow_inputs(
         self, input_node_id: str, input_mappings: dict
     ) -> "WorkflowBuilder":
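A hedged sketch combining the new contract helpers; the "string_data" contract name comes from the docstring above, and "csv_reader"/"processor" stand in for nodes already added to the builder:

builder.add_typed_connection(
    "csv_reader", "data", "processor", "input_data",
    contract="string_data",
)

print(builder.list_connection_contracts())
# {'csv_reader.data → processor.input_data': 'string_data'}

ok, errors = builder.validate_all_contracts()
if not ok:
    print(errors)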
@@ -623,6 +979,13 @@ class WorkflowBuilder:
         version = metadata.pop("version", "1.0.0")
         author = metadata.pop("author", "")

+        # Initialize edge infrastructure if needed
+        if self._has_edge_nodes and not self._edge_infrastructure:
+            from kailash.workflow.edge_infrastructure import EdgeInfrastructure
+
+            self._edge_infrastructure = EdgeInfrastructure(self.edge_config)
+            logger.info("Initialized edge infrastructure for workflow")
+
         # Create workflow
         workflow = Workflow(
             workflow_id=workflow_id,
@@ -633,6 +996,27 @@ class WorkflowBuilder:
             metadata=metadata,
         )

+        # Store edge infrastructure reference in workflow metadata if present
+        if self._edge_infrastructure:
+            workflow.metadata["_edge_infrastructure"] = self._edge_infrastructure
+
+        # Validate parameter declarations before building workflow
+        param_issues = self.validate_parameter_declarations(warn_on_issues=True)
+
+        # Check for critical parameter errors that should block workflow creation
+        critical_errors = [
+            issue for issue in param_issues if issue.severity == IssueSeverity.ERROR
+        ]
+        if critical_errors:
+            error_messages = [
+                f"{issue.node_id}: {issue.message}" for issue in critical_errors
+            ]
+            raise WorkflowValidationError(
+                "Cannot build workflow due to parameter declaration errors:\n"
+                + "\n".join(f" - {msg}" for msg in error_messages)
+                + "\n\nSee: sdk-users/7-gold-standards/enterprise-parameter-passing-gold-standard.md"
+            )
+
         # Add nodes to workflow
         for node_id, node_info in self.nodes.items():
             try:
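Because build() now raises when critical parameter-declaration issues are found, callers may want to handle that explicitly; a brief sketch, assuming a builder configured with a node whose required parameters are missing:

from kailash.sdk_exceptions import WorkflowValidationError

try:
    workflow = builder.build()
except WorkflowValidationError as exc:
    # The message lists each offending node as "node_id: message"
    print(f"Workflow rejected: {exc}")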
@@ -645,6 +1029,16 @@ class WorkflowBuilder:
                 # Node class was provided
                 node_class = node_info["class"]
                 node_config = node_info.get("config", {})
+
+                # Inject edge infrastructure if this is an edge node
+                if self._edge_infrastructure and self._is_edge_node(
+                    node_class.__name__
+                ):
+                    node_config["_edge_infrastructure"] = self._edge_infrastructure
+                    logger.debug(
+                        f"Injected edge infrastructure into {node_class.__name__}"
+                    )
+
                 workflow.add_node(
                     node_id=node_id, node_or_type=node_class, **node_config
                 )
@@ -652,6 +1046,12 @@ class WorkflowBuilder:
                 # String node type
                 node_type = node_info["type"]
                 node_config = node_info.get("config", {})
+
+                # Inject edge infrastructure if this is an edge node
+                if self._edge_infrastructure and self._is_edge_node(node_type):
+                    node_config["_edge_infrastructure"] = self._edge_infrastructure
+                    logger.debug(f"Injected edge infrastructure into {node_type}")
+
                 workflow.add_node(
                     node_id=node_id, node_or_type=node_type, **node_config
                 )
@@ -711,9 +1111,13 @@ class WorkflowBuilder:
                     self.workflow_parameters[workflow_param]
                 )

-        # Store workflow parameters in metadata for runtime reference
+        # Store workflow parameters and contracts in metadata for runtime reference
         workflow.metadata["workflow_parameters"] = self.workflow_parameters
         workflow.metadata["parameter_mappings"] = self.parameter_mappings
+        workflow.metadata["connection_contracts"] = {
+            conn_id: contract.to_dict()
+            for conn_id, contract in self.connection_contracts.items()
+        }

         logger.info(
             f"Built workflow '{workflow_id}' with "
@@ -789,6 +1193,7 @@ class WorkflowBuilder:
         self._metadata = {}
         self.workflow_parameters = {}
         self.parameter_mappings = {}
+        self.connection_contracts = {}
         return self

     @classmethod