kailash 0.8.4__py3-none-any.whl → 0.8.6__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (99) hide show
  1. kailash/__init__.py +5 -11
  2. kailash/channels/__init__.py +2 -1
  3. kailash/channels/mcp_channel.py +23 -4
  4. kailash/cli/__init__.py +11 -1
  5. kailash/cli/validate_imports.py +202 -0
  6. kailash/cli/validation_audit.py +570 -0
  7. kailash/core/actors/supervisor.py +1 -1
  8. kailash/core/resilience/bulkhead.py +15 -5
  9. kailash/core/resilience/circuit_breaker.py +74 -1
  10. kailash/core/resilience/health_monitor.py +433 -33
  11. kailash/edge/compliance.py +33 -0
  12. kailash/edge/consistency.py +609 -0
  13. kailash/edge/coordination/__init__.py +30 -0
  14. kailash/edge/coordination/global_ordering.py +355 -0
  15. kailash/edge/coordination/leader_election.py +217 -0
  16. kailash/edge/coordination/partition_detector.py +296 -0
  17. kailash/edge/coordination/raft.py +485 -0
  18. kailash/edge/discovery.py +63 -1
  19. kailash/edge/migration/__init__.py +19 -0
  20. kailash/edge/migration/edge_migration_service.py +384 -0
  21. kailash/edge/migration/edge_migrator.py +832 -0
  22. kailash/edge/monitoring/__init__.py +21 -0
  23. kailash/edge/monitoring/edge_monitor.py +736 -0
  24. kailash/edge/prediction/__init__.py +10 -0
  25. kailash/edge/prediction/predictive_warmer.py +591 -0
  26. kailash/edge/resource/__init__.py +102 -0
  27. kailash/edge/resource/cloud_integration.py +796 -0
  28. kailash/edge/resource/cost_optimizer.py +949 -0
  29. kailash/edge/resource/docker_integration.py +919 -0
  30. kailash/edge/resource/kubernetes_integration.py +893 -0
  31. kailash/edge/resource/platform_integration.py +913 -0
  32. kailash/edge/resource/predictive_scaler.py +959 -0
  33. kailash/edge/resource/resource_analyzer.py +824 -0
  34. kailash/edge/resource/resource_pools.py +610 -0
  35. kailash/integrations/dataflow_edge.py +261 -0
  36. kailash/mcp_server/registry_integration.py +1 -1
  37. kailash/mcp_server/server.py +351 -8
  38. kailash/mcp_server/transports.py +305 -0
  39. kailash/middleware/gateway/event_store.py +1 -0
  40. kailash/monitoring/__init__.py +18 -0
  41. kailash/monitoring/alerts.py +646 -0
  42. kailash/monitoring/metrics.py +677 -0
  43. kailash/nodes/__init__.py +2 -0
  44. kailash/nodes/ai/semantic_memory.py +2 -2
  45. kailash/nodes/base.py +622 -1
  46. kailash/nodes/code/python.py +44 -3
  47. kailash/nodes/data/async_sql.py +42 -20
  48. kailash/nodes/edge/__init__.py +36 -0
  49. kailash/nodes/edge/base.py +240 -0
  50. kailash/nodes/edge/cloud_node.py +710 -0
  51. kailash/nodes/edge/coordination.py +239 -0
  52. kailash/nodes/edge/docker_node.py +825 -0
  53. kailash/nodes/edge/edge_data.py +582 -0
  54. kailash/nodes/edge/edge_migration_node.py +396 -0
  55. kailash/nodes/edge/edge_monitoring_node.py +421 -0
  56. kailash/nodes/edge/edge_state.py +673 -0
  57. kailash/nodes/edge/edge_warming_node.py +393 -0
  58. kailash/nodes/edge/kubernetes_node.py +652 -0
  59. kailash/nodes/edge/platform_node.py +766 -0
  60. kailash/nodes/edge/resource_analyzer_node.py +378 -0
  61. kailash/nodes/edge/resource_optimizer_node.py +501 -0
  62. kailash/nodes/edge/resource_scaler_node.py +397 -0
  63. kailash/nodes/governance.py +410 -0
  64. kailash/nodes/ports.py +676 -0
  65. kailash/nodes/rag/registry.py +1 -1
  66. kailash/nodes/transaction/distributed_transaction_manager.py +48 -1
  67. kailash/nodes/transaction/saga_state_storage.py +2 -1
  68. kailash/nodes/validation.py +8 -8
  69. kailash/runtime/local.py +374 -1
  70. kailash/runtime/validation/__init__.py +12 -0
  71. kailash/runtime/validation/connection_context.py +119 -0
  72. kailash/runtime/validation/enhanced_error_formatter.py +202 -0
  73. kailash/runtime/validation/error_categorizer.py +164 -0
  74. kailash/runtime/validation/import_validator.py +446 -0
  75. kailash/runtime/validation/metrics.py +380 -0
  76. kailash/runtime/validation/performance.py +615 -0
  77. kailash/runtime/validation/suggestion_engine.py +212 -0
  78. kailash/testing/fixtures.py +2 -2
  79. kailash/utils/data_paths.py +74 -0
  80. kailash/workflow/builder.py +413 -8
  81. kailash/workflow/contracts.py +418 -0
  82. kailash/workflow/edge_infrastructure.py +369 -0
  83. kailash/workflow/mermaid_visualizer.py +3 -1
  84. kailash/workflow/migration.py +3 -3
  85. kailash/workflow/templates.py +6 -6
  86. kailash/workflow/type_inference.py +669 -0
  87. kailash/workflow/validation.py +134 -3
  88. {kailash-0.8.4.dist-info → kailash-0.8.6.dist-info}/METADATA +52 -34
  89. {kailash-0.8.4.dist-info → kailash-0.8.6.dist-info}/RECORD +93 -42
  90. kailash/nexus/__init__.py +0 -21
  91. kailash/nexus/cli/__init__.py +0 -5
  92. kailash/nexus/cli/__main__.py +0 -6
  93. kailash/nexus/cli/main.py +0 -176
  94. kailash/nexus/factory.py +0 -413
  95. kailash/nexus/gateway.py +0 -545
  96. {kailash-0.8.4.dist-info → kailash-0.8.6.dist-info}/WHEEL +0 -0
  97. {kailash-0.8.4.dist-info → kailash-0.8.6.dist-info}/entry_points.txt +0 -0
  98. {kailash-0.8.4.dist-info → kailash-0.8.6.dist-info}/licenses/LICENSE +0 -0
  99. {kailash-0.8.4.dist-info → kailash-0.8.6.dist-info}/top_level.txt +0 -0
@@ -0,0 +1,212 @@
1
+ """
2
+ Suggestion engine for generating actionable guidance from validation errors.
3
+
4
+ Provides specific, actionable suggestions for fixing connection validation
5
+ errors based on error category, node type, and connection context.
6
+ """
7
+
8
+ from dataclasses import dataclass
9
+ from typing import Dict, List, Optional
10
+
11
+ from .connection_context import ConnectionContext
12
+ from .error_categorizer import ErrorCategory
13
+
14
+
15
@dataclass
class ValidationSuggestion:
    """Actionable suggestion for fixing a validation error.

    Attributes:
        message: Human-readable suggestion message.
        code_example: Code example showing how to fix the issue.
        documentation_link: Link to relevant documentation.
        alternative_approaches: List of alternative approaches to consider.
    """

    # Human-readable suggestion message (always required).
    message: str

    # Optional code example showing how to fix the issue.
    code_example: Optional[str] = None

    # Optional link to relevant documentation.
    documentation_link: Optional[str] = None

    # Optional list of alternative approaches to consider.
    alternative_approaches: Optional[List[str]] = None
30
+
31
+
32
class ValidationSuggestionEngine:
    """Generates actionable suggestions for connection validation errors.

    Suggestions are built from per-category templates (see
    ``_initialize_suggestion_templates``) and then customized with the
    concrete node type and connection context before being returned as
    :class:`ValidationSuggestion` instances.
    """

    def __init__(self):
        # Template lookup table keyed by ErrorCategory; built once per engine.
        self._suggestion_templates = self._initialize_suggestion_templates()

    def generate_suggestion(
        self,
        error_category: ErrorCategory,
        node_type: str,
        connection_context: ConnectionContext,
        original_error: str,
    ) -> Optional[ValidationSuggestion]:
        """Generate actionable suggestion for a validation error.

        Args:
            error_category: Categorized error type
            node_type: Type of target node (e.g., 'CSVWriterNode')
            connection_context: Context about the failing connection
            original_error: Original error message

        Returns:
            ValidationSuggestion with actionable guidance, or None if no suggestion available
        """
        # Get base suggestion template for error category
        template = self._suggestion_templates.get(error_category)
        if not template:
            # No template registered for this category -> no suggestion.
            return None

        # Customize suggestion based on node type and context
        return self._customize_suggestion(
            template, node_type, connection_context, original_error
        )

    def _initialize_suggestion_templates(self) -> Dict[ErrorCategory, Dict]:
        """Initialize suggestion templates for each error category.

        Each template dict carries four keys:
        ``message`` (base human-readable text), ``code_template``
        (a ``str.format`` template; literal braces are escaped as ``{{ }}``),
        ``doc_link`` (relative documentation path), and ``alternatives``
        (list of fallback approaches).
        """
        return {
            ErrorCategory.TYPE_MISMATCH: {
                "message": "The parameter type doesn't match what the node expects. Check the data types being passed through the connection.",
                "code_template": '# Check the output type from source node\n# Expected: {expected_type}\n# Got: {actual_type}\nworkflow.add_connection("{source}", "{source_port}", "{target}", "{target_port}")',
                "doc_link": "sdk-users/2-core-concepts/validation/common-mistakes.md#type-mismatch",
                "alternatives": [
                    "Add a data transformation node between source and target",
                    "Check if you're connecting to the correct output port",
                    "Verify the source node produces the expected data type",
                ],
            },
            ErrorCategory.MISSING_PARAMETER: {
                "message": "A required parameter is missing. Make sure all required parameters are provided via connections or node configuration.",
                "code_template": '# Add the missing parameter connection:\nworkflow.add_connection("{source}", "{source_port}", "{target}", "{missing_param}")\n\n# Or provide it directly in node configuration:\nworkflow.add_node("{node_type}", "{target}", {{"{missing_param}": "value"}})',
                "doc_link": "sdk-users/2-core-concepts/validation/common-mistakes.md#missing-parameters",
                "alternatives": [
                    "Provide the parameter directly in node configuration",
                    "Create a PythonCodeNode to generate the required parameter",
                    "Check if another node output can provide this parameter",
                ],
            },
            ErrorCategory.CONSTRAINT_VIOLATION: {
                "message": "The parameter value violates validation constraints. Check the parameter requirements for this node type.",
                "code_template": '# Ensure parameter meets requirements:\n# {constraint_details}\nworkflow.add_connection("{source}", "{source_port}", "{target}", "{target_port}")\n\n# Or add validation in source node:\nworkflow.add_node("PythonCodeNode", "validator", {{"code": "result = max(0, input_value)"}})',
                "doc_link": "sdk-users/6-reference/nodes/node-selection-guide.md#parameter-validation",
                "alternatives": [
                    "Add data validation/transformation before the target node",
                    "Check the node documentation for parameter requirements",
                    "Use a different node that accepts your data format",
                ],
            },
            ErrorCategory.SECURITY_VIOLATION: {
                "message": "Potential security issue detected in parameter value. This could indicate SQL injection, script injection, or other security vulnerabilities.",
                "code_template": '# Use parameterized/sanitized approach:\n# For SQL operations:\nworkflow.add_node("SQLDatabaseNode", "safe_query", {{\n "query": "SELECT * FROM table WHERE id = $1",\n "params": ["user_input"]\n}})\n\n# For user input, add validation:\nworkflow.add_node("PythonCodeNode", "sanitizer", {{"code": "result = sanitize_input(user_data)"}})',
                "doc_link": "sdk-users/5-enterprise/security-patterns.md#input-validation",
                "alternatives": [
                    "Use parameterized queries instead of string concatenation",
                    "Add input sanitization/validation nodes",
                    "Review the data source for potential security issues",
                    "Use whitelisting instead of blacklisting for allowed values",
                ],
            },
            ErrorCategory.UNKNOWN: {
                "message": "An unexpected validation error occurred. Check the error details and node documentation.",
                "code_template": '# General troubleshooting:\n# 1. Check node documentation for parameter requirements\n# 2. Verify data types and formats\n# 3. Test with simpler data first\nworkflow.add_connection("{source}", "{source_port}", "{target}", "{target_port}")',
                "doc_link": "sdk-users/3-development/guides/troubleshooting.md",
                "alternatives": [
                    "Check the node documentation for specific requirements",
                    "Test with simplified data to isolate the issue",
                    "Add debug logging to inspect the data flow",
                    "Review similar examples in the documentation",
                ],
            },
        }

    def _customize_suggestion(
        self,
        template: Dict,
        node_type: str,
        connection_context: ConnectionContext,
        original_error: str,
    ) -> ValidationSuggestion:
        """Customize suggestion template with specific context.

        Fills the template's ``code_template`` placeholders from the
        connection context, then appends node-family hints to the base
        message. ``original_error`` is accepted for interface symmetry but
        not currently used in the customization.
        """
        # Extract context information
        source = connection_context.source_node
        # Default to the conventional "result" port when no source port given.
        source_port = connection_context.source_port or "result"
        target = connection_context.target_node
        # NOTE(review): target_port has no fallback; if it is None the code
        # example will literally contain "None" — confirm upstream always
        # supplies it.
        target_port = connection_context.target_port

        # Customize code example. Placeholder values that cannot be known
        # here (expected/actual types, constraint details) are filled with
        # reader-facing hints instead.
        code_example = template["code_template"].format(
            source=source,
            source_port=source_port,
            target=target,
            target_port=target_port,
            node_type=node_type,
            expected_type="<check_node_docs>",
            actual_type="<check_source_output>",
            missing_param=target_port,
            constraint_details="See node documentation for valid ranges/formats",
        )

        # Add node-specific customizations based on substrings of the
        # node type name (DataFlow / database / AI node families).
        message = template["message"]
        if (
            "DataFlow" in node_type
            or node_type.endswith("CreateNode")
            or node_type.endswith("UpdateNode")
        ):
            message += " For DataFlow nodes, ensure the data matches your model schema."

        if "SQL" in node_type or "Database" in node_type:
            message += " For database nodes, verify connection strings and SQL syntax."

        if "LLM" in node_type or "Agent" in node_type:
            message += " For AI nodes, check prompt formatting and model parameters."

        return ValidationSuggestion(
            message=message,
            code_example=code_example,
            documentation_link=template["doc_link"],
            alternative_approaches=template["alternatives"],
        )

    def get_dataflow_specific_suggestion(
        self, error_category: ErrorCategory, connection_context: ConnectionContext
    ) -> Optional[str]:
        """Get DataFlow-specific suggestions for common issues.

        Returns a static hint string for the given category, or None for
        categories without a DataFlow-specific message. The connection
        context is accepted for interface symmetry but not consulted.
        """
        dataflow_suggestions = {
            ErrorCategory.TYPE_MISMATCH: (
                "For DataFlow models, ensure the connected data matches your model field types. "
                "Check your @db.model class definition for expected types."
            ),
            ErrorCategory.SECURITY_VIOLATION: (
                "DataFlow automatically sanitizes SQL parameters, but connection-level validation "
                "caught a potential issue. Review the data source for SQL injection attempts."
            ),
            ErrorCategory.MISSING_PARAMETER: (
                "DataFlow nodes require all model fields to be provided. Check your model definition "
                "and ensure all required fields have connections or default values."
            ),
        }
        return dataflow_suggestions.get(error_category)

    def get_common_connection_patterns(self, node_type: str) -> List[str]:
        """Get common connection patterns for specific node types.

        Returns example ``workflow.add_connection`` snippets for a handful
        of well-known node types; unknown types yield an empty list.
        """
        patterns = {
            "CSVReaderNode": [
                "workflow.add_connection('reader', 'data', 'processor', 'input_data')",
                "workflow.add_connection('reader', 'metadata.rows', 'counter', 'count')",
            ],
            "HTTPRequestNode": [
                "workflow.add_connection('api', 'response.data', 'processor', 'json_data')",
                "workflow.add_connection('api', 'status_code', 'checker', 'status')",
            ],
            "LLMAgentNode": [
                "workflow.add_connection('input', 'text', 'llm', 'prompt')",
                "workflow.add_connection('llm', 'result', 'output', 'analysis')",
            ],
            "SQLDatabaseNode": [
                "workflow.add_connection('data', 'records', 'db', 'data')",
                "workflow.add_connection('config', 'table_name', 'db', 'table')",
            ],
        }
        return patterns.get(node_type, [])
@@ -183,8 +183,8 @@ class AsyncWorkflowFixtures:
183
183
  f"mysql://{user}:{password}@localhost:{actual_port}/{database}"
184
184
  )
185
185
 
186
- # Wait for MySQL to be ready (takes longer than PostgreSQL)
187
- await asyncio.sleep(10)
186
+ # Wait for MySQL to be ready (shorter wait for tests)
187
+ await asyncio.sleep(0.1)
188
188
 
189
189
  return DatabaseFixture(
190
190
  container=container,
@@ -0,0 +1,74 @@
1
+ """Data path utilities for examples and PythonCodeNode execution.
2
+
3
+ This module provides helper functions for constructing standardized paths
4
+ to input and output data files used in workflows and examples.
5
+ """
6
+
7
+ import os
8
+ from pathlib import Path
9
+
10
+
11
def get_project_root() -> Path:
    """Get the project root directory.

    Walks upward from this file looking for a marker file
    (``setup.py`` or ``pyproject.toml``) that identifies the project root.

    Returns:
        Path to the kailash_python_sdk project root
    """
    # Find the project root by looking for setup.py or pyproject.toml
    current = Path(__file__).resolve()
    for parent in current.parents:
        if (parent / "setup.py").exists() or (parent / "pyproject.toml").exists():
            return parent
    # Fallback: this file lives at src/kailash/utils/data_paths.py, so the
    # project root is FOUR levels up (utils -> kailash -> src -> root).
    # The previous fallback used only three `.parent` hops, which stops at
    # "src" instead of the project root. Chained `.parent` saturates at the
    # filesystem root, so this never raises for shallow paths.
    return current.parent.parent.parent.parent
24
+
25
+
26
def get_input_data_path(filename: str) -> str:
    """Return the full path to an input data file.

    Args:
        filename: Name of the input data file

    Returns:
        Full path to the file under ``<project_root>/data/inputs``
    """
    root = get_project_root()
    return str(root / "data" / "inputs" / filename)
37
+
38
+
39
def get_output_data_path(filename: str) -> str:
    """Return the full path to an output data file.

    Creates the output directory if it does not already exist, so callers
    can write to the returned path immediately.

    Args:
        filename: Name of the output data file

    Returns:
        Full path to the file under ``<project_root>/data/outputs``
    """
    target = get_project_root() / "data" / "outputs" / filename

    # Guarantee the parent directory exists before handing the path back.
    target.parent.mkdir(parents=True, exist_ok=True)

    return str(target)
55
+
56
+
57
def get_data_path(subfolder: str, filename: str) -> str:
    """Return the full path to a data file in a specific subfolder.

    Args:
        subfolder: Subfolder within the data directory (e.g., 'inputs',
            'outputs', 'templates')
        filename: Name of the data file

    Returns:
        Full path to the data file
    """
    target = get_project_root() / "data" / subfolder / filename

    # Output-style subfolders are created eagerly so write operations
    # against the returned path do not fail on a missing directory.
    if subfolder in ("outputs", "exports", "tracking"):
        target.parent.mkdir(parents=True, exist_ok=True)

    return str(target)