kailash 0.7.0__py3-none-any.whl → 0.8.0__py3-none-any.whl

This diff shows the changes between these two published package versions as they appear in the public registry.
@@ -134,13 +134,15 @@ class AccessControlledRuntime:
         This method has the exact same signature as the standard runtime,
         ensuring complete compatibility.
         """
-        # Check workflow-level access
-        workflow_decision = self.acm.check_workflow_access(
-            self.user_context, workflow.workflow_id, WorkflowPermission.EXECUTE
-        )
-
-        if not workflow_decision.allowed:
-            raise PermissionError(f"Access denied: {workflow_decision.reason}")
+        # Only check access control if it's enabled
+        if self.acm.enabled:
+            # Check workflow-level access
+            workflow_decision = self.acm.check_workflow_access(
+                self.user_context, workflow.workflow_id, WorkflowPermission.EXECUTE
+            )
+
+            if not workflow_decision.allowed:
+                raise PermissionError(f"Access denied: {workflow_decision.reason}")
 
         # For simplicity, directly execute with the base runtime
         # In a full implementation, we would wrap nodes or intercept execution
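The practical effect: with access control disabled, execution no longer consults the access-control manager at all. A standalone sketch of the new guard semantics, using simplified stand-ins rather than the real kailash classes:

    from dataclasses import dataclass

    @dataclass
    class Decision:
        allowed: bool
        reason: str = ""

    class DenyAllACM:
        """Stand-in access-control manager that denies everything."""
        enabled = False  # flip to True to restore 0.7.0-style enforcement

        def check_workflow_access(self, user, workflow_id, permission):
            return Decision(allowed=False, reason=f"{user} may not {permission}")

    def execute(acm, user, workflow_id):
        # Mirrors the new guard: skip the permission check entirely when disabled
        if acm.enabled:
            decision = acm.check_workflow_access(user, workflow_id, "EXECUTE")
            if not decision.allowed:
                raise PermissionError(f"Access denied: {decision.reason}")
        return {"status": "completed"}

    print(execute(DenyAllACM(), "guest", "wf-1"))  # {'status': 'completed'}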
kailash/runtime/runner.py CHANGED
@@ -91,7 +91,10 @@ class WorkflowRunner:
         Returns:
             Status information
         """
-        return self.task_manager.get_run_status(run_id)
+        summary = self.task_manager.get_run_summary(run_id)
+        if summary:
+            return summary.model_dump()
+        return {}
 
     def get_run_history(
         self, workflow_name: str | None = None, limit: int = 10
@@ -105,6 +108,5 @@ class WorkflowRunner:
         Returns:
             List of run summaries
         """
-        return self.task_manager.get_run_history(
-            workflow_name=workflow_name, limit=limit
-        )
+        runs = self.task_manager.list_runs(workflow_name=workflow_name, limit=limit)
+        return [run.model_dump() for run in runs]
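Both methods now return plain dicts built from Pydantic models (`get_run_summary`/`list_runs` replace the old `get_run_status`/`get_run_history` calls on the task manager). A minimal sketch of the serialization step, with illustrative `RunSummary` fields since the diff doesn't show the model:

    from pydantic import BaseModel

    class RunSummary(BaseModel):  # illustrative fields, not the real model
        run_id: str
        status: str

    summary = RunSummary(run_id="run-123", status="completed")
    print(summary.model_dump())  # {'run_id': 'run-123', 'status': 'completed'}
    # model_dump() is Pydantic v2 API; note an unknown run_id now yields {}
    # from get_run_status() rather than whatever the old call returned.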
@@ -470,7 +470,7 @@ class NodeTestHelper:
             pass  # Expected
 
 
-class TestReporter:
+class WorkflowTestReporter:
     """Generate test reports for workflows."""
 
     def __init__(self, task_manager: TaskManager):
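A plausible motive for the rename (an assumption; the diff doesn't state one): pytest's default collection treats any class named `Test*` as a test class, so a helper with an `__init__` constructor triggers collection warnings. Saving this sketch and running pytest over it shows the difference:

    class TestReporter:  # collected by pytest; warns because of __init__
        def __init__(self, task_manager):
            self.task_manager = task_manager

    class WorkflowTestReporter:  # ignored by pytest's default collection rules
        def __init__(self, task_manager):
            self.task_manager = task_manager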
kailash/security.py CHANGED
@@ -627,9 +627,13 @@ def sanitize_input(
 
     # Machine learning frameworks
    try:
-        from sklearn.base import BaseEstimator, TransformerMixin
+        # Check if we're running under coverage to avoid instrumentation conflicts
+        import sys
 
-        allowed_types.extend([BaseEstimator, TransformerMixin])
+        if "coverage" not in sys.modules:
+            from sklearn.base import BaseEstimator, TransformerMixin
+
+            allowed_types.extend([BaseEstimator, TransformerMixin])
    except ImportError:
        pass
 
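The guard in isolation: it detects coverage by module presence, so it only takes effect when the coverage package was imported before this code runs (e.g. under `coverage run` or `pytest --cov`). A self-contained sketch of the pattern:

    import sys

    allowed_types: list[type] = []
    try:
        if "coverage" not in sys.modules:  # skip sklearn under instrumentation
            from sklearn.base import BaseEstimator, TransformerMixin
            allowed_types.extend([BaseEstimator, TransformerMixin])
    except ImportError:
        pass  # scikit-learn is optional; sanitization simply skips its types

    print([t.__name__ for t in allowed_types])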
@@ -427,9 +427,8 @@ class EnterpriseWorkflowServer(DurableWorkflowServer):
 
     def _register_root_endpoints(self):
         """Override to add enterprise info to root endpoint."""
-        # Don't call super() to avoid duplicate endpoint registration
 
-        # Register the enterprise root endpoint
+        # Register the enterprise root endpoint first (before super() to take precedence)
         @self.app.get("/")
         async def root():
             """Server information with enterprise details."""
@@ -464,3 +463,60 @@ class EnterpriseWorkflowServer(DurableWorkflowServer):
             }
 
             return base_info
+
+        # Now call super() to get other endpoints (health, workflows, etc.) but skip root
+        # We'll register them manually to avoid route conflicts
+        @self.app.get("/workflows")
+        async def list_workflows():
+            """List all registered workflows."""
+            return {
+                name: {
+                    "type": reg.type,
+                    "description": reg.description,
+                    "version": reg.version,
+                    "tags": reg.tags,
+                    "endpoints": self._get_workflow_endpoints(name),
+                }
+                for name, reg in self.workflows.items()
+            }
+
+        @self.app.get("/health")
+        async def health_check():
+            """Server health check."""
+            health_status = {
+                "status": "healthy",
+                "server_type": "enterprise_workflow_server",
+                "workflows": {},
+                "mcp_servers": {},
+            }
+
+            # Check workflow health
+            for name, reg in self.workflows.items():
+                if reg.type == "embedded":
+                    health_status["workflows"][name] = "healthy"
+                else:
+                    # TODO: Implement proxy health check
+                    health_status["workflows"][name] = "unknown"
+
+            # Check MCP server health
+            for name, server in self.mcp_servers.items():
+                # TODO: Implement MCP health check
+                health_status["mcp_servers"][name] = "unknown"
+
+            return health_status
+
+        @self.app.websocket("/ws")
+        async def websocket_endpoint(websocket):
+            """WebSocket for real-time updates."""
+            from fastapi import WebSocket
+
+            await websocket.accept()
+            try:
+                while True:
+                    # Basic WebSocket echo - subclasses can override
+                    data = await websocket.receive_text()
+                    await websocket.send_text(f"Echo: {data}")
+            except Exception as e:
+                logger.error(f"WebSocket error: {e}")
+            finally:
+                await websocket.close()
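The "take precedence" comment relies on general FastAPI/Starlette behavior: routes are matched in registration order, so the first matching route wins. A small demonstration (not kailash-specific):

    from fastapi import FastAPI
    from fastapi.testclient import TestClient

    app = FastAPI()

    @app.get("/")
    async def enterprise_root():
        return {"server": "enterprise"}

    @app.get("/")  # registered second; never matched for GET /
    async def basic_root():
        return {"server": "basic"}

    assert TestClient(app).get("/").json() == {"server": "enterprise"}

Worth noting: despite the "Now call super()" comment, no super() call appears in the new code; the `/workflows`, `/health`, and `/ws` handlers are simply re-registered inline.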
@@ -158,6 +158,9 @@ class WorkflowServer:
 
             return health_status
 
+        # Note: Metrics and authentication endpoints are provided by EnterpriseWorkflowServer
+        # Basic WorkflowServer focuses on core workflow functionality
+
         @self.app.websocket("/ws")
         async def websocket_endpoint(websocket: WebSocket):
             """WebSocket for real-time updates."""
@@ -4,12 +4,10 @@ import logging
 import uuid
 from typing import TYPE_CHECKING, Any
 
+from kailash.nodes.base import Node
 from kailash.sdk_exceptions import ConnectionError, WorkflowValidationError
 from kailash.workflow.graph import Workflow
 
-if TYPE_CHECKING:
-    from kailash.nodes.base import Node
-
 logger = logging.getLogger(__name__)
 
 
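Moving the `Node` import out of the `TYPE_CHECKING` block is required once `isinstance(args[0], Node)` runs at call time: names imported only for type checking don't exist at runtime. A generic illustration:

    from typing import TYPE_CHECKING

    if TYPE_CHECKING:
        from decimal import Decimal  # visible to type checkers only

    def is_decimal(value) -> bool:
        return isinstance(value, Decimal)  # NameError when actually called

    try:
        is_decimal(1)
    except NameError as exc:
        print(exc)  # name 'Decimal' is not defined

The module-level import also lets the builder drop the repeated function-local `from kailash.nodes.base import Node` imports removed in the hunks below.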
@@ -21,6 +19,9 @@ class WorkflowBuilder:
         self.nodes: dict[str, dict[str, Any]] = {}
         self.connections: list[dict[str, str]] = []
         self._metadata: dict[str, Any] = {}
+        # Parameter injection capabilities
+        self.workflow_parameters: dict[str, Any] = {}
+        self.parameter_mappings: dict[str, dict[str, str]] = {}
 
     def add_node(self, *args, **kwargs) -> str:
         """
@@ -74,8 +75,6 @@ class WorkflowBuilder:
             # Pattern: add_node(NodeClass)
             return self._add_node_alternative(args[0], None, **kwargs)
         else:
-            from kailash.nodes.base import Node
-
             if isinstance(args[0], Node):
                 # Pattern: add_node(node_instance)
                 return self._add_node_instance(args[0], None)
@@ -111,8 +110,6 @@ class WorkflowBuilder:
 
         elif len(args) >= 2:
             # Check if first arg is a Node instance
-            from kailash.nodes.base import Node
-
             if isinstance(args[0], Node):
                 # Pattern 4: Instance - add_node(node_instance, "node_id") or add_node(node_instance, "node_id", config)
                 # Config is ignored for instances
@@ -149,8 +146,6 @@ class WorkflowBuilder:
        """Handle legacy fluent API pattern: add_node('node_id', NodeClass, param=value)"""
        import warnings
 
-        from kailash.nodes.base import Node
-
        # If it's a class, validate it's a Node subclass
        if isinstance(node_class_or_type, type) and not issubclass(
            node_class_or_type, Node
@@ -183,8 +178,6 @@ class WorkflowBuilder:
        """Handle alternative pattern: add_node(NodeClass, 'node_id', param=value)"""
        import warnings
 
-        from kailash.nodes.base import Node
-
        # Validate that node_class is actually a Node subclass
        if not isinstance(node_class, type) or not issubclass(node_class, Node):
            raise WorkflowValidationError(
@@ -267,9 +260,6 @@ class WorkflowBuilder:
                f"Node ID '{node_id}' already exists in workflow"
            )
 
-        # Import Node here to avoid circular imports
-        from kailash.nodes.base import Node
-
        # Handle different input types
        if isinstance(node_type, str):
            # String node type name
@@ -606,12 +596,108 @@ class WorkflowBuilder:
                 f"Failed to connect '{from_node}' to '{to_node}': {e}"
             ) from e
 
+        # Parameter injection: Find nodes without incoming connections and inject parameters
+        if self.workflow_parameters:
+            nodes_with_inputs = set()
+            for conn in self.connections:
+                if not conn.get("is_workflow_input"):
+                    nodes_with_inputs.add(conn["to_node"])
+
+            nodes_without_inputs = set(self.nodes.keys()) - nodes_with_inputs
+
+            # For each node without inputs, check if it needs workflow parameters
+            for node_id in nodes_without_inputs:
+                node_info = self.nodes[node_id]
+                node_instance = workflow.get_node(node_id)
+
+                if hasattr(node_instance, "get_parameters"):
+                    params = node_instance.get_parameters()
+
+                    # Check which required parameters are missing from config
+                    for param_name, param_def in params.items():
+                        if param_def.required and param_name not in node_info["config"]:
+                            # Check if this parameter should come from workflow parameters
+                            if param_name in self.workflow_parameters:
+                                # Add to node config
+                                node_info["config"][param_name] = (
+                                    self.workflow_parameters[param_name]
+                                )
+                            elif node_id in self.parameter_mappings:
+                                # Check parameter mappings
+                                mapping = self.parameter_mappings[node_id]
+                                if param_name in mapping:
+                                    workflow_param = mapping[param_name]
+                                    if workflow_param in self.workflow_parameters:
+                                        node_info["config"][param_name] = (
+                                            self.workflow_parameters[workflow_param]
+                                        )
+
+        # Store workflow parameters in metadata for runtime reference
+        workflow.metadata["workflow_parameters"] = self.workflow_parameters
+        workflow.metadata["parameter_mappings"] = self.parameter_mappings
+
         logger.info(
             f"Built workflow '{workflow_id}' with "
             f"{len(self.nodes)} nodes and {len(self.connections)} connections"
         )
         return workflow
 
+    def set_workflow_parameters(self, **parameters) -> "WorkflowBuilder":
+        """
+        Set default parameters that will be passed to all nodes.
+
+        Args:
+            **parameters: Key-value pairs of workflow-level parameters
+
+        Returns:
+            Self for chaining
+        """
+        self.workflow_parameters.update(parameters)
+        return self
+
+    def add_parameter_mapping(
+        self, node_id: str, mappings: dict[str, str]
+    ) -> "WorkflowBuilder":
+        """
+        Add parameter mappings for a specific node.
+
+        Args:
+            node_id: Node to configure
+            mappings: Dict mapping workflow param names to node param names
+
+        Returns:
+            Self for chaining
+        """
+        if node_id not in self.parameter_mappings:
+            self.parameter_mappings[node_id] = {}
+        self.parameter_mappings[node_id].update(mappings)
+        return self
+
+    def add_input_connection(
+        self, to_node: str, to_input: str, from_workflow_param: str
+    ) -> "WorkflowBuilder":
+        """
+        Connect a workflow parameter directly to a node input.
+
+        Args:
+            to_node: Target node ID
+            to_input: Input parameter name on the node
+            from_workflow_param: Workflow parameter name
+
+        Returns:
+            Self for chaining
+        """
+        # Add a special connection type for workflow inputs
+        connection = {
+            "from_node": "__workflow_input__",
+            "from_output": from_workflow_param,
+            "to_node": to_node,
+            "to_input": to_input,
+            "is_workflow_input": True,
+        }
+        self.connections.append(connection)
+        return self
+
     def clear(self) -> "WorkflowBuilder":
         """
         Clear builder state.
622
708
  self.nodes = {}
623
709
  self.connections = []
624
710
  self._metadata = {}
711
+ self.workflow_parameters = {}
712
+ self.parameter_mappings = {}
625
713
  return self
626
714
 
627
715
  @classmethod
@@ -184,7 +184,7 @@ class CyclicWorkflowExecutor:
184
184
  if not workflow.has_cycles():
185
185
  # No cycles, use standard DAG execution
186
186
  logger.info("No cycles detected, using standard DAG execution")
187
- return self.dag_runner.run(workflow, parameters), run_id
187
+ return self.dag_runner.execute(workflow, parameters), run_id
188
188
 
189
189
  # Execute with cycle support
190
190
  try:
@@ -370,18 +370,110 @@ class CyclicWorkflowExecutor:
             )
             results.update(cycle_results)
         else:
-            # Execute DAG nodes
-            for node_id in stage.nodes:
-                if node_id not in state.node_outputs:
-                    logger.info(f"Executing DAG node: {node_id}")
-                    node_result = self._execute_node(
-                        workflow, node_id, state, task_manager=task_manager
-                    )
-                    results[node_id] = node_result
-                    state.node_outputs[node_id] = node_result
+            # Execute DAG nodes using extracted method
+            dag_results = self._execute_dag_portion(
+                workflow, stage.nodes, state, task_manager
+            )
+            results.update(dag_results)
+
+        return results
+
+    def _execute_dag_portion(
+        self,
+        workflow: Workflow,
+        dag_nodes: list[str],
+        state: WorkflowState,
+        task_manager: TaskManager | None = None,
+    ) -> dict[str, Any]:
+        """Execute DAG (non-cyclic) portion of the workflow.
+
+        Args:
+            workflow: Workflow instance
+            dag_nodes: List of DAG node IDs to execute
+            state: Workflow state
+            task_manager: Optional task manager for tracking
+
+        Returns:
+            Dictionary with node IDs as keys and their results as values
+        """
+        results = {}
+
+        for node_id in dag_nodes:
+            if node_id not in state.node_outputs:
+                logger.info(f"Executing DAG node: {node_id}")
+                node_result = self._execute_node(
+                    workflow, node_id, state, task_manager=task_manager
+                )
+                results[node_id] = node_result
+                state.node_outputs[node_id] = node_result
+
+        return results
+
+    def _execute_cycle_groups(
+        self,
+        workflow: Workflow,
+        cycle_groups: list["CycleGroup"],
+        state: WorkflowState,
+        task_manager: TaskManager | None = None,
+    ) -> dict[str, Any]:
+        """Execute cycle groups portion of the workflow.
+
+        Args:
+            workflow: Workflow instance
+            cycle_groups: List of cycle groups to execute
+            state: Workflow state
+            task_manager: Optional task manager for tracking
+
+        Returns:
+            Dictionary with node IDs as keys and their results as values
+        """
+        results = {}
+
+        for cycle_group in cycle_groups:
+            logger.info(f"Executing cycle group: {cycle_group.cycle_id}")
+            cycle_results = self._execute_cycle_group(
+                workflow, cycle_group, state, task_manager
+            )
+            results.update(cycle_results)
 
         return results
 
+    def _propagate_parameters(
+        self,
+        current_params: dict[str, Any],
+        current_results: dict[str, Any],
+        cycle_config: dict[str, Any] | None = None,
+    ) -> dict[str, Any]:
+        """Handle parameter propagation between cycle iterations.
+
+        Args:
+            current_params: Current iteration parameters
+            current_results: Results from current iteration
+            cycle_config: Cycle configuration (optional)
+
+        Returns:
+            Updated parameters for the next iteration
+        """
+        # Base propagation: copy current results for next iteration
+        next_params = current_results.copy() if current_results else {}
+
+        # Apply any cycle-specific parameter mappings if provided
+        if cycle_config and "parameter_mappings" in cycle_config:
+            mappings = cycle_config["parameter_mappings"]
+            for src_key, dst_key in mappings.items():
+                if src_key in current_results:
+                    next_params[dst_key] = current_results[src_key]
+
+        # Preserve any initial parameters that aren't overridden
+        for key, value in current_params.items():
+            if key not in next_params:
+                next_params[key] = value
+
+        # Filter out None values to avoid validation errors
+        next_params = self._filter_none_values(next_params)
+
+        return next_params
+
     def _execute_cycle_group(
         self,
         workflow: Workflow,
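A worked example of the propagation rules in `_propagate_parameters` (standalone sketch; `_filter_none_values` is inlined as a dict comprehension, which matches its comment but is an assumption about its exact behavior):

    def propagate(current_params, current_results, cycle_config=None):
        next_params = dict(current_results) if current_results else {}
        if cycle_config and "parameter_mappings" in cycle_config:
            for src, dst in cycle_config["parameter_mappings"].items():
                if src in current_results:
                    next_params[dst] = current_results[src]
        for key, value in current_params.items():  # keep unshadowed initial params
            next_params.setdefault(key, value)
        return {k: v for k, v in next_params.items() if v is not None}

    print(propagate(
        {"threshold": 0.5, "seed": 42},
        {"score": 0.8, "threshold": None},
        {"parameter_mappings": {"score": "prev_score"}},
    ))
    # {'score': 0.8, 'prev_score': 0.8, 'seed': 42}
    # Note: the None result shadows the initial threshold before filtering,
    # so threshold is dropped entirely rather than reverting to 0.5.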