kailash 0.1.2__py3-none-any.whl → 0.1.3__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,439 @@
+"""Workflow node for wrapping workflows as reusable components.
+
+This module provides the WorkflowNode class that enables hierarchical workflow
+composition by wrapping entire workflows as single nodes. This allows complex
+workflows to be reused as building blocks in larger workflows.
+
+Design Philosophy:
+- Workflows as first-class components
+- Hierarchical composition patterns
+- Clean abstraction of complexity
+- Consistent node interface
+
+Key Features:
+- Dynamic parameter discovery from entry nodes
+- Multiple loading methods (instance, file, dict)
+- Automatic output mapping from exit nodes
+- Full compatibility with existing runtime
+"""
+
+import json
+from pathlib import Path
+from typing import Any, Dict, Optional
+
+import yaml
+
+from kailash.nodes.base import Node, NodeParameter, register_node
+from kailash.sdk_exceptions import NodeConfigurationError, NodeExecutionError
+from kailash.workflow.graph import Workflow
+
+
+@register_node()
+class WorkflowNode(Node):
+    """A node that encapsulates and executes an entire workflow.
+
+    This node allows workflows to be composed hierarchically, where a complex
+    workflow can be used as a single node within another workflow. This enables
+    powerful composition patterns and reusability.
+
+    Design Philosophy:
+    - Workflows become reusable components
+    - Complex logic hidden behind simple interface
+    - Hierarchical composition of workflows
+    - Consistent with standard node behavior
+
+    Upstream Components:
+    - Parent workflows that use this node
+    - Workflow builders creating composite workflows
+    - CLI/API creating nested workflow structures
+
+    Downstream Usage:
+    - The wrapped workflow and all its nodes
+    - Runtime executing the inner workflow
+    - Results passed to subsequent nodes
+
+    Usage Patterns:
+    1. Direct workflow wrapping:
+        ```python
+        inner_workflow = Workflow("data_processing")
+        # ... build workflow ...
+        node = WorkflowNode(workflow=inner_workflow)
+        ```
+
+    2. Loading from file:
+        ```python
+        node = WorkflowNode(workflow_path="workflows/processor.yaml")
+        ```
+
+    3. Loading from dictionary:
+        ```python
+        workflow_dict = {"nodes": {...}, "connections": [...]}
+        node = WorkflowNode(workflow_dict=workflow_dict)
+        ```
+
+    Implementation Details:
+    - Parameters derived from workflow entry nodes
+    - Outputs mapped from workflow exit nodes
+    - Uses LocalRuntime for execution
+    - Validates workflow structure on load
+
+    Error Handling:
+    - Configuration errors for invalid workflows
+    - Execution errors wrapped with context
+    - Clear error messages for debugging
+
+    Side Effects:
+    - Executes entire workflow when run
+    - May create temporary files/state
+    - Logs execution progress
+    """
+
+    def __init__(self, workflow: Optional[Workflow] = None, **kwargs):
+        """Initialize the WorkflowNode.
+
+        Args:
+            workflow: Optional workflow instance to wrap
+            **kwargs: Additional configuration including:
+                - workflow_path: Path to load workflow from file
+                - workflow_dict: Dictionary representation of workflow
+                - name: Display name for the node
+                - description: Node description
+                - input_mapping: Map node inputs to workflow inputs
+                - output_mapping: Map workflow outputs to node outputs
+
+        Raises:
+            NodeConfigurationError: If no workflow source provided or
+                if workflow loading fails
+        """
+        # Store workflow configuration before parent init
+        self._workflow = workflow
+        self._workflow_path = kwargs.get("workflow_path")
+        self._workflow_dict = kwargs.get("workflow_dict")
+        self._input_mapping = kwargs.get("input_mapping", {})
+        self._output_mapping = kwargs.get("output_mapping", {})
+
+        # Initialize parent
+        super().__init__(**kwargs)
+
+        # Runtime will be created lazily to avoid circular imports
+        self._runtime = None
+
+        # Load workflow if not provided directly
+        if not self._workflow:
+            self._load_workflow()
+
+    def _validate_config(self):
+        """Override validation for WorkflowNode.
+
+        WorkflowNode has dynamic parameters based on the wrapped workflow,
+        so we skip the strict validation that base Node does.
+        """
+        # Skip parameter validation for WorkflowNode since parameters
+        # are dynamically determined from the wrapped workflow
+        pass
+
+    def _load_workflow(self):
+        """Load workflow from path or dictionary.
+
+        Attempts to load the workflow from configured sources:
+        1. From file path (JSON or YAML)
+        2. From dictionary representation
+
+        Raises:
+            NodeConfigurationError: If no valid source or loading fails
+        """
+        if self._workflow_path:
+            path = Path(self._workflow_path)
+            if not path.exists():
+                raise NodeConfigurationError(
+                    f"Workflow file not found: {self._workflow_path}"
+                )
+
+            try:
+                if path.suffix == ".json":
+                    with open(path, "r") as f:
+                        data = json.load(f)
+                    self._workflow = Workflow.from_dict(data)
+                elif path.suffix in [".yaml", ".yml"]:
+                    with open(path, "r") as f:
+                        data = yaml.safe_load(f)
+                    self._workflow = Workflow.from_dict(data)
+                else:
+                    raise NodeConfigurationError(
+                        f"Unsupported workflow file format: {path.suffix}"
+                    )
+            except Exception as e:
+                raise NodeConfigurationError(
+                    f"Failed to load workflow from {path}: {e}"
+                ) from e
+
+        elif self._workflow_dict:
+            try:
+                self._workflow = Workflow.from_dict(self._workflow_dict)
+            except Exception as e:
+                raise NodeConfigurationError(
+                    f"Failed to load workflow from dictionary: {e}"
+                ) from e
+        else:
+            raise NodeConfigurationError(
+                "WorkflowNode requires either 'workflow', 'workflow_path', "
+                "or 'workflow_dict' parameter"
+            )
+
+    def get_parameters(self) -> Dict[str, NodeParameter]:
+        """Define parameters based on workflow entry nodes.
+
+        Analyzes the wrapped workflow to determine required inputs:
+        1. Finds entry nodes (no incoming connections)
+        2. Aggregates their parameters
+        3. Adds generic 'inputs' parameter for overrides
+
+        Returns:
+            Dictionary of parameters derived from workflow structure
+        """
+        if not self._workflow:
+            # Default parameters if workflow not loaded yet
+            return {
+                "inputs": NodeParameter(
+                    name="inputs",
+                    type=dict,
+                    required=False,
+                    default={},
+                    description="Input data for the workflow",
+                )
+            }
+
+        params = {}
+
+        # Find entry nodes (nodes with no incoming edges)
+        entry_nodes = []
+        for node_id in self._workflow.nodes:
+            if self._workflow.graph.in_degree(node_id) == 0:
+                entry_nodes.append(node_id)
+
+        # If custom input mapping provided, use that
+        if self._input_mapping:
+            for param_name, mapping in self._input_mapping.items():
+                params[param_name] = NodeParameter(
+                    name=param_name,
+                    type=mapping.get("type", Any),
+                    required=mapping.get("required", True),
+                    default=mapping.get("default"),
+                    description=mapping.get("description", f"Input for {param_name}"),
+                )
+        else:
+            # Auto-discover from entry nodes
+            for node_id in entry_nodes:
+                node = self._workflow.get_node(node_id)
+                if node:
+                    node_params = node.get_parameters()
+                    for param_name, param_def in node_params.items():
+                        # Create flattened parameter name
+                        full_param_name = f"{node_id}_{param_name}"
+                        params[full_param_name] = NodeParameter(
+                            name=full_param_name,
+                            type=param_def.type,
+                            required=False,  # Make all workflow parameters optional
+                            default=param_def.default,
+                            description=f"{node_id}: {param_def.description}",
+                        )
+
+        # Always include generic inputs parameter
+        params["inputs"] = NodeParameter(
+            name="inputs",
+            type=dict,
+            required=False,
+            default={},
+            description="Additional input overrides for workflow nodes",
+        )
+
+        return params
+
+    def get_output_schema(self) -> Dict[str, NodeParameter]:
+        """Define output schema based on workflow exit nodes.
+
+        Analyzes the wrapped workflow to determine outputs:
+        1. Finds exit nodes (no outgoing connections)
+        2. Aggregates their output schemas
+        3. Includes general 'results' output
+
+        Returns:
+            Dictionary of output parameters from workflow structure
+        """
+        if not self._workflow:
+            return {
+                "results": NodeParameter(
+                    name="results",
+                    type=dict,
+                    required=True,
+                    description="Workflow execution results",
+                )
+            }
+
+        output_schema = {
+            "results": NodeParameter(
+                name="results",
+                type=dict,
+                required=True,
+                description="Complete workflow execution results by node",
+            )
+        }
+
+        # If custom output mapping provided, use that
+        if self._output_mapping:
+            for output_name, mapping in self._output_mapping.items():
+                output_schema[output_name] = NodeParameter(
+                    name=output_name,
+                    type=mapping.get("type", Any),
+                    required=mapping.get("required", False),
+                    description=mapping.get("description", f"Output {output_name}"),
+                )
+        else:
+            # Auto-discover from exit nodes
+            exit_nodes = []
+            for node_id in self._workflow.nodes:
+                if self._workflow.graph.out_degree(node_id) == 0:
+                    exit_nodes.append(node_id)
+
+            for node_id in exit_nodes:
+                node = self._workflow.get_node(node_id)
+                if node and hasattr(node, "get_output_schema"):
+                    try:
+                        node_outputs = node.get_output_schema()
+                        for output_name, output_def in node_outputs.items():
+                            full_output_name = f"{node_id}_{output_name}"
+                            output_schema[full_output_name] = NodeParameter(
+                                name=full_output_name,
+                                type=output_def.type,
+                                required=False,
+                                description=f"{node_id}: {output_def.description}",
+                            )
+                    except Exception:
+                        # Skip nodes that fail to provide output schema
+                        pass
+
+        return output_schema
+
+    def run(self, **kwargs) -> Dict[str, Any]:
+        """Execute the wrapped workflow.
+
+        Executes the inner workflow with proper input mapping:
+        1. Maps node inputs to workflow node inputs
+        2. Executes workflow using LocalRuntime
+        3. Maps workflow outputs to node outputs
+
+        Args:
+            **kwargs: Input parameters for the workflow
+
+        Returns:
+            Dictionary containing:
+            - results: Complete workflow execution results
+            - Mapped outputs from exit nodes
+
+        Raises:
+            NodeExecutionError: If workflow execution fails
+        """
+        if not self._workflow:
+            raise NodeExecutionError("No workflow loaded")
+
+        # Prepare inputs for the workflow
+        workflow_inputs = {}
+
+        # Handle custom input mapping
+        if self._input_mapping:
+            for param_name, mapping in self._input_mapping.items():
+                if param_name in kwargs:
+                    # mapping should specify target node and parameter
+                    target_node = mapping.get("node")
+                    target_param = mapping.get("parameter", param_name)
+                    if target_node:
+                        workflow_inputs.setdefault(target_node, {})[target_param] = (
+                            kwargs[param_name]
+                        )
+        else:
+            # Auto-map inputs based on parameter names
+            for key, value in kwargs.items():
+                if "_" in key and key != "inputs":
+                    # Split node_id and param_name
+                    parts = key.split("_", 1)
+                    if len(parts) == 2:
+                        node_id, param_name = parts
+                        if node_id in self._workflow.nodes:
+                            workflow_inputs.setdefault(node_id, {})[param_name] = value
+
+        # Add any additional inputs
+        if "inputs" in kwargs and isinstance(kwargs["inputs"], dict):
+            for node_id, node_inputs in kwargs["inputs"].items():
+                if node_id in self._workflow.nodes:
+                    workflow_inputs.setdefault(node_id, {}).update(node_inputs)
+
+        try:
+            # Create runtime lazily to avoid circular imports
+            if self._runtime is None:
+                from kailash.runtime.local import LocalRuntime
+
+                self._runtime = LocalRuntime()
+
+            # Execute the workflow
+            self.logger.info(f"Executing wrapped workflow: {self._workflow.name}")
+            results, _ = self._runtime.execute(
+                self._workflow, parameters=workflow_inputs
+            )
+
+            # Process results
+            output = {"results": results}
+
+            # Handle custom output mapping
+            if self._output_mapping:
+                for output_name, mapping in self._output_mapping.items():
+                    source_node = mapping.get("node")
+                    source_output = mapping.get("output", output_name)
+                    if source_node and source_node in results:
+                        node_results = results[source_node]
+                        if (
+                            isinstance(node_results, dict)
+                            and source_output in node_results
+                        ):
+                            output[output_name] = node_results[source_output]
+            else:
+                # Auto-map outputs from exit nodes
+                for node_id in self._workflow.nodes:
+                    if self._workflow.graph.out_degree(node_id) == 0:
+                        if node_id in results:
+                            node_results = results[node_id]
+                            if isinstance(node_results, dict):
+                                for key, value in node_results.items():
+                                    output[f"{node_id}_{key}"] = value
+
+            return output
+
+        except Exception as e:
+            self.logger.error(f"Workflow execution failed: {e}")
+            raise NodeExecutionError(f"Failed to execute wrapped workflow: {e}") from e
+
+    def to_dict(self) -> Dict[str, Any]:
+        """Convert node to dictionary representation.
+
+        Serializes the WorkflowNode including its wrapped workflow
+        for persistence and export.
+
+        Returns:
+            Dictionary containing node configuration and workflow
+        """
+        base_dict = super().to_dict()
+
+        # Add workflow information
+        if self._workflow:
+            base_dict["wrapped_workflow"] = self._workflow.to_dict()
+        elif self._workflow_path:
+            base_dict["workflow_path"] = str(self._workflow_path)
+        elif self._workflow_dict:
+            base_dict["workflow_dict"] = self._workflow_dict
+
+        # Add mappings if present
+        if self._input_mapping:
+            base_dict["input_mapping"] = self._input_mapping
+        if self._output_mapping:
+            base_dict["output_mapping"] = self._output_mapping
+
+        return base_dict
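As a usage sketch for the new node (editorial illustration, not part of the diff): the snippet below wires a WorkflowNode to a workflow file and exercises the input/output mappings defined above. The import path, the YAML path, and the node and parameter names ("reader", "writer", "customer_id", "report") are hypothetical placeholders rather than values taken from the package.

```python
# Illustrative only: substitute your own workflow file and node names.
from kailash.nodes import WorkflowNode  # import path is an assumption

node = WorkflowNode(
    workflow_path="workflows/processor.yaml",  # hypothetical file
    input_mapping={
        "customer_id": {"node": "reader", "parameter": "customer_id", "type": str},
    },
    output_mapping={
        "report": {"node": "writer", "output": "report", "type": dict},
    },
)

outputs = node.run(customer_id="C-123")
print(outputs["results"])     # full per-node results of the inner workflow
print(outputs.get("report"))  # value mapped from the "writer" exit node, if present
```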
@@ -404,7 +404,7 @@ class MCPResource(Node):
        try:
            version_num = float(old_version) + 0.1
            updates["version"] = f"{version_num:.1f}"
-        except:
+        except (ValueError, TypeError):
            import datetime
 
            updates["version"] = datetime.datetime.now().strftime("%Y%m%d_%H%M%S")
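The narrowed handler above only catches version-parse failures instead of swallowing everything. A minimal standalone sketch of the same fallback logic (the function name and inputs are invented for illustration):

```python
import datetime


def bump_or_timestamp(old_version):
    """Hypothetical helper mirroring the fallback above."""
    try:
        # Numeric versions get a 0.1 bump...
        return f"{float(old_version) + 0.1:.1f}"
    except (ValueError, TypeError):
        # ...while unparsable values (e.g. "1.2.3-beta" or None) fall back to a timestamp.
        return datetime.datetime.now().strftime("%Y%m%d_%H%M%S")


print(bump_or_timestamp("1.2"))  # "1.3"
print(bump_or_timestamp(None))   # e.g. "20240101_120000"
```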
@@ -171,11 +171,17 @@ class MCPServer(Node):
        try:
            # Import MCP SDK (graceful fallback if not installed)
            try:
-                from mcp.server import Server
-                from mcp.server.fastmcp import FastMCP
-                from mcp.types import Prompt, Resource, Tool
+                import importlib.util
 
-                mcp_available = True
+                mcp_spec = importlib.util.find_spec("mcp")
+                if mcp_spec is not None:
+                    from mcp.server import Server  # noqa: F401
+                    from mcp.server.fastmcp import FastMCP  # noqa: F401
+                    from mcp.types import Prompt, Resource, Tool  # noqa: F401
+
+                    mcp_available = True
+                else:
+                    mcp_available = False
            except ImportError:
                mcp_available = False
 
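The change above probes for the optional MCP SDK with importlib.util.find_spec before importing it. A self-contained sketch of that optional-dependency pattern:

```python
import importlib.util

# Check whether the package is importable before touching the real import,
# so an absent optional dependency never raises at import time.
if importlib.util.find_spec("mcp") is not None:
    from mcp.types import Tool  # noqa: F401

    mcp_available = True
else:
    mcp_available = False

print(f"MCP SDK available: {mcp_available}")
```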
@@ -261,7 +261,7 @@ class DataTransformer(Node):
                local_vars["result"] = result
 
                # Execute the code block
-                exec(transform_str, safe_globals, local_vars)
+                exec(transform_str, safe_globals, local_vars)  # noqa: S102
 
                # Extract the result from local context
                result = local_vars.get("result", result)
@@ -271,7 +271,7 @@ class DataTransformer(Node):
                # For lambda functions like: "lambda x: x * 2"
                if transform_str.strip().startswith("lambda"):
                    # First, compile the lambda function
-                    lambda_func = eval(transform_str, safe_globals)
+                    lambda_func = eval(transform_str, safe_globals)  # noqa: S307
 
                    # Apply the lambda function based on input data
                    if isinstance(result, list):
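For context, a minimal, self-contained sketch of the lambda branch shown above. The transform string and input list are invented for illustration, and the restricted safe_globals built by DataTransformer is not shown in this hunk, so its contents here are an assumption:

```python
# Compile the user-supplied lambda against restricted globals, then apply it
# element-wise when the current result is a list.
safe_globals = {"__builtins__": {}}
transform_str = "lambda x: x * 2"

lambda_func = eval(transform_str, safe_globals)  # noqa: S307
result = [1, 2, 3]
if isinstance(result, list):
    result = [lambda_func(item) for item in result]
print(result)  # [2, 4, 6]
```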
@@ -324,7 +324,9 @@ class DataTransformer(Node):
                else:
                    local_vars = input_data.copy()
                    local_vars["result"] = result
-                    result = eval(transform_str, safe_globals, local_vars)
+                    result = eval(
+                        transform_str, safe_globals, local_vars
+                    )  # noqa: S307
 
            except Exception as e:
                tb = traceback.format_exc()
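The reformatted eval above evaluates an expression against the node's safe_globals and local_vars. A hedged, standalone approximation of that pattern (the exact contents of safe_globals are an assumption, since this hunk does not show how DataTransformer builds it):

```python
# Expression transforms see a restricted globals dict plus the current data as
# local variables; the expression's value becomes the new result.
safe_globals = {"__builtins__": {}, "len": len, "sum": sum}
local_vars = {"result": [1, 2, 3]}

result = eval("sum(result) / len(result)", safe_globals, local_vars)  # noqa: S307
print(result)  # 2.0
```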
kailash/runtime/docker.py CHANGED
@@ -404,6 +404,8 @@ ENTRYPOINT ["/app/entrypoint.py"]
            result = subprocess.run(
                cmd, check=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE
            )
+            # Result could be used for logging output if needed
+            _ = result
 
            logger.info(f"Container for node {self.node_id} ran successfully")
            return True
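If the captured result were ever used as the new comment suggests, one hedged possibility (the command and logger name below are placeholders, not the runtime's actual values) would be:

```python
import logging
import subprocess

logger = logging.getLogger(__name__)

result = subprocess.run(
    ["echo", "hello"], check=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE
)
# CompletedProcess keeps the captured streams as bytes.
logger.debug("container stdout: %s", result.stdout.decode(errors="replace"))
```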
@@ -203,26 +203,6 @@ class TaskRun(BaseModel):
 
        # Check other validation rules as needed
 
-    def to_dict(self) -> Dict[str, Any]:
-        """Convert to dictionary representation."""
-        data = self.model_dump()
-
-        # Convert datetime objects to strings
-        if data.get("started_at"):
-            data["started_at"] = data["started_at"].isoformat()
-        if data.get("ended_at"):
-            data["ended_at"] = data["ended_at"].isoformat()
-        if data.get("completed_at"):
-            data["completed_at"] = data["completed_at"].isoformat()
-        if data.get("created_at"):
-            data["created_at"] = data["created_at"].isoformat()
-
-        # Convert metrics to dict if present
-        if self.metrics:
-            data["metrics"] = self.metrics.to_dict()
-
-        return data
-
    @classmethod
    def from_dict(cls, data: Dict[str, Any]) -> "TaskRun":
        """Create from dictionary representation."""
@@ -320,7 +320,7 @@ class DatabaseStorage(StorageBackend):
                # Try to sanitize it
                try:
                    data["input_data"] = {"value": data["input_data"]}
-                except:
+                except Exception:
                    data["input_data"] = None
        if data.get("output_data"):
            try:
@@ -331,7 +331,7 @@ class DatabaseStorage(StorageBackend):
                # Try to sanitize it
                try:
                    data["output_data"] = {"value": data["output_data"]}
-                except:
+                except Exception:
                    data["output_data"] = None
 
        task = TaskRun.model_validate(data)
@@ -405,7 +405,7 @@ class DatabaseStorage(StorageBackend):
                # Try to sanitize it by wrapping in quotes if needed
                try:
                    data["input_data"] = {"value": data["input_data"]}
-                except:
+                except Exception:
                    data["input_data"] = None
        if data.get("output_data"):
            try:
@@ -419,7 +419,7 @@ class DatabaseStorage(StorageBackend):
                # Try to sanitize it
                try:
                    data["output_data"] = {"value": data["output_data"]}
-                except:
+                except Exception:
                    data["output_data"] = None
 
            tasks.append(TaskRun.model_validate(data))
@@ -270,7 +270,6 @@ class FileSystemStorage(StorageBackend):
        run = WorkflowRun.model_validate(run_data)
 
        # Generate new run ID to avoid conflicts
-        original_run_id = run.run_id
        run.run_id = str(uuid4())
 
        # Save run
@@ -136,7 +136,7 @@ class PerformanceVisualizer:
 
        # Calculate timeline bounds
        min_time = min(t.started_at for t in tasks_with_times)
-        max_time = max(t.ended_at for t in tasks_with_times)
+        max(t.ended_at for t in tasks_with_times)
 
        # Create timeline bars
        y_positions = []
@@ -266,8 +266,8 @@ class PerformanceVisualizer:
        )
 
        # Memory usage chart
-        bars2 = ax2.bar(x, memory_usage, color="lightgreen", edgecolor="black")
-        bars2_delta = ax2.bar(
+        ax2.bar(x, memory_usage, color="lightgreen", edgecolor="black")
+        ax2.bar(
            x,
            memory_delta,
            bottom=memory_usage,
@@ -482,7 +482,7 @@ class PerformanceVisualizer:
        width = 0.35
 
        # I/O bytes chart
-        bars1 = ax1.bar(
+        ax1.bar(
            x - width / 2,
            io_read_bytes,
            width,
@@ -490,7 +490,7 @@ class PerformanceVisualizer:
            color="lightblue",
            edgecolor="black",
        )
-        bars2 = ax1.bar(
+        ax1.bar(
            x + width / 2,
            io_write_bytes,
            width,
@@ -507,7 +507,7 @@ class PerformanceVisualizer:
        ax1.grid(True, axis="y", alpha=0.3)
 
        # I/O operations count chart
-        bars3 = ax2.bar(
+        ax2.bar(
            x - width / 2,
            io_read_count,
            width,
@@ -515,7 +515,7 @@ class PerformanceVisualizer:
            color="lightblue",
            edgecolor="black",
        )
-        bars4 = ax2.bar(
+        ax2.bar(
            x + width / 2,
            io_write_count,
            width,
@@ -639,7 +639,7 @@ class WorkflowPerformanceReporter:
 
        summary = analysis["summary"]
        bottlenecks = analysis["bottlenecks"]
-        resource_analysis = analysis["resource_analysis"]
+        analysis["resource_analysis"]
        error_analysis = analysis["error_analysis"]
 
        # Efficiency insights
kailash/workflow/graph.py CHANGED
@@ -10,11 +10,11 @@ import networkx as nx
 import yaml
 from pydantic import BaseModel, Field, ValidationError
 
-from kailash.nodes import Node
+from kailash.nodes.base import Node
 
 try:
     # For normal runtime, use the actual registry
-    from kailash.nodes import NodeRegistry
+    from kailash.nodes.base import NodeRegistry
 except ImportError:
     # For tests, use the mock registry
     from kailash.workflow.mock_registry import MockRegistry as NodeRegistry
@@ -815,8 +815,8 @@ class Workflow:
 
        # Try to find another key with a BaseModel
        for key, value in last_node_results.items():
-            if isinstance(value, BaseModel) and type(value) == type(
-                state_model
+            if isinstance(value, BaseModel) and isinstance(
+                value, type(state_model)
            ):
                return value, results
 
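The graph.py change above replaces an exact-type comparison with isinstance, which also accepts subclasses of the state model's class. A small illustration of the behavioural difference:

```python
class Base:
    pass


class Child(Base):
    pass


state_model = Base()
value = Child()

# Exact type equality rejects subclasses; isinstance accepts them.
print(type(value) == type(state_model))      # False
print(isinstance(value, type(state_model)))  # True
```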
@@ -43,7 +43,7 @@ NODE_TYPES = [
 for node_type in NODE_TYPES:
     try:
         NodeRegistry._registry[node_type] = MockNode
-    except:
+    except Exception:
         pass
 