runnable-0.37.0-py3-none-any.whl → runnable-0.39.0-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
extensions/nodes/conditional.py CHANGED
@@ -7,7 +7,7 @@ from pydantic import Field, field_serializer, field_validator
 from runnable import console, defaults
 from runnable.datastore import Parameter
 from runnable.graph import Graph, create_graph
-from runnable.nodes import CompositeNode, MapVariableType
+from runnable.nodes import CompositeNode, MapVariableType, NodeInD3
 
 logger = logging.getLogger(defaults.LOGGER_NAME)
 
@@ -241,3 +241,21 @@ class ConditionalNode(CompositeNode):
         step_log.status = defaults.FAIL
 
         self._context.run_log_store.add_step_log(step_log, self._context.run_id)
+
+    def to_d3_node(self) -> NodeInD3:
+        def get_display_string() -> str:
+            display = f"match {self.parameter}:\n"
+            for case in self.branches.keys():
+                display += f'    case "{case}":\n        ...\n'
+            if self.default:
+                display += "    case _:\n        ...\n"
+            return display
+
+        return NodeInD3(
+            id=self.internal_name,
+            label="conditional",
+            metadata={
+                "conditioned on": self.parameter,
+                "display": get_display_string(),
+            },
+        )
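The display string assembled by get_display_string() above renders the conditional as a Python match statement. For a hypothetical node conditioned on a parameter named mode, with branches "train" and "score" plus a default branch, it would read (assuming the f-strings carry the usual 4-space indentation, which the diff rendering has collapsed):

    match mode:
        case "train":
            ...
        case "score":
            ...
        case _:
            ...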
extensions/nodes/fail.py CHANGED
@@ -6,7 +6,7 @@ from pydantic import Field
 from runnable import datastore, defaults
 from runnable.datastore import StepLog
 from runnable.defaults import MapVariableType
-from runnable.nodes import TerminalNode
+from runnable.nodes import NodeInD3, TerminalNode
 
 
 class FailNode(TerminalNode):
@@ -70,3 +70,9 @@ class FailNode(TerminalNode):
         step_log.attempts.append(attempt_log)
 
         return step_log
+
+    def to_d3_node(self) -> NodeInD3:
+        return NodeInD3(
+            id=self.internal_name,
+            label="fail",
+        )
extensions/nodes/map.py CHANGED
@@ -18,7 +18,7 @@ from runnable.datastore import (
 )
 from runnable.defaults import MapVariableType
 from runnable.graph import Graph, create_graph
-from runnable.nodes import CompositeNode
+from runnable.nodes import CompositeNode, NodeInD3
 
 logger = logging.getLogger(defaults.LOGGER_NAME)
 
@@ -348,3 +348,18 @@ class MapNode(CompositeNode):
         self._context.run_log_store.set_parameters(
             parameters=params, run_id=self._context.run_id
         )
+
+    def to_d3_node(self) -> NodeInD3:
+        return NodeInD3(
+            id=self.internal_name,
+            label="map",
+            metadata={
+                "node_type": "map",
+                "iterate_on": self.iterate_on,  # Parameter name containing the iterable
+                "iterate_as": self.iterate_as,  # Name used for each iteration
+                "map_branch_id": self.internal_name
+                + "."
+                + defaults.MAP_PLACEHOLDER,  # The branch identifier pattern
+                "is_composite": True,  # Flag indicating this is a composite node
+            },
+        )
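Illustrative only: a hypothetical map step named process, iterating over a parameter chunks as chunk, would serialize through model_dump(exclude_none=True) (as the new get_visualization_data in runnable/graph.py below does) to roughly:

    {
        "id": "process",
        "label": "map",
        "metadata": {
            "node_type": "map",
            "iterate_on": "chunks",
            "iterate_as": "chunk",
            "map_branch_id": "process." + defaults.MAP_PLACEHOLDER,
            "is_composite": True,
        },
    }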
extensions/nodes/parallel.py CHANGED
@@ -6,7 +6,7 @@ from pydantic import Field, field_serializer
 from runnable import defaults
 from runnable.defaults import MapVariableType
 from runnable.graph import Graph, create_graph
-from runnable.nodes import CompositeNode
+from runnable.nodes import CompositeNode, NodeInD3
 
 
 class ParallelNode(CompositeNode):
@@ -157,3 +157,9 @@ class ParallelNode(CompositeNode):
         step_log.status = defaults.FAIL
 
         self._context.run_log_store.add_step_log(step_log, self._context.run_id)
+
+    def to_d3_node(self) -> NodeInD3:
+        return NodeInD3(
+            id=self.internal_name,
+            label="parallel",
+        )
extensions/nodes/stub.py CHANGED
@@ -7,7 +7,7 @@ from pydantic import ConfigDict, Field
 from runnable import datastore, defaults
 from runnable.datastore import StepLog
 from runnable.defaults import MapVariableType
-from runnable.nodes import ExecutableNode
+from runnable.nodes import ExecutableNode, NodeInD3
 
 logger = logging.getLogger(defaults.LOGGER_NAME)
 
@@ -87,3 +87,9 @@ class StubNode(ExecutableNode):
         step_log.attempts.append(attempt_log)
 
         return step_log
+
+    def to_d3_node(self) -> NodeInD3:
+        return NodeInD3(
+            id=self.internal_name,
+            label="stub",
+        )
extensions/nodes/success.py CHANGED
@@ -6,7 +6,7 @@ from pydantic import Field
 from runnable import datastore, defaults
 from runnable.datastore import StepLog
 from runnable.defaults import MapVariableType
-from runnable.nodes import TerminalNode
+from runnable.nodes import NodeInD3, TerminalNode
 
 
 class SuccessNode(TerminalNode):
@@ -70,3 +70,9 @@ class SuccessNode(TerminalNode):
         step_log.attempts.append(attempt_log)
 
         return step_log
+
+    def to_d3_node(self) -> NodeInD3:
+        return NodeInD3(
+            id=self.internal_name,
+            label="success",
+        )
extensions/nodes/task.py CHANGED
@@ -7,7 +7,7 @@ from pydantic import ConfigDict, Field
 from runnable import datastore, defaults
 from runnable.datastore import StepLog
 from runnable.defaults import MapVariableType
-from runnable.nodes import ExecutableNode
+from runnable.nodes import ExecutableNode, NodeInD3
 from runnable.tasks import BaseTaskType, create_task
 
 logger = logging.getLogger(defaults.LOGGER_NAME)
@@ -90,3 +90,17 @@ class TaskNode(ExecutableNode):
         step_log.attempts.append(attempt_log)
 
         return step_log
+
+    def to_d3_node(self) -> NodeInD3:
+        """
+        Convert the task node to a D3 node representation.
+
+        Returns:
+            NodeInD3: The D3 node representation of the task node.
+        """
+        return NodeInD3(
+            id=self.internal_name,
+            label="task",
+            task_type=self.executable.task_type,
+            metadata=self.executable.get_d3_metadata(),
+        )
runnable/cli.py CHANGED
@@ -274,5 +274,57 @@ def execute_job(
     )
 
 
+@app.command()
+def ui(
+    host: Annotated[
+        str,
+        typer.Option(
+            "--host",
+            "-h",
+            help="The host to bind the server to",
+        ),
+    ] = "127.0.0.1",
+    port: Annotated[
+        int,
+        typer.Option(
+            "--port",
+            "-p",
+            help="The port to bind the server to",
+        ),
+    ] = 8000,
+    reload: Annotated[
+        bool,
+        typer.Option(
+            "--reload",
+            help="Enable auto-reload for development",
+        ),
+    ] = False,
+):
+    """
+    Start the web UI for pipeline visualization.
+
+    This command starts a FastAPI web server that provides a user interface
+    for visualizing and exploring runnable pipelines.
+    """
+    try:
+        import uvicorn
+
+        from visualization.main import app as web_app
+    except ImportError:
+        typer.echo(
+            "UI dependencies not installed. Install with: pip install runnable[ui]",
+            err=True,
+        )
+        raise typer.Exit(1)
+
+    typer.echo(f"Starting web UI at http://{host}:{port}")
+    uvicorn.run(
+        web_app,
+        host=host,
+        port=port,
+        reload=reload,
+    )
+
+
 if __name__ == "__main__":
     app()
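Assuming the wheel's console entry point exposes a runnable executable (entry_points.txt is unchanged in this release), the new subcommand would be invoked as:

    runnable ui --host 127.0.0.1 --port 8000 --reload

One caveat worth noting: the command imports visualization.main, but no visualization/ module appears in the RECORD hunks below, so the ImportError fallback ("pip install runnable[ui]") may be the path most installs hit.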
runnable/graph.py CHANGED
@@ -329,7 +329,7 @@ def create_graph(dag_config: Dict[str, Any], internal_branch_name: str = "") ->
     Returns:
         Graph: The created graph object
     """
-    description: str = dag_config.get("description", None)
+    description: str | None = dag_config.get("description", None)
     start_at: str = cast(
         str, dag_config.get("start_at")
     )  # Let the start_at be relative to the graph
@@ -499,3 +499,274 @@ def search_branch_by_internal_name(dag: Graph, internal_name: str):
             return current_branch
 
     raise exceptions.BranchNotFoundError(internal_name)
+
+
+def get_visualization_data(graph: Graph) -> Dict[str, Any]:
+    """
+    Convert the graph into a D3 visualization friendly format with nodes and links.
+    Handles composite nodes (parallel, map, conditional) by recursively processing their embedded graphs.
+
+    Args:
+        graph: The Graph object to convert
+
+    Returns:
+        Dict with two keys:
+        - nodes: List of node objects with id, type, name, and alias
+        - links: List of edge objects with source and target node ids
+    """
+    import rich.console
+
+    from extensions.nodes.conditional import ConditionalNode
+    from extensions.nodes.map import MapNode
+    from extensions.nodes.parallel import ParallelNode
+    from runnable.nodes import ExecutableNode
+
+    rich_print = rich.console.Console().print
+
+    rich_print(graph)
+
+    nodes = []
+    links = []
+    processed_nodes = set()
+
+    def process_node(
+        node: BaseNode,
+        parent_id: Optional[str] = None,
+        current_graph: Graph = graph,
+        map_node_id: Optional[str] = None,
+        conditional_node_id: Optional[str] = None,
+    ) -> str:
+        node_id = f"{node.internal_name}"
+        node_alias = node.name  # Alias based on the node's name
+
+        if node_id not in processed_nodes:
+            node_data = node.to_d3_node().model_dump(exclude_none=True)
+            node_data["alias"] = node_alias  # Add alias to the node data
+            node_data["display_name"] = node_alias  # Use alias as the display name
+
+            # Add map or parallel related metadata if this node is part of a map branch or parallel branch
+            if map_node_id:
+                if "metadata" not in node_data:
+                    node_data["metadata"] = {}
+
+                # Mark this node as being part of a map branch
+                node_data["metadata"]["belongs_to_node"] = map_node_id
+
+                # If this is the map node itself, add a special attribute
+                if node_id == map_node_id:
+                    node_data["metadata"]["is_map_root"] = True
+
+            # Add conditional related metadata if this node is part of a conditional branch
+            if conditional_node_id:
+                if "metadata" not in node_data:
+                    node_data["metadata"] = {}
+
+                # Mark this node as being part of a conditional branch
+                node_data["metadata"]["belongs_to_node"] = conditional_node_id
+
+                # If this is the conditional node itself, add a special attribute
+                if node_id == conditional_node_id:
+                    node_data["metadata"]["is_conditional_root"] = True
+
+            # Mark parallel nodes with special metadata
+            if isinstance(node, ParallelNode):
+                if "metadata" not in node_data:
+                    node_data["metadata"] = {}
+
+                # Add parallel node type to metadata
+                node_data["metadata"]["node_type"] = "parallel"
+                node_data["metadata"]["parallel_branch_id"] = node_id
+
+            # Mark conditional nodes with special metadata
+            if isinstance(node, ConditionalNode):
+                if "metadata" not in node_data:
+                    node_data["metadata"] = {}
+
+                # Add conditional node type to metadata
+                node_data["metadata"]["node_type"] = "conditional"
+                node_data["metadata"]["conditional_branch_id"] = node_id
+
+            nodes.append(node_data)
+            processed_nodes.add(node_id)
+
+            # Add link from parent if it exists
+            if parent_id:
+                links.append({"source": parent_id, "target": node_id})
+
+            # Handle composite nodes with embedded graphs
+            if isinstance(node, (ParallelNode, MapNode, ConditionalNode)):
+                if isinstance(node, ParallelNode):
+                    # Process each parallel branch
+                    for _, branch in node.branches.items():
+                        branch_start = branch.get_node_by_name(branch.start_at)
+                        process_node(
+                            branch_start,
+                            node_id,
+                            branch,
+                            map_node_id=node_id,
+                            conditional_node_id=conditional_node_id,
+                        )
+
+                    # Handle next node connection after parallel branches complete
+                    if hasattr(node, "next_node") and node.next_node:
+                        try:
+                            next_node = current_graph.get_node_by_name(node.next_node)
+                            next_id = process_node(
+                                next_node,
+                                None,
+                                current_graph=current_graph,
+                                map_node_id=map_node_id,
+                                conditional_node_id=conditional_node_id,
+                            )
+                            links.append(
+                                {
+                                    "source": node_id,
+                                    "target": next_id,
+                                    "type": "success",
+                                }
+                            )
+                        except exceptions.NodeNotFoundError as e:
+                            rich_print(
+                                f"Warning: Next node '{node.next_node}' not found for parallel node '{node.name}': {e}"
+                            )
+
+                elif isinstance(node, MapNode):
+                    # Process map branch
+                    branch_start = node.branch.get_node_by_name(node.branch.start_at)
+                    # Process the branch with additional context about the map node
+                    process_node(
+                        branch_start,
+                        node_id,
+                        node.branch,
+                        map_node_id=node_id,
+                        conditional_node_id=conditional_node_id,
+                    )
+
+                elif isinstance(node, ConditionalNode):
+                    # Process each conditional branch
+                    for _, branch in node.branches.items():
+                        branch_start = branch.get_node_by_name(branch.start_at)
+                        process_node(
+                            branch_start,
+                            node_id,
+                            branch,
+                            map_node_id=map_node_id,
+                            conditional_node_id=node_id,
+                        )
+                    if node.default:
+                        default_start = node.default.get_node_by_name(
+                            node.default.start_at
+                        )
+                        process_node(
+                            default_start,
+                            node_id,
+                            node.default,
+                            map_node_id=map_node_id,
+                            conditional_node_id=node_id,
+                        )
+
+                    # Handle next node connection after conditional branches complete
+                    if hasattr(node, "next_node") and node.next_node:
+                        try:
+                            next_node = current_graph.get_node_by_name(node.next_node)
+                            next_id = process_node(
+                                next_node,
+                                None,
+                                current_graph=current_graph,
+                                map_node_id=map_node_id,
+                                conditional_node_id=conditional_node_id,
+                            )
+                            links.append(
+                                {
+                                    "source": node_id,
+                                    "target": next_id,
+                                    "type": "success",
+                                }
+                            )
+                        except exceptions.NodeNotFoundError as e:
+                            rich_print(
+                                f"Warning: Next node '{node.next_node}' not found for conditional node '{node.name}': {e}"
+                            )
+
+            # Add links to next and on_failure nodes if they exist
+            if isinstance(node, ExecutableNode):
+                # Handle normal "next" links (success path)
+                if hasattr(node, "next_node") and node.next_node:
+                    try:
+                        next_node = current_graph.get_node_by_name(node.next_node)
+                        next_id = process_node(
+                            next_node,
+                            None,
+                            current_graph=current_graph,
+                            map_node_id=map_node_id,
+                            conditional_node_id=conditional_node_id,
+                        )
+                        links.append(
+                            {"source": node_id, "target": next_id, "type": "success"}
+                        )
+                    except exceptions.NodeNotFoundError as e:
+                        rich_print(
+                            f"Warning: Next node '{node.next_node}' not found for node '{node.name}': {e}"
+                        )
+
+                # Handle on_failure links (failure path)
+                if hasattr(node, "on_failure") and node.on_failure:
+                    try:
+                        failure_node = current_graph.get_node_by_name(node.on_failure)
+                        failure_id = process_node(
+                            failure_node,
+                            None,
+                            current_graph=current_graph,
+                            map_node_id=map_node_id,
+                            conditional_node_id=conditional_node_id,
+                        )
+                        links.append(
+                            {"source": node_id, "target": failure_id, "type": "failure"}
+                        )
+                    except exceptions.NodeNotFoundError as e:
+                        rich_print(
+                            f"Warning: On-failure node '{node.on_failure}' not found for node '{node.name}': {e}"
+                        )
+
+                # For backward compatibility, also process all neighbors
+                # This handles cases where node might have other connection types
+                next_nodes = node._get_neighbors()
+                for next_node_name in next_nodes:
+                    # Skip nodes we've already handled explicitly
+                    if (
+                        hasattr(node, "next_node") and node.next_node == next_node_name
+                    ) or (
+                        hasattr(node, "on_failure")
+                        and node.on_failure == next_node_name
+                    ):
+                        continue
+
+                    try:
+                        next_node = current_graph.get_node_by_name(next_node_name)
+                        next_id = process_node(
+                            next_node,
+                            None,
+                            current_graph=current_graph,
+                            map_node_id=map_node_id,
+                            conditional_node_id=conditional_node_id,
+                        )
+                        links.append(
+                            {"source": node_id, "target": next_id, "type": "default"}
+                        )
+                    except exceptions.NodeNotFoundError as e:
+                        rich_print(
+                            f"Warning: Neighbor node '{next_node_name}' not found for node '{node.name}': {e}"
+                        )
+
+        return node_id
+
+    # Start processing from the start node
+    start_node = graph.get_node_by_name(graph.start_at)
+    try:
+        process_node(
+            start_node, None, graph, map_node_id=None, conditional_node_id=None
+        )
+    except (exceptions.NodeNotFoundError, AttributeError, KeyError) as e:
+        rich_print(f"Error processing node {start_node}: {e}")
+
+    return {"nodes": nodes, "links": links}
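A minimal usage sketch, assuming the dag_config shape create_graph already accepts (a start_at key plus a steps mapping; the step names and this exact config are hypothetical):

    from runnable.graph import create_graph, get_visualization_data

    dag = create_graph(
        {
            "start_at": "hello",
            "steps": {
                "hello": {"type": "stub", "next": "success"},
                "success": {"type": "success"},
                "fail": {"type": "fail"},
            },
        }
    )
    payload = get_visualization_data(dag)  # {"nodes": [...], "links": [...]}

Note the function prints the graph and any warnings through a rich console as a side effect, so it is chattier than a pure serializer.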
runnable/nodes.py CHANGED
@@ -15,6 +15,13 @@ logger = logging.getLogger(defaults.LOGGER_NAME)
 # --8<-- [start:docs]
 
 
+class NodeInD3(BaseModel):
+    id: str
+    label: str
+    task_type: Optional[str] = None
+    metadata: Optional[Dict[str, Any]] = None
+
+
 class BaseNode(ABC, BaseModel):
     """
     Base class with common functionality provided for a Node of a graph.
@@ -369,6 +376,15 @@ class BaseNode(ABC, BaseModel):
             Dict[str, Any]: _description_
         """
 
+    @abstractmethod
+    def to_d3_node(self) -> NodeInD3:
+        """
+        Convert the node to a D3 node representation.
+
+        Returns:
+            NodeInD3: The D3 node representation of the current node.
+        """
+
 
 # --8<-- [end:docs]
 class TraversalNode(BaseNode):
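A minimal sketch of the new model in isolation; the field values here are made up:

    from runnable.nodes import NodeInD3

    node = NodeInD3(id="pipeline.step1", label="task", task_type="python")
    print(node.model_dump(exclude_none=True))
    # {'id': 'pipeline.step1', 'label': 'task', 'task_type': 'python'}

model_dump(exclude_none=True) is exactly how get_visualization_data in runnable/graph.py consumes these nodes, so metadata-free nodes stay compact.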
runnable/parameters.py CHANGED
@@ -1,10 +1,10 @@
+import argparse
 import inspect
 import json
 import logging
 import os
 from typing import Any, Dict, Type
 
-import pydantic
 from pydantic import BaseModel, ConfigDict
 from typing_extensions import Callable
 
@@ -48,6 +48,25 @@ def get_user_set_parameters(remove: bool = False) -> Dict[str, JsonParameter]:
     return parameters
 
 
+def return_json_parameters(params: Dict[str, Any]) -> Dict[str, Any]:
+    """
+    Returns the parameters as a JSON serializable dictionary.
+
+    Args:
+        params (dict): The parameters to serialize.
+
+    Returns:
+        dict: The JSON serializable dictionary.
+    """
+    return_params = {}
+    for key, value in params.items():
+        if isinstance(value, ObjectParameter):
+            continue
+
+        return_params[key] = value.get_value()
+    return return_params
+
+
 def filter_arguments_for_func(
     func: Callable[..., Any],
     params: Dict[str, Any],
@@ -55,8 +74,14 @@ 
 ) -> Dict[str, Any]:
     """
     Inspects the function to be called as part of the pipeline to find the arguments of the function.
-    Matches the function arguments to the parameters available either by command line or by up stream steps.
+    Matches the function arguments to the parameters available either by static parameters or by up stream steps.
 
+    The function "func" signature could be:
+    - def my_function(arg1: int, arg2: str, arg3: float):
+    - def my_function(arg1: int, arg2: str, arg3: float, **kwargs):
+      in this case, we would need to send in remaining keyword arguments as a dictionary.
+    - def my_function(arg1: int, arg2: str, arg3: float, args: argparse.Namespace):
+      In this case, we need to send the rest of the parameters as attributes of the args object.
 
     Args:
         func (Callable): The function to inspect
@@ -72,63 +97,109 @@ 
         params[key] = JsonParameter(kind="json", value=v)
 
     bound_args = {}
-    unassigned_params = set(params.keys())
-    # Check if VAR_KEYWORD is used, it is we send back everything
-    # If **kwargs is present in the function signature, we send back everything
-    for name, value in function_args.items():
-        if value.kind != inspect.Parameter.VAR_KEYWORD:
-            continue
-        # Found VAR_KEYWORD, we send back everything as found
-        for key, value in params.items():
-            bound_args[key] = params[key].get_value()
+    missing_required_args: list[str] = []
+    var_keyword_param = None
+    namespace_param = None
 
-        return bound_args
-
-    # Lets return what is asked for then!!
+    # First pass: Handle regular parameters and identify special parameters
    for name, value in function_args.items():
         # Ignore any *args
         if value.kind == inspect.Parameter.VAR_POSITIONAL:
             logger.warning(f"Ignoring parameter {name} as it is VAR_POSITIONAL")
             continue
 
-        if name not in params:
-            # No parameter of this name was provided
-            if value.default == inspect.Parameter.empty:
-                # No default value is given in the function signature. error as parameter is required.
-                raise ValueError(
-                    f"Parameter {name} is required for {func.__name__} but not provided"
-                )
-            # default value is given in the function signature, nothing further to do.
+        # Check for **kwargs parameter
+        if value.kind == inspect.Parameter.VAR_KEYWORD:
+            var_keyword_param = name
             continue
 
-        param_value = params[name]
-
-        if type(value.annotation) in [
-            BaseModel,
-            pydantic._internal._model_construction.ModelMetaclass,
-        ] and not isinstance(param_value, ObjectParameter):
-            # Even if the annotation is a pydantic model, it can be passed as an object parameter
-            # We try to cast it as a pydantic model if asked
-            named_param = params[name].get_value()
-
-            if not isinstance(named_param, dict):
-                # A case where the parameter is a one attribute model
-                named_param = {name: named_param}
-
-            bound_model = bind_args_for_pydantic_model(named_param, value.annotation)
-            bound_args[name] = bound_model
+        # Check for argparse.Namespace parameter
+        if value.annotation == argparse.Namespace:
+            namespace_param = name
+            continue
 
-        elif value.annotation in [str, int, float, bool]:
-            # Cast it if its a primitive type. Ensure the type matches the annotation.
-            bound_args[name] = value.annotation(params[name].get_value())
+        # Handle regular parameters
+        if name not in params:
+            if value.default != inspect.Parameter.empty:
+                # Default value is given in the function signature, we can use it
+                bound_args[name] = value.default
+            else:
+                # This is a required parameter that's missing
+                missing_required_args.append(name)
         else:
-            bound_args[name] = params[name].get_value()
-
-        unassigned_params.remove(name)
-
-    params = {
-        key: params[key] for key in unassigned_params
-    }  # remove keys from params if they are assigned
+            # We have a parameter of this name, lets bind it
+            param_value = params[name]
+
+            if (
+                inspect.isclass(value.annotation)
+                and issubclass(value.annotation, BaseModel)
+            ) and not isinstance(param_value, ObjectParameter):
+                # Even if the annotation is a pydantic model, it can be passed as an object parameter
+                # We try to cast it as a pydantic model if asked
+                named_param = params[name].get_value()
+
+                if not isinstance(named_param, dict):
+                    # A case where the parameter is a one attribute model
+                    named_param = {name: named_param}
+
+                bound_model = bind_args_for_pydantic_model(
+                    named_param, value.annotation
+                )
+                bound_args[name] = bound_model
+
+            elif value.annotation in [str, int, float, bool] and callable(
+                value.annotation
+            ):
+                # Cast it if its a primitive type. Ensure the type matches the annotation.
+                try:
+                    bound_args[name] = value.annotation(params[name].get_value())
+                except (ValueError, TypeError) as e:
+                    raise ValueError(
+                        f"Cannot cast parameter '{name}' to {value.annotation.__name__}: {e}"
+                    )
+            else:
+                # We do not know type of parameter, we send the value as found
+                bound_args[name] = params[name].get_value()
+
+    # Find extra parameters (parameters in params but not consumed by regular function parameters)
+    consumed_param_names = set(bound_args.keys()) | set(missing_required_args)
+    extra_params = {k: v for k, v in params.items() if k not in consumed_param_names}
+
+    # Second pass: Handle **kwargs and argparse.Namespace parameters
+    if var_keyword_param is not None:
+        # Function accepts **kwargs - add all extra parameters directly to bound_args
+        for param_name, param_value in extra_params.items():
+            bound_args[param_name] = param_value.get_value()
+    elif namespace_param is not None:
+        # Function accepts argparse.Namespace - create namespace with extra parameters
+        args_namespace = argparse.Namespace()
+        for param_name, param_value in extra_params.items():
+            setattr(args_namespace, param_name, param_value.get_value())
+        bound_args[namespace_param] = args_namespace
+    elif extra_params:
+        # Function doesn't accept **kwargs or namespace, but we have extra parameters
+        # This should only be an error if we also have missing required parameters
+        # or if the function truly can't handle the extra parameters
+        if missing_required_args:
+            # We have both missing required and extra parameters - this is an error
+            raise ValueError(
+                f"Function {func.__name__} has parameters {missing_required_args} that are not present in the parameters"
+            )
+        # If we only have extra parameters and no missing required ones, we just ignore the extras
+        # This allows for more flexible parameter passing
+
+    # Check for missing required parameters
+    if missing_required_args:
+        if var_keyword_param is None and namespace_param is None:
+            # No way to handle missing parameters
+            raise ValueError(
+                f"Function {func.__name__} has parameters {missing_required_args} that are not present in the parameters"
+            )
+        # If we have **kwargs or namespace, missing parameters might be handled there
+        # But if they're truly required (no default), we should still error
+        raise ValueError(
+            f"Function {func.__name__} has parameters {missing_required_args} that are not present in the parameters"
        )
 
     return bound_args
 
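A hedged sketch of how the reworked binding behaves for the three signature styles the docstring now lists. JsonParameter is assumed importable from runnable.datastore, consistent with its use elsewhere in this diff; the functions and values are hypothetical:

    import argparse

    from runnable.datastore import JsonParameter
    from runnable.parameters import filter_arguments_for_func

    params = {
        "x": JsonParameter(kind="json", value=1),
        "y": JsonParameter(kind="json", value="two"),
    }

    def plain(x: int): ...
    def catch_all(x: int, **kwargs): ...
    def namespaced(x: int, args: argparse.Namespace): ...

    filter_arguments_for_func(plain, params)      # {'x': 1}; the extra 'y' is ignored
    filter_arguments_for_func(catch_all, params)  # {'x': 1, 'y': 'two'}
    filter_arguments_for_func(namespaced, params)["args"].y  # 'two'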
runnable/tasks.py CHANGED
@@ -209,6 +209,9 @@ class BaseTaskType(BaseModel):
             parameters=diff_parameters, run_id=self._context.run_id
         )
 
+    def get_d3_metadata(self) -> dict[str, str]:
+        raise NotImplementedError
+
 
 def task_return_to_parameter(task_return: TaskReturns, value: Any) -> Parameter:
     # implicit support for pydantic models
@@ -287,6 +290,72 @@ class PythonTaskType(BaseTaskType):  # pylint: disable=too-few-public-methods
     task_type: str = Field(default="python", serialization_alias="command_type")
     command: str
 
+    def get_d3_metadata(self) -> Dict[str, str]:
+        module, func = utils.get_module_and_attr_names(self.command)
+
+        # Import inspect module to get function signature
+        import inspect
+
+        def format_type_annotation(annotation):
+            """Format type annotation in a more readable way"""
+            if annotation == inspect._empty:
+                return "Any"
+            elif hasattr(annotation, "__name__"):
+                return annotation.__name__
+            elif hasattr(annotation, "__origin__"):
+                # Handle typing types like List, Dict, etc.
+                origin = (
+                    annotation.__origin__.__name__
+                    if hasattr(annotation.__origin__, "__name__")
+                    else str(annotation.__origin__)
+                )
+                args = ", ".join(
+                    format_type_annotation(arg) for arg in annotation.__args__
+                )
+                return f"{origin}[{args}]"
+            else:
+                # Fall back to string representation without 'typing.'
+                return str(annotation).replace("typing.", "")
+
+        # Import the module and get the function
+        sys.path.insert(0, os.getcwd())
+        try:
+            imported_module = importlib.import_module(module)
+            f = getattr(imported_module, func)
+
+            # Get function signature
+            sig = inspect.signature(f)
+
+            # Format parameters with type annotations
+            params_list = []
+            for param_name, param in sig.parameters.items():
+                type_annotation = format_type_annotation(param.annotation)
+                params_list.append(f"{param_name}: {type_annotation}")
+
+            params_str = ", ".join(params_list)
+
+            # Format returns based on self.returns or use "None" if no returns specified
+            if self.returns:
+                returns_str = ", ".join([f"{r.name}({r.kind})" for r in self.returns])
+            else:
+                returns_str = "None"
+
+            # Format function signature
+            signature = f"def {func}({params_str}) -> {returns_str}:"
+
+            return {
+                "module": module,
+                "function": func,
+                "signature": signature,
+            }
+        except Exception as e:
+            logger.warning(f"Could not inspect function {self.command}: {str(e)}")
+            return {
+                "module": module,
+                "function": func,
+                "signature": f"def {func}(...) -> ...",
+            }
+
     def execute_command(
         self,
         map_variable: MapVariableType = None,
@@ -440,6 +509,11 @@ class NotebookTaskType(BaseTaskType):
     command: str
     optional_ploomber_args: dict = {}
 
+    def get_d3_metadata(self) -> Dict[str, str]:
+        return {
+            "notebook": self.command,
+        }
+
     @field_validator("command")
     @classmethod
     def notebook_should_end_with_ipynb(cls, command: str) -> str:
@@ -640,6 +714,11 @@ class ShellTaskType(BaseTaskType):
     task_type: str = Field(default="shell", serialization_alias="command_type")
     command: str
 
+    def get_d3_metadata(self) -> Dict[str, str]:
+        return {
+            "command": self.command[:50],
+        }
+
     @field_validator("returns")
     @classmethod
     def returns_should_be_json(cls, returns: List[TaskReturns]):
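Illustrative only: for a python task whose command points at a hypothetical examples.functions.add defining def add(a: int, b: int): ..., with no declared returns, PythonTaskType.get_d3_metadata() above would produce roughly:

    {
        "module": "examples.functions",
        "function": "add",
        "signature": "def add(a: int, b: int) -> None:",
    }

The broad except keeps visualization usable when the command cannot be imported, degrading to the placeholder signature def add(...) -> ...: instead of failing the whole graph dump.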
runnable-0.39.0.dist-info/METADATA CHANGED
@@ -1,12 +1,10 @@
 Metadata-Version: 2.4
 Name: runnable
-Version: 0.37.0
+Version: 0.39.0
 Summary: Add your description here
 Author-email: "Vammi, Vijay" <vijay.vammi@astrazeneca.com>
 License-File: LICENSE
 Requires-Python: >=3.10
-Requires-Dist: click-plugins>=1.1.1
-Requires-Dist: click<=8.1.3
 Requires-Dist: cloudpathlib>=0.20.0
 Requires-Dist: dill>=0.3.9
 Requires-Dist: pydantic>=2.10.3
@@ -15,17 +13,24 @@ Requires-Dist: rich>=13.9.4
 Requires-Dist: ruamel-yaml>=0.18.6
 Requires-Dist: setuptools>=75.6.0
 Requires-Dist: stevedore>=5.4.0
-Requires-Dist: typer>=0.15.1
+Requires-Dist: typer>=0.17.3
 Provides-Extra: docker
 Requires-Dist: docker>=7.1.0; extra == 'docker'
 Provides-Extra: examples
 Requires-Dist: pandas>=2.2.3; extra == 'examples'
+Provides-Extra: examples-torch
+Requires-Dist: torch>=2.7.1; extra == 'examples-torch'
 Provides-Extra: k8s
 Requires-Dist: kubernetes>=31.0.0; extra == 'k8s'
 Provides-Extra: notebook
 Requires-Dist: ploomber-engine>=0.0.33; extra == 'notebook'
 Provides-Extra: s3
 Requires-Dist: cloudpathlib[s3]; extra == 's3'
+Provides-Extra: ui
+Requires-Dist: fastapi>=0.95.0; extra == 'ui'
+Requires-Dist: jinja2>=3.1.2; extra == 'ui'
+Requires-Dist: python-multipart>=0.0.5; extra == 'ui'
+Requires-Dist: uvicorn>=0.22.0; extra == 'ui'
 Description-Content-Type: text/markdown
 
 
runnable-0.39.0.dist-info/RECORD CHANGED
@@ -16,14 +16,14 @@ extensions/job_executor/local_container.py,sha256=uoRbsyR5QiVytWJQtF7nXUA1h8wHAa
 extensions/job_executor/pyproject.toml,sha256=UIEgiCYHTXcRWSByNMFuKJFKgxTBpQqTqyUecIsb_Vc,286
 extensions/nodes/README.md,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 extensions/nodes/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-extensions/nodes/conditional.py,sha256=lsXLerIDWJk0fHo4EEngl8rYw6dN40EgKIgdBuvt8ro,8641
-extensions/nodes/fail.py,sha256=P_wryruE2INiAmu4Vf7q4fTEkfbIMnhT71Y9IrOqRkM,2170
-extensions/nodes/map.py,sha256=s0l07uAc6j8MO7Fqch0DzOE3_CsV6pBNHj5E3nf1S60,13659
-extensions/nodes/parallel.py,sha256=cuxpR1e0KwuxvoZaSwiiEn9YmYskplbjLBWBzzU-Ti8,5952
+extensions/nodes/conditional.py,sha256=5wDMrnkXu-cSxaELpHTHzFCebJ39pkVkq3Kzd5MrSWI,9245
+extensions/nodes/fail.py,sha256=wkJW1PUgWvs6PP1J84sk-ed-DxLpL7XS8ltzAazgi3o,2315
+extensions/nodes/map.py,sha256=_hLjkxVkf9dEz6t5k9DgAt6V7-rZwYCyM0sVX4V4l7o,14278
+extensions/nodes/parallel.py,sha256=IMbjMFZcRshIXdVrX1K9QttPBUKTssxLvZDgCSU5-Mo,6101
 extensions/nodes/pyproject.toml,sha256=YTu-ETN3JNFSkMzzWeOwn4m-O2nbRH-PmiPBALDCUw4,278
-extensions/nodes/stub.py,sha256=o9DjBekNa9O4b0VQOiNOA9eNjJ3C2a9Sn9d2fX7KaWg,2715
-extensions/nodes/success.py,sha256=yT4WkUI-1YN6qy4Ji6zSoZFXI9jOl6Ond4WxZRq-33k,2179
-extensions/nodes/task.py,sha256=m5_s5u7lQDoTmeO6CG94btrG5If9AKm2-RvOzvm0fDc,3147
+extensions/nodes/stub.py,sha256=TQPeqHkAAIQBI0hj_UdsloebyYDNrH1PadezExGgXEg,2860
+extensions/nodes/success.py,sha256=XA5UOLmlhNronuhcS0Ct5bGxJjcgBjPTZ4sdako7FwI,2327
+extensions/nodes/task.py,sha256=rfOGdsmgchBKQQy7tpaf7ul9H4r1WeY1zYi6MCSB_Ys,3565
 extensions/pipeline_executor/README.md,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 extensions/pipeline_executor/__init__.py,sha256=JqJFGgLOUaDINUi4_xC3zoDSZxDrUYLNuHQVzr8-ByE,24620
 extensions/pipeline_executor/argo.py,sha256=5nEItAv-gNVi2sylZA4spKLWfcKuJ3zyQx3RekoLb28,38240
@@ -49,24 +49,24 @@ extensions/secrets/dotenv.py,sha256=nADHXI6KJ_LUYOIe5EbtYH-21OBebSNVr0Pjb1GlZ7w,
 extensions/secrets/pyproject.toml,sha256=mLJNImNcBlbLKHh-0ugVWT9V83R4RibyyYDtBCSqVF4,282
 runnable/__init__.py,sha256=MN9x2jmQb2eOr-rap1DXLzNSC926U-aad_YwENzG52w,509
 runnable/catalog.py,sha256=6l0tT0jwHi40aE6fhQMgYtYe_-2As-bRKztAKiFvy3o,3842
-runnable/cli.py,sha256=CziCKBoL-dklSEo_x-YO1AysrG2eaf2LMQZbcNTeCbM,7283
+runnable/cli.py,sha256=TlSjQ1wVlVZjyGqc-99ibAARjr1_y1FQcjyRrJXFmNc,8459
 runnable/context.py,sha256=mLpq5rtMsPawjnaN9Woq7HWZ1FAppeudZtYMT5vf6Fo,17594
 runnable/datastore.py,sha256=2pYg4i1JRMzw_CUUIsPOWt7wYPiGBamfo-CPVAkEH54,32375
 runnable/defaults.py,sha256=4UYuShnjEyWP529UlFnubvkBpOcczKIdE4jEOhPBwl4,3076
 runnable/entrypoints.py,sha256=46prgr3_FYtBMlRbUXIDSpgZUBgaxcdJAekXhgEIj7M,6578
 runnable/exceptions.py,sha256=t5tSlYqe_EjU5liXu32yLLh_yrnXeFL93BuXfmQzV98,3268
 runnable/executor.py,sha256=CwzHkeGVpocACZLzfFS94TzKeiaPLv4NtXtvT3eoocY,15222
-runnable/graph.py,sha256=poQz5zcvq89ju_u5sYlunQLPbHnXTaUmjcvstPwvT4U,16536
+runnable/graph.py,sha256=KfT5nXSCRwVyKytG88h52xeu6kEEM3uytFftG_16Vt4,28666
 runnable/names.py,sha256=A9ldUyULXuWjJ1MoXihHqlg-xeTVX-oWYTO5Ah0trmo,8128
-runnable/nodes.py,sha256=JHBxJib7SSQXY51bLHBXUvb0DlNSLNvyqz3JNEDLt8c,16926
-runnable/parameters.py,sha256=zEehAliVvCOLOnNZ4ExJvSDJM_2PWY0URZ0bmZUgCQA,5289
+runnable/nodes.py,sha256=-IEJKd9RP6SswMjiTaRh2SXqGsGWa_0NeGoQICUx6bs,17289
+runnable/parameters.py,sha256=HZW0bhAYxgMyvRZzUlwp29MVxmzoFU5ZoVJMJHnOcX8,8734
 runnable/pickler.py,sha256=ydJ_eti_U1F4l-YacFp7BWm6g5vTn04UXye25S1HVok,2684
 runnable/sdk.py,sha256=blLBWzXV2x7jxKQXWpjmeJ9k22jt5CKBQBqQpnt4agk,32587
 runnable/secrets.py,sha256=4L_dBFxTgr8r_hHUD6RlZEtqaOHDRsFG5PXO5wlvMI0,2324
-runnable/tasks.py,sha256=7yuoeG4ZqfxFUmN4mPS4i6kbQmzEpAwbPQweAUWY-ic,31366
+runnable/tasks.py,sha256=dKUFE3aJWcBg-aH9xdyyT4DaDBVa1IyBtEQK6LTcoRE,34234
 runnable/utils.py,sha256=amHW3KR_NGTDysGHcSafhh5WJUX7GPBSxqdPyzAIhao,11350
-runnable-0.37.0.dist-info/METADATA,sha256=nD9ezWOwkWw3Yi5_NBE_xqjRGKxqf_FnSsYb8P9oqxo,10047
-runnable-0.37.0.dist-info/WHEEL,sha256=qtCwoSJWgHk21S1Kb4ihdzI2rlJ1ZKaIurTj_ngOhyQ,87
-runnable-0.37.0.dist-info/entry_points.txt,sha256=KkxihZ0LLEiwvFl7RquyqZ0tp2fJDIs7DgzHYDlmc3U,2018
-runnable-0.37.0.dist-info/licenses/LICENSE,sha256=xx0jnfkXJvxRnG63LTGOxlggYnIysveWIZ6H3PNdCrQ,11357
-runnable-0.37.0.dist-info/RECORD,,
+runnable-0.39.0.dist-info/METADATA,sha256=l9vd673bcqL7Dgqz99MzXeBOOsi7_ilqNRMnSE3sMEU,10278
+runnable-0.39.0.dist-info/WHEEL,sha256=qtCwoSJWgHk21S1Kb4ihdzI2rlJ1ZKaIurTj_ngOhyQ,87
+runnable-0.39.0.dist-info/entry_points.txt,sha256=KkxihZ0LLEiwvFl7RquyqZ0tp2fJDIs7DgzHYDlmc3U,2018
+runnable-0.39.0.dist-info/licenses/LICENSE,sha256=xx0jnfkXJvxRnG63LTGOxlggYnIysveWIZ6H3PNdCrQ,11357
+runnable-0.39.0.dist-info/RECORD,,