runnable 0.38.0__tar.gz → 0.39.1__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- {runnable-0.38.0 → runnable-0.39.1}/PKG-INFO +8 -1
- {runnable-0.38.0 → runnable-0.39.1}/extensions/nodes/conditional.py +19 -1
- {runnable-0.38.0 → runnable-0.39.1}/extensions/nodes/fail.py +7 -1
- {runnable-0.38.0 → runnable-0.39.1}/extensions/nodes/map.py +16 -1
- {runnable-0.38.0 → runnable-0.39.1}/extensions/nodes/parallel.py +7 -1
- {runnable-0.38.0 → runnable-0.39.1}/extensions/nodes/stub.py +7 -1
- {runnable-0.38.0 → runnable-0.39.1}/extensions/nodes/success.py +7 -1
- {runnable-0.38.0 → runnable-0.39.1}/extensions/nodes/task.py +15 -1
- {runnable-0.38.0 → runnable-0.39.1}/pyproject.toml +13 -4
- {runnable-0.38.0 → runnable-0.39.1}/runnable/cli.py +52 -0
- {runnable-0.38.0 → runnable-0.39.1}/runnable/graph.py +271 -0
- {runnable-0.38.0 → runnable-0.39.1}/runnable/nodes.py +16 -0
- {runnable-0.38.0 → runnable-0.39.1}/runnable/parameters.py +24 -38
- {runnable-0.38.0 → runnable-0.39.1}/runnable/tasks.py +79 -0
- {runnable-0.38.0 → runnable-0.39.1}/.gitignore +0 -0
- {runnable-0.38.0 → runnable-0.39.1}/LICENSE +0 -0
- {runnable-0.38.0 → runnable-0.39.1}/README.md +0 -0
- {runnable-0.38.0 → runnable-0.39.1}/extensions/README.md +0 -0
- {runnable-0.38.0 → runnable-0.39.1}/extensions/__init__.py +0 -0
- {runnable-0.38.0 → runnable-0.39.1}/extensions/catalog/README.md +0 -0
- {runnable-0.38.0 → runnable-0.39.1}/extensions/catalog/any_path.py +0 -0
- {runnable-0.38.0 → runnable-0.39.1}/extensions/catalog/file_system.py +0 -0
- {runnable-0.38.0 → runnable-0.39.1}/extensions/catalog/minio.py +0 -0
- {runnable-0.38.0 → runnable-0.39.1}/extensions/catalog/pyproject.toml +0 -0
- {runnable-0.38.0 → runnable-0.39.1}/extensions/catalog/s3.py +0 -0
- {runnable-0.38.0 → runnable-0.39.1}/extensions/job_executor/README.md +0 -0
- {runnable-0.38.0 → runnable-0.39.1}/extensions/job_executor/__init__.py +0 -0
- {runnable-0.38.0 → runnable-0.39.1}/extensions/job_executor/emulate.py +0 -0
- {runnable-0.38.0 → runnable-0.39.1}/extensions/job_executor/k8s.py +0 -0
- {runnable-0.38.0 → runnable-0.39.1}/extensions/job_executor/k8s_job_spec.yaml +0 -0
- {runnable-0.38.0 → runnable-0.39.1}/extensions/job_executor/local.py +0 -0
- {runnable-0.38.0 → runnable-0.39.1}/extensions/job_executor/local_container.py +0 -0
- {runnable-0.38.0 → runnable-0.39.1}/extensions/job_executor/pyproject.toml +0 -0
- {runnable-0.38.0 → runnable-0.39.1}/extensions/nodes/README.md +0 -0
- {runnable-0.38.0 → runnable-0.39.1}/extensions/nodes/__init__.py +0 -0
- {runnable-0.38.0 → runnable-0.39.1}/extensions/nodes/pyproject.toml +0 -0
- {runnable-0.38.0 → runnable-0.39.1}/extensions/pipeline_executor/README.md +0 -0
- {runnable-0.38.0 → runnable-0.39.1}/extensions/pipeline_executor/__init__.py +0 -0
- {runnable-0.38.0 → runnable-0.39.1}/extensions/pipeline_executor/argo.py +0 -0
- {runnable-0.38.0 → runnable-0.39.1}/extensions/pipeline_executor/emulate.py +0 -0
- {runnable-0.38.0 → runnable-0.39.1}/extensions/pipeline_executor/local.py +0 -0
- {runnable-0.38.0 → runnable-0.39.1}/extensions/pipeline_executor/local_container.py +0 -0
- {runnable-0.38.0 → runnable-0.39.1}/extensions/pipeline_executor/mocked.py +0 -0
- {runnable-0.38.0 → runnable-0.39.1}/extensions/pipeline_executor/pyproject.toml +0 -0
- {runnable-0.38.0 → runnable-0.39.1}/extensions/pipeline_executor/retry.py +0 -0
- {runnable-0.38.0 → runnable-0.39.1}/extensions/run_log_store/README.md +0 -0
- {runnable-0.38.0 → runnable-0.39.1}/extensions/run_log_store/__init__.py +0 -0
- {runnable-0.38.0 → runnable-0.39.1}/extensions/run_log_store/any_path.py +0 -0
- {runnable-0.38.0 → runnable-0.39.1}/extensions/run_log_store/chunked_fs.py +0 -0
- {runnable-0.38.0 → runnable-0.39.1}/extensions/run_log_store/chunked_minio.py +0 -0
- {runnable-0.38.0 → runnable-0.39.1}/extensions/run_log_store/db/implementation_FF.py +0 -0
- {runnable-0.38.0 → runnable-0.39.1}/extensions/run_log_store/db/integration_FF.py +0 -0
- {runnable-0.38.0 → runnable-0.39.1}/extensions/run_log_store/file_system.py +0 -0
- {runnable-0.38.0 → runnable-0.39.1}/extensions/run_log_store/generic_chunked.py +0 -0
- {runnable-0.38.0 → runnable-0.39.1}/extensions/run_log_store/minio.py +0 -0
- {runnable-0.38.0 → runnable-0.39.1}/extensions/run_log_store/pyproject.toml +0 -0
- {runnable-0.38.0 → runnable-0.39.1}/extensions/secrets/README.md +0 -0
- {runnable-0.38.0 → runnable-0.39.1}/extensions/secrets/dotenv.py +0 -0
- {runnable-0.38.0 → runnable-0.39.1}/extensions/secrets/pyproject.toml +0 -0
- {runnable-0.38.0 → runnable-0.39.1}/runnable/__init__.py +0 -0
- {runnable-0.38.0 → runnable-0.39.1}/runnable/catalog.py +0 -0
- {runnable-0.38.0 → runnable-0.39.1}/runnable/context.py +0 -0
- {runnable-0.38.0 → runnable-0.39.1}/runnable/datastore.py +0 -0
- {runnable-0.38.0 → runnable-0.39.1}/runnable/defaults.py +0 -0
- {runnable-0.38.0 → runnable-0.39.1}/runnable/entrypoints.py +0 -0
- {runnable-0.38.0 → runnable-0.39.1}/runnable/exceptions.py +0 -0
- {runnable-0.38.0 → runnable-0.39.1}/runnable/executor.py +0 -0
- {runnable-0.38.0 → runnable-0.39.1}/runnable/names.py +0 -0
- {runnable-0.38.0 → runnable-0.39.1}/runnable/pickler.py +0 -0
- {runnable-0.38.0 → runnable-0.39.1}/runnable/sdk.py +0 -0
- {runnable-0.38.0 → runnable-0.39.1}/runnable/secrets.py +0 -0
- {runnable-0.38.0 → runnable-0.39.1}/runnable/utils.py +0 -0
{runnable-0.38.0 → runnable-0.39.1}/PKG-INFO

@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: runnable
-Version: 0.38.0
+Version: 0.39.1
 Summary: Add your description here
 Author-email: "Vammi, Vijay" <vijay.vammi@astrazeneca.com>
 License-File: LICENSE
@@ -18,12 +18,19 @@ Provides-Extra: docker
 Requires-Dist: docker>=7.1.0; extra == 'docker'
 Provides-Extra: examples
 Requires-Dist: pandas>=2.2.3; extra == 'examples'
+Provides-Extra: examples-torch
+Requires-Dist: torch>=2.7.1; extra == 'examples-torch'
 Provides-Extra: k8s
 Requires-Dist: kubernetes>=31.0.0; extra == 'k8s'
 Provides-Extra: notebook
 Requires-Dist: ploomber-engine>=0.0.33; extra == 'notebook'
 Provides-Extra: s3
 Requires-Dist: cloudpathlib[s3]; extra == 's3'
+Provides-Extra: ui
+Requires-Dist: fastapi>=0.95.0; extra == 'ui'
+Requires-Dist: jinja2>=3.1.2; extra == 'ui'
+Requires-Dist: python-multipart>=0.0.5; extra == 'ui'
+Requires-Dist: uvicorn>=0.22.0; extra == 'ui'
 Description-Content-Type: text/markdown
{runnable-0.38.0 → runnable-0.39.1}/extensions/nodes/conditional.py

@@ -7,7 +7,7 @@ from pydantic import Field, field_serializer, field_validator
 from runnable import console, defaults
 from runnable.datastore import Parameter
 from runnable.graph import Graph, create_graph
-from runnable.nodes import CompositeNode, MapVariableType
+from runnable.nodes import CompositeNode, MapVariableType, NodeInD3

 logger = logging.getLogger(defaults.LOGGER_NAME)

@@ -241,3 +241,21 @@ class ConditionalNode(CompositeNode):
             step_log.status = defaults.FAIL

         self._context.run_log_store.add_step_log(step_log, self._context.run_id)
+
+    def to_d3_node(self) -> NodeInD3:
+        def get_display_string() -> str:
+            display = f"match {self.parameter}:\n"
+            for case in self.branches.keys():
+                display += f'    case "{case}":\n        ...\n'
+            if self.default:
+                display += "    case _:\n        ...\n"
+            return display
+
+        return NodeInD3(
+            id=self.internal_name,
+            label="conditional",
+            metadata={
+                "conditioned on": self.parameter,
+                "display": get_display_string(),
+            },
+        )
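For orientation, the `get_display_string` helper in the hunk above renders a conditional node as a Python `match` block. A standalone sketch of the string it builds, using an invented parameter name and branches (not taken from the package):

```python
# Replica of the display-string logic above; "mode", "train", "eval" are invented.
parameter = "mode"
branches = {"train": None, "eval": None}
has_default = True

display = f"match {parameter}:\n"
for case in branches:
    display += f'    case "{case}":\n        ...\n'
if has_default:
    display += "    case _:\n        ...\n"
print(display)
# match mode:
#     case "train":
#         ...
#     case "eval":
#         ...
#     case _:
#         ...
```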
{runnable-0.38.0 → runnable-0.39.1}/extensions/nodes/fail.py

@@ -6,7 +6,7 @@ from pydantic import Field
 from runnable import datastore, defaults
 from runnable.datastore import StepLog
 from runnable.defaults import MapVariableType
-from runnable.nodes import TerminalNode
+from runnable.nodes import NodeInD3, TerminalNode


 class FailNode(TerminalNode):
@@ -70,3 +70,9 @@ class FailNode(TerminalNode):
         step_log.attempts.append(attempt_log)

         return step_log
+
+    def to_d3_node(self) -> NodeInD3:
+        return NodeInD3(
+            id=self.internal_name,
+            label="fail",
+        )
{runnable-0.38.0 → runnable-0.39.1}/extensions/nodes/map.py

@@ -18,7 +18,7 @@ from runnable.datastore import (
 )
 from runnable.defaults import MapVariableType
 from runnable.graph import Graph, create_graph
-from runnable.nodes import CompositeNode
+from runnable.nodes import CompositeNode, NodeInD3

 logger = logging.getLogger(defaults.LOGGER_NAME)

@@ -348,3 +348,18 @@ class MapNode(CompositeNode):
         self._context.run_log_store.set_parameters(
             parameters=params, run_id=self._context.run_id
         )
+
+    def to_d3_node(self) -> NodeInD3:
+        return NodeInD3(
+            id=self.internal_name,
+            label="map",
+            metadata={
+                "node_type": "map",
+                "iterate_on": self.iterate_on,  # Parameter name containing the iterable
+                "iterate_as": self.iterate_as,  # Name used for each iteration
+                "map_branch_id": self.internal_name
+                + "."
+                + defaults.MAP_PLACEHOLDER,  # The branch identifier pattern
+                "is_composite": True,  # Flag indicating this is a composite node
+            },
+        )
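The `map_branch_id` above is simply the node's internal name joined to `defaults.MAP_PLACEHOLDER` with a dot, marking where the per-iteration value is substituted. A toy illustration, with both the node name and the placeholder value invented for the example:

```python
# Both values below are hypothetical; the real placeholder constant lives in
# runnable/defaults.py as MAP_PLACEHOLDER.
internal_name = "process_chunks"
MAP_PLACEHOLDER = "map_variable_placeholder"  # assumed value, illustration only

map_branch_id = internal_name + "." + MAP_PLACEHOLDER
print(map_branch_id)  # process_chunks.map_variable_placeholder
```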
{runnable-0.38.0 → runnable-0.39.1}/extensions/nodes/parallel.py

@@ -6,7 +6,7 @@ from pydantic import Field, field_serializer
 from runnable import defaults
 from runnable.defaults import MapVariableType
 from runnable.graph import Graph, create_graph
-from runnable.nodes import CompositeNode
+from runnable.nodes import CompositeNode, NodeInD3


 class ParallelNode(CompositeNode):
@@ -157,3 +157,9 @@ class ParallelNode(CompositeNode):
             step_log.status = defaults.FAIL

         self._context.run_log_store.add_step_log(step_log, self._context.run_id)
+
+    def to_d3_node(self) -> NodeInD3:
+        return NodeInD3(
+            id=self.internal_name,
+            label="parallel",
+        )
{runnable-0.38.0 → runnable-0.39.1}/extensions/nodes/stub.py

@@ -7,7 +7,7 @@ from pydantic import ConfigDict, Field
 from runnable import datastore, defaults
 from runnable.datastore import StepLog
 from runnable.defaults import MapVariableType
-from runnable.nodes import ExecutableNode
+from runnable.nodes import ExecutableNode, NodeInD3

 logger = logging.getLogger(defaults.LOGGER_NAME)

@@ -87,3 +87,9 @@ class StubNode(ExecutableNode):
         step_log.attempts.append(attempt_log)

         return step_log
+
+    def to_d3_node(self) -> NodeInD3:
+        return NodeInD3(
+            id=self.internal_name,
+            label="stub",
+        )
{runnable-0.38.0 → runnable-0.39.1}/extensions/nodes/success.py

@@ -6,7 +6,7 @@ from pydantic import Field
 from runnable import datastore, defaults
 from runnable.datastore import StepLog
 from runnable.defaults import MapVariableType
-from runnable.nodes import TerminalNode
+from runnable.nodes import NodeInD3, TerminalNode


 class SuccessNode(TerminalNode):
@@ -70,3 +70,9 @@ class SuccessNode(TerminalNode):
         step_log.attempts.append(attempt_log)

         return step_log
+
+    def to_d3_node(self) -> NodeInD3:
+        return NodeInD3(
+            id=self.internal_name,
+            label="success",
+        )
{runnable-0.38.0 → runnable-0.39.1}/extensions/nodes/task.py

@@ -7,7 +7,7 @@ from pydantic import ConfigDict, Field
 from runnable import datastore, defaults
 from runnable.datastore import StepLog
 from runnable.defaults import MapVariableType
-from runnable.nodes import ExecutableNode
+from runnable.nodes import ExecutableNode, NodeInD3
 from runnable.tasks import BaseTaskType, create_task

 logger = logging.getLogger(defaults.LOGGER_NAME)
@@ -90,3 +90,17 @@ class TaskNode(ExecutableNode):
         step_log.attempts.append(attempt_log)

         return step_log
+
+    def to_d3_node(self) -> NodeInD3:
+        """
+        Convert the task node to a D3 node representation.
+
+        Returns:
+            NodeInD3: The D3 node representation of the task node.
+        """
+        return NodeInD3(
+            id=self.internal_name,
+            label="task",
+            task_type=self.executable.task_type,
+            metadata=self.executable.get_d3_metadata(),
+        )
{runnable-0.38.0 → runnable-0.39.1}/pyproject.toml

@@ -1,6 +1,6 @@
 [project]
 name = "runnable"
-version = "0.38.0"
+version = "0.39.1"
 description = "Add your description here"
 readme = "README.md"
 authors = [
@@ -35,6 +35,15 @@ k8s = [
 s3 = [
     "cloudpathlib[s3]"
 ]
+examples-torch = [
+    "torch>=2.7.1",
+]
+ui = [
+    "fastapi>=0.95.0",
+    "jinja2>=3.1.2",
+    "uvicorn>=0.22.0",
+    "python-multipart>=0.0.5",
+]


 [dependency-groups]
@@ -56,9 +65,7 @@ docs = [
 release = [
     "python-semantic-release>=9.15.2",
 ]
-
-    "torch>=2.7.1",
-]
+

 [tool.uv.workspace]
 members = ["extensions/catalog",
@@ -67,6 +74,7 @@ members = ["extensions/catalog",
     "extensions/pipeline_executor",
     "extensions/run_log_store",
     "extensions/secrets",
+    "visualization"
 ]

 [tool.uv.sources]
@@ -76,6 +84,7 @@ catalog = {workspace = true}
 run_log_store = {workspace = true}
 pipeline_executor = {workspace = true}
 job_executor = {workspace = true}
+visualization = {workspace = true}


 [project.scripts]
{runnable-0.38.0 → runnable-0.39.1}/runnable/cli.py

@@ -274,5 +274,57 @@ def execute_job(
     )


+@app.command()
+def ui(
+    host: Annotated[
+        str,
+        typer.Option(
+            "--host",
+            "-h",
+            help="The host to bind the server to",
+        ),
+    ] = "127.0.0.1",
+    port: Annotated[
+        int,
+        typer.Option(
+            "--port",
+            "-p",
+            help="The port to bind the server to",
+        ),
+    ] = 8000,
+    reload: Annotated[
+        bool,
+        typer.Option(
+            "--reload",
+            help="Enable auto-reload for development",
+        ),
+    ] = False,
+):
+    """
+    Start the web UI for pipeline visualization.
+
+    This command starts a FastAPI web server that provides a user interface
+    for visualizing and exploring runnable pipelines.
+    """
+    try:
+        import uvicorn
+
+        from visualization.main import app as web_app
+    except ImportError:
+        typer.echo(
+            "UI dependencies not installed. Install with: pip install runnable[ui]",
+            err=True,
+        )
+        raise typer.Exit(1)
+
+    typer.echo(f"Starting web UI at http://{host}:{port}")
+    uvicorn.run(
+        web_app,
+        host=host,
+        port=port,
+        reload=reload,
+    )
+
+
 if __name__ == "__main__":
     app()
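Assuming `runnable.cli` exposes the Typer `app` shown above, the new command can be exercised in-process with Typer's test runner; a hedged sketch (when the `[ui]` extra is missing, the command should print the install hint and exit with code 1):

```python
# Sketch only: assumes runnable.cli exports the Typer app from the diff above.
from typer.testing import CliRunner

from runnable.cli import app

runner = CliRunner()
result = runner.invoke(app, ["ui", "--port", "8001"])
print(result.exit_code)  # 1 if the [ui] extra is not installed
print(result.output)
```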
{runnable-0.38.0 → runnable-0.39.1}/runnable/graph.py

@@ -499,3 +499,274 @@ def search_branch_by_internal_name(dag: Graph, internal_name: str):
             return current_branch

     raise exceptions.BranchNotFoundError(internal_name)
+
+
+def get_visualization_data(graph: Graph) -> Dict[str, Any]:
+    """
+    Convert the graph into a D3 visualization friendly format with nodes and links.
+    Handles composite nodes (parallel, map, conditional) by recursively processing their embedded graphs.
+
+    Args:
+        graph: The Graph object to convert
+
+    Returns:
+        Dict with two keys:
+        - nodes: List of node objects with id, type, name, and alias
+        - links: List of edge objects with source and target node ids
+    """
+    import rich.console
+
+    from extensions.nodes.conditional import ConditionalNode
+    from extensions.nodes.map import MapNode
+    from extensions.nodes.parallel import ParallelNode
+    from runnable.nodes import ExecutableNode
+
+    rich_print = rich.console.Console().print
+
+    rich_print(graph)
+
+    nodes = []
+    links = []
+    processed_nodes = set()
+
+    def process_node(
+        node: BaseNode,
+        parent_id: Optional[str] = None,
+        current_graph: Graph = graph,
+        map_node_id: Optional[str] = None,
+        conditional_node_id: Optional[str] = None,
+    ) -> str:
+        node_id = f"{node.internal_name}"
+        node_alias = node.name  # Alias based on the node's name
+
+        if node_id not in processed_nodes:
+            node_data = node.to_d3_node().model_dump(exclude_none=True)
+            node_data["alias"] = node_alias  # Add alias to the node data
+            node_data["display_name"] = node_alias  # Use alias as the display name
+
+            # Add map or parallel related metadata if this node is part of a map branch or parallel branch
+            if map_node_id:
+                if "metadata" not in node_data:
+                    node_data["metadata"] = {}
+
+                # Mark this node as being part of a map branch
+                node_data["metadata"]["belongs_to_node"] = map_node_id
+
+                # If this is the map node itself, add a special attribute
+                if node_id == map_node_id:
+                    node_data["metadata"]["is_map_root"] = True
+
+            # Add conditional related metadata if this node is part of a conditional branch
+            if conditional_node_id:
+                if "metadata" not in node_data:
+                    node_data["metadata"] = {}
+
+                # Mark this node as being part of a conditional branch
+                node_data["metadata"]["belongs_to_node"] = conditional_node_id
+
+                # If this is the conditional node itself, add a special attribute
+                if node_id == conditional_node_id:
+                    node_data["metadata"]["is_conditional_root"] = True
+
+            # Mark parallel nodes with special metadata
+            if isinstance(node, ParallelNode):
+                if "metadata" not in node_data:
+                    node_data["metadata"] = {}
+
+                # Add parallel node type to metadata
+                node_data["metadata"]["node_type"] = "parallel"
+                node_data["metadata"]["parallel_branch_id"] = node_id
+
+            # Mark conditional nodes with special metadata
+            if isinstance(node, ConditionalNode):
+                if "metadata" not in node_data:
+                    node_data["metadata"] = {}
+
+                # Add conditional node type to metadata
+                node_data["metadata"]["node_type"] = "conditional"
+                node_data["metadata"]["conditional_branch_id"] = node_id
+
+            nodes.append(node_data)
+            processed_nodes.add(node_id)
+
+        # Add link from parent if it exists
+        if parent_id:
+            links.append({"source": parent_id, "target": node_id})
+
+        # Handle composite nodes with embedded graphs
+        if isinstance(node, (ParallelNode, MapNode, ConditionalNode)):
+            if isinstance(node, ParallelNode):
+                # Process each parallel branch
+                for _, branch in node.branches.items():
+                    branch_start = branch.get_node_by_name(branch.start_at)
+                    process_node(
+                        branch_start,
+                        node_id,
+                        branch,
+                        map_node_id=node_id,
+                        conditional_node_id=conditional_node_id,
+                    )
+
+                # Handle next node connection after parallel branches complete
+                if hasattr(node, "next_node") and node.next_node:
+                    try:
+                        next_node = current_graph.get_node_by_name(node.next_node)
+                        next_id = process_node(
+                            next_node,
+                            None,
+                            current_graph=current_graph,
+                            map_node_id=map_node_id,
+                            conditional_node_id=conditional_node_id,
+                        )
+                        links.append(
+                            {
+                                "source": node_id,
+                                "target": next_id,
+                                "type": "success",
+                            }
+                        )
+                    except exceptions.NodeNotFoundError as e:
+                        rich_print(
+                            f"Warning: Next node '{node.next_node}' not found for parallel node '{node.name}': {e}"
+                        )
+
+            elif isinstance(node, MapNode):
+                # Process map branch
+                branch_start = node.branch.get_node_by_name(node.branch.start_at)
+                # Process the branch with additional context about the map node
+                process_node(
+                    branch_start,
+                    node_id,
+                    node.branch,
+                    map_node_id=node_id,
+                    conditional_node_id=conditional_node_id,
+                )
+
+            elif isinstance(node, ConditionalNode):
+                # Process each conditional branch
+                for _, branch in node.branches.items():
+                    branch_start = branch.get_node_by_name(branch.start_at)
+                    process_node(
+                        branch_start,
+                        node_id,
+                        branch,
+                        map_node_id=map_node_id,
+                        conditional_node_id=node_id,
+                    )
+                if node.default:
+                    default_start = node.default.get_node_by_name(
+                        node.default.start_at
+                    )
+                    process_node(
+                        default_start,
+                        node_id,
+                        node.default,
+                        map_node_id=map_node_id,
+                        conditional_node_id=node_id,
+                    )
+
+                # Handle next node connection after conditional branches complete
+                if hasattr(node, "next_node") and node.next_node:
+                    try:
+                        next_node = current_graph.get_node_by_name(node.next_node)
+                        next_id = process_node(
+                            next_node,
+                            None,
+                            current_graph=current_graph,
+                            map_node_id=map_node_id,
+                            conditional_node_id=conditional_node_id,
+                        )
+                        links.append(
+                            {
+                                "source": node_id,
+                                "target": next_id,
+                                "type": "success",
+                            }
+                        )
+                    except exceptions.NodeNotFoundError as e:
+                        rich_print(
+                            f"Warning: Next node '{node.next_node}' not found for conditional node '{node.name}': {e}"
+                        )
+
+        # Add links to next and on_failure nodes if they exist
+        if isinstance(node, ExecutableNode):
+            # Handle normal "next" links (success path)
+            if hasattr(node, "next_node") and node.next_node:
+                try:
+                    next_node = current_graph.get_node_by_name(node.next_node)
+                    next_id = process_node(
+                        next_node,
+                        None,
+                        current_graph=current_graph,
+                        map_node_id=map_node_id,
+                        conditional_node_id=conditional_node_id,
+                    )
+                    links.append(
+                        {"source": node_id, "target": next_id, "type": "success"}
+                    )
+                except exceptions.NodeNotFoundError as e:
+                    rich_print(
+                        f"Warning: Next node '{node.next_node}' not found for node '{node.name}': {e}"
+                    )
+
+            # Handle on_failure links (failure path)
+            if hasattr(node, "on_failure") and node.on_failure:
+                try:
+                    failure_node = current_graph.get_node_by_name(node.on_failure)
+                    failure_id = process_node(
+                        failure_node,
+                        None,
+                        current_graph=current_graph,
+                        map_node_id=map_node_id,
+                        conditional_node_id=conditional_node_id,
+                    )
+                    links.append(
+                        {"source": node_id, "target": failure_id, "type": "failure"}
+                    )
+                except exceptions.NodeNotFoundError as e:
+                    rich_print(
+                        f"Warning: On-failure node '{node.on_failure}' not found for node '{node.name}': {e}"
+                    )
+
+            # For backward compatibility, also process all neighbors
+            # This handles cases where node might have other connection types
+            next_nodes = node._get_neighbors()
+            for next_node_name in next_nodes:
+                # Skip nodes we've already handled explicitly
+                if (
+                    hasattr(node, "next_node") and node.next_node == next_node_name
+                ) or (
+                    hasattr(node, "on_failure")
+                    and node.on_failure == next_node_name
+                ):
+                    continue
+
+                try:
+                    next_node = current_graph.get_node_by_name(next_node_name)
+                    next_id = process_node(
+                        next_node,
+                        None,
+                        current_graph=current_graph,
+                        map_node_id=map_node_id,
+                        conditional_node_id=conditional_node_id,
+                    )
+                    links.append(
+                        {"source": node_id, "target": next_id, "type": "default"}
+                    )
+                except exceptions.NodeNotFoundError as e:
+                    rich_print(
+                        f"Warning: Neighbor node '{next_node_name}' not found for node '{node.name}': {e}"
+                    )
+
+        return node_id
+
+    # Start processing from the start node
+    start_node = graph.get_node_by_name(graph.start_at)
+    try:
+        process_node(
+            start_node, None, graph, map_node_id=None, conditional_node_id=None
+        )
+    except (exceptions.NodeNotFoundError, AttributeError, KeyError) as e:
+        rich_print(f"Error processing node {start_node}: {e}")
+
+    return {"nodes": nodes, "links": links}
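To picture the return shape: for a hypothetical linear pipeline step1 -> step2 -> success, `get_visualization_data` would yield something like the dict below (ids and labels invented; real metadata depends on each node's `to_d3_node`):

```python
# Hypothetical result for a linear three-node pipeline; mirrors the structures
# appended to `nodes` and `links` in the function above.
visualization = {
    "nodes": [
        {"id": "step1", "label": "task", "alias": "step1", "display_name": "step1"},
        {"id": "step2", "label": "task", "alias": "step2", "display_name": "step2"},
        {"id": "success", "label": "success", "alias": "success", "display_name": "success"},
    ],
    "links": [
        {"source": "step1", "target": "step2", "type": "success"},
        {"source": "step2", "target": "success", "type": "success"},
    ],
}
```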
{runnable-0.38.0 → runnable-0.39.1}/runnable/nodes.py

@@ -15,6 +15,13 @@ logger = logging.getLogger(defaults.LOGGER_NAME)
 # --8<-- [start:docs]


+class NodeInD3(BaseModel):
+    id: str
+    label: str
+    task_type: Optional[str] = None
+    metadata: Optional[Dict[str, Any]] = None
+
+
 class BaseNode(ABC, BaseModel):
     """
     Base class with common functionality provided for a Node of a graph.
@@ -369,6 +376,15 @@ class BaseNode(ABC, BaseModel):
             Dict[str, Any]: _description_
         """

+    @abstractmethod
+    def to_d3_node(self) -> NodeInD3:
+        """
+        Convert the node to a D3 node representation.
+
+        Returns:
+            NodeInD3: The D3 node representation of the current node.
+        """
+

 # --8<-- [end:docs]
 class TraversalNode(BaseNode):
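`NodeInD3` is a plain Pydantic model, and the graph code above serializes it with `model_dump(exclude_none=True)`, so unset optional fields never reach the frontend. A self-contained illustration:

```python
# Stand-alone copy of the NodeInD3 model above, to show exclude_none in action.
from typing import Any, Dict, Optional

from pydantic import BaseModel


class NodeInD3(BaseModel):
    id: str
    label: str
    task_type: Optional[str] = None
    metadata: Optional[Dict[str, Any]] = None


node = NodeInD3(id="train_model", label="task", task_type="python")
print(node.model_dump(exclude_none=True))
# {'id': 'train_model', 'label': 'task', 'task_type': 'python'} -- metadata is dropped
```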
{runnable-0.38.0 → runnable-0.39.1}/runnable/parameters.py

@@ -3,7 +3,7 @@ import inspect
 import json
 import logging
 import os
-from typing import Any, Dict, Type
+from typing import Any, Dict, Type, get_origin

 from pydantic import BaseModel, ConfigDict
 from typing_extensions import Callable
@@ -97,7 +97,6 @@ def filter_arguments_for_func(
             params[key] = JsonParameter(kind="json", value=v)

     bound_args = {}
-    missing_required_args: list[str] = []
     var_keyword_param = None
     namespace_param = None

@@ -108,12 +107,12 @@ def filter_arguments_for_func(
             logger.warning(f"Ignoring parameter {name} as it is VAR_POSITIONAL")
             continue

-        # Check for **kwargs parameter
+        # Check for **kwargs parameter, we need to send in all the unnamed values in this as a dict
         if value.kind == inspect.Parameter.VAR_KEYWORD:
             var_keyword_param = name
             continue

-        # Check for argparse.Namespace parameter
+        # Check for argparse.Namespace parameter, we need to send in all the unnamed values in this as a namespace
         if value.annotation == argparse.Namespace:
             namespace_param = name
             continue
@@ -124,16 +123,17 @@ def filter_arguments_for_func(
                 # Default value is given in the function signature, we can use it
                 bound_args[name] = value.default
             else:
-                # This is a required parameter that's missing
-                missing_required_args.append(name)
+                # This is a required parameter that's missing - error immediately
+                raise ValueError(
+                    f"Function {func.__name__} has required parameter '{name}' that is not present in the parameters"
+                )
         else:
             # We have a parameter of this name, lets bind it
             param_value = params[name]

-            if (
-
-
-            ) and not isinstance(param_value, ObjectParameter):
+            if (issubclass(value.annotation, BaseModel)) and not isinstance(
+                param_value, ObjectParameter
+            ):
                 # Even if the annotation is a pydantic model, it can be passed as an object parameter
                 # We try to cast it as a pydantic model if asked
                 named_param = params[name].get_value()
@@ -147,22 +147,32 @@ def filter_arguments_for_func(
                 )
                 bound_args[name] = bound_model

-            elif value.annotation
+            elif value.annotation is not inspect.Parameter.empty and callable(
                 value.annotation
             ):
                 # Cast it if its a primitive type. Ensure the type matches the annotation.
                 try:
-                    bound_args[name] = value.annotation(params[name].get_value())
+                    # Handle typing generics like Dict[str, int], List[str] by using their origin
+                    origin = get_origin(value.annotation)
+                    if origin is not None:
+                        # For generics like Dict[str, int], use dict() instead of Dict[str, int]()
+                        bound_args[name] = origin(params[name].get_value())
+                    else:
+                        # Regular callable types like int, str, float, etc.
+                        bound_args[name] = value.annotation(params[name].get_value())
                 except (ValueError, TypeError) as e:
+                    annotation_name = getattr(
+                        value.annotation, "__name__", str(value.annotation)
+                    )
                     raise ValueError(
-                        f"Cannot cast parameter '{name}' to {
+                        f"Cannot cast parameter '{name}' to {annotation_name}: {e}"
                     )
             else:
                 # We do not know type of parameter, we send the value as found
                 bound_args[name] = params[name].get_value()

     # Find extra parameters (parameters in params but not consumed by regular function parameters)
-    consumed_param_names = set(bound_args.keys())
+    consumed_param_names = set(bound_args.keys())
     extra_params = {k: v for k, v in params.items() if k not in consumed_param_names}

     # Second pass: Handle **kwargs and argparse.Namespace parameters
@@ -176,30 +186,6 @@ def filter_arguments_for_func(
         for param_name, param_value in extra_params.items():
             setattr(args_namespace, param_name, param_value.get_value())
         bound_args[namespace_param] = args_namespace
-    elif extra_params:
-        # Function doesn't accept **kwargs or namespace, but we have extra parameters
-        # This should only be an error if we also have missing required parameters
-        # or if the function truly can't handle the extra parameters
-        if missing_required_args:
-            # We have both missing required and extra parameters - this is an error
-            raise ValueError(
-                f"Function {func.__name__} has parameters {missing_required_args} that are not present in the parameters"
-            )
-        # If we only have extra parameters and no missing required ones, we just ignore the extras
-        # This allows for more flexible parameter passing
-
-    # Check for missing required parameters
-    if missing_required_args:
-        if var_keyword_param is None and namespace_param is None:
-            # No way to handle missing parameters
-            raise ValueError(
-                f"Function {func.__name__} has parameters {missing_required_args} that are not present in the parameters"
-            )
-        # If we have **kwargs or namespace, missing parameters might be handled there
-        # But if they're truly required (no default), we should still error
-        raise ValueError(
-            f"Function {func.__name__} has parameters {missing_required_args} that are not present in the parameters"
-        )

     return bound_args
{runnable-0.38.0 → runnable-0.39.1}/runnable/tasks.py

@@ -209,6 +209,9 @@ class BaseTaskType(BaseModel):
             parameters=diff_parameters, run_id=self._context.run_id
         )

+    def get_d3_metadata(self) -> dict[str, str]:
+        raise NotImplementedError
+

 def task_return_to_parameter(task_return: TaskReturns, value: Any) -> Parameter:
     # implicit support for pydantic models
@@ -287,6 +290,72 @@ class PythonTaskType(BaseTaskType):  # pylint: disable=too-few-public-methods
     task_type: str = Field(default="python", serialization_alias="command_type")
     command: str

+    def get_d3_metadata(self) -> Dict[str, str]:
+        module, func = utils.get_module_and_attr_names(self.command)
+
+        # Import inspect module to get function signature
+        import inspect
+
+        def format_type_annotation(annotation):
+            """Format type annotation in a more readable way"""
+            if annotation == inspect._empty:
+                return "Any"
+            elif hasattr(annotation, "__name__"):
+                return annotation.__name__
+            elif hasattr(annotation, "__origin__"):
+                # Handle typing types like List, Dict, etc.
+                origin = (
+                    annotation.__origin__.__name__
+                    if hasattr(annotation.__origin__, "__name__")
+                    else str(annotation.__origin__)
+                )
+                args = ", ".join(
+                    format_type_annotation(arg) for arg in annotation.__args__
+                )
+                return f"{origin}[{args}]"
+            else:
+                # Fall back to string representation without 'typing.'
+                return str(annotation).replace("typing.", "")
+
+        # Import the module and get the function
+        sys.path.insert(0, os.getcwd())
+        try:
+            imported_module = importlib.import_module(module)
+            f = getattr(imported_module, func)
+
+            # Get function signature
+            sig = inspect.signature(f)
+
+            # Format parameters with type annotations
+            params_list = []
+            for param_name, param in sig.parameters.items():
+                type_annotation = format_type_annotation(param.annotation)
+                params_list.append(f"{param_name}: {type_annotation}")
+
+            params_str = ", ".join(params_list)
+
+            # Format returns based on self.returns or use "None" if no returns specified
+            if self.returns:
+                returns_str = ", ".join([f"{r.name}({r.kind})" for r in self.returns])
+            else:
+                returns_str = "None"
+
+            # Format function signature
+            signature = f"def {func}({params_str}) -> {returns_str}:"
+
+            return {
+                "module": module,
+                "function": func,
+                "signature": signature,
+            }
+        except Exception as e:
+            logger.warning(f"Could not inspect function {self.command}: {str(e)}")
+            return {
+                "module": module,
+                "function": func,
+                "signature": f"def {func}(...) -> ...",
+            }
+
     def execute_command(
         self,
         map_variable: MapVariableType = None,
@@ -440,6 +509,11 @@ class NotebookTaskType(BaseTaskType):
     command: str
     optional_ploomber_args: dict = {}

+    def get_d3_metadata(self) -> Dict[str, str]:
+        return {
+            "notebook": self.command,
+        }
+
     @field_validator("command")
     @classmethod
     def notebook_should_end_with_ipynb(cls, command: str) -> str:
@@ -640,6 +714,11 @@ class ShellTaskType(BaseTaskType):
     task_type: str = Field(default="shell", serialization_alias="command_type")
     command: str

+    def get_d3_metadata(self) -> Dict[str, str]:
+        return {
+            "command": self.command[:50],
+        }
+
     @field_validator("returns")
     @classmethod
     def returns_should_be_json(cls, returns: List[TaskReturns]):
|
File without changes
|
File without changes
|
File without changes
|
File without changes
|
File without changes
|
File without changes
|
File without changes
|
File without changes
|
File without changes
|
File without changes
|
File without changes
|
File without changes
|
File without changes
|
File without changes
|
File without changes
|
File without changes
|
File without changes
|
File without changes
|
File without changes
|
File without changes
|
File without changes
|
File without changes
|
File without changes
|
File without changes
|
File without changes
|
File without changes
|
File without changes
|
File without changes
|
File without changes
|
File without changes
|
File without changes
|
File without changes
|
File without changes
|
File without changes
|
File without changes
|
File without changes
|
File without changes
|
File without changes
|
File without changes
|
File without changes
|
File without changes
|
File without changes
|
File without changes
|
File without changes
|
File without changes
|
File without changes
|
File without changes
|
File without changes
|
File without changes
|
File without changes
|
File without changes
|
File without changes
|
File without changes
|
File without changes
|
File without changes
|
File without changes
|
File without changes
|
File without changes
|