argo-kedro 0.1.1__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,10 @@
1
+ # Python-generated files
2
+ __pycache__/
3
+ *.py[oc]
4
+ build/
5
+ dist/
6
+ wheels/
7
+ *.egg-info
8
+
9
+ # Virtual environments
10
+ .venv
@@ -0,0 +1 @@
1
+ 3.11
@@ -0,0 +1,21 @@
1
+ MIT License
2
+
3
+ Copyright (c) 2026 kedro-argo contributors
4
+
5
+ Permission is hereby granted, free of charge, to any person obtaining a copy
6
+ of this software and associated documentation files (the "Software"), to deal
7
+ in the Software without restriction, including without limitation the rights
8
+ to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
9
+ copies of the Software, and to permit persons to whom the Software is
10
+ furnished to do so, subject to the following conditions:
11
+
12
+ The above copyright notice and this permission notice shall be included in all
13
+ copies or substantial portions of the Software.
14
+
15
+ THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16
+ IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17
+ FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
18
+ AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19
+ LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
20
+ OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
21
+ SOFTWARE.
@@ -0,0 +1,3 @@
1
+ include README.md
2
+ include LICENSE
3
+ recursive-include kedro_argo *.tmpl
@@ -0,0 +1,116 @@
1
+ Metadata-Version: 2.4
2
+ Name: argo-kedro
3
+ Version: 0.1.1
4
+ Summary: Kedro plugin for running pipelines on Argo Workflows
5
+ Author-email: Laurens Vijnck <laurens@everycure.org>, Nelson Alfonso <nelson@everycure.org>
6
+ License: MIT
7
+ Project-URL: Homepage, https://github.com/everycure-org/kedro-argo
8
+ Project-URL: Repository, https://github.com/everycure-org/kedro-argo
9
+ Project-URL: Issues, https://github.com/everycure-org/kedro-argo/issues
10
+ Keywords: kedro,argo,workflows,argo workflows,kubernetes,pipeline
11
+ Classifier: Development Status :: 3 - Alpha
12
+ Classifier: Intended Audience :: Developers
13
+ Classifier: License :: OSI Approved :: MIT License
14
+ Classifier: Programming Language :: Python :: 3
15
+ Classifier: Programming Language :: Python :: 3.10
16
+ Classifier: Programming Language :: Python :: 3.11
17
+ Classifier: Programming Language :: Python :: 3.12
18
+ Classifier: Programming Language :: Python :: 3.13
19
+ Classifier: Programming Language :: Python :: 3.14
20
+ Classifier: Framework :: Kedro
21
+ Classifier: Topic :: Software Development :: Libraries :: Python Modules
22
+ Requires-Python: >=3.10
23
+ Description-Content-Type: text/markdown
24
+ License-File: LICENSE
25
+ Requires-Dist: kedro
26
+ Requires-Dist: pyyaml>=6.0.2
27
+ Requires-Dist: jinja2>=3.0.0
28
+ Dynamic: license-file
29
+
30
+ # kedro-argo
31
+
32
+ A Kedro plugin for running pipelines on Argo Workflows in Kubernetes clusters.
33
+
34
+ ## Features
35
+
36
+ - ✅ Submit Kedro pipelines to Argo Workflows
37
+ - ✅ Fuse pipeline nodes for optimized execution
38
+ - ✅ Memory dataset management during fusing
39
+ - 🚧 Image building guidelines
40
+ - 🚧 Hardware configuration support
41
+ - 🚧 Custom template registration
42
+
43
+ ## Installation
44
+
45
+ ```bash
46
+ pip install argo-kedro
47
+ ```
48
+
49
+ ## Prerequisites
50
+
51
+ - Kubernetes cluster with Argo Workflows installed
52
+ - `kubeconfig` configured correctly
53
+ - Argo Workflows namespace set up
54
+
55
+ ## Quick Start
56
+
57
+ ### 1. Configure your Kedro project
58
+
59
+ Add Argo configuration to your Kedro project (e.g., in `conf/base/argo.yml`).
60
+
61
+ ### 2. Use the CLI
62
+
63
+ ```bash
64
+ # Submit pipeline to Argo
65
+ kedro argo submit
66
+
67
+ # Other commands
68
+ kedro argo --help
69
+ ```
70
+
71
+ ## Usage
72
+
73
+ - Ensure kubeconfig is set correctly.
74
+ - Ensure Argo Workflows is installed correctly on the cluster and the target namespace is present.
75
+
76
+ ## Project setup
77
+
78
+ - Distinguish new cloud environment for running remotely
79
+
80
+ ## Current Assumptions
81
+
82
+ - For image building, we assume the user enters the path to a valid GAR repository, which the cluster is assumed to have permissions to
83
+ - We work with the `latest` tag only, as Argo ensures to apply `imagePullPolicy: Always` in that case
84
+
85
+ ## Known Issues
86
+
87
+ - Installing both the k8s and gcsfs libs may cause authentication issues
88
+ - Current workaround: `uv pip uninstall gcsfs`
89
+
90
+ ## Development Roadmap
91
+
92
+ ### Horizon 1
93
+ - [x] Package and deploy to pypi
94
+ - [x] Submitting to cluster
95
+ - [ ] Provide image building guidelines
96
+ - [ ] Kedro MLFlow plugin shows nice way of consuming "config", argo.yml
97
+
98
+ ### Horizon 2
99
+ - [ ] Hardware configuration
100
+
101
+ ### Horizon 3 (making feature complete)
102
+ - [x] Fusing pipelines
103
+ - [x] Set memory datasets during fusing
104
+ - [ ] Allow registering custom templates, e.g., neo4j
105
+ - [ ] Allow environment variables
106
+
107
+ ### Horizon 4 (Open sourcing)
108
+ - [ ] Complete feature set
109
+
110
+ ## Contributing
111
+
112
+ Contributions are welcome! Please feel free to submit a Pull Request.
113
+
114
+ ## License
115
+
116
+ MIT License - see LICENSE file for details
@@ -0,0 +1,87 @@
1
+ # kedro-argo
2
+
3
+ A Kedro plugin for running pipelines on Argo Workflows in Kubernetes clusters.
4
+
5
+ ## Features
6
+
7
+ - ✅ Submit Kedro pipelines to Argo Workflows
8
+ - ✅ Fuse pipeline nodes for optimized execution
9
+ - ✅ Memory dataset management during fusing
10
+ - 🚧 Image building guidelines
11
+ - 🚧 Hardware configuration support
12
+ - 🚧 Custom template registration
13
+
14
+ ## Installation
15
+
16
+ ```bash
17
+ pip install argo-kedro
18
+ ```
19
+
20
+ ## Prerequisites
21
+
22
+ - Kubernetes cluster with Argo Workflows installed
23
+ - `kubeconfig` configured correctly
24
+ - Argo Workflows namespace set up
25
+
26
+ ## Quick Start
27
+
28
+ ### 1. Configure your Kedro project
29
+
30
+ Add Argo configuration to your Kedro project (e.g., in `conf/base/argo.yml`).
31
+
32
+ ### 2. Use the CLI
33
+
34
+ ```bash
35
+ # Submit pipeline to Argo
36
+ kedro argo submit
37
+
38
+ # Other commands
39
+ kedro argo --help
40
+ ```
41
+
42
+ ## Usage
43
+
44
+ - Ensure kubeconfig is set correctly.
45
+ - Ensure Argo installed correctly on cluster and namespace present.
46
+
47
+ ## Project setup
48
+
49
+ - Distinguish new cloud environment for running remotely
50
+
51
+ ## Current Assumptions
52
+
53
+ - For image building, we assume the user enters the path to a valid GAR repository, which the cluster is assumed to have permissions to
54
+ - We work with the `latest` tag only, as Argo ensures to apply `imagePullPolicy: Always` in that case
55
+
56
+ ## Known Issues
57
+
58
+ - Installing both the k8s and gcfs libs may cause authentication issues
59
+ - Current workaround: `uv pip uninstall gcsfs`
60
+
61
+ ## Development Roadmap
62
+
63
+ ### Horizon 1
64
+ - [x] Package and deploy to pypi
65
+ - [x] Submitting to cluster
66
+ - [ ] Provide image building guidelines
67
+ - [ ] Kedro MLFlow plugin shows nice way of consuming "config", argo.yml
68
+
69
+ ### Horizon 2
70
+ - [ ] Hardware configuration
71
+
72
+ ### Horizon 3 (making feature complete)
73
+ - [x] Fusing pipelines
74
+ - [x] Set memory datasets during fusing
75
+ - [ ] Allow registering custom templates, e.g., neo4j
76
+ - [ ] Allow environment variables
77
+
78
+ ### Horizon 4 (Open sourcing)
79
+ - [ ] Complete feature set
80
+
81
+ ## Contributing
82
+
83
+ Contributions are welcome! Please feel free to submit a Pull Request.
84
+
85
+ ## License
86
+
87
+ MIT License - see LICENSE file for details
@@ -0,0 +1,116 @@
1
+ Metadata-Version: 2.4
2
+ Name: argo-kedro
3
+ Version: 0.1.1
4
+ Summary: Kedro plugin for running pipelines on Argo Workflows
5
+ Author-email: Laurens Vijnck <laurens@everycure.org>, Nelson Alfonso <nelson@everycure.org>
6
+ License: MIT
7
+ Project-URL: Homepage, https://github.com/everycure-org/kedro-argo
8
+ Project-URL: Repository, https://github.com/everycure-org/kedro-argo
9
+ Project-URL: Issues, https://github.com/everycure-org/kedro-argo/issues
10
+ Keywords: kedro,argo,workflows,argo workflows,kubernetes,pipeline
11
+ Classifier: Development Status :: 3 - Alpha
12
+ Classifier: Intended Audience :: Developers
13
+ Classifier: License :: OSI Approved :: MIT License
14
+ Classifier: Programming Language :: Python :: 3
15
+ Classifier: Programming Language :: Python :: 3.10
16
+ Classifier: Programming Language :: Python :: 3.11
17
+ Classifier: Programming Language :: Python :: 3.12
18
+ Classifier: Programming Language :: Python :: 3.13
19
+ Classifier: Programming Language :: Python :: 3.14
20
+ Classifier: Framework :: Kedro
21
+ Classifier: Topic :: Software Development :: Libraries :: Python Modules
22
+ Requires-Python: >=3.10
23
+ Description-Content-Type: text/markdown
24
+ License-File: LICENSE
25
+ Requires-Dist: kedro
26
+ Requires-Dist: pyyaml>=6.0.2
27
+ Requires-Dist: jinja2>=3.0.0
28
+ Dynamic: license-file
29
+
30
+ # kedro-argo
31
+
32
+ A Kedro plugin for running pipelines on Argo Workflows in Kubernetes clusters.
33
+
34
+ ## Features
35
+
36
+ - ✅ Submit Kedro pipelines to Argo Workflows
37
+ - ✅ Fuse pipeline nodes for optimized execution
38
+ - ✅ Memory dataset management during fusing
39
+ - 🚧 Image building guidelines
40
+ - 🚧 Hardware configuration support
41
+ - 🚧 Custom template registration
42
+
43
+ ## Installation
44
+
45
+ ```bash
46
+ pip install argo-kedro
47
+ ```
48
+
49
+ ## Prerequisites
50
+
51
+ - Kubernetes cluster with Argo Workflows installed
52
+ - `kubeconfig` configured correctly
53
+ - Argo Workflows namespace set up
54
+
55
+ ## Quick Start
56
+
57
+ ### 1. Configure your Kedro project
58
+
59
+ Add Argo configuration to your Kedro project (e.g., in `conf/base/argo.yml`).
60
+
61
+ ### 2. Use the CLI
62
+
63
+ ```bash
64
+ # Submit pipeline to Argo
65
+ kedro argo submit
66
+
67
+ # Other commands
68
+ kedro argo --help
69
+ ```
70
+
71
+ ## Usage
72
+
73
+ - Ensure kubeconfig is set correctly.
74
+ - Ensure Argo Workflows is installed correctly on the cluster and the target namespace is present.
75
+
76
+ ## Project setup
77
+
78
+ - Distinguish new cloud environment for running remotely
79
+
80
+ ## Current Assumptions
81
+
82
+ - For image building, we assume the user enters the path to a valid GAR repository, which the cluster is assumed to have permissions to
83
+ - We work with the `latest` tag only, as Argo ensures to apply `imagePullPolicy: Always` in that case
84
+
85
+ ## Known Issues
86
+
87
+ - Installing both the k8s and gcsfs libs may cause authentication issues
88
+ - Current workaround: `uv pip uninstall gcsfs`
89
+
90
+ ## Development Roadmap
91
+
92
+ ### Horizon 1
93
+ - [x] Package and deploy to pypi
94
+ - [x] Submitting to cluster
95
+ - [ ] Provide image building guidelines
96
+ - [ ] Kedro MLFlow plugin shows nice way of consuming "config", argo.yml
97
+
98
+ ### Horizon 2
99
+ - [ ] Hardware configuration
100
+
101
+ ### Horizon 3 (making feature complete)
102
+ - [x] Fusing pipelines
103
+ - [x] Set memory datasets during fusing
104
+ - [ ] Allow registering custom templates, e.g., neo4j
105
+ - [ ] Allow environment variables
106
+
107
+ ### Horizon 4 (Open sourcing)
108
+ - [ ] Complete feature set
109
+
110
+ ## Contributing
111
+
112
+ Contributions are welcome! Please feel free to submit a Pull Request.
113
+
114
+ ## License
115
+
116
+ MIT License - see LICENSE file for details
@@ -0,0 +1,21 @@
1
+ .gitignore
2
+ .python-version
3
+ LICENSE
4
+ MANIFEST.in
5
+ README.md
6
+ pyproject.toml
7
+ uv.lock
8
+ argo_kedro.egg-info/PKG-INFO
9
+ argo_kedro.egg-info/SOURCES.txt
10
+ argo_kedro.egg-info/dependency_links.txt
11
+ argo_kedro.egg-info/entry_points.txt
12
+ argo_kedro.egg-info/requires.txt
13
+ argo_kedro.egg-info/top_level.txt
14
+ kedro_argo/framework/__init__.py
15
+ kedro_argo/framework/cli/__init__.py
16
+ kedro_argo/framework/cli/cli.py
17
+ kedro_argo/pipeline/__init__.py
18
+ kedro_argo/pipeline/fused_pipeline.py
19
+ kedro_argo/runners/__init__.py
20
+ kedro_argo/runners/fuse_runner.py
21
+ kedro_argo/templates/argo_wf_spec.tmpl
@@ -0,0 +1,5 @@
1
+ [kedro.global_commands]
2
+ run = kedro_argo.framework.cli.cli:cli
3
+
4
+ [kedro.project_commands]
5
+ argo = kedro_argo.framework.cli.cli:commands
@@ -0,0 +1,3 @@
1
+ kedro
2
+ pyyaml>=6.0.2
3
+ jinja2>=3.0.0
File without changes
File without changes
@@ -0,0 +1,242 @@
1
+ import re
2
+ from pathlib import Path
3
+ from typing import Any, Dict, List, Iterable
4
+ from logging import getLogger
5
+
6
+ import click
7
+ import yaml
8
+ from kubernetes import config
9
+ from kubernetes.dynamic import DynamicClient
10
+ from jinja2 import Environment, FileSystemLoader
11
+ from kedro.framework.cli.utils import CONTEXT_SETTINGS, KedroCliError
12
+ from kedro.framework.session import KedroSession
13
+ from kedro.framework.cli.project import (
14
+ ASYNC_ARG_HELP,
15
+ CONF_SOURCE_HELP,
16
+ FROM_INPUTS_HELP,
17
+ FROM_NODES_HELP,
18
+ LOAD_VERSION_HELP,
19
+ NODE_ARG_HELP,
20
+ PARAMS_ARG_HELP,
21
+ PIPELINE_ARG_HELP,
22
+ RUNNER_ARG_HELP,
23
+ TAG_ARG_HELP,
24
+ TO_NODES_HELP,
25
+ TO_OUTPUTS_HELP,
26
+ project_group,
27
+ )
28
+ from kedro.framework.project import pipelines as kedro_pipelines
29
+ from kedro.pipeline import Pipeline
30
+ from kedro.pipeline.node import Node
31
+ from kedro.runner.sequential_runner import SequentialRunner
32
+ from kedro_argo.runners.fuse_runner import FusedRunner
33
+
34
+ LOGGER = getLogger(__name__)
35
+ ARGO_TEMPLATES_DIR_PATH = Path(__file__).parent.parent.parent / "templates"
36
+
37
+
38
+ @click.group(context_settings=CONTEXT_SETTINGS)
39
+ def cli():
40
+ pass
41
+
42
+ @cli.command(name="run")
43
+ @click.option("--pipeline", "-p", type=str, default="__default__", help="Name of the pipeline to execute")
44
+ @click.option("--env", "-e", type=str, default=None, help="Kedro environment to run the pipeline in")
45
+ @click.option("--config", "-c", type=str, multiple=True, help="Extra config to pass to KedroContext")
46
+ @click.option("--params", type=str, multiple=True, help="Override parameters")
47
+ @click.option("--tags", "-t", type=str, multiple=True, help=TAG_ARG_HELP)
48
+ @click.option("--nodes", "-n", type=str, multiple=True, help="Run only nodes with specified names")
49
+ @click.option("--to-nodes", type=str, multiple=True, help="Run a sub-pipeline up to certain nodes")
50
+ @click.option("--from-nodes", type=str, multiple=True, help="Run a sub-pipeline starting from certain nodes")
51
+ @click.option("--from-inputs", type=str, multiple=True, help="Run a sub-pipeline starting from nodes that produce these inputs")
52
+ @click.option("--to-outputs", type=str, multiple=True, help="Run a sub-pipeline up to nodes that produce these outputs")
53
+ @click.option("--load-version", type=str, multiple=True, help="Specify a particular dataset version")
54
+ @click.option("--namespaces", type=str, multiple=True, help="Namespaces of the pipeline")
55
+ @click.pass_obj
56
+ def _run_command_impl(
57
+ ctx,
58
+ pipeline: str,
59
+ env: str,
60
+ config: tuple,
61
+ params: tuple,
62
+ tags: list[str],
63
+ nodes: tuple,
64
+ to_nodes: tuple,
65
+ from_nodes: tuple,
66
+ from_inputs: tuple,
67
+ to_outputs: tuple,
68
+ load_version: tuple,
69
+ namespaces: Iterable[str],
70
+ ):
71
+ """Run the pipeline with the FusedRunner."""
72
+
73
+ LOGGER.warning(f"Using plugin entrypoint")
74
+
75
+ load_versions = None
76
+ if load_version:
77
+ load_versions = {}
78
+ for version_spec in load_version:
79
+ if ":" in version_spec:
80
+ dataset, version = version_spec.split(":", 1)
81
+ load_versions[dataset] = version
82
+
83
+ conf_source = getattr(ctx, "conf_source", None)
84
+ env_value = env or getattr(ctx, "env", None)
85
+
86
+ with KedroSession.create(
87
+ env=env_value,
88
+ conf_source=conf_source,
89
+ ) as session:
90
+
91
+ session.run(
92
+ pipeline_name=pipeline,
93
+ tags=tags,
94
+ runner=FusedRunner(pipeline_name=pipeline),
95
+ node_names=list(nodes) if nodes else None,
96
+ from_nodes=list(from_nodes) if from_nodes else None,
97
+ to_nodes=list(to_nodes) if to_nodes else None,
98
+ from_inputs=list(from_inputs) if from_inputs else None,
99
+ to_outputs=list(to_outputs) if to_outputs else None,
100
+ load_versions=load_versions,
101
+ namespaces=namespaces,
102
+ )
103
+
104
+ @click.group(name="argo")
105
+ def commands():
106
+ pass
107
+
108
+ @commands.command(name="submit")
109
+ @click.option("--pipeline", "-p", type=str, default="__default__", help="Specify which pipeline to execute")
110
+ @click.option("--environment", "-e", type=str, default="base", help="Kedro environment to execute in")
111
+ @click.option("--image", type=str, required=True, help="Image to execute")
112
+ @click.option("--namespace", "-n", type=str, required=True, help="Namespace to execute in")
113
+ @click.pass_obj
114
+ def submit(
115
+ ctx,
116
+ pipeline: str,
117
+ image: str,
118
+ namespace: str,
119
+ environment: str
120
+ ):
121
+ """Submit the pipeline to Argo."""
122
+ LOGGER.info("Loading spec template..")
123
+
124
+ loader = FileSystemLoader(searchpath=ARGO_TEMPLATES_DIR_PATH)
125
+ template_env = Environment(loader=loader, trim_blocks=True, lstrip_blocks=True)
126
+ template = template_env.get_template("argo_wf_spec.tmpl")
127
+
128
+ pipeline_tasks = get_argo_dag(kedro_pipelines[pipeline])
129
+
130
+ LOGGER.info("Rendering Argo spec...")
131
+
132
+ # Render the template
133
+ rendered_template = template.render(
134
+ pipeline_tasks=[task.to_dict() for task in pipeline_tasks.values()],
135
+ pipeline_name=pipeline,
136
+ image=image,
137
+ namespace=namespace,
138
+ environment=environment
139
+ )
140
+
141
+ # Load as yaml
142
+ yaml_data = yaml.safe_load(rendered_template)
143
+ yaml_without_anchors = yaml.dump(yaml_data, sort_keys=False, default_flow_style=False)
144
+ save_argo_template(
145
+ yaml_without_anchors,
146
+ )
147
+
148
+ # Use kubeconfig to submit to kubernetes
149
+ config.load_kube_config()
150
+ client = DynamicClient(config.new_client_from_config())
151
+
152
+ resource = client.resources.get(
153
+ api_version=yaml_data["apiVersion"],
154
+ kind=yaml_data["kind"],
155
+ )
156
+
157
+ resource.create(
158
+ body=yaml_data,
159
+ namespace=namespace
160
+ )
161
+
162
+
163
+ def save_argo_template(argo_template: str) -> str:
164
+ file_path = Path("templates") / "argo-workflow-template.yml"
165
+ with open(file_path, "w") as f:
166
+ f.write(argo_template)
167
+ return str(file_path)
168
+
169
+
170
+ class ArgoTask:
171
+ """Class to model an Argo task.
172
+
173
+ Argo's operating model slightly differs from Kedro's, i.e., while Kedro uses dataset
174
+ dependencies to model relationships, Argo uses task dependencies."""
175
+
176
+ def __init__(self, node: Node):
177
+ self._node = node
178
+ self._parents = []
179
+
180
+ @property
181
+ def node(self):
182
+ return self._node
183
+
184
+ def add_parents(self, nodes: List[Node]):
185
+ self._parents.extend(nodes)
186
+
187
+ def to_dict(self):
188
+ return {
189
+ "name": clean_name(self._node.name),
190
+ "nodes": self._node.name,
191
+ "deps": [clean_name(parent.name) for parent in sorted(self._parents)],
192
+ }
193
+
194
+
195
+ def get_argo_dag(pipeline: Pipeline) -> List[Dict[str, Any]]:
196
+ """Function to convert the Kedro pipeline into Argo Tasks. The function
197
+ iterates the nodes of the pipeline and generates Argo tasks with dependencies.
198
+ These dependencies are inferred based on the input and output datasets for
199
+ each node.
200
+
201
+ NOTE: This function is now agnostic to the fact that nodes might be fused. The nodes
202
+ returned as part of the pipeline may optionally contain FusedNodes, which have correct
203
+ inputs and outputs for the perspective of the Argo Task.
204
+ """
205
+ tasks = {}
206
+
207
+ # The `grouped_nodes` property returns the nodes list, in a topological order,
208
+ # allowing us to easily translate the Kedro DAG to an Argo WF.
209
+ for group in pipeline.grouped_nodes:
210
+ for target_node in group:
211
+ task = ArgoTask(target_node)
212
+ task.add_parents(
213
+ [
214
+ parent.node
215
+ for parent in tasks.values()
216
+ if set(clean_dependencies(target_node.inputs)) & set(clean_dependencies(parent.node.outputs))
217
+ ]
218
+ )
219
+
220
+ tasks[target_node.name] = task
221
+
222
+ return tasks
223
+
224
+
225
+ def clean_name(name: str) -> str:
226
+ """Function to clean the node name.
227
+
228
+ Args:
229
+ name: name of the node
230
+ Returns:
231
+ Clean node name, according to Argo's requirements
232
+ """
233
+ return re.sub(r"[\W_]+", "-", name).strip("-")
234
+
235
+
236
+ def clean_dependencies(elements) -> List[str]:
237
+ """Function to clean node dependencies.
238
+
239
+ Operates by removing `params:` from the list and dismissing
240
+ the transcoding operator.
241
+ """
242
+ return [el.split("@")[0] for el in elements if not el.startswith("params:")]
@@ -0,0 +1,3 @@
1
+ from .fused_pipeline import FusedPipeline
2
+
3
+ __all__ = ["FusedPipeline", ]