runnable-0.1.0-py3-none-any.whl → runnable-0.3.0-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (71)
  1. runnable/__init__.py +34 -0
  2. runnable/catalog.py +141 -0
  3. runnable/cli.py +272 -0
  4. runnable/context.py +34 -0
  5. runnable/datastore.py +687 -0
  6. runnable/defaults.py +182 -0
  7. runnable/entrypoints.py +448 -0
  8. runnable/exceptions.py +94 -0
  9. runnable/executor.py +421 -0
  10. runnable/experiment_tracker.py +139 -0
  11. runnable/extensions/catalog/__init__.py +21 -0
  12. runnable/extensions/catalog/file_system/__init__.py +0 -0
  13. runnable/extensions/catalog/file_system/implementation.py +227 -0
  14. runnable/extensions/catalog/k8s_pvc/__init__.py +0 -0
  15. runnable/extensions/catalog/k8s_pvc/implementation.py +16 -0
  16. runnable/extensions/catalog/k8s_pvc/integration.py +59 -0
  17. runnable/extensions/executor/__init__.py +725 -0
  18. runnable/extensions/executor/argo/__init__.py +0 -0
  19. runnable/extensions/executor/argo/implementation.py +1183 -0
  20. runnable/extensions/executor/argo/specification.yaml +51 -0
  21. runnable/extensions/executor/k8s_job/__init__.py +0 -0
  22. runnable/extensions/executor/k8s_job/implementation_FF.py +259 -0
  23. runnable/extensions/executor/k8s_job/integration_FF.py +69 -0
  24. runnable/extensions/executor/local/__init__.py +0 -0
  25. runnable/extensions/executor/local/implementation.py +70 -0
  26. runnable/extensions/executor/local_container/__init__.py +0 -0
  27. runnable/extensions/executor/local_container/implementation.py +361 -0
  28. runnable/extensions/executor/mocked/__init__.py +0 -0
  29. runnable/extensions/executor/mocked/implementation.py +189 -0
  30. runnable/extensions/experiment_tracker/__init__.py +0 -0
  31. runnable/extensions/experiment_tracker/mlflow/__init__.py +0 -0
  32. runnable/extensions/experiment_tracker/mlflow/implementation.py +94 -0
  33. runnable/extensions/nodes.py +655 -0
  34. runnable/extensions/run_log_store/__init__.py +0 -0
  35. runnable/extensions/run_log_store/chunked_file_system/__init__.py +0 -0
  36. runnable/extensions/run_log_store/chunked_file_system/implementation.py +106 -0
  37. runnable/extensions/run_log_store/chunked_k8s_pvc/__init__.py +0 -0
  38. runnable/extensions/run_log_store/chunked_k8s_pvc/implementation.py +21 -0
  39. runnable/extensions/run_log_store/chunked_k8s_pvc/integration.py +61 -0
  40. runnable/extensions/run_log_store/db/implementation_FF.py +157 -0
  41. runnable/extensions/run_log_store/db/integration_FF.py +0 -0
  42. runnable/extensions/run_log_store/file_system/__init__.py +0 -0
  43. runnable/extensions/run_log_store/file_system/implementation.py +136 -0
  44. runnable/extensions/run_log_store/generic_chunked.py +541 -0
  45. runnable/extensions/run_log_store/k8s_pvc/__init__.py +0 -0
  46. runnable/extensions/run_log_store/k8s_pvc/implementation.py +21 -0
  47. runnable/extensions/run_log_store/k8s_pvc/integration.py +56 -0
  48. runnable/extensions/secrets/__init__.py +0 -0
  49. runnable/extensions/secrets/dotenv/__init__.py +0 -0
  50. runnable/extensions/secrets/dotenv/implementation.py +100 -0
  51. runnable/extensions/secrets/env_secrets/__init__.py +0 -0
  52. runnable/extensions/secrets/env_secrets/implementation.py +42 -0
  53. runnable/graph.py +464 -0
  54. runnable/integration.py +205 -0
  55. runnable/interaction.py +404 -0
  56. runnable/names.py +546 -0
  57. runnable/nodes.py +501 -0
  58. runnable/parameters.py +183 -0
  59. runnable/pickler.py +102 -0
  60. runnable/sdk.py +472 -0
  61. runnable/secrets.py +95 -0
  62. runnable/tasks.py +395 -0
  63. runnable/utils.py +630 -0
  64. runnable-0.3.0.dist-info/METADATA +437 -0
  65. runnable-0.3.0.dist-info/RECORD +69 -0
  66. {runnable-0.1.0.dist-info → runnable-0.3.0.dist-info}/WHEEL +1 -1
  67. runnable-0.3.0.dist-info/entry_points.txt +44 -0
  68. runnable-0.1.0.dist-info/METADATA +0 -16
  69. runnable-0.1.0.dist-info/RECORD +0 -6
  70. /runnable/{.gitkeep → extensions/__init__.py} +0 -0
  71. {runnable-0.1.0.dist-info → runnable-0.3.0.dist-info}/LICENSE +0 -0
runnable/extensions/secrets/dotenv/implementation.py ADDED
@@ -0,0 +1,100 @@
+ import logging
+ import os
+
+ from runnable import defaults, exceptions, utils
+ from runnable.secrets import BaseSecrets
+
+ logger = logging.getLogger(defaults.LOGGER_NAME)
+
+
+ class DotEnvSecrets(BaseSecrets):
+     """
+     A secret manager which uses .env files for secrets.
+
+     We recommend this secrets manager only for local development; it should not be used for anything close to
+     production.
+     """
+
+     service_name: str = "dotenv"
+     location: str = defaults.DOTENV_FILE_LOCATION
+     secrets: dict = {}
+
+     @property
+     def secrets_location(self):
+         """
+         Return the location of the .env file.
+         If the user has not overridden it, it defaults to the .env file in the project root.
+
+         Returns:
+             str: The location of the secrets file
+         """
+         return self.location
+
+     def _load_secrets(self):
+         """
+         We assume that a dotenv file is of the format,
+             key=value -> secrets[key]='value'
+             key=value # comment -> secrets[key]='value'
+             key=value2 # comment -> secrets[key]='value2'
+
+         Any of the above formats may be prefixed with export or set.
+
+         We strip the secret value of any empty spaces at the start and end.
+
+         Raises:
+             Exception: If the file at secrets_location is not found.
+             Exception: If the secrets are not formatted correctly.
+         """
+         # It was loaded in a previous call and need not be reloaded
+         if self.secrets:
+             return
+
+         secrets_location = self.secrets_location
+         if not utils.does_file_exist(secrets_location):
+             raise Exception(f"Did not find the secrets file in {secrets_location}")
+
+         with open(secrets_location, "r") as fr:
+             for secret_line in fr:
+                 # The order of removing fluff around the expression:
+                 #   the new line
+                 #   the comment
+                 #   the white space
+                 #   any export or set in front of the key and any spaces after that
+
+                 secret_line = secret_line.strip(os.linesep).split("#")[0].strip()
+
+                 if secret_line == "":
+                     continue
+
+                 secret_line = utils.remove_prefix(secret_line, prefix="export").strip()
+                 secret_line = utils.remove_prefix(secret_line, prefix="EXPORT").strip()
+                 secret_line = utils.remove_prefix(secret_line, prefix="set").strip()
+                 secret_line = utils.remove_prefix(secret_line, prefix="SET").strip()
+
+                 data = secret_line.split("=")
+                 if len(data) != 2:
+                     raise Exception("A secret should be of format, secret_name=secret_value[# any comment]")
+
+                 key, value = data
+                 self.secrets[key] = value.strip().strip('"').strip(os.linesep)
+
+     def get(self, name: str = "", **kwargs) -> str:
+         """
+         Get a secret of name from the secrets file.
+
+
+         Args:
+             name (str): The name of the secret to retrieve
+
+         Raises:
+             Exception: If the secret by the name is not found.
+
+         Returns:
+             str: The value of the secret
+         """
+         self._load_secrets()
+
+         if name in self.secrets:
+             return self.secrets[name]
+
+         raise exceptions.SecretNotFoundError(secret_name=name, secret_setting=self.secrets_location)
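For orientation, a minimal usage sketch of the dotenv handler added above. The direct instantiation and the .env path are illustrative assumptions; in a pipeline the secrets handler is normally selected through the runnable configuration rather than constructed by hand.

    # Sketch only: assumes a local .env file with a line such as
    #   export API_KEY="abc123"  # comments and export/set prefixes are stripped
    from runnable.extensions.secrets.dotenv.implementation import DotEnvSecrets

    secrets = DotEnvSecrets(location=".env")  # location defaults to defaults.DOTENV_FILE_LOCATION
    print(secrets.get("API_KEY"))             # raises SecretNotFoundError if the key is absent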
runnable/extensions/secrets/env_secrets/__init__.py: File without changes
runnable/extensions/secrets/env_secrets/implementation.py ADDED
@@ -0,0 +1,42 @@
+ import logging
+ import os
+
+ from runnable import defaults, exceptions
+ from runnable.secrets import BaseSecrets
+
+ logger = logging.getLogger(defaults.LOGGER_NAME)
+
+
+ class EnvSecretsManager(BaseSecrets):
+     """
+     A secret manager via environment variables.
+
+     This secret manager raises a SecretNotFoundError if the key is not found in the environment.
+     """
+
+     service_name: str = "env-secrets-manager"
+     prefix: str = ""
+     suffix: str = ""
+
+     def get(self, name: str = "", **kwargs) -> str:
+         """
+         If a name is provided, we look for that in the environment.
+         If an environment variable by that name is not found, we raise an Exception.
+
+         The name is wrapped with the configured prefix and suffix before the lookup.
+
+         Args:
+             name (str): The name of the secret to retrieve
+
+         Raises:
+             Exception: If the secret by the name is not found.
+
+         Returns:
+             str: The value of the secret
+         """
+
+         try:
+             return os.environ[f"{self.prefix}{name}{self.suffix}"]
+         except KeyError as _e:
+             logger.exception(f"Secret {self.prefix}{name}{self.suffix} not found in environment")
+             raise exceptions.SecretNotFoundError(secret_name=name, secret_setting="environment") from _e
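A small sketch of how the environment-based manager composes its lookup key. The prefix value and the environment variable are invented for illustration, and the direct instantiation is again only a convenience for the example.

    import os

    from runnable.extensions.secrets.env_secrets.implementation import EnvSecretsManager

    os.environ["RUNNABLE_API_KEY"] = "s3cr3t"        # hypothetical variable
    manager = EnvSecretsManager(prefix="RUNNABLE_")  # lookup key is prefix + name + suffix
    assert manager.get("API_KEY") == "s3cr3t"
    # manager.get("MISSING") would raise SecretNotFoundError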
runnable/graph.py ADDED
@@ -0,0 +1,464 @@
+ from __future__ import annotations
+
+ import logging
+ from typing import Any, Dict, List, Optional, cast
+
+ from pydantic import BaseModel, Field, SerializeAsAny
+ from stevedore import driver
+
+ from runnable import defaults, exceptions
+
+ logger = logging.getLogger(defaults.LOGGER_NAME)
+ logging.getLogger("stevedore").setLevel(logging.CRITICAL)
+
+
+ class Graph(BaseModel):
+     """
+     A class representing a graph.
+
+     The representation is similar to AWS Step Functions.
+     We have nodes, and traversal is based on the start_at and on_failure definitions of the individual nodes of the graph.
+     """
+
+     start_at: str
+     name: str = ""
+     description: Optional[str] = ""
+     internal_branch_name: str = Field(default="", exclude=True)
+     nodes: SerializeAsAny[Dict[str, "BaseNode"]] = Field(default_factory=dict, serialization_alias="steps")
+
+     def get_node_by_name(self, name: str) -> "BaseNode":
+         """
+         Return the Node object by the name.
+         The name is always relative to the graph.
+
+         Args:
+             name (str): Name of the node
+
+         Raises:
+             NodeNotFoundError: If a node of that name is not found in the graph
+
+         Returns:
+             Node: The Node object by name
+         """
+         for key, value in self.nodes.items():
+             if key == name:
+                 return value
+         raise exceptions.NodeNotFoundError(name)
+
+     def get_node_by_internal_name(self, internal_name: str) -> "BaseNode":
+         """
+         Return the node by the internal name of the node.
+         The internal name uses the dot path convention.
+         This method only searches the nodes of this graph and does not search sub-graphs.
+
+         Args:
+             internal_name (str): The internal name of the node, follows a dot path convention
+
+         Raises:
+             NodeNotFoundError: If a node of that internal name is not found in the graph
+
+         Returns:
+             Node: The Node object by the name
+         """
+         for _, value in self.nodes.items():
+             if value.internal_name == internal_name:
+                 return value
+         raise exceptions.NodeNotFoundError(internal_name)
+
+     def __str__(self):  # pragma: no cover
+         """
+         Return a string representation of the graph.
+         """
+         node_str = ", ".join([x.name for x in list(self.nodes.values())])
+         return f"Starts at: {self.start_at} and {node_str}"
+
+     def add_node(self, node: "BaseNode"):
+         """
+         Add a node to the nodes of the graph.
+
+         Args:
+             node (object): The node to add
+         """
+         self.nodes[node.name] = node
+
+     def check_graph(self):
+         """
+         Validate the graph to make sure:
+             1. All the neighbors of nodes are present.
+             2. Detection of cycles.
+             3. Confirming that the start_at is actually a node present in the graph.
+             4. Detection of one and only one success node.
+             5. Detection of one and only one fail node.
+             6. Missing nodes if referred to by next or on_failure.
+
+         Raises:
+             Exception: If any of the validations fail.
+         """
+         messages = []
+
+         missing_nodes = self.missing_neighbors()
+         if missing_nodes:
+             message = "The graph has references to nodes (next, on_failure) that are missing from the DAG:\n"
+             message += f'{", ".join(missing_nodes)}'
+             logger.error(message)
+             messages.append(message)
+
+         if not missing_nodes:
+             if not self.is_dag():
+                 message = "The DAG is cyclic or does not reach an end state"
+                 logger.error(message)
+                 messages.append(message)
+
+         if not self.is_start_node_present():
+             message = "The start node is not part of the graph"
+             logger.error(message)
+             messages.append(message)
+
+         if not self.success_node_validation():
+             message = "There should be exactly one success node"
+             logger.error(message)
+             messages.append(message)
+
+         if not self.fail_node_validation():
+             message = "There should be exactly one fail node"
+             logger.error(message)
+             messages.append(message)
+
+         if messages:
+             raise Exception(", ".join(messages))
+
+     def get_success_node(self) -> "BaseNode":
+         """
+         Return the success node of the graph.
+
+         Raises:
+             Exception: If no success node is present in the graph
+
+         Returns:
+             object: The success node
+         """
+         for _, value in self.nodes.items():
+             if value.node_type == "success":
+                 return value
+         raise Exception("No success node defined")
+
+     def get_fail_node(self) -> "BaseNode":
+         """
+         Return the fail node of the graph.
+
+         Raises:
+             Exception: If no fail node is present in the graph
+
+         Returns:
+             object: The fail node of the graph
+         """
+         for _, value in self.nodes.items():
+             if value.node_type == "fail":
+                 return value
+         raise Exception("No fail node defined")
+
+     def is_start_node_present(self) -> bool:
+         """
+         A check to ensure the start_at is part of the graph.
+
+         Returns:
+             bool: True if start_at is one of the nodes, False otherwise
+         """
+         try:
+             self.get_node_by_name(self.start_at)
+             return True
+         except exceptions.NodeNotFoundError:
+             logger.exception("Could not find the node")
+             return False
+
+     def success_node_validation(self) -> bool:
+         """
+         Check to ensure there is one and only one success node in the graph.
+
+         Returns:
+             bool: True if there is only one, False otherwise
+         """
+         node_count = 0
+         for _, value in self.nodes.items():
+             if value.node_type == "success":
+                 node_count += 1
+         if node_count == 1:
+             return True
+         return False
+
+     def fail_node_validation(self) -> bool:
+         """
+         Check to make sure there is one and only one fail node in the graph.
+
+         Returns:
+             bool: True if there is one and only one fail node, False otherwise
+         """
+         node_count = 0
+         for _, value in self.nodes.items():
+             if value.node_type == "fail":
+                 node_count += 1
+         if node_count == 1:
+             return True
+         return False
+
+     def is_dag(self) -> bool:
+         """
+         Determines whether the graph is acyclic and directed.
+
+         Returns:
+             bool: Returns True if it is directed and acyclic.
+         """
+         visited = {n: False for n in self.nodes.keys()}
+         recstack = {n: False for n in self.nodes.keys()}
+
+         for name, node in self.nodes.items():
+             if not visited[name]:
+                 if self.is_cyclic_util(node, visited, recstack):
+                     return False
+         return True
+
+     def is_cyclic_util(self, node: "BaseNode", visited: Dict[str, bool], recstack: Dict[str, bool]) -> bool:
+         """
+         Recursive utility that determines whether a node and its neighbors form a cycle. Used by the is_dag method.
+
+         Args:
+             node (BaseNode): The node to check
+             visited (dict): Dictionary storing which nodes have been checked
+             recstack (dict): Stores what nodes have been visited recursively
+
+         Returns:
+             bool: True if cyclic.
+         """
+         visited[node.name] = True
+         recstack[node.name] = True
+
+         neighbors = node._get_neighbors()
+         for neighbor in neighbors:
+             neighbor_node = self.get_node_by_name(neighbor)
+             if not visited[neighbor]:
+                 if self.is_cyclic_util(neighbor_node, visited, recstack):
+                     return True
+             elif recstack[neighbor]:
+                 return True
+
+         recstack[node.name] = False
+         return False
+
+     def missing_neighbors(self) -> List[str]:
+         """
+         Iterates through the nodes, gets their connecting neighbors and checks that they exist in the graph.
+
+         Returns:
+             list: List of the missing nodes. Empty list if all neighbors are in the graph.
+         """
+         missing_nodes = []
+         for _, node in self.nodes.items():
+             neighbors = node._get_neighbors()
+             for neighbor in neighbors:
+                 try:
+                     self.get_node_by_name(neighbor)
+                 except exceptions.NodeNotFoundError:
+                     logger.exception(f"Could not find the node {neighbor}")
+                     if neighbor not in missing_nodes:
+                         missing_nodes.append(neighbor)
+         return missing_nodes
+
+     def add_terminal_nodes(
+         self,
+         success_node_name: str = "success",
+         failure_node_name: str = "fail",
+         internal_branch_name: str = "",
+     ):
+         """
+         Add the success and fail nodes to the graph.
+
+         Args:
+             success_node_name (str, optional): The name of the success node. Defaults to 'success'.
+             failure_node_name (str, optional): The name of the failure node. Defaults to 'fail'.
+         """
+         success_step_config = {"type": "success"}
+         success_node = create_node(
+             success_node_name,
+             step_config=success_step_config,
+             internal_branch_name=internal_branch_name,
+         )
+         fail_step_config = {"type": "fail"}
+         fail_node = create_node(
+             failure_node_name,
+             step_config=fail_step_config,
+             internal_branch_name=internal_branch_name,
+         )
+         self.add_node(success_node)
+         self.add_node(fail_node)
+
+
+ from runnable.nodes import BaseNode  # noqa: E402
+
+ Graph.model_rebuild()
+
+
+ def create_graph(dag_config: Dict[str, Any], internal_branch_name: str = "") -> Graph:
+     """
+     Creates a dag object from the dag definition.
+
+     Composite nodes like map, parallel and dag can have sub-branches which are internally graphs.
+     Use internal_branch_name to fit the right dot path convention.
+
+     Args:
+         dag_config (dict): The dag definition
+         internal_branch_name (str, optional): In case of a sub-graph, the name of the node. Defaults to "".
+
+     Raises:
+         Exception: If the node or graph validation fails.
+
+     Returns:
+         Graph: The created graph object
+     """
+     description: str = dag_config.get("description", None)
+     start_at: str = cast(str, dag_config.get("start_at"))  # Let the start_at be relative to the graph
+
+     graph = Graph(
+         start_at=start_at,
+         description=description,
+         internal_branch_name=internal_branch_name,
+     )
+
+     logger.info(f"Initialized a graph object that starts at {start_at}")
+     for name, step_config in dag_config.get("steps", {}).items():
+         logger.info(f"Adding node {name} with: {step_config}")
+
+         node = create_node(name, step_config=step_config, internal_branch_name=internal_branch_name)
+         graph.add_node(node)
+
+     graph.check_graph()
+
+     return graph
+
+
+ def create_node(name: str, step_config: dict, internal_branch_name: Optional[str] = ""):
+     """
+     Creates a node object from the step configuration.
+
+     Args:
+         name (str): The name of the node
+         step_config (dict): The configuration of the node
+         internal_branch_name (str, optional): If the node belongs to an internal branch. Defaults to "".
+
+     Raises:
+         Exception: If the node type is not supported
+
+     Returns:
+         BaseNode: The created node object
+     """
+     internal_name = name
+     if internal_branch_name:
+         internal_name = internal_branch_name + "." + name
+
+     try:
+         node_type = step_config.pop("type")  # Remove the type as it is not used in node creation.
+         node_mgr: BaseNode = driver.DriverManager(namespace="nodes", name=node_type).driver
+
+         next_node = step_config.pop("next", None)
+
+         if next_node:
+             step_config["next_node"] = next_node
+
+         invoke_kwds = {
+             "name": name,
+             "internal_name": internal_name,
+             "internal_branch_name": internal_branch_name,
+             **step_config,
+         }
+         node = node_mgr.parse_from_config(config=invoke_kwds)
+         return node
+     except KeyError:
+         # type is missing!
+         msg = "The node configuration does not contain the required key 'type'."
+         logger.exception(step_config)
+         raise Exception(msg)
+     except Exception as _e:
+         msg = (
+             f"Could not find the node type {node_type}. Please ensure you have installed "
+             "the extension that provides the node type."
+             "\nCore supports: task, success, fail, parallel, dag, map, stub"
+         )
+         raise Exception(msg) from _e
+
+
+ def search_node_by_internal_name(dag: Graph, internal_name: str):
+     """
+     Given a DAG, search for a node by its internal name.
+
+     The node naming convention follows the dot path naming convention.
+
+     Currently it is implemented to search only against the base dag.
+
+     Args:
+         dag (Graph): The graph to search the node in
+         internal_name (str): The internal name of the node.
+     """
+     # If the node is not part of any branches, then the base graph is where the node belongs
+     dot_path = internal_name.split(".")
+     if len(dot_path) == 1:
+         return dag.get_node_by_internal_name(internal_name), dag
+
+     # Any node internal name always has an odd number of parts when split on "."
+     # e.g. Step, Step.Branch.Step, Step.Branch.Step.Branch.Step etc.
+     current_node = None
+     current_branch = dag
+
+     for i in range(len(dot_path)):
+         if i % 2:
+             # It's odd, so we are in a branch name
+             current_branch = current_node._get_branch_by_name(".".join(dot_path[: i + 1]))  # type: ignore
+             logger.debug(f"Finding step for {internal_name} in branch: {current_branch}")
+         else:
+             # It's even, so we are in a step; we start here!
+             current_node = current_branch.get_node_by_internal_name(".".join(dot_path[: i + 1]))
+             logger.debug(f"Finding {internal_name} in node: {current_node}")
+
+     logger.debug(f"current branch : {current_branch}, current step {current_node}")
+     if current_branch and current_node:
+         return current_node, current_branch
+
+     raise exceptions.NodeNotFoundError(internal_name)
+
+
+ def search_branch_by_internal_name(dag: Graph, internal_name: str):
+     """
+     Given a DAG, search for a branch by its internal name.
+
+     The branch naming convention follows the dot path naming convention.
+
+     Currently it is implemented to search only against the base dag.
+
+     Args:
+         dag (Graph): The graph to search the branch in
+         internal_name (str): The internal name of the branch.
+     """
+     # If the branch is not part of any other branches, then the base graph is where the branch belongs
+     dot_path = internal_name.split(".")
+     if len(dot_path) == 1:
+         return dag
+
+     # Any branch internal name always has an even number of parts when split on "."
+     # e.g. Step.Branch, Step.Branch.Step.Branch etc.
+     current_node = None
+     current_branch = dag
+
+     for i in range(len(dot_path)):
+         if i % 2:
+             # It's odd, so we are in a branch name
+             current_branch = current_node._get_branch_by_name(".".join(dot_path[: i + 1]))  # type: ignore
+             logger.debug(f"Finding step for {internal_name} in branch: {current_branch}")
+
+         else:
+             # It's even, so we are in a step; we start here!
+             current_node = current_branch.get_node_by_internal_name(".".join(dot_path[: i + 1]))
+             logger.debug(f"Finding {internal_name} in node: {current_node}")
+
+     logger.debug(f"current branch : {current_branch}, current step {current_node}")
+     if current_branch and current_node:
+         return current_branch
+
+     raise exceptions.BranchNotFoundError(internal_name)
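To show how graph.py is typically exercised, a hedged sketch of building and validating a graph from a dag definition. The step layout below is an assumption for illustration only; node types are resolved through the stevedore "nodes" entry points, so the corresponding node extensions must be installed for create_node to succeed.

    from runnable.graph import create_graph

    dag_config = {
        "start_at": "hello",
        "steps": {
            "hello": {"type": "stub", "next": "success"},  # stub keeps the sketch minimal
            "success": {"type": "success"},
            "fail": {"type": "fail"},
        },
    }

    # create_graph adds each step via create_node and then runs check_graph(),
    # which validates neighbors, cycles, start_at and the terminal nodes.
    dag = create_graph(dag_config)
    print(dag.get_node_by_name("hello"))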