runnable-0.12.3-py3-none-any.whl → runnable-0.14.0-py3-none-any.whl

Files changed (64)
  1. runnable/__init__.py +0 -11
  2. runnable/catalog.py +27 -5
  3. runnable/cli.py +122 -26
  4. runnable/datastore.py +71 -35
  5. runnable/defaults.py +0 -1
  6. runnable/entrypoints.py +107 -32
  7. runnable/exceptions.py +6 -2
  8. runnable/executor.py +28 -9
  9. runnable/graph.py +37 -12
  10. runnable/integration.py +7 -2
  11. runnable/nodes.py +15 -17
  12. runnable/parameters.py +27 -8
  13. runnable/pickler.py +1 -1
  14. runnable/sdk.py +101 -33
  15. runnable/secrets.py +3 -1
  16. runnable/tasks.py +246 -34
  17. runnable/utils.py +41 -13
  18. {runnable-0.12.3.dist-info → runnable-0.14.0.dist-info}/METADATA +25 -31
  19. runnable-0.14.0.dist-info/RECORD +24 -0
  20. {runnable-0.12.3.dist-info → runnable-0.14.0.dist-info}/WHEEL +1 -1
  21. runnable-0.14.0.dist-info/entry_points.txt +40 -0
  22. runnable/extensions/__init__.py +0 -0
  23. runnable/extensions/catalog/__init__.py +0 -21
  24. runnable/extensions/catalog/file_system/__init__.py +0 -0
  25. runnable/extensions/catalog/file_system/implementation.py +0 -234
  26. runnable/extensions/catalog/k8s_pvc/__init__.py +0 -0
  27. runnable/extensions/catalog/k8s_pvc/implementation.py +0 -16
  28. runnable/extensions/catalog/k8s_pvc/integration.py +0 -59
  29. runnable/extensions/executor/__init__.py +0 -649
  30. runnable/extensions/executor/argo/__init__.py +0 -0
  31. runnable/extensions/executor/argo/implementation.py +0 -1194
  32. runnable/extensions/executor/argo/specification.yaml +0 -51
  33. runnable/extensions/executor/k8s_job/__init__.py +0 -0
  34. runnable/extensions/executor/k8s_job/implementation_FF.py +0 -259
  35. runnable/extensions/executor/k8s_job/integration_FF.py +0 -69
  36. runnable/extensions/executor/local/__init__.py +0 -0
  37. runnable/extensions/executor/local/implementation.py +0 -71
  38. runnable/extensions/executor/local_container/__init__.py +0 -0
  39. runnable/extensions/executor/local_container/implementation.py +0 -446
  40. runnable/extensions/executor/mocked/__init__.py +0 -0
  41. runnable/extensions/executor/mocked/implementation.py +0 -154
  42. runnable/extensions/executor/retry/__init__.py +0 -0
  43. runnable/extensions/executor/retry/implementation.py +0 -168
  44. runnable/extensions/nodes.py +0 -855
  45. runnable/extensions/run_log_store/__init__.py +0 -0
  46. runnable/extensions/run_log_store/chunked_file_system/__init__.py +0 -0
  47. runnable/extensions/run_log_store/chunked_file_system/implementation.py +0 -111
  48. runnable/extensions/run_log_store/chunked_k8s_pvc/__init__.py +0 -0
  49. runnable/extensions/run_log_store/chunked_k8s_pvc/implementation.py +0 -21
  50. runnable/extensions/run_log_store/chunked_k8s_pvc/integration.py +0 -61
  51. runnable/extensions/run_log_store/db/implementation_FF.py +0 -157
  52. runnable/extensions/run_log_store/db/integration_FF.py +0 -0
  53. runnable/extensions/run_log_store/file_system/__init__.py +0 -0
  54. runnable/extensions/run_log_store/file_system/implementation.py +0 -140
  55. runnable/extensions/run_log_store/generic_chunked.py +0 -557
  56. runnable/extensions/run_log_store/k8s_pvc/__init__.py +0 -0
  57. runnable/extensions/run_log_store/k8s_pvc/implementation.py +0 -21
  58. runnable/extensions/run_log_store/k8s_pvc/integration.py +0 -56
  59. runnable/extensions/secrets/__init__.py +0 -0
  60. runnable/extensions/secrets/dotenv/__init__.py +0 -0
  61. runnable/extensions/secrets/dotenv/implementation.py +0 -100
  62. runnable-0.12.3.dist-info/RECORD +0 -64
  63. runnable-0.12.3.dist-info/entry_points.txt +0 -41
  64. {runnable-0.12.3.dist-info → runnable-0.14.0.dist-info/licenses}/LICENSE +0 -0
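The removals above take the whole runnable/extensions package out of the wheel, while entry_points.txt is rewritten (+40 lines in 0.14.0 against -41 in 0.12.3). This is consistent with the built-in executors, catalogs, run log stores and secrets handlers being resolved through Python entry points rather than in-package imports. A hedged sketch of that discovery pattern follows; the group and plugin names are assumptions for illustration, not read from the wheel, and the keyword form of entry_points needs Python 3.10+:

from importlib.metadata import entry_points

def load_plugin(group: str, name: str):
    # Scan the installed distributions' entry points for the requested group
    # and return the object registered under `name`.
    for ep in entry_points(group=group):
        if ep.name == name:
            return ep.load()  # imports the module and resolves the attribute
    raise KeyError(f"no plugin {name!r} registered in group {group!r}")

# e.g. load_plugin("executor", "argo") would resolve whichever class the
# installed distribution registered for the "argo" executor.

The largest single removal, runnable/extensions/executor/argo/implementation.py, is shown in full below.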
@@ -1,1194 +0,0 @@
- import json
- import logging
- import random
- import shlex
- import string
- from abc import ABC, abstractmethod
- from collections import OrderedDict
- from typing import Dict, List, Optional, Union, cast
-
- from pydantic import (
-     BaseModel,
-     ConfigDict,
-     Field,
-     computed_field,
-     field_serializer,
-     field_validator,
- )
- from pydantic.functional_serializers import PlainSerializer
- from ruamel.yaml import YAML
- from typing_extensions import Annotated
-
- from runnable import defaults, exceptions, integration, utils
- from runnable.defaults import TypeMapVariable
- from runnable.extensions.executor import GenericExecutor
- from runnable.extensions.nodes import DagNode, MapNode, ParallelNode
- from runnable.graph import Graph, create_node, search_node_by_internal_name
- from runnable.integration import BaseIntegration
- from runnable.nodes import BaseNode
-
- logger = logging.getLogger(defaults.NAME)
-
- # TODO: Leave the run log in consistent state.
-
- """
- executor:
-   type: argo
-   config:
-     image: # apply to template
-     max_workflow_duration: # Apply to spec
-     nodeSelector: #Apply to spec
-     parallelism: #apply to spec
-     resources: # convert to podSpecPath
-       limits:
-       requests:
-     retryStrategy:
-     max_step_duration: # apply to templateDefaults
-     step_timeout: # apply to templateDefaults
-     tolerations: # apply to spec
-     imagePullPolicy: # apply to template
-
- overrides:
-   override:
-     tolerations: # template
-     image: # container
-     max_step_duration: # template
-     step_timeout: #template
-     nodeSelector: #template
-     parallelism: # this need to applied for map
-     resources: # container
-     imagePullPolicy: #container
-     retryStrategy: # template
- """
-
-
- class SecretEnvVar(BaseModel):
-     """
-     Renders:
-     env:
-       - name: MYSECRETPASSWORD
-         valueFrom:
-           secretKeyRef:
-             name: my-secret
-             key: mypassword
-     """
-
-     environment_variable: str = Field(serialization_alias="name")
-     secret_name: str = Field(exclude=True)
-     secret_key: str = Field(exclude=True)
-
-     @computed_field  # type: ignore
-     @property
-     def valueFrom(self) -> Dict[str, Dict[str, str]]:
-         return {
-             "secretKeyRef": {
-                 "name": self.secret_name,
-                 "key": self.secret_key,
-             }
-         }
-
-
- class EnvVar(BaseModel):
-     """
-     Renders:
-     parameters: # in arguments
-       - name: x
-         value: 3 # This is optional for workflow parameters
-
-     """
-
-     name: str
-     value: Union[str, int, float] = Field(default="")
-
-
- class Parameter(BaseModel):
-     name: str
-     value: Optional[str] = None
-
-     @field_serializer("name")
-     def serialize_name(self, name: str) -> str:
-         return f"{str(name)}"
-
-     @field_serializer("value")
-     def serialize_value(self, value: str) -> str:
-         return f"{value}"
-
-
- class OutputParameter(Parameter):
-     """
-     Renders:
-     - name: step-name
-       valueFrom:
-         path: /tmp/output.txt
-     """
-
-     path: str = Field(default="/tmp/output.txt", exclude=True)
-
-     @computed_field  # type: ignore
-     @property
-     def valueFrom(self) -> Dict[str, str]:
-         return {"path": self.path}
-
-
- class Argument(BaseModel):
-     """
-     Templates are called with arguments, which become inputs for the template
-     Renders:
-     arguments:
-       parameters:
-         - name: The name of the parameter
-           value: The value of the parameter
-     """
-
-     name: str
-     value: str
-
-     @field_serializer("name")
-     def serialize_name(self, name: str) -> str:
-         return f"{str(name)}"
-
-     @field_serializer("value")
-     def serialize_value(self, value: str) -> str:
-         return f"{value}"
-
-
- class Request(BaseModel):
-     """
-     The default requests
-     """
-
-     memory: str = "1Gi"
-     cpu: str = "250m"
-
-
- VendorGPU = Annotated[
-     Optional[int],
-     PlainSerializer(lambda x: str(x), return_type=str, when_used="unless-none"),
- ]
-
-
- class Limit(Request):
-     """
-     The default limits
-     """
-
-     gpu: VendorGPU = Field(default=None, serialization_alias="nvidia.com/gpu")
-
-
- class Resources(BaseModel):
-     limits: Limit = Field(default=Limit(), serialization_alias="limits")
-     requests: Request = Field(default=Request(), serialization_alias="requests")
-
-
- class BackOff(BaseModel):
-     duration_in_seconds: int = Field(default=2 * 60, serialization_alias="duration")
-     factor: float = Field(default=2, serialization_alias="factor")
-     max_duration: int = Field(default=60 * 60, serialization_alias="maxDuration")
-
-     @field_serializer("duration_in_seconds")
-     def cast_duration_as_str(self, duration_in_seconds: int, _info) -> str:
-         return str(duration_in_seconds)
-
-     @field_serializer("max_duration")
-     def cast_mas_duration_as_str(self, max_duration: int, _info) -> str:
-         return str(max_duration)
-
-
- class Retry(BaseModel):
-     limit: int = 0
-     retry_policy: str = Field(default="Always", serialization_alias="retryPolicy")
-     back_off: BackOff = Field(default=BackOff(), serialization_alias="backoff")
-
-     @field_serializer("limit")
-     def cast_limit_as_str(self, limit: int, _info) -> str:
-         return str(limit)
-
-
- class Toleration(BaseModel):
-     effect: str
-     key: str
-     operator: str
-     value: str
-
-
- class TemplateDefaults(BaseModel):
-     max_step_duration: int = Field(
-         default=60 * 60 * 2,
-         serialization_alias="activeDeadlineSeconds",
-         gt=0,
-         description="Max run time of a step",
-     )
-
-     @computed_field  # type: ignore
-     @property
-     def timeout(self) -> str:
-         return f"{self.max_step_duration + 60*60}s"
-
-
- ShlexCommand = Annotated[str, PlainSerializer(lambda x: shlex.split(x), return_type=List[str])]
-
-
- class Container(BaseModel):
-     image: str
-     command: ShlexCommand
-     volume_mounts: Optional[List["ContainerVolume"]] = Field(default=None, serialization_alias="volumeMounts")
-     image_pull_policy: str = Field(default="", serialization_alias="imagePullPolicy")
-     resources: Optional[Resources] = Field(default=None, serialization_alias="resources")
-
-     env_vars: List[EnvVar] = Field(default_factory=list, exclude=True)
-     secrets_from_k8s: List[SecretEnvVar] = Field(default_factory=list, exclude=True)
-
-     @computed_field  # type: ignore
-     @property
-     def env(self) -> Optional[List[Union[EnvVar, SecretEnvVar]]]:
-         if not self.env_vars and not self.secrets_from_k8s:
-             return None
-
-         return self.env_vars + self.secrets_from_k8s
-
-
- class DagTaskTemplate(BaseModel):
-     """
-     dag:
-       tasks:
-         name: A
-         template: nested-diamond
-         arguments:
-           parameters: [{name: message, value: A}]
-     """
-
-     name: str
-     template: str
-     depends: List[str] = []
-     arguments: Optional[List[Argument]] = Field(default=None)
-     with_param: Optional[str] = Field(default=None, serialization_alias="withParam")
-
-     @field_serializer("depends")
-     def transform_depends_as_str(self, depends: List[str]) -> str:
-         return " || ".join(depends)
-
-     @field_serializer("arguments", when_used="unless-none")
-     def empty_arguments_to_none(self, arguments: List[Argument]) -> Dict[str, List[Argument]]:
-         return {"parameters": arguments}
-
-
- class ContainerTemplate(BaseModel):
-     # These templates are used for actual execution nodes.
-     name: str
-     active_deadline_seconds: Optional[int] = Field(default=None, serialization_alias="activeDeadlineSeconds", gt=0)
-     node_selector: Optional[Dict[str, str]] = Field(default=None, serialization_alias="nodeSelector")
-     retry_strategy: Optional[Retry] = Field(default=None, serialization_alias="retryStrategy")
-     tolerations: Optional[List[Toleration]] = Field(default=None, serialization_alias="tolerations")
-
-     container: Container
-
-     outputs: Optional[List[OutputParameter]] = Field(default=None, serialization_alias="outputs")
-     inputs: Optional[List[Parameter]] = Field(default=None, serialization_alias="inputs")
-
-     def __hash__(self):
-         return hash(self.name)
-
-     @field_serializer("outputs", when_used="unless-none")
-     def reshape_outputs(self, outputs: List[OutputParameter]) -> Dict[str, List[OutputParameter]]:
-         return {"parameters": outputs}
-
-     @field_serializer("inputs", when_used="unless-none")
-     def reshape_inputs(self, inputs: List[Parameter]) -> Dict[str, List[Parameter]]:
-         return {"parameters": inputs}
-
-
- class DagTemplate(BaseModel):
-     # These are used for parallel, map nodes dag definition
-     name: str = "runnable-dag"
-     tasks: List[DagTaskTemplate] = Field(default=[], exclude=True)
-     inputs: Optional[List[Parameter]] = Field(default=None, serialization_alias="inputs")
-     parallelism: Optional[int] = None
-     fail_fast: bool = Field(default=False, serialization_alias="failFast")
-
-     @field_validator("parallelism")
-     @classmethod
-     def validate_parallelism(cls, parallelism: Optional[int]) -> Optional[int]:
-         if parallelism is not None and parallelism <= 0:
-             raise ValueError("Parallelism must be a positive integer greater than 0")
-         return parallelism
-
-     @computed_field  # type: ignore
-     @property
-     def dag(self) -> Dict[str, List[DagTaskTemplate]]:
-         return {"tasks": self.tasks}
-
-     @field_serializer("inputs", when_used="unless-none")
-     def reshape_inputs(self, inputs: List[Parameter], _info) -> Dict[str, List[Parameter]]:
-         return {"parameters": inputs}
-
-
- class Volume(BaseModel):
-     """
-     spec config requires, name and persistentVolumeClaim
-     step requires name and mountPath
-     """
-
-     name: str
-     claim: str = Field(exclude=True)
-     mount_path: str = Field(serialization_alias="mountPath", exclude=True)
-
-     @computed_field  # type: ignore
-     @property
-     def persistentVolumeClaim(self) -> Dict[str, str]:
-         return {"claimName": self.claim}
-
-
- class ContainerVolume(BaseModel):
-     name: str
-     mount_path: str = Field(serialization_alias="mountPath")
-
-
- class UserVolumeMounts(BaseModel):
-     """
-     The volume specification as user defines it.
-     """
-
-     name: str  # This is the name of the PVC on K8s
-     mount_path: str  # This is mount path on the container
-
-
- class NodeRenderer(ABC):
-     allowed_node_types: List[str] = []
-
-     def __init__(self, executor: "ArgoExecutor", node: BaseNode) -> None:
-         self.executor = executor
-         self.node = node
-
-     @abstractmethod
-     def render(self, list_of_iter_values: Optional[List] = None):
-         pass
-
-
- class ExecutionNode(NodeRenderer):
-     allowed_node_types = ["task", "stub", "success", "fail"]
-
-     def render(self, list_of_iter_values: Optional[List] = None):
-         """
-         Compose the map variable and create the execution command.
-         Create an input to the command.
-         create_container_template : creates an argument for the list of iter values
-         """
-         map_variable = self.executor.compose_map_variable(list_of_iter_values)
-         command = utils.get_node_execution_command(
-             self.node,
-             over_write_run_id=self.executor._run_id_placeholder,
-             map_variable=map_variable,
-             log_level=self.executor._log_level,
-         )
-
-         inputs = []
-         if list_of_iter_values:
-             for val in list_of_iter_values:
-                 inputs.append(Parameter(name=val))
-
-         # Create the container template
-         container_template = self.executor.create_container_template(
-             working_on=self.node,
-             command=command,
-             inputs=inputs,
-         )
-
-         self.executor._container_templates.append(container_template)
-
-
- class DagNodeRenderer(NodeRenderer):
-     allowed_node_types = ["dag"]
-
-     def render(self, list_of_iter_values: Optional[List] = None):
-         self.node = cast(DagNode, self.node)
-         task_template_arguments = []
-         dag_inputs = []
-         if list_of_iter_values:
-             for value in list_of_iter_values:
-                 task_template_arguments.append(Argument(name=value, value="{{inputs.parameters." + value + "}}"))
-                 dag_inputs.append(Parameter(name=value))
-
-         clean_name = self.executor.get_clean_name(self.node)
-         fan_out_template = self.executor._create_fan_out_template(
-             composite_node=self.node, list_of_iter_values=list_of_iter_values
-         )
-         fan_out_template.arguments = task_template_arguments if task_template_arguments else None
-
-         fan_in_template = self.executor._create_fan_in_template(
-             composite_node=self.node, list_of_iter_values=list_of_iter_values
-         )
-         fan_in_template.arguments = task_template_arguments if task_template_arguments else None
-
-         self.executor._gather_task_templates_of_dag(
-             self.node.branch,
-             dag_name=f"{clean_name}-branch",
-             list_of_iter_values=list_of_iter_values,
-         )
-
-         branch_template = DagTaskTemplate(
-             name=f"{clean_name}-branch",
-             template=f"{clean_name}-branch",
-             arguments=task_template_arguments if task_template_arguments else None,
-         )
-         branch_template.depends.append(f"{clean_name}-fan-out.Succeeded")
-         fan_in_template.depends.append(f"{clean_name}-branch.Succeeded")
-         fan_in_template.depends.append(f"{clean_name}-branch.Failed")
-
-         self.executor._dag_templates.append(
-             DagTemplate(
-                 tasks=[fan_out_template, branch_template, fan_in_template],
-                 name=clean_name,
-                 inputs=dag_inputs if dag_inputs else None,
-             )
-         )
-
-
- class ParallelNodeRender(NodeRenderer):
-     allowed_node_types = ["parallel"]
-
-     def render(self, list_of_iter_values: Optional[List] = None):
-         self.node = cast(ParallelNode, self.node)
-         task_template_arguments = []
-         dag_inputs = []
-         if list_of_iter_values:
-             for value in list_of_iter_values:
-                 task_template_arguments.append(Argument(name=value, value="{{inputs.parameters." + value + "}}"))
-                 dag_inputs.append(Parameter(name=value))
-
-         clean_name = self.executor.get_clean_name(self.node)
-         fan_out_template = self.executor._create_fan_out_template(
-             composite_node=self.node, list_of_iter_values=list_of_iter_values
-         )
-         fan_out_template.arguments = task_template_arguments if task_template_arguments else None
-
-         fan_in_template = self.executor._create_fan_in_template(
-             composite_node=self.node, list_of_iter_values=list_of_iter_values
-         )
-         fan_in_template.arguments = task_template_arguments if task_template_arguments else None
-
-         branch_templates = []
-         for name, branch in self.node.branches.items():
-             branch_name = self.executor.sanitize_name(name)
-             self.executor._gather_task_templates_of_dag(
-                 branch,
-                 dag_name=f"{clean_name}-{branch_name}",
-                 list_of_iter_values=list_of_iter_values,
-             )
-             task_template = DagTaskTemplate(
-                 name=f"{clean_name}-{branch_name}",
-                 template=f"{clean_name}-{branch_name}",
-                 arguments=task_template_arguments if task_template_arguments else None,
-             )
-             task_template.depends.append(f"{clean_name}-fan-out.Succeeded")
-             fan_in_template.depends.append(f"{task_template.name}.Succeeded")
-             fan_in_template.depends.append(f"{task_template.name}.Failed")
-             branch_templates.append(task_template)
-
-         executor_config = self.executor._resolve_executor_config(self.node)
-
-         self.executor._dag_templates.append(
-             DagTemplate(
-                 tasks=[fan_out_template] + branch_templates + [fan_in_template],
-                 name=clean_name,
-                 inputs=dag_inputs if dag_inputs else None,
-                 parallelism=executor_config.get("parallelism", None),
-             )
-         )
-
-
- class MapNodeRender(NodeRenderer):
-     allowed_node_types = ["map"]
-
-     def render(self, list_of_iter_values: Optional[List] = None):
-         self.node = cast(MapNode, self.node)
-         task_template_arguments = []
-         dag_inputs = []
-
-         if not list_of_iter_values:
-             list_of_iter_values = []
-
-         for value in list_of_iter_values:
-             task_template_arguments.append(Argument(name=value, value="{{inputs.parameters." + value + "}}"))
-             dag_inputs.append(Parameter(name=value))
-
-         clean_name = self.executor.get_clean_name(self.node)
-
-         fan_out_template = self.executor._create_fan_out_template(
-             composite_node=self.node, list_of_iter_values=list_of_iter_values
-         )
-         fan_out_template.arguments = task_template_arguments if task_template_arguments else None
-
-         fan_in_template = self.executor._create_fan_in_template(
-             composite_node=self.node, list_of_iter_values=list_of_iter_values
-         )
-         fan_in_template.arguments = task_template_arguments if task_template_arguments else None
-
-         list_of_iter_values.append(self.node.iterate_as)
-
-         self.executor._gather_task_templates_of_dag(
-             self.node.branch,
-             dag_name=f"{clean_name}-map",
-             list_of_iter_values=list_of_iter_values,
-         )
-
-         task_template = DagTaskTemplate(
-             name=f"{clean_name}-map",
-             template=f"{clean_name}-map",
-             arguments=task_template_arguments if task_template_arguments else None,
-         )
-         task_template.with_param = "{{tasks." + f"{clean_name}-fan-out" + ".outputs.parameters." + "iterate-on" + "}}"
-
-         argument = Argument(name=self.node.iterate_as, value="{{item}}")
-         if task_template.arguments is None:
-             task_template.arguments = []
-         task_template.arguments.append(argument)
-
-         task_template.depends.append(f"{clean_name}-fan-out.Succeeded")
-         fan_in_template.depends.append(f"{clean_name}-map.Succeeded")
-         fan_in_template.depends.append(f"{clean_name}-map.Failed")
-
-         executor_config = self.executor._resolve_executor_config(self.node)
-
-         self.executor._dag_templates.append(
-             DagTemplate(
-                 tasks=[fan_out_template, task_template, fan_in_template],
-                 name=clean_name,
-                 inputs=dag_inputs if dag_inputs else None,
-                 parallelism=executor_config.get("parallelism", None),
-                 fail_fast=executor_config.get("fail_fast", True),
-             )
-         )
-
-
- def get_renderer(node):
-     renderers = NodeRenderer.__subclasses__()
-
-     for renderer in renderers:
-         if node.node_type in renderer.allowed_node_types:
-             return renderer
-     raise Exception("This node type is not render-able")
-
-
- class MetaData(BaseModel):
-     generate_name: str = Field(default="runnable-dag-", serialization_alias="generateName")
-     annotations: Optional[Dict[str, str]] = Field(default_factory=dict)
-     labels: Optional[Dict[str, str]] = Field(default_factory=dict)
-     namespace: Optional[str] = Field(default=None)
-
-
- class Spec(BaseModel):
-     active_deadline_seconds: int = Field(serialization_alias="activeDeadlineSeconds")
-     entrypoint: str = Field(default="runnable-dag")
-     node_selector: Optional[Dict[str, str]] = Field(default_factory=dict, serialization_alias="nodeSelector")
-     tolerations: Optional[List[Toleration]] = Field(default=None, serialization_alias="tolerations")
-     parallelism: Optional[int] = Field(default=None, serialization_alias="parallelism")
-
-     # TODO: This has to be user driven
-     pod_gc: Dict[str, str] = Field(
-         default={"strategy": "OnPodSuccess", "deleteDelayDuration": "600s"},
-         serialization_alias="podGC",
-     )
-
-     retry_strategy: Retry = Field(default=Retry(), serialization_alias="retryStrategy")
-     service_account_name: Optional[str] = Field(default=None, serialization_alias="serviceAccountName")
-
-     templates: List[Union[DagTemplate, ContainerTemplate]] = Field(default_factory=list)
-     template_defaults: Optional[TemplateDefaults] = Field(default=None, serialization_alias="templateDefaults")
-
-     arguments: Optional[List[EnvVar]] = Field(default_factory=list)
-     persistent_volumes: List[UserVolumeMounts] = Field(default_factory=list, exclude=True)
-
-     @field_validator("parallelism")
-     @classmethod
-     def validate_parallelism(cls, parallelism: Optional[int]) -> Optional[int]:
-         if parallelism is not None and parallelism <= 0:
-             raise ValueError("Parallelism must be a positive integer greater than 0")
-         return parallelism
-
-     @computed_field  # type: ignore
-     @property
-     def volumes(self) -> List[Volume]:
-         volumes: List[Volume] = []
-         claim_names = {}
-         for i, user_volume in enumerate(self.persistent_volumes):
-             if user_volume.name in claim_names:
-                 raise Exception(f"Duplicate claim name {user_volume.name}")
-             claim_names[user_volume.name] = user_volume.name
-
-             volume = Volume(name=f"executor-{i}", claim=user_volume.name, mount_path=user_volume.mount_path)
-             volumes.append(volume)
-         return volumes
-
-     @field_serializer("arguments", when_used="unless-none")
-     def reshape_arguments(self, arguments: List[EnvVar], _info) -> Dict[str, List[EnvVar]]:
-         return {"parameters": arguments}
-
-
- class Workflow(BaseModel):
-     api_version: str = Field(
-         default="argoproj.io/v1alpha1",
-         serialization_alias="apiVersion",
-     )
-     kind: str = "Workflow"
-     metadata: MetaData = Field(default=MetaData())
-     spec: Spec
-
-
- class Override(BaseModel):
-     model_config = ConfigDict(extra="ignore")
-
-     image: str
-     tolerations: Optional[List[Toleration]] = Field(default=None)
-
-     max_step_duration_in_seconds: int = Field(
-         default=2 * 60 * 60,  # 2 hours
-         gt=0,
-     )
-
-     node_selector: Optional[Dict[str, str]] = Field(
-         default=None,
-         serialization_alias="nodeSelector",
-     )
-
-     parallelism: Optional[int] = Field(
-         default=None,
-         serialization_alias="parallelism",
-     )
-
-     resources: Resources = Field(
-         default=Resources(),
-         serialization_alias="resources",
-     )
-
-     image_pull_policy: str = Field(default="")
-
-     retry_strategy: Retry = Field(
-         default=Retry(),
-         serialization_alias="retryStrategy",
-         description="Common across all templates",
-     )
-
-     @field_validator("parallelism")
-     @classmethod
-     def validate_parallelism(cls, parallelism: Optional[int]) -> Optional[int]:
-         if parallelism is not None and parallelism <= 0:
-             raise ValueError("Parallelism must be a positive integer greater than 0")
-         return parallelism
-
-
- class ArgoExecutor(GenericExecutor):
-     service_name: str = "argo"
-     _local: bool = False
-
-     # TODO: Add logging level as option.
-
-     model_config = ConfigDict(extra="forbid")
-
-     image: str
-     expose_parameters_as_inputs: bool = True
-     secrets_from_k8s: List[SecretEnvVar] = Field(default_factory=list)
-     output_file: str = "argo-pipeline.yaml"
-
-     # Metadata related fields
-     name: str = Field(default="runnable-dag-", description="Used as an identifier for the workflow")
-     annotations: Dict[str, str] = Field(default_factory=dict)
-     labels: Dict[str, str] = Field(default_factory=dict)
-
-     max_workflow_duration_in_seconds: int = Field(
-         2 * 24 * 60 * 60,  # 2 days
-         serialization_alias="activeDeadlineSeconds",
-         gt=0,
-     )
-     node_selector: Optional[Dict[str, str]] = Field(
-         default=None,
-         serialization_alias="nodeSelector",
-     )
-     parallelism: Optional[int] = Field(
-         default=None,
-         serialization_alias="parallelism",
-     )
-     resources: Resources = Field(
-         default=Resources(),
-         serialization_alias="resources",
-         exclude=True,
-     )
-     retry_strategy: Retry = Field(
-         default=Retry(),
-         serialization_alias="retryStrategy",
-         description="Common across all templates",
-     )
-     max_step_duration_in_seconds: int = Field(
-         default=2 * 60 * 60,  # 2 hours
-         gt=0,
-     )
-     tolerations: Optional[List[Toleration]] = Field(default=None)
-     image_pull_policy: str = Field(default="")
-     service_account_name: Optional[str] = None
-     persistent_volumes: List[UserVolumeMounts] = Field(default_factory=list)
-
-     _run_id_placeholder: str = "{{workflow.parameters.run_id}}"
-     _log_level: str = "{{workflow.parameters.log_level}}"
-     _container_templates: List[ContainerTemplate] = []
-     _dag_templates: List[DagTemplate] = []
-     _clean_names: Dict[str, str] = {}
-     _container_volumes: List[ContainerVolume] = []
-
-     @field_validator("parallelism")
-     @classmethod
-     def validate_parallelism(cls, parallelism: Optional[int]) -> Optional[int]:
-         if parallelism is not None and parallelism <= 0:
-             raise ValueError("Parallelism must be a positive integer greater than 0")
-         return parallelism
-
-     @computed_field  # type: ignore
-     @property
-     def step_timeout(self) -> int:
-         """
-         Maximum time the step can take to complete, including the pending state.
-         """
-         return self.max_step_duration_in_seconds + 2 * 60 * 60  # 2 hours + max_step_duration_in_seconds
-
-     @property
-     def metadata(self) -> MetaData:
-         return MetaData(
-             generate_name=self.name,
-             annotations=self.annotations,
-             labels=self.labels,
-         )
-
-     @property
-     def spec(self) -> Spec:
-         return Spec(
-             active_deadline_seconds=self.max_workflow_duration_in_seconds,
-             node_selector=self.node_selector,
-             tolerations=self.tolerations,
-             parallelism=self.parallelism,
-             retry_strategy=self.retry_strategy,
-             service_account_name=self.service_account_name,
-             persistent_volumes=self.persistent_volumes,
-             template_defaults=TemplateDefaults(max_step_duration=self.max_step_duration_in_seconds),
-         )
-
-     def prepare_for_graph_execution(self):
-         """
-         This method should be called prior to calling execute_graph.
-         Perform any steps required before doing the graph execution.
-
-         The most common implementation is to prepare a run log for the run if the run uses local interactive compute.
-
-         But in cases of actual rendering the job specs (eg: AWS step functions, K8's) we check if the services are OK.
-         We do not set up a run log as its not relevant.
-         """
-
-         integration.validate(self, self._context.run_log_store)
-         integration.configure_for_traversal(self, self._context.run_log_store)
-
-         integration.validate(self, self._context.catalog_handler)
-         integration.configure_for_traversal(self, self._context.catalog_handler)
-
-         integration.validate(self, self._context.secrets_handler)
-         integration.configure_for_traversal(self, self._context.secrets_handler)
-
-     def prepare_for_node_execution(self):
-         """
-         Perform any modifications to the services prior to execution of the node.
-
-         Args:
-             node (Node): [description]
-             map_variable (dict, optional): [description]. Defaults to None.
-         """
-
-         super().prepare_for_node_execution()
-         self._set_up_run_log(exists_ok=True)
-
-     def execute_node(self, node: BaseNode, map_variable: TypeMapVariable = None, **kwargs):
-         step_log = self._context.run_log_store.create_step_log(node.name, node._get_step_log_name(map_variable))
-
-         self.add_code_identities(node=node, step_log=step_log)
-
-         step_log.step_type = node.node_type
-         step_log.status = defaults.PROCESSING
-         self._context.run_log_store.add_step_log(step_log, self._context.run_id)
-
-         super()._execute_node(node, map_variable=map_variable, **kwargs)
-
-         # Implicit fail
-         if self._context.dag:
-             # functions and notebooks do not have dags
-             _, current_branch = search_node_by_internal_name(dag=self._context.dag, internal_name=node.internal_name)
-             _, next_node_name = self._get_status_and_next_node_name(node, current_branch, map_variable=map_variable)
-             if next_node_name:
-                 # Terminal nodes do not have next node name
-                 next_node = current_branch.get_node_by_name(next_node_name)
-
-                 if next_node.node_type == defaults.FAIL:
-                     self.execute_node(next_node, map_variable=map_variable)
-
-         step_log = self._context.run_log_store.get_step_log(node._get_step_log_name(map_variable), self._context.run_id)
-         if step_log.status == defaults.FAIL:
-             raise Exception(f"Step {node.name} failed")
-
-     def fan_out(self, node: BaseNode, map_variable: TypeMapVariable = None):
-         super().fan_out(node, map_variable)
-
-         # If its a map node, write the list values to "/tmp/output.txt"
-         if node.node_type == "map":
-             node = cast(MapNode, node)
-             iterate_on = self._context.run_log_store.get_parameters(self._context.run_id)[node.iterate_on]
-
-             with open("/tmp/output.txt", mode="w", encoding="utf-8") as myfile:
-                 json.dump(iterate_on.get_value(), myfile, indent=4)
-
-     def sanitize_name(self, name):
-         return name.replace(" ", "-").replace(".", "-").replace("_", "-")
-
-     def get_clean_name(self, node: BaseNode):
-         # Cache names for the node
-         if node.internal_name not in self._clean_names:
-             sanitized = self.sanitize_name(node.name)
-             tag = "".join(random.choices(string.ascii_lowercase + string.digits, k=6))
-             self._clean_names[node.internal_name] = f"{sanitized}-{node.node_type}-{tag}"
-
-         return self._clean_names[node.internal_name]
-
-     def compose_map_variable(self, list_of_iter_values: Optional[List] = None) -> TypeMapVariable:
-         map_variable = OrderedDict()
-
-         # If we are inside a map node, compose a map_variable
-         # The values of "iterate_as" are sent over as inputs to the container template
-         if list_of_iter_values:
-             for var in list_of_iter_values:
-                 map_variable[var] = "{{inputs.parameters." + str(var) + "}}"
-
-         return map_variable  # type: ignore
-
-     def create_container_template(
-         self,
-         working_on: BaseNode,
-         command: str,
-         inputs: Optional[List] = None,
-         outputs: Optional[List] = None,
-         overwrite_name: str = "",
-     ):
-         effective_node_config = self._resolve_executor_config(working_on)
-
-         override: Override = Override(**effective_node_config)
-
-         container = Container(
-             command=command,
-             image=override.image,
-             volume_mounts=self._container_volumes,
-             image_pull_policy=override.image_pull_policy,
-             resources=override.resources,
-             secrets_from_k8s=self.secrets_from_k8s,
-         )
-
-         if working_on.name == self._context.dag.start_at and self.expose_parameters_as_inputs:
-             for key, value in self._get_parameters().items():
-                 value = value.get_value()  # type: ignore
-                 # Get the value from work flow parameters for dynamic behavior
-                 if isinstance(value, int) or isinstance(value, float) or isinstance(value, str):
-                     env_var = EnvVar(
-                         name=defaults.PARAMETER_PREFIX + key,
-                         value="{{workflow.parameters." + key + "}}",
-                     )
-                     container.env_vars.append(env_var)
-
-         clean_name = self.get_clean_name(working_on)
-         if overwrite_name:
-             clean_name = overwrite_name
-
-         container_template = ContainerTemplate(
-             name=clean_name,
-             active_deadline_seconds=(
-                 override.max_step_duration_in_seconds
-                 if self.max_step_duration_in_seconds != override.max_step_duration_in_seconds
-                 else None
-             ),
-             container=container,
-             retry_strategy=override.retry_strategy if self.retry_strategy != override.retry_strategy else None,
-             tolerations=override.tolerations if self.tolerations != override.tolerations else None,
-             node_selector=override.node_selector if self.node_selector != override.node_selector else None,
-         )
-
-         # inputs are the "iterate_as" value map variables in the same order as they are observed
-         # We need to expose the map variables in the command of the container
-         if inputs:
-             if not container_template.inputs:
-                 container_template.inputs = []
-             container_template.inputs.extend(inputs)
-
-         # The map step fan out would create an output that we should propagate via Argo
-         if outputs:
-             if not container_template.outputs:
-                 container_template.outputs = []
-             container_template.outputs.extend(outputs)
-
-         return container_template
-
-     def _create_fan_out_template(self, composite_node, list_of_iter_values: Optional[List] = None):
-         clean_name = self.get_clean_name(composite_node)
-         inputs = []
-         # If we are fanning out already map state, we need to send the map variable inside
-         # The container template also should be accepting an input parameter
-         map_variable = None
-         if list_of_iter_values:
-             map_variable = self.compose_map_variable(list_of_iter_values=list_of_iter_values)
-
-             for val in list_of_iter_values:
-                 inputs.append(Parameter(name=val))
-
-         command = utils.get_fan_command(
-             mode="out",
-             node=composite_node,
-             run_id=self._run_id_placeholder,
-             map_variable=map_variable,
-             log_level=self._log_level,
-         )
-
-         outputs = []
-         # If the node is a map node, we have to set the output parameters
-         # Output is always the step's internal name + iterate-on
-         if composite_node.node_type == "map":
-             output_parameter = OutputParameter(name="iterate-on")
-             outputs.append(output_parameter)
-
-         # Create the node now
-         step_config = {"command": command, "type": "task", "next": "dummy"}
-         node = create_node(name=f"{clean_name}-fan-out", step_config=step_config)
-
-         container_template = self.create_container_template(
-             working_on=node,
-             command=command,
-             outputs=outputs,
-             inputs=inputs,
-             overwrite_name=f"{clean_name}-fan-out",
-         )
-
-         self._container_templates.append(container_template)
-         return DagTaskTemplate(name=f"{clean_name}-fan-out", template=f"{clean_name}-fan-out")
-
-     def _create_fan_in_template(self, composite_node, list_of_iter_values: Optional[List] = None):
-         clean_name = self.get_clean_name(composite_node)
-         inputs = []
-         # If we are fanning in already map state, we need to send the map variable inside
-         # The container template also should be accepting an input parameter
-         map_variable = None
-         if list_of_iter_values:
-             map_variable = self.compose_map_variable(list_of_iter_values=list_of_iter_values)
-
-             for val in list_of_iter_values:
-                 inputs.append(Parameter(name=val))
-
-         command = utils.get_fan_command(
-             mode="in",
-             node=composite_node,
-             run_id=self._run_id_placeholder,
-             map_variable=map_variable,
-             log_level=self._log_level,
-         )
-
-         step_config = {"command": command, "type": "task", "next": "dummy"}
-         node = create_node(name=f"{clean_name}-fan-in", step_config=step_config)
-         container_template = self.create_container_template(
-             working_on=node,
-             command=command,
-             inputs=inputs,
-             overwrite_name=f"{clean_name}-fan-in",
-         )
-         self._container_templates.append(container_template)
-         clean_name = self.get_clean_name(composite_node)
-         return DagTaskTemplate(name=f"{clean_name}-fan-in", template=f"{clean_name}-fan-in")
-
-     def _gather_task_templates_of_dag(
-         self, dag: Graph, dag_name="runnable-dag", list_of_iter_values: Optional[List] = None
-     ):
-         current_node = dag.start_at
-         previous_node = None
-         previous_node_template_name = None
-
-         templates: Dict[str, DagTaskTemplate] = {}
-
-         if not list_of_iter_values:
-             list_of_iter_values = []
-
-         while True:
-             working_on = dag.get_node_by_name(current_node)
-             if previous_node == current_node:
-                 raise Exception("Potentially running in a infinite loop")
-
-             render_obj = get_renderer(working_on)(executor=self, node=working_on)
-             render_obj.render(list_of_iter_values=list_of_iter_values.copy())
-
-             clean_name = self.get_clean_name(working_on)
-
-             # If a task template for clean name exists, retrieve it (could have been created by on_failure)
-             template = templates.get(clean_name, DagTaskTemplate(name=clean_name, template=clean_name))
-
-             # Link the current node to previous node, if the previous node was successful.
-             if previous_node:
-                 template.depends.append(f"{previous_node_template_name}.Succeeded")
-
-             templates[clean_name] = template
-
-             # On failure nodes
-             if working_on.node_type not in ["success", "fail"] and working_on._get_on_failure_node():
-                 failure_node = dag.get_node_by_name(working_on._get_on_failure_node())
-
-                 # same logic, if a template exists, retrieve it
-                 # if not, create a new one
-                 render_obj = get_renderer(working_on)(executor=self, node=failure_node)
-                 render_obj.render(list_of_iter_values=list_of_iter_values.copy())
-
-                 failure_template_name = self.get_clean_name(failure_node)
-                 # If a task template for clean name exists, retrieve it
-                 failure_template = templates.get(
-                     failure_template_name,
-                     DagTaskTemplate(name=failure_template_name, template=failure_template_name),
-                 )
-                 failure_template.depends.append(f"{clean_name}.Failed")
-                 templates[failure_template_name] = failure_template
-
-             # If we are in a map node, we need to add the values as arguments
-             template = templates[clean_name]
-             if list_of_iter_values:
-                 if not template.arguments:
-                     template.arguments = []
-                 for value in list_of_iter_values:
-                     template.arguments.append(Argument(name=value, value="{{inputs.parameters." + value + "}}"))
-
-             # Move ahead to the next node
-             previous_node = current_node
-             previous_node_template_name = self.get_clean_name(working_on)
-
-             if working_on.node_type in ["success", "fail"]:
-                 break
-
-             current_node = working_on._get_next_node()
-
-         # Add the iteration values as input to dag template
-         dag_template = DagTemplate(tasks=list(templates.values()), name=dag_name)
-         if list_of_iter_values:
-             if not dag_template.inputs:
-                 dag_template.inputs = []
-             dag_template.inputs.extend([Parameter(name=val) for val in list_of_iter_values])
-
-         # Add the dag template to the list of templates
-         self._dag_templates.append(dag_template)
-
-     def _get_template_defaults(self) -> TemplateDefaults:
-         user_provided_config = self.model_dump(by_alias=False)
-
-         return TemplateDefaults(**user_provided_config)
-
-     def execute_graph(self, dag: Graph, map_variable: Optional[dict] = None, **kwargs):
-         # TODO: Add metadata
-         arguments = []
-         # Expose "simple" parameters as workflow arguments for dynamic behavior
-         if self.expose_parameters_as_inputs:
-             for key, value in self._get_parameters().items():
-                 value = value.get_value()  # type: ignore
-                 if isinstance(value, dict) or isinstance(value, list):
-                     continue
-
-                 env_var = EnvVar(name=key, value=value)  # type: ignore
-                 arguments.append(env_var)
-
-         run_id_var = EnvVar(name="run_id", value="{{workflow.uid}}")
-         log_level_var = EnvVar(name="log_level", value=defaults.LOG_LEVEL)
-         arguments.append(run_id_var)
-         arguments.append(log_level_var)
-
-         # TODO: Can we do reruns?
-
-         for volume in self.spec.volumes:
-             self._container_volumes.append(ContainerVolume(name=volume.name, mount_path=volume.mount_path))
-
-         # Container specifications are globally collected and added at the end.
-         # Dag specifications are added as part of the dag traversal.
-         templates: List[Union[DagTemplate, ContainerTemplate]] = []
-         self._gather_task_templates_of_dag(dag=dag, list_of_iter_values=[])
-         templates.extend(self._dag_templates)
-         templates.extend(self._container_templates)
-
-         spec = self.spec
-         spec.templates = templates
-         spec.arguments = arguments
-         workflow = Workflow(metadata=self.metadata, spec=spec)
-
-         yaml = YAML()
-         with open(self.output_file, "w") as f:
-             yaml.indent(mapping=2, sequence=4, offset=2)
-
-             yaml.dump(workflow.model_dump(by_alias=True, exclude_none=True), f)
-
-     def execute_job(self, node: BaseNode):
-         """
-         Use K8's job instead
-         """
-         raise NotImplementedError("Use K8's job instead")
-
-     def send_return_code(self, stage="traversal"):
-         """
-         Convenience function used by pipeline to send return code to the caller of the cli
-
-         Raises:
-             Exception: If the pipeline execution failed
-         """
-         if stage != "traversal":  # traversal does no actual execution, so return code is pointless
-             run_id = self._context.run_id
-
-             run_log = self._context.run_log_store.get_run_log_by_id(run_id=run_id, full=False)
-             if run_log.status == defaults.FAIL:
-                 raise exceptions.ExecutionFailedError(run_id)
-
-
- class FileSystemRunLogStore(BaseIntegration):
-     """
-     Only local execution mode is possible for Buffered Run Log store
-     """
-
-     executor_type = "argo"
-     service_type = "run_log_store"  # One of secret, catalog, datastore
-     service_provider = "file-system"  # The actual implementation of the service
-
-     def validate(self, **kwargs):
-         msg = (
-             "Argo cannot run work with file-system run log store. "
-             "Unless you have made a mechanism to use volume mounts."
-             "Using this run log store if the pipeline has concurrent tasks might lead to unexpected results"
-         )
-         logger.warning(msg)
-
-
- class ChunkedFileSystemRunLogStore(BaseIntegration):
-     """
-     Only local execution mode is possible for Buffered Run Log store
-     """
-
-     executor_type = "argo"
-     service_type = "run_log_store"  # One of secret, catalog, datastore
-     service_provider = "chunked-fs"  # The actual implementation of the service
-
-     def validate(self, **kwargs):
-         msg = (
-             "Argo cannot run work with chunked file-system run log store. "
-             "Unless you have made a mechanism to use volume mounts"
-         )
-         logger.warning(msg)
-
-
- class FileSystemCatalog(BaseIntegration):
-     """
-     Only local execution mode is possible for Buffered Run Log store
-     """
-
-     executor_type = "argo"
-     service_type = "catalog"  # One of secret, catalog, datastore
-     service_provider = "file-system"  # The actual implementation of the service
-
-     def validate(self, **kwargs):
-         msg = (
-             "Argo cannot run work with file-system run log store. Unless you have made a mechanism to use volume mounts"
-         )
-         logger.warning(msg)
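
The module above leans on one pydantic v2 pattern throughout: serialization aliases plus computed_field properties dump directly into Argo's camelCase YAML shape. A self-contained sketch of that pattern, reusing the SecretEnvVar model as it appears in the deleted file (the printed dict is what the dump produces):

from typing import Dict

from pydantic import BaseModel, Field, computed_field

class SecretEnvVar(BaseModel):
    # Same model as in the deleted module: the field is renamed on dump,
    # the two source fields are excluded, and a computed property fills in
    # the nested secretKeyRef block.
    environment_variable: str = Field(serialization_alias="name")
    secret_name: str = Field(exclude=True)
    secret_key: str = Field(exclude=True)

    @computed_field  # type: ignore
    @property
    def valueFrom(self) -> Dict[str, Dict[str, str]]:
        return {"secretKeyRef": {"name": self.secret_name, "key": self.secret_key}}

env = SecretEnvVar(
    environment_variable="MYSECRETPASSWORD",
    secret_name="my-secret",
    secret_key="mypassword",
)
print(env.model_dump(by_alias=True))
# {'name': 'MYSECRETPASSWORD', 'valueFrom': {'secretKeyRef': {'name': 'my-secret', 'key': 'mypassword'}}}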