runnable-0.13.0-py3-none-any.whl → runnable-0.16.0-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (64)
  1. runnable/__init__.py +1 -12
  2. runnable/catalog.py +29 -5
  3. runnable/cli.py +268 -215
  4. runnable/context.py +10 -3
  5. runnable/datastore.py +212 -53
  6. runnable/defaults.py +13 -55
  7. runnable/entrypoints.py +270 -183
  8. runnable/exceptions.py +28 -2
  9. runnable/executor.py +133 -86
  10. runnable/graph.py +37 -13
  11. runnable/nodes.py +50 -22
  12. runnable/parameters.py +27 -8
  13. runnable/pickler.py +1 -1
  14. runnable/sdk.py +230 -66
  15. runnable/secrets.py +3 -1
  16. runnable/tasks.py +99 -41
  17. runnable/utils.py +59 -39
  18. {runnable-0.13.0.dist-info → runnable-0.16.0.dist-info}/METADATA +28 -31
  19. runnable-0.16.0.dist-info/RECORD +23 -0
  20. {runnable-0.13.0.dist-info → runnable-0.16.0.dist-info}/WHEEL +1 -1
  21. runnable-0.16.0.dist-info/entry_points.txt +45 -0
  22. runnable/extensions/__init__.py +0 -0
  23. runnable/extensions/catalog/__init__.py +0 -21
  24. runnable/extensions/catalog/file_system/__init__.py +0 -0
  25. runnable/extensions/catalog/file_system/implementation.py +0 -234
  26. runnable/extensions/catalog/k8s_pvc/__init__.py +0 -0
  27. runnable/extensions/catalog/k8s_pvc/implementation.py +0 -16
  28. runnable/extensions/catalog/k8s_pvc/integration.py +0 -59
  29. runnable/extensions/executor/__init__.py +0 -649
  30. runnable/extensions/executor/argo/__init__.py +0 -0
  31. runnable/extensions/executor/argo/implementation.py +0 -1194
  32. runnable/extensions/executor/argo/specification.yaml +0 -51
  33. runnable/extensions/executor/k8s_job/__init__.py +0 -0
  34. runnable/extensions/executor/k8s_job/implementation_FF.py +0 -259
  35. runnable/extensions/executor/k8s_job/integration_FF.py +0 -69
  36. runnable/extensions/executor/local.py +0 -69
  37. runnable/extensions/executor/local_container/__init__.py +0 -0
  38. runnable/extensions/executor/local_container/implementation.py +0 -446
  39. runnable/extensions/executor/mocked/__init__.py +0 -0
  40. runnable/extensions/executor/mocked/implementation.py +0 -154
  41. runnable/extensions/executor/retry/__init__.py +0 -0
  42. runnable/extensions/executor/retry/implementation.py +0 -168
  43. runnable/extensions/nodes.py +0 -870
  44. runnable/extensions/run_log_store/__init__.py +0 -0
  45. runnable/extensions/run_log_store/chunked_file_system/__init__.py +0 -0
  46. runnable/extensions/run_log_store/chunked_file_system/implementation.py +0 -111
  47. runnable/extensions/run_log_store/chunked_k8s_pvc/__init__.py +0 -0
  48. runnable/extensions/run_log_store/chunked_k8s_pvc/implementation.py +0 -21
  49. runnable/extensions/run_log_store/chunked_k8s_pvc/integration.py +0 -61
  50. runnable/extensions/run_log_store/db/implementation_FF.py +0 -157
  51. runnable/extensions/run_log_store/db/integration_FF.py +0 -0
  52. runnable/extensions/run_log_store/file_system/__init__.py +0 -0
  53. runnable/extensions/run_log_store/file_system/implementation.py +0 -140
  54. runnable/extensions/run_log_store/generic_chunked.py +0 -557
  55. runnable/extensions/run_log_store/k8s_pvc/__init__.py +0 -0
  56. runnable/extensions/run_log_store/k8s_pvc/implementation.py +0 -21
  57. runnable/extensions/run_log_store/k8s_pvc/integration.py +0 -56
  58. runnable/extensions/secrets/__init__.py +0 -0
  59. runnable/extensions/secrets/dotenv/__init__.py +0 -0
  60. runnable/extensions/secrets/dotenv/implementation.py +0 -100
  61. runnable/integration.py +0 -192
  62. runnable-0.13.0.dist-info/RECORD +0 -63
  63. runnable-0.13.0.dist-info/entry_points.txt +0 -41
  64. {runnable-0.13.0.dist-info → runnable-0.16.0.dist-info/licenses}/LICENSE +0 -0
runnable/extensions/run_log_store/generic_chunked.py
@@ -1,557 +0,0 @@
-import logging
-import time
-from abc import abstractmethod
-from enum import Enum
-from pathlib import Path
-from string import Template
-from typing import Any, Dict, Optional, Sequence, Union
-
-from runnable import defaults, exceptions
-from runnable.datastore import (
-    BaseRunLogStore,
-    BranchLog,
-    JsonParameter,
-    MetricParameter,
-    ObjectParameter,
-    Parameter,
-    RunLog,
-    StepLog,
-)
-
-logger = logging.getLogger(defaults.LOGGER_NAME)
-
-
-T = Union[str, Path]  # Holds str, path
-
-
-class EntityNotFoundError(Exception):
-    pass
-
-
-class ChunkedRunLogStore(BaseRunLogStore):
-    """
-    A generic implementation of a RunLogStore that stores RunLogs in chunks.
-    """
-
-    service_name: str = ""
-
-    class LogTypes(Enum):
-        RUN_LOG = "RunLog"
-        PARAMETER = "Parameter"
-        STEP_LOG = "StepLog"
-        BRANCH_LOG = "BranchLog"
-
-    class ModelTypes(Enum):
-        RUN_LOG = RunLog
-        PARAMETER = dict
-        STEP_LOG = StepLog
-        BRANCH_LOG = BranchLog
-
-    def naming_pattern(self, log_type: LogTypes, name: str = "") -> str:
-        """
-        Naming pattern to store RunLog, Parameter, StepLog or BranchLog.
-
-        The reasoning for name to be defaulted to empty string:
-        Its actually conditionally empty. For RunLog and Parameter it is empty.
-        For StepLog and BranchLog it should be provided.
-
-        Args:
-            log_type (LogTypes): One of RunLog, Parameter, StepLog or BranchLog
-            name (str, optional): The name to be included or left. Defaults to ''.
-
-        Raises:
-            Exception: If log_type is not recognized
-
-        Returns:
-            str: The naming pattern
-        """
-        if log_type == self.LogTypes.RUN_LOG:
-            return f"{self.LogTypes.RUN_LOG.value}"
-
-        if log_type == self.LogTypes.PARAMETER:
-            return "-".join([self.LogTypes.PARAMETER.value, name])
-
-        if not name:
-            raise Exception(f"Name should be provided for naming pattern for {log_type}")
-
-        if log_type == self.LogTypes.STEP_LOG:
-            return "-".join([self.LogTypes.STEP_LOG.value, name, "${creation_time}"])
-
-        if log_type == self.LogTypes.BRANCH_LOG:
-            return "-".join([self.LogTypes.BRANCH_LOG.value, name, "${creation_time}"])
-
-        raise Exception("Unexpected log type")
-
-    @abstractmethod
-    def get_matches(self, run_id: str, name: str, multiple_allowed: bool = False) -> Optional[Union[Sequence[T], T]]:
-        """
-        Get contents of persistence layer matching the pattern name*
-
-        Args:
-            run_id (str): The run id
-            name (str): The suffix of the entity name to check in the run log store.
-        """
-        ...
-
-    @abstractmethod
-    def _store(self, run_id: str, contents: dict, name: T, insert: bool = False):
-        """
-        Store the contents against the name in the persistence layer.
-
-        Args:
-            run_id (str): The run id
-            contents (dict): The dict to store
-            name (str): The name to store as
-        """
-        ...
-
-    @abstractmethod
-    def _retrieve(self, name: T) -> dict:
-        """
-        Does the job of retrieving from the persistent layer.
-
-        Args:
-            name (str): the name of the file to retrieve
-
-        Returns:
-            dict: The contents
-        """
-        ...
-
-    def store(self, run_id: str, log_type: LogTypes, contents: dict, name: str = ""):
-        """Store a SINGLE log type in the file system
-
-        Args:
-            run_id (str): The run id to store against
-            log_type (LogTypes): The type of log to store
-            contents (dict): The dict of contents to store
-            name (str, optional): The name against the contents have to be stored. Defaults to ''.
-        """
-        naming_pattern = self.naming_pattern(log_type=log_type, name=name)
-        match = self.get_matches(run_id=run_id, name=naming_pattern, multiple_allowed=False)
-        # The boolean multiple allowed confuses mypy a lot!
-        name_to_give: str = ""
-        insert = False
-
-        if match:
-            existing_contents = self._retrieve(name=match)  # type: ignore
-            contents = dict(existing_contents, **contents)
-            name_to_give = match  # type: ignore
-        else:
-            name_to_give = Template(naming_pattern).safe_substitute({"creation_time": str(int(time.time_ns()))})
-            insert = True
-
-        self._store(run_id=run_id, contents=contents, name=name_to_give, insert=insert)
-
-    def retrieve(self, run_id: str, log_type: LogTypes, name: str = "", multiple_allowed=False) -> Any:
-        """
-        Retrieve the model given a log_type and a name.
-        Use multiple_allowed to control if you are expecting multiple of them.
-        eg: There could be multiple of Parameters- but only one of StepLog-stepname
-
-        The reasoning for name to be defaulted to empty string:
-        Its actually conditionally empty. For RunLog and Parameter it is empty.
-        For StepLog and BranchLog it should be provided.
-
-        Args:
-            run_id (str): The run id
-            log_type (LogTypes): One of RunLog, Parameter, StepLog, BranchLog
-            name (str, optional): The name to match. Defaults to ''.
-            multiple_allowed (bool, optional): Are multiple allowed. Defaults to False.
-
-        Raises:
-            FileNotFoundError: If there is no match found
-
-        Returns:
-            Any: One of StepLog, BranchLog, Parameter or RunLog
-        """
-        # The reason of any is it could be one of Logs or dict or list of the
-        if not name and log_type not in [
-            self.LogTypes.RUN_LOG,
-            self.LogTypes.PARAMETER,
-        ]:
-            raise Exception(f"Name is required during retrieval for {log_type}")
-
-        naming_pattern = self.naming_pattern(log_type=log_type, name=name)
-
-        matches = self.get_matches(run_id=run_id, name=naming_pattern, multiple_allowed=multiple_allowed)
-
-        if matches:
-            if not multiple_allowed:
-                contents = self._retrieve(name=matches)  # type: ignore
-                model = self.ModelTypes[log_type.name].value
-                return model(**contents)
-
-            models = []
-            for match in matches:  # type: ignore
-                contents = self._retrieve(name=match)
-                model = self.ModelTypes[log_type.name].value
-                models.append(model(**contents))
-            return models
-
-        raise EntityNotFoundError()
-
-    def orderly_retrieve(self, run_id: str, log_type: LogTypes) -> Dict[str, Union[StepLog, BranchLog]]:
-        """Should only be used by prepare full run log.
-
-        Retrieves the StepLog or BranchLog sorted according to creation time.
-
-        Args:
-            run_id (str): _description_
-            log_type (LogTypes): _description_
-        """
-        prefix: str = self.LogTypes.STEP_LOG.value
-
-        if log_type == self.LogTypes.BRANCH_LOG:
-            prefix = self.LogTypes.BRANCH_LOG.value
-
-        matches = self.get_matches(run_id=run_id, name=prefix, multiple_allowed=True)
-
-        if log_type == self.LogTypes.BRANCH_LOG and not matches:
-            # No branch logs are found
-            return {}
-        # Forcing get_matches to always return a list is a better design
-        epoch_created = [str(match).split("-")[-1] for match in matches]  # type: ignore
-
-        # sort matches by epoch created
-        epoch_created, matches = zip(*sorted(zip(epoch_created, matches)))  # type: ignore
-
-        logs: Dict[str, Union[StepLog, BranchLog]] = {}
-
-        for match in matches:
-            model = self.ModelTypes[log_type.name].value
-            log_model = model(**self._retrieve(match))
-            logs[log_model.internal_name] = log_model  # type: ignore
-
-        return logs
-
-    def _get_parent_branch(self, name: str) -> Union[str, None]:
-        """
-        Returns the name of the parent branch.
-        If the step is part of main dag, return None.
-
-        Args:
-            name (str): The name of the step.
-
-        Returns:
-            str: The name of the branch containing the step.
-        """
-        dot_path = name.split(".")
-
-        if len(dot_path) == 1:
-            return None
-        # Ignore the step name
-        return ".".join(dot_path[:-1])
-
-    def _get_parent_step(self, name: str) -> Union[str, None]:
-        """
-        Returns the step containing the step, useful when we have steps within a branch.
-        Returns None, if the step belongs to parent dag.
-
-        Args:
-            name (str): The name of the step to find the parent step it belongs to.
-
-        Returns:
-            str: The parent step the step belongs to, None if the step belongs to parent dag.
-        """
-        dot_path = name.split(".")
-
-        if len(dot_path) == 1:
-            return None
-        # Ignore the branch.step_name
-        return ".".join(dot_path[:-2])
-
-    def _prepare_full_run_log(self, run_log: RunLog):
-        """
-        Populates the run log with the branches and steps.
-
-        Args:
-            run_log (RunLog): The partial run log containing empty step logs
-        """
-        run_id = run_log.run_id
-        run_log.parameters = self.get_parameters(run_id=run_id)
-
-        ordered_steps = self.orderly_retrieve(run_id=run_id, log_type=self.LogTypes.STEP_LOG)
-        ordered_branches = self.orderly_retrieve(run_id=run_id, log_type=self.LogTypes.BRANCH_LOG)
-
-        current_branch: Any = None  # It could be str, None, RunLog
-        for step_internal_name in ordered_steps:
-            current_branch = self._get_parent_branch(step_internal_name)
-            step_to_add_branch = self._get_parent_step(step_internal_name)
-
-            if not current_branch:
-                current_branch = run_log
-            else:
-                current_branch = ordered_branches[current_branch]
-                step_to_add_branch = ordered_steps[step_to_add_branch]  # type: ignore
-                step_to_add_branch.branches[current_branch.internal_name] = current_branch  # type: ignore
-
-            current_branch.steps[step_internal_name] = ordered_steps[step_internal_name]
-
-    def create_run_log(
-        self,
-        run_id: str,
-        dag_hash: str = "",
-        use_cached: bool = False,
-        tag: str = "",
-        original_run_id: str = "",
-        status: str = defaults.CREATED,
-        **kwargs,
-    ):
-        """
-        Creates a Run Log object by using the config
-
-        Logically the method should do the following:
-            * Creates a Run log
-            * Adds it to the db
-            * Return the log
-        """
-        try:
-            self.get_run_log_by_id(run_id=run_id, full=False)
-            raise exceptions.RunLogExistsError(run_id=run_id)
-        except exceptions.RunLogNotFoundError:
-            pass
-
-        logger.info(f"{self.service_name} Creating a Run Log for : {run_id}")
-        run_log = RunLog(
-            run_id=run_id,
-            dag_hash=dag_hash,
-            tag=tag,
-            status=status,
-        )
-
-        self.store(run_id=run_id, contents=run_log.model_dump(), log_type=self.LogTypes.RUN_LOG)
-        return run_log
-
-    def get_run_log_by_id(self, run_id: str, full: bool = False, **kwargs) -> RunLog:
-        """
-        Retrieves a Run log from the database using the config and the run_id
-
-        Args:
-            run_id (str): The run_id of the run
-            full (bool): return the full run log store or only the RunLog object
-
-        Returns:
-            RunLog: The RunLog object identified by the run_id
-
-        Logically the method should:
-            * Returns the run_log defined by id from the data store defined by the config
-
-        """
-        try:
-            logger.info(f"{self.service_name} Getting a Run Log for : {run_id}")
-            run_log = self.retrieve(run_id=run_id, log_type=self.LogTypes.RUN_LOG, multiple_allowed=False)
-
-            if full:
-                self._prepare_full_run_log(run_log=run_log)
-
-            return run_log
-        except EntityNotFoundError as e:
-            raise exceptions.RunLogNotFoundError(run_id) from e
-
-    def put_run_log(self, run_log: RunLog, **kwargs):
-        """
-        Puts the Run Log in the database as defined by the config
-
-        Args:
-            run_log (RunLog): The Run log of the run
-
-        Logically the method should:
-            Puts the run_log into the database
-
-        Raises:
-            NotImplementedError: This is a base class and therefore has no default implementation
-        """
-        run_id = run_log.run_id
-        self.store(run_id=run_id, contents=run_log.model_dump(), log_type=self.LogTypes.RUN_LOG)
-
-    def get_parameters(self, run_id: str, **kwargs) -> dict:
-        """
-        Get the parameters from the Run log defined by the run_id
-
-        Args:
-            run_id (str): The run_id of the run
-
-        The method should:
-            * Call get_run_log_by_id(run_id) to retrieve the run_log
-            * Return the parameters as identified in the run_log
-
-        Returns:
-            dict: A dictionary of the run_log parameters
-        Raises:
-            RunLogNotFoundError: If the run log for run_id is not found in the datastore
-        """
-        parameters: Dict[str, Parameter] = {}
-        try:
-            parameters_list = self.retrieve(run_id=run_id, log_type=self.LogTypes.PARAMETER, multiple_allowed=True)
-            for param in parameters_list:
-                for key, value in param.items():
-                    if value["kind"] == "json":
-                        parameters[key] = JsonParameter(**value)
-                    if value["kind"] == "metric":
-                        parameters[key] = MetricParameter(**value)
-                    if value["kind"] == "object":
-                        parameters[key] = ObjectParameter(**value)
-        except EntityNotFoundError:
-            # No parameters are set
-            pass
-
-        return parameters
-
-    def set_parameters(self, run_id: str, parameters: dict, **kwargs):
-        """
-        Update the parameters of the Run log with the new parameters
-
-        This method would over-write the parameters, if the parameter exists in the run log already
-
-        The method should:
-            * Call get_run_log_by_id(run_id) to retrieve the run_log
-            * Update the parameters of the run_log
-            * Call put_run_log(run_log) to put the run_log in the datastore
-
-        Args:
-            run_id (str): The run_id of the run
-            parameters (dict): The parameters to update in the run log
-        Raises:
-            RunLogNotFoundError: If the run log for run_id is not found in the datastore
-        """
-        for key, value in parameters.items():
-            self.store(
-                run_id=run_id,
-                log_type=self.LogTypes.PARAMETER,
-                contents={key: value.model_dump(by_alias=True)},
-                name=key,
-            )
-
-    def get_run_config(self, run_id: str, **kwargs) -> dict:
-        """
-        Given a run_id, return the run_config used to perform the run.
-
-        Args:
-            run_id (str): The run_id of the run
-
-        Returns:
-            dict: The run config used for the run
-        """
-
-        run_log = self.get_run_log_by_id(run_id=run_id)
-        return run_log.run_config
-
-    def set_run_config(self, run_id: str, run_config: dict, **kwargs):
-        """Set the run config used to run the run_id
-
-        Args:
-            run_id (str): The run_id of the run
-            run_config (dict): The run_config of the run
-        """
-
-        run_log = self.get_run_log_by_id(run_id=run_id)
-        run_log.run_config.update(run_config)
-        self.put_run_log(run_log=run_log)
-
-    def get_step_log(self, internal_name: str, run_id: str, **kwargs) -> StepLog:
-        """
-        Get a step log from the datastore for run_id and the internal naming of the step log
-
-        The internal naming of the step log is a dot path convention.
-
-        The method should:
-            * Call get_run_log_by_id(run_id) to retrieve the run_log
-            * Identify the step location by decoding the internal naming
-            * Return the step log
-
-        Args:
-            internal_name (str): The internal name of the step log
-            run_id (str): The run_id of the run
-
-        Returns:
-            StepLog: The step log object for the step defined by the internal naming and run_id
-
-        Raises:
-            RunLogNotFoundError: If the run log for run_id is not found in the datastore
-            StepLogNotFoundError: If the step log for internal_name is not found in the datastore for run_id
-        """
-        logger.info(f"{self.service_name} Getting the step log: {internal_name} of {run_id}")
-
-        step_log = self.retrieve(
-            run_id=run_id,
-            log_type=self.LogTypes.STEP_LOG,
-            name=internal_name,
-            multiple_allowed=False,
-        )
-
-        return step_log
-
-    def add_step_log(self, step_log: StepLog, run_id: str, **kwargs):
-        """
-        Add the step log in the run log as identified by the run_id in the datastore
-
-        The method should:
-            * Call get_run_log_by_id(run_id) to retrieve the run_log
-            * Identify the branch to add the step by decoding the step_logs internal name
-            * Add the step log to the identified branch log
-            * Call put_run_log(run_log) to put the run_log in the datastore
-
-        Args:
-            step_log (StepLog): The Step log to add to the database
-            run_id (str): The run id of the run
-
-        Raises:
-            RunLogNotFoundError: If the run log for run_id is not found in the datastore
-            BranchLogNotFoundError: If the branch of the step log for internal_name is not found in the datastore
-            for run_id
-        """
-        logger.info(f"{self.service_name} Adding the step log to DB: {step_log.internal_name}")
-
-        self.store(
-            run_id=run_id,
-            log_type=self.LogTypes.STEP_LOG,
-            contents=step_log.model_dump(),
-            name=step_log.internal_name,
-        )
-
-    def get_branch_log(self, internal_branch_name: str, run_id: str, **kwargs) -> Union[BranchLog, RunLog]:
-        """
-        Returns the branch log by the internal branch name for the run id
-
-        If the internal branch name is none, returns the run log
-
-        Args:
-            internal_branch_name (str): The internal branch name to retrieve.
-            run_id (str): The run id of interest
-
-        Returns:
-            BranchLog: The branch log or the run log as requested.
-        """
-        if not internal_branch_name:
-            return self.get_run_log_by_id(run_id=run_id)
-        branch = self.retrieve(run_id=run_id, log_type=self.LogTypes.BRANCH_LOG, name=internal_branch_name)
-        return branch
-
-    def add_branch_log(self, branch_log: Union[BranchLog, RunLog], run_id: str, **kwargs):
-        """
-        The method should:
-        # Get the run log
-        # Get the branch and step containing the branch
-        # Add the branch to the step
-        # Write the run_log
-
-        The branch log could some times be a Run log and should be handled appropriately
-
-        Args:
-            branch_log (BranchLog): The branch log/run log to add to the database
-            run_id (str): The run id to which the branch/run log is added
-        """
-        if not isinstance(branch_log, BranchLog):
-            self.put_run_log(branch_log)
-            return
-
-        internal_branch_name = branch_log.internal_name
-
-        logger.info(f"{self.service_name} Adding the branch log to DB: {branch_log.internal_name}")
-        self.store(
-            run_id=run_id,
-            log_type=self.LogTypes.BRANCH_LOG,
-            contents=branch_log.model_dump(),
-            name=internal_branch_name,
-        )
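Note: the deleted ChunkedRunLogStore above leaves exactly three hooks abstract (get_matches, _store, _retrieve) and drives everything else through them. The following is a minimal sketch of a backend satisfying that contract, assuming one JSON document per chunk under a local folder; the class name ChunkedFlatFileStore, the service name and the ".chunks" directory are hypothetical, and this is not the packaged chunked_file_system implementation.

# Illustrative only: a minimal flat-file backend for the three abstract hooks of the
# ChunkedRunLogStore shown above. Class name, ".chunks" folder and JSON-per-chunk layout
# are assumptions, not the packaged chunked_file_system implementation.
import json
from pathlib import Path
from string import Template
from typing import Optional, Sequence, Union

T = Union[str, Path]


class ChunkedFlatFileStore:  # in practice this would subclass ChunkedRunLogStore
    service_name: str = "chunked-flat-file"  # hypothetical service name
    base_folder: Path = Path(".chunks")  # hypothetical location of the chunks

    def get_matches(self, run_id: str, name: str, multiple_allowed: bool = False) -> Optional[Union[Sequence[T], T]]:
        # The incoming pattern may still contain "${creation_time}"; blank it before globbing.
        sub_name = Template(name).safe_substitute({"creation_time": ""})
        matches = list((self.base_folder / run_id).glob(f"{sub_name}*"))
        if not matches:
            return None
        return matches if multiple_allowed else matches[0]

    def _store(self, run_id: str, contents: dict, name: T, insert: bool = False):
        # "name" is either an existing path (update) or a plain chunk name (insert=True).
        path = self.base_folder / run_id / Path(name).name
        path.parent.mkdir(parents=True, exist_ok=True)
        path.write_text(json.dumps(contents))

    def _retrieve(self, name: T) -> dict:
        # Read one chunk back as a dict; the base class turns it into the right model.
        return json.loads(Path(name).read_text())

With these three hooks in place, the base class's store(), retrieve() and orderly_retrieve() shown above handle merging, creation-time naming and sorting without further backend code.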
File without changes
runnable/extensions/run_log_store/k8s_pvc/implementation.py
@@ -1,21 +0,0 @@
-import logging
-from pathlib import Path
-
-from runnable import defaults
-from runnable.extensions.run_log_store.file_system.implementation import FileSystemRunLogstore
-
-logger = logging.getLogger(defaults.NAME)
-
-
-class K8PersistentVolumeRunLogstore(FileSystemRunLogstore):
-    """
-    Uses the K8s Persistent Volumes to store run logs.
-    """
-
-    service_name: str = "k8s-pvc"
-    persistent_volume_name: str
-    mount_path: str
-
-    @property
-    def log_folder_name(self) -> str:
-        return str(Path(self.mount_path) / self.log_folder)
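Note: the only behavioural override in the deleted class above is re-rooting the run log folder onto the PVC mount. A minimal illustration of that path composition follows; the values are made up, and the ".run_log_store" folder name is an assumption about the default inherited from FileSystemRunLogstore.

# Illustrative only: how log_folder_name composes the PVC mount path with the inherited
# log folder. Both values below are hypothetical.
from pathlib import Path

mount_path = "/mnt/run-logs"      # hypothetical PVC mount point inside the pod
log_folder = ".run_log_store"     # assumed default log folder of FileSystemRunLogstore

print(Path(mount_path) / log_folder)  # -> /mnt/run-logs/.run_log_store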
runnable/extensions/run_log_store/k8s_pvc/integration.py
@@ -1,56 +0,0 @@
-import logging
-from typing import cast
-
-from runnable import defaults
-from runnable.integration import BaseIntegration
-
-logger = logging.getLogger(defaults.NAME)
-
-
-class LocalCompute(BaseIntegration):
-    """
-    Integration between local and k8's pvc
-    """
-
-    executor_type = "local"
-    service_type = "run_log_store"  # One of secret, catalog, datastore
-    service_provider = "k8s-pvc"  # The actual implementation of the service
-
-    def validate(self, **kwargs):
-        msg = "We can't use the local compute k8s pvc store integration."
-        raise Exception(msg)
-
-
-class LocalContainerCompute(BaseIntegration):
-    """
-    Integration between local-container and k8's pvc
-    """
-
-    executor_type = "local-container"
-    service_type = "run_log_store"  # One of secret, catalog, datastore
-    service_provider = "k8s-pvc"  # The actual implementation of the service
-
-    def validate(self, **kwargs):
-        msg = "We can't use the local-container compute k8s pvc store integration."
-        raise Exception(msg)
-
-
-class ArgoCompute(BaseIntegration):
-    """
-    Integration between argo and k8's pvc
-    """
-
-    executor_type = "argo"
-    service_type = "run_log_store"  # One of secret, catalog, datastore
-    service_provider = "k8s-pvc"  # The actual implementation of the service
-
-    def configure_for_traversal(self, **kwargs):
-        from runnable.extensions.executor.argo.implementation import ArgoExecutor, UserVolumeMounts
-        from runnable.extensions.run_log_store.k8s_pvc.implementation import K8PersistentVolumeRunLogstore
-
-        self.executor = cast(ArgoExecutor, self.executor)
-        self.service = cast(K8PersistentVolumeRunLogstore, self.service)
-
-        volume_mount = UserVolumeMounts(name=self.service.persistent_volume_name, mount_path=self.service.mount_path)
-
-        self.executor.persistent_volumes.append(volume_mount)
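Note: each deleted class above pairs one executor_type with the "k8s-pvc" run log store and either refuses the pairing (validate) or wires volumes into the executor (configure_for_traversal). The sketch below shows the same shape for a hypothetical "mocked" executor; the executor name is an assumption, and the class does not subclass runnable.integration.BaseIntegration here so that it stays self-contained.

# Illustrative only: the integration pattern followed by the removed classes above,
# applied to a hypothetical "mocked" executor. Not part of the package.
class MockedCompute:
    """
    Integration between a hypothetical mocked executor and k8's pvc
    """

    executor_type = "mocked"  # hypothetical executor
    service_type = "run_log_store"  # One of secret, catalog, datastore
    service_provider = "k8s-pvc"  # The actual implementation of the service

    def validate(self, **kwargs):
        # Mirror LocalCompute/LocalContainerCompute above: refuse the pairing outright.
        msg = "We can't use the mocked compute k8s pvc store integration."
        raise Exception(msg)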
File without changes
File without changes