runnable 0.30.2__tar.gz → 0.30.4__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (65)
  1. {runnable-0.30.2 → runnable-0.30.4}/PKG-INFO +1 -1
  2. {runnable-0.30.2 → runnable-0.30.4}/extensions/catalog/any_path.py +4 -2
  3. {runnable-0.30.2 → runnable-0.30.4}/extensions/job_executor/__init__.py +2 -1
  4. {runnable-0.30.2 → runnable-0.30.4}/extensions/job_executor/k8s.py +6 -2
  5. {runnable-0.30.2 → runnable-0.30.4}/extensions/job_executor/local.py +7 -2
  6. {runnable-0.30.2 → runnable-0.30.4}/extensions/job_executor/local_container.py +7 -1
  7. {runnable-0.30.2 → runnable-0.30.4}/extensions/nodes/torch.py +16 -1
  8. {runnable-0.30.2 → runnable-0.30.4}/extensions/pipeline_executor/__init__.py +11 -7
  9. {runnable-0.30.2 → runnable-0.30.4}/extensions/pipeline_executor/argo.py +1 -1
  10. {runnable-0.30.2 → runnable-0.30.4}/pyproject.toml +1 -1
  11. {runnable-0.30.2 → runnable-0.30.4}/runnable/catalog.py +6 -2
  12. {runnable-0.30.2 → runnable-0.30.4}/runnable/executor.py +5 -1
  13. {runnable-0.30.2 → runnable-0.30.4}/.gitignore +0 -0
  14. {runnable-0.30.2 → runnable-0.30.4}/LICENSE +0 -0
  15. {runnable-0.30.2 → runnable-0.30.4}/README.md +0 -0
  16. {runnable-0.30.2 → runnable-0.30.4}/extensions/README.md +0 -0
  17. {runnable-0.30.2 → runnable-0.30.4}/extensions/__init__.py +0 -0
  18. {runnable-0.30.2 → runnable-0.30.4}/extensions/catalog/README.md +0 -0
  19. {runnable-0.30.2 → runnable-0.30.4}/extensions/catalog/file_system.py +0 -0
  20. {runnable-0.30.2 → runnable-0.30.4}/extensions/catalog/minio.py +0 -0
  21. {runnable-0.30.2 → runnable-0.30.4}/extensions/catalog/pyproject.toml +0 -0
  22. {runnable-0.30.2 → runnable-0.30.4}/extensions/catalog/s3.py +0 -0
  23. {runnable-0.30.2 → runnable-0.30.4}/extensions/job_executor/README.md +0 -0
  24. {runnable-0.30.2 → runnable-0.30.4}/extensions/job_executor/k8s_job_spec.yaml +0 -0
  25. {runnable-0.30.2 → runnable-0.30.4}/extensions/job_executor/pyproject.toml +0 -0
  26. {runnable-0.30.2 → runnable-0.30.4}/extensions/nodes/README.md +0 -0
  27. {runnable-0.30.2 → runnable-0.30.4}/extensions/nodes/nodes.py +0 -0
  28. {runnable-0.30.2 → runnable-0.30.4}/extensions/nodes/pyproject.toml +0 -0
  29. {runnable-0.30.2 → runnable-0.30.4}/extensions/nodes/torch_config.py +0 -0
  30. {runnable-0.30.2 → runnable-0.30.4}/extensions/pipeline_executor/README.md +0 -0
  31. {runnable-0.30.2 → runnable-0.30.4}/extensions/pipeline_executor/local.py +0 -0
  32. {runnable-0.30.2 → runnable-0.30.4}/extensions/pipeline_executor/local_container.py +0 -0
  33. {runnable-0.30.2 → runnable-0.30.4}/extensions/pipeline_executor/mocked.py +0 -0
  34. {runnable-0.30.2 → runnable-0.30.4}/extensions/pipeline_executor/pyproject.toml +0 -0
  35. {runnable-0.30.2 → runnable-0.30.4}/extensions/pipeline_executor/retry.py +0 -0
  36. {runnable-0.30.2 → runnable-0.30.4}/extensions/run_log_store/README.md +0 -0
  37. {runnable-0.30.2 → runnable-0.30.4}/extensions/run_log_store/__init__.py +0 -0
  38. {runnable-0.30.2 → runnable-0.30.4}/extensions/run_log_store/any_path.py +0 -0
  39. {runnable-0.30.2 → runnable-0.30.4}/extensions/run_log_store/chunked_fs.py +0 -0
  40. {runnable-0.30.2 → runnable-0.30.4}/extensions/run_log_store/chunked_minio.py +0 -0
  41. {runnable-0.30.2 → runnable-0.30.4}/extensions/run_log_store/db/implementation_FF.py +0 -0
  42. {runnable-0.30.2 → runnable-0.30.4}/extensions/run_log_store/db/integration_FF.py +0 -0
  43. {runnable-0.30.2 → runnable-0.30.4}/extensions/run_log_store/file_system.py +0 -0
  44. {runnable-0.30.2 → runnable-0.30.4}/extensions/run_log_store/generic_chunked.py +0 -0
  45. {runnable-0.30.2 → runnable-0.30.4}/extensions/run_log_store/minio.py +0 -0
  46. {runnable-0.30.2 → runnable-0.30.4}/extensions/run_log_store/pyproject.toml +0 -0
  47. {runnable-0.30.2 → runnable-0.30.4}/extensions/secrets/README.md +0 -0
  48. {runnable-0.30.2 → runnable-0.30.4}/extensions/secrets/dotenv.py +0 -0
  49. {runnable-0.30.2 → runnable-0.30.4}/extensions/secrets/pyproject.toml +0 -0
  50. {runnable-0.30.2 → runnable-0.30.4}/runnable/__init__.py +0 -0
  51. {runnable-0.30.2 → runnable-0.30.4}/runnable/cli.py +0 -0
  52. {runnable-0.30.2 → runnable-0.30.4}/runnable/context.py +0 -0
  53. {runnable-0.30.2 → runnable-0.30.4}/runnable/datastore.py +0 -0
  54. {runnable-0.30.2 → runnable-0.30.4}/runnable/defaults.py +0 -0
  55. {runnable-0.30.2 → runnable-0.30.4}/runnable/entrypoints.py +0 -0
  56. {runnable-0.30.2 → runnable-0.30.4}/runnable/exceptions.py +0 -0
  57. {runnable-0.30.2 → runnable-0.30.4}/runnable/graph.py +0 -0
  58. {runnable-0.30.2 → runnable-0.30.4}/runnable/names.py +0 -0
  59. {runnable-0.30.2 → runnable-0.30.4}/runnable/nodes.py +0 -0
  60. {runnable-0.30.2 → runnable-0.30.4}/runnable/parameters.py +0 -0
  61. {runnable-0.30.2 → runnable-0.30.4}/runnable/pickler.py +0 -0
  62. {runnable-0.30.2 → runnable-0.30.4}/runnable/sdk.py +0 -0
  63. {runnable-0.30.2 → runnable-0.30.4}/runnable/secrets.py +0 -0
  64. {runnable-0.30.2 → runnable-0.30.4}/runnable/tasks.py +0 -0
  65. {runnable-0.30.2 → runnable-0.30.4}/runnable/utils.py +0 -0
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.4
2
2
  Name: runnable
3
- Version: 0.30.2
3
+ Version: 0.30.4
4
4
  Summary: Add your description here
5
5
  Author-email: "Vammi, Vijay" <vijay.vammi@astrazeneca.com>
6
6
  License-File: LICENSE
@@ -94,7 +94,9 @@ class AnyPathCatalog(BaseCatalog):
94
94
 
95
95
  return data_catalogs
96
96
 
97
- def put(self, name: str) -> List[DataCatalog]:
97
+ def put(
98
+ self, name: str, allow_file_not_found_exc: bool = False
99
+ ) -> List[DataCatalog]:
98
100
  """
99
101
  Put the files matching the glob pattern into the catalog.
100
102
 
@@ -154,7 +156,7 @@ class AnyPathCatalog(BaseCatalog):
154
156
  # TODO: Think about syncing only if the file is changed
155
157
  self.upload_to_catalog(file)
156
158
 
157
- if not data_catalogs:
159
+ if not data_catalogs and not allow_file_not_found_exc:
158
160
  raise Exception(f"Did not find any files matching {name} in {copy_from}")
159
161
 
160
162
  return data_catalogs
@@ -139,6 +139,7 @@ class GenericJobExecutor(BaseJobExecutor):
139
139
  def _sync_catalog(
140
140
  self,
141
141
  catalog_settings=Optional[List[str]],
142
+ allow_file_not_found_exc: bool = False,
142
143
  ) -> List[DataCatalog] | None:
143
144
  if not catalog_settings:
144
145
  logger.info("No catalog settings found")
@@ -147,7 +148,7 @@ class GenericJobExecutor(BaseJobExecutor):
147
148
  data_catalogs = []
148
149
  for name_pattern in catalog_settings:
149
150
  data_catalog = self._context.catalog_handler.put(
150
- name=name_pattern,
151
+ name=name_pattern, allow_file_not_found_exc=allow_file_not_found_exc
151
152
  )
152
153
 
153
154
  logger.debug(f"Added data catalog: {data_catalog} to job log")
@@ -7,7 +7,6 @@ from kubernetes import client
7
7
  from kubernetes import config as k8s_config
8
8
  from pydantic import BaseModel, ConfigDict, Field, PlainSerializer, PrivateAttr
9
9
  from pydantic.alias_generators import to_camel
10
- from rich import print
11
10
 
12
11
  from extensions.job_executor import GenericJobExecutor
13
12
  from runnable import console, defaults, utils
@@ -223,8 +222,13 @@ class GenericK8sJobExecutor(GenericJobExecutor):
223
222
  job_log.status = attempt_log.status
224
223
  job_log.attempts.append(attempt_log)
225
224
 
225
+ allow_file_not_found_exc = True
226
+ if job_log.status == defaults.SUCCESS:
227
+ allow_file_not_found_exc = False
228
+
226
229
  data_catalogs_put: Optional[List[DataCatalog]] = self._sync_catalog(
227
- catalog_settings=catalog_settings
230
+ catalog_settings=catalog_settings,
231
+ allow_file_not_found_exc=allow_file_not_found_exc,
228
232
  )
229
233
  logger.debug(f"data_catalogs_put: {data_catalogs_put}")
230
234
 
@@ -55,11 +55,16 @@ class LocalJobExecutor(GenericJobExecutor):
55
55
  job_log.status = attempt_log.status
56
56
  job_log.attempts.append(attempt_log)
57
57
 
58
+ allow_file_not_found_exc = True
59
+ if job_log.status == defaults.SUCCESS:
60
+ allow_file_not_found_exc = False
61
+
58
62
  data_catalogs_put: Optional[List[DataCatalog]] = self._sync_catalog(
59
- catalog_settings=catalog_settings
63
+ catalog_settings=catalog_settings,
64
+ allow_file_not_found_exc=allow_file_not_found_exc,
60
65
  )
61
- logger.debug(f"data_catalogs_put: {data_catalogs_put}")
62
66
 
67
+ logger.debug(f"data_catalogs_put: {data_catalogs_put}")
63
68
  job_log.add_data_catalogs(data_catalogs_put or [])
64
69
 
65
70
  console.print("Summary of job")
@@ -64,9 +64,15 @@ class LocalContainerJobExecutor(GenericJobExecutor):
64
64
  job_log.status = attempt_log.status
65
65
  job_log.attempts.append(attempt_log)
66
66
 
67
+ allow_file_not_found_exc = True
68
+ if job_log.status == defaults.SUCCESS:
69
+ allow_file_not_found_exc = False
70
+
67
71
  data_catalogs_put: Optional[List[DataCatalog]] = self._sync_catalog(
68
- catalog_settings=catalog_settings
72
+ catalog_settings=catalog_settings,
73
+ allow_file_not_found_exc=allow_file_not_found_exc,
69
74
  )
75
+
70
76
  logger.debug(f"data_catalogs_put: {data_catalogs_put}")
71
77
 
72
78
  job_log.add_data_catalogs(data_catalogs_put or [])
@@ -1,6 +1,8 @@
1
1
  import importlib
2
2
  import logging
3
3
  import os
4
+ import random
5
+ import string
4
6
  from datetime import datetime
5
7
  from typing import Any, Callable
6
8
 
@@ -28,7 +30,13 @@ def training_subprocess():
28
30
  command = os.environ.get("RUNNABLE_TORCH_COMMAND")
29
31
  run_id = os.environ.get("RUNNABLE_TORCH_RUN_ID", "")
30
32
  parameters_files = os.environ.get("RUNNABLE_TORCH_PARAMETERS_FILES", "")
31
- process_run_id = run_id + "-" + os.environ.get("RANK", "")
33
+ process_run_id = (
34
+ run_id
35
+ + "-"
36
+ + os.environ.get("RANK", "")
37
+ + "-"
38
+ + "".join(random.choices(string.ascii_lowercase, k=3))
39
+ )
32
40
 
33
41
  delete_env_vars_with_prefix("RUNNABLE_")
34
42
 
@@ -40,6 +48,13 @@ def training_subprocess():
40
48
  job_id=process_run_id,
41
49
  )
42
50
 
51
+ from runnable.context import run_context
52
+
53
+ job_log = run_context.run_log_store.get_run_log_by_id(run_id=run_context.run_id)
54
+
55
+ if job_log.status == defaults.FAIL:
56
+ raise Exception(f"Job {process_run_id} failed")
57
+
43
58
 
44
59
  def get_callable_from_dotted_path(dotted_path) -> Callable:
45
60
  try:
@@ -111,7 +111,7 @@ class GenericPipelineExecutor(BasePipelineExecutor):
111
111
  )
112
112
 
113
113
  def _sync_catalog(
114
- self, stage: str, synced_catalogs=None
114
+ self, stage: str, synced_catalogs=None, allow_file_no_found_exc: bool = False
115
115
  ) -> Optional[List[DataCatalog]]:
116
116
  """
117
117
  1). Identify the catalog settings by over-riding node settings with the global settings.
@@ -160,7 +160,7 @@ class GenericPipelineExecutor(BasePipelineExecutor):
160
160
 
161
161
  elif stage == "put":
162
162
  data_catalog = self._context.catalog_handler.put(
163
- name=name_pattern,
163
+ name=name_pattern, allow_file_not_found_exc=allow_file_no_found_exc
164
164
  )
165
165
  else:
166
166
  raise Exception(f"Stage {stage} not supported")
@@ -233,12 +233,16 @@ class GenericPipelineExecutor(BasePipelineExecutor):
233
233
  mock=mock,
234
234
  )
235
235
 
236
+ allow_file_not_found_exc = True
236
237
  if step_log.status == defaults.SUCCESS:
237
- data_catalogs_put: Optional[List[DataCatalog]] = self._sync_catalog(
238
- stage="put"
239
- )
240
- logger.debug(f"data_catalogs_put: {data_catalogs_put}")
241
- step_log.add_data_catalogs(data_catalogs_put or [])
238
+ # raise exception if we succeeded but the file was not found
239
+ allow_file_not_found_exc = False
240
+
241
+ data_catalogs_put: Optional[List[DataCatalog]] = self._sync_catalog(
242
+ stage="put", allow_file_no_found_exc=allow_file_not_found_exc
243
+ )
244
+ logger.debug(f"data_catalogs_put: {data_catalogs_put}")
245
+ step_log.add_data_catalogs(data_catalogs_put or [])
242
246
 
243
247
  # get catalog should always be added to the step log
244
248
  step_log.add_data_catalogs(data_catalogs_get or [])
@@ -658,7 +658,7 @@ class ArgoExecutor(GenericPipelineExecutor):
658
658
  def _set_env_vars_to_task(
659
659
  self, working_on: BaseNode, container_template: CoreContainerTemplate
660
660
  ):
661
- if not isinstance(working_on, TaskNode) or isinstance(working_on, TorchNode):
661
+ if not (isinstance(working_on, TaskNode) or isinstance(working_on, TorchNode)):
662
662
  return
663
663
 
664
664
  global_envs: dict[str, str] = {}
@@ -1,6 +1,6 @@
1
1
  [project]
2
2
  name = "runnable"
3
- version = "0.30.2"
3
+ version = "0.30.4"
4
4
  description = "Add your description here"
5
5
  readme = "README.md"
6
6
  authors = [
@@ -76,7 +76,9 @@ class BaseCatalog(ABC, BaseModel):
76
76
  raise NotImplementedError
77
77
 
78
78
  @abstractmethod
79
- def put(self, name: str) -> List[DataCatalog]:
79
+ def put(
80
+ self, name: str, allow_file_not_found_exc: bool = False
81
+ ) -> List[DataCatalog]:
80
82
  """
81
83
  Put the file by 'name' from the 'compute_data_folder' in the catalog for the run_id.
82
84
 
@@ -137,7 +139,9 @@ class DoNothingCatalog(BaseCatalog):
137
139
  logger.info("Using a do-nothing catalog, doing nothing in get")
138
140
  return []
139
141
 
140
- def put(self, name: str) -> List[DataCatalog]:
142
+ def put(
143
+ self, name: str, allow_file_not_found_exc: bool = False
144
+ ) -> List[DataCatalog]:
141
145
  """
142
146
  Does nothing
143
147
  """
@@ -123,6 +123,7 @@ class BaseJobExecutor(BaseExecutor):
123
123
  def _sync_catalog(
124
124
  self,
125
125
  catalog_settings: Optional[List[str]],
126
+ allow_file_not_found_exc: bool = False,
126
127
  ) -> Optional[List[DataCatalog]]:
127
128
  """
128
129
  1). Identify the catalog settings by over-riding node settings with the global settings.
@@ -175,7 +176,10 @@ class BasePipelineExecutor(BaseExecutor):
175
176
 
176
177
  @abstractmethod
177
178
  def _sync_catalog(
178
- self, stage: str, synced_catalogs=None
179
+ self,
180
+ stage: str,
181
+ synced_catalogs=None,
182
+ allow_file_no_found_exc: bool = False,
179
183
  ) -> Optional[List[DataCatalog]]:
180
184
  """
181
185
  1). Identify the catalog settings by over-riding node settings with the global settings.
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes