runnable 0.26.0__py3-none-any.whl → 0.27.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -4,6 +4,7 @@ from pathlib import Path
  from typing import Any

  from cloudpathlib import CloudPath, S3Client, S3Path
+ from pydantic import Field, SecretStr

  from extensions.catalog.any_path import AnyPathCatalog
  from runnable import defaults
@@ -25,9 +26,9 @@ def get_minio_client(
  class MinioCatalog(AnyPathCatalog):
      service_name: str = "minio"

-     endpoint_url: str = "http://localhost:9002"
-     aws_access_key_id: str = "minioadmin"
-     aws_secret_access_key: str = "minioadmin"
+     endpoint_url: str = Field(default="http://localhost:9002")
+     aws_access_key_id: SecretStr = SecretStr(secret_value="minioadmin")
+     aws_secret_access_key: SecretStr = SecretStr(secret_value="minioadmin")
      bucket: str = "runnable"

      def get_summary(self) -> dict[str, Any]:
@@ -44,7 +45,9 @@ class MinioCatalog(AnyPathCatalog):
          return S3Path(
              f"s3://{self.bucket}/{run_id}/{self.compute_data_folder}".strip("."),
              client=get_minio_client(
-                 self.endpoint_url, self.aws_access_key_id, self.aws_secret_access_key
+                 self.endpoint_url,
+                 self.aws_access_key_id.get_secret_value(),
+                 self.aws_secret_access_key.get_secret_value(),
              ),
          )

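Note: the hunks above (which, going by the RECORD section further down, modify extensions/catalog/minio.py) move the MinIO credentials from plain str fields to pydantic SecretStr. A minimal sketch of what that buys, outside the package and assuming pydantic v2 behaviour: secrets are masked in reprs and serialized dumps and are only exposed through get_secret_value(), which is why the S3 client call now unwraps them explicitly.

from pydantic import BaseModel, SecretStr

class DemoMinioSettings(BaseModel):
    # Hypothetical stand-in for the catalog fields shown in the diff above.
    endpoint_url: str = "http://localhost:9002"
    aws_access_key_id: SecretStr = SecretStr("minioadmin")
    aws_secret_access_key: SecretStr = SecretStr("minioadmin")

settings = DemoMinioSettings()
print(settings)                    # credentials rendered as '**********'
print(settings.model_dump_json())  # masked in JSON dumps as well
# Only an explicit call reveals the value, e.g. when handing it to the S3 client:
print(settings.aws_access_key_id.get_secret_value())  # 'minioadmin'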
@@ -0,0 +1,104 @@
+ import logging
+ from abc import abstractmethod
+ from typing import Any, Dict
+
+ from runnable import defaults, exceptions
+ from runnable.datastore import BaseRunLogStore, RunLog
+
+ logger = logging.getLogger(defaults.LOGGER_NAME)
+
+
+ class AnyPathRunLogStore(BaseRunLogStore):
+     """
+     In this type of Run Log store, we use a file system to store the JSON run log.
+
+     Every single run is stored as a different file which makes it compatible across other store types.
+
+     When to use:
+         When locally testing a pipeline and have the need to compare across runs.
+         Its fully featured and perfectly fine if your local environment is where you would do everything.
+
+     Do not use:
+         If you need parallelization on local, this run log would not support it.
+
+     Example config:
+
+     run_log:
+       type: file-system
+       config:
+         log_folder: The folder to out the logs. Defaults to .run_log_store
+
+     """
+
+     service_name: str = "file-system"
+     log_folder: str = defaults.LOG_LOCATION_FOLDER
+
+     @property
+     def log_folder_name(self):
+         return self.log_folder
+
+     def get_summary(self) -> Dict[str, Any]:
+         summary = {"Type": self.service_name, "Location": self.log_folder}
+
+         return summary
+
+     @abstractmethod
+     def write_to_path(self, run_log: RunLog): ...
+
+     @abstractmethod
+     def read_from_path(self, run_id: str) -> RunLog: ...
+
+     def create_run_log(
+         self,
+         run_id: str,
+         dag_hash: str = "",
+         use_cached: bool = False,
+         tag: str = "",
+         original_run_id: str = "",
+         status: str = defaults.CREATED,
+     ) -> RunLog:
+         """
+         # Creates a Run log
+         # Adds it to the db
+         """
+
+         try:
+             self.get_run_log_by_id(run_id=run_id, full=False)
+             raise exceptions.RunLogExistsError(run_id=run_id)
+         except exceptions.RunLogNotFoundError:
+             pass
+
+         logger.info(f"{self.service_name} Creating a Run Log for : {run_id}")
+         run_log = RunLog(
+             run_id=run_id,
+             dag_hash=dag_hash,
+             tag=tag,
+             status=status,
+         )
+         self.write_to_path(run_log)
+         return run_log
+
+     def get_run_log_by_id(
+         self,
+         run_id: str,
+         full: bool = False,
+     ) -> RunLog:
+         """
+         # Returns the run_log defined by id
+         # Raises Exception if not found
+         """
+         try:
+             logger.info(f"{self.service_name} Getting a Run Log for : {run_id}")
+             run_log = self.read_from_path(run_id)
+             return run_log
+         except FileNotFoundError as e:
+             raise exceptions.RunLogNotFoundError(run_id) from e
+
+     def put_run_log(self, run_log: RunLog):
+         """
+         # Puts the run_log into the database
+         """
+         logger.info(
+             f"{self.service_name} Putting the run log in the DB: {run_log.run_id}"
+         )
+         self.write_to_path(run_log)
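Note: this new file (extensions/run_log_store/any_path.py, per the RECORD entry added below) keeps create_run_log, get_run_log_by_id and put_run_log in the base class and leaves only two path-level primitives to subclasses. A hedged sketch of that contract, using a hypothetical in-memory subclass that is not part of the package:

from extensions.run_log_store.any_path import AnyPathRunLogStore
from runnable.datastore import RunLog

# Module-level dict standing in for a real persistence layer (illustration only).
_IN_MEMORY_LOGS: dict = {}

class InMemoryRunLogStore(AnyPathRunLogStore):
    """Hypothetical store: only the two primitives below need to be supplied."""

    service_name: str = "in-memory"

    def write_to_path(self, run_log: RunLog):
        # Called by the inherited create_run_log and put_run_log.
        _IN_MEMORY_LOGS[run_log.run_id] = run_log.model_dump()

    def read_from_path(self, run_id: str) -> RunLog:
        # Raising FileNotFoundError lets the base class translate it into
        # exceptions.RunLogNotFoundError inside get_run_log_by_id.
        if run_id not in _IN_MEMORY_LOGS:
            raise FileNotFoundError(run_id)
        return RunLog(**_IN_MEMORY_LOGS[run_id])

With those two methods in place, the inherited create_run_log builds a RunLog, hands it to write_to_path and returns it, and get_run_log_by_id turns a FileNotFoundError from read_from_path into RunLogNotFoundError, exactly as the base class code above shows.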
@@ -2,14 +2,16 @@ import json
  import logging
  from pathlib import Path
  from string import Template
- from typing import Any, Dict, Optional, Sequence, Union
+ from typing import Any, Dict, Optional, Union
+
+ from cloudpathlib import CloudPath

  from extensions.run_log_store.generic_chunked import ChunkedRunLogStore
  from runnable import defaults, utils

  logger = logging.getLogger(defaults.LOGGER_NAME)

- T = Union[str, Path]
+ MixT = Union[CloudPath, Path]


  class ChunkedFileSystemRunLogStore(ChunkedRunLogStore):
@@ -28,7 +30,7 @@ class ChunkedFileSystemRunLogStore(ChunkedRunLogStore):

      def get_matches(
          self, run_id: str, name: str, multiple_allowed: bool = False
-     ) -> Optional[Union[Sequence[T], T]]:
+     ) -> Optional[Union[list[Path], list[CloudPath], MixT]]:
          """
          Get contents of files matching the pattern name*

@@ -78,7 +80,7 @@ class ChunkedFileSystemRunLogStore(ChunkedRunLogStore):

          return str(name) + ".json"

-     def _store(self, run_id: str, contents: dict, name: Union[Path, str], insert=False):
+     def _store(self, run_id: str, contents: dict, name: MixT, insert=False):
          """
          Store the contents against the name in the folder.

@@ -87,15 +89,16 @@ class ChunkedFileSystemRunLogStore(ChunkedRunLogStore):
              contents (dict): The dict to store
              name (str): The name to store as
          """
+         log_folder_with_run_id = self.log_folder_with_run_id(run_id=run_id)
          if insert:
-             name = self.log_folder_with_run_id(run_id=run_id) / name
+             name = log_folder_with_run_id / name

-         utils.safe_make_dir(self.log_folder_with_run_id(run_id=run_id))
+         utils.safe_make_dir(log_folder_with_run_id)

-         with open(self.safe_suffix_json(name), "w") as fw:
+         with open(log_folder_with_run_id / self.safe_suffix_json(name.name), "w") as fw:
              json.dump(contents, fw, ensure_ascii=True, indent=4)

-     def _retrieve(self, name: Union[str, Path]) -> dict:
+     def _retrieve(self, run_id: str, name: MixT) -> dict:
          """
          Does the job of retrieving from the folder.

@@ -106,8 +109,9 @@ class ChunkedFileSystemRunLogStore(ChunkedRunLogStore):
              dict: The contents
          """
          contents: dict = {}
+         log_folder_with_run_id = self.log_folder_with_run_id(run_id=run_id)

-         with open(self.safe_suffix_json(name), "r") as fr:
+         with open(log_folder_with_run_id / self.safe_suffix_json(name.name), "r") as fr:
              contents = json.load(fr)

          return contents
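Note: the chunked file-system hunks above replace the str-or-Path alias T with MixT = Union[CloudPath, Path], and _store/_retrieve now join paths with "/" and read name.name instead of treating name as a string. A small sketch, independent of the package, of the shared pathlib-style surface that MixT relies on; the bucket and folder names below are invented for the example.

from pathlib import Path
from cloudpathlib import S3Path

local = Path(".run_log_store/demo-run") / "RunLog.json"
remote = S3Path("s3://some-bucket/demo-run") / "RunLog.json"  # illustrative bucket

# Both flavours expose the same attributes the new _store/_retrieve code uses.
for candidate in (local, remote):
    print(type(candidate).__name__, candidate.name, candidate.suffix)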
@@ -3,13 +3,14 @@ import logging
  from pathlib import Path
  from typing import Any, Dict

- from runnable import defaults, exceptions, utils
- from runnable.datastore import BaseRunLogStore, RunLog
+ from extensions.run_log_store.any_path import AnyPathRunLogStore
+ from runnable import defaults, utils
+ from runnable.datastore import RunLog

  logger = logging.getLogger(defaults.LOGGER_NAME)


- class FileSystemRunLogstore(BaseRunLogStore):
+ class FileSystemRunLogstore(AnyPathRunLogStore):
      """
      In this type of Run Log store, we use a file system to store the JSON run log.

@@ -43,7 +44,7 @@ class FileSystemRunLogstore(BaseRunLogStore):

          return summary

-     def write_to_folder(self, run_log: RunLog):
+     def write_to_path(self, run_log: RunLog):
          """
          Write the run log to the folder

@@ -60,7 +61,7 @@ class FileSystemRunLogstore(BaseRunLogStore):
          with json_file_path.open("w") as fw:
              json.dump(run_log.model_dump(), fw, ensure_ascii=True, indent=4)  # pylint: disable=no-member

-     def get_from_folder(self, run_id: str) -> RunLog:
+     def read_from_path(self, run_id: str) -> RunLog:
          """
          Look into the run log folder for the run log for the run id.

@@ -88,58 +89,3 @@ class FileSystemRunLogstore(BaseRunLogStore):
          json_str = json.load(fr)
          run_log = RunLog(**json_str)  # pylint: disable=no-member
          return run_log
-
-     def create_run_log(
-         self,
-         run_id: str,
-         dag_hash: str = "",
-         use_cached: bool = False,
-         tag: str = "",
-         original_run_id: str = "",
-         status: str = defaults.CREATED,
-     ) -> RunLog:
-         """
-         # Creates a Run log
-         # Adds it to the db
-         """
-
-         try:
-             self.get_run_log_by_id(run_id=run_id, full=False)
-             raise exceptions.RunLogExistsError(run_id=run_id)
-         except exceptions.RunLogNotFoundError:
-             pass
-
-         logger.info(f"{self.service_name} Creating a Run Log for : {run_id}")
-         run_log = RunLog(
-             run_id=run_id,
-             dag_hash=dag_hash,
-             tag=tag,
-             status=status,
-         )
-         self.write_to_folder(run_log)
-         return run_log
-
-     def get_run_log_by_id(
-         self,
-         run_id: str,
-         full: bool = False,
-     ) -> RunLog:
-         """
-         # Returns the run_log defined by id
-         # Raises Exception if not found
-         """
-         try:
-             logger.info(f"{self.service_name} Getting a Run Log for : {run_id}")
-             run_log = self.get_from_folder(run_id)
-             return run_log
-         except FileNotFoundError as e:
-             raise exceptions.RunLogNotFoundError(run_id) from e
-
-     def put_run_log(self, run_log: RunLog):
-         """
-         # Puts the run_log into the database
-         """
-         logger.info(
-             f"{self.service_name} Putting the run log in the DB: {run_log.run_id}"
-         )
-         self.write_to_folder(run_log)
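Note: after this refactor, FileSystemRunLogstore keeps only write_to_path/read_from_path; the lifecycle methods removed at the bottom of the hunk now come from AnyPathRunLogStore. A minimal local smoke test, assumed rather than taken from the package docs (the log_folder field is inherited from the base class shown earlier):

from extensions.run_log_store.file_system import FileSystemRunLogstore

store = FileSystemRunLogstore(log_folder=".run_log_store")  # assumed to work standalone

# create_run_log and get_run_log_by_id are now the inherited implementations;
# they call write_to_path / read_from_path under the hood.
created = store.create_run_log(run_id="demo-run-001", tag="local")
fetched = store.get_run_log_by_id(run_id="demo-run-001")
assert fetched.run_id == created.run_id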
@@ -4,7 +4,9 @@ from abc import abstractmethod
  from enum import Enum
  from pathlib import Path
  from string import Template
- from typing import Any, Dict, Optional, Sequence, Union
+ from typing import Any, Dict, Optional, Union
+
+ from cloudpathlib import CloudPath

  from runnable import defaults, exceptions
  from runnable.datastore import (
@@ -21,7 +23,7 @@ from runnable.datastore import (
  logger = logging.getLogger(defaults.LOGGER_NAME)


- T = Union[str, Path]  # Holds str, path
+ MixT = Union[CloudPath, Path]  # Holds str, path


  class EntityNotFoundError(Exception):
@@ -87,7 +89,7 @@ class ChunkedRunLogStore(BaseRunLogStore):
      @abstractmethod
      def get_matches(
          self, run_id: str, name: str, multiple_allowed: bool = False
-     ) -> Optional[Union[Sequence[T], T]]:
+     ) -> Optional[Union[list[Path], list[CloudPath], MixT]]:
          """
          Get contents of persistence layer matching the pattern name*

@@ -98,7 +100,7 @@ class ChunkedRunLogStore(BaseRunLogStore):

          ...

      @abstractmethod
-     def _store(self, run_id: str, contents: dict, name: T, insert: bool = False):
+     def _store(self, run_id: str, contents: dict, name: MixT, insert: bool = False):
          """
          Store the contents against the name in the persistence layer.

@@ -110,7 +112,7 @@ class ChunkedRunLogStore(BaseRunLogStore):

          ...

      @abstractmethod
-     def _retrieve(self, name: T) -> dict:
+     def _retrieve(self, run_id: str, name: MixT) -> dict:
          """
          Does the job of retrieving from the persistent layer.

@@ -140,7 +142,7 @@ class ChunkedRunLogStore(BaseRunLogStore):
          insert = False

          if match:
-             existing_contents = self._retrieve(name=match)  # type: ignore
+             existing_contents = self._retrieve(run_id=run_id, name=match)  # type: ignore
              contents = dict(existing_contents, **contents)
              name_to_give = match  # type: ignore
          else:
@@ -149,7 +151,9 @@
              )
              insert = True

-         self._store(run_id=run_id, contents=contents, name=name_to_give, insert=insert)
+         self._store(
+             run_id=run_id, contents=contents, name=Path(name_to_give), insert=insert
+         )

      def retrieve(
          self, run_id: str, log_type: LogTypes, name: str = "", multiple_allowed=False
@@ -190,13 +194,13 @@

          if matches:
              if not multiple_allowed:
-                 contents = self._retrieve(name=matches)  # type: ignore
+                 contents = self._retrieve(run_id=run_id, name=matches)  # type: ignore
                  model = self.ModelTypes[log_type.name].value
                  return model(**contents)

              models = []
              for match in matches:  # type: ignore
-                 contents = self._retrieve(name=match)
+                 contents = self._retrieve(run_id=run_id, name=match)
                  model = self.ModelTypes[log_type.name].value
                  models.append(model(**contents))
              return models
@@ -225,7 +229,9 @@
              # No branch logs are found
              return {}
          # Forcing get_matches to always return a list is a better design
-         epoch_created = [str(match).split("-")[-1] for match in matches]  # type: ignore
+
+         assert isinstance(matches, list)
+         epoch_created = [str(match).split("-")[-1] for match in matches]

          # sort matches by epoch created
          epoch_created, matches = zip(*sorted(zip(epoch_created, matches)))  # type: ignore
@@ -234,7 +240,7 @@

          for match in matches:
              model = self.ModelTypes[log_type.name].value
-             log_model = model(**self._retrieve(match))
+             log_model = model(**self._retrieve(run_id=run_id, name=match))
              logs[log_model.internal_name] = log_model  # type: ignore

          return logs
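Note: the generic chunked store now passes run_id down to _retrieve and asserts that get_matches returned a list before sorting. The sort itself relies on an epoch suffix baked into the chunk names; a small, self-contained illustration of that ordering logic follows, where the names are invented and only the trailing "-<epoch>" shape is taken from the code above.

from pathlib import Path

# Invented chunk names; only the "-<epoch>" suffix convention mirrors the diff above.
matches = [
    Path("BranchLog-map.step-1700000200"),
    Path("BranchLog-map.step-1700000100"),
]

epoch_created = [str(match).split("-")[-1] for match in matches]
epoch_created, ordered = zip(*sorted(zip(epoch_created, matches)))
print([p.name for p in ordered])
# ['BranchLog-map.step-1700000100', 'BranchLog-map.step-1700000200']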
@@ -0,0 +1,111 @@
+ import json
+ import logging
+ from functools import lru_cache
+ from typing import Any, Dict
+
+ from cloudpathlib import S3Client, S3Path
+ from pydantic import Field, SecretStr
+
+ from extensions.run_log_store.any_path import AnyPathRunLogStore
+ from runnable import defaults
+ from runnable.datastore import RunLog
+
+ logger = logging.getLogger(defaults.LOGGER_NAME)
+
+
+ @lru_cache
+ def get_minio_client(
+     endpoint_url: str, aws_access_key_id: str, aws_secret_access_key: str
+ ) -> S3Client:
+     return S3Client(
+         endpoint_url=endpoint_url,
+         aws_access_key_id=aws_access_key_id,
+         aws_secret_access_key=aws_secret_access_key,
+     )
+
+
+ class MinioRunLogStore(AnyPathRunLogStore):
+     """
+     In this type of Run Log store, we use a file system to store the JSON run log.
+
+     Every single run is stored as a different file which makes it compatible across other store types.
+
+     When to use:
+         When locally testing a pipeline and have the need to compare across runs.
+         Its fully featured and perfectly fine if your local environment is where you would do everything.
+
+     Do not use:
+         If you need parallelization on local, this run log would not support it.
+
+     Example config:
+
+     run_log:
+       type: file-system
+       config:
+         log_folder: The folder to out the logs. Defaults to .run_log_store
+
+     """
+
+     service_name: str = "file-system"
+
+     endpoint_url: str = Field(default="http://localhost:9002")
+     aws_access_key_id: SecretStr = SecretStr(secret_value="minioadmin")
+     aws_secret_access_key: SecretStr = SecretStr(secret_value="minioadmin")
+     bucket: str = Field(default="runnable/run-logs")
+
+     def get_summary(self) -> Dict[str, Any]:
+         summary = {"Type": self.service_name, "Location": self.log_folder}
+
+         return summary
+
+     def get_run_log_bucket(self) -> S3Path:
+         run_id = self._context.run_id
+
+         return S3Path(
+             f"s3://{self.bucket}/{run_id}/",
+             client=get_minio_client(
+                 self.endpoint_url,
+                 self.aws_access_key_id.get_secret_value(),
+                 self.aws_secret_access_key.get_secret_value(),
+             ),
+         )
+
+     def write_to_path(self, run_log: RunLog):
+         """
+         Write the run log to the folder
+
+         Args:
+             run_log (RunLog): The run log to be added to the database
+         """
+         run_log_bucket = self.get_run_log_bucket()
+         run_log_bucket.mkdir(parents=True, exist_ok=True)
+
+         run_log_object = run_log_bucket / f"{run_log.run_id}.json"
+         run_log_object.write_text(
+             json.dumps(run_log.model_dump_json(), ensure_ascii=True, indent=4)
+         )
+
+     def read_from_path(self, run_id: str) -> RunLog:
+         """
+         Look into the run log folder for the run log for the run id.
+
+         If the run log does not exist, raise an exception. If it does, decode it
+         as a RunLog and return it
+
+         Args:
+             run_id (str): The requested run id to retrieve the run log store
+
+         Raises:
+             FileNotFoundError: If the Run Log has not been found.
+
+         Returns:
+             RunLog: The decoded Run log
+         """
+         run_log_bucket = self.get_run_log_bucket()
+
+         run_log_object = run_log_bucket / f"{run_id}.json"
+
+         run_log_text = json.loads(run_log_object.read_text())
+         run_log = RunLog(**json.loads(run_log_text))
+
+         return run_log
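Note: this is the other new file in the release (extensions/run_log_store/minio.py, per the RECORD hunk below). Its get_minio_client factory is wrapped in functools.lru_cache, so every bucket path built by get_run_log_bucket with the same endpoint and unwrapped credentials reuses one S3Client. A small sketch of that caching behaviour; the connection details are simply the defaults declared above, not a recommendation.

from extensions.run_log_store.minio import get_minio_client

client_a = get_minio_client("http://localhost:9002", "minioadmin", "minioadmin")
client_b = get_minio_client("http://localhost:9002", "minioadmin", "minioadmin")

# Identical (hashable) arguments hit the lru_cache entry, so the same
# S3Client instance is returned rather than a new one per call.
assert client_a is client_b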
@@ -1,6 +1,6 @@
  Metadata-Version: 2.4
  Name: runnable
- Version: 0.26.0
+ Version: 0.27.0
  Summary: Add your description here
  Author-email: "Vammi, Vijay" <vijay.vammi@astrazeneca.com>
  License-File: LICENSE
@@ -3,7 +3,7 @@ extensions/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  extensions/catalog/README.md,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  extensions/catalog/any_path.py,sha256=aNjphoPIyllUfY2uNDFWD1ErM3Px6izSGr0-oGowN8k,7263
  extensions/catalog/file_system.py,sha256=T_qFPFfrmykoAMc1rjNi_DBb437me8WPRcFglwAK744,1767
- extensions/catalog/minio.py,sha256=D5ofitU75OJGZdPM8s-ALCHrSR6jawIe6blDo8ebiXM,2179
+ extensions/catalog/minio.py,sha256=R3GvfCxN1GTcs4bQIAWh79_GHDTVd14gnpKlzwFeKUI,2363
  extensions/catalog/pyproject.toml,sha256=lLNxY6v04c8I5QK_zKw_E6sJTArSJRA_V-79ktaA3Hk,279
  extensions/catalog/s3.py,sha256=Sw5t8_kVRprn3uGGJCiHn7M9zw1CLaCOFj6YErtfG0o,287
  extensions/job_executor/README.md,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
@@ -26,9 +26,11 @@ extensions/pipeline_executor/pyproject.toml,sha256=ykTX7srR10PBYb8LsIwEj8vIPPIEZ
  extensions/pipeline_executor/retry.py,sha256=KGenhWrLLmOQgzMvqloXHDRJyoNs91t05rRW8aLW6FA,6969
  extensions/run_log_store/README.md,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  extensions/run_log_store/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
- extensions/run_log_store/chunked_fs.py,sha256=ElftNIwBmA2U2QAVGxruhcqepV312M2C9-GWVtiFaMM,3331
- extensions/run_log_store/file_system.py,sha256=SANQ3aFjQeUaq8euvdpwju-8uci9UxdiEDupXtLYppQ,4303
- extensions/run_log_store/generic_chunked.py,sha256=BX0j6S1Fwma3wuitHelUYm69FqXGToh10Zk2kamw6ZY,20253
+ extensions/run_log_store/any_path.py,sha256=2Ff9-jd1mWazFXn4HvsF0MF-oQAdMNYrl698Kjpip4Q,2938
+ extensions/run_log_store/chunked_fs.py,sha256=_2KmTaxK-p1e_YJqnQwyOqkLoCr80y-Wg1X2XdCC_9k,3546
+ extensions/run_log_store/file_system.py,sha256=hhrbhSnuzv8yzBr6DAu45NT8-sawPP86WA2-LY70vjw,2781
+ extensions/run_log_store/generic_chunked.py,sha256=D08ADWK_rw4ed46_yeEankO-g9rwOjVYi4V-JaOOT6M,20445
+ extensions/run_log_store/minio.py,sha256=pTHUzCHnehJ5JZoIrlZGjb4Cl-6yinp3MgqLZPvIxq4,3403
  extensions/run_log_store/pyproject.toml,sha256=YnmXsFvFG9uv_c0spLYBsNI_1sbktqxtHsOuClyvZ3g,288
  extensions/run_log_store/db/implementation_FF.py,sha256=euTnh0xzNF0e_DyfHQ4W-kG1AwTr8u7OuO3_cZkR5bM,5237
  extensions/run_log_store/db/integration_FF.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
@@ -53,8 +55,8 @@ runnable/sdk.py,sha256=T1nqDpLN9fULvvU9L-oY0EHqYdKUI9qk7oekLynm02Y,33568
  runnable/secrets.py,sha256=PXcEJw-4WPzeWRLfsatcPPyr1zkqgHzdRWRcS9vvpvM,2354
  runnable/tasks.py,sha256=X6xijut7ffwpfYDcXoN6y0AcRVd7fWHs676DJ00Kma4,29134
  runnable/utils.py,sha256=hBr7oGwGL2VgfITlQCTz-a1iwvvf7Mfl-HY8UdENZac,19929
- runnable-0.26.0.dist-info/METADATA,sha256=IiPhsPo9Vws83V72pYoPNG7cdexyVi7Ctf49lsgv1bY,10047
- runnable-0.26.0.dist-info/WHEEL,sha256=qtCwoSJWgHk21S1Kb4ihdzI2rlJ1ZKaIurTj_ngOhyQ,87
- runnable-0.26.0.dist-info/entry_points.txt,sha256=UCXvfBsVLpBjQY6znXNVzF6hof3Lro7oxtUD0t7kUp4,1704
- runnable-0.26.0.dist-info/licenses/LICENSE,sha256=xx0jnfkXJvxRnG63LTGOxlggYnIysveWIZ6H3PNdCrQ,11357
- runnable-0.26.0.dist-info/RECORD,,
+ runnable-0.27.0.dist-info/METADATA,sha256=Bmncq3RAxN2eZGtlWQf5X8O0CkWRqmiCQiY-rH90odM,10047
+ runnable-0.27.0.dist-info/WHEEL,sha256=qtCwoSJWgHk21S1Kb4ihdzI2rlJ1ZKaIurTj_ngOhyQ,87
+ runnable-0.27.0.dist-info/entry_points.txt,sha256=VZ-VOpcr16mj9jwxxMycwGNJlG_cziICRIe-LQrDwig,1760
+ runnable-0.27.0.dist-info/licenses/LICENSE,sha256=xx0jnfkXJvxRnG63LTGOxlggYnIysveWIZ6H3PNdCrQ,11357
+ runnable-0.27.0.dist-info/RECORD,,
@@ -36,6 +36,7 @@ retry = extensions.pipeline_executor.retry:RetryExecutor
  buffered = runnable.datastore:BufferRunLogstore
  chunked-fs = extensions.run_log_store.chunked_fs:ChunkedFileSystemRunLogStore
  file-system = extensions.run_log_store.file_system:FileSystemRunLogstore
+ minio = extensions.run_log_store.minio:MinioRunLogStore

  [secrets]
  do-nothing = runnable.secrets:DoNothingSecretManager
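Note: the entry_points.txt change registers the new store as minio alongside the existing run log stores. A hedged sketch of how that registration could be discovered at runtime; the group header sits outside this hunk, so the example scans the distribution's entry points rather than hardcoding a group name.

from importlib.metadata import distribution

for ep in distribution("runnable").entry_points:
    if ep.value == "extensions.run_log_store.minio:MinioRunLogStore":
        store_cls = ep.load()  # imports MinioRunLogStore
        print(ep.group, ep.name, store_cls.__name__)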