runnable 0.34.0a3__py3-none-any.whl → 0.36.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (44)
  1. extensions/job_executor/__init__.py +3 -4
  2. extensions/job_executor/emulate.py +106 -0
  3. extensions/job_executor/k8s.py +8 -8
  4. extensions/job_executor/local_container.py +13 -14
  5. extensions/nodes/__init__.py +0 -0
  6. extensions/nodes/conditional.py +243 -0
  7. extensions/nodes/fail.py +72 -0
  8. extensions/nodes/map.py +350 -0
  9. extensions/nodes/parallel.py +159 -0
  10. extensions/nodes/stub.py +89 -0
  11. extensions/nodes/success.py +72 -0
  12. extensions/nodes/task.py +92 -0
  13. extensions/pipeline_executor/__init__.py +24 -26
  14. extensions/pipeline_executor/argo.py +50 -41
  15. extensions/pipeline_executor/emulate.py +112 -0
  16. extensions/pipeline_executor/local.py +4 -4
  17. extensions/pipeline_executor/local_container.py +19 -79
  18. extensions/pipeline_executor/mocked.py +4 -4
  19. extensions/pipeline_executor/retry.py +6 -10
  20. extensions/tasks/torch.py +1 -1
  21. runnable/__init__.py +2 -9
  22. runnable/catalog.py +1 -21
  23. runnable/cli.py +0 -59
  24. runnable/context.py +519 -28
  25. runnable/datastore.py +51 -54
  26. runnable/defaults.py +12 -34
  27. runnable/entrypoints.py +82 -440
  28. runnable/exceptions.py +35 -34
  29. runnable/executor.py +13 -20
  30. runnable/names.py +1 -1
  31. runnable/nodes.py +18 -16
  32. runnable/parameters.py +2 -2
  33. runnable/sdk.py +117 -164
  34. runnable/tasks.py +62 -21
  35. runnable/utils.py +6 -268
  36. {runnable-0.34.0a3.dist-info → runnable-0.36.0.dist-info}/METADATA +1 -2
  37. runnable-0.36.0.dist-info/RECORD +74 -0
  38. {runnable-0.34.0a3.dist-info → runnable-0.36.0.dist-info}/entry_points.txt +9 -8
  39. extensions/nodes/nodes.py +0 -778
  40. extensions/nodes/torch.py +0 -273
  41. extensions/nodes/torch_config.py +0 -76
  42. runnable-0.34.0a3.dist-info/RECORD +0 -67
  43. {runnable-0.34.0a3.dist-info → runnable-0.36.0.dist-info}/WHEEL +0 -0
  44. {runnable-0.34.0a3.dist-info → runnable-0.36.0.dist-info}/licenses/LICENSE +0 -0
extensions/nodes/torch.py DELETED
@@ -1,273 +0,0 @@
- import importlib
- import logging
- import os
- import random
- import string
- from datetime import datetime
- from pathlib import Path
- from typing import Any, Callable, Optional
-
- from pydantic import BaseModel, ConfigDict, Field, field_serializer
-
- from extensions.nodes.torch_config import EasyTorchConfig, TorchConfig
- from runnable import PythonJob, datastore, defaults
- from runnable.datastore import StepLog
- from runnable.nodes import ExecutableNode
- from runnable.tasks import PythonTaskType, create_task
- from runnable.utils import TypeMapVariable
-
- logger = logging.getLogger(defaults.LOGGER_NAME)
-
- try:
-     from torch.distributed.elastic.multiprocessing.api import DefaultLogsSpecs, Std
-     from torch.distributed.launcher.api import LaunchConfig, elastic_launch
- except ImportError:
-     logger.exception("Torch is not installed. Please install torch first.")
-     raise Exception("Torch is not installed. Please install torch first.")
-
-
- def training_subprocess():
-     """
-     This function is called by the torch.distributed.launcher.api.elastic_launch
-     It happens in a subprocess and is responsible for executing the user's function
-
-     It is unrelated to the actual node execution, so any cataloging, run_log_store should be
-     handled to match to main process.
-
-     We have these variables to use:
-
-     os.environ["RUNNABLE_TORCH_COMMAND"] = self.executable.command
-     os.environ["RUNNABLE_TORCH_PARAMETERS_FILES"] = (
-         self._context.parameters_file or ""
-     )
-     os.environ["RUNNABLE_TORCH_RUN_ID"] = self._context.run_id
-     os.environ["RUNNABLE_TORCH_COPY_CONTENTS_TO"] = (
-         self._context.catalog_handler.compute_data_folder
-     )
-     os.environ["RUNNABLE_TORCH_TORCH_LOGS"] = self.log_dir or ""
-
-     """
-     command = os.environ.get("RUNNABLE_TORCH_COMMAND")
-     run_id = os.environ.get("RUNNABLE_TORCH_RUN_ID", "")
-     parameters_files = os.environ.get("RUNNABLE_TORCH_PARAMETERS_FILES", "")
-
-     process_run_id = (
-         run_id
-         + "-"
-         + os.environ.get("RANK", "")
-         + "-"
-         + "".join(random.choices(string.ascii_lowercase, k=3))
-     )
-     os.environ["TORCH_DISTRIBUTED_DEBUG"] = "DETAIL"
-
-     delete_env_vars_with_prefix("RUNNABLE_")
-
-     func = get_callable_from_dotted_path(command)
-
-     # The job runs with the default configuration
-     # ALl the execution logs are stored in .catalog
-     job = PythonJob(function=func)
-
-     job.execute(
-         parameters_file=parameters_files,
-         job_id=process_run_id,
-     )
-
-     from runnable.context import run_context
-
-     job_log = run_context.run_log_store.get_run_log_by_id(run_id=run_context.run_id)
-
-     if job_log.status == defaults.FAIL:
-         raise Exception(f"Job {process_run_id} failed")
-
-
- # TODO: Can this be utils.get_module_and_attr_names
- def get_callable_from_dotted_path(dotted_path) -> Callable:
-     try:
-         # Split the path into module path and callable object
-         module_path, callable_name = dotted_path.rsplit(".", 1)
-
-         # Import the module
-         module = importlib.import_module(module_path)
-
-         # Get the callable from the module
-         callable_obj = getattr(module, callable_name)
-
-         # Check if the object is callable
-         if not callable(callable_obj):
-             raise TypeError(f"The object {callable_name} is not callable.")
-
-         return callable_obj
-
-     except (ImportError, AttributeError, ValueError) as e:
-         raise ImportError(f"Could not import '{dotted_path}'.") from e
-
-
- def delete_env_vars_with_prefix(prefix):
-     to_delete = []  # List to keep track of variables to delete
-
-     # Iterate over a list of all environment variable keys
-     for var in os.environ:
-         if var.startswith(prefix):
-             to_delete.append(var)
-
-     # Delete each of the variables collected
-     for var in to_delete:
-         del os.environ[var]
-
-
- # TODO: The design of this class is not final
- class TorchNode(ExecutableNode, TorchConfig):
-     node_type: str = Field(default="torch", serialization_alias="type")
-     executable: PythonTaskType = Field(exclude=True)
-
-     # Similar to TaskNode
-     model_config = ConfigDict(extra="allow")
-
-     def get_summary(self) -> dict[str, Any]:
-         summary = {
-             "name": self.name,
-             "type": self.node_type,
-         }
-
-         return summary
-
-     @classmethod
-     def parse_from_config(cls, config: dict[str, Any]) -> "TorchNode":
-         task_config = {
-             k: v for k, v in config.items() if k not in TorchNode.model_fields.keys()
-         }
-         node_config = {
-             k: v for k, v in config.items() if k in TorchNode.model_fields.keys()
-         }
-
-         executable = create_task(task_config)
-
-         assert isinstance(executable, PythonTaskType)
-         return cls(executable=executable, **node_config, **task_config)
-
-     def get_launch_config(self) -> LaunchConfig:
-         internal_log_spec = InternalLogSpecs(**self.model_dump(exclude_none=True))
-         log_spec: DefaultLogsSpecs = DefaultLogsSpecs(
-             **internal_log_spec.model_dump(exclude_none=True)
-         )
-         easy_torch_config = EasyTorchConfig(
-             **self.model_dump(
-                 exclude_none=True,
-             )
-         )
-
-         launch_config = LaunchConfig(
-             **easy_torch_config.model_dump(
-                 exclude_none=True,
-             ),
-             logs_specs=log_spec,
-             run_id=self._context.run_id,
-         )
-         logger.info(f"launch_config: {launch_config}")
-         return launch_config
-
-     def execute(
-         self,
-         mock=False,
-         map_variable: TypeMapVariable = None,
-         attempt_number: int = 1,
-     ) -> StepLog:
-         assert (
-             map_variable is None or not map_variable
-         ), "TorchNode does not support map_variable"
-
-         step_log = self._context.run_log_store.get_step_log(
-             self._get_step_log_name(map_variable), self._context.run_id
-         )
-
-         # Attempt to call the function or elastic launch
-         launch_config = self.get_launch_config()
-         logger.info(f"launch_config: {launch_config}")
-
-         # ENV variables are shared with the subprocess, use that as communication
-         os.environ["RUNNABLE_TORCH_COMMAND"] = self.executable.command
-         os.environ["RUNNABLE_TORCH_PARAMETERS_FILES"] = (
-             self._context.parameters_file or ""
-         )
-         os.environ["RUNNABLE_TORCH_RUN_ID"] = self._context.run_id
-
-         launcher = elastic_launch(
-             launch_config,
-             training_subprocess,
-         )
-         try:
-             launcher()
-             attempt_log = datastore.StepAttempt(
-                 status=defaults.SUCCESS,
-                 start_time=str(datetime.now()),
-                 end_time=str(datetime.now()),
-                 attempt_number=attempt_number,
-             )
-         except Exception as e:
-             attempt_log = datastore.StepAttempt(
-                 status=defaults.FAIL,
-                 start_time=str(datetime.now()),
-                 end_time=str(datetime.now()),
-                 attempt_number=attempt_number,
-             )
-             logger.error(f"Error executing TorchNode: {e}")
-         finally:
-             # This can only come from the subprocess
-             if Path(".catalog").exists():
-                 os.rename(".catalog", "proc_logs")
-                 # Move .catalog and torch_logs to the parent node's catalog location
-                 self._context.catalog_handler.put(
-                     "proc_logs/**/*", allow_file_not_found_exc=True
-                 )
-
-             # TODO: This is not working!!
-             if self.log_dir:
-                 self._context.catalog_handler.put(
-                     self.log_dir + "/**/*", allow_file_not_found_exc=True
-                 )
-
-             delete_env_vars_with_prefix("RUNNABLE_TORCH")
-
-         logger.info(f"attempt_log: {attempt_log}")
-         logger.info(f"Step {self.name} completed with status: {attempt_log.status}")
-
-         step_log.status = attempt_log.status
-         step_log.attempts.append(attempt_log)
-
-         return step_log
-
-     def fan_in(self, map_variable: dict[str, str | int | float] | None = None):
-         # Destroy the service
-         # Destroy the statefulset
-         assert (
-             map_variable is None or not map_variable
-         ), "TorchNode does not support map_variable"
-
-     def fan_out(self, map_variable: dict[str, str | int | float] | None = None):
-         # Create a service
-         # Create a statefulset
-         # Gather the IPs and set them as parameters downstream
-         assert (
-             map_variable is None or not map_variable
-         ), "TorchNode does not support map_variable"
-
-
- # This internal model makes it easier to extract the required fields
- # of log specs from user specification.
- # https://github.com/pytorch/pytorch/blob/main/torch/distributed/elastic/multiprocessing/api.py#L243
- class InternalLogSpecs(BaseModel):
-     log_dir: Optional[str] = Field(default="torch_logs")
-     redirects: str = Field(default="0")  # Std.NONE
-     tee: str = Field(default="0")  # Std.NONE
-     local_ranks_filter: Optional[set[int]] = Field(default=None)
-
-     model_config = ConfigDict(extra="ignore")
-
-     @field_serializer("redirects")
-     def convert_redirects(self, redirects: str) -> Std | dict[int, Std]:
-         return Std.from_str(redirects)
-
-     @field_serializer("tee")
-     def convert_tee(self, tee: str) -> Std | dict[int, Std]:
-         return Std.from_str(tee)
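The docstring of the deleted training_subprocess describes a hand-off protocol: the parent process publishes RUNNABLE_TORCH_* environment variables, elastic_launch spawns worker subprocesses that read them, and the variables are cleaned up afterwards with delete_env_vars_with_prefix. Below is a minimal, torch-free sketch of that pattern; the worker function, the dotted path "my_package.train", and the run id are illustrative stand-ins rather than names from the package, and multiprocessing merely stands in for torch's elastic_launch.

import multiprocessing as mp
import os


def worker(rank: int) -> None:
    # Worker side: read the values handed over by the parent process.
    os.environ["RANK"] = str(rank)  # torch elastic would normally set RANK itself
    command = os.environ["RUNNABLE_TORCH_COMMAND"]
    run_id = os.environ["RUNNABLE_TORCH_RUN_ID"]
    print(f"worker {rank} would run {command!r} under run id {run_id}-{rank}")


if __name__ == "__main__":
    # Parent side: publish the hand-off values before starting the workers.
    os.environ["RUNNABLE_TORCH_COMMAND"] = "my_package.train"  # hypothetical dotted path
    os.environ["RUNNABLE_TORCH_RUN_ID"] = "demo-run"

    processes = [mp.Process(target=worker, args=(rank,)) for rank in range(2)]
    for process in processes:
        process.start()
    for process in processes:
        process.join()

    # Parent-side cleanup, mirroring delete_env_vars_with_prefix("RUNNABLE_TORCH").
    for name in [v for v in os.environ if v.startswith("RUNNABLE_TORCH")]:
        del os.environ[name]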
extensions/nodes/torch_config.py DELETED
@@ -1,76 +0,0 @@
- from enum import Enum
- from typing import Any, Optional
-
- from pydantic import BaseModel, ConfigDict, Field, computed_field
-
-
- class StartMethod(str, Enum):
-     spawn = "spawn"
-     fork = "fork"
-     forkserver = "forkserver"
-
-
- ## The idea is the following:
- # Users can configure any of the options present in TorchConfig class.
- # The LaunchConfig class will be created from TorchConfig.
- # The LogSpecs is sent as a parameter to the launch config.
-
- ## NO idea of standalone and how to send it
-
-
- # The user sees this as part of the config of the node.
- # It is kept as similar as possible to torchrun
- class TorchConfig(BaseModel):
-     model_config = ConfigDict(extra="forbid")
-
-     # excluded as LaunchConfig requires min and max nodes
-     nnodes: str = Field(default="1:1", exclude=True, description="min:max")
-     nproc_per_node: int = Field(default=1, description="Number of processes per node")
-
-     # will be used to create the log specs
-     # But they are excluded from dump as logs specs is a class for LaunchConfig
-     # from_str("0") -> Std.NONE
-     # from_str("1") -> Std.OUT
-     # from_str("0:3,1:0,2:1,3:2") -> {0: Std.ALL, 1: Std.NONE, 2: Std.OUT, 3: Std.ERR}
-     log_dir: Optional[str] = Field(default="torch_logs", exclude=True)
-     redirects: str = Field(default="0", exclude=True)  # Std.NONE
-     tee: str = Field(default="0", exclude=True)  # Std.NONE
-     local_ranks_filter: Optional[set[int]] = Field(default=None, exclude=True)
-
-     role: str | None = Field(default=None)
-
-     # run_id would be the run_id of the context
-     # and sent at the creation of the LaunchConfig
-
-     # This section is about the communication between nodes/processes
-     rdzv_backend: str | None = Field(default="static")
-     rdzv_endpoint: str | None = Field(default="")
-     rdzv_configs: dict[str, Any] = Field(default_factory=dict)
-     rdzv_timeout: int | None = Field(default=None)
-
-     max_restarts: int | None = Field(default=None)
-     monitor_interval: float | None = Field(default=None)
-     start_method: str | None = Field(default=StartMethod.spawn)
-     log_line_prefix_template: str | None = Field(default=None)
-     local_addr: Optional[str] = None
-
-     # https://github.com/pytorch/pytorch/blob/main/torch/distributed/run.py#L753
-     # master_addr: str | None = Field(default="localhost")
-     # master_port: str | None = Field(default="29500")
-     # training_script: str = Field(default="dummy_training_script")
-     # training_script_args: str = Field(default="")
-
-
- class EasyTorchConfig(TorchConfig):
-     model_config = ConfigDict(extra="ignore")
-
-     # TODO: Validate min < max
-     @computed_field  # type: ignore
-     @property
-     def min_nodes(self) -> int:
-         return int(self.nnodes.split(":")[0])
-
-     @computed_field  # type: ignore
-     @property
-     def max_nodes(self) -> int:
-         return int(self.nnodes.split(":")[1])
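As the comments in the deleted file note, EasyTorchConfig derives min_nodes and max_nodes from the single "min:max" nnodes string because LaunchConfig takes the two bounds separately. A minimal, standalone sketch of that split follows; the helper name is hypothetical and not part of the package.

def split_nnodes(nnodes: str = "1:1") -> tuple[int, int]:
    # "1:2" -> (1, 2); the deleted EasyTorchConfig exposed these as computed fields.
    min_nodes, max_nodes = (int(part) for part in nnodes.split(":"))
    return min_nodes, max_nodes


assert split_nnodes("1:2") == (1, 2)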
runnable-0.34.0a3.dist-info/RECORD DELETED
@@ -1,67 +0,0 @@
- extensions/README.md,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
- extensions/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
- extensions/catalog/README.md,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
- extensions/catalog/any_path.py,sha256=atB5gWPRX6ptW6zwYeCVb_fh0qhs7WAFO9HIsnMZl98,7350
- extensions/catalog/file_system.py,sha256=T_qFPFfrmykoAMc1rjNi_DBb437me8WPRcFglwAK744,1767
- extensions/catalog/minio.py,sha256=R3GvfCxN1GTcs4bQIAWh79_GHDTVd14gnpKlzwFeKUI,2363
- extensions/catalog/pyproject.toml,sha256=lLNxY6v04c8I5QK_zKw_E6sJTArSJRA_V-79ktaA3Hk,279
- extensions/catalog/s3.py,sha256=Sw5t8_kVRprn3uGGJCiHn7M9zw1CLaCOFj6YErtfG0o,287
- extensions/job_executor/README.md,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
- extensions/job_executor/__init__.py,sha256=VeLuYCcShCIYT0TNtAXfUF9tOk4ZHoLzdTEvbsz0spM,5870
- extensions/job_executor/k8s.py,sha256=Jl0s3YryISx-SJIhDhyNskzlUlhy4ynBHEc9DfAXjAY,16394
- extensions/job_executor/k8s_job_spec.yaml,sha256=7aFpxHdO_p6Hkc3YxusUOuAQTD1Myu0yTPX9DrhxbOg,1158
- extensions/job_executor/local.py,sha256=3ZbCFXBvbLlMp10JTmQJJrjBKG2keHI6SH8hEvmHDkA,2230
- extensions/job_executor/local_container.py,sha256=1JcLJ0zrNSNHdubrSO9miN54iwvPLHqKMZ08aOC8WWo,6886
- extensions/job_executor/pyproject.toml,sha256=UIEgiCYHTXcRWSByNMFuKJFKgxTBpQqTqyUecIsb_Vc,286
- extensions/nodes/README.md,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
- extensions/nodes/nodes.py,sha256=s9ub1dqy4qHjRQG6YElCdL7rCOTYNs9RUIrStZ6tEB4,28256
- extensions/nodes/pyproject.toml,sha256=YTu-ETN3JNFSkMzzWeOwn4m-O2nbRH-PmiPBALDCUw4,278
- extensions/nodes/torch.py,sha256=64DTjdPNSJ8vfMwUN9h9Ly5g9qj-Bga7LSGrfCAO0BY,9389
- extensions/nodes/torch_config.py,sha256=tO3sG2_fj8a6FmPZZllwKVx3WaRr4QmQYcACseg8YXM,2839
- extensions/pipeline_executor/README.md,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
- extensions/pipeline_executor/__init__.py,sha256=wfigTL2T9OHrmE8b2Ydmb8h6hr-oF--Yc2FectC7WaY,24623
- extensions/pipeline_executor/argo.py,sha256=Xj3rasvJfgdEze_s3ILB77VY92NNk7iO8yT46A-_Y4c,37627
- extensions/pipeline_executor/local.py,sha256=6oWUJ6b6NvIkpeQJBoCT1hbfX4_6WCB4HzMgHZ4ik1A,1887
- extensions/pipeline_executor/local_container.py,sha256=3kZ2QCsrq_YjH9dcAz8v05knKShQ_JtbIU-IA_-G538,12724
- extensions/pipeline_executor/mocked.py,sha256=0sMmypuvstBIv9uQg-WAcPrF3oOFpeEXNi6N8Nzdnl0,5680
- extensions/pipeline_executor/pyproject.toml,sha256=ykTX7srR10PBYb8LsIwEj8vIPPIEZQ5V_R7VYbZ-ido,291
- extensions/pipeline_executor/retry.py,sha256=6ClFXJYtr0M6nWIZiI-mbUGshobOtVH_KADN8JCfvH0,6881
- extensions/run_log_store/README.md,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
- extensions/run_log_store/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
- extensions/run_log_store/any_path.py,sha256=0nN_LHbm2W6AHkerQmsVHq3EoybFQF8lxpCicacHo8Y,2861
- extensions/run_log_store/chunked_fs.py,sha256=wHMKcAx6uFI4OOTp7QWCdGq9WvEFesbLp9VxHZU28l0,3341
- extensions/run_log_store/chunked_minio.py,sha256=Itfkw4Ycf0uLCqxH3Uk_itmVgT7ipJp05yKfD22WBiY,4007
- extensions/run_log_store/file_system.py,sha256=hhrbhSnuzv8yzBr6DAu45NT8-sawPP86WA2-LY70vjw,2781
- extensions/run_log_store/generic_chunked.py,sha256=EnhRxlqm1jG-Tdxul4sY8OeCX5fK9FY2v8DZanX9-5o,20455
- extensions/run_log_store/minio.py,sha256=omrKDSdRzmnVBg9xXkkdQb-icBIgBDRdpmwGRlMyCGk,3453
- extensions/run_log_store/pyproject.toml,sha256=YnmXsFvFG9uv_c0spLYBsNI_1sbktqxtHsOuClyvZ3g,288
- extensions/run_log_store/db/implementation_FF.py,sha256=euTnh0xzNF0e_DyfHQ4W-kG1AwTr8u7OuO3_cZkR5bM,5237
- extensions/run_log_store/db/integration_FF.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
- extensions/secrets/README.md,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
- extensions/secrets/dotenv.py,sha256=nADHXI6KJ_LUYOIe5EbtYH-21OBebSNVr0Pjb1GlZ7w,1573
- extensions/secrets/pyproject.toml,sha256=mLJNImNcBlbLKHh-0ugVWT9V83R4RibyyYDtBCSqVF4,282
- extensions/tasks/torch.py,sha256=oeXRkmuttFIAuBwH7-h4SOVXMDOZXX5mvqI2aFrR3Vo,10283
- extensions/tasks/torch_config.py,sha256=UjfMitT-TXASRDGR30I2vDRnyk7JQnR-5CsOVidjpSY,2833
- runnable/__init__.py,sha256=3ZKuvGEkY_zHVQlJtarXd4jkjICxjgnw-bbKN_5SiJI,691
- runnable/catalog.py,sha256=4msQxLhLKlsDDrHFnGauPYe-Or-q9g8_RYCn_4dpxaU,4466
- runnable/cli.py,sha256=3BiKSj95h2Drn__YlchMPZ5rBMafuRb2OGIsVpbsO5Y,8788
- runnable/context.py,sha256=by5uepmuCP0dmM9BmsliXihSes5QEFejwAsmekcqylE,1388
- runnable/datastore.py,sha256=ZobM1aVkgeUJ2fZYt63IFDsoNzObwc93hdByegS5YKQ,32396
- runnable/defaults.py,sha256=3o9IVGryyCE6PoQTOoaIaHHTbJGEzmdXMcwzOhwAYoI,3518
- runnable/entrypoints.py,sha256=1xCbWVUQLGmg5gkWnAVWFLAUf6j4avP9azX_vuGQUMY,18985
- runnable/exceptions.py,sha256=LFbp0-Qxg2PAMLEVt7w2whhBxSG-5pzUEv5qN-Rc4_c,3003
- runnable/executor.py,sha256=Jr9yJtSH7CzjXJLWx3VWIUAQblstuGqzpFtajv7d39M,15348
- runnable/graph.py,sha256=poQz5zcvq89ju_u5sYlunQLPbHnXTaUmjcvstPwvT4U,16536
- runnable/names.py,sha256=vn92Kv9ANROYSZX6Z4z1v_WA3WiEdIYmG6KEStBFZug,8134
- runnable/nodes.py,sha256=QGHMznriEz4AcmntHICBZKrDT6zbc7WD1sV0MgwK10c,16691
- runnable/parameters.py,sha256=u77CdqqDAbVdzNeBFPNUfGnWPy9-SpBVmwEJ56xmDm8,5289
- runnable/pickler.py,sha256=ydJ_eti_U1F4l-YacFp7BWm6g5vTn04UXye25S1HVok,2684
- runnable/sdk.py,sha256=-hsoZctbGKsrfOQW3Z7RqWVGJI4GhbsOjqjMRb2OAUo,35181
- runnable/secrets.py,sha256=4L_dBFxTgr8r_hHUD6RlZEtqaOHDRsFG5PXO5wlvMI0,2324
- runnable/tasks.py,sha256=lOtCninvosGI2bNIzblrzNa-lN7TMwel1KQ1g23M85A,32088
- runnable/utils.py,sha256=hBr7oGwGL2VgfITlQCTz-a1iwvvf7Mfl-HY8UdENZac,19929
- runnable-0.34.0a3.dist-info/METADATA,sha256=AYMw1jtTzhBN_Y2dMJiguAnYwc82LLxa-WHYApUYpCs,10203
- runnable-0.34.0a3.dist-info/WHEEL,sha256=qtCwoSJWgHk21S1Kb4ihdzI2rlJ1ZKaIurTj_ngOhyQ,87
- runnable-0.34.0a3.dist-info/entry_points.txt,sha256=wKfW6aIWMQFlwrwpPBVWlMQDcxQmOupDKNkKyXoPFV4,1917
- runnable-0.34.0a3.dist-info/licenses/LICENSE,sha256=xx0jnfkXJvxRnG63LTGOxlggYnIysveWIZ6H3PNdCrQ,11357
- runnable-0.34.0a3.dist-info/RECORD,,