flyte 2.0.0b32__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of flyte might be problematic. Click here for more details.
- flyte/__init__.py +108 -0
- flyte/_bin/__init__.py +0 -0
- flyte/_bin/debug.py +38 -0
- flyte/_bin/runtime.py +195 -0
- flyte/_bin/serve.py +178 -0
- flyte/_build.py +26 -0
- flyte/_cache/__init__.py +12 -0
- flyte/_cache/cache.py +147 -0
- flyte/_cache/defaults.py +9 -0
- flyte/_cache/local_cache.py +216 -0
- flyte/_cache/policy_function_body.py +42 -0
- flyte/_code_bundle/__init__.py +8 -0
- flyte/_code_bundle/_ignore.py +121 -0
- flyte/_code_bundle/_packaging.py +218 -0
- flyte/_code_bundle/_utils.py +347 -0
- flyte/_code_bundle/bundle.py +266 -0
- flyte/_constants.py +1 -0
- flyte/_context.py +155 -0
- flyte/_custom_context.py +73 -0
- flyte/_debug/__init__.py +0 -0
- flyte/_debug/constants.py +38 -0
- flyte/_debug/utils.py +17 -0
- flyte/_debug/vscode.py +307 -0
- flyte/_deploy.py +408 -0
- flyte/_deployer.py +109 -0
- flyte/_doc.py +29 -0
- flyte/_docstring.py +32 -0
- flyte/_environment.py +122 -0
- flyte/_excepthook.py +37 -0
- flyte/_group.py +32 -0
- flyte/_hash.py +8 -0
- flyte/_image.py +1055 -0
- flyte/_initialize.py +628 -0
- flyte/_interface.py +119 -0
- flyte/_internal/__init__.py +3 -0
- flyte/_internal/controllers/__init__.py +129 -0
- flyte/_internal/controllers/_local_controller.py +239 -0
- flyte/_internal/controllers/_trace.py +48 -0
- flyte/_internal/controllers/remote/__init__.py +58 -0
- flyte/_internal/controllers/remote/_action.py +211 -0
- flyte/_internal/controllers/remote/_client.py +47 -0
- flyte/_internal/controllers/remote/_controller.py +583 -0
- flyte/_internal/controllers/remote/_core.py +465 -0
- flyte/_internal/controllers/remote/_informer.py +381 -0
- flyte/_internal/controllers/remote/_service_protocol.py +50 -0
- flyte/_internal/imagebuild/__init__.py +3 -0
- flyte/_internal/imagebuild/docker_builder.py +706 -0
- flyte/_internal/imagebuild/image_builder.py +277 -0
- flyte/_internal/imagebuild/remote_builder.py +386 -0
- flyte/_internal/imagebuild/utils.py +78 -0
- flyte/_internal/resolvers/__init__.py +0 -0
- flyte/_internal/resolvers/_task_module.py +21 -0
- flyte/_internal/resolvers/common.py +31 -0
- flyte/_internal/resolvers/default.py +28 -0
- flyte/_internal/runtime/__init__.py +0 -0
- flyte/_internal/runtime/convert.py +486 -0
- flyte/_internal/runtime/entrypoints.py +204 -0
- flyte/_internal/runtime/io.py +188 -0
- flyte/_internal/runtime/resources_serde.py +152 -0
- flyte/_internal/runtime/reuse.py +125 -0
- flyte/_internal/runtime/rusty.py +193 -0
- flyte/_internal/runtime/task_serde.py +362 -0
- flyte/_internal/runtime/taskrunner.py +209 -0
- flyte/_internal/runtime/trigger_serde.py +160 -0
- flyte/_internal/runtime/types_serde.py +54 -0
- flyte/_keyring/__init__.py +0 -0
- flyte/_keyring/file.py +115 -0
- flyte/_logging.py +300 -0
- flyte/_map.py +312 -0
- flyte/_module.py +72 -0
- flyte/_pod.py +30 -0
- flyte/_resources.py +473 -0
- flyte/_retry.py +32 -0
- flyte/_reusable_environment.py +102 -0
- flyte/_run.py +724 -0
- flyte/_secret.py +96 -0
- flyte/_task.py +550 -0
- flyte/_task_environment.py +316 -0
- flyte/_task_plugins.py +47 -0
- flyte/_timeout.py +47 -0
- flyte/_tools.py +27 -0
- flyte/_trace.py +119 -0
- flyte/_trigger.py +1000 -0
- flyte/_utils/__init__.py +30 -0
- flyte/_utils/asyn.py +121 -0
- flyte/_utils/async_cache.py +139 -0
- flyte/_utils/coro_management.py +27 -0
- flyte/_utils/docker_credentials.py +173 -0
- flyte/_utils/file_handling.py +72 -0
- flyte/_utils/helpers.py +134 -0
- flyte/_utils/lazy_module.py +54 -0
- flyte/_utils/module_loader.py +104 -0
- flyte/_utils/org_discovery.py +57 -0
- flyte/_utils/uv_script_parser.py +49 -0
- flyte/_version.py +34 -0
- flyte/app/__init__.py +22 -0
- flyte/app/_app_environment.py +157 -0
- flyte/app/_deploy.py +125 -0
- flyte/app/_input.py +160 -0
- flyte/app/_runtime/__init__.py +3 -0
- flyte/app/_runtime/app_serde.py +347 -0
- flyte/app/_types.py +101 -0
- flyte/app/extras/__init__.py +3 -0
- flyte/app/extras/_fastapi.py +151 -0
- flyte/cli/__init__.py +12 -0
- flyte/cli/_abort.py +28 -0
- flyte/cli/_build.py +114 -0
- flyte/cli/_common.py +468 -0
- flyte/cli/_create.py +371 -0
- flyte/cli/_delete.py +45 -0
- flyte/cli/_deploy.py +293 -0
- flyte/cli/_gen.py +176 -0
- flyte/cli/_get.py +370 -0
- flyte/cli/_option.py +33 -0
- flyte/cli/_params.py +554 -0
- flyte/cli/_plugins.py +209 -0
- flyte/cli/_run.py +597 -0
- flyte/cli/_serve.py +64 -0
- flyte/cli/_update.py +37 -0
- flyte/cli/_user.py +17 -0
- flyte/cli/main.py +221 -0
- flyte/config/__init__.py +3 -0
- flyte/config/_config.py +248 -0
- flyte/config/_internal.py +73 -0
- flyte/config/_reader.py +225 -0
- flyte/connectors/__init__.py +11 -0
- flyte/connectors/_connector.py +270 -0
- flyte/connectors/_server.py +197 -0
- flyte/connectors/utils.py +135 -0
- flyte/errors.py +243 -0
- flyte/extend.py +19 -0
- flyte/extras/__init__.py +5 -0
- flyte/extras/_container.py +286 -0
- flyte/git/__init__.py +3 -0
- flyte/git/_config.py +21 -0
- flyte/io/__init__.py +29 -0
- flyte/io/_dataframe/__init__.py +131 -0
- flyte/io/_dataframe/basic_dfs.py +223 -0
- flyte/io/_dataframe/dataframe.py +1026 -0
- flyte/io/_dir.py +910 -0
- flyte/io/_file.py +914 -0
- flyte/io/_hashing_io.py +342 -0
- flyte/models.py +479 -0
- flyte/py.typed +0 -0
- flyte/remote/__init__.py +35 -0
- flyte/remote/_action.py +738 -0
- flyte/remote/_app.py +57 -0
- flyte/remote/_client/__init__.py +0 -0
- flyte/remote/_client/_protocols.py +189 -0
- flyte/remote/_client/auth/__init__.py +12 -0
- flyte/remote/_client/auth/_auth_utils.py +14 -0
- flyte/remote/_client/auth/_authenticators/__init__.py +0 -0
- flyte/remote/_client/auth/_authenticators/base.py +403 -0
- flyte/remote/_client/auth/_authenticators/client_credentials.py +73 -0
- flyte/remote/_client/auth/_authenticators/device_code.py +117 -0
- flyte/remote/_client/auth/_authenticators/external_command.py +79 -0
- flyte/remote/_client/auth/_authenticators/factory.py +200 -0
- flyte/remote/_client/auth/_authenticators/pkce.py +516 -0
- flyte/remote/_client/auth/_channel.py +213 -0
- flyte/remote/_client/auth/_client_config.py +85 -0
- flyte/remote/_client/auth/_default_html.py +32 -0
- flyte/remote/_client/auth/_grpc_utils/__init__.py +0 -0
- flyte/remote/_client/auth/_grpc_utils/auth_interceptor.py +288 -0
- flyte/remote/_client/auth/_grpc_utils/default_metadata_interceptor.py +151 -0
- flyte/remote/_client/auth/_keyring.py +152 -0
- flyte/remote/_client/auth/_token_client.py +260 -0
- flyte/remote/_client/auth/errors.py +16 -0
- flyte/remote/_client/controlplane.py +128 -0
- flyte/remote/_common.py +30 -0
- flyte/remote/_console.py +19 -0
- flyte/remote/_data.py +161 -0
- flyte/remote/_logs.py +185 -0
- flyte/remote/_project.py +88 -0
- flyte/remote/_run.py +386 -0
- flyte/remote/_secret.py +142 -0
- flyte/remote/_task.py +527 -0
- flyte/remote/_trigger.py +306 -0
- flyte/remote/_user.py +33 -0
- flyte/report/__init__.py +3 -0
- flyte/report/_report.py +182 -0
- flyte/report/_template.html +124 -0
- flyte/storage/__init__.py +36 -0
- flyte/storage/_config.py +237 -0
- flyte/storage/_parallel_reader.py +274 -0
- flyte/storage/_remote_fs.py +34 -0
- flyte/storage/_storage.py +456 -0
- flyte/storage/_utils.py +5 -0
- flyte/syncify/__init__.py +56 -0
- flyte/syncify/_api.py +375 -0
- flyte/types/__init__.py +52 -0
- flyte/types/_interface.py +40 -0
- flyte/types/_pickle.py +145 -0
- flyte/types/_renderer.py +162 -0
- flyte/types/_string_literals.py +119 -0
- flyte/types/_type_engine.py +2254 -0
- flyte/types/_utils.py +80 -0
- flyte-2.0.0b32.data/scripts/debug.py +38 -0
- flyte-2.0.0b32.data/scripts/runtime.py +195 -0
- flyte-2.0.0b32.dist-info/METADATA +351 -0
- flyte-2.0.0b32.dist-info/RECORD +204 -0
- flyte-2.0.0b32.dist-info/WHEEL +5 -0
- flyte-2.0.0b32.dist-info/entry_points.txt +7 -0
- flyte-2.0.0b32.dist-info/licenses/LICENSE +201 -0
- flyte-2.0.0b32.dist-info/top_level.txt +1 -0
|
@@ -0,0 +1,204 @@
|
|
|
1
|
+
import importlib
|
|
2
|
+
import os
|
|
3
|
+
import traceback
|
|
4
|
+
from typing import List, Optional, Tuple, Type
|
|
5
|
+
|
|
6
|
+
import flyte.errors
|
|
7
|
+
from flyte._code_bundle import download_bundle
|
|
8
|
+
from flyte._context import contextual_run
|
|
9
|
+
from flyte._internal import Controller
|
|
10
|
+
from flyte._internal.imagebuild.image_builder import ImageCache
|
|
11
|
+
from flyte._logging import log, logger
|
|
12
|
+
from flyte._task import TaskTemplate
|
|
13
|
+
from flyte.models import ActionID, Checkpoints, CodeBundle, RawDataPath
|
|
14
|
+
|
|
15
|
+
from ..._utils import adjust_sys_path
|
|
16
|
+
from .convert import Error, Inputs, Outputs
|
|
17
|
+
from .taskrunner import (
|
|
18
|
+
convert_and_run,
|
|
19
|
+
extract_download_run_upload,
|
|
20
|
+
)
|
|
21
|
+
|
|
22
|
+
|
|
23
|
+
async def direct_dispatch(
    task: TaskTemplate,
    *,
    action: ActionID,
    raw_data_path: RawDataPath,
    controller: Controller,
    version: str,
    output_path: str,
    run_base_dir: str,
    checkpoints: Checkpoints | None = None,
    code_bundle: CodeBundle | None = None,
    inputs: Inputs | None = None,
) -> Tuple[Optional[Outputs], Optional[Error]]:
    """
    Convert the given inputs and run *task* inside a freshly created context tree.

    Used today by the local_controller and positioned so a rust core can call it
    in the future: the caller loads the task and invokes this method, because the
    rust entrypoint will not have access to the python context and will not be
    able to run the tasks in the context tree itself.
    """
    # Preserve the truthiness semantics of the original default: any falsy
    # inputs value is replaced with an empty Inputs container.
    effective_inputs = inputs or Inputs.empty()
    return await contextual_run(
        convert_and_run,
        task=task,
        inputs=effective_inputs,
        action=action,
        raw_data_path=raw_data_path,
        checkpoints=checkpoints,
        code_bundle=code_bundle,
        controller=controller,
        version=version,
        output_path=output_path,
        run_base_dir=run_base_dir,
    )
|
|
55
|
+
|
|
56
|
+
|
|
57
|
+
def load_class(qualified_name) -> Type:
    """
    Resolve a dotted path of the form ``module.ClassName`` to the class object.

    :param qualified_name: Fully qualified name, e.g. ``"pkg.mod.MyClass"``.
    :return: The resolved class (or module attribute).
    """
    module_path, attr_name = qualified_name.rsplit(".", 1)
    target_module = importlib.import_module(module_path)
    return getattr(target_module, attr_name)
|
|
66
|
+
|
|
67
|
+
|
|
68
|
+
def load_task(resolver: str, *resolver_args: str) -> TaskTemplate:
    """
    Load a task by instantiating the given resolver class and delegating to its
    ``load_task`` method.

    :param resolver: Fully qualified name of the resolver class to use.
    :param resolver_args: Arguments to pass to the resolver.
    :return: The loaded task.
    :raises ModuleNotFoundError: Re-raised with cwd-listing diagnostics when the
        task module cannot be imported.
    """
    resolver_class = load_class(resolver)
    resolver_instance = resolver_class()
    try:
        return resolver_instance.load_task(resolver_args)
    except ModuleNotFoundError as e:
        raise ModuleNotFoundError(_import_error_diagnostics(e)) from e


def _import_error_diagnostics(e: ModuleNotFoundError, max_entries: int = 500) -> str:
    """Build an import-failure message listing cwd contents (capped to avoid huge messages)."""
    cwd = os.getcwd()
    files: List[str] = []
    truncated = False
    try:
        for root, dirs, filenames in os.walk(cwd):
            for name in dirs + filenames:
                if len(files) >= max_entries:
                    # Cap the listing: walking a large working tree would
                    # otherwise produce an unbounded error message.
                    truncated = True
                    break
                files.append(os.path.relpath(os.path.join(root, name), cwd))
            if truncated:
                break
    except Exception as list_err:
        # Best-effort only: the diagnostics must never mask the original error.
        files = [f"(Failed to list directory: {list_err})"]
    if truncated:
        files.append(f"... (listing truncated at {max_entries} entries)")
    return (
        "\n\nFull traceback:\n" + traceback.format_exc() + "\n[ImportError Diagnostics]\n"
        f"Module '{e.name}' not found in either the Python virtual environment or the current working directory.\n"
        f"Current working directory: {cwd}\n"
        f"Files found under current directory:\n" + "\n".join(f" - {f}" for f in files)
    )
|
|
98
|
+
|
|
99
|
+
|
|
100
|
+
def load_pkl_task(code_bundle: CodeBundle) -> TaskTemplate:
    """
    Deserialize a task from a gzip-compressed cloudpickle bundle.

    :param code_bundle: Bundle whose ``downloaded_path`` points at the pkl file.
    :return: The deserialized task template.
    """
    logger.debug(f"Loading task from pkl: {code_bundle.downloaded_path}")
    try:
        # Imports stay inside the try so a failing import is also logged below.
        import gzip

        import cloudpickle

        with gzip.open(str(code_bundle.downloaded_path), "rb") as pkl_stream:
            return cloudpickle.load(pkl_stream)
    except Exception as e:
        logger.exception(f"Failed to load pickled task from {code_bundle.downloaded_path}. Reason: {e!s}")
        raise
|
|
117
|
+
|
|
118
|
+
|
|
119
|
+
async def download_code_bundle(code_bundle: CodeBundle) -> CodeBundle:
    """
    Ensure the bundle's archive is present locally and record where it landed.

    :param code_bundle: The code bundle to download.
    :return: A copy of the bundle carrying the local downloaded path.
    """
    adjust_sys_path()
    logger.debug(f"Downloading {code_bundle}")
    local_path = await download_bundle(code_bundle)
    return code_bundle.with_downloaded_path(local_path)
|
|
129
|
+
|
|
130
|
+
|
|
131
|
+
async def _download_and_load_task(
    code_bundle: CodeBundle | None, resolver: str | None = None, resolver_args: List[str] | None = None
) -> TaskTemplate:
    """
    Download the code bundle (when one is provided) and load the task.

    Pickled bundles are loaded directly from the pkl file; otherwise the task is
    loaded through the resolver, which requires both *resolver* and
    *resolver_args* to be present.

    :param code_bundle: Optional bundle carrying either a tgz or a pkl payload.
    :param resolver: Fully qualified resolver class name.
    :param resolver_args: Arguments forwarded to the resolver.
    :return: The loaded task template.
    :raises flyte.errors.RuntimeSystemError: If resolver or resolver args are missing.
    """
    if code_bundle and (code_bundle.tgz or code_bundle.pkl):
        logger.debug(f"Downloading {code_bundle}")
        code_bundle = await download_code_bundle(code_bundle)
        if code_bundle.pkl:
            return load_pkl_task(code_bundle)

    # Single validation point for both the tgz and no-bundle paths (the original
    # duplicated this check with a malformed message).
    if not resolver or not resolver_args:
        raise flyte.errors.RuntimeSystemError(
            "MalformedCommand", "Resolver and resolver args are required for task"
        )

    if code_bundle and code_bundle.downloaded_path:
        logger.debug(
            f"Loading task from tgz: {code_bundle.downloaded_path}, resolver: {resolver}, args: {resolver_args}"
        )
    else:
        logger.debug(f"No code bundle provided, loading task from resolver: {resolver}, args: {resolver_args}")
    return load_task(resolver, *resolver_args)
|
|
152
|
+
|
|
153
|
+
|
|
154
|
+
@log
async def load_and_run_task(
    action: ActionID,
    raw_data_path: RawDataPath,
    output_path: str,
    run_base_dir: str,
    version: str,
    controller: Controller,
    resolver: str,
    resolver_args: List[str],
    checkpoints: Checkpoints | None = None,
    code_bundle: CodeBundle | None = None,
    input_path: str | None = None,
    image_cache: ImageCache | None = None,
    interactive_mode: bool = False,
) -> None:
    """
    This method is invoked from the runtime/CLI and is used to run a task. This creates the context tree,
    for the tasks to run in. It also handles the loading of the task.

    :param controller: Controller to use for the task.
    :param resolver: The resolver to use to load the task.
    :param resolver_args: The arguments to pass to the resolver.
    :param action: The ActionID to use for the task.
    :param raw_data_path: The raw data path to use for the task.
    :param output_path: The output path to use for the task.
    :param run_base_dir: Base output directory to pass down to child tasks.
    :param version: The version of the task to run.
    :param checkpoints: The checkpoints to use for the task.
    :param code_bundle: The code bundle to use for the task.
    :param input_path: The input path to use for the task.
    :param image_cache: Mappings of Image identifiers to image URIs.
    :param interactive_mode: Whether to run the task in interactive mode.
    """
    # Resolve the task first: download the bundle (tgz or pkl) if one was
    # shipped, otherwise load purely through the resolver.
    task = await _download_and_load_task(code_bundle, resolver, resolver_args)

    # Run the task inside a fresh context tree; extract_download_run_upload
    # handles input download, execution, and output/error upload.
    await contextual_run(
        extract_download_run_upload,
        task,
        action=action,
        version=version,
        controller=controller,
        raw_data_path=raw_data_path,
        output_path=output_path,
        run_base_dir=run_base_dir,
        checkpoints=checkpoints,
        code_bundle=code_bundle,
        input_path=input_path,
        image_cache=image_cache,
        interactive_mode=interactive_mode,
    )
|
|
@@ -0,0 +1,188 @@
|
|
|
1
|
+
"""
|
|
2
|
+
This module contains the methods for uploading and downloading inputs and outputs.
|
|
3
|
+
It uses the storage module to handle the actual uploading and downloading of files.
|
|
4
|
+
|
|
5
|
+
TODO: Convert to use streaming apis
|
|
6
|
+
"""
|
|
7
|
+
|
|
8
|
+
from flyteidl.core import errors_pb2
|
|
9
|
+
from flyteidl2.core import execution_pb2
|
|
10
|
+
from flyteidl2.task import common_pb2
|
|
11
|
+
|
|
12
|
+
import flyte.storage as storage
|
|
13
|
+
from flyte.models import PathRewrite
|
|
14
|
+
|
|
15
|
+
from .convert import Inputs, Outputs, _clean_error_code
|
|
16
|
+
|
|
17
|
+
# ------------------------------- CONSTANTS ------------------------------- #
# Well-known file names written under a task's input/output prefix.
_INPUTS_FILE_NAME = "inputs.pb"
_OUTPUTS_FILE_NAME = "outputs.pb"
# Name reserved for checkpoint data under the prefix.
_CHECKPOINT_FILE_NAME = "_flytecheckpoints"
_ERROR_FILE_NAME = "error.pb"
_REPORT_FILE_NAME = "report.html"
# Extension used for gzip-compressed pickled payloads.
_PKL_EXT = ".pkl.gz"
|
|
24
|
+
|
|
25
|
+
|
|
26
|
+
def pkl_path(base_path: str, pkl_name: str) -> str:
    """Return the location of the gzip-pickle file *pkl_name* under *base_path*."""
    file_name = f"{pkl_name}{_PKL_EXT}"
    return storage.join(base_path, file_name)
|
|
28
|
+
|
|
29
|
+
|
|
30
|
+
def inputs_path(base_path: str) -> str:
    """Return the canonical inputs.pb location under *base_path*."""
    joined = storage.join(base_path, _INPUTS_FILE_NAME)
    return joined
|
|
32
|
+
|
|
33
|
+
|
|
34
|
+
def outputs_path(base_path: str) -> str:
    """Return the canonical outputs.pb location under *base_path*."""
    joined = storage.join(base_path, _OUTPUTS_FILE_NAME)
    return joined
|
|
36
|
+
|
|
37
|
+
|
|
38
|
+
def error_path(base_path: str) -> str:
    """Return the canonical error.pb location under *base_path*."""
    joined = storage.join(base_path, _ERROR_FILE_NAME)
    return joined
|
|
40
|
+
|
|
41
|
+
|
|
42
|
+
def report_path(base_path: str) -> str:
    """Return the canonical report.html location under *base_path*."""
    joined = storage.join(base_path, _REPORT_FILE_NAME)
    return joined
|
|
44
|
+
|
|
45
|
+
|
|
46
|
+
# ------------------------------- UPLOAD Methods ------------------------------- #
|
|
47
|
+
|
|
48
|
+
|
|
49
|
+
async def upload_inputs(inputs: Inputs, input_path: str):
    """
    Serialize *inputs* and stream the bytes to *input_path*.

    :param Inputs inputs: Inputs
    :param str input_path: The path to upload the input file.
    """
    serialized = inputs.proto_inputs.SerializeToString()
    await storage.put_stream(data_iterable=serialized, to_path=input_path)
|
|
55
|
+
|
|
56
|
+
|
|
57
|
+
async def upload_outputs(outputs: Outputs, output_path: str, max_bytes: int = -1):
    """
    Serialize *outputs* and stream them to the outputs file under *output_path*.

    :param outputs: Outputs
    :param output_path: The path to upload the output file.
    :param max_bytes: Maximum number of bytes to write to the output file. Default is -1, which means no limit.
    :raises flyte.errors.InlineIOMaxBytesBreached: If the serialized size exceeds *max_bytes*.
    """
    serialized_size = outputs.proto_outputs.ByteSize()
    if max_bytes != -1 and serialized_size > max_bytes:
        import flyte.errors

        raise flyte.errors.InlineIOMaxBytesBreached(
            f"Output file at {output_path} exceeds max_bytes limit of {max_bytes},"
            f" size: {outputs.proto_outputs.ByteSize()}"
        )
    destination = outputs_path(output_path)
    payload = outputs.proto_outputs.SerializeToString()
    await storage.put_stream(data_iterable=payload, to_path=destination)
|
|
72
|
+
|
|
73
|
+
|
|
74
|
+
async def upload_error(err: execution_pb2.ExecutionError, output_prefix: str) -> str:
    """
    Persist *err* as an ErrorDocument under *output_prefix*.

    :param err: execution_pb2.ExecutionError
    :param output_prefix: The output prefix of the remote uri.
    :return: The uri the error document was written to.
    """
    # TODO - clean this up + conditionally set kind
    container_error = errors_pb2.ContainerError(
        code=err.code,
        message=err.message,
        # NOTE(review): kind is unconditionally RECOVERABLE today; see TODO above.
        kind=errors_pb2.ContainerError.RECOVERABLE,
        origin=err.kind,
        timestamp=err.timestamp,
        worker=err.worker,
    )
    document = errors_pb2.ErrorDocument(error=container_error)
    destination = error_path(output_prefix)
    return await storage.put_stream(data_iterable=document.SerializeToString(), to_path=destination)
|
|
92
|
+
|
|
93
|
+
|
|
94
|
+
# ------------------------------- DOWNLOAD Methods ------------------------------- #
|
|
95
|
+
async def load_inputs(path: str, max_bytes: int = -1, path_rewrite_config: PathRewrite | None = None) -> Inputs:
    """
    Read and parse the inputs file at *path*.

    :param path: Input file to be downloaded
    :param max_bytes: Maximum number of bytes to read from the input file. Default is -1, which means no limit.
    :param path_rewrite_config: If provided, rewrites paths in the input blobs according to the configuration.
    :return: Inputs object
    :raises flyte.errors.InlineIOMaxBytesBreached: If the file is larger than *max_bytes*.
    """
    chunks = []
    consumed = 0
    async for piece in storage.get_stream(path=path):
        consumed += len(piece)
        # The limit check fires before the offending chunk is retained,
        # mirroring the pre-append check of the original implementation.
        if max_bytes != -1 and consumed > max_bytes:
            import flyte.errors

            raise flyte.errors.InlineIOMaxBytesBreached(
                f"Input file at {path} exceeds max_bytes limit of {max_bytes}"
            )
        chunks.append(piece)
    payload = b"".join(chunks)

    proto = common_pb2.Inputs()
    proto.ParseFromString(payload)

    if path_rewrite_config is not None:
        # Only scalar blob uris are rewritten; other literal kinds pass through.
        for literal in proto.literals:
            if literal.value.HasField("scalar") and literal.value.scalar.HasField("blob"):
                blob = literal.value.scalar.blob
                if blob.uri.startswith(path_rewrite_config.old_prefix):
                    blob.uri = blob.uri.replace(
                        path_rewrite_config.old_prefix, path_rewrite_config.new_prefix, 1
                    )

    return Inputs(proto_inputs=proto)
|
|
132
|
+
|
|
133
|
+
|
|
134
|
+
async def load_outputs(path: str, max_bytes: int = -1) -> Outputs:
    """
    Read and parse the outputs file at *path*.

    :param path: output file to be loaded
    :param max_bytes: Maximum number of bytes to read from the output file.
        If -1, reads the entire file.
    :return: Outputs object
    :raises flyte.errors.InlineIOMaxBytesBreached: If the file is larger than *max_bytes*.
    """
    chunks = []
    consumed = 0
    async for piece in storage.get_stream(path=path):
        consumed += len(piece)
        # Raise before retaining the chunk that crosses the limit.
        if max_bytes != -1 and consumed > max_bytes:
            import flyte.errors

            raise flyte.errors.InlineIOMaxBytesBreached(
                f"Output file at {path} exceeds max_bytes limit of {max_bytes}"
            )
        chunks.append(piece)

    proto = common_pb2.Outputs()
    proto.ParseFromString(b"".join(chunks))
    return Outputs(proto_outputs=proto)
|
|
161
|
+
|
|
162
|
+
|
|
163
|
+
async def load_error(path: str) -> execution_pb2.ExecutionError:
    """
    Download and parse the error document at *path*.

    :param path: error file to be downloaded
    :return: execution_pb2.ExecutionError built from the document, or a generic
        SYSTEM error when the document carries no error payload.
    """
    err = errors_pb2.ErrorDocument()
    proto_str = b"".join([c async for c in storage.get_stream(path=path)])
    err.ParseFromString(proto_str)

    # BUGFIX: a singular proto message field is never None (accessing it returns
    # an empty submessage), so the original `err.error is not None` was always
    # true and the fallback below was unreachable. Use HasField to detect
    # whether the document actually carries an error payload.
    if err.HasField("error"):
        user_code, _server_code = _clean_error_code(err.error.code)
        return execution_pb2.ExecutionError(
            code=user_code,
            message=err.error.message,
            kind=err.error.origin,
            error_uri=path,
            timestamp=err.error.timestamp,
            worker=err.error.worker,
        )

    return execution_pb2.ExecutionError(
        code="Unknown",
        message=f"Received unloadable error from path {path}",
        kind=execution_pb2.ExecutionError.SYSTEM,
        error_uri=path,
    )
|
|
@@ -0,0 +1,152 @@
|
|
|
1
|
+
from typing import Dict, List, Optional, Tuple
|
|
2
|
+
|
|
3
|
+
from flyteidl2.core import tasks_pb2
|
|
4
|
+
|
|
5
|
+
from flyte._resources import CPUBaseType, DeviceClass, Resources
|
|
6
|
+
|
|
7
|
+
# Maps user-facing accelerator short names (as accepted in Resources) to the
# device identifier strings expected by the platform: "nvidia-*" for GPUs and
# "tpu-*" slice names for TPUs. Unknown names are passed through unchanged by
# the lookup sites.
ACCELERATOR_DEVICE_MAP = {
    "A100": "nvidia-tesla-a100",
    "A100 80G": "nvidia-a100-80gb",
    "A10": "nvidia-a10",
    "A10G": "nvidia-a10g",
    "A100G": "nvidia-a100g",
    "L4": "nvidia-l4",
    "L40s": "nvidia-l40s",
    "L4_VWS": "nvidia-l4-vws",
    "K80": "nvidia-tesla-k80",
    "M60": "nvidia-tesla-m60",
    "P4": "nvidia-tesla-p4",
    "P100": "nvidia-tesla-p100",
    "T4": "nvidia-tesla-t4",
    "V100": "nvidia-tesla-v100",
    "V5E": "tpu-v5-lite-podslice",
    "V5P": "tpu-v5p-slice",
    "V6E": "tpu-v6e-slice",
}
|
|
26
|
+
|
|
27
|
+
# Maps the user-facing DeviceClass literal to its GPUAccelerator proto enum
# counterpart; lookup sites default to NVIDIA_GPU for unknown classes.
_DeviceClassToProto: Dict[DeviceClass, "tasks_pb2.GPUAccelerator.DeviceClass"] = {
    "GPU": tasks_pb2.GPUAccelerator.NVIDIA_GPU,
    "TPU": tasks_pb2.GPUAccelerator.GOOGLE_TPU,
    "NEURON": tasks_pb2.GPUAccelerator.AMAZON_NEURON,
    "AMD_GPU": tasks_pb2.GPUAccelerator.AMD_GPU,
    "HABANA_GAUDI": tasks_pb2.GPUAccelerator.HABANA_GAUDI,
}
|
|
34
|
+
|
|
35
|
+
|
|
36
|
+
def _get_cpu_resource_entry(cpu: CPUBaseType) -> tasks_pb2.Resources.ResourceEntry:
    """Build a CPU resource entry, stringifying the quantity."""
    entry = tasks_pb2.Resources.ResourceEntry()
    entry.name = tasks_pb2.Resources.ResourceName.CPU
    entry.value = str(cpu)
    return entry
|
|
41
|
+
|
|
42
|
+
|
|
43
|
+
def _get_memory_resource_entry(memory: str) -> tasks_pb2.Resources.ResourceEntry:
    """Build a MEMORY resource entry carrying the quantity string verbatim."""
    entry = tasks_pb2.Resources.ResourceEntry()
    entry.name = tasks_pb2.Resources.ResourceName.MEMORY
    entry.value = memory
    return entry
|
|
48
|
+
|
|
49
|
+
|
|
50
|
+
def _get_gpu_resource_entry(gpu: int) -> tasks_pb2.Resources.ResourceEntry:
    """Build a GPU resource entry from an integer device count."""
    entry = tasks_pb2.Resources.ResourceEntry()
    entry.name = tasks_pb2.Resources.ResourceName.GPU
    entry.value = str(gpu)
    return entry
|
|
55
|
+
|
|
56
|
+
|
|
57
|
+
def _get_gpu_extended_resource_entry(resources: Resources) -> Optional[tasks_pb2.GPUAccelerator]:
    """
    Build a GPUAccelerator proto for *resources*, or None when the gpu request
    is absent, a plain integer count (no device metadata), or no device can be
    resolved.

    :raises RuntimeError: If a device is resolved without a device type.
    """
    if resources is None or resources.gpu is None or isinstance(resources.gpu, int):
        return None
    device = resources.get_device()
    if device is None:
        return None

    if device.device is None:
        raise RuntimeError("Device type must be specified for GPU string.")
    # Translate friendly accelerator names (e.g. "T4") to platform device
    # strings, passing unknown names through untouched.
    device_name = ACCELERATOR_DEVICE_MAP.get(device.device, device.device)
    return tasks_pb2.GPUAccelerator(
        device=device_name,
        partition_size=device.partition if device.partition else None,
        device_class=_DeviceClassToProto.get(device.device_class, tasks_pb2.GPUAccelerator.NVIDIA_GPU),
    )
|
|
77
|
+
|
|
78
|
+
|
|
79
|
+
def _get_disk_resource_entry(disk: str) -> tasks_pb2.Resources.ResourceEntry:
    """Build an EPHEMERAL_STORAGE resource entry from the disk quantity string."""
    entry = tasks_pb2.Resources.ResourceEntry()
    entry.name = tasks_pb2.Resources.ResourceName.EPHEMERAL_STORAGE
    entry.value = disk
    return entry
|
|
84
|
+
|
|
85
|
+
|
|
86
|
+
def get_proto_extended_resources(resources: Resources | None) -> Optional[tasks_pb2.ExtendedResources]:
    """
    Build the ExtendedResources proto (accelerator + shared memory) for *resources*.

    TODO Implement partitioning logic string handling for GPU

    :param resources: User-facing resources, possibly None.
    :return: The proto, or None when there is nothing extended to report.
    """
    if resources is None:
        return None
    accelerator = _get_gpu_extended_resource_entry(resources)
    shm_size = resources.get_shared_memory()
    if accelerator is None and shm_size is None:
        return None
    shared_memory = None
    if shm_size is not None:
        shared_memory = tasks_pb2.SharedMemory(
            mount_path="/dev/shm",
            mount_name="flyte-shm",
            size_limit=shm_size,
        )
    return tasks_pb2.ExtendedResources(gpu_accelerator=accelerator, shared_memory=shared_memory)
|
|
105
|
+
|
|
106
|
+
|
|
107
|
+
def _convert_resources_to_resource_entries(
    resources: Resources | None,
) -> Tuple[List[tasks_pb2.Resources.ResourceEntry], List[tasks_pb2.Resources.ResourceEntry]]:
    """
    Split *resources* into proto request and limit entry lists.

    Tuple-valued cpu/memory are interpreted as (request, limit); scalar values
    become requests only. GPU and disk contribute requests only.

    :param resources: User-facing resources, possibly None.
    :return: (request entries, limit entries); both empty when resources is None.
    """
    requests: List[tasks_pb2.Resources.ResourceEntry] = []
    limits: List[tasks_pb2.Resources.ResourceEntry] = []
    if resources is None:
        return requests, limits

    def _add(value, builder):
        # Shared tuple handling: tuples mean (request, limit), a lone value is
        # a request only.
        if isinstance(value, tuple):
            requests.append(builder(value[0]))
            limits.append(builder(value[1]))
        else:
            requests.append(builder(value))

    if resources.cpu is not None:
        _add(resources.cpu, _get_cpu_resource_entry)

    if resources.memory is not None:
        _add(resources.memory, _get_memory_resource_entry)

    if resources.gpu is not None:
        device = resources.get_device()
        if device is not None:
            requests.append(_get_gpu_resource_entry(device.quantity))

    if resources.disk is not None:
        requests.append(_get_disk_resource_entry(resources.disk))

    return requests, limits
|
|
137
|
+
|
|
138
|
+
|
|
139
|
+
def get_proto_resources(resources: Resources | None) -> Optional[tasks_pb2.Resources]:
    """
    Get main resources IDL representation from the resources object.

    :param resources: User facing Resources object containing potentially both requests and limits
    :return: The given resources as requests and limits, or None when nothing is set.
    """
    if resources is None:
        return None
    requests, limits = _convert_resources_to_resource_entries(resources)
    if requests or limits:
        return tasks_pb2.Resources(requests=requests, limits=limits)
    return None
|
|
@@ -0,0 +1,125 @@
|
|
|
1
|
+
import hashlib
|
|
2
|
+
import typing
|
|
3
|
+
from venv import logger
|
|
4
|
+
|
|
5
|
+
from flyteidl2.core import tasks_pb2
|
|
6
|
+
|
|
7
|
+
import flyte.errors
|
|
8
|
+
from flyte import ReusePolicy
|
|
9
|
+
from flyte._pod import _PRIMARY_CONTAINER_DEFAULT_NAME, _PRIMARY_CONTAINER_NAME_FIELD
|
|
10
|
+
from flyte.models import CodeBundle
|
|
11
|
+
|
|
12
|
+
|
|
13
|
+
def extract_unique_id_and_image(
    env_name: str,
    code_bundle: CodeBundle | None,
    task: tasks_pb2.TaskTemplate,
    reuse_policy: ReusePolicy,
) -> typing.Tuple[str, str]:
    """
    Compute a unique ID for the task based on its name, version, image URI, and code bundle.

    The ID is a sha256 hex digest over: the environment name, a deterministic
    serialization of the container / pod spec (with per-run args stripped), the
    reuse-policy settings, the code bundle version, the security context, and
    selected task metadata. Tasks that can share a reusable environment therefore
    hash to the same value. Do not reorder or reformat the hashed components —
    the digest is used as a persisted environment version.

    :param env_name: Name of the reusable environment.
    :param reuse_policy: The reuse policy for the task.
    :param task: The task template.
    :param code_bundle: The code bundle associated with the task.
    :return: A unique ID string and the image URI.
    """
    image = ""
    # Deterministic serialization of the container or pod; stays "" for task shapes
    # that carry neither field.
    container_ser = ""
    if task.HasField("container"):
        # Work on a copy so the caller's template is left untouched.
        copied_container = tasks_pb2.Container()
        copied_container.CopyFrom(task.container)
        copied_container.args.clear()  # Clear args to ensure deterministic serialization
        container_ser = copied_container.SerializeToString(deterministic=True)
        image = copied_container.image

    if task.HasField("k8s_pod"):
        # Clear args to ensure deterministic serialization
        copied_k8s_pod = tasks_pb2.K8sPod()
        copied_k8s_pod.CopyFrom(task.k8s_pod)
        # NOTE(review): proto3 map fields are never None, so this branch is always
        # taken; a missing key does NOT fall back to the default container name —
        # confirm that is intended.
        if task.config is not None:
            primary_container_name = task.config[_PRIMARY_CONTAINER_NAME_FIELD]
        else:
            primary_container_name = _PRIMARY_CONTAINER_DEFAULT_NAME
        for container in copied_k8s_pod.pod_spec["containers"]:
            if "name" in container and container["name"] == primary_container_name:
                image = container["image"]
                # Only the primary container's run-specific args are dropped before
                # hashing; sidecar args still influence the digest — TODO confirm.
                del container["args"]
        container_ser = copied_k8s_pod.SerializeToString(deterministic=True)

    # container_ser is bytes: embedding it in the f-string uses its repr, which is
    # deterministic and therefore safe to hash.
    components = f"{env_name}:{container_ser}"
    if isinstance(reuse_policy.replicas, tuple):
        # Tuple form carries (min, max) replica counts.
        components += f":{reuse_policy.replicas[0]}:{reuse_policy.replicas[1]}"
    else:
        components += f":{reuse_policy.replicas}"
    if reuse_policy.ttl is not None:
        components += f":{reuse_policy.ttl.total_seconds()}"
    if reuse_policy.get_scaledown_ttl() is not None:
        components += f":{reuse_policy.get_scaledown_ttl()}"
    if code_bundle is not None:
        components += f":{code_bundle.computed_version}"
    # NOTE(review): proto3 message and scalar fields are never None, so the three
    # guards below always pass and each component is always appended (defaults
    # included) — confirm that is intended.
    if task.security_context is not None:
        # Bytes again; the repr is deterministic, see note above.
        security_ctx_str = task.security_context.SerializeToString(deterministic=True)
        components += f":{security_ctx_str}"
    if task.metadata.interruptible is not None:
        components += f":{task.metadata.interruptible}"
    if task.metadata.pod_template_name is not None:
        components += f":{task.metadata.pod_template_name}"
    sha256 = hashlib.sha256()
    sha256.update(components.encode("utf-8"))
    return sha256.hexdigest(), image
|
|
71
|
+
|
|
72
|
+
|
|
73
|
+
def add_reusable(
    task: tasks_pb2.TaskTemplate,
    reuse_policy: ReusePolicy | None,
    code_bundle: CodeBundle | None,
    parent_env_name: str | None = None,
) -> tasks_pb2.TaskTemplate:
    """
    Convert a ReusePolicy to a custom configuration dictionary.

    Mutates ``task`` in place: fills ``task.custom`` with the actor spec derived
    from the reuse policy and switches ``task.type`` to ``"actor"``.

    :param task: The task to which the reusable policy will be added.
    :param reuse_policy: The reuse policy to apply; None is a no-op.
    :param code_bundle: The code bundle associated with the task.
    :param parent_env_name: The name of the parent environment, if any.
    :return: The modified task with the reusable policy added.
    """
    if reuse_policy is None:
        return task

    # Plugin tasks arrive with a pre-populated `custom` field; overwriting it below
    # would destroy the plugin config, so reject them up front.
    if task.HasField("custom"):
        raise flyte.errors.RuntimeUserError(
            "BadConfiguration", "Plugins do not support reusable policy. Only container tasks and pods."
        )

    # NOTE(review): `logger` is imported from `venv` at the top of this module —
    # almost certainly an auto-import accident; should be the project logger. Confirm.
    logger.debug(f"Adding reusable policy for task: {task.id.name}")
    name = parent_env_name if parent_env_name else ""
    # NOTE(review): an explicitly empty parent_env_name ("") leaves name == ""
    # instead of falling back to the task-derived name — confirm intended.
    if parent_env_name is None:
        # Env name is the prefix of the "<env>.<task>" task id.
        name = task.id.name.split(".")[0]

    version, image_uri = extract_unique_id_and_image(
        env_name=name, code_bundle=code_bundle, task=task, reuse_policy=reuse_policy
    )

    scaledown_ttl = reuse_policy.get_scaledown_ttl()

    # NOTE(review): if `custom` is a protobuf Struct field, direct dict assignment
    # normally raises; the usual spelling is `task.custom.update({...})`. Confirm
    # this statement actually executes.
    task.custom = {
        "name": name,
        "version": version[:15],  # Use only the first 15 characters for the version
        "type": "actor",
        "spec": {
            "container_image": image_uri,
            "backlog_length": None,
            "parallelism": reuse_policy.concurrency,
            "min_replica_count": reuse_policy.min_replicas,
            "replica_count": reuse_policy.max_replicas,
            "ttl_seconds": reuse_policy.ttl.total_seconds() if reuse_policy.ttl else None,
            "scaledown_ttl_seconds": scaledown_ttl.total_seconds() if scaledown_ttl else None,
        },
    }

    task.type = "actor"
    logger.info(f"Reusable task {task.id.name} with config {task.custom}")

    return task
|