flyte 0.1.0__py3-none-any.whl → 0.2.0a0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of flyte might be problematic. Click here for more details.
- flyte/__init__.py +78 -2
- flyte/_bin/__init__.py +0 -0
- flyte/_bin/runtime.py +152 -0
- flyte/_build.py +26 -0
- flyte/_cache/__init__.py +12 -0
- flyte/_cache/cache.py +145 -0
- flyte/_cache/defaults.py +9 -0
- flyte/_cache/policy_function_body.py +42 -0
- flyte/_code_bundle/__init__.py +8 -0
- flyte/_code_bundle/_ignore.py +113 -0
- flyte/_code_bundle/_packaging.py +187 -0
- flyte/_code_bundle/_utils.py +323 -0
- flyte/_code_bundle/bundle.py +209 -0
- flyte/_context.py +152 -0
- flyte/_deploy.py +243 -0
- flyte/_doc.py +29 -0
- flyte/_docstring.py +32 -0
- flyte/_environment.py +84 -0
- flyte/_excepthook.py +37 -0
- flyte/_group.py +32 -0
- flyte/_hash.py +23 -0
- flyte/_image.py +762 -0
- flyte/_initialize.py +492 -0
- flyte/_interface.py +84 -0
- flyte/_internal/__init__.py +3 -0
- flyte/_internal/controllers/__init__.py +128 -0
- flyte/_internal/controllers/_local_controller.py +193 -0
- flyte/_internal/controllers/_trace.py +41 -0
- flyte/_internal/controllers/remote/__init__.py +60 -0
- flyte/_internal/controllers/remote/_action.py +146 -0
- flyte/_internal/controllers/remote/_client.py +47 -0
- flyte/_internal/controllers/remote/_controller.py +494 -0
- flyte/_internal/controllers/remote/_core.py +410 -0
- flyte/_internal/controllers/remote/_informer.py +361 -0
- flyte/_internal/controllers/remote/_service_protocol.py +50 -0
- flyte/_internal/imagebuild/__init__.py +11 -0
- flyte/_internal/imagebuild/docker_builder.py +427 -0
- flyte/_internal/imagebuild/image_builder.py +246 -0
- flyte/_internal/imagebuild/remote_builder.py +0 -0
- flyte/_internal/resolvers/__init__.py +0 -0
- flyte/_internal/resolvers/_task_module.py +54 -0
- flyte/_internal/resolvers/common.py +31 -0
- flyte/_internal/resolvers/default.py +28 -0
- flyte/_internal/runtime/__init__.py +0 -0
- flyte/_internal/runtime/convert.py +342 -0
- flyte/_internal/runtime/entrypoints.py +135 -0
- flyte/_internal/runtime/io.py +136 -0
- flyte/_internal/runtime/resources_serde.py +138 -0
- flyte/_internal/runtime/task_serde.py +330 -0
- flyte/_internal/runtime/taskrunner.py +191 -0
- flyte/_internal/runtime/types_serde.py +54 -0
- flyte/_logging.py +135 -0
- flyte/_map.py +215 -0
- flyte/_pod.py +19 -0
- flyte/_protos/__init__.py +0 -0
- flyte/_protos/common/authorization_pb2.py +66 -0
- flyte/_protos/common/authorization_pb2.pyi +108 -0
- flyte/_protos/common/authorization_pb2_grpc.py +4 -0
- flyte/_protos/common/identifier_pb2.py +71 -0
- flyte/_protos/common/identifier_pb2.pyi +82 -0
- flyte/_protos/common/identifier_pb2_grpc.py +4 -0
- flyte/_protos/common/identity_pb2.py +48 -0
- flyte/_protos/common/identity_pb2.pyi +72 -0
- flyte/_protos/common/identity_pb2_grpc.py +4 -0
- flyte/_protos/common/list_pb2.py +36 -0
- flyte/_protos/common/list_pb2.pyi +71 -0
- flyte/_protos/common/list_pb2_grpc.py +4 -0
- flyte/_protos/common/policy_pb2.py +37 -0
- flyte/_protos/common/policy_pb2.pyi +27 -0
- flyte/_protos/common/policy_pb2_grpc.py +4 -0
- flyte/_protos/common/role_pb2.py +37 -0
- flyte/_protos/common/role_pb2.pyi +53 -0
- flyte/_protos/common/role_pb2_grpc.py +4 -0
- flyte/_protos/common/runtime_version_pb2.py +28 -0
- flyte/_protos/common/runtime_version_pb2.pyi +24 -0
- flyte/_protos/common/runtime_version_pb2_grpc.py +4 -0
- flyte/_protos/logs/dataplane/payload_pb2.py +100 -0
- flyte/_protos/logs/dataplane/payload_pb2.pyi +177 -0
- flyte/_protos/logs/dataplane/payload_pb2_grpc.py +4 -0
- flyte/_protos/secret/definition_pb2.py +49 -0
- flyte/_protos/secret/definition_pb2.pyi +93 -0
- flyte/_protos/secret/definition_pb2_grpc.py +4 -0
- flyte/_protos/secret/payload_pb2.py +62 -0
- flyte/_protos/secret/payload_pb2.pyi +94 -0
- flyte/_protos/secret/payload_pb2_grpc.py +4 -0
- flyte/_protos/secret/secret_pb2.py +38 -0
- flyte/_protos/secret/secret_pb2.pyi +6 -0
- flyte/_protos/secret/secret_pb2_grpc.py +198 -0
- flyte/_protos/secret/secret_pb2_grpc_grpc.py +198 -0
- flyte/_protos/validate/validate/validate_pb2.py +76 -0
- flyte/_protos/workflow/common_pb2.py +27 -0
- flyte/_protos/workflow/common_pb2.pyi +14 -0
- flyte/_protos/workflow/common_pb2_grpc.py +4 -0
- flyte/_protos/workflow/environment_pb2.py +29 -0
- flyte/_protos/workflow/environment_pb2.pyi +12 -0
- flyte/_protos/workflow/environment_pb2_grpc.py +4 -0
- flyte/_protos/workflow/node_execution_service_pb2.py +26 -0
- flyte/_protos/workflow/node_execution_service_pb2.pyi +4 -0
- flyte/_protos/workflow/node_execution_service_pb2_grpc.py +32 -0
- flyte/_protos/workflow/queue_service_pb2.py +105 -0
- flyte/_protos/workflow/queue_service_pb2.pyi +146 -0
- flyte/_protos/workflow/queue_service_pb2_grpc.py +172 -0
- flyte/_protos/workflow/run_definition_pb2.py +128 -0
- flyte/_protos/workflow/run_definition_pb2.pyi +314 -0
- flyte/_protos/workflow/run_definition_pb2_grpc.py +4 -0
- flyte/_protos/workflow/run_logs_service_pb2.py +41 -0
- flyte/_protos/workflow/run_logs_service_pb2.pyi +28 -0
- flyte/_protos/workflow/run_logs_service_pb2_grpc.py +69 -0
- flyte/_protos/workflow/run_service_pb2.py +129 -0
- flyte/_protos/workflow/run_service_pb2.pyi +171 -0
- flyte/_protos/workflow/run_service_pb2_grpc.py +412 -0
- flyte/_protos/workflow/state_service_pb2.py +66 -0
- flyte/_protos/workflow/state_service_pb2.pyi +75 -0
- flyte/_protos/workflow/state_service_pb2_grpc.py +138 -0
- flyte/_protos/workflow/task_definition_pb2.py +79 -0
- flyte/_protos/workflow/task_definition_pb2.pyi +81 -0
- flyte/_protos/workflow/task_definition_pb2_grpc.py +4 -0
- flyte/_protos/workflow/task_service_pb2.py +60 -0
- flyte/_protos/workflow/task_service_pb2.pyi +59 -0
- flyte/_protos/workflow/task_service_pb2_grpc.py +138 -0
- flyte/_resources.py +226 -0
- flyte/_retry.py +32 -0
- flyte/_reusable_environment.py +25 -0
- flyte/_run.py +482 -0
- flyte/_secret.py +61 -0
- flyte/_task.py +449 -0
- flyte/_task_environment.py +183 -0
- flyte/_timeout.py +47 -0
- flyte/_tools.py +27 -0
- flyte/_trace.py +120 -0
- flyte/_utils/__init__.py +26 -0
- flyte/_utils/asyn.py +119 -0
- flyte/_utils/async_cache.py +139 -0
- flyte/_utils/coro_management.py +23 -0
- flyte/_utils/file_handling.py +72 -0
- flyte/_utils/helpers.py +134 -0
- flyte/_utils/lazy_module.py +54 -0
- flyte/_utils/org_discovery.py +57 -0
- flyte/_utils/uv_script_parser.py +49 -0
- flyte/_version.py +21 -0
- flyte/cli/__init__.py +3 -0
- flyte/cli/_abort.py +28 -0
- flyte/cli/_common.py +337 -0
- flyte/cli/_create.py +145 -0
- flyte/cli/_delete.py +23 -0
- flyte/cli/_deploy.py +152 -0
- flyte/cli/_gen.py +163 -0
- flyte/cli/_get.py +310 -0
- flyte/cli/_params.py +538 -0
- flyte/cli/_run.py +231 -0
- flyte/cli/main.py +166 -0
- flyte/config/__init__.py +3 -0
- flyte/config/_config.py +216 -0
- flyte/config/_internal.py +64 -0
- flyte/config/_reader.py +207 -0
- flyte/connectors/__init__.py +0 -0
- flyte/errors.py +172 -0
- flyte/extras/__init__.py +5 -0
- flyte/extras/_container.py +263 -0
- flyte/io/__init__.py +27 -0
- flyte/io/_dir.py +448 -0
- flyte/io/_file.py +467 -0
- flyte/io/_structured_dataset/__init__.py +129 -0
- flyte/io/_structured_dataset/basic_dfs.py +219 -0
- flyte/io/_structured_dataset/structured_dataset.py +1061 -0
- flyte/models.py +391 -0
- flyte/remote/__init__.py +26 -0
- flyte/remote/_client/__init__.py +0 -0
- flyte/remote/_client/_protocols.py +133 -0
- flyte/remote/_client/auth/__init__.py +12 -0
- flyte/remote/_client/auth/_auth_utils.py +14 -0
- flyte/remote/_client/auth/_authenticators/__init__.py +0 -0
- flyte/remote/_client/auth/_authenticators/base.py +397 -0
- flyte/remote/_client/auth/_authenticators/client_credentials.py +73 -0
- flyte/remote/_client/auth/_authenticators/device_code.py +118 -0
- flyte/remote/_client/auth/_authenticators/external_command.py +79 -0
- flyte/remote/_client/auth/_authenticators/factory.py +200 -0
- flyte/remote/_client/auth/_authenticators/pkce.py +516 -0
- flyte/remote/_client/auth/_channel.py +215 -0
- flyte/remote/_client/auth/_client_config.py +83 -0
- flyte/remote/_client/auth/_default_html.py +32 -0
- flyte/remote/_client/auth/_grpc_utils/__init__.py +0 -0
- flyte/remote/_client/auth/_grpc_utils/auth_interceptor.py +288 -0
- flyte/remote/_client/auth/_grpc_utils/default_metadata_interceptor.py +151 -0
- flyte/remote/_client/auth/_keyring.py +143 -0
- flyte/remote/_client/auth/_token_client.py +260 -0
- flyte/remote/_client/auth/errors.py +16 -0
- flyte/remote/_client/controlplane.py +95 -0
- flyte/remote/_console.py +18 -0
- flyte/remote/_data.py +159 -0
- flyte/remote/_logs.py +176 -0
- flyte/remote/_project.py +85 -0
- flyte/remote/_run.py +970 -0
- flyte/remote/_secret.py +132 -0
- flyte/remote/_task.py +391 -0
- flyte/report/__init__.py +3 -0
- flyte/report/_report.py +178 -0
- flyte/report/_template.html +124 -0
- flyte/storage/__init__.py +29 -0
- flyte/storage/_config.py +233 -0
- flyte/storage/_remote_fs.py +34 -0
- flyte/storage/_storage.py +271 -0
- flyte/storage/_utils.py +5 -0
- flyte/syncify/__init__.py +56 -0
- flyte/syncify/_api.py +371 -0
- flyte/types/__init__.py +36 -0
- flyte/types/_interface.py +40 -0
- flyte/types/_pickle.py +118 -0
- flyte/types/_renderer.py +162 -0
- flyte/types/_string_literals.py +120 -0
- flyte/types/_type_engine.py +2287 -0
- flyte/types/_utils.py +80 -0
- flyte-0.2.0a0.dist-info/METADATA +249 -0
- flyte-0.2.0a0.dist-info/RECORD +218 -0
- {flyte-0.1.0.dist-info → flyte-0.2.0a0.dist-info}/WHEEL +2 -1
- flyte-0.2.0a0.dist-info/entry_points.txt +3 -0
- flyte-0.2.0a0.dist-info/top_level.txt +1 -0
- flyte-0.1.0.dist-info/METADATA +0 -6
- flyte-0.1.0.dist-info/RECORD +0 -5
|
@@ -0,0 +1,209 @@
|
|
|
1
|
+
import asyncio
|
|
2
|
+
import gzip
|
|
3
|
+
import logging
|
|
4
|
+
import os
|
|
5
|
+
import pathlib
|
|
6
|
+
import tempfile
|
|
7
|
+
from pathlib import Path
|
|
8
|
+
from typing import ClassVar, Type
|
|
9
|
+
|
|
10
|
+
from flyteidl.core.tasks_pb2 import TaskTemplate
|
|
11
|
+
|
|
12
|
+
from flyte._logging import log, logger
|
|
13
|
+
from flyte._utils import AsyncLRUCache
|
|
14
|
+
from flyte.models import CodeBundle
|
|
15
|
+
|
|
16
|
+
from ._ignore import GitIgnore, Ignore, StandardIgnore
|
|
17
|
+
from ._packaging import create_bundle, list_files_to_bundle, print_ls_tree
|
|
18
|
+
from ._utils import CopyFiles, hash_file
|
|
19
|
+
|
|
20
|
+
# File-name suffixes for generated code-bundle artifacts.
_pickled_file_extension = ".pkl.gz"  # gzip-compressed cloudpickle bundle
_tar_file_extension = ".tar.gz"  # gzip-compressed tarball bundle
|
|
22
|
+
|
|
23
|
+
|
|
24
|
+
class _PklCache:
    """Process-wide async LRU cache that de-duplicates pickled-bundle uploads by digest."""

    # Maps a bundle digest to the remote path it was uploaded to.
    _pkl_cache: ClassVar[AsyncLRUCache[str, str]] = AsyncLRUCache[str, str](maxsize=100)

    @classmethod
    async def put(cls, digest: str, upload_to_path: str, from_path: pathlib.Path) -> str:
        """
        Upload the pickled code bundle, reusing the previously uploaded path when the
        same digest has already been seen.

        :param digest: The hash digest of the task template.
        :param upload_to_path: The path to upload the pickled file to.
        :param from_path: The path to read the pickled file from.
        :return: The remote path the pickled file was uploaded to.
        """
        import flyte.storage as storage

        async def _upload() -> str:
            # Only runs on a cache miss; hits return the cached remote path directly.
            return await storage.put(str(from_path), to_path=str(upload_to_path))

        return await cls._pkl_cache.get(key=digest, value_func=_upload)
|
|
46
|
+
|
|
47
|
+
|
|
48
|
+
async def build_pkl_bundle(
    o: TaskTemplate,
    upload_to_controlplane: bool = True,
    upload_from_dataplane_base_path: str | None = None,
    copy_bundle_to: pathlib.Path | None = None,
) -> CodeBundle:
    """
    Build a pickled code bundle for the given task template.

    TODO We can optimize this by having an LRU cache for the function, this is so that if the same task is being
        pickled multiple times, we can avoid the overhead of pickling it multiple times, by copying to a common place
        and reusing based on task hash.

    :param o: Object to be pickled. This is the task template.
    :param upload_to_controlplane: Whether to upload the pickled file to the control plane or not
    :param upload_from_dataplane_base_path: If we are on the dataplane, this is the path where the
        pickled file should be uploaded to. upload_to_controlplane has to be False in this case.
    :param copy_bundle_to: If set, the bundle will be copied to this path. This is used for testing purposes.
    :return: CodeBundle object containing the pickled file path and the computed version.
    """
    import cloudpickle

    # The two upload destinations are mutually exclusive.
    if upload_to_controlplane and upload_from_dataplane_base_path:
        raise ValueError("Cannot upload to control plane and upload from dataplane path at the same time.")

    logger.debug("Building pickled code bundle.")
    with tempfile.TemporaryDirectory() as tmp_dir:
        pkl_path = pathlib.Path(tmp_dir) / f"code_bundle{_pickled_file_extension}"
        # mtime=0 keeps the gzip output deterministic so the digest is stable across builds.
        with gzip.GzipFile(filename=pkl_path, mode="wb", mtime=0) as gzipped:
            cloudpickle.dump(o, gzipped)

        if upload_to_controlplane:
            logger.debug("Uploading pickled code bundle to control plane.")
            from flyte.remote import upload_file

            hash_digest, remote_path = await upload_file(pkl_path)
            return CodeBundle(pkl=remote_path, computed_version=hash_digest)

        if upload_from_dataplane_base_path:
            from flyte._internal.runtime import io

            _, digest, _ = hash_file(file_path=pkl_path)
            upload_path = io.pkl_path(upload_from_dataplane_base_path, digest)
            logger.debug(f"Uploading pickled code bundle to dataplane path {upload_path}.")
            final_path = await _PklCache.put(digest=digest, upload_to_path=upload_path, from_path=pkl_path)
            return CodeBundle(pkl=final_path, computed_version=digest)

        # Neither destination set: dryrun — hash locally, optionally copy out of the tmp dir.
        logger.debug("Dryrun enabled, not uploading pickled code bundle.")
        _, digest, _ = hash_file(file_path=pkl_path)
        if copy_bundle_to:
            import shutil

            # Preserve the bundle outside the temporary directory before it is cleaned up.
            shutil.copy(pkl_path, copy_bundle_to)
            return CodeBundle(pkl=str(copy_bundle_to / pkl_path.name), computed_version=digest)
        return CodeBundle(pkl=str(pkl_path), computed_version=digest)
|
|
110
|
+
|
|
111
|
+
|
|
112
|
+
async def build_code_bundle(
    from_dir: Path,
    *ignore: Type[Ignore],
    extract_dir: str = ".",
    dryrun: bool = False,
    copy_bundle_to: pathlib.Path | None = None,
    copy_style: CopyFiles = "loaded_modules",
) -> CodeBundle:
    """
    Build a tarball code bundle for the current environment.

    :param from_dir: Root directory of the source to bundle.
    :param ignore: Ignore classes applied while collecting files; defaults to StandardIgnore + GitIgnore.
    :param extract_dir: Directory the bundle is extracted to inside the container; defaults to the current
        working directory.
    :param dryrun: If dryrun is enabled, files will not be uploaded to the control plane.
    :param copy_bundle_to: If set, the bundle will be copied to this path. This is used for testing purposes.
    :param copy_style: What to put into the tarball. (either all, or loaded_modules. if none, skip this function)
    :return: The code bundle, which contains the path where the code was zipped to.
    """
    logger.debug("Building code bundle.")
    from flyte.remote import upload_file

    if not ignore:
        ignore = (StandardIgnore, GitIgnore)

    logger.debug(f"Finding files to bundle, ignoring as configured by: {ignore}")
    files, digest = list_files_to_bundle(from_dir, True, *ignore, copy_style=copy_style)
    if logger.getEffectiveLevel() <= logging.INFO:
        print_ls_tree(from_dir, files)

    logger.debug("Building code bundle.")
    with tempfile.TemporaryDirectory() as tmp_dir:
        bundle_path, tar_size, archive_size = create_bundle(from_dir, pathlib.Path(tmp_dir), files, digest)
        logger.info(f"Code bundle created at {bundle_path}, size: {tar_size} MB, archive size: {archive_size} MB")
        if dryrun:
            remote_path = "na"
            if copy_bundle_to:
                import shutil

                # Preserve the bundle outside the temporary directory for inspection.
                shutil.copy(bundle_path, copy_bundle_to)
                remote_path = str(copy_bundle_to / bundle_path.name)
            # No upload happened, so compute the version hash locally.
            _, hash_digest, _ = hash_file(file_path=bundle_path)
        else:
            hash_digest, remote_path = await upload_file(bundle_path)
            logger.debug(f"Code bundle uploaded to {remote_path}")
        return CodeBundle(tgz=remote_path, destination=extract_dir, computed_version=hash_digest)
|
|
160
|
+
|
|
161
|
+
|
|
162
|
+
@log(level=logging.INFO)
async def download_bundle(bundle: CodeBundle) -> pathlib.Path:
    """
    Download a code bundle (tgz | pkl) into the bundle's destination directory.

    :param bundle: The code bundle to download.
    :return: The absolute path to the downloaded code bundle.
    :raises ValueError: If the destination is not a directory, or the bundle has neither tgz nor pkl.
    :raises RuntimeError: If extracting the tarball fails.
    """
    import flyte.storage as storage

    dest = pathlib.Path(bundle.destination)
    if not dest.is_dir():
        raise ValueError(f"Destination path should be a directory, found {dest}, {dest.stat()}")

    # TODO make storage apis better to accept pathlib.Path
    if bundle.tgz:
        target = dest / os.path.basename(bundle.tgz)
        fetched = await storage.get(bundle.tgz, str(target.absolute()))
        local_bundle = pathlib.Path(fetched)
        # Extract with the system tar into the destination directory.
        process = await asyncio.create_subprocess_exec(
            "tar",
            "-xvf",
            str(local_bundle),
            "-C",
            str(dest),
            stdout=asyncio.subprocess.PIPE,
            stderr=asyncio.subprocess.PIPE,
        )
        _stdout, stderr = await process.communicate()
        if process.returncode != 0:
            raise RuntimeError(stderr.decode())
        return local_bundle.absolute()

    if bundle.pkl:
        # The pkl bundle is downloaded as-is; decompression happens when it is loaded.
        target = dest / os.path.basename(bundle.pkl)
        fetched = await storage.get(bundle.pkl, str(target.absolute()))
        return pathlib.Path(fetched).absolute()

    raise ValueError("Code bundle should be either tgz or pkl, found neither.")
|
flyte/_context.py
ADDED
|
@@ -0,0 +1,152 @@
|
|
|
1
|
+
from __future__ import annotations
|
|
2
|
+
|
|
3
|
+
import contextvars
|
|
4
|
+
from dataclasses import dataclass, replace
|
|
5
|
+
from typing import TYPE_CHECKING, Awaitable, Callable, Optional, ParamSpec, TypeVar
|
|
6
|
+
|
|
7
|
+
from flyte._logging import logger
|
|
8
|
+
from flyte.models import GroupData, RawDataPath, TaskContext
|
|
9
|
+
|
|
10
|
+
if TYPE_CHECKING:
|
|
11
|
+
from flyte.report import Report
|
|
12
|
+
|
|
13
|
+
P = ParamSpec("P") # capture the function's parameters
|
|
14
|
+
R = TypeVar("R") # return type
|
|
15
|
+
|
|
16
|
+
|
|
17
|
+
@dataclass(frozen=True, kw_only=True)
class ContextData:
    """
    Immutable snapshot of the state carried by a Context. A ContextData cannot be created without an
    execution; even for local executions there should be an execution ID.

    :param: group_data If nested in a group, the current group information.
    :param: task_context The context of the current task execution; this is what is available to the user. It
        is set when the task is executed through `run` methods. If the task is executed as a regular python
        method, this will be None.
    :param: raw_data_path Fallback raw-data location used when no task context supplies one.
    """

    group_data: Optional[GroupData] = None
    task_context: Optional[TaskContext] = None
    raw_data_path: Optional[RawDataPath] = None

    def replace(self, **kwargs) -> ContextData:
        """Return a copy of this snapshot with the given fields overridden."""
        return replace(self, **kwargs)
|
|
35
|
+
|
|
36
|
+
|
|
37
|
+
class Context:
    """
    A context class to hold the current execution context.
    This is not coroutine safe; it assumes that the context is set in a single thread.
    You should use the `contextual_run` function to run a function in a new context tree.

    A context tree is defined as a tree of contexts, where under the root, all coroutines that were started in
    this context tree can access the context mutations, but no coroutine created outside of the context tree
    can access the context mutations.
    """

    def __init__(self, data: ContextData):
        """
        :param data: The immutable ContextData snapshot this context wraps. Must not be None.
        :raises ValueError: If data is None.
        """
        if data is None:
            raise ValueError("Cannot create a new context without contextdata.")
        self._data = data
        self._id = id(self)  # Immutable unique identifier
        self._token = None  # Context variable token to restore the previous context

    @property
    def data(self) -> ContextData:
        """Viewable data."""
        return self._data

    @property
    def raw_data(self) -> RawDataPath:
        """
        Get the raw data prefix for the current context, first by looking up the task context, then the
        raw data path set directly on the context data.

        :raises ValueError: If neither source provides a raw data path.
        """
        if self.data and self.data.task_context and self.data.task_context.raw_data_path:
            return self.data.task_context.raw_data_path
        if self.data and self.data.raw_data_path:
            return self.data.raw_data_path
        raise ValueError("Raw data path has not been set in the context.")

    @property
    def id(self) -> int:
        """Viewable ID."""
        return self._id

    def replace_task_context(self, tctx: TaskContext) -> Context:
        """
        Return a new Context whose data carries the given task context.
        """
        return Context(self.data.replace(task_context=tctx))

    def new_raw_data_path(self, raw_data_path: RawDataPath) -> Context:
        """
        Return a copy of the context with the given raw data path object.
        """
        return Context(self.data.replace(raw_data_path=raw_data_path))

    def get_report(self) -> Optional[Report]:
        """
        Returns a report if within a task context, else None.
        """
        if self.data.task_context:
            return self.data.task_context.report
        return None

    def is_task_context(self) -> bool:
        """
        Returns True if the context is a task context.
        """
        return self.data.task_context is not None

    def __enter__(self):
        """Enter the context, setting it as the current context."""
        self._token = root_context_var.set(self)
        return self

    def __exit__(self, exc_type, exc_val, exc_tb):
        """Exit the context, restoring the previous context."""
        try:
            root_context_var.reset(self._token)
        except Exception as e:
            # Fix: logger.warn is a deprecated alias of logger.warning (deprecated since Python 3.3).
            logger.warning(f"Failed to reset context: {e}")
            raise e

    async def __aenter__(self):
        """Async version of context entry."""
        self._token = root_context_var.set(self)
        return self

    async def __aexit__(self, exc_type, exc_val, exc_tb):
        """Async version of context exit."""
        root_context_var.reset(self._token)

    def __repr__(self):
        return f"{self.data}"

    def __str__(self):
        return self.__repr__()
|
|
131
|
+
|
|
132
|
+
|
|
133
|
+
# Global context variable to hold the current context.
# The default wraps an empty ContextData so internal_ctx() always returns a usable Context,
# even before any run has installed one.
root_context_var = contextvars.ContextVar("root", default=Context(data=ContextData()))
|
|
135
|
+
|
|
136
|
+
|
|
137
|
+
def ctx() -> Optional[TaskContext]:
    """Return the task context of the currently active context, or None when outside a task."""
    current = internal_ctx()
    return current.data.task_context
|
|
140
|
+
|
|
141
|
+
|
|
142
|
+
def internal_ctx() -> Context:
    """Return the Context currently installed in the root context variable."""
    current: Context = root_context_var.get()
    return current
|
|
145
|
+
|
|
146
|
+
|
|
147
|
+
async def contextual_run(func: Callable[P, Awaitable[R]], *args: P.args, **kwargs: P.kwargs) -> R:
    """
    Run *func* within a fresh copy of the current contextvars context, starting a new context subtree.
    """
    # NOTE(review): Context.run only *creates* the coroutine inside the copied context; the awaited
    # body then runs under the caller's task context — confirm this isolation is what is intended.
    isolated = contextvars.copy_context()
    return await isolated.run(func, *args, **kwargs)
|
flyte/_deploy.py
ADDED
|
@@ -0,0 +1,243 @@
|
|
|
1
|
+
from __future__ import annotations
|
|
2
|
+
|
|
3
|
+
import asyncio
|
|
4
|
+
from dataclasses import dataclass
|
|
5
|
+
from typing import TYPE_CHECKING, Dict, List, Optional, Tuple
|
|
6
|
+
|
|
7
|
+
import rich.repr
|
|
8
|
+
|
|
9
|
+
import flyte.errors
|
|
10
|
+
from flyte.models import SerializationContext
|
|
11
|
+
from flyte.syncify import syncify
|
|
12
|
+
|
|
13
|
+
from ._environment import Environment
|
|
14
|
+
from ._image import Image
|
|
15
|
+
from ._initialize import ensure_client, get_client, get_common_config, requires_initialization
|
|
16
|
+
from ._logging import logger
|
|
17
|
+
from ._task import TaskTemplate
|
|
18
|
+
from ._task_environment import TaskEnvironment
|
|
19
|
+
|
|
20
|
+
if TYPE_CHECKING:
|
|
21
|
+
from flyte._protos.workflow import task_definition_pb2
|
|
22
|
+
|
|
23
|
+
from ._code_bundle import CopyFiles
|
|
24
|
+
from ._internal.imagebuild.image_builder import ImageCache
|
|
25
|
+
|
|
26
|
+
|
|
27
|
+
@rich.repr.auto
@dataclass
class DeploymentPlan:
    """
    A plan describing what to deploy: the environments keyed by name, plus an optional explicit version.
    """

    # Environments to deploy, keyed by environment name.
    envs: Dict[str, Environment]
    # Explicit deployment version; when None, a version is computed (e.g. from the code bundle digest).
    version: Optional[str] = None
|
|
32
|
+
|
|
33
|
+
|
|
34
|
+
@rich.repr.auto
@dataclass
class Deployment:
    """Result of applying a deployment plan: the environments plus the task specs that were deployed."""

    envs: Dict[str, Environment]
    deployed_tasks: List[task_definition_pb2.TaskSpec] | None = None

    def summary_repr(self) -> str:
        """
        Returns a summary representation of the deployment.
        """
        env_names = ", ".join(self.envs)
        task_names_versions = ", ".join(
            f"{t.task_template.id.name} (v{t.task_template.id.version})" for t in self.deployed_tasks or []
        )
        return f"Deployment(envs=[{env_names}], tasks=[{task_names_versions}])"

    def task_repr(self) -> List[List[Tuple[str, str]]]:
        """
        Returns a detailed representation of the deployed tasks as (name, version) rows.
        """
        return [
            [
                ("name", t.task_template.id.name),
                ("version", t.task_template.id.version),
            ]
            for t in self.deployed_tasks or []
        ]

    def env_repr(self) -> List[List[Tuple[str, str]]]:
        """
        Returns a detailed representation of the deployed environments as (environment, image) rows.
        """
        rows: List[List[Tuple[str, str]]] = []
        for env_name, env in self.envs.items():
            image = env.image.uri if isinstance(env.image, Image) else env.image or ""
            rows.append(
                [
                    ("environment", env_name),
                    ("image", image),
                ]
            )
        return rows
|
|
78
|
+
|
|
79
|
+
|
|
80
|
+
async def _deploy_task(
    task: TaskTemplate, serialization_context: SerializationContext, dryrun: bool = False
) -> task_definition_pb2.TaskSpec:
    """
    Serialize the given task to its wire format and register it with the task service.

    :param task: The task to deploy.
    :param serialization_context: Serialization settings (version, image cache, ...).
    :param dryrun: When True, only serialize the task; nothing is sent to the service.
    :return: The serialized task spec.
    """
    ensure_client()
    from ._internal.runtime.convert import convert_upload_default_inputs
    from ._internal.runtime.task_serde import translate_task_to_wire
    from ._protos.workflow import task_definition_pb2, task_service_pb2

    image_uri = task.image.uri if isinstance(task.image, Image) else task.image

    if dryrun:
        return translate_task_to_wire(task, serialization_context)

    default_inputs = await convert_upload_default_inputs(task.interface)
    spec = translate_task_to_wire(task, serialization_context, default_inputs=default_inputs)

    msg = f"Deploying task {task.name}, with image {image_uri} version {serialization_context.version}"
    if spec.task_template.HasField("container") and spec.task_template.container.args:
        # NOTE(review): assumes the container args end with "... <module> <flag> <task name>" — confirm.
        msg += f" from {spec.task_template.container.args[-3]}.{spec.task_template.container.args[-1]}"
    logger.info(msg)

    template_id = spec.task_template.id
    task_id = task_definition_pb2.TaskIdentifier(
        org=template_id.org,
        project=template_id.project,
        domain=template_id.domain,
        version=template_id.version,
        name=template_id.name,
    )

    await get_client().task_service.DeployTask(task_service_pb2.DeployTaskRequest(task_id=task_id, spec=spec))
    logger.info(f"Deployed task {task.name} with version {task_id.version}")
    return spec
|
|
114
|
+
|
|
115
|
+
|
|
116
|
+
async def _build_image_bg(env_name: str, image: Image) -> Tuple[str, str]:
    """
    Build *image* and return (environment name, built image URI) so that concurrently
    gathered builds can be matched back to their environments.
    """
    from ._build import build

    logger.info(f"Building image {image.name} for environment {env_name}")
    built_uri = await build.aio(image)
    return env_name, built_uri
|
|
124
|
+
|
|
125
|
+
|
|
126
|
+
async def build_images(deployment: DeploymentPlan) -> ImageCache:
    """
    Build the images for the given deployment plan and return an ImageCache mapping
    image identifiers to the built image URIs.
    """
    from ._internal.imagebuild.image_builder import ImageCache

    build_coros = []
    image_lookup = {}
    for env_name, env in deployment.envs.items():
        if not isinstance(env.image, str):
            # Full Image spec: kick off a build to be awaited concurrently below.
            logger.warning(f"Building Image for environment {env_name}, image: {env.image}")
            build_coros.append(_build_image_bg(env_name, env.image))
        elif env.image == "auto" and "auto" not in image_lookup:
            # NOTE(review): "auto" images are only registered in the lookup here, not built — confirm intended.
            auto_image = Image.auto()
            image_lookup["auto"] = auto_image.uri
    built = await asyncio.gather(*build_coros)

    for env_name, image_uri in built:
        logger.warning(f"Built Image for environment {env_name}, image: {image_uri}")
        env = deployment.envs[env_name]
        if isinstance(env.image, Image):
            image_lookup[env.image.identifier] = env.image.uri

    return ImageCache(image_lookup=image_lookup)
|
|
151
|
+
|
|
152
|
+
|
|
153
|
+
@requires_initialization
async def apply(deployment: DeploymentPlan, copy_style: CopyFiles, dryrun: bool = False) -> Deployment:
    """
    Materialize a deployment plan: build images, bundle code, and deploy every
    task of every environment in the plan.

    :param deployment: The plan produced by ``plan_deploy``.
    :param copy_style: How source files are gathered into the code bundle.
    :param dryrun: When True, specs are produced but nothing is sent to the
        control plane.
    :return: A ``Deployment`` holding the environments and deployed task specs.
    :raises flyte.errors.DeploymentError: if ``copy_style`` is "none" and no
        explicit version was supplied.
    """
    from ._code_bundle import build_code_bundle

    cfg = get_common_config()
    image_cache = await build_images(deployment)

    version = deployment.version
    # Guard clause: with no code bundle there is nothing to derive a version from.
    if copy_style == "none" and not version:
        raise flyte.errors.DeploymentError("Version must be set when copy_style is none")
    code_bundle = await build_code_bundle(from_dir=cfg.root_dir, dryrun=dryrun, copy_style=copy_style)
    version = version or code_bundle.computed_version
    # TODO we should update the version to include the image cache digest and code bundle digest. This is
    # to ensure that changes in image dependencies, cause an update to the deployment version.
    # TODO Also hash the environment and tasks to ensure that changes in the environment or tasks

    sc = SerializationContext(
        project=cfg.project,
        domain=cfg.domain,
        org=cfg.org,
        code_bundle=code_bundle,
        version=version,
        image_cache=image_cache,
        root_dir=cfg.root_dir,
    )

    pending_tasks = []
    for env_name, env in deployment.envs.items():
        logger.info(f"Deploying environment {env_name}")
        # TODO Make this pluggable based on the environment type
        if isinstance(env, TaskEnvironment):
            for task in env.tasks.values():
                pending_tasks.append(_deploy_task(task, dryrun=dryrun, serialization_context=sc))
    return Deployment(envs=deployment.envs, deployed_tasks=await asyncio.gather(*pending_tasks))
|
|
189
|
+
|
|
190
|
+
|
|
191
|
+
def _recursive_discover(
    planned_envs: Dict[str, Environment], envs: Environment | List[Environment]
) -> Dict[str, Environment]:
    """
    Depth-first discovery of environments and their dependencies.

    Visits each environment's ``depends_on`` chain before recording the
    environment itself, and skips anything already present in ``planned_envs``.

    :param planned_envs: Accumulator mapping environment name -> environment;
        updated in place.
    :param envs: One environment or a list of environments to discover.
    :return: The same ``planned_envs`` mapping, for chaining.
    """
    env_list = [envs] if isinstance(envs, Environment) else envs
    for env in env_list:
        # Already discovered via an earlier path — nothing to do.
        if env.name in planned_envs:
            continue
        # Dependencies first, so they are planned before their dependents.
        for dep in env.depends_on:
            _recursive_discover(planned_envs, dep)
        planned_envs[env.name] = env
    return planned_envs
|
|
210
|
+
|
|
211
|
+
|
|
212
|
+
def plan_deploy(*envs: Environment, version: Optional[str] = None) -> DeploymentPlan:
    """
    Discover the given environments (and, recursively, everything they depend
    on) and return the resulting deployment plan.

    :param envs: Environments to plan; dependencies are pulled in automatically.
    :param version: Optional explicit version for the plan.
    :return: A ``DeploymentPlan`` covering all discovered environments.
    """
    # ``envs`` is a *args tuple and can never be None — check for emptiness
    # instead (the previous ``envs is None`` test could never fire).
    if not envs:
        return DeploymentPlan({})
    # Pass the environments as one list: _recursive_discover takes a single
    # ``envs`` argument, so unpacking (*envs) raised TypeError for >1 env.
    planned_envs = _recursive_discover({}, list(envs))
    return DeploymentPlan(planned_envs, version=version)
|
|
217
|
+
|
|
218
|
+
|
|
219
|
+
@syncify
async def deploy(
    *envs: Environment,
    dryrun: bool = False,
    version: str | None = None,
    interactive_mode: bool | None = None,
    copy_style: CopyFiles = "loaded_modules",
) -> Deployment:
    """
    Deploy one or more environments, including everything they depend on.

    :param envs: Environment or list of environments to deploy.
    :param dryrun: If True, the deployment is planned and serialized but not
        applied to the control plane.
    :param version: Explicit deployment version; when omitted it is computed
        from the code bundle.
    :param interactive_mode: Reserved. Forcing it on raises, since interactive
        (e.g. notebook) deployment is not yet implemented.
    :param copy_style: Strategy for collecting source files into the code bundle.
    :return: Deployment object containing the deployed environments and tasks.
    :raises NotImplementedError: if ``interactive_mode`` is truthy.
    """
    if interactive_mode:
        raise NotImplementedError("Interactive mode not yet implemented for deployment")
    plan = plan_deploy(*envs, version=version)
    return await apply(plan, copy_style=copy_style, dryrun=dryrun)
|
flyte/_doc.py
ADDED
|
@@ -0,0 +1,29 @@
|
|
|
1
|
+
import inspect
|
|
2
|
+
from dataclasses import dataclass
|
|
3
|
+
from typing import Callable, Optional
|
|
4
|
+
|
|
5
|
+
|
|
6
|
+
@dataclass
class Documentation:
    """
    This class is used to store the documentation of a task.

    It can be set explicitly or extracted from the docstring of the task.
    """

    # Free-form description of the task; empty string when none is available.
    description: str

    def __help__str__(self):
        """Return the description (legacy helper name, kept for compatibility)."""
        return self.description

    def __str__(self) -> str:
        # ``__help__str__`` appears to be a typo for ``__str__``; also provide
        # the standard protocol method so ``str(doc)`` works as expected.
        return self.description
|
|
18
|
+
|
|
19
|
+
|
|
20
|
+
def extract_docstring(func: Optional[Callable]) -> Documentation:
    """
    Build a ``Documentation`` from a callable's docstring.

    Returns an empty description when ``func`` is None or has no docstring.
    """
    docstring = inspect.getdoc(func) if func is not None else None
    return Documentation(description=docstring or "")
|
flyte/_docstring.py
ADDED
|
@@ -0,0 +1,32 @@
|
|
|
1
|
+
from typing import TYPE_CHECKING, Callable, Dict, Optional
|
|
2
|
+
|
|
3
|
+
if TYPE_CHECKING:
|
|
4
|
+
pass
|
|
5
|
+
|
|
6
|
+
|
|
7
|
+
class Docstring(object):
    """
    Thin wrapper around ``docstring_parser`` exposing the parsed parts of a
    docstring: parameter/return descriptions and the short/long description.
    """

    def __init__(self, docstring: Optional[str] = None, callable_: Optional[Callable] = None):
        """
        :param docstring: Raw docstring text to parse; takes precedence.
        :param callable_: Callable whose ``__doc__`` is parsed when
            ``docstring`` is not given.
        """
        import docstring_parser

        self._parsed_docstring: docstring_parser.Docstring
        # Fall back to parsing an empty docstring so the attribute is always
        # set: previously, a missing docstring together with a missing or
        # undocumented callable either raised AttributeError (on
        # ``None.__doc__``) or left ``_parsed_docstring`` unassigned, crashing
        # every property access later.
        if docstring is None and callable_ is not None:
            docstring = callable_.__doc__
        self._parsed_docstring = docstring_parser.parse(docstring or "")

    @property
    def input_descriptions(self) -> Dict[str, Optional[str]]:
        """Mapping of parameter name -> its documented description."""
        return {p.arg_name: p.description for p in self._parsed_docstring.params}

    @property
    def output_descriptions(self) -> Dict[str, Optional[str]]:
        """Mapping of named return value -> its documented description."""
        return {p.return_name: p.description for p in self._parsed_docstring.many_returns if p.return_name is not None}

    @property
    def short_description(self) -> Optional[str]:
        """The first summary line of the docstring, if any."""
        return self._parsed_docstring.short_description

    @property
    def long_description(self) -> Optional[str]:
        """The extended description following the summary, if any."""
        return self._parsed_docstring.long_description
|