flyte 0.2.0b10__py3-none-any.whl → 0.2.0b12__py3-none-any.whl
This diff shows the contents of publicly available package versions that have been released to one of the supported registries. It is provided for informational purposes only and reflects the changes between the package versions as they appear in their respective public registries.
Potentially problematic release — this version of flyte may have issues; click here for more details.
- flyte/__init__.py +2 -0
- flyte/_bin/runtime.py +17 -5
- flyte/_deploy.py +29 -0
- flyte/_initialize.py +21 -6
- flyte/_internal/controllers/_local_controller.py +2 -1
- flyte/_internal/controllers/_trace.py +1 -0
- flyte/_internal/controllers/remote/_action.py +1 -1
- flyte/_internal/controllers/remote/_informer.py +1 -1
- flyte/_internal/runtime/convert.py +7 -4
- flyte/_internal/runtime/task_serde.py +80 -10
- flyte/_internal/runtime/taskrunner.py +1 -1
- flyte/_logging.py +1 -1
- flyte/_pod.py +19 -0
- flyte/_protos/common/list_pb2.py +3 -3
- flyte/_protos/common/list_pb2.pyi +2 -0
- flyte/_protos/workflow/environment_pb2.py +29 -0
- flyte/_protos/workflow/environment_pb2.pyi +12 -0
- flyte/_protos/workflow/environment_pb2_grpc.py +4 -0
- flyte/_protos/workflow/run_definition_pb2.py +61 -61
- flyte/_protos/workflow/run_definition_pb2.pyi +4 -2
- flyte/_protos/workflow/run_service_pb2.py +20 -24
- flyte/_protos/workflow/run_service_pb2.pyi +2 -6
- flyte/_protos/workflow/task_definition_pb2.py +28 -22
- flyte/_protos/workflow/task_definition_pb2.pyi +16 -4
- flyte/_protos/workflow/task_service_pb2.py +27 -11
- flyte/_protos/workflow/task_service_pb2.pyi +29 -1
- flyte/_protos/workflow/task_service_pb2_grpc.py +34 -0
- flyte/_task.py +2 -13
- flyte/_trace.py +0 -2
- flyte/_utils/__init__.py +4 -0
- flyte/_utils/org_discovery.py +57 -0
- flyte/_version.py +2 -2
- flyte/cli/_abort.py +4 -2
- flyte/cli/_common.py +10 -4
- flyte/cli/_create.py +17 -8
- flyte/cli/_deploy.py +14 -7
- flyte/cli/_get.py +11 -10
- flyte/cli/_params.py +1 -1
- flyte/cli/_run.py +1 -1
- flyte/cli/main.py +3 -7
- flyte/errors.py +11 -0
- flyte/extras/_container.py +0 -7
- flyte/remote/__init__.py +2 -1
- flyte/remote/_client/_protocols.py +2 -0
- flyte/remote/_data.py +2 -1
- flyte/remote/_task.py +141 -9
- flyte/syncify/_api.py +0 -1
- flyte/types/_type_engine.py +3 -1
- {flyte-0.2.0b10.dist-info → flyte-0.2.0b12.dist-info}/METADATA +1 -1
- {flyte-0.2.0b10.dist-info → flyte-0.2.0b12.dist-info}/RECORD +53 -48
- {flyte-0.2.0b10.dist-info → flyte-0.2.0b12.dist-info}/WHEEL +0 -0
- {flyte-0.2.0b10.dist-info → flyte-0.2.0b12.dist-info}/entry_points.txt +0 -0
- {flyte-0.2.0b10.dist-info → flyte-0.2.0b12.dist-info}/top_level.txt +0 -0
flyte/__init__.py
CHANGED
|
@@ -25,6 +25,7 @@ __all__ = [
|
|
|
25
25
|
"Device",
|
|
26
26
|
"Environment",
|
|
27
27
|
"Image",
|
|
28
|
+
"PodTemplate",
|
|
28
29
|
"Resources",
|
|
29
30
|
"RetryStrategy",
|
|
30
31
|
"ReusePolicy",
|
|
@@ -53,6 +54,7 @@ from ._group import group
|
|
|
53
54
|
from ._image import Image
|
|
54
55
|
from ._initialize import init, init_from_config
|
|
55
56
|
from ._map import map
|
|
57
|
+
from ._pod import PodTemplate
|
|
56
58
|
from ._resources import GPU, TPU, Device, Resources
|
|
57
59
|
from ._retry import RetryStrategy
|
|
58
60
|
from ._reusable_environment import ReusePolicy
|
flyte/_bin/runtime.py
CHANGED
|
@@ -76,13 +76,17 @@ def main(
|
|
|
76
76
|
):
|
|
77
77
|
sys.path.insert(0, ".")
|
|
78
78
|
|
|
79
|
+
import flyte
|
|
79
80
|
import flyte._utils as utils
|
|
80
|
-
from flyte._initialize import
|
|
81
|
+
from flyte._initialize import init
|
|
81
82
|
from flyte._internal.controllers import create_controller
|
|
82
83
|
from flyte._internal.imagebuild.image_builder import ImageCache
|
|
83
84
|
from flyte._internal.runtime.entrypoints import load_and_run_task
|
|
85
|
+
from flyte._logging import logger
|
|
84
86
|
from flyte.models import ActionID, Checkpoints, CodeBundle, RawDataPath
|
|
85
87
|
|
|
88
|
+
logger.info(f"Initializing flyte runtime - version {flyte.__version__}")
|
|
89
|
+
|
|
86
90
|
assert org, "Org is required for now"
|
|
87
91
|
assert project, "Project is required"
|
|
88
92
|
assert domain, "Domain is required"
|
|
@@ -98,18 +102,26 @@ def main(
|
|
|
98
102
|
# This detection of api key is a hack for now.
|
|
99
103
|
controller_kwargs: dict[str, Any] = {"insecure": False}
|
|
100
104
|
if api_key := os.getenv(_UNION_EAGER_API_KEY_ENV_VAR):
|
|
101
|
-
|
|
102
|
-
|
|
103
|
-
logger.warning(f"Using api key {api_key}")
|
|
105
|
+
logger.info("Using api key from environment")
|
|
104
106
|
controller_kwargs["api_key"] = api_key
|
|
105
107
|
else:
|
|
106
108
|
ep = os.environ.get(ENDPOINT_OVERRIDE, "host.docker.internal:8090")
|
|
107
109
|
controller_kwargs["endpoint"] = ep
|
|
108
110
|
if "localhost" in ep or "docker" in ep:
|
|
109
111
|
controller_kwargs["insecure"] = True
|
|
112
|
+
logger.debug(f"Using controller endpoint: {ep} with kwargs: {controller_kwargs}")
|
|
110
113
|
|
|
111
114
|
bundle = CodeBundle(tgz=tgz, pkl=pkl, destination=dest, computed_version=version)
|
|
112
|
-
|
|
115
|
+
# We init regular client here so that reference tasks can work
|
|
116
|
+
# Current reference tasks will not work with remote controller, because we create 2 different
|
|
117
|
+
# channels on different threads and this is not supported by grpcio or the auth system. It ends up leading
|
|
118
|
+
# File "src/python/grpcio/grpc/_cython/_cygrpc/aio/completion_queue.pyx.pxi", line 147,
|
|
119
|
+
# in grpc._cython.cygrpc.PollerCompletionQueue._handle_events
|
|
120
|
+
# BlockingIOError: [Errno 11] Resource temporarily unavailable
|
|
121
|
+
# init(org=org, project=project, domain=domain, **controller_kwargs)
|
|
122
|
+
# TODO solution is to use a single channel for both controller and reference tasks, but this requires a refactor
|
|
123
|
+
init()
|
|
124
|
+
# Controller is created with the same kwargs as init, so that it can be used to run tasks
|
|
113
125
|
controller = create_controller(ct="remote", **controller_kwargs)
|
|
114
126
|
|
|
115
127
|
ic = ImageCache.from_transport(image_cache) if image_cache else None
|
flyte/_deploy.py
CHANGED
|
@@ -46,6 +46,35 @@ class Deployment:
|
|
|
46
46
|
)
|
|
47
47
|
return f"Deployment(envs=[{env_names}], tasks=[{task_names_versions}])"
|
|
48
48
|
|
|
49
|
+
def task_repr(self) -> List[List[Tuple[str, str]]]:
|
|
50
|
+
"""
|
|
51
|
+
Returns a detailed representation of the deployed tasks.
|
|
52
|
+
"""
|
|
53
|
+
tuples = []
|
|
54
|
+
if self.deployed_tasks:
|
|
55
|
+
for task in self.deployed_tasks:
|
|
56
|
+
tuples.append(
|
|
57
|
+
[
|
|
58
|
+
("name", task.task_template.id.name),
|
|
59
|
+
("version", task.task_template.id.version),
|
|
60
|
+
]
|
|
61
|
+
)
|
|
62
|
+
return tuples
|
|
63
|
+
|
|
64
|
+
def env_repr(self) -> List[List[Tuple[str, str]]]:
|
|
65
|
+
"""
|
|
66
|
+
Returns a detailed representation of the deployed environments.
|
|
67
|
+
"""
|
|
68
|
+
tuples = []
|
|
69
|
+
for env_name, env in self.envs.items():
|
|
70
|
+
tuples.append(
|
|
71
|
+
[
|
|
72
|
+
("environment", env_name),
|
|
73
|
+
("image", env.image.uri if isinstance(env.image, Image) else env.image or ""),
|
|
74
|
+
]
|
|
75
|
+
)
|
|
76
|
+
return tuples
|
|
77
|
+
|
|
49
78
|
|
|
50
79
|
async def _deploy_task(
|
|
51
80
|
task: TaskTemplate, serialization_context: SerializationContext, dryrun: bool = False
|
flyte/_initialize.py
CHANGED
|
@@ -32,6 +32,7 @@ class CommonInit:
|
|
|
32
32
|
org: str | None = None
|
|
33
33
|
project: str | None = None
|
|
34
34
|
domain: str | None = None
|
|
35
|
+
batch_size: int = 1000
|
|
35
36
|
|
|
36
37
|
|
|
37
38
|
@dataclass(init=True, kw_only=True, repr=True, eq=True, frozen=True)
|
|
@@ -130,6 +131,7 @@ async def init(
|
|
|
130
131
|
rpc_retries: int = 3,
|
|
131
132
|
http_proxy_url: str | None = None,
|
|
132
133
|
storage: Storage | None = None,
|
|
134
|
+
batch_size: int = 1000,
|
|
133
135
|
) -> None:
|
|
134
136
|
"""
|
|
135
137
|
Initialize the Flyte system with the given configuration. This method should be called before any other Flyte
|
|
@@ -162,11 +164,12 @@ async def init(
|
|
|
162
164
|
:param insecure: insecure flag for the client
|
|
163
165
|
:param storage: Optional blob store (S3, GCS, Azure) configuration if needed to access (i.e. using Minio)
|
|
164
166
|
:param org: Optional organization override for the client. Should be set by auth instead.
|
|
165
|
-
:param
|
|
167
|
+
:param batch_size: Optional batch size for operations that use listings, defaults to 1000, so limit larger than
|
|
168
|
+
batch_size will be split into multiple requests.
|
|
166
169
|
|
|
167
170
|
:return: None
|
|
168
171
|
"""
|
|
169
|
-
from flyte._utils import get_cwd_editable_install
|
|
172
|
+
from flyte._utils import get_cwd_editable_install, org_from_endpoint, sanitize_endpoint
|
|
170
173
|
|
|
171
174
|
interactive_mode = ipython_check()
|
|
172
175
|
|
|
@@ -176,8 +179,7 @@ async def init(
|
|
|
176
179
|
|
|
177
180
|
global _init_config # noqa: PLW0603
|
|
178
181
|
|
|
179
|
-
|
|
180
|
-
endpoint = f"dns:///{endpoint}"
|
|
182
|
+
endpoint = sanitize_endpoint(endpoint)
|
|
181
183
|
|
|
182
184
|
with _init_lock:
|
|
183
185
|
client = None
|
|
@@ -206,12 +208,15 @@ async def init(
|
|
|
206
208
|
domain=domain,
|
|
207
209
|
client=client,
|
|
208
210
|
storage=storage,
|
|
209
|
-
org=org,
|
|
211
|
+
org=org or org_from_endpoint(endpoint),
|
|
212
|
+
batch_size=batch_size,
|
|
210
213
|
)
|
|
211
214
|
|
|
212
215
|
|
|
213
216
|
@syncify
|
|
214
|
-
async def init_from_config(
|
|
217
|
+
async def init_from_config(
|
|
218
|
+
path_or_config: str | Config | None = None, root_dir: Path | None = None, log_level: int | None = None
|
|
219
|
+
) -> None:
|
|
215
220
|
"""
|
|
216
221
|
Initialize the Flyte system using a configuration file or Config object. This method should be called before any
|
|
217
222
|
other Flyte remote API methods are called. Thread-safe implementation.
|
|
@@ -221,6 +226,8 @@ async def init_from_config(path_or_config: str | Config | None = None, root_dir:
|
|
|
221
226
|
files like config etc. For example if one uses the copy-style=="all", it is essential to determine the
|
|
222
227
|
root directory for the current project. If not provided, it defaults to the editable install directory or
|
|
223
228
|
if not available, the current working directory.
|
|
229
|
+
:param log_level: Optional logging level for the framework logger,
|
|
230
|
+
default is set using the default initialization policies
|
|
224
231
|
:return: None
|
|
225
232
|
"""
|
|
226
233
|
import flyte.config as config
|
|
@@ -228,6 +235,13 @@ async def init_from_config(path_or_config: str | Config | None = None, root_dir:
|
|
|
228
235
|
cfg: config.Config
|
|
229
236
|
if path_or_config is None or isinstance(path_or_config, str):
|
|
230
237
|
# If a string is passed, treat it as a path to the config file
|
|
238
|
+
if path_or_config:
|
|
239
|
+
if not Path(path_or_config).exists():
|
|
240
|
+
raise InitializationError(
|
|
241
|
+
"ConfigFileNotFoundError",
|
|
242
|
+
"user",
|
|
243
|
+
f"Configuration file '{path_or_config}' does not exist., current working directory is {Path.cwd()}",
|
|
244
|
+
)
|
|
231
245
|
if root_dir and path_or_config:
|
|
232
246
|
cfg = config.auto(str(root_dir / path_or_config))
|
|
233
247
|
else:
|
|
@@ -251,6 +265,7 @@ async def init_from_config(path_or_config: str | Config | None = None, root_dir:
|
|
|
251
265
|
client_id=cfg.platform.client_id,
|
|
252
266
|
client_credentials_secret=cfg.platform.client_credentials_secret,
|
|
253
267
|
root_dir=root_dir,
|
|
268
|
+
log_level=log_level,
|
|
254
269
|
)
|
|
255
270
|
|
|
256
271
|
|
|
@@ -162,6 +162,7 @@ class LocalController:
|
|
|
162
162
|
action=action_id,
|
|
163
163
|
interface=_interface,
|
|
164
164
|
inputs_path=action_output_path,
|
|
165
|
+
name=_func.__name__,
|
|
165
166
|
),
|
|
166
167
|
True,
|
|
167
168
|
)
|
|
@@ -179,7 +180,7 @@ class LocalController:
|
|
|
179
180
|
|
|
180
181
|
if info.interface.outputs and info.output:
|
|
181
182
|
# If the result is not an AsyncGenerator, convert it directly
|
|
182
|
-
converted_outputs = await convert.convert_from_native_to_outputs(info.output, info.interface)
|
|
183
|
+
converted_outputs = await convert.convert_from_native_to_outputs(info.output, info.interface, info.name)
|
|
183
184
|
assert converted_outputs
|
|
184
185
|
elif info.error:
|
|
185
186
|
# If there is an error, convert it to a native error
|
|
@@ -130,7 +130,7 @@ class Action:
|
|
|
130
130
|
"""
|
|
131
131
|
from flyte._logging import logger
|
|
132
132
|
|
|
133
|
-
logger.
|
|
133
|
+
logger.debug(f"In Action from_state {obj.action_id} {obj.phase} {obj.output_uri}")
|
|
134
134
|
return cls(
|
|
135
135
|
action_id=obj.action_id,
|
|
136
136
|
parent_action_name=parent_action_name,
|
|
@@ -235,7 +235,7 @@ class Informer:
|
|
|
235
235
|
await self._shared_queue.put(node)
|
|
236
236
|
# hack to work in the absence of sentinel
|
|
237
237
|
except asyncio.CancelledError:
|
|
238
|
-
logger.
|
|
238
|
+
logger.info(f"Watch cancelled: {self.name}")
|
|
239
239
|
return
|
|
240
240
|
except asyncio.TimeoutError as e:
|
|
241
241
|
logger.error(f"Watch timeout: {self.name}", exc_info=e)
|
|
@@ -11,7 +11,7 @@ import flyte.errors
|
|
|
11
11
|
import flyte.storage as storage
|
|
12
12
|
from flyte._protos.workflow import run_definition_pb2, task_definition_pb2
|
|
13
13
|
from flyte.models import ActionID, NativeInterface, TaskContext
|
|
14
|
-
from flyte.types import TypeEngine
|
|
14
|
+
from flyte.types import TypeEngine, TypeTransformerFailedError
|
|
15
15
|
|
|
16
16
|
|
|
17
17
|
@dataclass(frozen=True)
|
|
@@ -80,7 +80,7 @@ async def convert_from_native_to_inputs(interface: NativeInterface, *args, **kwa
|
|
|
80
80
|
)
|
|
81
81
|
|
|
82
82
|
|
|
83
|
-
async def convert_from_native_to_outputs(o: Any, interface: NativeInterface) -> Outputs:
|
|
83
|
+
async def convert_from_native_to_outputs(o: Any, interface: NativeInterface, task_name: str = "") -> Outputs:
|
|
84
84
|
# Always make it a tuple even if it's just one item to simplify logic below
|
|
85
85
|
if not isinstance(o, tuple):
|
|
86
86
|
o = (o,)
|
|
@@ -90,8 +90,11 @@ async def convert_from_native_to_outputs(o: Any, interface: NativeInterface) ->
|
|
|
90
90
|
)
|
|
91
91
|
named = []
|
|
92
92
|
for (output_name, python_type), v in zip(interface.outputs.items(), o):
|
|
93
|
-
|
|
94
|
-
|
|
93
|
+
try:
|
|
94
|
+
lit = await TypeEngine.to_literal(v, python_type, TypeEngine.to_literal_type(python_type))
|
|
95
|
+
named.append(run_definition_pb2.NamedLiteral(name=output_name, value=lit))
|
|
96
|
+
except TypeTransformerFailedError as e:
|
|
97
|
+
raise flyte.errors.RuntimeDataValidationError(output_name, e, task_name)
|
|
95
98
|
|
|
96
99
|
return Outputs(proto_outputs=run_definition_pb2.Outputs(literals=named))
|
|
97
100
|
|
|
@@ -3,9 +3,11 @@ This module provides functionality to serialize and deserialize tasks to and fro
|
|
|
3
3
|
It includes a Resolver interface for loading tasks, and functions to load classes and tasks.
|
|
4
4
|
"""
|
|
5
5
|
|
|
6
|
+
import copy
|
|
6
7
|
import importlib
|
|
8
|
+
import typing
|
|
7
9
|
from datetime import timedelta
|
|
8
|
-
from typing import Optional, Type
|
|
10
|
+
from typing import Optional, Type, cast
|
|
9
11
|
|
|
10
12
|
from flyteidl.core import identifier_pb2, literals_pb2, security_pb2, tasks_pb2
|
|
11
13
|
from google.protobuf import duration_pb2, wrappers_pb2
|
|
@@ -13,6 +15,7 @@ from google.protobuf import duration_pb2, wrappers_pb2
|
|
|
13
15
|
import flyte.errors
|
|
14
16
|
from flyte._cache.cache import VersionParameters, cache_from_request
|
|
15
17
|
from flyte._logging import logger
|
|
18
|
+
from flyte._pod import _PRIMARY_CONTAINER_NAME_FIELD, PodTemplate
|
|
16
19
|
from flyte._protos.workflow import task_definition_pb2
|
|
17
20
|
from flyte._secret import SecretRequest, secrets_from_request
|
|
18
21
|
from flyte._task import AsyncFunctionTaskTemplate, TaskTemplate
|
|
@@ -121,17 +124,18 @@ def get_proto_task(task: TaskTemplate, serialize_context: SerializationContext)
|
|
|
121
124
|
# if task.parent_env is None:
|
|
122
125
|
# raise ValueError(f"Task {task.name} must have a parent environment")
|
|
123
126
|
|
|
124
|
-
#
|
|
125
|
-
|
|
126
|
-
#
|
|
127
|
-
container = _get_urun_container(serialize_context, task)
|
|
127
|
+
# TODO Add support for SQL, extra_config, custom
|
|
128
|
+
extra_config: typing.Dict[str, str] = {}
|
|
129
|
+
custom = {} # type: ignore
|
|
128
130
|
|
|
129
|
-
# TODO Add support for SQL, Pod, extra_config, custom
|
|
130
|
-
pod = None
|
|
131
131
|
sql = None
|
|
132
|
-
|
|
133
|
-
|
|
134
|
-
|
|
132
|
+
if task.pod_template and not isinstance(task.pod_template, str):
|
|
133
|
+
container = None
|
|
134
|
+
pod = _get_k8s_pod(_get_urun_container(serialize_context, task), task.pod_template)
|
|
135
|
+
extra_config[_PRIMARY_CONTAINER_NAME_FIELD] = task.pod_template.primary_container_name
|
|
136
|
+
else:
|
|
137
|
+
container = _get_urun_container(serialize_context, task)
|
|
138
|
+
pod = None
|
|
135
139
|
|
|
136
140
|
# -------------- CACHE HANDLING ----------------------
|
|
137
141
|
task_cache = cache_from_request(task.cache)
|
|
@@ -210,6 +214,72 @@ def _get_urun_container(
|
|
|
210
214
|
)
|
|
211
215
|
|
|
212
216
|
|
|
217
|
+
def _sanitize_resource_name(resource: tasks_pb2.Resources.ResourceEntry) -> str:
    """Map a proto resource entry to its kubernetes-style name, e.g. ``EPHEMERAL_STORAGE`` -> ``ephemeral-storage``."""
    enum_label = tasks_pb2.Resources.ResourceName.Name(resource.name)
    return enum_label.replace("_", "-").lower()
|
|
219
|
+
|
|
220
|
+
|
|
221
|
+
def _get_k8s_pod(primary_container: tasks_pb2.Container, pod_template: PodTemplate) -> Optional[tasks_pb2.K8sPod]:
    """
    Build the K8sPod proto for a task by merging its computed container into a pod template.

    The container in the pod spec whose name matches
    ``pod_template.primary_container_name`` is overwritten with values derived
    from ``primary_container``: the image (only when the pod spec leaves it
    unset), command, args, non-empty resource limits/requests, and env
    (unioned, with the derived env vars first). The template is deep-copied,
    so the caller's object is never mutated.

    :param primary_container: Container proto computed for the task; source of
        image/command/args/resources/env for the primary container.
    :param pod_template: User-supplied pod template to merge into.
    :return: K8sPod proto with the serialized pod spec and labels/annotations metadata.
    :raises ValueError: If no container in the pod spec matches the primary container name.
    """
    # Lazy import: kubernetes is only needed when a pod template is actually used.
    from kubernetes.client import ApiClient, V1PodSpec
    from kubernetes.client.models import V1EnvVar, V1ResourceRequirements

    # Deep copy so the user's PodTemplate instance is left untouched.
    pod_template = copy.deepcopy(pod_template)
    containers = cast(V1PodSpec, pod_template.pod_spec).containers
    primary_exists = False

    for container in containers:
        if container.name == pod_template.primary_container_name:
            primary_exists = True
            break

    if not primary_exists:
        raise ValueError(
            "No primary container defined in the pod spec."
            f" You must define a primary container with the name '{pod_template.primary_container_name}'."
        )
    final_containers = []

    for container in containers:
        # We overwrite the primary container attributes with the values given to ContainerTask.
        # The attributes include: image, command, args, resource, and env (env is unioned)

        if container.name == pod_template.primary_container_name:
            if container.image is None:
                # Copy the image from primary_container only if the image is not specified in the pod spec.
                container.image = primary_container.image

            # Command and args always come from the computed container.
            container.command = list(primary_container.command)
            container.args = list(primary_container.args)

            limits, requests = {}, {}
            for resource in primary_container.resources.limits:
                limits[_sanitize_resource_name(resource)] = resource.value
            for resource in primary_container.resources.requests:
                requests[_sanitize_resource_name(resource)] = resource.value

            resource_requirements = V1ResourceRequirements(limits=limits, requests=requests)
            if len(limits) > 0 or len(requests) > 0:
                # Important! Only copy over resource requirements if they are non-empty.
                container.resources = resource_requirements

            if primary_container.env is not None:
                # Union env vars: derived ones first, then whatever the pod spec declared.
                container.env = [V1EnvVar(name=e.key, value=e.value) for e in primary_container.env] + (
                    container.env or []
                )

        final_containers.append(container)

    cast(V1PodSpec, pod_template.pod_spec).containers = final_containers
    # Convert the kubernetes model into a plain dict the proto can carry.
    pod_spec = ApiClient().sanitize_for_serialization(pod_template.pod_spec)

    metadata = tasks_pb2.K8sObjectMetadata(labels=pod_template.labels, annotations=pod_template.annotations)
    return tasks_pb2.K8sPod(pod_spec=pod_spec, metadata=metadata)
|
|
281
|
+
|
|
282
|
+
|
|
213
283
|
def extract_code_bundle(task_spec: task_definition_pb2.TaskSpec) -> Optional[CodeBundle]:
|
|
214
284
|
"""
|
|
215
285
|
Extract the code bundle from the task spec.
|
|
@@ -145,7 +145,7 @@ async def convert_and_run(
|
|
|
145
145
|
return None, convert_from_native_to_error(err)
|
|
146
146
|
if task.report:
|
|
147
147
|
await flyte.report.flush.aio()
|
|
148
|
-
return await convert_from_native_to_outputs(out, task.native_interface), None
|
|
148
|
+
return await convert_from_native_to_outputs(out, task.native_interface, task.name), None
|
|
149
149
|
|
|
150
150
|
|
|
151
151
|
async def extract_download_run_upload(
|
flyte/_logging.py
CHANGED
flyte/_pod.py
ADDED
|
@@ -0,0 +1,19 @@
|
|
|
1
|
+
from dataclasses import dataclass, field
from typing import TYPE_CHECKING, Dict, Optional

if TYPE_CHECKING:
    from kubernetes.client import V1PodSpec


# Key under which the primary container name is recorded in a task's extra config.
_PRIMARY_CONTAINER_NAME_FIELD = "primary_container_name"
# Default name for the container that runs the task code.
_PRIMARY_CONTAINER_DEFAULT_NAME = "primary"


def _default_pod_spec() -> "V1PodSpec":
    """Build an empty V1PodSpec, importing kubernetes lazily.

    The top-level ``V1PodSpec`` import is guarded by TYPE_CHECKING, so
    referencing it directly in a ``default_factory`` would raise NameError at
    runtime; importing inside the factory defers the dependency until a
    default pod spec is actually needed.
    """
    from kubernetes.client import V1PodSpec

    # NOTE(review): V1PodSpec's model marks `containers` as required — confirm
    # an argument-less V1PodSpec() is acceptable here or pass containers=[].
    return V1PodSpec()


@dataclass(init=True, repr=True, eq=True, frozen=False)
class PodTemplate(object):
    """Custom PodTemplate specification for a Task."""

    # Full kubernetes pod spec; defaults to an empty V1PodSpec built on first use.
    pod_spec: Optional["V1PodSpec"] = field(default_factory=_default_pod_spec)
    # Name of the container within the pod spec that runs the task code.
    primary_container_name: str = _PRIMARY_CONTAINER_DEFAULT_NAME
    # Optional kubernetes labels applied to the pod metadata.
    labels: Optional[Dict[str, str]] = None
    # Optional kubernetes annotations applied to the pod metadata.
    annotations: Optional[Dict[str, str]] = None
|
flyte/_protos/common/list_pb2.py
CHANGED
|
@@ -13,7 +13,7 @@ _sym_db = _symbol_database.Default()
|
|
|
13
13
|
|
|
14
14
|
|
|
15
15
|
|
|
16
|
-
DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x11\x63ommon/list.proto\x12\x0f\x63loudidl.common\"\x83\x01\n\x04Sort\x12\x10\n\x03key\x18\x01 \x01(\tR\x03key\x12=\n\tdirection\x18\x02 \x01(\x0e\x32\x1f.cloudidl.common.Sort.DirectionR\tdirection\"*\n\tDirection\x12\x0e\n\nDESCENDING\x10\x00\x12\r\n\tASCENDING\x10\x01\"\xfe\x01\n\x0bListRequest\x12\x14\n\x05limit\x18\x01 \x01(\rR\x05limit\x12\x14\n\x05token\x18\x02 \x01(\tR\x05token\x12\x32\n\x07sort_by\x18\x03 \x01(\x0b\x32\x15.cloudidl.common.SortB\x02\x18\x01R\x06sortBy\x12\x31\n\x07\x66ilters\x18\x04 \x03(\x0b\x32\x17.cloudidl.common.FilterR\x07\x66ilters\x12\x1f\n\x0braw_filters\x18\x05 \x03(\tR\nrawFilters\x12;\n\x0esort_by_fields\x18\x06 \x03(\x0b\x32\x15.cloudidl.common.SortR\x0csortByFields\"\
|
|
16
|
+
DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x11\x63ommon/list.proto\x12\x0f\x63loudidl.common\"\x83\x01\n\x04Sort\x12\x10\n\x03key\x18\x01 \x01(\tR\x03key\x12=\n\tdirection\x18\x02 \x01(\x0e\x32\x1f.cloudidl.common.Sort.DirectionR\tdirection\"*\n\tDirection\x12\x0e\n\nDESCENDING\x10\x00\x12\r\n\tASCENDING\x10\x01\"\xfe\x01\n\x0bListRequest\x12\x14\n\x05limit\x18\x01 \x01(\rR\x05limit\x12\x14\n\x05token\x18\x02 \x01(\tR\x05token\x12\x32\n\x07sort_by\x18\x03 \x01(\x0b\x32\x15.cloudidl.common.SortB\x02\x18\x01R\x06sortBy\x12\x31\n\x07\x66ilters\x18\x04 \x03(\x0b\x32\x17.cloudidl.common.FilterR\x07\x66ilters\x12\x1f\n\x0braw_filters\x18\x05 \x03(\tR\nrawFilters\x12;\n\x0esort_by_fields\x18\x06 \x03(\x0b\x32\x15.cloudidl.common.SortR\x0csortByFields\"\xcc\x02\n\x06\x46ilter\x12<\n\x08\x66unction\x18\x01 \x01(\x0e\x32 .cloudidl.common.Filter.FunctionR\x08\x66unction\x12\x14\n\x05\x66ield\x18\x02 \x01(\tR\x05\x66ield\x12\x16\n\x06values\x18\x03 \x03(\tR\x06values\"\xd5\x01\n\x08\x46unction\x12\t\n\x05\x45QUAL\x10\x00\x12\r\n\tNOT_EQUAL\x10\x01\x12\x10\n\x0cGREATER_THAN\x10\x02\x12\x19\n\x15GREATER_THAN_OR_EQUAL\x10\x03\x12\r\n\tLESS_THAN\x10\x04\x12\x16\n\x12LESS_THAN_OR_EQUAL\x10\x05\x12\x0c\n\x08\x43ONTAINS\x10\x06\x12\x0c\n\x08VALUE_IN\x10\x07\x12\r\n\tENDS_WITH\x10\x0c\x12\x11\n\rNOT_ENDS_WITH\x10\r\x12\x1d\n\x19\x43ONTAINS_CASE_INSENSITIVE\x10\x0e\x42\xaa\x01\n\x13\x63om.cloudidl.commonB\tListProtoH\x02P\x01Z)github.com/unionai/cloud/gen/pb-go/common\xa2\x02\x03\x43\x43X\xaa\x02\x0f\x43loudidl.Common\xca\x02\x0f\x43loudidl\\Common\xe2\x02\x1b\x43loudidl\\Common\\GPBMetadata\xea\x02\x10\x43loudidl::Commonb\x06proto3')
|
|
17
17
|
|
|
18
18
|
_globals = globals()
|
|
19
19
|
_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals)
|
|
@@ -30,7 +30,7 @@ if _descriptor._USE_C_DESCRIPTORS == False:
|
|
|
30
30
|
_globals['_LISTREQUEST']._serialized_start=173
|
|
31
31
|
_globals['_LISTREQUEST']._serialized_end=427
|
|
32
32
|
_globals['_FILTER']._serialized_start=430
|
|
33
|
-
_globals['_FILTER']._serialized_end=
|
|
33
|
+
_globals['_FILTER']._serialized_end=762
|
|
34
34
|
_globals['_FILTER_FUNCTION']._serialized_start=549
|
|
35
|
-
_globals['_FILTER_FUNCTION']._serialized_end=
|
|
35
|
+
_globals['_FILTER_FUNCTION']._serialized_end=762
|
|
36
36
|
# @@protoc_insertion_point(module_scope)
|
|
@@ -50,6 +50,7 @@ class Filter(_message.Message):
|
|
|
50
50
|
VALUE_IN: _ClassVar[Filter.Function]
|
|
51
51
|
ENDS_WITH: _ClassVar[Filter.Function]
|
|
52
52
|
NOT_ENDS_WITH: _ClassVar[Filter.Function]
|
|
53
|
+
CONTAINS_CASE_INSENSITIVE: _ClassVar[Filter.Function]
|
|
53
54
|
EQUAL: Filter.Function
|
|
54
55
|
NOT_EQUAL: Filter.Function
|
|
55
56
|
GREATER_THAN: Filter.Function
|
|
@@ -60,6 +61,7 @@ class Filter(_message.Message):
|
|
|
60
61
|
VALUE_IN: Filter.Function
|
|
61
62
|
ENDS_WITH: Filter.Function
|
|
62
63
|
NOT_ENDS_WITH: Filter.Function
|
|
64
|
+
CONTAINS_CASE_INSENSITIVE: Filter.Function
|
|
63
65
|
FUNCTION_FIELD_NUMBER: _ClassVar[int]
|
|
64
66
|
FIELD_FIELD_NUMBER: _ClassVar[int]
|
|
65
67
|
VALUES_FIELD_NUMBER: _ClassVar[int]
|
|
@@ -0,0 +1,29 @@
|
|
|
1
|
+
# -*- coding: utf-8 -*-
# Generated by the protocol buffer compiler. DO NOT EDIT!
# source: workflow/environment.proto
"""Generated protocol buffer code."""
from google.protobuf import descriptor as _descriptor
from google.protobuf import descriptor_pool as _descriptor_pool
from google.protobuf import symbol_database as _symbol_database
from google.protobuf.internal import builder as _builder
# @@protoc_insertion_point(imports)

_sym_db = _symbol_database.Default()


# Pulls in the validate extension descriptors referenced by the serialized file below.
from flyte._protos.validate.validate import validate_pb2 as validate_dot_validate__pb2


DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x1aworkflow/environment.proto\x12\x11\x63loudidl.workflow\x1a\x17validate/validate.proto\",\n\x0b\x45nvironment\x12\x1d\n\x04name\x18\x01 \x01(\tB\t\xfa\x42\x06r\x04\x10\x01\x18?R\x04nameB\xbd\x01\n\x15\x63om.cloudidl.workflowB\x10\x45nvironmentProtoH\x02P\x01Z+github.com/unionai/cloud/gen/pb-go/workflow\xa2\x02\x03\x43WX\xaa\x02\x11\x43loudidl.Workflow\xca\x02\x11\x43loudidl\\Workflow\xe2\x02\x1d\x43loudidl\\Workflow\\GPBMetadata\xea\x02\x12\x43loudidl::Workflowb\x06proto3')

_globals = globals()
_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals)
_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'workflow.environment_pb2', _globals)
# Pure-Python descriptor path only: attach serialized options directly.
if _descriptor._USE_C_DESCRIPTORS == False:
  DESCRIPTOR._options = None
  DESCRIPTOR._serialized_options = b'\n\025com.cloudidl.workflowB\020EnvironmentProtoH\002P\001Z+github.com/unionai/cloud/gen/pb-go/workflow\242\002\003CWX\252\002\021Cloudidl.Workflow\312\002\021Cloudidl\\Workflow\342\002\035Cloudidl\\Workflow\\GPBMetadata\352\002\022Cloudidl::Workflow'
  _ENVIRONMENT.fields_by_name['name']._options = None
  _ENVIRONMENT.fields_by_name['name']._serialized_options = b'\372B\006r\004\020\001\030?'
  _globals['_ENVIRONMENT']._serialized_start=74
  _globals['_ENVIRONMENT']._serialized_end=118
# @@protoc_insertion_point(module_scope)
|
|
@@ -0,0 +1,12 @@
|
|
|
1
|
+
# Typing stub for the generated workflow environment protobuf module.
# Generated alongside environment_pb2.py — do not hand-edit; regenerate from the .proto instead.
from flyte._protos.validate.validate import validate_pb2 as _validate_pb2
from google.protobuf import descriptor as _descriptor
from google.protobuf import message as _message
from typing import ClassVar as _ClassVar, Optional as _Optional

DESCRIPTOR: _descriptor.FileDescriptor

class Environment(_message.Message):
    __slots__ = ["name"]
    NAME_FIELD_NUMBER: _ClassVar[int]
    name: str
    def __init__(self, name: _Optional[str] = ...) -> None: ...
|