flyte 2.0.0b13__py3-none-any.whl → 2.0.0b30__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- flyte/__init__.py +18 -2
- flyte/_bin/debug.py +38 -0
- flyte/_bin/runtime.py +62 -8
- flyte/_cache/cache.py +4 -2
- flyte/_cache/local_cache.py +216 -0
- flyte/_code_bundle/_ignore.py +12 -4
- flyte/_code_bundle/_packaging.py +13 -9
- flyte/_code_bundle/_utils.py +18 -10
- flyte/_code_bundle/bundle.py +17 -9
- flyte/_constants.py +1 -0
- flyte/_context.py +4 -1
- flyte/_custom_context.py +73 -0
- flyte/_debug/constants.py +38 -0
- flyte/_debug/utils.py +17 -0
- flyte/_debug/vscode.py +307 -0
- flyte/_deploy.py +235 -61
- flyte/_environment.py +20 -6
- flyte/_excepthook.py +1 -1
- flyte/_hash.py +1 -16
- flyte/_image.py +178 -81
- flyte/_initialize.py +132 -51
- flyte/_interface.py +39 -2
- flyte/_internal/controllers/__init__.py +4 -5
- flyte/_internal/controllers/_local_controller.py +70 -29
- flyte/_internal/controllers/_trace.py +1 -1
- flyte/_internal/controllers/remote/__init__.py +0 -2
- flyte/_internal/controllers/remote/_action.py +14 -16
- flyte/_internal/controllers/remote/_client.py +1 -1
- flyte/_internal/controllers/remote/_controller.py +68 -70
- flyte/_internal/controllers/remote/_core.py +127 -99
- flyte/_internal/controllers/remote/_informer.py +19 -10
- flyte/_internal/controllers/remote/_service_protocol.py +7 -7
- flyte/_internal/imagebuild/docker_builder.py +181 -69
- flyte/_internal/imagebuild/image_builder.py +0 -5
- flyte/_internal/imagebuild/remote_builder.py +155 -64
- flyte/_internal/imagebuild/utils.py +51 -2
- flyte/_internal/resolvers/_task_module.py +5 -38
- flyte/_internal/resolvers/default.py +2 -2
- flyte/_internal/runtime/convert.py +110 -21
- flyte/_internal/runtime/entrypoints.py +27 -1
- flyte/_internal/runtime/io.py +21 -8
- flyte/_internal/runtime/resources_serde.py +20 -6
- flyte/_internal/runtime/reuse.py +1 -1
- flyte/_internal/runtime/rusty.py +20 -5
- flyte/_internal/runtime/task_serde.py +34 -19
- flyte/_internal/runtime/taskrunner.py +22 -4
- flyte/_internal/runtime/trigger_serde.py +160 -0
- flyte/_internal/runtime/types_serde.py +1 -1
- flyte/_keyring/__init__.py +0 -0
- flyte/_keyring/file.py +115 -0
- flyte/_logging.py +201 -39
- flyte/_map.py +111 -14
- flyte/_module.py +70 -0
- flyte/_pod.py +4 -3
- flyte/_resources.py +213 -31
- flyte/_run.py +110 -39
- flyte/_task.py +75 -16
- flyte/_task_environment.py +105 -29
- flyte/_task_plugins.py +4 -2
- flyte/_trace.py +5 -0
- flyte/_trigger.py +1000 -0
- flyte/_utils/__init__.py +2 -1
- flyte/_utils/asyn.py +3 -1
- flyte/_utils/coro_management.py +2 -1
- flyte/_utils/docker_credentials.py +173 -0
- flyte/_utils/module_loader.py +17 -2
- flyte/_version.py +3 -3
- flyte/cli/_abort.py +3 -3
- flyte/cli/_build.py +3 -6
- flyte/cli/_common.py +78 -7
- flyte/cli/_create.py +182 -4
- flyte/cli/_delete.py +23 -1
- flyte/cli/_deploy.py +63 -16
- flyte/cli/_get.py +79 -34
- flyte/cli/_params.py +26 -10
- flyte/cli/_plugins.py +209 -0
- flyte/cli/_run.py +151 -26
- flyte/cli/_serve.py +64 -0
- flyte/cli/_update.py +37 -0
- flyte/cli/_user.py +17 -0
- flyte/cli/main.py +30 -4
- flyte/config/_config.py +10 -6
- flyte/config/_internal.py +1 -0
- flyte/config/_reader.py +29 -8
- flyte/connectors/__init__.py +11 -0
- flyte/connectors/_connector.py +270 -0
- flyte/connectors/_server.py +197 -0
- flyte/connectors/utils.py +135 -0
- flyte/errors.py +22 -2
- flyte/extend.py +8 -1
- flyte/extras/_container.py +6 -1
- flyte/git/__init__.py +3 -0
- flyte/git/_config.py +21 -0
- flyte/io/__init__.py +2 -0
- flyte/io/_dataframe/__init__.py +2 -0
- flyte/io/_dataframe/basic_dfs.py +17 -8
- flyte/io/_dataframe/dataframe.py +98 -132
- flyte/io/_dir.py +575 -113
- flyte/io/_file.py +582 -139
- flyte/io/_hashing_io.py +342 -0
- flyte/models.py +74 -15
- flyte/remote/__init__.py +6 -1
- flyte/remote/_action.py +34 -26
- flyte/remote/_client/_protocols.py +39 -4
- flyte/remote/_client/auth/_authenticators/device_code.py +4 -5
- flyte/remote/_client/auth/_authenticators/pkce.py +1 -1
- flyte/remote/_client/auth/_channel.py +10 -6
- flyte/remote/_client/controlplane.py +17 -5
- flyte/remote/_console.py +3 -2
- flyte/remote/_data.py +6 -6
- flyte/remote/_logs.py +3 -3
- flyte/remote/_run.py +64 -8
- flyte/remote/_secret.py +26 -17
- flyte/remote/_task.py +75 -33
- flyte/remote/_trigger.py +306 -0
- flyte/remote/_user.py +33 -0
- flyte/report/_report.py +1 -1
- flyte/storage/__init__.py +6 -1
- flyte/storage/_config.py +5 -1
- flyte/storage/_parallel_reader.py +274 -0
- flyte/storage/_storage.py +200 -103
- flyte/types/__init__.py +16 -0
- flyte/types/_interface.py +2 -2
- flyte/types/_pickle.py +35 -8
- flyte/types/_string_literals.py +8 -9
- flyte/types/_type_engine.py +40 -70
- flyte/types/_utils.py +1 -1
- flyte-2.0.0b30.data/scripts/debug.py +38 -0
- {flyte-2.0.0b13.data → flyte-2.0.0b30.data}/scripts/runtime.py +62 -8
- {flyte-2.0.0b13.dist-info → flyte-2.0.0b30.dist-info}/METADATA +11 -3
- flyte-2.0.0b30.dist-info/RECORD +192 -0
- {flyte-2.0.0b13.dist-info → flyte-2.0.0b30.dist-info}/entry_points.txt +3 -0
- flyte/_protos/common/authorization_pb2.py +0 -66
- flyte/_protos/common/authorization_pb2.pyi +0 -108
- flyte/_protos/common/authorization_pb2_grpc.py +0 -4
- flyte/_protos/common/identifier_pb2.py +0 -93
- flyte/_protos/common/identifier_pb2.pyi +0 -110
- flyte/_protos/common/identifier_pb2_grpc.py +0 -4
- flyte/_protos/common/identity_pb2.py +0 -48
- flyte/_protos/common/identity_pb2.pyi +0 -72
- flyte/_protos/common/identity_pb2_grpc.py +0 -4
- flyte/_protos/common/list_pb2.py +0 -36
- flyte/_protos/common/list_pb2.pyi +0 -71
- flyte/_protos/common/list_pb2_grpc.py +0 -4
- flyte/_protos/common/policy_pb2.py +0 -37
- flyte/_protos/common/policy_pb2.pyi +0 -27
- flyte/_protos/common/policy_pb2_grpc.py +0 -4
- flyte/_protos/common/role_pb2.py +0 -37
- flyte/_protos/common/role_pb2.pyi +0 -53
- flyte/_protos/common/role_pb2_grpc.py +0 -4
- flyte/_protos/common/runtime_version_pb2.py +0 -28
- flyte/_protos/common/runtime_version_pb2.pyi +0 -24
- flyte/_protos/common/runtime_version_pb2_grpc.py +0 -4
- flyte/_protos/imagebuilder/definition_pb2.py +0 -59
- flyte/_protos/imagebuilder/definition_pb2.pyi +0 -140
- flyte/_protos/imagebuilder/definition_pb2_grpc.py +0 -4
- flyte/_protos/imagebuilder/payload_pb2.py +0 -32
- flyte/_protos/imagebuilder/payload_pb2.pyi +0 -21
- flyte/_protos/imagebuilder/payload_pb2_grpc.py +0 -4
- flyte/_protos/imagebuilder/service_pb2.py +0 -29
- flyte/_protos/imagebuilder/service_pb2.pyi +0 -5
- flyte/_protos/imagebuilder/service_pb2_grpc.py +0 -66
- flyte/_protos/logs/dataplane/payload_pb2.py +0 -100
- flyte/_protos/logs/dataplane/payload_pb2.pyi +0 -177
- flyte/_protos/logs/dataplane/payload_pb2_grpc.py +0 -4
- flyte/_protos/secret/definition_pb2.py +0 -49
- flyte/_protos/secret/definition_pb2.pyi +0 -93
- flyte/_protos/secret/definition_pb2_grpc.py +0 -4
- flyte/_protos/secret/payload_pb2.py +0 -62
- flyte/_protos/secret/payload_pb2.pyi +0 -94
- flyte/_protos/secret/payload_pb2_grpc.py +0 -4
- flyte/_protos/secret/secret_pb2.py +0 -38
- flyte/_protos/secret/secret_pb2.pyi +0 -6
- flyte/_protos/secret/secret_pb2_grpc.py +0 -198
- flyte/_protos/secret/secret_pb2_grpc_grpc.py +0 -198
- flyte/_protos/validate/validate/validate_pb2.py +0 -76
- flyte/_protos/workflow/common_pb2.py +0 -27
- flyte/_protos/workflow/common_pb2.pyi +0 -14
- flyte/_protos/workflow/common_pb2_grpc.py +0 -4
- flyte/_protos/workflow/environment_pb2.py +0 -29
- flyte/_protos/workflow/environment_pb2.pyi +0 -12
- flyte/_protos/workflow/environment_pb2_grpc.py +0 -4
- flyte/_protos/workflow/node_execution_service_pb2.py +0 -26
- flyte/_protos/workflow/node_execution_service_pb2.pyi +0 -4
- flyte/_protos/workflow/node_execution_service_pb2_grpc.py +0 -32
- flyte/_protos/workflow/queue_service_pb2.py +0 -109
- flyte/_protos/workflow/queue_service_pb2.pyi +0 -166
- flyte/_protos/workflow/queue_service_pb2_grpc.py +0 -172
- flyte/_protos/workflow/run_definition_pb2.py +0 -121
- flyte/_protos/workflow/run_definition_pb2.pyi +0 -327
- flyte/_protos/workflow/run_definition_pb2_grpc.py +0 -4
- flyte/_protos/workflow/run_logs_service_pb2.py +0 -41
- flyte/_protos/workflow/run_logs_service_pb2.pyi +0 -28
- flyte/_protos/workflow/run_logs_service_pb2_grpc.py +0 -69
- flyte/_protos/workflow/run_service_pb2.py +0 -137
- flyte/_protos/workflow/run_service_pb2.pyi +0 -185
- flyte/_protos/workflow/run_service_pb2_grpc.py +0 -446
- flyte/_protos/workflow/state_service_pb2.py +0 -67
- flyte/_protos/workflow/state_service_pb2.pyi +0 -76
- flyte/_protos/workflow/state_service_pb2_grpc.py +0 -138
- flyte/_protos/workflow/task_definition_pb2.py +0 -79
- flyte/_protos/workflow/task_definition_pb2.pyi +0 -81
- flyte/_protos/workflow/task_definition_pb2_grpc.py +0 -4
- flyte/_protos/workflow/task_service_pb2.py +0 -60
- flyte/_protos/workflow/task_service_pb2.pyi +0 -59
- flyte/_protos/workflow/task_service_pb2_grpc.py +0 -138
- flyte-2.0.0b13.dist-info/RECORD +0 -239
- /flyte/{_protos → _debug}/__init__.py +0 -0
- {flyte-2.0.0b13.dist-info → flyte-2.0.0b30.dist-info}/WHEEL +0 -0
- {flyte-2.0.0b13.dist-info → flyte-2.0.0b30.dist-info}/licenses/LICENSE +0 -0
- {flyte-2.0.0b13.dist-info → flyte-2.0.0b30.dist-info}/top_level.txt +0 -0
flyte/_deploy.py
CHANGED
@@ -1,10 +1,11 @@
 from __future__ import annotations
 
 import asyncio
-import
+import hashlib
 from dataclasses import dataclass
-from typing import TYPE_CHECKING, Dict, List, Optional, Tuple
+from typing import TYPE_CHECKING, Dict, List, Optional, Protocol, Set, Tuple, Type
 
+import cloudpickle
 import rich.repr
 
 import flyte.errors
@@ -13,13 +14,14 @@ from flyte.syncify import syncify
 
 from ._environment import Environment
 from ._image import Image
-from ._initialize import ensure_client, get_client,
+from ._initialize import ensure_client, get_client, get_init_config, requires_initialization
 from ._logging import logger
 from ._task import TaskTemplate
 from ._task_environment import TaskEnvironment
 
 if TYPE_CHECKING:
-    from
+    from flyteidl2.task import task_definition_pb2
+    from flyteidl2.trigger import trigger_definition_pb2
 
     from ._code_bundle import CopyFiles
     from ._internal.imagebuild.image_builder import ImageCache
@@ -34,68 +36,126 @@ class DeploymentPlan:
 
 @rich.repr.auto
 @dataclass
+class DeploymentContext:
+    """
+    Context for deployment operations.
+    """
+
+    environment: Environment | TaskEnvironment
+    serialization_context: SerializationContext
+    dryrun: bool = False
+
+
+@rich.repr.auto
+@dataclass
+class DeployedTask:
+    deployed_task: task_definition_pb2.TaskSpec
+    deployed_triggers: List[trigger_definition_pb2.TaskTrigger]
+
+    def summary_repr(self) -> str:
+        """
+        Returns a summary representation of the deployed task.
+        """
+        return (
+            f"DeployedTask(name={self.deployed_task.task_template.id.name}, "
+            f"version={self.deployed_task.task_template.id.version})"
+        )
+
+    def table_repr(self) -> List[Tuple[str, ...]]:
+        """
+        Returns a table representation of the deployed task.
+        """
+        return [
+            ("name", self.deployed_task.task_template.id.name),
+            ("version", self.deployed_task.task_template.id.version),
+            ("triggers", ",".join([t.name for t in self.deployed_triggers])),
+        ]
+
+
+@rich.repr.auto
+@dataclass
+class DeployedEnv:
+    env: Environment
+    deployed_entities: List[DeployedTask]
+
+    def summary_repr(self) -> str:
+        """
+        Returns a summary representation of the deployment.
+        """
+        entities = ", ".join(f"{e.summary_repr()}" for e in self.deployed_entities or [])
+        return f"Deployment(env=[{self.env.name}], entities=[{entities}])"
+
+    def table_repr(self) -> List[List[Tuple[str, ...]]]:
+        """
+        Returns a detailed representation of the deployed tasks.
+        """
+        tuples = []
+        if self.deployed_entities:
+            for e in self.deployed_entities:
+                tuples.append(e.table_repr())
+        return tuples
+
+    def env_repr(self) -> List[Tuple[str, ...]]:
+        """
+        Returns a detailed representation of the deployed environments.
+        """
+        env = self.env
+        return [
+            ("environment", env.name),
+            ("image", env.image.uri if isinstance(env.image, Image) else env.image or ""),
+        ]
+
+
+@rich.repr.auto
+@dataclass(frozen=True)
 class Deployment:
-    envs: Dict[str,
-    deployed_tasks: List[task_definition_pb2.TaskSpec] | None = None
+    envs: Dict[str, DeployedEnv]
 
     def summary_repr(self) -> str:
         """
         Returns a summary representation of the deployment.
         """
-
-
-            f"{task.task_template.id.name} (v{task.task_template.id.version})" for task in self.deployed_tasks or []
-        )
-        return f"Deployment(envs=[{env_names}], tasks=[{task_names_versions}])"
+        envs = ", ".join(f"{e.summary_repr()}" for e in self.envs.values() or [])
+        return f"Deployment(envs=[{envs}])"
 
-    def
+    def table_repr(self) -> List[List[Tuple[str, ...]]]:
         """
         Returns a detailed representation of the deployed tasks.
         """
         tuples = []
-
-
-            tuples.append(
-                [
-                    ("name", task.task_template.id.name),
-                    ("version", task.task_template.id.version),
-                ]
-            )
+        for d in self.envs.values():
+            tuples.extend(d.table_repr())
         return tuples
 
-    def env_repr(self) -> List[List[Tuple[str,
+    def env_repr(self) -> List[List[Tuple[str, ...]]]:
        """
        Returns a detailed representation of the deployed environments.
        """
        tuples = []
-        for
-            tuples.append(
-                [
-                    ("environment", env_name),
-                    ("image", env.image.uri if isinstance(env.image, Image) else env.image or ""),
-                ]
-            )
+        for d in self.envs.values():
+            tuples.append(d.env_repr())
        return tuples
 
 
 async def _deploy_task(
     task: TaskTemplate, serialization_context: SerializationContext, dryrun: bool = False
-) ->
+) -> DeployedTask:
     """
     Deploy the given task.
     """
     ensure_client()
     import grpc.aio
+    from flyteidl2.task import task_definition_pb2, task_service_pb2
 
     from ._internal.runtime.convert import convert_upload_default_inputs
     from ._internal.runtime.task_serde import translate_task_to_wire
-    from .
+    from ._internal.runtime.trigger_serde import to_task_trigger
 
     image_uri = task.image.uri if isinstance(task.image, Image) else task.image
 
     try:
         if dryrun:
-            return translate_task_to_wire(task, serialization_context)
+            return DeployedTask(translate_task_to_wire(task, serialization_context), [])
 
         default_inputs = await convert_upload_default_inputs(task.interface)
         spec = translate_task_to_wire(task, serialization_context, default_inputs=default_inputs)
@@ -112,15 +172,31 @@ async def _deploy_task(
             name=spec.task_template.id.name,
         )
 
+        deployable_triggers_coros = []
+        for t in task.triggers:
+            inputs = spec.task_template.interface.inputs
+            default_inputs = spec.default_inputs
+            deployable_triggers_coros.append(
+                to_task_trigger(t=t, task_name=task.name, task_inputs=inputs, task_default_inputs=list(default_inputs))
+            )
+
+        deployable_triggers = await asyncio.gather(*deployable_triggers_coros)
        try:
-            await get_client().task_service.DeployTask(
+            await get_client().task_service.DeployTask(
+                task_service_pb2.DeployTaskRequest(
+                    task_id=task_id,
+                    spec=spec,
+                    triggers=deployable_triggers,
+                )
+            )
            logger.info(f"Deployed task {task.name} with version {task_id.version}")
        except grpc.aio.AioRpcError as e:
            if e.code() == grpc.StatusCode.ALREADY_EXISTS:
                logger.info(f"Task {task.name} with image {image_uri} already exists, skipping deployment.")
-                return spec
+                return DeployedTask(spec, deployable_triggers)
            raise
-
+
+        return DeployedTask(spec, deployable_triggers)
    except Exception as e:
        logger.error(f"Failed to deploy task {task.name} with image {image_uri}: {e}")
        raise flyte.errors.DeploymentError(
@@ -138,50 +214,141 @@ async def _build_image_bg(env_name: str, image: Image) -> Tuple[str, str]:
     return env_name, await build.aio(image)
 
 
-async def _build_images(deployment: DeploymentPlan) -> ImageCache:
+async def _build_images(deployment: DeploymentPlan, image_refs: Dict[str, str] | None = None) -> ImageCache:
     """
     Build the images for the given deployment plan and update the environment with the built image.
     """
     from ._internal.imagebuild.image_builder import ImageCache
 
+    if image_refs is None:
+        image_refs = {}
+
     images = []
     image_identifier_map = {}
     for env_name, env in deployment.envs.items():
         if not isinstance(env.image, str):
+            if env.image._ref_name is not None:
+                if env.image._ref_name in image_refs:
+                    # If the image is set in the config, set it as the base_image
+                    image_uri = image_refs[env.image._ref_name]
+                    env.image = env.image.clone(base_image=image_uri)
+                else:
+                    raise ValueError(
+                        f"Image name '{env.image._ref_name}' not found in config. Available: {list(image_refs.keys())}"
+                    )
+                if not env.image._layers:
+                    # No additional layers, use the base_image directly without building
+                    image_identifier_map[env_name] = image_uri
+                    continue
             logger.debug(f"Building Image for environment {env_name}, image: {env.image}")
             images.append(_build_image_bg(env_name, env.image))
 
         elif env.image == "auto" and "auto" not in image_identifier_map:
+            if "default" in image_refs:
+                # If the default image is set through CLI, use it instead
+                image_uri = image_refs["default"]
+                image_identifier_map[env_name] = image_uri
+                continue
             auto_image = Image.from_debian_base()
-
+            images.append(_build_image_bg(env_name, auto_image))
     final_images = await asyncio.gather(*images)
 
     for env_name, image_uri in final_images:
         logger.warning(f"Built Image for environment {env_name}, image: {image_uri}")
-
-        if isinstance(env.image, Image):
-            image_identifier_map[env.image.identifier] = image_uri
+        image_identifier_map[env_name] = image_uri
 
     return ImageCache(image_lookup=image_identifier_map)
 
 
+class Deployer(Protocol):
+    """
+    Protocol for deployment callables.
+    """
+
+    async def __call__(self, context: DeploymentContext) -> DeployedEnv:
+        """
+        Deploy the environment described in the context.
+
+        Args:
+            context: Deployment context containing environment, serialization context, and dryrun flag
+
+        Returns:
+            Deployment result
+        """
+        ...
+
+
+async def _deploy_task_env(context: DeploymentContext) -> DeployedEnv:
+    """
+    Deploy the given task environment.
+    """
+    ensure_client()
+    env = context.environment
+    if not isinstance(env, TaskEnvironment):
+        raise ValueError(f"Expected TaskEnvironment, got {type(env)}")
+
+    task_coros = []
+    for task in env.tasks.values():
+        task_coros.append(_deploy_task(task, context.serialization_context, dryrun=context.dryrun))
+    deployed_task_vals = await asyncio.gather(*task_coros)
+    deployed_tasks = []
+    for t in deployed_task_vals:
+        deployed_tasks.append(t)
+    return DeployedEnv(env=env, deployed_entities=deployed_tasks)
+
+
+_ENVTYPE_REGISTRY: Dict[Type[Environment | TaskEnvironment], Deployer] = {
+    TaskEnvironment: _deploy_task_env,
+}
+
+
+def register_deployer(env_type: Type[Environment | TaskEnvironment], deployer: Deployer) -> None:
+    """
+    Register a deployer for a specific environment type.
+
+    Args:
+        env_type: Type of environment this deployer handles
+        deployer: Deployment callable that conforms to the Deployer protocol
+    """
+    _ENVTYPE_REGISTRY[env_type] = deployer
+
+
+def get_deployer(env_type: Type[Environment | TaskEnvironment]) -> Deployer:
+    """
+    Get the registered deployer for an environment type.
+
+    Args:
+        env_type: Type of environment to get deployer for
+
+    Returns:
+        Deployer for the environment type, defaults to task environment deployer
+    """
+    v = _ENVTYPE_REGISTRY.get(env_type)
+    if v is None:
+        raise ValueError(f"No deployer registered for environment type {env_type}")
+    return v
+
+
 @requires_initialization
 async def apply(deployment_plan: DeploymentPlan, copy_style: CopyFiles, dryrun: bool = False) -> Deployment:
     from ._code_bundle import build_code_bundle
 
-    cfg =
+    cfg = get_init_config()
 
-    image_cache = await _build_images(deployment_plan)
+    image_cache = await _build_images(deployment_plan, cfg.images)
 
-
-    if copy_style == "none" and not version:
+    if copy_style == "none" and not deployment_plan.version:
         raise flyte.errors.DeploymentError("Version must be set when copy_style is none")
     else:
         code_bundle = await build_code_bundle(from_dir=cfg.root_dir, dryrun=dryrun, copy_style=copy_style)
-
-
-
-
+        if deployment_plan.version:
+            version = deployment_plan.version
+        else:
+            h = hashlib.md5()
+            h.update(cloudpickle.dumps(deployment_plan.envs))
+            h.update(code_bundle.computed_version.encode("utf-8"))
+            h.update(cloudpickle.dumps(image_cache))
+            version = h.hexdigest()
 
     sc = SerializationContext(
         project=cfg.project,
@@ -193,15 +360,18 @@ async def apply(deployment_plan: DeploymentPlan, copy_style: CopyFiles, dryrun:
         root_dir=cfg.root_dir,
     )
 
-
-
+    deployment_coros = []
     for env_name, env in deployment_plan.envs.items():
         logger.info(f"Deploying environment {env_name}")
-
-
-
-
-
+        deployer = get_deployer(type(env))
+        context = DeploymentContext(environment=env, serialization_context=sc, dryrun=dryrun)
+        deployment_coros.append(deployer(context))
+    deployed_envs = await asyncio.gather(*deployment_coros)
+    envs = {}
+    for d in deployed_envs:
+        envs[d.env.name] = d
+
+    return Deployment(envs)
 
 
 def _recursive_discover(planned_envs: Dict[str, Environment], env: Environment) -> Dict[str, Environment]:
@@ -209,14 +379,16 @@ def _recursive_discover(planned_envs: Dict[str, Environment], env: Environment)
     Recursively deploy the environment and its dependencies, if not already deployed (present in env_tasks) and
     return the updated env_tasks.
     """
-    # Skip if the environment is already planned
     if env.name in planned_envs:
-
+        if planned_envs[env.name] is not env:
+            # Raise error if different TaskEnvironment objects have the same name
+            raise ValueError(f"Duplicate environment name '{env.name}' found")
+    # Add the environment to the existing envs
+    planned_envs[env.name] = env
+
     # Recursively discover dependent environments
     for dependent_env in env.depends_on:
         _recursive_discover(planned_envs, dependent_env)
-    # Add the environment to the existing envs
-    planned_envs[env.name] = env
     return planned_envs
 
 
@@ -224,10 +396,10 @@ def plan_deploy(*envs: Environment, version: Optional[str] = None) -> List[Deplo
     if envs is None:
         return [DeploymentPlan({})]
     deployment_plans = []
-    visited_envs:
+    visited_envs: Set[str] = set()
     for env in envs:
         if env.name in visited_envs:
-
+            raise ValueError(f"Duplicate environment name '{env.name}' found")
         planned_envs = _recursive_discover({}, env)
         deployment_plans.append(DeploymentPlan(planned_envs, version=version))
         visited_envs.update(planned_envs.keys())
@@ -271,5 +443,7 @@ async def build_images(envs: Environment) -> ImageCache:
     :param envs: Environment to build images for.
     :return: ImageCache containing the built images.
     """
+    cfg = get_init_config()
+    images = cfg.images if cfg else {}
     deployment = plan_deploy(envs)
-    return await _build_images(deployment[0])
+    return await _build_images(deployment[0], images)
flyte/_environment.py
CHANGED
@@ -2,16 +2,14 @@ from __future__ import annotations
 
 import re
 from dataclasses import dataclass, field
-from typing import
+from typing import Any, Dict, List, Literal, Optional, Union
 
 import rich.repr
 
 from ._image import Image
+from ._pod import PodTemplate
 from ._resources import Resources
-from ._secret import SecretRequest
-
-if TYPE_CHECKING:
-    from kubernetes.client import V1PodTemplate
+from ._secret import Secret, SecretRequest
 
 # Global registry to track all Environment instances in load order
 _ENVIRONMENT_REGISTRY: List[Environment] = []
@@ -38,22 +36,37 @@ class Environment:
     :param resources: Resources to allocate for the environment.
     :param env_vars: Environment variables to set for the environment.
     :param secrets: Secrets to inject into the environment.
+    :param pod_template: Pod template to use for the environment.
+    :param description: Description of the environment.
+    :param interruptible: Whether the environment is interruptible and can be scheduled on spot/preemptible instances
     :param depends_on: Environment dependencies to hint, so when you deploy the environment, the dependencies are
        also deployed. This is useful when you have a set of environments that depend on each other.
     """
 
     name: str
     depends_on: List[Environment] = field(default_factory=list)
-    pod_template: Optional[Union[str,
+    pod_template: Optional[Union[str, PodTemplate]] = None
     description: Optional[str] = None
     secrets: Optional[SecretRequest] = None
     env_vars: Optional[Dict[str, str]] = None
     resources: Optional[Resources] = None
+    interruptible: bool = False
     image: Union[str, Image, Literal["auto"]] = "auto"
 
     def __post_init__(self):
         if not is_snake_or_kebab_with_numbers(self.name):
             raise ValueError(f"Environment name '{self.name}' must be in snake_case or kebab-case format.")
+        if not isinstance(self.image, (Image, str)):
+            raise TypeError(f"Expected image to be of type str or Image, got {type(self.image)}")
+        if self.secrets and not isinstance(self.secrets, (str, Secret, List)):
+            raise TypeError(f"Expected secrets to be of type SecretRequest, got {type(self.secrets)}")
+        for dep in self.depends_on:
+            if not isinstance(dep, Environment):
+                raise TypeError(f"Expected depends_on to be of type List[Environment], got {type(dep)}")
+        if self.resources is not None and not isinstance(self.resources, Resources):
+            raise TypeError(f"Expected resources to be of type Resources, got {type(self.resources)}")
+        if self.env_vars is not None and not isinstance(self.env_vars, dict):
+            raise TypeError(f"Expected env_vars to be of type Dict[str, str], got {type(self.env_vars)}")
         # Automatically register this environment instance in load order
         _ENVIRONMENT_REGISTRY.append(self)
 
@@ -78,6 +91,7 @@ class Environment:
         env_vars: Optional[Dict[str, str]] = None,
         secrets: Optional[SecretRequest] = None,
         depends_on: Optional[List[Environment]] = None,
+        description: Optional[str] = None,
         **kwargs: Any,
     ) -> Environment:
         raise NotImplementedError
flyte/_excepthook.py
CHANGED
@@ -33,5 +33,5 @@ def custom_excepthook(exc_type, exc_value, exc_tb):
     filtered_tb = [frame for frame in tb_list if should_include_frame(frame)]
     # Print the filtered version (custom format)
     print("Filtered traceback (most recent call last):")
-
+    traceback.print_list(filtered_tb)
     print(f"{exc_type.__name__}: {exc_value}\n")
flyte/_hash.py
CHANGED
@@ -1,4 +1,4 @@
-from typing import
+from typing import TypeVar
 
 T = TypeVar("T")
 
@@ -6,18 +6,3 @@ T = TypeVar("T")
 class HashOnReferenceMixin(object):
     def __hash__(self):
         return hash(id(self))
-
-
-class HashMethod(Generic[T]):
-    """
-    Flyte-specific object used to wrap the hash function for a specific type
-    """
-
-    def __init__(self, function: Callable[[T], str]):
-        self._function = function
-
-    def calculate(self, obj: T) -> str:
-        """
-        Calculate hash for `obj`.
-        """
-        return self._function(obj)