flyte 2.0.0b21__py3-none-any.whl → 2.0.0b23__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of flyte might be problematic; consult the package registry's advisory page for more details.

Files changed (92)
  1. flyte/__init__.py +5 -0
  2. flyte/_bin/runtime.py +36 -6
  3. flyte/_cache/cache.py +4 -2
  4. flyte/_cache/local_cache.py +215 -0
  5. flyte/_code_bundle/bundle.py +1 -0
  6. flyte/_debug/constants.py +0 -1
  7. flyte/_debug/vscode.py +6 -1
  8. flyte/_deploy.py +204 -55
  9. flyte/_environment.py +5 -0
  10. flyte/_excepthook.py +1 -1
  11. flyte/_image.py +101 -68
  12. flyte/_initialize.py +30 -1
  13. flyte/_interface.py +3 -1
  14. flyte/_internal/controllers/_local_controller.py +64 -24
  15. flyte/_internal/controllers/remote/_action.py +4 -1
  16. flyte/_internal/controllers/remote/_controller.py +5 -2
  17. flyte/_internal/controllers/remote/_core.py +6 -3
  18. flyte/_internal/controllers/remote/_informer.py +1 -1
  19. flyte/_internal/imagebuild/docker_builder.py +95 -28
  20. flyte/_internal/imagebuild/image_builder.py +0 -5
  21. flyte/_internal/imagebuild/remote_builder.py +6 -1
  22. flyte/_internal/runtime/io.py +13 -1
  23. flyte/_internal/runtime/rusty.py +17 -2
  24. flyte/_internal/runtime/task_serde.py +15 -11
  25. flyte/_internal/runtime/taskrunner.py +1 -1
  26. flyte/_internal/runtime/trigger_serde.py +153 -0
  27. flyte/_keyring/file.py +2 -2
  28. flyte/_logging.py +1 -1
  29. flyte/_protos/common/identifier_pb2.py +19 -1
  30. flyte/_protos/common/identifier_pb2.pyi +22 -0
  31. flyte/_protos/workflow/common_pb2.py +14 -3
  32. flyte/_protos/workflow/common_pb2.pyi +49 -0
  33. flyte/_protos/workflow/queue_service_pb2.py +41 -35
  34. flyte/_protos/workflow/queue_service_pb2.pyi +26 -12
  35. flyte/_protos/workflow/queue_service_pb2_grpc.py +34 -0
  36. flyte/_protos/workflow/run_definition_pb2.py +38 -38
  37. flyte/_protos/workflow/run_definition_pb2.pyi +4 -2
  38. flyte/_protos/workflow/run_service_pb2.py +60 -50
  39. flyte/_protos/workflow/run_service_pb2.pyi +24 -6
  40. flyte/_protos/workflow/run_service_pb2_grpc.py +34 -0
  41. flyte/_protos/workflow/task_definition_pb2.py +15 -11
  42. flyte/_protos/workflow/task_definition_pb2.pyi +19 -2
  43. flyte/_protos/workflow/task_service_pb2.py +18 -17
  44. flyte/_protos/workflow/task_service_pb2.pyi +5 -2
  45. flyte/_protos/workflow/trigger_definition_pb2.py +66 -0
  46. flyte/_protos/workflow/trigger_definition_pb2.pyi +117 -0
  47. flyte/_protos/workflow/trigger_definition_pb2_grpc.py +4 -0
  48. flyte/_protos/workflow/trigger_service_pb2.py +96 -0
  49. flyte/_protos/workflow/trigger_service_pb2.pyi +110 -0
  50. flyte/_protos/workflow/trigger_service_pb2_grpc.py +281 -0
  51. flyte/_run.py +42 -15
  52. flyte/_task.py +35 -4
  53. flyte/_task_environment.py +61 -16
  54. flyte/_trigger.py +382 -0
  55. flyte/_version.py +3 -3
  56. flyte/cli/_abort.py +3 -3
  57. flyte/cli/_build.py +1 -3
  58. flyte/cli/_common.py +17 -4
  59. flyte/cli/_create.py +74 -0
  60. flyte/cli/_delete.py +23 -1
  61. flyte/cli/_deploy.py +16 -10
  62. flyte/cli/_get.py +75 -34
  63. flyte/cli/_params.py +4 -2
  64. flyte/cli/_run.py +25 -6
  65. flyte/cli/_update.py +36 -0
  66. flyte/cli/_user.py +17 -0
  67. flyte/cli/main.py +9 -1
  68. flyte/errors.py +9 -0
  69. flyte/io/_dir.py +513 -115
  70. flyte/io/_file.py +495 -135
  71. flyte/models.py +32 -0
  72. flyte/remote/__init__.py +6 -1
  73. flyte/remote/_action.py +9 -8
  74. flyte/remote/_client/_protocols.py +36 -2
  75. flyte/remote/_client/controlplane.py +19 -3
  76. flyte/remote/_run.py +42 -2
  77. flyte/remote/_task.py +14 -1
  78. flyte/remote/_trigger.py +308 -0
  79. flyte/remote/_user.py +33 -0
  80. flyte/storage/__init__.py +6 -1
  81. flyte/storage/_storage.py +119 -101
  82. flyte/types/_pickle.py +34 -7
  83. flyte/types/_type_engine.py +6 -0
  84. {flyte-2.0.0b21.data → flyte-2.0.0b23.data}/scripts/runtime.py +36 -6
  85. {flyte-2.0.0b21.dist-info → flyte-2.0.0b23.dist-info}/METADATA +3 -1
  86. {flyte-2.0.0b21.dist-info → flyte-2.0.0b23.dist-info}/RECORD +91 -79
  87. flyte/_protos/secret/secret_pb2_grpc_grpc.py +0 -198
  88. {flyte-2.0.0b21.data → flyte-2.0.0b23.data}/scripts/debug.py +0 -0
  89. {flyte-2.0.0b21.dist-info → flyte-2.0.0b23.dist-info}/WHEEL +0 -0
  90. {flyte-2.0.0b21.dist-info → flyte-2.0.0b23.dist-info}/entry_points.txt +0 -0
  91. {flyte-2.0.0b21.dist-info → flyte-2.0.0b23.dist-info}/licenses/LICENSE +0 -0
  92. {flyte-2.0.0b21.dist-info → flyte-2.0.0b23.dist-info}/top_level.txt +0 -0
flyte/_deploy.py CHANGED
@@ -1,10 +1,11 @@
1
1
  from __future__ import annotations
2
2
 
3
3
  import asyncio
4
- import typing
4
+ import hashlib
5
5
  from dataclasses import dataclass
6
- from typing import TYPE_CHECKING, Dict, List, Optional, Tuple
6
+ from typing import TYPE_CHECKING, Dict, List, Optional, Protocol, Set, Tuple, Type
7
7
 
8
+ import cloudpickle
8
9
  import rich.repr
9
10
 
10
11
  import flyte.errors
@@ -19,7 +20,7 @@ from ._task import TaskTemplate
19
20
  from ._task_environment import TaskEnvironment
20
21
 
21
22
  if TYPE_CHECKING:
22
- from flyte._protos.workflow import task_definition_pb2
23
+ from flyte._protos.workflow import task_definition_pb2, trigger_definition_pb2
23
24
 
24
25
  from ._code_bundle import CopyFiles
25
26
  from ._internal.imagebuild.image_builder import ImageCache
@@ -34,53 +35,110 @@ class DeploymentPlan:
34
35
 
35
36
  @rich.repr.auto
36
37
  @dataclass
38
+ class DeploymentContext:
39
+ """
40
+ Context for deployment operations.
41
+ """
42
+
43
+ environment: Environment | TaskEnvironment
44
+ serialization_context: SerializationContext
45
+ dryrun: bool = False
46
+
47
+
48
+ @rich.repr.auto
49
+ @dataclass
50
+ class DeployedTask:
51
+ deployed_task: task_definition_pb2.TaskSpec
52
+ deployed_triggers: List[trigger_definition_pb2.TaskTrigger]
53
+
54
+ def summary_repr(self) -> str:
55
+ """
56
+ Returns a summary representation of the deployed task.
57
+ """
58
+ return (
59
+ f"DeployedTask(name={self.deployed_task.task_template.id.name}, "
60
+ f"version={self.deployed_task.task_template.id.version})"
61
+ )
62
+
63
+ def table_repr(self) -> List[Tuple[str, ...]]:
64
+ """
65
+ Returns a table representation of the deployed task.
66
+ """
67
+ return [
68
+ ("name", self.deployed_task.task_template.id.name),
69
+ ("version", self.deployed_task.task_template.id.version),
70
+ ("triggers", ",".join([t.name for t in self.deployed_triggers])),
71
+ ]
72
+
73
+
74
+ @rich.repr.auto
75
+ @dataclass
76
+ class DeployedEnv:
77
+ env: Environment
78
+ deployed_entities: List[DeployedTask]
79
+
80
+ def summary_repr(self) -> str:
81
+ """
82
+ Returns a summary representation of the deployment.
83
+ """
84
+ entities = ", ".join(f"{e.summary_repr()}" for e in self.deployed_entities or [])
85
+ return f"Deployment(env=[{self.env.name}], entities=[{entities}])"
86
+
87
+ def table_repr(self) -> List[List[Tuple[str, ...]]]:
88
+ """
89
+ Returns a detailed representation of the deployed tasks.
90
+ """
91
+ tuples = []
92
+ if self.deployed_entities:
93
+ for e in self.deployed_entities:
94
+ tuples.append(e.table_repr())
95
+ return tuples
96
+
97
+ def env_repr(self) -> List[Tuple[str, ...]]:
98
+ """
99
+ Returns a detailed representation of the deployed environments.
100
+ """
101
+ env = self.env
102
+ return [
103
+ ("environment", env.name),
104
+ ("image", env.image.uri if isinstance(env.image, Image) else env.image or ""),
105
+ ]
106
+
107
+
108
+ @rich.repr.auto
109
+ @dataclass(frozen=True)
37
110
  class Deployment:
38
- envs: Dict[str, Environment]
39
- deployed_tasks: List[task_definition_pb2.TaskSpec] | None = None
111
+ envs: Dict[str, DeployedEnv]
40
112
 
41
113
  def summary_repr(self) -> str:
42
114
  """
43
115
  Returns a summary representation of the deployment.
44
116
  """
45
- env_names = ", ".join(self.envs.keys())
46
- task_names_versions = ", ".join(
47
- f"{task.task_template.id.name} (v{task.task_template.id.version})" for task in self.deployed_tasks or []
48
- )
49
- return f"Deployment(envs=[{env_names}], tasks=[{task_names_versions}])"
117
+ envs = ", ".join(f"{e.summary_repr()}" for e in self.envs.values() or [])
118
+ return f"Deployment(envs=[{envs}])"
50
119
 
51
- def task_repr(self) -> List[List[Tuple[str, str]]]:
120
+ def table_repr(self) -> List[List[Tuple[str, ...]]]:
52
121
  """
53
122
  Returns a detailed representation of the deployed tasks.
54
123
  """
55
124
  tuples = []
56
- if self.deployed_tasks:
57
- for task in self.deployed_tasks:
58
- tuples.append(
59
- [
60
- ("name", task.task_template.id.name),
61
- ("version", task.task_template.id.version),
62
- ]
63
- )
125
+ for d in self.envs.values():
126
+ tuples.extend(d.table_repr())
64
127
  return tuples
65
128
 
66
- def env_repr(self) -> List[List[Tuple[str, str]]]:
129
+ def env_repr(self) -> List[List[Tuple[str, ...]]]:
67
130
  """
68
131
  Returns a detailed representation of the deployed environments.
69
132
  """
70
133
  tuples = []
71
- for env_name, env in self.envs.items():
72
- tuples.append(
73
- [
74
- ("environment", env_name),
75
- ("image", env.image.uri if isinstance(env.image, Image) else env.image or ""),
76
- ]
77
- )
134
+ for d in self.envs.values():
135
+ tuples.append(d.env_repr())
78
136
  return tuples
79
137
 
80
138
 
81
139
  async def _deploy_task(
82
140
  task: TaskTemplate, serialization_context: SerializationContext, dryrun: bool = False
83
- ) -> task_definition_pb2.TaskSpec:
141
+ ) -> DeployedTask:
84
142
  """
85
143
  Deploy the given task.
86
144
  """
@@ -89,13 +147,14 @@ async def _deploy_task(
89
147
 
90
148
  from ._internal.runtime.convert import convert_upload_default_inputs
91
149
  from ._internal.runtime.task_serde import translate_task_to_wire
150
+ from ._internal.runtime.trigger_serde import to_task_trigger
92
151
  from ._protos.workflow import task_definition_pb2, task_service_pb2
93
152
 
94
153
  image_uri = task.image.uri if isinstance(task.image, Image) else task.image
95
154
 
96
155
  try:
97
156
  if dryrun:
98
- return translate_task_to_wire(task, serialization_context)
157
+ return DeployedTask(translate_task_to_wire(task, serialization_context), [])
99
158
 
100
159
  default_inputs = await convert_upload_default_inputs(task.interface)
101
160
  spec = translate_task_to_wire(task, serialization_context, default_inputs=default_inputs)
@@ -112,15 +171,31 @@ async def _deploy_task(
112
171
  name=spec.task_template.id.name,
113
172
  )
114
173
 
174
+ deployable_triggers_coros = []
175
+ for t in task.triggers:
176
+ inputs = spec.task_template.interface.inputs
177
+ default_inputs = spec.default_inputs
178
+ deployable_triggers_coros.append(
179
+ to_task_trigger(t=t, task_name=task.name, task_inputs=inputs, task_default_inputs=list(default_inputs))
180
+ )
181
+
182
+ deployable_triggers = await asyncio.gather(*deployable_triggers_coros)
115
183
  try:
116
- await get_client().task_service.DeployTask(task_service_pb2.DeployTaskRequest(task_id=task_id, spec=spec))
184
+ await get_client().task_service.DeployTask(
185
+ task_service_pb2.DeployTaskRequest(
186
+ task_id=task_id,
187
+ spec=spec,
188
+ triggers=deployable_triggers,
189
+ )
190
+ )
117
191
  logger.info(f"Deployed task {task.name} with version {task_id.version}")
118
192
  except grpc.aio.AioRpcError as e:
119
193
  if e.code() == grpc.StatusCode.ALREADY_EXISTS:
120
194
  logger.info(f"Task {task.name} with image {image_uri} already exists, skipping deployment.")
121
- return spec
195
+ return DeployedTask(spec, deployable_triggers)
122
196
  raise
123
- return spec
197
+
198
+ return DeployedTask(spec, deployable_triggers)
124
199
  except Exception as e:
125
200
  logger.error(f"Failed to deploy task {task.name} with image {image_uri}: {e}")
126
201
  raise flyte.errors.DeploymentError(
@@ -159,14 +234,80 @@ async def _build_images(deployment: DeploymentPlan) -> ImageCache:
159
234
  for env_name, image_uri in final_images:
160
235
  logger.warning(f"Built Image for environment {env_name}, image: {image_uri}")
161
236
  env = deployment.envs[env_name]
162
- if isinstance(env.image, Image):
163
- image_identifier_map[env.image.identifier] = image_uri
164
- elif env.image == "auto":
165
- image_identifier_map["auto"] = image_uri
237
+ image_identifier_map[env_name] = image_uri
166
238
 
167
239
  return ImageCache(image_lookup=image_identifier_map)
168
240
 
169
241
 
242
+ class Deployer(Protocol):
243
+ """
244
+ Protocol for deployment callables.
245
+ """
246
+
247
+ async def __call__(self, context: DeploymentContext) -> DeployedEnv:
248
+ """
249
+ Deploy the environment described in the context.
250
+
251
+ Args:
252
+ context: Deployment context containing environment, serialization context, and dryrun flag
253
+
254
+ Returns:
255
+ Deployment result
256
+ """
257
+ ...
258
+
259
+
260
+ async def _deploy_task_env(context: DeploymentContext) -> DeployedEnv:
261
+ """
262
+ Deploy the given task environment.
263
+ """
264
+ ensure_client()
265
+ env = context.environment
266
+ if not isinstance(env, TaskEnvironment):
267
+ raise ValueError(f"Expected TaskEnvironment, got {type(env)}")
268
+
269
+ task_coros = []
270
+ for task in env.tasks.values():
271
+ task_coros.append(_deploy_task(task, context.serialization_context, dryrun=context.dryrun))
272
+ deployed_task_vals = await asyncio.gather(*task_coros)
273
+ deployed_tasks = []
274
+ for t in deployed_task_vals:
275
+ deployed_tasks.append(t)
276
+ return DeployedEnv(env=env, deployed_entities=deployed_tasks)
277
+
278
+
279
+ _ENVTYPE_REGISTRY: Dict[Type[Environment | TaskEnvironment], Deployer] = {
280
+ TaskEnvironment: _deploy_task_env,
281
+ }
282
+
283
+
284
+ def register_deployer(env_type: Type[Environment | TaskEnvironment], deployer: Deployer) -> None:
285
+ """
286
+ Register a deployer for a specific environment type.
287
+
288
+ Args:
289
+ env_type: Type of environment this deployer handles
290
+ deployer: Deployment callable that conforms to the Deployer protocol
291
+ """
292
+ _ENVTYPE_REGISTRY[env_type] = deployer
293
+
294
+
295
+ def get_deployer(env_type: Type[Environment | TaskEnvironment]) -> Deployer:
296
+ """
297
+ Get the registered deployer for an environment type.
298
+
299
+ Args:
300
+ env_type: Type of environment to get deployer for
301
+
302
+ Returns:
303
+ Deployer for the environment type, defaults to task environment deployer
304
+ """
305
+ v = _ENVTYPE_REGISTRY.get(env_type)
306
+ if v is None:
307
+ raise ValueError(f"No deployer registered for environment type {env_type}")
308
+ return v
309
+
310
+
170
311
  @requires_initialization
171
312
  async def apply(deployment_plan: DeploymentPlan, copy_style: CopyFiles, dryrun: bool = False) -> Deployment:
172
313
  from ._code_bundle import build_code_bundle
@@ -175,15 +316,18 @@ async def apply(deployment_plan: DeploymentPlan, copy_style: CopyFiles, dryrun:
175
316
 
176
317
  image_cache = await _build_images(deployment_plan)
177
318
 
178
- version = deployment_plan.version
179
- if copy_style == "none" and not version:
319
+ if copy_style == "none" and not deployment_plan.version:
180
320
  raise flyte.errors.DeploymentError("Version must be set when copy_style is none")
181
321
  else:
182
322
  code_bundle = await build_code_bundle(from_dir=cfg.root_dir, dryrun=dryrun, copy_style=copy_style)
183
- version = version or code_bundle.computed_version
184
- # TODO we should update the version to include the image cache digest and code bundle digest. This is
185
- # to ensure that changes in image dependencies, cause an update to the deployment version.
186
- # TODO Also hash the environment and tasks to ensure that changes in the environment or tasks
323
+ if deployment_plan.version:
324
+ version = deployment_plan.version
325
+ else:
326
+ h = hashlib.md5()
327
+ h.update(cloudpickle.dumps(deployment_plan.envs))
328
+ h.update(code_bundle.computed_version.encode("utf-8"))
329
+ h.update(cloudpickle.dumps(image_cache))
330
+ version = h.hexdigest()
187
331
 
188
332
  sc = SerializationContext(
189
333
  project=cfg.project,
@@ -195,15 +339,18 @@ async def apply(deployment_plan: DeploymentPlan, copy_style: CopyFiles, dryrun:
195
339
  root_dir=cfg.root_dir,
196
340
  )
197
341
 
198
- tasks = []
199
-
342
+ deployment_coros = []
200
343
  for env_name, env in deployment_plan.envs.items():
201
344
  logger.info(f"Deploying environment {env_name}")
202
- # TODO Make this pluggable based on the environment type
203
- if isinstance(env, TaskEnvironment):
204
- for task in env.tasks.values():
205
- tasks.append(_deploy_task(task, dryrun=dryrun, serialization_context=sc))
206
- return Deployment(envs=deployment_plan.envs, deployed_tasks=await asyncio.gather(*tasks))
345
+ deployer = get_deployer(type(env))
346
+ context = DeploymentContext(environment=env, serialization_context=sc, dryrun=dryrun)
347
+ deployment_coros.append(deployer(context))
348
+ deployed_envs = await asyncio.gather(*deployment_coros)
349
+ envs = {}
350
+ for d in deployed_envs:
351
+ envs[d.env.name] = d
352
+
353
+ return Deployment(envs)
207
354
 
208
355
 
209
356
  def _recursive_discover(planned_envs: Dict[str, Environment], env: Environment) -> Dict[str, Environment]:
@@ -211,14 +358,16 @@ def _recursive_discover(planned_envs: Dict[str, Environment], env: Environment)
211
358
  Recursively deploy the environment and its dependencies, if not already deployed (present in env_tasks) and
212
359
  return the updated env_tasks.
213
360
  """
214
- # Skip if the environment is already planned
215
361
  if env.name in planned_envs:
216
- return planned_envs
362
+ if planned_envs[env.name] is not env:
363
+ # Raise error if different TaskEnvironment objects have the same name
364
+ raise ValueError(f"Duplicate environment name '{env.name}' found")
365
+ # Add the environment to the existing envs
366
+ planned_envs[env.name] = env
367
+
217
368
  # Recursively discover dependent environments
218
369
  for dependent_env in env.depends_on:
219
370
  _recursive_discover(planned_envs, dependent_env)
220
- # Add the environment to the existing envs
221
- planned_envs[env.name] = env
222
371
  return planned_envs
223
372
 
224
373
 
@@ -226,10 +375,10 @@ def plan_deploy(*envs: Environment, version: Optional[str] = None) -> List[Deplo
226
375
  if envs is None:
227
376
  return [DeploymentPlan({})]
228
377
  deployment_plans = []
229
- visited_envs: typing.Set[str] = set()
378
+ visited_envs: Set[str] = set()
230
379
  for env in envs:
231
380
  if env.name in visited_envs:
232
- continue
381
+ raise ValueError(f"Duplicate environment name '{env.name}' found")
233
382
  planned_envs = _recursive_discover({}, env)
234
383
  deployment_plans.append(DeploymentPlan(planned_envs, version=version))
235
384
  visited_envs.update(planned_envs.keys())
flyte/_environment.py CHANGED
@@ -36,6 +36,9 @@ class Environment:
36
36
  :param resources: Resources to allocate for the environment.
37
37
  :param env_vars: Environment variables to set for the environment.
38
38
  :param secrets: Secrets to inject into the environment.
39
+ :param pod_template: Pod template to use for the environment.
40
+ :param description: Description of the environment.
41
+ :param interruptible: Whether the environment is interruptible and can be scheduled on spot/preemptible instances
39
42
  :param depends_on: Environment dependencies to hint, so when you deploy the environment, the dependencies are
40
43
  also deployed. This is useful when you have a set of environments that depend on each other.
41
44
  """
@@ -47,6 +50,7 @@ class Environment:
47
50
  secrets: Optional[SecretRequest] = None
48
51
  env_vars: Optional[Dict[str, str]] = None
49
52
  resources: Optional[Resources] = None
53
+ interruptible: bool = False
50
54
  image: Union[str, Image, Literal["auto"]] = "auto"
51
55
 
52
56
  def __post_init__(self):
@@ -87,6 +91,7 @@ class Environment:
87
91
  env_vars: Optional[Dict[str, str]] = None,
88
92
  secrets: Optional[SecretRequest] = None,
89
93
  depends_on: Optional[List[Environment]] = None,
94
+ description: Optional[str] = None,
90
95
  **kwargs: Any,
91
96
  ) -> Environment:
92
97
  raise NotImplementedError
flyte/_excepthook.py CHANGED
@@ -33,5 +33,5 @@ def custom_excepthook(exc_type, exc_value, exc_tb):
33
33
  filtered_tb = [frame for frame in tb_list if should_include_frame(frame)]
34
34
  # Print the filtered version (custom format)
35
35
  print("Filtered traceback (most recent call last):")
36
- print("".join(traceback.format_list(filtered_tb)))
36
+ traceback.print_tb(filtered_tb)
37
37
  print(f"{exc_type.__name__}: {exc_value}\n")