flyte 2.0.0b9__py3-none-any.whl → 2.0.0b14__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release: this version of flyte might be problematic.

Files changed (60)
  1. flyte/__init__.py +55 -31
  2. flyte/_bin/debug.py +38 -0
  3. flyte/_bin/runtime.py +13 -0
  4. flyte/_code_bundle/_utils.py +2 -0
  5. flyte/_code_bundle/bundle.py +4 -4
  6. flyte/_context.py +1 -1
  7. flyte/_debug/__init__.py +0 -0
  8. flyte/_debug/constants.py +39 -0
  9. flyte/_debug/utils.py +17 -0
  10. flyte/_debug/vscode.py +300 -0
  11. flyte/_environment.py +5 -5
  12. flyte/_image.py +34 -19
  13. flyte/_initialize.py +15 -29
  14. flyte/_internal/controllers/remote/_action.py +2 -2
  15. flyte/_internal/controllers/remote/_controller.py +1 -1
  16. flyte/_internal/imagebuild/docker_builder.py +11 -15
  17. flyte/_internal/imagebuild/remote_builder.py +71 -22
  18. flyte/_internal/runtime/entrypoints.py +3 -0
  19. flyte/_internal/runtime/reuse.py +7 -3
  20. flyte/_internal/runtime/task_serde.py +4 -3
  21. flyte/_internal/runtime/taskrunner.py +9 -3
  22. flyte/_logging.py +5 -2
  23. flyte/_protos/common/identifier_pb2.py +25 -19
  24. flyte/_protos/common/identifier_pb2.pyi +10 -0
  25. flyte/_protos/imagebuilder/definition_pb2.py +32 -31
  26. flyte/_protos/imagebuilder/definition_pb2.pyi +25 -12
  27. flyte/_protos/workflow/queue_service_pb2.py +24 -24
  28. flyte/_protos/workflow/queue_service_pb2.pyi +6 -6
  29. flyte/_protos/workflow/run_definition_pb2.py +48 -48
  30. flyte/_protos/workflow/run_definition_pb2.pyi +20 -10
  31. flyte/_reusable_environment.py +41 -19
  32. flyte/_run.py +9 -9
  33. flyte/_secret.py +9 -5
  34. flyte/_task.py +16 -11
  35. flyte/_task_environment.py +11 -13
  36. flyte/_tools.py +0 -13
  37. flyte/_version.py +16 -3
  38. flyte/cli/_build.py +2 -3
  39. flyte/cli/_common.py +16 -5
  40. flyte/cli/_gen.py +10 -1
  41. flyte/cli/_get.py +16 -14
  42. flyte/cli/_run.py +258 -25
  43. flyte/models.py +9 -0
  44. flyte/remote/_client/auth/_authenticators/base.py +8 -2
  45. flyte/remote/_client/auth/_authenticators/device_code.py +1 -1
  46. flyte/remote/_client/auth/_authenticators/pkce.py +1 -1
  47. flyte/remote/_client/auth/_channel.py +0 -6
  48. flyte/remote/_client/auth/_client_config.py +4 -2
  49. flyte/remote/_client/controlplane.py +14 -0
  50. flyte/remote/_task.py +18 -4
  51. flyte/storage/_storage.py +83 -7
  52. flyte/types/_type_engine.py +3 -33
  53. flyte-2.0.0b14.data/scripts/debug.py +38 -0
  54. {flyte-2.0.0b9.data → flyte-2.0.0b14.data}/scripts/runtime.py +13 -0
  55. {flyte-2.0.0b9.dist-info → flyte-2.0.0b14.dist-info}/METADATA +2 -2
  56. {flyte-2.0.0b9.dist-info → flyte-2.0.0b14.dist-info}/RECORD +60 -54
  57. {flyte-2.0.0b9.dist-info → flyte-2.0.0b14.dist-info}/WHEEL +0 -0
  58. {flyte-2.0.0b9.dist-info → flyte-2.0.0b14.dist-info}/entry_points.txt +0 -0
  59. {flyte-2.0.0b9.dist-info → flyte-2.0.0b14.dist-info}/licenses/LICENSE +0 -0
  60. {flyte-2.0.0b9.dist-info → flyte-2.0.0b14.dist-info}/top_level.txt +0 -0
flyte/_protos/workflow/run_definition_pb2.pyi CHANGED
@@ -69,18 +69,20 @@ class Envs(_message.Message):
  def __init__(self, values: _Optional[_Iterable[_Union[_literals_pb2.KeyValuePair, _Mapping]]] = ...) -> None: ...

  class RunSpec(_message.Message):
- __slots__ = ["labels", "annotations", "envs", "interruptible", "overwrite_cache"]
+ __slots__ = ["labels", "annotations", "envs", "interruptible", "overwrite_cache", "cluster"]
  LABELS_FIELD_NUMBER: _ClassVar[int]
  ANNOTATIONS_FIELD_NUMBER: _ClassVar[int]
  ENVS_FIELD_NUMBER: _ClassVar[int]
  INTERRUPTIBLE_FIELD_NUMBER: _ClassVar[int]
  OVERWRITE_CACHE_FIELD_NUMBER: _ClassVar[int]
+ CLUSTER_FIELD_NUMBER: _ClassVar[int]
  labels: Labels
  annotations: Annotations
  envs: Envs
  interruptible: _wrappers_pb2.BoolValue
  overwrite_cache: bool
- def __init__(self, labels: _Optional[_Union[Labels, _Mapping]] = ..., annotations: _Optional[_Union[Annotations, _Mapping]] = ..., envs: _Optional[_Union[Envs, _Mapping]] = ..., interruptible: _Optional[_Union[_wrappers_pb2.BoolValue, _Mapping]] = ..., overwrite_cache: bool = ...) -> None: ...
+ cluster: str
+ def __init__(self, labels: _Optional[_Union[Labels, _Mapping]] = ..., annotations: _Optional[_Union[Annotations, _Mapping]] = ..., envs: _Optional[_Union[Envs, _Mapping]] = ..., interruptible: _Optional[_Union[_wrappers_pb2.BoolValue, _Mapping]] = ..., overwrite_cache: bool = ..., cluster: _Optional[str] = ...) -> None: ...

  class Run(_message.Message):
  __slots__ = ["action"]
@@ -140,16 +142,18 @@ class ActionMetadata(_message.Message):
  def __init__(self, parent: _Optional[str] = ..., group: _Optional[str] = ..., executed_by: _Optional[_Union[_identity_pb2.EnrichedIdentity, _Mapping]] = ..., task: _Optional[_Union[TaskActionMetadata, _Mapping]] = ..., trace: _Optional[_Union[TraceActionMetadata, _Mapping]] = ..., condition: _Optional[_Union[ConditionActionMetadata, _Mapping]] = ...) -> None: ...

  class ActionStatus(_message.Message):
- __slots__ = ["phase", "start_time", "end_time", "attempts"]
+ __slots__ = ["phase", "start_time", "end_time", "attempts", "cache_status"]
  PHASE_FIELD_NUMBER: _ClassVar[int]
  START_TIME_FIELD_NUMBER: _ClassVar[int]
  END_TIME_FIELD_NUMBER: _ClassVar[int]
  ATTEMPTS_FIELD_NUMBER: _ClassVar[int]
+ CACHE_STATUS_FIELD_NUMBER: _ClassVar[int]
  phase: Phase
  start_time: _timestamp_pb2.Timestamp
  end_time: _timestamp_pb2.Timestamp
  attempts: int
- def __init__(self, phase: _Optional[_Union[Phase, str]] = ..., start_time: _Optional[_Union[_timestamp_pb2.Timestamp, _Mapping]] = ..., end_time: _Optional[_Union[_timestamp_pb2.Timestamp, _Mapping]] = ..., attempts: _Optional[int] = ...) -> None: ...
+ cache_status: _catalog_pb2.CatalogCacheStatus
+ def __init__(self, phase: _Optional[_Union[Phase, str]] = ..., start_time: _Optional[_Union[_timestamp_pb2.Timestamp, _Mapping]] = ..., end_time: _Optional[_Union[_timestamp_pb2.Timestamp, _Mapping]] = ..., attempts: _Optional[int] = ..., cache_status: _Optional[_Union[_catalog_pb2.CatalogCacheStatus, str]] = ...) -> None: ...

  class Action(_message.Message):
  __slots__ = ["id", "metadata", "status"]
@@ -221,7 +225,7 @@ class ActionDetails(_message.Message):
  def __init__(self, id: _Optional[_Union[_identifier_pb2.ActionIdentifier, _Mapping]] = ..., metadata: _Optional[_Union[ActionMetadata, _Mapping]] = ..., status: _Optional[_Union[ActionStatus, _Mapping]] = ..., error_info: _Optional[_Union[ErrorInfo, _Mapping]] = ..., abort_info: _Optional[_Union[AbortInfo, _Mapping]] = ..., resolved_task_spec: _Optional[_Union[_task_definition_pb2.TaskSpec, _Mapping]] = ..., attempts: _Optional[_Iterable[_Union[ActionAttempt, _Mapping]]] = ...) -> None: ...

  class ActionAttempt(_message.Message):
- __slots__ = ["phase", "start_time", "end_time", "error_info", "attempt", "log_info", "outputs", "logs_available", "cache_status", "cluster_events", "phase_transitions", "cluster"]
+ __slots__ = ["phase", "start_time", "end_time", "error_info", "attempt", "log_info", "outputs", "logs_available", "cache_status", "cluster_events", "phase_transitions", "cluster", "log_context"]
  PHASE_FIELD_NUMBER: _ClassVar[int]
  START_TIME_FIELD_NUMBER: _ClassVar[int]
  END_TIME_FIELD_NUMBER: _ClassVar[int]
@@ -234,6 +238,7 @@ class ActionAttempt(_message.Message):
  CLUSTER_EVENTS_FIELD_NUMBER: _ClassVar[int]
  PHASE_TRANSITIONS_FIELD_NUMBER: _ClassVar[int]
  CLUSTER_FIELD_NUMBER: _ClassVar[int]
+ LOG_CONTEXT_FIELD_NUMBER: _ClassVar[int]
  phase: Phase
  start_time: _timestamp_pb2.Timestamp
  end_time: _timestamp_pb2.Timestamp
@@ -246,7 +251,8 @@ class ActionAttempt(_message.Message):
  cluster_events: _containers.RepeatedCompositeFieldContainer[ClusterEvent]
  phase_transitions: _containers.RepeatedCompositeFieldContainer[PhaseTransition]
  cluster: str
- def __init__(self, phase: _Optional[_Union[Phase, str]] = ..., start_time: _Optional[_Union[_timestamp_pb2.Timestamp, _Mapping]] = ..., end_time: _Optional[_Union[_timestamp_pb2.Timestamp, _Mapping]] = ..., error_info: _Optional[_Union[ErrorInfo, _Mapping]] = ..., attempt: _Optional[int] = ..., log_info: _Optional[_Iterable[_Union[_execution_pb2.TaskLog, _Mapping]]] = ..., outputs: _Optional[_Union[OutputReferences, _Mapping]] = ..., logs_available: bool = ..., cache_status: _Optional[_Union[_catalog_pb2.CatalogCacheStatus, str]] = ..., cluster_events: _Optional[_Iterable[_Union[ClusterEvent, _Mapping]]] = ..., phase_transitions: _Optional[_Iterable[_Union[PhaseTransition, _Mapping]]] = ..., cluster: _Optional[str] = ...) -> None: ...
+ log_context: _execution_pb2.LogContext
+ def __init__(self, phase: _Optional[_Union[Phase, str]] = ..., start_time: _Optional[_Union[_timestamp_pb2.Timestamp, _Mapping]] = ..., end_time: _Optional[_Union[_timestamp_pb2.Timestamp, _Mapping]] = ..., error_info: _Optional[_Union[ErrorInfo, _Mapping]] = ..., attempt: _Optional[int] = ..., log_info: _Optional[_Iterable[_Union[_execution_pb2.TaskLog, _Mapping]]] = ..., outputs: _Optional[_Union[OutputReferences, _Mapping]] = ..., logs_available: bool = ..., cache_status: _Optional[_Union[_catalog_pb2.CatalogCacheStatus, str]] = ..., cluster_events: _Optional[_Iterable[_Union[ClusterEvent, _Mapping]]] = ..., phase_transitions: _Optional[_Iterable[_Union[PhaseTransition, _Mapping]]] = ..., cluster: _Optional[str] = ..., log_context: _Optional[_Union[_execution_pb2.LogContext, _Mapping]] = ...) -> None: ...

  class ClusterEvent(_message.Message):
  __slots__ = ["occurred_at", "message"]
@@ -267,7 +273,7 @@ class PhaseTransition(_message.Message):
  def __init__(self, phase: _Optional[_Union[Phase, str]] = ..., start_time: _Optional[_Union[_timestamp_pb2.Timestamp, _Mapping]] = ..., end_time: _Optional[_Union[_timestamp_pb2.Timestamp, _Mapping]] = ...) -> None: ...

  class ActionEvent(_message.Message):
- __slots__ = ["id", "attempt", "phase", "version", "start_time", "updated_time", "end_time", "error_info", "log_info", "log_context", "cluster", "outputs", "cache_status", "cluster_events"]
+ __slots__ = ["id", "attempt", "phase", "version", "start_time", "updated_time", "end_time", "error_info", "log_info", "log_context", "cluster", "outputs", "cache_status", "cluster_events", "reported_time"]
  ID_FIELD_NUMBER: _ClassVar[int]
  ATTEMPT_FIELD_NUMBER: _ClassVar[int]
  PHASE_FIELD_NUMBER: _ClassVar[int]
@@ -282,6 +288,7 @@ class ActionEvent(_message.Message):
  OUTPUTS_FIELD_NUMBER: _ClassVar[int]
  CACHE_STATUS_FIELD_NUMBER: _ClassVar[int]
  CLUSTER_EVENTS_FIELD_NUMBER: _ClassVar[int]
+ REPORTED_TIME_FIELD_NUMBER: _ClassVar[int]
  id: _identifier_pb2.ActionIdentifier
  attempt: int
  phase: Phase
@@ -296,7 +303,8 @@ class ActionEvent(_message.Message):
  outputs: OutputReferences
  cache_status: _catalog_pb2.CatalogCacheStatus
  cluster_events: _containers.RepeatedCompositeFieldContainer[ClusterEvent]
- def __init__(self, id: _Optional[_Union[_identifier_pb2.ActionIdentifier, _Mapping]] = ..., attempt: _Optional[int] = ..., phase: _Optional[_Union[Phase, str]] = ..., version: _Optional[int] = ..., start_time: _Optional[_Union[_timestamp_pb2.Timestamp, _Mapping]] = ..., updated_time: _Optional[_Union[_timestamp_pb2.Timestamp, _Mapping]] = ..., end_time: _Optional[_Union[_timestamp_pb2.Timestamp, _Mapping]] = ..., error_info: _Optional[_Union[ErrorInfo, _Mapping]] = ..., log_info: _Optional[_Iterable[_Union[_execution_pb2.TaskLog, _Mapping]]] = ..., log_context: _Optional[_Union[_execution_pb2.LogContext, _Mapping]] = ..., cluster: _Optional[str] = ..., outputs: _Optional[_Union[OutputReferences, _Mapping]] = ..., cache_status: _Optional[_Union[_catalog_pb2.CatalogCacheStatus, str]] = ..., cluster_events: _Optional[_Iterable[_Union[ClusterEvent, _Mapping]]] = ...) -> None: ...
+ reported_time: _timestamp_pb2.Timestamp
+ def __init__(self, id: _Optional[_Union[_identifier_pb2.ActionIdentifier, _Mapping]] = ..., attempt: _Optional[int] = ..., phase: _Optional[_Union[Phase, str]] = ..., version: _Optional[int] = ..., start_time: _Optional[_Union[_timestamp_pb2.Timestamp, _Mapping]] = ..., updated_time: _Optional[_Union[_timestamp_pb2.Timestamp, _Mapping]] = ..., end_time: _Optional[_Union[_timestamp_pb2.Timestamp, _Mapping]] = ..., error_info: _Optional[_Union[ErrorInfo, _Mapping]] = ..., log_info: _Optional[_Iterable[_Union[_execution_pb2.TaskLog, _Mapping]]] = ..., log_context: _Optional[_Union[_execution_pb2.LogContext, _Mapping]] = ..., cluster: _Optional[str] = ..., outputs: _Optional[_Union[OutputReferences, _Mapping]] = ..., cache_status: _Optional[_Union[_catalog_pb2.CatalogCacheStatus, str]] = ..., cluster_events: _Optional[_Iterable[_Union[ClusterEvent, _Mapping]]] = ..., reported_time: _Optional[_Union[_timestamp_pb2.Timestamp, _Mapping]] = ...) -> None: ...

  class NamedLiteral(_message.Message):
  __slots__ = ["name", "value"]
@@ -315,10 +323,12 @@ class OutputReferences(_message.Message):
  def __init__(self, output_uri: _Optional[str] = ..., report_uri: _Optional[str] = ...) -> None: ...

  class Inputs(_message.Message):
- __slots__ = ["literals"]
+ __slots__ = ["literals", "context"]
  LITERALS_FIELD_NUMBER: _ClassVar[int]
+ CONTEXT_FIELD_NUMBER: _ClassVar[int]
  literals: _containers.RepeatedCompositeFieldContainer[NamedLiteral]
- def __init__(self, literals: _Optional[_Iterable[_Union[NamedLiteral, _Mapping]]] = ...) -> None: ...
+ context: _containers.RepeatedCompositeFieldContainer[_literals_pb2.KeyValuePair]
+ def __init__(self, literals: _Optional[_Iterable[_Union[NamedLiteral, _Mapping]]] = ..., context: _Optional[_Iterable[_Union[_literals_pb2.KeyValuePair, _Mapping]]] = ...) -> None: ...

  class Outputs(_message.Message):
  __slots__ = ["literals"]
flyte/_reusable_environment.py CHANGED
@@ -1,6 +1,6 @@
  from dataclasses import dataclass
  from datetime import timedelta
- from typing import Optional, Tuple, Union
+ from typing import Tuple, Union

  from flyte._logging import logger

@@ -17,25 +17,22 @@ class ReusePolicy:

  :param replicas: Either a single int representing number of replicas or a tuple of two ints representing
  the min and max.
- :param idle_ttl: The maximum idle duration for an environment replica, specified as either seconds (int) or a
- timedelta. If not set, the environment's global default will be used.
+ :param idle_ttl: The maximum idle duration for an environment, specified as either seconds (int) or a
+ timedelta, after which all replicas in the environment are shutdown.
+ If not set, the default is configured in the backend (can be as low as 90s).
  When a replica remains idle — meaning no tasks are running — for this duration, it will be automatically
- terminated.
+ terminated, also referred to as environment idle timeout.
  :param concurrency: The maximum number of tasks that can run concurrently in one instance of the environment.
- Concurrency of greater than 1 is only supported only for `async` tasks.
- :param reuse_salt: Optional string used to control environment reuse.
- If set, the environment will be reused even if the code bundle changes.
- To force a new environment, either set this to `None` or change its value.
-
- Example:
- reuse_salt = "v1" # Environment is reused
- reuse_salt = "v2" # Forces environment recreation
+ Concurrency of greater than 1 is only supported for `async` tasks.
+ :param scaledown_ttl: The minimum time to wait before scaling down each replica, specified as either seconds (int)
+ or a timedelta. This is useful to prevent rapid scaling down of replicas when tasks are running
+ frequently. If not set, the default is configured in the backend.
  """

  replicas: Union[int, Tuple[int, int]] = 2
- idle_ttl: Optional[Union[int, timedelta]] = None
- reuse_salt: str | None = None
+ idle_ttl: Union[int, timedelta] = 30 # seconds
  concurrency: int = 1
+ scaledown_ttl: Union[int, timedelta] = 30 # seconds

  def __post_init__(self):
  if self.replicas is None:
@@ -47,11 +44,12 @@ class ReusePolicy:
  elif len(self.replicas) != 2:
  raise ValueError("replicas must be an int or a tuple of two ints")

- if self.idle_ttl:
- if isinstance(self.idle_ttl, int):
- self.idle_ttl = timedelta(seconds=int(self.idle_ttl))
- elif not isinstance(self.idle_ttl, timedelta):
- raise ValueError("idle_ttl must be an int (seconds) or a timedelta")
+ if isinstance(self.idle_ttl, int):
+ self.idle_ttl = timedelta(seconds=int(self.idle_ttl))
+ elif not isinstance(self.idle_ttl, timedelta):
+ raise ValueError("idle_ttl must be an int (seconds) or a timedelta")
+ if self.idle_ttl.total_seconds() < 30:
+ raise ValueError("idle_ttl must be at least 30 seconds")

  if self.replicas[1] == 1 and self.concurrency == 1:
  logger.warning(
@@ -61,6 +59,13 @@ class ReusePolicy:
  "that runs child tasks."
  )

+ if isinstance(self.scaledown_ttl, int):
+ self.scaledown_ttl = timedelta(seconds=int(self.scaledown_ttl))
+ elif not isinstance(self.scaledown_ttl, timedelta):
+ raise ValueError("scaledown_ttl must be an int (seconds) or a timedelta")
+ if self.scaledown_ttl.total_seconds() < 30:
+ raise ValueError("scaledown_ttl must be at least 30 seconds")
+
  @property
  def ttl(self) -> timedelta | None:
  """
@@ -72,6 +77,23 @@ class ReusePolicy:
  return self.idle_ttl
  return timedelta(seconds=self.idle_ttl)

+ @property
+ def min_replicas(self) -> int:
+ """
+ Returns the minimum number of replicas.
+ """
+ return self.replicas[0] if isinstance(self.replicas, tuple) else self.replicas
+
+ def get_scaledown_ttl(self) -> timedelta | None:
+ """
+ Returns the scaledown TTL as a timedelta. If scaledown_ttl is not set, returns None.
+ """
+ if self.scaledown_ttl is None:
+ return None
+ if isinstance(self.scaledown_ttl, timedelta):
+ return self.scaledown_ttl
+ return timedelta(seconds=int(self.scaledown_ttl))
+
  @property
  def max_replicas(self) -> int:
  """
flyte/_run.py CHANGED
@@ -19,8 +19,6 @@ from flyte._initialize import (
  )
  from flyte._logging import logger
  from flyte._task import P, R, TaskTemplate
- from flyte._tools import ipython_check
- from flyte.errors import InitializationError
  from flyte.models import (
  ActionID,
  Checkpoints,
@@ -89,13 +87,15 @@ class _Runner:
  overwrite_cache: bool = False,
  project: str | None = None,
  domain: str | None = None,
- env: Dict[str, str] | None = None,
+ env_vars: Dict[str, str] | None = None,
  labels: Dict[str, str] | None = None,
  annotations: Dict[str, str] | None = None,
  interruptible: bool = False,
  log_level: int | None = None,
  disable_run_cache: bool = False,
  ):
+ from flyte._tools import ipython_check
+
  init_config = _get_init_config()
  client = init_config.client if init_config else None
  if not force_mode and client is not None:
@@ -116,7 +116,7 @@ class _Runner:
  self._overwrite_cache = overwrite_cache
  self._project = project
  self._domain = domain
- self._env = env
+ self._env_vars = env_vars
  self._labels = labels
  self._annotations = annotations
  self._interruptible = interruptible
@@ -198,7 +198,7 @@ class _Runner:
  task_spec = translate_task_to_wire(obj, s_ctx)
  inputs = await convert_from_native_to_inputs(obj.native_interface, *args, **kwargs)

- env = self._env or {}
+ env = self._env_vars or {}
  if self._log_level:
  env["LOG_LEVEL"] = str(self._log_level)
  else:
@@ -207,7 +207,7 @@ class _Runner:
  if not self._dry_run:
  if get_client() is None:
  # This can only happen, if the user forces flyte.run(mode="remote") without initializing the client
- raise InitializationError(
+ raise flyte.errors.InitializationError(
  "ClientNotInitializedError",
  "user",
  "flyte.run requires client to be initialized. "
@@ -542,7 +542,7 @@ def with_runcontext(
  overwrite_cache: bool = False,
  project: str | None = None,
  domain: str | None = None,
- env: Dict[str, str] | None = None,
+ env_vars: Dict[str, str] | None = None,
  labels: Dict[str, str] | None = None,
  annotations: Dict[str, str] | None = None,
  interruptible: bool = False,
@@ -582,7 +582,7 @@ def with_runcontext(
  :param overwrite_cache: Optional If true, the cache will be overwritten for the run
  :param project: Optional The project to use for the run
  :param domain: Optional The domain to use for the run
- :param env: Optional Environment variables to set for the run
+ :param env_vars: Optional Environment variables to set for the run
  :param labels: Optional Labels to set for the run
  :param annotations: Optional Annotations to set for the run
  :param interruptible: Optional If true, the run can be interrupted by the user.
@@ -606,7 +606,7 @@ def with_runcontext(
  raw_data_path=raw_data_path,
  run_base_dir=run_base_dir,
  overwrite_cache=overwrite_cache,
- env=env,
+ env_vars=env_vars,
  labels=labels,
  annotations=annotations,
  interruptible=interruptible,
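The `_run.py` changes above rename the `env` parameter to `env_vars` on both `_Runner` and `with_runcontext`. A hedged usage sketch of the renamed keyword; the surrounding setup (`TaskEnvironment`, the task, its inputs) is illustrative only and assumes those names are exported at the package root:

```python
import flyte

env = flyte.TaskEnvironment(name="hello")  # assumed public API

@env.task
async def greet(name: str) -> str:
    return f"Hello, {name}!"

# The client must be initialized first (see flyte._initialize); otherwise
# with_runcontext(...).run(...) raises flyte.errors.InitializationError as shown above.
run = flyte.with_runcontext(
    env_vars={"LOG_LEVEL": "10"},  # renamed from env= in this release
    overwrite_cache=True,
).run(greet, name="world")
```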
flyte/_secret.py CHANGED
@@ -17,14 +17,14 @@ class Secret:

  Example:
  ```python
- @task(secrets="MY_SECRET")
+ @task(secrets="my-secret")
  async def my_task():
- os.environ["MY_SECRET"] # This will be set to the value of the secret
+ # This will be set to the value of the secret. Note: The env var is always uppercase, and - is replaced with _.
+ os.environ["MY_SECRET"]

- @task(secrets=Secret("MY_SECRET", mount="/path/to/secret"))
+ @task(secrets=Secret("my-openai-api-key", as_env_var="OPENAI_API_KEY"))
  async def my_task2():
- async with open("/path/to/secret") as f:
- secret_value = f.read()
+ os.environ["OPENAI_API_KEY"]
  ```

  TODO: Add support for secret versioning (some stores) and secret groups (some stores) and mounting as files.
@@ -32,6 +32,7 @@ class Secret:
  :param key: The name of the secret in the secret store.
  :param group: The group of the secret in the secret store.
  :param mount: Use this to specify the path where the secret should be mounted.
+ TODO: support arbitrary mount paths. Today only "/etc/flyte/secrets" is supported
  :param as_env_var: The name of the environment variable that the secret should be mounted as.
  """

@@ -41,6 +42,9 @@ class Secret:
  as_env_var: Optional[str] = None

  def __post_init__(self):
+ if not self.mount and not self.as_env_var:
+ self.as_env_var = f"{self.group}_{self.key}" if self.group else self.key
+ self.as_env_var = self.as_env_var.replace("-", "_").upper()
  if self.as_env_var is not None:
  pattern = r"^[A-Z_][A-Z0-9_]*$"
  if not re.match(pattern, self.as_env_var):
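The new `__post_init__` logic above derives a default environment-variable name when neither `mount` nor `as_env_var` is given: group and key are joined with `_`, dashes become underscores, and the result is upper-cased. A small sketch, assuming `Secret` is re-exported at the package root (otherwise import it from `flyte._secret`):

```python
from flyte import Secret  # assumed re-export; else: from flyte._secret import Secret

s1 = Secret(key="my-openai-api-key")
print(s1.as_env_var)  # MY_OPENAI_API_KEY

s2 = Secret(key="api-key", group="openai")
print(s2.as_env_var)  # OPENAI_API_KEY

# An explicit as_env_var still has to match ^[A-Z_][A-Z0-9_]*$
s3 = Secret(key="db-password", as_env_var="DB_PASSWORD")
```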
flyte/_task.py CHANGED
@@ -76,7 +76,7 @@ class TaskTemplate(Generic[P, R]):
  :param reusable: Optional The reusability policy for the task, defaults to None, which means the task environment
  will not be reused across task invocations.
  :param docs: Optional The documentation for the task, if not provided the function docstring will be used.
- :param env: Optional The environment variables to set for the task.
+ :param env_vars: Optional The environment variables to set for the task.
  :param secrets: Optional The secrets that will be injected into the task at runtime.
  :param timeout: Optional The timeout for the task.
  :param max_inline_io_bytes: Maximum allowed size (in bytes) for all inputs and outputs passed directly to the task
@@ -85,7 +85,7 @@ class TaskTemplate(Generic[P, R]):

  name: str
  interface: NativeInterface
- friendly_name: str = ""
+ short_name: str = ""
  task_type: str = "python"
  task_type_version: int = 0
  image: Union[str, Image, Literal["auto"]] = "auto"
@@ -95,7 +95,7 @@ class TaskTemplate(Generic[P, R]):
  retries: Union[int, RetryStrategy] = 0
  reusable: Union[ReusePolicy, None] = None
  docs: Optional[Documentation] = None
- env: Optional[Dict[str, str]] = None
+ env_vars: Optional[Dict[str, str]] = None
  secrets: Optional[SecretRequest] = None
  timeout: Optional[TimeoutType] = None
  pod_template: Optional[Union[str, PodTemplate]] = None
@@ -129,9 +129,9 @@ class TaskTemplate(Generic[P, R]):
  if isinstance(self.retries, int):
  self.retries = RetryStrategy(count=self.retries)

- if self.friendly_name == "":
- # If friendly_name is not set, use the name of the task
- self.friendly_name = self.name
+ if self.short_name == "":
+ # If short_name is not set, use the name of the task
+ self.short_name = self.name

  def __getstate__(self):
  """
@@ -314,14 +314,16 @@ class TaskTemplate(Generic[P, R]):
  def override(
  self,
  *,
+ short_name: Optional[str] = None,
  resources: Optional[Resources] = None,
- cache: CacheRequest = "auto",
+ cache: Optional[CacheRequest] = None,
  retries: Union[int, RetryStrategy] = 0,
  timeout: Optional[TimeoutType] = None,
  reusable: Union[ReusePolicy, Literal["off"], None] = None,
- env: Optional[Dict[str, str]] = None,
+ env_vars: Optional[Dict[str, str]] = None,
  secrets: Optional[SecretRequest] = None,
  max_inline_io_bytes: int | None = None,
+ pod_template: Optional[Union[str, PodTemplate]] = None,
  **kwargs: Any,
  ) -> TaskTemplate:
  """
@@ -344,7 +346,7 @@ class TaskTemplate(Generic[P, R]):
  " Reusable tasks will use the parent env's resources. You can disable reusability and"
  " override resources if needed. (set reusable='off')"
  )
- if env is not None:
+ if env_vars is not None:
  raise ValueError(
  "Cannot override env when reusable is set."
  " Reusable tasks will use the parent env's env. You can disable reusability and "
@@ -358,7 +360,7 @@ class TaskTemplate(Generic[P, R]):
  )

  resources = resources or self.resources
- env = env or self.env
+ env_vars = env_vars or self.env_vars
  secrets = secrets or self.secrets

  for k, v in kwargs.items():
@@ -373,14 +375,17 @@ class TaskTemplate(Generic[P, R]):

  return replace(
  self,
+ short_name=short_name or self.short_name,
  resources=resources,
  cache=cache,
  retries=retries,
  timeout=timeout,
  reusable=cast(Optional[ReusePolicy], reusable),
- env=env,
+ env_vars=env_vars,
  secrets=secrets,
  max_inline_io_bytes=max_inline_io_bytes,
+ pod_template=pod_template,
+ **kwargs,
  )
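`TaskTemplate.override` above gains `short_name` and `pod_template` parameters, its `cache` default changes from `"auto"` to `None`, and the `env`/`friendly_name` fields become `env_vars`/`short_name`. A hedged sketch against a hypothetical task `my_task` created with `@env.task`:

```python
# Sketch: my_task is a hypothetical task created via @env.task.
fast_variant = my_task.override(
    short_name="my-task-fast",        # new in this release
    retries=2,
    env_vars={"LOG_LEVEL": "DEBUG"},  # rejected with ValueError if the task uses a reusable environment
)
```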
flyte/_task_environment.py CHANGED
@@ -53,7 +53,7 @@ class TaskEnvironment(Environment):
  :param name: Name of the environment
  :param image: Docker image to use for the environment. If set to "auto", will use the default image.
  :param resources: Resources to allocate for the environment.
- :param env: Environment variables to set for the environment.
+ :param env_vars: Environment variables to set for the environment.
  :param secrets: Secrets to inject into the environment.
  :param depends_on: Environment dependencies to hint, so when you deploy the environment,
  the dependencies are also deployed. This is useful when you have a set of environments
@@ -80,7 +80,7 @@ class TaskEnvironment(Environment):
  name: str,
  image: Optional[Union[str, Image, Literal["auto"]]] = None,
  resources: Optional[Resources] = None,
- env: Optional[Dict[str, str]] = None,
+ env_vars: Optional[Dict[str, str]] = None,
  secrets: Optional[SecretRequest] = None,
  depends_on: Optional[List[Environment]] = None,
  **kwargs: Any,
@@ -93,7 +93,7 @@ class TaskEnvironment(Environment):
  :param name: The name of the environment.
  :param image: The image to use for the environment.
  :param resources: The resources to allocate for the environment.
- :param env: The environment variables to set for the environment.
+ :param env_vars: The environment variables to set for the environment.
  :param secrets: The secrets to inject into the environment.
  :param depends_on: The environment dependencies to hint, so when you deploy the environment,
  the dependencies are also deployed. This is useful when you have a set of environments
@@ -119,8 +119,8 @@ class TaskEnvironment(Environment):
  kwargs["resources"] = resources
  if cache is not None:
  kwargs["cache"] = cache
- if env is not None:
- kwargs["env"] = env
+ if env_vars is not None:
+ kwargs["env_vars"] = env_vars
  if reusable_set:
  kwargs["reusable"] = reusable
  if secrets is not None:
@@ -133,12 +133,11 @@ class TaskEnvironment(Environment):
  self,
  _func=None,
  *,
- name: Optional[str] = None,
+ short_name: Optional[str] = None,
  cache: CacheRequest | None = None,
  retries: Union[int, RetryStrategy] = 0,
  timeout: Union[timedelta, int] = 0,
  docs: Optional[Documentation] = None,
- secrets: Optional[SecretRequest] = None,
  pod_template: Optional[Union[str, "V1PodTemplate"]] = None,
  report: bool = False,
  max_inline_io_bytes: int = MAX_INLINE_IO_BYTES,
@@ -148,12 +147,11 @@ class TaskEnvironment(Environment):

  :param _func: Optional The function to decorate. If not provided, the decorator will return a callable that
  accepts a function to be decorated.
- :param name: Optional A friendly name for the task (defaults to the function name)
+ :param short_name: Optional A friendly name for the task (defaults to the function name)
  :param cache: Optional The cache policy for the task, defaults to auto, which will cache the results of the
  task.
  :param retries: Optional The number of retries for the task, defaults to 0, which means no retries.
  :param docs: Optional The documentation for the task, if not provided the function docstring will be used.
- :param secrets: Optional The secrets that will be injected into the task at runtime.
  :param timeout: Optional The timeout for the task.
  :param pod_template: Optional The pod template for the task, if not provided the default pod template will be
  used.
@@ -168,7 +166,7 @@ class TaskEnvironment(Environment):
  raise ValueError("Cannot set pod_template when environment is reusable.")

  def decorator(func: FunctionTypes) -> AsyncFunctionTaskTemplate[P, R]:
- friendly_name = name or func.__name__
+ short = short_name or func.__name__
  task_name = self.name + "." + func.__name__

  if not inspect.iscoroutinefunction(func) and self.reusable is not None:
@@ -203,13 +201,13 @@ class TaskEnvironment(Environment):
  timeout=timeout,
  reusable=self.reusable,
  docs=docs,
- env=self.env,
- secrets=secrets or self.secrets,
+ env_vars=self.env_vars,
+ secrets=self.secrets,
  pod_template=pod_template or self.pod_template,
  parent_env=weakref.ref(self),
  interface=NativeInterface.from_callable(func),
  report=report,
- friendly_name=friendly_name,
+ short_name=short,
  plugin_config=self.plugin_config,
  max_inline_io_bytes=max_inline_io_bytes,
  )
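`TaskEnvironment` now takes `env_vars` instead of `env`, and its `task` decorator takes `short_name` instead of `name` and no longer accepts a per-task `secrets` override. A hedged sketch of the updated surface, assuming `TaskEnvironment` is exported at the package root:

```python
import flyte

env = flyte.TaskEnvironment(        # assumed public API
    name="training",
    image="auto",
    env_vars={"MY_FLAG": "1"},       # renamed from env= in this release
)

@env.task(short_name="train-model")  # renamed from name= in this release
async def train(epochs: int = 3) -> str:
    return f"trained for {epochs} epochs"
```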
flyte/_tools.py CHANGED
@@ -1,6 +1,3 @@
- import os
-
-
  def ipython_check() -> bool:
  """
  Check if interface is launching from iPython (not colab)
@@ -17,16 +14,6 @@ def ipython_check() -> bool:
  return is_ipython

- def is_in_cluster() -> bool:
- """
- Check if the task is running in a cluster
- :return is_in_cluster (bool): True or False
- """
- if os.getenv("_UN_CLS"):
- return True
- return False
-
-
  def ipywidgets_check() -> bool:
  """
  Check if the interface is running in IPython with ipywidgets support.
flyte/_version.py CHANGED
@@ -1,7 +1,14 @@
  # file generated by setuptools-scm
  # don't change, don't track in version control

- __all__ = ["__version__", "__version_tuple__", "version", "version_tuple"]
+ __all__ = [
+ "__version__",
+ "__version_tuple__",
+ "version",
+ "version_tuple",
+ "__commit_id__",
+ "commit_id",
+ ]

  TYPE_CHECKING = False
  if TYPE_CHECKING:
@@ -9,13 +16,19 @@ if TYPE_CHECKING:
  from typing import Union

  VERSION_TUPLE = Tuple[Union[int, str], ...]
+ COMMIT_ID = Union[str, None]
  else:
  VERSION_TUPLE = object
+ COMMIT_ID = object

  version: str
  __version__: str
  __version_tuple__: VERSION_TUPLE
  version_tuple: VERSION_TUPLE
+ commit_id: COMMIT_ID
+ __commit_id__: COMMIT_ID

- __version__ = version = '2.0.0b9'
- __version_tuple__ = version_tuple = (2, 0, 0, 'b9')
+ __version__ = version = '2.0.0b14'
+ __version_tuple__ = version_tuple = (2, 0, 0, 'b14')
+
+ __commit_id__ = commit_id = 'g5efc235bd'
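Besides bumping the version, `_version.py` now exposes the source commit. A quick check (the expected values are those generated into this wheel):

```python
from flyte import _version

print(_version.__version__)        # 2.0.0b14
print(_version.__version_tuple__)  # (2, 0, 0, 'b14')
print(_version.__commit_id__)      # g5efc235bd
```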
flyte/cli/_build.py CHANGED
@@ -3,8 +3,7 @@ from pathlib import Path
  from types import ModuleType
  from typing import Any, Dict, List, cast

- import click
- from click import Context
+ import rich_click as click

  import flyte

@@ -44,7 +43,7 @@ class BuildEnvCommand(click.Command):
  self.build_args = build_args
  super().__init__(*args, **kwargs)

- def invoke(self, ctx: Context):
+ def invoke(self, ctx: click.Context):
  from rich.console import Console

  console = Console()
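`flyte/cli/_build.py` switches from `click` to `rich_click`, a drop-in wrapper that keeps the click API (including `click.Context`, as used in `invoke` above) while rendering `--help` output with rich. A minimal standalone sketch of the pattern:

```python
import rich_click as click  # drop-in replacement: same decorators and types as click


@click.command()
@click.option("--name", default="world", help="Who to greet.")
def hello(name: str) -> None:
    """Print a greeting (help text is rendered with rich formatting)."""
    click.echo(f"Hello, {name}!")


if __name__ == "__main__":
    hello()
```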
flyte/cli/_common.py CHANGED
@@ -316,11 +316,23 @@ class FileGroup(GroupBase):
  def files(self):
  if self._files is None:
  directory = self._dir or Path(".").absolute()
- self._files = [os.fspath(p) for p in directory.glob("*.py") if p.name != "__init__.py"]
- if not self._files:
- self._files = [os.fspath(".")] + [
- os.fspath(p.name) for p in directory.iterdir() if not p.name.startswith(("_", ".")) and p.is_dir()
+ # add python files
+ _files = [os.fspath(p) for p in directory.glob("*.py") if p.name != "__init__.py"]
+
+ # add directories
+ _files.extend(
+ [
+ os.fspath(directory / p.name)
+ for p in directory.iterdir()
+ if not p.name.startswith(("_", ".")) and p.is_dir()
  ]
+ )
+
+ # files that are in the current directory or subdirectories of the
+ # current directory should be displayed as relative paths
+ self._files = [
+ str(Path(f).relative_to(Path.cwd())) if Path(f).is_relative_to(Path.cwd()) else f for f in _files
+ ]
  return self._files

  def list_commands(self, ctx):
@@ -351,7 +363,6 @@ def format(title: str, vals: Iterable[Any], of: OutputFormat = "table") -> Table
  """
  Get a table from a list of values.
  """
-
  match of:
  case "table-simple":
  return _table_format(Table(title, box=None), vals)
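The `FileGroup.files` rewrite above lists both top-level `.py` files and subdirectories, then displays anything under the current working directory as a relative path. A standalone sketch of that display rule (the helper name is hypothetical; `Path.is_relative_to` requires Python 3.9+):

```python
from pathlib import Path
from typing import List


def display_paths(paths: List[str]) -> List[str]:
    """Mirror the FileGroup.files rule: show paths under the CWD relative to it, leave others unchanged."""
    cwd = Path.cwd()
    return [
        str(Path(p).relative_to(cwd)) if Path(p).is_relative_to(cwd) else p
        for p in paths
    ]


print(display_paths([str(Path.cwd() / "workflows" / "main.py"), "/tmp/other.py"]))
# ['workflows/main.py', '/tmp/other.py']
```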