flyte 0.2.0b12__py3-none-any.whl → 0.2.0b13__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of flyte might be problematic.

@@ -15,15 +15,15 @@ class StateServiceStub(object):
         Args:
             channel: A grpc.Channel.
         """
-        self.Store = channel.unary_unary(
-                '/cloudidl.workflow.StateService/Store',
-                request_serializer=workflow_dot_state__service__pb2.StoreRequest.SerializeToString,
-                response_deserializer=workflow_dot_state__service__pb2.StoreResponse.FromString,
+        self.Put = channel.stream_stream(
+                '/cloudidl.workflow.StateService/Put',
+                request_serializer=workflow_dot_state__service__pb2.PutRequest.SerializeToString,
+                response_deserializer=workflow_dot_state__service__pb2.PutResponse.FromString,
                 )
-        self.Load = channel.unary_unary(
-                '/cloudidl.workflow.StateService/Load',
-                request_serializer=workflow_dot_state__service__pb2.LoadRequest.SerializeToString,
-                response_deserializer=workflow_dot_state__service__pb2.LoadResponse.FromString,
+        self.Get = channel.stream_stream(
+                '/cloudidl.workflow.StateService/Get',
+                request_serializer=workflow_dot_state__service__pb2.GetRequest.SerializeToString,
+                response_deserializer=workflow_dot_state__service__pb2.GetResponse.FromString,
                 )
         self.Watch = channel.unary_stream(
                 '/cloudidl.workflow.StateService/Watch',
@@ -36,15 +36,15 @@ class StateServiceServicer(object):
     """provides an interface for managing the state of actions.
     """

-    def Store(self, request, context):
-        """store the state of an action.
+    def Put(self, request_iterator, context):
+        """put the state of an action.
         """
         context.set_code(grpc.StatusCode.UNIMPLEMENTED)
         context.set_details('Method not implemented!')
         raise NotImplementedError('Method not implemented!')

-    def Load(self, request, context):
-        """load the state of an action.
+    def Get(self, request_iterator, context):
+        """get the state of an action.
         """
         context.set_code(grpc.StatusCode.UNIMPLEMENTED)
         context.set_details('Method not implemented!')
@@ -60,15 +60,15 @@ class StateServiceServicer(object):

 def add_StateServiceServicer_to_server(servicer, server):
     rpc_method_handlers = {
-            'Store': grpc.unary_unary_rpc_method_handler(
-                    servicer.Store,
-                    request_deserializer=workflow_dot_state__service__pb2.StoreRequest.FromString,
-                    response_serializer=workflow_dot_state__service__pb2.StoreResponse.SerializeToString,
+            'Put': grpc.stream_stream_rpc_method_handler(
+                    servicer.Put,
+                    request_deserializer=workflow_dot_state__service__pb2.PutRequest.FromString,
+                    response_serializer=workflow_dot_state__service__pb2.PutResponse.SerializeToString,
             ),
-            'Load': grpc.unary_unary_rpc_method_handler(
-                    servicer.Load,
-                    request_deserializer=workflow_dot_state__service__pb2.LoadRequest.FromString,
-                    response_serializer=workflow_dot_state__service__pb2.LoadResponse.SerializeToString,
+            'Get': grpc.stream_stream_rpc_method_handler(
+                    servicer.Get,
+                    request_deserializer=workflow_dot_state__service__pb2.GetRequest.FromString,
+                    response_serializer=workflow_dot_state__service__pb2.GetResponse.SerializeToString,
             ),
             'Watch': grpc.unary_stream_rpc_method_handler(
                     servicer.Watch,
@@ -87,7 +87,7 @@ class StateService(object):
     """

     @staticmethod
-    def Store(request,
+    def Put(request_iterator,
             target,
             options=(),
             channel_credentials=None,
@@ -97,14 +97,14 @@ class StateService(object):
             wait_for_ready=None,
             timeout=None,
             metadata=None):
-        return grpc.experimental.unary_unary(request, target, '/cloudidl.workflow.StateService/Store',
-            workflow_dot_state__service__pb2.StoreRequest.SerializeToString,
-            workflow_dot_state__service__pb2.StoreResponse.FromString,
+        return grpc.experimental.stream_stream(request_iterator, target, '/cloudidl.workflow.StateService/Put',
+            workflow_dot_state__service__pb2.PutRequest.SerializeToString,
+            workflow_dot_state__service__pb2.PutResponse.FromString,
             options, channel_credentials,
             insecure, call_credentials, compression, wait_for_ready, timeout, metadata)

     @staticmethod
-    def Load(request,
+    def Get(request_iterator,
             target,
             options=(),
             channel_credentials=None,
@@ -114,9 +114,9 @@ class StateService(object):
             wait_for_ready=None,
             timeout=None,
             metadata=None):
-        return grpc.experimental.unary_unary(request, target, '/cloudidl.workflow.StateService/Load',
-            workflow_dot_state__service__pb2.LoadRequest.SerializeToString,
-            workflow_dot_state__service__pb2.LoadResponse.FromString,
+        return grpc.experimental.stream_stream(request_iterator, target, '/cloudidl.workflow.StateService/Get',
+            workflow_dot_state__service__pb2.GetRequest.SerializeToString,
+            workflow_dot_state__service__pb2.GetResponse.FromString,
             options, channel_credentials,
             insecure, call_credentials, compression, wait_for_ready, timeout, metadata)

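The Store/Load RPCs were unary; their Put/Get replacements are bidirectional streams, so callers now pass a request iterator and consume a response iterator. A minimal client sketch under that assumption; the module path is an illustrative guess, and only the stub class and RPC names come from the generated code above:

```python
# Hypothetical client sketch for the new streaming RPCs. The import path below is an
# assumption; only StateServiceStub and the Put/Get names come from the code in this diff.
import grpc

from workflow import state_service_pb2_grpc as pb2_grpc  # assumed module layout


def put_states(address, requests):
    """Stream a sequence of PutRequest messages and drain the response stream."""
    with grpc.insecure_channel(address) as channel:
        stub = pb2_grpc.StateServiceStub(channel)
        # stream_stream RPCs accept an iterator of requests and return an iterator of responses.
        for response in stub.Put(iter(requests)):
            print(response)
```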
flyte/_run.py CHANGED
@@ -63,6 +63,7 @@ class _Runner:
         raw_data_path: str | None = None,
         metadata_path: str | None = None,
         run_base_dir: str | None = None,
+        overwrite_cache: bool = False,
     ):
         init_config = _get_init_config()
         client = init_config.client if init_config else None
@@ -81,6 +82,7 @@
         self._raw_data_path = raw_data_path
         self._metadata_path = metadata_path or "/tmp"
         self._run_base_dir = run_base_dir or "/tmp/base"
+        self._overwrite_cache = overwrite_cache

     @requires_initialization
     async def _run_remote(self, obj: TaskTemplate[P, R] | LazyEntity, *args: P.args, **kwargs: P.kwargs) -> Run:
@@ -182,6 +184,9 @@
                 project_id=project_id,
                 task_spec=task_spec,
                 inputs=inputs.proto_inputs,
+                run_spec=run_definition_pb2.RunSpec(
+                    overwrite_cache=self._overwrite_cache,
+                ),
             ),
         )
         return Run(pb2=resp.run)
@@ -414,6 +419,7 @@ def with_runcontext(
     interactive_mode: bool | None = None,
     raw_data_path: str | None = None,
     run_base_dir: str | None = None,
+    overwrite_cache: bool = False,
 ) -> _Runner:
     """
     Launch a new run with the given parameters as the context.
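The new `overwrite_cache` flag travels from `with_runcontext` through `_Runner` into the `RunSpec` sent with the create-run request, letting a single run bypass previously cached results. A minimal usage sketch; `my_module.my_task` is a hypothetical task handle, while `with_runcontext`, `run`, and `run.url` are APIs shown in this release:

```python
import flyte

from my_module import my_task  # hypothetical task decorated with @env.task

flyte.init_from_config("config.yaml")

# Force re-execution even if a cached result exists for these inputs.
# overwrite_cache defaults to False, preserving the previous behaviour.
run = flyte.with_runcontext(overwrite_cache=True).run(my_task, "input")
print(run.url)
```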
flyte/_version.py CHANGED
@@ -17,5 +17,5 @@ __version__: str
 __version_tuple__: VERSION_TUPLE
 version_tuple: VERSION_TUPLE

-__version__ = version = '0.2.0b12'
-__version_tuple__ = version_tuple = (0, 2, 0, 'b12')
+__version__ = version = '0.2.0b13'
+__version_tuple__ = version_tuple = (0, 2, 0, 'b13')
flyte/cli/_deploy.py CHANGED
@@ -147,6 +147,6 @@ deploy = EnvFiles(
     name="deploy",
     help="""
     Deploy one or more environments from a python file.
-    The deploy command will create or update environments in the Flyte system.
+    This command will create or update environments in the Flyte system.
     """,
 )
flyte/cli/_get.py CHANGED
@@ -20,7 +20,7 @@ def get():
     Using a `get` subcommand without any arguments will retrieve a list of available resources to get.
     For example:

-    * `get project` (without specifiying aproject), will list all projects.
+    * `get project` (without specifying a project), will list all projects.
     * `get project my_project` will return the details of the project named `my_project`.

     In some cases, a partially specified command will act as a filter and return available further parameters.
@@ -143,7 +143,7 @@ def action(
     "--pretty",
     is_flag=True,
     default=False,
-    help="Show logs in a auto scrolling box, where number of lines is limited to `--lines`",
+    help="Show logs in an auto-scrolling box, where number of lines is limited to `--lines`",
 )
 @click.option(
     "--attempt", "-a", type=int, default=None, help="Attempt number to show logs for, defaults to the latest attempt."
flyte/cli/_run.py CHANGED
@@ -66,7 +66,7 @@ class RunArguments:
             ["--follow", "-f"],
             is_flag=True,
             default=False,
-            help="Wait and watch logs for the parent action. If not provided, the cli will exit after "
+            help="Wait and watch logs for the parent action. If not provided, the CLI will exit after "
             "successfully launching a remote execution with a link to the UI.",
         )
     },
@@ -108,7 +108,6 @@ class RunTaskCommand(click.Command):

         r = flyte.with_runcontext(
             copy_style=self.run_args.copy_style,
-            version=self.run_args.copy_style,
             mode="local" if self.run_args.local else "remote",
             name=self.run_args.name,
         ).run(self.obj, **ctx.params)
flyte/cli/main.py CHANGED
@@ -103,7 +103,7 @@ def main(
     config_file: str | None,
 ):
     """
-    The Flyte CLI is the the command line interface for working with the Flyte SDK and backend.
+    The Flyte CLI is the command line interface for working with the Flyte SDK and backend.

     It follows a simple verb/noun structure,
     where the top-level commands are verbs that describe the action to be taken,
flyte/syncify/_api.py CHANGED
@@ -50,7 +50,7 @@ class SyncGenFunction(Protocol[P, R_co]):

 class _BackgroundLoop:
     """
-    A background event loop that runs in a separate thread and used the the Syncify decorator to run asynchronous
+    A background event loop that runs in a separate thread and used the `Syncify` decorator to run asynchronous
     functions or methods synchronously.
     """

@@ -35,6 +35,7 @@ from mashumaro.jsonschema.models import Context, JSONSchema
 from mashumaro.jsonschema.plugins import BasePlugin
 from mashumaro.jsonschema.schema import Instance
 from mashumaro.mixins.json import DataClassJSONMixin
+from pydantic import BaseModel
 from typing_extensions import Annotated, get_args, get_origin

 import flyte.storage as storage
@@ -352,6 +353,79 @@ class RestrictedTypeTransformer(TypeTransformer[T], ABC):
         raise RestrictedTypeError(f"Transformer for type {self.python_type} is restricted currently")


+class PydanticTransformer(TypeTransformer[BaseModel]):
+    def __init__(self):
+        super().__init__("Pydantic Transformer", BaseModel, enable_type_assertions=False)
+
+    def get_literal_type(self, t: Type[BaseModel]) -> LiteralType:
+        schema = t.model_json_schema()
+        fields = t.__annotations__.items()
+
+        literal_type = {}
+        for name, python_type in fields:
+            try:
+                literal_type[name] = TypeEngine.to_literal_type(python_type)
+            except Exception as e:
+                logger.warning(
+                    "Field {} of type {} cannot be converted to a literal type. Error: {}".format(name, python_type, e)
+                )
+
+        # This is for attribute access in FlytePropeller.
+        ts = TypeStructure(tag="", dataclass_type=literal_type)
+
+        meta_struct = struct_pb2.Struct()
+        meta_struct.update(
+            {
+                CACHE_KEY_METADATA: {
+                    SERIALIZATION_FORMAT: MESSAGEPACK,
+                }
+            }
+        )
+
+        return LiteralType(
+            simple=SimpleType.STRUCT,
+            metadata=schema,
+            structure=ts,
+            annotation=TypeAnnotation(annotations=meta_struct),
+        )
+
+    async def to_literal(
+        self,
+        python_val: BaseModel,
+        python_type: Type[BaseModel],
+        expected: LiteralType,
+    ) -> Literal:
+        json_str = python_val.model_dump_json()
+        dict_obj = json.loads(json_str)
+        msgpack_bytes = msgpack.dumps(dict_obj)
+        return Literal(scalar=Scalar(binary=Binary(value=msgpack_bytes, tag=MESSAGEPACK)))
+
+    def from_binary_idl(self, binary_idl_object: Binary, expected_python_type: Type[BaseModel]) -> BaseModel:
+        if binary_idl_object.tag == MESSAGEPACK:
+            dict_obj = msgpack.loads(binary_idl_object.value, strict_map_key=False)
+            json_str = json.dumps(dict_obj)
+            python_val = expected_python_type.model_validate_json(
+                json_data=json_str, strict=False, context={"deserialize": True}
+            )
+            return python_val
+        else:
+            raise TypeTransformerFailedError(f"Unsupported binary format: `{binary_idl_object.tag}`")
+
+    async def to_python_value(self, lv: Literal, expected_python_type: Type[BaseModel]) -> BaseModel:
+        """
+        There are two kinds of literal values to handle:
+        1. Protobuf Structs (from the UI)
+        2. Binary scalars (from other sources)
+        We need to account for both cases accordingly.
+        """
+        if lv and lv.HasField("scalar") and lv.scalar.HasField("binary"):
+            return self.from_binary_idl(lv.scalar.binary, expected_python_type)  # type: ignore
+
+        json_str = _json_format.MessageToJson(lv.scalar.generic)
+        python_val = expected_python_type.model_validate_json(json_str, strict=False, context={"deserialize": True})
+        return python_val
+
+
 class PydanticSchemaPlugin(BasePlugin):
     """This allows us to generate proper schemas for Pydantic models."""

@@ -562,9 +636,8 @@ class DataclassTransformer(TypeTransformer[object]):

         # This is for attribute access in FlytePropeller.
         ts = TypeStructure(tag="", dataclass_type=literal_type)
-        from google.protobuf.struct_pb2 import Struct

-        meta_struct = Struct()
+        meta_struct = struct_pb2.Struct()
         meta_struct.update(
             {
                 CACHE_KEY_METADATA: {
@@ -627,7 +700,7 @@ class DataclassTransformer(TypeTransformer[object]):
             field.type = self._get_origin_type_in_annotation(cast(type, field.type))
         return python_type

-    async def from_binary_idl(self, binary_idl_object: Binary, expected_python_type: Type[T]) -> T:
+    def from_binary_idl(self, binary_idl_object: Binary, expected_python_type: Type[T]) -> T:
         if binary_idl_object.tag == MESSAGEPACK:
             if issubclass(expected_python_type, DataClassJSONMixin):
                 dict_obj = msgpack.loads(binary_idl_object.value, strict_map_key=False)
@@ -652,9 +725,10 @@ class DataclassTransformer(TypeTransformer[object]):
                 "user defined datatypes in Flytekit"
             )

-        if lv.scalar and lv.scalar.binary:
-            return await self.from_binary_idl(lv.scalar.binary, expected_python_type)  # type: ignore
+        if lv.HasField("scalar") and lv.scalar.HasField("binary"):
+            return self.from_binary_idl(lv.scalar.binary, expected_python_type)  # type: ignore

+        # todo: revisit this, it should always be a binary in v2.
         json_str = _json_format.MessageToJson(lv.scalar.generic)

         # The `from_json` function is provided from mashumaro's `DataClassJSONMixin`.
@@ -970,11 +1044,10 @@ class TypeEngine(typing.Generic[T]):
            return cls._REGISTRY[python_type.__origin__]

        # Handling UnionType specially - PEP 604
-        if sys.version_info >= (3, 10):
-            import types
+        import types

-            if isinstance(python_type, types.UnionType):
-                return cls._REGISTRY[types.UnionType]
+        if isinstance(python_type, types.UnionType):
+            return cls._REGISTRY[types.UnionType]

        if python_type in cls._REGISTRY:
            return cls._REGISTRY[python_type]
@@ -2041,6 +2114,7 @@ def _register_default_type_transformers():
    TypeEngine.register(DictTransformer())
    TypeEngine.register(EnumTransformer())
    TypeEngine.register(ProtobufTransformer())
+    TypeEngine.register(PydanticTransformer())

    # inner type is. Also unsupported are typing's Tuples. Even though you can look inside them, Flyte's type system
    # doesn't support these currently.
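With `PydanticTransformer` registered by default, Pydantic `BaseModel` values can cross task boundaries: they are serialized to MessagePack binary literals via `model_dump_json` and rebuilt with `model_validate_json`. A minimal sketch of what this enables; the environment, model, and task names are illustrative, not taken from the package:

```python
import flyte
from pydantic import BaseModel

env = flyte.TaskEnvironment(name="pydantic_example")  # illustrative name


class TrainConfig(BaseModel):
    lr: float = 0.01
    epochs: int = 10


@env.task
async def halve_lr(cfg: TrainConfig) -> TrainConfig:
    # The new PydanticTransformer handles (de)serialization of BaseModel values
    # as MessagePack binary literals, so TrainConfig can be a task input and output.
    return TrainConfig(lr=cfg.lr / 2, epochs=cfg.epochs)


if __name__ == "__main__":
    flyte.init()
    flyte.run(halve_lr, TrainConfig())
```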
@@ -0,0 +1,249 @@
+Metadata-Version: 2.4
+Name: flyte
+Version: 0.2.0b13
+Summary: Add your description here
+Author-email: Ketan Umare <kumare3@users.noreply.github.com>
+Requires-Python: >=3.10
+Description-Content-Type: text/markdown
+Requires-Dist: aiofiles>=24.1.0
+Requires-Dist: click>=8.2.1
+Requires-Dist: flyteidl==1.15.4b0
+Requires-Dist: cloudpickle>=3.1.1
+Requires-Dist: fsspec>=2025.3.0
+Requires-Dist: grpcio>=1.71.0
+Requires-Dist: obstore>=0.6.0
+Requires-Dist: protobuf>=6.30.1
+Requires-Dist: pydantic>=2.10.6
+Requires-Dist: pyyaml>=6.0.2
+Requires-Dist: rich-click>=1.8.9
+Requires-Dist: httpx>=0.28.1
+Requires-Dist: keyring>=25.6.0
+Requires-Dist: msgpack>=1.1.0
+Requires-Dist: toml>=0.10.2
+Requires-Dist: async-lru>=2.0.5
+Requires-Dist: mashumaro
+Requires-Dist: dataclasses_json
+
+# Flyte v2 SDK
+
+The next-generation SDK for Flyte.
+
+[![Publish Python Packages and Official Images](https://github.com/unionai/unionv2/actions/workflows/publish.yml/badge.svg)](https://github.com/unionai/unionv2/actions/workflows/publish.yml)
+
+## Quick start
+
+1. Run `uv venv`, and `source .venv/bin/activate` to create a new virtual environment.
+2. Install the latest version of the SDK by running the following:
+
+```
+uv pip install --no-cache --prerelease=allow --upgrade flyte
+```
+
+4. Create the config and point it to your cluster by running the following:
+
+```
+flyte create config --endpoint <your-endpoint-url> --project <your-project> --domain <your-domain>
+```
+
+This will create a `config.yaml` file in the current directory which will be referenced ahead of any other `config.yaml`s found in your system.
+
+5. Now you can run code with the CLI:
+
+```
+flyte run <path-to-your-script> <task-name>
+```
+
+## Hello World Example
+
+```python
+# hello_world.py
+
+import flyte
+
+env = flyte.TaskEnvironment(name="hello_world")
+
+
+@env.task
+async def say_hello(data: str) -> str:
+    return f"Hello {data}"
+
+
+@env.task
+async def say_hello_nested(data: str) -> str:
+    return await say_hello.override(resources=flyte.Resources(gpu="A100 80G:4")).execute(data)
+
+
+if __name__ == "__main__":
+    import asyncio
+
+    # to run pure python - the SDK is not invoked at all
+    asyncio.run(say_hello_nested("test"))
+
+    # To run locally, but run through type system etc
+    flyte.init()
+    flyte.run(say_hello_nested, "World")
+
+    # To run remote
+    flyte.init(endpoint="dns:///localhost:8090", insecure=True)
+    flyte.run(say_hello_nested, "World")
+    # It is possible to switch local and remote, but keeping init to have and endpoint, but , changing context during run
+    flyte.with_runcontext(mode="local").run(...)  # this will run locally only
+
+    # To run remote with a config
+    flyte.init_from_config("config.yaml")
+```
+
+## CLI
+
+All commands can be run from any root directory.
+For examples, it is not needed to have `__init__.py` in the directory.
+If you run from a directory, the code will automatically package and upload all modules that are imported.
+You can change the behavior by using `--copy-style` flag.
+
+```bash
+flyte run hello_world.py say_hello --data "World"
+```
+
+To follow the logs for the `a0` action, you can use the `--follow` flag:
+
+```bash
+flyte run --follow hello_world.py say_hello --data "World"
+```
+
+Note that `--follow` has to be used with the `run` command.
+
+Change copy style:
+
+```bash
+flyte run --copy-style all hello_world.py say_hello_nested --data "World"
+```
+
+## Building Images
+
+```python
+import flyte
+
+env = flyte.TaskEnvironment(
+    name="hello_world",
+    image=flyte.Image.auto().with_apt_packages(...).with_pip_packages(...),
+)
+
+```
+
+## Deploying
+
+```bash
+flyte deploy hello_world.py say_hello_nested
+```
+
+## Get information
+
+Get all runs:
+
+```bash
+flyte get run
+```
+
+Get a specific run:
+
+```bash
+flyte get run "run-name"
+```
+
+Get all actions for a run:
+
+```bash
+flyte get actions "run-name"
+```
+
+Get a specific action for a run:
+
+```bash
+flyte get action "run-name" "action-name"
+```
+
+Get action logs:
+
+```bash
+flyte get logs "run-name" ["action-name"]
+```
+
+This defaults to root action if no action name is provided
+
+## Running workflows programmatically in Python
+
+You can run any workflow programmatically within the script module using __main__:
+
+```python
+if __name__ == "__main__":
+    import flyte
+    flyte.init()
+    flyte.run(say_hello_nested, "World")
+```
+
+## Running scripts with dependencies specified in metadata headers
+
+You can also run a `uv` script with dependencies specified in metadata headers
+and build the task image automatically based on those dependencies:
+
+```python
+# container_images.py
+
+# /// script
+# dependencies = [
+#    "polars",
+#    "flyte>=0.2.0b12"
+# ]
+# ///
+
+import polars as pl
+
+import flyte
+
+
+env = flyte.TaskEnvironment(
+    name="polars_image",
+    image=flyte.Image.from_uv_script(
+        __file__,
+        name="flyte",
+        registry="ghcr.io/<you-username>"
+        arch=("linux/amd64", "linux/arm64"),
+    ).with_apt_packages("ca-certificates"),
+)
+
+
+@env.task
+async def create_dataframe() -> pl.DataFrame:
+    return pl.DataFrame(
+        {"name": ["Alice", "Bob", "Charlie"], "age": [25, 32, 37], "city": ["New York", "Paris", "Berlin"]}
+    )
+
+
+@env.task
+async def print_dataframe(dataframe: pl.DataFrame):
+    print(dataframe)
+
+
+@env.task
+async def workflow():
+    df = await create_dataframe()
+    await print_dataframe(df)
+
+
+if __name__ == "__main__":
+    flyte.init_from_config("config.yaml")
+    run = flyte.run(workflow)
+    print(run.name)
+    print(run.url)
+    run.wait(run)
+```
+
+When you execute
+
+```bash
+uv run hello_world.py
+```
+
+`uv` will automatically update the local virtual environment with the dependencies specified in the metadata headers.
+Then, Flyte will build the task image using those dependencies and push it to the registry you specify.
+Flyte will then deploy the tasks to the cluster where the system will pull the image and run the tasks using it.