flyte 0.2.0b11__py3-none-any.whl → 0.2.0b13__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of flyte might be problematic.

Files changed (50)
  1. flyte/_bin/runtime.py +11 -2
  2. flyte/_deploy.py +29 -0
  3. flyte/_initialize.py +7 -6
  4. flyte/_internal/controllers/remote/_action.py +5 -0
  5. flyte/_internal/controllers/remote/_controller.py +43 -3
  6. flyte/_internal/controllers/remote/_core.py +7 -0
  7. flyte/_internal/runtime/convert.py +61 -7
  8. flyte/_internal/runtime/task_serde.py +1 -1
  9. flyte/_protos/common/list_pb2.py +3 -3
  10. flyte/_protos/common/list_pb2.pyi +2 -0
  11. flyte/_protos/workflow/environment_pb2.py +29 -0
  12. flyte/_protos/workflow/environment_pb2.pyi +12 -0
  13. flyte/_protos/workflow/environment_pb2_grpc.py +4 -0
  14. flyte/_protos/workflow/queue_service_pb2.py +30 -29
  15. flyte/_protos/workflow/queue_service_pb2.pyi +5 -2
  16. flyte/_protos/workflow/run_definition_pb2.py +61 -61
  17. flyte/_protos/workflow/run_definition_pb2.pyi +4 -2
  18. flyte/_protos/workflow/run_service_pb2.py +20 -24
  19. flyte/_protos/workflow/run_service_pb2.pyi +2 -6
  20. flyte/_protos/workflow/state_service_pb2.py +36 -28
  21. flyte/_protos/workflow/state_service_pb2.pyi +19 -15
  22. flyte/_protos/workflow/state_service_pb2_grpc.py +28 -28
  23. flyte/_protos/workflow/task_definition_pb2.py +28 -22
  24. flyte/_protos/workflow/task_definition_pb2.pyi +16 -4
  25. flyte/_protos/workflow/task_service_pb2.py +27 -11
  26. flyte/_protos/workflow/task_service_pb2.pyi +29 -1
  27. flyte/_protos/workflow/task_service_pb2_grpc.py +34 -0
  28. flyte/_run.py +6 -0
  29. flyte/_trace.py +0 -2
  30. flyte/_utils/__init__.py +4 -0
  31. flyte/_utils/org_discovery.py +26 -0
  32. flyte/_version.py +2 -2
  33. flyte/cli/_abort.py +4 -2
  34. flyte/cli/_common.py +8 -2
  35. flyte/cli/_create.py +4 -3
  36. flyte/cli/_deploy.py +15 -8
  37. flyte/cli/_get.py +13 -12
  38. flyte/cli/_run.py +1 -2
  39. flyte/cli/main.py +1 -1
  40. flyte/remote/__init__.py +2 -1
  41. flyte/remote/_client/_protocols.py +2 -0
  42. flyte/remote/_task.py +141 -9
  43. flyte/syncify/_api.py +1 -2
  44. flyte/types/_type_engine.py +83 -9
  45. flyte-0.2.0b13.dist-info/METADATA +249 -0
  46. {flyte-0.2.0b11.dist-info → flyte-0.2.0b13.dist-info}/RECORD +49 -46
  47. flyte-0.2.0b11.dist-info/METADATA +0 -181
  48. {flyte-0.2.0b11.dist-info → flyte-0.2.0b13.dist-info}/WHEEL +0 -0
  49. {flyte-0.2.0b11.dist-info → flyte-0.2.0b13.dist-info}/entry_points.txt +0 -0
  50. {flyte-0.2.0b11.dist-info → flyte-0.2.0b13.dist-info}/top_level.txt +0 -0
flyte/remote/_client/_protocols.py CHANGED
@@ -58,6 +58,8 @@ class TaskService(Protocol):
         self, request: task_service_pb2.GetTaskDetailsRequest
     ) -> task_service_pb2.GetTaskDetailsResponse: ...
 
+    async def ListTasks(self, request: task_service_pb2.ListTasksRequest) -> task_service_pb2.ListTasksResponse: ...
+
 
 class RunService(Protocol):
     async def CreateRun(self, request: run_service_pb2.CreateRunRequest) -> run_service_pb2.CreateRunResponse: ...
flyte/remote/_task.py CHANGED
@@ -3,27 +3,44 @@ from __future__ import annotations
 import functools
 from dataclasses import dataclass
 from threading import Lock
-from typing import Any, Callable, Coroutine, Dict, Literal, Optional, Union
+from typing import Any, AsyncIterator, Callable, Coroutine, Dict, Iterator, Literal, Optional, Tuple, Union
 
 import rich.repr
+from google.protobuf import timestamp
 
 import flyte
 import flyte.errors
 from flyte._context import internal_ctx
-from flyte._initialize import get_client, get_common_config
+from flyte._initialize import ensure_client, get_client, get_common_config
+from flyte._logging import logger
+from flyte._protos.common import list_pb2
 from flyte._protos.workflow import task_definition_pb2, task_service_pb2
 from flyte.models import NativeInterface
 from flyte.syncify import syncify
 
 
+def _repr_task_metadata(metadata: task_definition_pb2.TaskMetadata) -> rich.repr.Result:
+    """
+    Rich representation of the task metadata.
+    """
+    if metadata.deployed_by:
+        if metadata.deployed_by.user:
+            yield "deployed_by", f"User: {metadata.deployed_by.user.spec.email}"
+        else:
+            yield "deployed_by", f"App: {metadata.deployed_by.application.spec.name}"
+    yield "short_name", metadata.short_name
+    yield "deployed_at", timestamp.to_datetime(metadata.deployed_at)
+    yield "environment_name", metadata.environment_name
+
+
 class LazyEntity:
     """
     Fetches the entity when the entity is called or when the entity is retrieved.
     The entity is derived from RemoteEntity so that it behaves exactly like the mimicked entity.
     """
 
-    def __init__(self, name: str, getter: Callable[..., Coroutine[Any, Any, Task]], *args, **kwargs):
-        self._task: Optional[Task] = None
+    def __init__(self, name: str, getter: Callable[..., Coroutine[Any, Any, TaskDetails]], *args, **kwargs):
+        self._task: Optional[TaskDetails] = None
         self._getter = getter
         self._name = name
         self._mutex = Lock()
@@ -33,7 +50,7 @@ class LazyEntity:
         return self._name
 
     @syncify
-    async def fetch(self) -> Task:
+    async def fetch(self) -> TaskDetails:
         """
         Forwards all other attributes to task, causing the task to be fetched!
         """
@@ -62,7 +79,7 @@ AutoVersioning = Literal["latest", "current"]
 
 
 @dataclass
-class Task:
+class TaskDetails:
     pb2: task_definition_pb2.TaskDetails
 
     @classmethod
@@ -87,10 +104,19 @@ class Task:
         if version is None and auto_version not in ["latest", "current"]:
             raise ValueError("auto_version must be either 'latest' or 'current'.")
 
-        async def deferred_get(_version: str | None, _auto_version: AutoVersioning | None) -> Task:
+        async def deferred_get(_version: str | None, _auto_version: AutoVersioning | None) -> TaskDetails:
             if _version is None:
                 if _auto_version == "latest":
-                    raise NotImplementedError("auto_version=latest is not yet implemented.")
+                    tasks = []
+                    async for x in Task.listall.aio(
+                        by_task_name=name,
+                        sort_by=("created_at", "desc"),
+                        limit=1,
+                    ):
+                        tasks.append(x)
+                    if not tasks:
+                        raise flyte.errors.ReferenceTaskError(f"Task {name} not found.")
+                    _version = tasks[0].version
                 elif _auto_version == "current":
                     ctx = flyte.ctx()
                     if ctx is None:
@@ -205,7 +231,7 @@ class Task:
         env: Optional[Dict[str, str]] = None,
         secrets: Optional[flyte.SecretRequest] = None,
         **kwargs: Any,
-    ) -> Task:
+    ) -> TaskDetails:
         raise NotImplementedError
 
     def __rich_repr__(self) -> rich.repr.Result:
@@ -223,5 +249,111 @@ class Task:
         yield "resources", self.resources
 
 
+@dataclass
+class Task:
+    pb2: task_definition_pb2.Task
+
+    def __init__(self, pb2: task_definition_pb2.Task):
+        self.pb2 = pb2
+
+    @property
+    def name(self) -> str:
+        """
+        The name of the task.
+        """
+        return self.pb2.task_id.name
+
+    @property
+    def version(self) -> str:
+        """
+        The version of the task.
+        """
+        return self.pb2.task_id.version
+
+    @classmethod
+    def get(cls, name: str, version: str | None = None, auto_version: AutoVersioning | None = None) -> LazyEntity:
+        """
+        Get a task by its ID or name. If both are provided, the ID will take precedence.
+
+        Either version or auto_version are required parameters.
+
+        :param name: The name of the task.
+        :param version: The version of the task.
+        :param auto_version: If set to "latest", the latest-by-time ordered from now, version of the task will be used.
+          If set to "current", the version will be derived from the callee tasks context. This is useful if you are
+          deploying all environments with the same version. If auto_version is current, you can only access the task from
+          within a task context.
+        """
+        return TaskDetails.get(name, version=version, auto_version=auto_version)
+
+    @syncify
+    @classmethod
+    async def listall(
+        cls,
+        by_task_name: str | None = None,
+        sort_by: Tuple[str, Literal["asc", "desc"]] | None = None,
+        limit: int = 100,
+    ) -> Union[AsyncIterator[Task], Iterator[Task]]:
+        """
+        Get all runs for the current project and domain.
+
+        :param by_task_name: If provided, only tasks with this name will be returned.
+        :param sort_by: The sorting criteria for the project list, in the format (field, order).
+        :param limit: The maximum number of tasks to return.
+        :return: An iterator of runs.
+        """
+        ensure_client()
+        token = None
+        sort_by = sort_by or ("created_at", "asc")
+        sort_pb2 = list_pb2.Sort(
+            key=sort_by[0], direction=list_pb2.Sort.ASCENDING if sort_by[1] == "asc" else list_pb2.Sort.DESCENDING
+        )
+        cfg = get_common_config()
+        filters = []
+        if by_task_name:
+            filters.append(
+                list_pb2.Filter(
+                    function=list_pb2.Filter.Function.EQUAL,
+                    field="name",
+                    values=[by_task_name],
+                )
+            )
+        original_limit = limit
+        if limit > cfg.batch_size:
+            limit = cfg.batch_size
+        retrieved = 0
+        while True:
+            resp = await get_client().task_service.ListTasks(
+                task_service_pb2.ListTasksRequest(
+                    org=cfg.org,
+                    request=list_pb2.ListRequest(
+                        sort_by=sort_pb2,
+                        filters=filters,
+                        limit=limit,
+                        token=token,
+                    ),
+                )
+            )
+            token = resp.token
+            for t in resp.tasks:
+                retrieved += 1
+                yield cls(t)
+            if not token or retrieved >= original_limit:
+                logger.debug(f"Retrieved {retrieved} tasks, stopping iteration.")
+                break
+
+    def __rich_repr__(self) -> rich.repr.Result:
+        """
+        Rich representation of the task.
+        """
+        yield "project", self.pb2.task_id.project
+        yield "domain", self.pb2.task_id.domain
+        yield "name", self.pb2.task_id.name
+        yield "version", self.pb2.task_id.version
+        yield "short_name", self.pb2.metadata.short_name
+        for t in _repr_task_metadata(self.pb2.metadata):
+            yield t
+
+
 if __name__ == "__main__":
     tk = Task.get(name="example_task")
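The new remote-task surface added above can be exercised roughly as follows. This is a minimal sketch: it assumes `Task` is re-exported from `flyte.remote` (per the `flyte/remote/__init__.py` change listed above) and that a task named `example_task` has already been deployed.

```python
import flyte
from flyte.remote import Task  # assumes the re-export added in flyte/remote/__init__.py

flyte.init_from_config("config.yaml")

# Resolve the most recently created version of a task by name (auto_version="latest").
details = Task.get(name="example_task", auto_version="latest").fetch()

# Page through registered tasks, newest first, up to 10 entries.
for task in Task.listall(by_task_name="example_task", sort_by=("created_at", "desc"), limit=10):
    print(task.name, task.version)
```

Because `fetch` and `listall` are wrapped with `syncify`, they can be called synchronously as shown, or awaited via their `.aio` form from async code.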
flyte/syncify/_api.py CHANGED
@@ -50,7 +50,7 @@ class SyncGenFunction(Protocol[P, R_co]):
 
 class _BackgroundLoop:
     """
-    A background event loop that runs in a separate thread and used the the Syncify decorator to run asynchronous
+    A background event loop that runs in a separate thread and used the `Syncify` decorator to run asynchronous
     functions or methods synchronously.
     """
 
@@ -141,7 +141,6 @@ class _BackgroundLoop:
                 aio_future: asyncio.Future[R_co] = asyncio.wrap_future(future)
                 # await for the future to complete and yield its result
                 v = await aio_future
-                print(f"Yielding value: {v}")
                 yield v
             except StopAsyncIteration:
                 break
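The `syncify` decorator behind this background loop is what lets methods like `Task.listall` be called synchronously while their original coroutines stay reachable through `.aio` (as in `Task.listall.aio(...)` above). A minimal sketch of the calling convention, assuming the decorator accepts a free coroutine function the same way it does the methods in this diff; `fetch_greeting` is illustrative, not part of the SDK:

```python
import asyncio

from flyte.syncify import syncify


@syncify
async def fetch_greeting(name: str) -> str:
    # Illustrative coroutine; stands in for any async SDK call.
    await asyncio.sleep(0.1)
    return f"Hello, {name}"


# From synchronous code: runs on the background event loop and returns directly.
greeting = fetch_greeting("World")


async def main() -> None:
    # From async code: the original coroutine is still available via .aio.
    print(await fetch_greeting.aio("World"))


if __name__ == "__main__":
    asyncio.run(main())
```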
flyte/types/_type_engine.py CHANGED
@@ -35,6 +35,7 @@ from mashumaro.jsonschema.models import Context, JSONSchema
 from mashumaro.jsonschema.plugins import BasePlugin
 from mashumaro.jsonschema.schema import Instance
 from mashumaro.mixins.json import DataClassJSONMixin
+from pydantic import BaseModel
 from typing_extensions import Annotated, get_args, get_origin
 
 import flyte.storage as storage
@@ -352,6 +353,79 @@ class RestrictedTypeTransformer(TypeTransformer[T], ABC):
         raise RestrictedTypeError(f"Transformer for type {self.python_type} is restricted currently")
 
 
+class PydanticTransformer(TypeTransformer[BaseModel]):
+    def __init__(self):
+        super().__init__("Pydantic Transformer", BaseModel, enable_type_assertions=False)
+
+    def get_literal_type(self, t: Type[BaseModel]) -> LiteralType:
+        schema = t.model_json_schema()
+        fields = t.__annotations__.items()
+
+        literal_type = {}
+        for name, python_type in fields:
+            try:
+                literal_type[name] = TypeEngine.to_literal_type(python_type)
+            except Exception as e:
+                logger.warning(
+                    "Field {} of type {} cannot be converted to a literal type. Error: {}".format(name, python_type, e)
+                )
+
+        # This is for attribute access in FlytePropeller.
+        ts = TypeStructure(tag="", dataclass_type=literal_type)
+
+        meta_struct = struct_pb2.Struct()
+        meta_struct.update(
+            {
+                CACHE_KEY_METADATA: {
+                    SERIALIZATION_FORMAT: MESSAGEPACK,
+                }
+            }
+        )
+
+        return LiteralType(
+            simple=SimpleType.STRUCT,
+            metadata=schema,
+            structure=ts,
+            annotation=TypeAnnotation(annotations=meta_struct),
+        )
+
+    async def to_literal(
+        self,
+        python_val: BaseModel,
+        python_type: Type[BaseModel],
+        expected: LiteralType,
+    ) -> Literal:
+        json_str = python_val.model_dump_json()
+        dict_obj = json.loads(json_str)
+        msgpack_bytes = msgpack.dumps(dict_obj)
+        return Literal(scalar=Scalar(binary=Binary(value=msgpack_bytes, tag=MESSAGEPACK)))
+
+    def from_binary_idl(self, binary_idl_object: Binary, expected_python_type: Type[BaseModel]) -> BaseModel:
+        if binary_idl_object.tag == MESSAGEPACK:
+            dict_obj = msgpack.loads(binary_idl_object.value, strict_map_key=False)
+            json_str = json.dumps(dict_obj)
+            python_val = expected_python_type.model_validate_json(
+                json_data=json_str, strict=False, context={"deserialize": True}
+            )
+            return python_val
+        else:
+            raise TypeTransformerFailedError(f"Unsupported binary format: `{binary_idl_object.tag}`")
+
+    async def to_python_value(self, lv: Literal, expected_python_type: Type[BaseModel]) -> BaseModel:
+        """
+        There are two kinds of literal values to handle:
+        1. Protobuf Structs (from the UI)
+        2. Binary scalars (from other sources)
+        We need to account for both cases accordingly.
+        """
+        if lv and lv.HasField("scalar") and lv.scalar.HasField("binary"):
+            return self.from_binary_idl(lv.scalar.binary, expected_python_type)  # type: ignore
+
+        json_str = _json_format.MessageToJson(lv.scalar.generic)
+        python_val = expected_python_type.model_validate_json(json_str, strict=False, context={"deserialize": True})
+        return python_val
+
+
 class PydanticSchemaPlugin(BasePlugin):
     """This allows us to generate proper schemas for Pydantic models."""
 
@@ -562,9 +636,8 @@ class DataclassTransformer(TypeTransformer[object]):
 
         # This is for attribute access in FlytePropeller.
         ts = TypeStructure(tag="", dataclass_type=literal_type)
-        from google.protobuf.struct_pb2 import Struct
 
-        meta_struct = Struct()
+        meta_struct = struct_pb2.Struct()
         meta_struct.update(
             {
                 CACHE_KEY_METADATA: {
@@ -627,7 +700,7 @@ class DataclassTransformer(TypeTransformer[object]):
             field.type = self._get_origin_type_in_annotation(cast(type, field.type))
         return python_type
 
-    async def from_binary_idl(self, binary_idl_object: Binary, expected_python_type: Type[T]) -> T:
+    def from_binary_idl(self, binary_idl_object: Binary, expected_python_type: Type[T]) -> T:
         if binary_idl_object.tag == MESSAGEPACK:
             if issubclass(expected_python_type, DataClassJSONMixin):
                 dict_obj = msgpack.loads(binary_idl_object.value, strict_map_key=False)
@@ -652,9 +725,10 @@ class DataclassTransformer(TypeTransformer[object]):
                 "user defined datatypes in Flytekit"
             )
 
-        if lv.scalar and lv.scalar.binary:
-            return await self.from_binary_idl(lv.scalar.binary, expected_python_type)  # type: ignore
+        if lv.HasField("scalar") and lv.scalar.HasField("binary"):
+            return self.from_binary_idl(lv.scalar.binary, expected_python_type)  # type: ignore
 
+        # todo: revisit this, it should always be a binary in v2.
         json_str = _json_format.MessageToJson(lv.scalar.generic)
 
         # The `from_json` function is provided from mashumaro's `DataClassJSONMixin`.
@@ -970,11 +1044,10 @@ class TypeEngine(typing.Generic[T]):
             return cls._REGISTRY[python_type.__origin__]
 
         # Handling UnionType specially - PEP 604
-        if sys.version_info >= (3, 10):
-            import types
+        import types
 
-            if isinstance(python_type, types.UnionType):
-                return cls._REGISTRY[types.UnionType]
+        if isinstance(python_type, types.UnionType):
+            return cls._REGISTRY[types.UnionType]
 
         if python_type in cls._REGISTRY:
             return cls._REGISTRY[python_type]
@@ -2041,6 +2114,7 @@ def _register_default_type_transformers():
     TypeEngine.register(DictTransformer())
     TypeEngine.register(EnumTransformer())
    TypeEngine.register(ProtobufTransformer())
+    TypeEngine.register(PydanticTransformer())
 
     # inner type is. Also unsupported are typing's Tuples. Even though you can look inside them, Flyte's type system
     # doesn't support these currently.
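With `PydanticTransformer` registered, Pydantic models should round-trip through the type engine much like dataclasses: dumped to MessagePack-tagged binary literals on the way in and re-validated with `model_validate_json` on the way out. A hedged sketch of what that enables at the task level; the `Order` model and the environment name are illustrative, not taken from this diff:

```python
import flyte
from pydantic import BaseModel

env = flyte.TaskEnvironment(name="pydantic_demo")


class Order(BaseModel):
    sku: str
    quantity: int


@env.task
async def total_items(orders: list[Order]) -> int:
    # Inputs arrive as validated Order instances via the registered transformer.
    return sum(o.quantity for o in orders)


if __name__ == "__main__":
    flyte.init()
    flyte.run(total_items, [Order(sku="widget", quantity=2), Order(sku="gadget", quantity=3)])
```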
flyte-0.2.0b13.dist-info/METADATA ADDED
@@ -0,0 +1,249 @@
+Metadata-Version: 2.4
+Name: flyte
+Version: 0.2.0b13
+Summary: Add your description here
+Author-email: Ketan Umare <kumare3@users.noreply.github.com>
+Requires-Python: >=3.10
+Description-Content-Type: text/markdown
+Requires-Dist: aiofiles>=24.1.0
+Requires-Dist: click>=8.2.1
+Requires-Dist: flyteidl==1.15.4b0
+Requires-Dist: cloudpickle>=3.1.1
+Requires-Dist: fsspec>=2025.3.0
+Requires-Dist: grpcio>=1.71.0
+Requires-Dist: obstore>=0.6.0
+Requires-Dist: protobuf>=6.30.1
+Requires-Dist: pydantic>=2.10.6
+Requires-Dist: pyyaml>=6.0.2
+Requires-Dist: rich-click>=1.8.9
+Requires-Dist: httpx>=0.28.1
+Requires-Dist: keyring>=25.6.0
+Requires-Dist: msgpack>=1.1.0
+Requires-Dist: toml>=0.10.2
+Requires-Dist: async-lru>=2.0.5
+Requires-Dist: mashumaro
+Requires-Dist: dataclasses_json
+
+# Flyte v2 SDK
+
+The next-generation SDK for Flyte.
+
+[![Publish Python Packages and Official Images](https://github.com/unionai/unionv2/actions/workflows/publish.yml/badge.svg)](https://github.com/unionai/unionv2/actions/workflows/publish.yml)
+
+## Quick start
+
+1. Run `uv venv`, and `source .venv/bin/activate` to create a new virtual environment.
+2. Install the latest version of the SDK by running the following:
+
+   ```
+   uv pip install --no-cache --prerelease=allow --upgrade flyte
+   ```
+
+3. Create the config and point it to your cluster by running the following:
+
+   ```
+   flyte create config --endpoint <your-endpoint-url> --project <your-project> --domain <your-domain>
+   ```
+
+   This will create a `config.yaml` file in the current directory which will be referenced ahead of any other `config.yaml`s found in your system.
+
+4. Now you can run code with the CLI:
+
+   ```
+   flyte run <path-to-your-script> <task-name>
+   ```
+
+## Hello World Example
+
+```python
+# hello_world.py
+
+import flyte
+
+env = flyte.TaskEnvironment(name="hello_world")
+
+
+@env.task
+async def say_hello(data: str) -> str:
+    return f"Hello {data}"
+
+
+@env.task
+async def say_hello_nested(data: str) -> str:
+    return await say_hello.override(resources=flyte.Resources(gpu="A100 80G:4")).execute(data)
+
+
+if __name__ == "__main__":
+    import asyncio
+
+    # to run pure python - the SDK is not invoked at all
+    asyncio.run(say_hello_nested("test"))
+
+    # To run locally, but run through the type system etc.
+    flyte.init()
+    flyte.run(say_hello_nested, "World")
+
+    # To run remote
+    flyte.init(endpoint="dns:///localhost:8090", insecure=True)
+    flyte.run(say_hello_nested, "World")
+    # It is possible to switch between local and remote by keeping init pointed at an endpoint and changing the context during run
+    flyte.with_runcontext(mode="local").run(...)  # this will run locally only
+
+    # To run remote with a config
+    flyte.init_from_config("config.yaml")
+```
+
+## CLI
+
+All commands can be run from any root directory.
+For example, you do not need an `__init__.py` in the directory.
+If you run from a directory, the code will automatically package and upload all modules that are imported.
+You can change this behavior by using the `--copy-style` flag.
+
+```bash
+flyte run hello_world.py say_hello --data "World"
+```
+
+To follow the logs for the `a0` action, you can use the `--follow` flag:
+
+```bash
+flyte run --follow hello_world.py say_hello --data "World"
+```
+
+Note that `--follow` has to be used with the `run` command.
+
+Change copy style:
+
+```bash
+flyte run --copy-style all hello_world.py say_hello_nested --data "World"
+```
+
+## Building Images
+
+```python
+import flyte
+
+env = flyte.TaskEnvironment(
+    name="hello_world",
+    image=flyte.Image.auto().with_apt_packages(...).with_pip_packages(...),
+)
+
+```
+
+## Deploying
+
+```bash
+flyte deploy hello_world.py say_hello_nested
+```
+
+## Get information
+
+Get all runs:
+
+```bash
+flyte get run
+```
+
+Get a specific run:
+
+```bash
+flyte get run "run-name"
+```
+
+Get all actions for a run:
+
+```bash
+flyte get actions "run-name"
+```
+
+Get a specific action for a run:
+
+```bash
+flyte get action "run-name" "action-name"
+```
+
+Get action logs:
+
+```bash
+flyte get logs "run-name" ["action-name"]
+```
+
+This defaults to the root action if no action name is provided.
+
+## Running workflows programmatically in Python
+
+You can run any workflow programmatically within the script module using `__main__`:
+
+```python
+if __name__ == "__main__":
+    import flyte
+    flyte.init()
+    flyte.run(say_hello_nested, "World")
+```
+
+## Running scripts with dependencies specified in metadata headers
+
+You can also run a `uv` script with dependencies specified in metadata headers
+and build the task image automatically based on those dependencies:
+
+```python
+# container_images.py
+
+# /// script
+# dependencies = [
+#    "polars",
+#    "flyte>=0.2.0b12"
+# ]
+# ///
+
+import polars as pl
+
+import flyte
+
+
+env = flyte.TaskEnvironment(
+    name="polars_image",
+    image=flyte.Image.from_uv_script(
+        __file__,
+        name="flyte",
+        registry="ghcr.io/<your-username>",
+        arch=("linux/amd64", "linux/arm64"),
+    ).with_apt_packages("ca-certificates"),
+)
+
+
+@env.task
+async def create_dataframe() -> pl.DataFrame:
+    return pl.DataFrame(
+        {"name": ["Alice", "Bob", "Charlie"], "age": [25, 32, 37], "city": ["New York", "Paris", "Berlin"]}
+    )
+
+
+@env.task
+async def print_dataframe(dataframe: pl.DataFrame):
+    print(dataframe)
+
+
+@env.task
+async def workflow():
+    df = await create_dataframe()
+    await print_dataframe(df)
+
+
+if __name__ == "__main__":
+    flyte.init_from_config("config.yaml")
+    run = flyte.run(workflow)
+    print(run.name)
+    print(run.url)
+    run.wait(run)
+```
+
+When you execute
+
+```bash
+uv run container_images.py
+```
+
+`uv` will automatically update the local virtual environment with the dependencies specified in the metadata headers.
+Then, Flyte will build the task image using those dependencies and push it to the registry you specify.
+Flyte will then deploy the tasks to the cluster where the system will pull the image and run the tasks using it.