flyte 0.1.0__py3-none-any.whl → 0.2.0a0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of flyte might be problematic. Click here for more details.
- flyte/__init__.py +78 -2
- flyte/_bin/__init__.py +0 -0
- flyte/_bin/runtime.py +152 -0
- flyte/_build.py +26 -0
- flyte/_cache/__init__.py +12 -0
- flyte/_cache/cache.py +145 -0
- flyte/_cache/defaults.py +9 -0
- flyte/_cache/policy_function_body.py +42 -0
- flyte/_code_bundle/__init__.py +8 -0
- flyte/_code_bundle/_ignore.py +113 -0
- flyte/_code_bundle/_packaging.py +187 -0
- flyte/_code_bundle/_utils.py +323 -0
- flyte/_code_bundle/bundle.py +209 -0
- flyte/_context.py +152 -0
- flyte/_deploy.py +243 -0
- flyte/_doc.py +29 -0
- flyte/_docstring.py +32 -0
- flyte/_environment.py +84 -0
- flyte/_excepthook.py +37 -0
- flyte/_group.py +32 -0
- flyte/_hash.py +23 -0
- flyte/_image.py +762 -0
- flyte/_initialize.py +492 -0
- flyte/_interface.py +84 -0
- flyte/_internal/__init__.py +3 -0
- flyte/_internal/controllers/__init__.py +128 -0
- flyte/_internal/controllers/_local_controller.py +193 -0
- flyte/_internal/controllers/_trace.py +41 -0
- flyte/_internal/controllers/remote/__init__.py +60 -0
- flyte/_internal/controllers/remote/_action.py +146 -0
- flyte/_internal/controllers/remote/_client.py +47 -0
- flyte/_internal/controllers/remote/_controller.py +494 -0
- flyte/_internal/controllers/remote/_core.py +410 -0
- flyte/_internal/controllers/remote/_informer.py +361 -0
- flyte/_internal/controllers/remote/_service_protocol.py +50 -0
- flyte/_internal/imagebuild/__init__.py +11 -0
- flyte/_internal/imagebuild/docker_builder.py +427 -0
- flyte/_internal/imagebuild/image_builder.py +246 -0
- flyte/_internal/imagebuild/remote_builder.py +0 -0
- flyte/_internal/resolvers/__init__.py +0 -0
- flyte/_internal/resolvers/_task_module.py +54 -0
- flyte/_internal/resolvers/common.py +31 -0
- flyte/_internal/resolvers/default.py +28 -0
- flyte/_internal/runtime/__init__.py +0 -0
- flyte/_internal/runtime/convert.py +342 -0
- flyte/_internal/runtime/entrypoints.py +135 -0
- flyte/_internal/runtime/io.py +136 -0
- flyte/_internal/runtime/resources_serde.py +138 -0
- flyte/_internal/runtime/task_serde.py +330 -0
- flyte/_internal/runtime/taskrunner.py +191 -0
- flyte/_internal/runtime/types_serde.py +54 -0
- flyte/_logging.py +135 -0
- flyte/_map.py +215 -0
- flyte/_pod.py +19 -0
- flyte/_protos/__init__.py +0 -0
- flyte/_protos/common/authorization_pb2.py +66 -0
- flyte/_protos/common/authorization_pb2.pyi +108 -0
- flyte/_protos/common/authorization_pb2_grpc.py +4 -0
- flyte/_protos/common/identifier_pb2.py +71 -0
- flyte/_protos/common/identifier_pb2.pyi +82 -0
- flyte/_protos/common/identifier_pb2_grpc.py +4 -0
- flyte/_protos/common/identity_pb2.py +48 -0
- flyte/_protos/common/identity_pb2.pyi +72 -0
- flyte/_protos/common/identity_pb2_grpc.py +4 -0
- flyte/_protos/common/list_pb2.py +36 -0
- flyte/_protos/common/list_pb2.pyi +71 -0
- flyte/_protos/common/list_pb2_grpc.py +4 -0
- flyte/_protos/common/policy_pb2.py +37 -0
- flyte/_protos/common/policy_pb2.pyi +27 -0
- flyte/_protos/common/policy_pb2_grpc.py +4 -0
- flyte/_protos/common/role_pb2.py +37 -0
- flyte/_protos/common/role_pb2.pyi +53 -0
- flyte/_protos/common/role_pb2_grpc.py +4 -0
- flyte/_protos/common/runtime_version_pb2.py +28 -0
- flyte/_protos/common/runtime_version_pb2.pyi +24 -0
- flyte/_protos/common/runtime_version_pb2_grpc.py +4 -0
- flyte/_protos/logs/dataplane/payload_pb2.py +100 -0
- flyte/_protos/logs/dataplane/payload_pb2.pyi +177 -0
- flyte/_protos/logs/dataplane/payload_pb2_grpc.py +4 -0
- flyte/_protos/secret/definition_pb2.py +49 -0
- flyte/_protos/secret/definition_pb2.pyi +93 -0
- flyte/_protos/secret/definition_pb2_grpc.py +4 -0
- flyte/_protos/secret/payload_pb2.py +62 -0
- flyte/_protos/secret/payload_pb2.pyi +94 -0
- flyte/_protos/secret/payload_pb2_grpc.py +4 -0
- flyte/_protos/secret/secret_pb2.py +38 -0
- flyte/_protos/secret/secret_pb2.pyi +6 -0
- flyte/_protos/secret/secret_pb2_grpc.py +198 -0
- flyte/_protos/secret/secret_pb2_grpc_grpc.py +198 -0
- flyte/_protos/validate/validate/validate_pb2.py +76 -0
- flyte/_protos/workflow/common_pb2.py +27 -0
- flyte/_protos/workflow/common_pb2.pyi +14 -0
- flyte/_protos/workflow/common_pb2_grpc.py +4 -0
- flyte/_protos/workflow/environment_pb2.py +29 -0
- flyte/_protos/workflow/environment_pb2.pyi +12 -0
- flyte/_protos/workflow/environment_pb2_grpc.py +4 -0
- flyte/_protos/workflow/node_execution_service_pb2.py +26 -0
- flyte/_protos/workflow/node_execution_service_pb2.pyi +4 -0
- flyte/_protos/workflow/node_execution_service_pb2_grpc.py +32 -0
- flyte/_protos/workflow/queue_service_pb2.py +105 -0
- flyte/_protos/workflow/queue_service_pb2.pyi +146 -0
- flyte/_protos/workflow/queue_service_pb2_grpc.py +172 -0
- flyte/_protos/workflow/run_definition_pb2.py +128 -0
- flyte/_protos/workflow/run_definition_pb2.pyi +314 -0
- flyte/_protos/workflow/run_definition_pb2_grpc.py +4 -0
- flyte/_protos/workflow/run_logs_service_pb2.py +41 -0
- flyte/_protos/workflow/run_logs_service_pb2.pyi +28 -0
- flyte/_protos/workflow/run_logs_service_pb2_grpc.py +69 -0
- flyte/_protos/workflow/run_service_pb2.py +129 -0
- flyte/_protos/workflow/run_service_pb2.pyi +171 -0
- flyte/_protos/workflow/run_service_pb2_grpc.py +412 -0
- flyte/_protos/workflow/state_service_pb2.py +66 -0
- flyte/_protos/workflow/state_service_pb2.pyi +75 -0
- flyte/_protos/workflow/state_service_pb2_grpc.py +138 -0
- flyte/_protos/workflow/task_definition_pb2.py +79 -0
- flyte/_protos/workflow/task_definition_pb2.pyi +81 -0
- flyte/_protos/workflow/task_definition_pb2_grpc.py +4 -0
- flyte/_protos/workflow/task_service_pb2.py +60 -0
- flyte/_protos/workflow/task_service_pb2.pyi +59 -0
- flyte/_protos/workflow/task_service_pb2_grpc.py +138 -0
- flyte/_resources.py +226 -0
- flyte/_retry.py +32 -0
- flyte/_reusable_environment.py +25 -0
- flyte/_run.py +482 -0
- flyte/_secret.py +61 -0
- flyte/_task.py +449 -0
- flyte/_task_environment.py +183 -0
- flyte/_timeout.py +47 -0
- flyte/_tools.py +27 -0
- flyte/_trace.py +120 -0
- flyte/_utils/__init__.py +26 -0
- flyte/_utils/asyn.py +119 -0
- flyte/_utils/async_cache.py +139 -0
- flyte/_utils/coro_management.py +23 -0
- flyte/_utils/file_handling.py +72 -0
- flyte/_utils/helpers.py +134 -0
- flyte/_utils/lazy_module.py +54 -0
- flyte/_utils/org_discovery.py +57 -0
- flyte/_utils/uv_script_parser.py +49 -0
- flyte/_version.py +21 -0
- flyte/cli/__init__.py +3 -0
- flyte/cli/_abort.py +28 -0
- flyte/cli/_common.py +337 -0
- flyte/cli/_create.py +145 -0
- flyte/cli/_delete.py +23 -0
- flyte/cli/_deploy.py +152 -0
- flyte/cli/_gen.py +163 -0
- flyte/cli/_get.py +310 -0
- flyte/cli/_params.py +538 -0
- flyte/cli/_run.py +231 -0
- flyte/cli/main.py +166 -0
- flyte/config/__init__.py +3 -0
- flyte/config/_config.py +216 -0
- flyte/config/_internal.py +64 -0
- flyte/config/_reader.py +207 -0
- flyte/connectors/__init__.py +0 -0
- flyte/errors.py +172 -0
- flyte/extras/__init__.py +5 -0
- flyte/extras/_container.py +263 -0
- flyte/io/__init__.py +27 -0
- flyte/io/_dir.py +448 -0
- flyte/io/_file.py +467 -0
- flyte/io/_structured_dataset/__init__.py +129 -0
- flyte/io/_structured_dataset/basic_dfs.py +219 -0
- flyte/io/_structured_dataset/structured_dataset.py +1061 -0
- flyte/models.py +391 -0
- flyte/remote/__init__.py +26 -0
- flyte/remote/_client/__init__.py +0 -0
- flyte/remote/_client/_protocols.py +133 -0
- flyte/remote/_client/auth/__init__.py +12 -0
- flyte/remote/_client/auth/_auth_utils.py +14 -0
- flyte/remote/_client/auth/_authenticators/__init__.py +0 -0
- flyte/remote/_client/auth/_authenticators/base.py +397 -0
- flyte/remote/_client/auth/_authenticators/client_credentials.py +73 -0
- flyte/remote/_client/auth/_authenticators/device_code.py +118 -0
- flyte/remote/_client/auth/_authenticators/external_command.py +79 -0
- flyte/remote/_client/auth/_authenticators/factory.py +200 -0
- flyte/remote/_client/auth/_authenticators/pkce.py +516 -0
- flyte/remote/_client/auth/_channel.py +215 -0
- flyte/remote/_client/auth/_client_config.py +83 -0
- flyte/remote/_client/auth/_default_html.py +32 -0
- flyte/remote/_client/auth/_grpc_utils/__init__.py +0 -0
- flyte/remote/_client/auth/_grpc_utils/auth_interceptor.py +288 -0
- flyte/remote/_client/auth/_grpc_utils/default_metadata_interceptor.py +151 -0
- flyte/remote/_client/auth/_keyring.py +143 -0
- flyte/remote/_client/auth/_token_client.py +260 -0
- flyte/remote/_client/auth/errors.py +16 -0
- flyte/remote/_client/controlplane.py +95 -0
- flyte/remote/_console.py +18 -0
- flyte/remote/_data.py +159 -0
- flyte/remote/_logs.py +176 -0
- flyte/remote/_project.py +85 -0
- flyte/remote/_run.py +970 -0
- flyte/remote/_secret.py +132 -0
- flyte/remote/_task.py +391 -0
- flyte/report/__init__.py +3 -0
- flyte/report/_report.py +178 -0
- flyte/report/_template.html +124 -0
- flyte/storage/__init__.py +29 -0
- flyte/storage/_config.py +233 -0
- flyte/storage/_remote_fs.py +34 -0
- flyte/storage/_storage.py +271 -0
- flyte/storage/_utils.py +5 -0
- flyte/syncify/__init__.py +56 -0
- flyte/syncify/_api.py +371 -0
- flyte/types/__init__.py +36 -0
- flyte/types/_interface.py +40 -0
- flyte/types/_pickle.py +118 -0
- flyte/types/_renderer.py +162 -0
- flyte/types/_string_literals.py +120 -0
- flyte/types/_type_engine.py +2287 -0
- flyte/types/_utils.py +80 -0
- flyte-0.2.0a0.dist-info/METADATA +249 -0
- flyte-0.2.0a0.dist-info/RECORD +218 -0
- {flyte-0.1.0.dist-info → flyte-0.2.0a0.dist-info}/WHEEL +2 -1
- flyte-0.2.0a0.dist-info/entry_points.txt +3 -0
- flyte-0.2.0a0.dist-info/top_level.txt +1 -0
- flyte-0.1.0.dist-info/METADATA +0 -6
- flyte-0.1.0.dist-info/RECORD +0 -5
|
@@ -0,0 +1,56 @@
|
|
|
1
|
+
"""
# Syncify Module
This module provides the `syncify` decorator and the `Syncify` class.
The decorator can be used to convert asynchronous functions or methods into synchronous ones.
This is useful for integrating async code into synchronous contexts.

Every asynchronous function or method wrapped with `syncify` can be called synchronously using the
parenthesis `()` operator, or asynchronously using the `.aio()` method.

Example::

```python
from flyte.syncify import syncify

@syncify
async def async_function(x: str) -> str:
    return f"Hello, Async World {x}!"


# now you can call it synchronously
result = async_function("Async World")  # Note: no .aio() needed for sync calls
print(result)
# Output: Hello, Async World Async World!

# or call it asynchronously
async def main():
    result = await async_function.aio("World")  # Note the use of .aio() for async calls
    print(result)
```

## Creating a Syncify Instance
```python
from flyte.syncify import Syncify

syncer = Syncify("my_syncer")

# Now you can use `syncer` to decorate your async functions or methods

```

## How does it work?
The Syncify class wraps asynchronous functions, classmethods, instance methods, and static methods to
provide a synchronous interface. The wrapped methods are always executed in the context of a background loop,
whether they are called synchronously or asynchronously. This allows for seamless integration of async code, as
certain async libraries capture the event loop. An example is grpc.aio, which captures the event loop.
In such a case, the Syncify class ensures that the async function is executed in the context of the background loop.

To use it correctly with grpc.aio, you should wrap every grpc.aio channel creation, and client invocation
with the same `Syncify` instance. This ensures that the async code runs in the correct event loop context.
"""

from flyte.syncify._api import Syncify

# Module-level default instance: all functions decorated with `@syncify` share
# one background event loop (and thus one background thread).
syncify = Syncify()

__all__ = ["Syncify", "syncify"]
|
flyte/syncify/_api.py
ADDED
|
@@ -0,0 +1,371 @@
|
|
|
1
|
+
from __future__ import annotations
|
|
2
|
+
|
|
3
|
+
import asyncio
|
|
4
|
+
import atexit
|
|
5
|
+
import concurrent.futures
|
|
6
|
+
import functools
|
|
7
|
+
import inspect
|
|
8
|
+
import logging
|
|
9
|
+
import threading
|
|
10
|
+
from typing import (
|
|
11
|
+
Any,
|
|
12
|
+
AsyncIterator,
|
|
13
|
+
Awaitable,
|
|
14
|
+
Callable,
|
|
15
|
+
Coroutine,
|
|
16
|
+
Iterator,
|
|
17
|
+
ParamSpec,
|
|
18
|
+
Protocol,
|
|
19
|
+
TypeVar,
|
|
20
|
+
Union,
|
|
21
|
+
cast,
|
|
22
|
+
overload,
|
|
23
|
+
)
|
|
24
|
+
|
|
25
|
+
from flyte._logging import logger
|
|
26
|
+
|
|
27
|
+
P = ParamSpec("P")
|
|
28
|
+
R_co = TypeVar("R_co", covariant=True)
|
|
29
|
+
T = TypeVar("T")
|
|
30
|
+
|
|
31
|
+
|
|
32
|
+
class SyncFunction(Protocol[P, R_co]):
    """
    Structural type for an async function that has been wrapped by `Syncify`.

    Calling the wrapper directly (``wrapped(...)``) runs the underlying coroutine
    on the background loop and blocks for its result; calling ``wrapped.aio(...)``
    returns an awaitable for use from async code.
    """

    def __call__(self, *args: Any, **kwargs: Any) -> R_co: ...

    def aio(self, *args: Any, **kwargs: Any) -> Awaitable[R_co]: ...
|
|
41
|
+
|
|
42
|
+
|
|
43
|
+
class SyncGenFunction(Protocol[P, R_co]):
    """
    Structural type for an async *generator* function that has been wrapped by `Syncify`.

    Calling the wrapper directly (``wrapped(...)``) yields items synchronously, pulling
    each one from the async generator running on the background loop; ``wrapped.aio(...)``
    returns an async iterator for use from async code.
    """

    def __call__(self, *args: Any, **kwargs: Any) -> Iterator[R_co]: ...

    def aio(self, *args: Any, **kwargs: Any) -> AsyncIterator[R_co]: ...
|
|
52
|
+
|
|
53
|
+
|
|
54
|
+
class _BackgroundLoop:
    """
    A background event loop that runs in a separate daemon thread, used by the `Syncify`
    decorator to run asynchronous functions or methods synchronously.

    All coroutines submitted through this class execute on the single loop owned by this
    instance, so libraries that capture the running event loop (e.g. grpc.aio) always see
    the same loop regardless of which caller thread submitted the work.
    """

    def __init__(self, name: str):
        # The loop is created here but driven by the dedicated daemon thread below;
        # daemon=True means the thread will not block interpreter shutdown.
        self.loop = asyncio.new_event_loop()
        self.thread = threading.Thread(name=name, target=self._run, daemon=True)
        self.thread.start()
        # Best-effort orderly shutdown of the loop at interpreter exit.
        atexit.register(self.stop)

    def _run(self):
        # Thread target: bind the loop to this thread and run it until stop() is called.
        asyncio.set_event_loop(self.loop)
        self.loop.run_forever()

    def stop(self):
        # stop the loop and wait briefly for thread to exit
        self.loop.call_soon_threadsafe(self.loop.stop)
        self.thread.join(timeout=1)

    def is_in_loop(self) -> bool:
        """
        Check if the current thread is the background loop thread.
        """
        # If the current thread is not the background loop thread, return False
        if threading.current_thread() != self.thread:
            return False

        if not self.thread.is_alive():
            # If the thread is not alive, we cannot be in the loop
            return False

        # Lets get the current event loop and check if it matches the background loop
        loop = None
        try:
            loop = asyncio.get_running_loop()
        except RuntimeError:
            # No running loop in this thread; `loop` stays None and the comparison fails.
            pass

        return loop == self.loop

    def iterate_in_loop_sync(self, async_gen: AsyncIterator[R_co]) -> Iterator[R_co]:
        """
        Synchronous generator that drains `async_gen` by scheduling each `__anext__()`
        on the background loop and blocking on the resulting future.
        """
        # Create an iterator that pulls items from the async generator.
        # Calling this from the loop thread itself would deadlock (the blocking
        # future.result() would starve the loop), hence the assertion.
        assert self.thread.name != threading.current_thread().name, (
            f"Cannot run coroutine in the same thread {self.thread.name}"
        )
        while True:
            try:
                # use __anext__() and cast to Coroutine so mypy is happy
                future: concurrent.futures.Future[R_co] = asyncio.run_coroutine_threadsafe(
                    cast(Coroutine[Any, Any, R_co], async_gen.__anext__()),
                    self.loop,
                )
                yield future.result()
            except (StopAsyncIteration, StopIteration):
                break
            except Exception as e:
                if logger.getEffectiveLevel() > logging.DEBUG:
                    # If the log level is not DEBUG, we will remove the extra stack frames to avoid confusion for the
                    # user
                    # This is because the stack trace will include the Syncify wrapper and the background loop thread
                    tb = e.__traceback__
                    while tb and tb.tb_next:
                        # NOTE(review): comparing co_name to the empty string never matches, so this
                        # walk always reaches the innermost frame. The constant may have been an
                        # angle-bracketed name (e.g. "<...>") lost in rendering — TODO confirm intent.
                        if tb.tb_frame.f_code.co_name == "":
                            break
                        tb = tb.tb_next
                    raise e.with_traceback(tb)
                # If the log level is DEBUG, we will keep the extra stack frames to help with debugging
                raise e

    def call_in_loop_sync(self, coro: Coroutine[Any, Any, R_co]) -> R_co | Iterator[R_co]:
        """
        Run the given coroutine in the background loop and return its result.

        If the coroutine resolves to an async iterator, it is converted into a
        synchronous iterator that pulls items from the background loop on demand.
        """
        future: concurrent.futures.Future[R_co | AsyncIterator[R_co]] = asyncio.run_coroutine_threadsafe(
            coro, self.loop
        )
        result = future.result()
        if result is not None and hasattr(result, "__aiter__"):
            # If the result is an async iterator, we need to convert it to a sync iterator
            return cast(Iterator[R_co], self.iterate_in_loop_sync(cast(AsyncIterator[R_co], result)))
        # Otherwise, just return the result
        return result

    async def iterate_in_loop(self, async_gen: AsyncIterator[R_co]) -> AsyncIterator[R_co]:
        """
        Run the given async iterator in the background loop and yield its results.
        """
        if self.is_in_loop():
            # If we are already in the background loop, just return the async iterator
            async for r in async_gen:
                yield r
            return

        while True:
            try:
                # same replacement here for the async path
                future: concurrent.futures.Future[R_co] = asyncio.run_coroutine_threadsafe(
                    cast(Coroutine[Any, Any, R_co], async_gen.__anext__()),
                    self.loop,
                )
                # Wrap the future in an asyncio Future to yield it in an async context
                aio_future: asyncio.Future[R_co] = asyncio.wrap_future(future)
                # await for the future to complete and yield its result
                v = await aio_future
                yield v
            except StopAsyncIteration:
                break
            except Exception as e:
                if logger.getEffectiveLevel() > logging.DEBUG:
                    # If the log level is not DEBUG, we will remove the extra stack frames to avoid confusion for the
                    # user.
                    # This is because the stack trace will include the Syncify wrapper and the background loop thread
                    tb = e.__traceback__
                    while tb and tb.tb_next:
                        # NOTE(review): see iterate_in_loop_sync — the "" comparison never matches;
                        # likely a rendering artifact of an angle-bracketed name. TODO confirm.
                        if tb.tb_frame.f_code.co_name == "":
                            break
                        tb = tb.tb_next
                    raise e.with_traceback(tb)
                # If the log level is DEBUG, we will keep the extra stack frames to help with debugging
                raise e

    async def aio(self, coro: Coroutine[Any, Any, R_co]) -> R_co:
        """
        Run the given coroutine in the background loop and return its result.
        """
        if self.is_in_loop():
            # If we are already in the background loop, just run the coroutine
            return await coro
        try:
            # Otherwise, run it in the background loop and wait for the result
            future: concurrent.futures.Future[R_co] = asyncio.run_coroutine_threadsafe(coro, self.loop)
            # Wrap the future in an asyncio Future to await it in an async context
            aio_future: asyncio.Future[R_co] = asyncio.wrap_future(future)
            # await for the future to complete and return its result
            return await aio_future
        except Exception as e:
            if logger.getEffectiveLevel() > logging.DEBUG:
                # If the log level is not DEBUG, we will remove the extra stack frames to avoid confusion for the user
                # This is because the stack trace will include the Syncify wrapper and the background loop thread
                tb = e.__traceback__
                while tb and tb.tb_next:
                    # NOTE(review): see iterate_in_loop_sync — the "" comparison never matches. TODO confirm.
                    if tb.tb_frame.f_code.co_name == "":
                        break
                    tb = tb.tb_next
                raise e.with_traceback(tb)
            # If the log level is DEBUG, we will keep the extra stack frames to help with debugging
            raise e
|
|
203
|
+
|
|
204
|
+
|
|
205
|
+
class _SyncWrapper:
    """
    A wrapper class that the Syncify decorator uses to convert asynchronous functions or methods into synchronous ones.

    Instances are callable (blocking, runs on the background loop) and expose `.aio()` for
    async callers. The descriptor `__get__` makes the wrapper work as an instance method,
    classmethod, or staticmethod on a class.
    """

    def __init__(
        self,
        fn: Any,
        bg_loop: _BackgroundLoop,
        underlying_obj: Any = None,
    ):
        # fn: the raw async function (possibly unbound); underlying_obj: the original
        # classmethod/staticmethod object, kept so __get__ can re-bind it to an owner.
        self.fn = fn
        self._bg_loop = bg_loop
        self._underlying_obj = underlying_obj

    def __get__(self, instance: Any, owner: Any) -> Any:
        """
        This method is called when the wrapper is accessed as a method of a class instance.
        :param instance: the instance the attribute was accessed through, or None for class access
        :param owner: the class through which the attribute was accessed
        :return: a new _SyncWrapper bound appropriately
        """
        fn: Any = self.fn
        if instance is not None:
            # If we have an instance, we need to bind the method to the instance (for instance methods)
            fn = self.fn.__get__(instance, owner)

        if instance is None and owner is not None and self._underlying_obj is not None:
            # If we have an owner, we need to bind the method to the owner (for classmethods or staticmethods)
            fn = self._underlying_obj.__get__(None, owner)

        # Return a fresh wrapper around the bound callable; update_wrapper preserves
        # __name__/__doc__ etc. from the original function.
        wrapper = _SyncWrapper(fn, bg_loop=self._bg_loop, underlying_obj=self._underlying_obj)
        functools.update_wrapper(wrapper, self.fn)
        return wrapper

    def __call__(self, *args: Any, **kwargs: Any) -> Any:
        # Blocking entry point: runs the coroutine on the background loop and waits.
        if threading.current_thread().name == self._bg_loop.thread.name:
            # Blocking from inside the loop thread would starve the loop forever,
            # so fail fast with a clear message rather than hanging.
            raise AssertionError(
                f"Deadlock detected: blocking call used in syncify thread {self._bg_loop.thread.name} "
                f"when calling function {self.fn}, use .aio() if in an async call."
            )
        try:
            # bind method if needed
            coro_fn = self.fn

            if inspect.isasyncgenfunction(coro_fn):
                # Handle async iterator by converting to sync iterator
                async_gen = coro_fn(*args, **kwargs)
                return self._bg_loop.iterate_in_loop_sync(async_gen)
            else:
                return self._bg_loop.call_in_loop_sync(coro_fn(*args, **kwargs))
        except Exception as e:
            if logger.getEffectiveLevel() > logging.DEBUG:
                # If the log level is not DEBUG, we will remove the extra stack frames to avoid confusion for the user
                # This is because the stack trace will include the Syncify wrapper and the background loop thread
                tb = e.__traceback__
                while tb and tb.tb_next:
                    # Trim frames down to the first one inside the user's own function.
                    if tb.tb_frame.f_code.co_name == self.fn.__name__:
                        break
                    tb = tb.tb_next
                raise e.with_traceback(tb)
            # If the log level is DEBUG, we will keep the extra stack frames to help with debugging
            raise e

    def aio(self, *args: Any, **kwargs: Any) -> Any:
        # Async entry point: returns an awaitable/async-iterator that still executes
        # on the background loop (so loop-capturing libraries behave consistently).
        fn = self.fn

        try:
            if inspect.isasyncgenfunction(fn):
                # If the function is an async generator, we need to handle it differently
                async_iter = fn(*args, **kwargs)
                return self._bg_loop.iterate_in_loop(async_iter)
            else:
                # If we are already in the background loop, just return the coroutine
                coro = fn(*args, **kwargs)
                if hasattr(coro, "__aiter__"):
                    # If the coroutine is an async iterator, we need to handle it differently
                    return self._bg_loop.iterate_in_loop(coro)
                return self._bg_loop.aio(coro)
        except Exception as e:
            if logger.getEffectiveLevel() > logging.DEBUG:
                # If the log level is not DEBUG, we will remove the extra stack frames to avoid confusion for the user
                # This is because the stack trace will include the Syncify wrapper and the background loop thread
                tb = e.__traceback__
                while tb and tb.tb_next:
                    # Trim frames down to the first one inside the user's own function.
                    if tb.tb_frame.f_code.co_name == self.fn.__name__:
                        break
                    tb = tb.tb_next
                raise e.with_traceback(tb)
            # If the log level is DEBUG, we will keep the extra stack frames to help with debugging
            raise e
|
|
297
|
+
|
|
298
|
+
|
|
299
|
+
class Syncify:
    """
    A decorator to convert asynchronous functions or methods into synchronous ones.

    This is useful for integrating async code into synchronous contexts.

    Each instance owns one background event loop/thread; everything decorated by the
    same instance runs on that loop.

    Example::

    ```python
    syncer = Syncify()

    @syncer
    async def async_function(x: str) -> str:
        return f"Hello, Async World {x}!"


    # now you can call it synchronously
    result = async_function("Async World")
    print(result)
    # Output: Hello, Async World Async World!

    # or call it asynchronously
    async def main():
        result = await async_function.aio("World")
        print(result)
    ```

    """

    def __init__(self, name: str = "flyte_syncify"):
        # `name` becomes the background thread's name (useful in thread dumps).
        self._bg_loop = _BackgroundLoop(name=name)

    @overload
    def __call__(self, func: Callable[P, Awaitable[R_co]]) -> Any: ...

    # def __call__(self, func: Callable[P, Awaitable[R_co]]) -> SyncFunction[P, R_co]: ...

    @overload
    def __call__(self, func: Callable[P, Iterator[R_co] | AsyncIterator[R_co]]) -> SyncGenFunction[P, R_co]: ...

    # def __call__(self, func: Callable[[Type[T], *P.args, *P.kwargs], Awaitable[R_co]])
    # -> SyncFunction[[Type[T], *P.args, *P.kwargs], R_co]: ...
    @overload
    def __call__(self, func: classmethod) -> Union[SyncFunction[P, R_co], SyncGenFunction[P, R_co]]: ...

    @overload
    def __call__(self, func: staticmethod) -> staticmethod: ...

    def __call__(self, obj):
        # Dispatch on what kind of callable was decorated. Method-decorator order
        # matters for users: @syncify must sit *under* @classmethod/@staticmethod,
        # which is why those two are unwrapped via __func__ here.
        if isinstance(obj, classmethod):
            # Keep the original classmethod object so _SyncWrapper.__get__ can re-bind it.
            wrapper = _SyncWrapper(obj.__func__, bg_loop=self._bg_loop, underlying_obj=obj)
            functools.update_wrapper(wrapper, obj.__func__)
            return wrapper

        if isinstance(obj, staticmethod):
            fn = obj.__func__
            wrapper = _SyncWrapper(fn, bg_loop=self._bg_loop)
            functools.update_wrapper(wrapper, fn)
            # Re-wrap as staticmethod so class attribute access skips binding.
            return staticmethod(wrapper)

        if inspect.isasyncgenfunction(obj):
            wrapper = _SyncWrapper(obj, bg_loop=self._bg_loop)
            functools.update_wrapper(wrapper, obj)
            return cast(Callable[P, Iterator[R_co]], wrapper)

        if inspect.iscoroutinefunction(obj):
            wrapper = _SyncWrapper(obj, bg_loop=self._bg_loop)
            functools.update_wrapper(wrapper, obj)
            return wrapper

        raise TypeError(
            "Syncify can only be applied to async functions, async generators, async classmethods or staticmethods."
        )
|
flyte/types/__init__.py
ADDED
|
@@ -0,0 +1,36 @@
|
|
|
1
|
+
"""
# Flyte Type System

The Flyte type system provides a way to define, transform, and manipulate types in Flyte workflows.
Since the data flowing through Flyte has to often cross process, container and language boundaries, the type system
is designed to be serializable to a universal format that can be understood across different environments. This
universal format is based on Protocol Buffers. The types are called LiteralTypes and the runtime
representation of data is called Literals.

The type system includes:
- **TypeEngine**: The core engine that manages type transformations and serialization. This is the main entry point
for all the internal type transformations and serialization logic.
- **TypeTransformer**: A class that defines how to transform one type to another. This is extensible
allowing users to define custom types and transformations.
- **Renderable**: An interface for types that can be rendered as HTML, that can be outputted to a flyte.report.

It is always possible to bypass the type system and use the `FlytePickle` type to serialize any python object
into a pickle format. The pickle format is not human-readable, but can be passed between flyte tasks that are
written in python. The Pickled objects cannot be represented in the UI, and may be inefficient for large datasets.
"""

from ._interface import guess_interface
from ._pickle import FlytePickle
from ._renderer import Renderable
from ._string_literals import literal_string_repr
from ._type_engine import TypeEngine, TypeTransformer, TypeTransformerFailedError

__all__ = [
    "FlytePickle",
    "Renderable",
    "TypeEngine",
    "TypeTransformer",
    "TypeTransformerFailedError",
    "guess_interface",
    "literal_string_repr",
]
|
|
@@ -0,0 +1,40 @@
|
|
|
1
|
+
import inspect
|
|
2
|
+
from typing import Any, Dict, Iterable, Tuple, Type, cast
|
|
3
|
+
|
|
4
|
+
from flyteidl.core import interface_pb2, literals_pb2
|
|
5
|
+
|
|
6
|
+
from flyte._protos.workflow import common_pb2
|
|
7
|
+
from flyte.models import NativeInterface
|
|
8
|
+
|
|
9
|
+
|
|
10
|
+
def guess_interface(
    interface: interface_pb2.TypedInterface, default_inputs: Iterable[common_pb2.NamedParameter] | None = None
) -> NativeInterface:
    """
    Returns the interface of the task with guessed types, as types may not be present in current env.

    :param interface: the serialized typed interface to reconstruct Python types from
        (defensively treated as possibly None despite the annotation).
    :param default_inputs: optional named parameters whose `default` literal, when set,
        marks the corresponding input as having a default value.
    :return: a NativeInterface built from the guessed input/output Python types.
    """
    # Imported lazily to avoid a circular import between flyte.types and this module.
    import flyte.types

    if interface is None:
        # No interface at all -> empty inputs and outputs.
        return NativeInterface({}, {})

    # Collect the default literal for every parameter that actually carries one.
    default_input_literals: Dict[str, literals_pb2.Literal] = {}
    if default_inputs is not None:
        for param in default_inputs:
            if param.parameter.HasField("default"):
                default_input_literals[param.name] = param.parameter.default

    guessed_inputs: Dict[str, Tuple[Type[Any], Any] | Any] = {}
    if interface.inputs is not None and len(interface.inputs.variables) > 0:
        input_types = flyte.types.TypeEngine.guess_python_types(cast(dict, interface.inputs.variables))
        for name, t in input_types.items():
            if name not in default_input_literals:
                # No default available: mark as a required parameter.
                guessed_inputs[name] = (t, inspect.Parameter.empty)
            else:
                # NOTE(review): `NativeInterface.has_default` is presumably a sentinel
                # meaning "a default exists" — confirm against flyte.models.
                guessed_inputs[name] = (t, NativeInterface.has_default)

    guessed_outputs: Dict[str, Type[Any]] = {}
    if interface.outputs is not None and len(interface.outputs.variables) > 0:
        guessed_outputs = flyte.types.TypeEngine.guess_python_types(cast(dict, interface.outputs.variables))

    return NativeInterface.from_types(guessed_inputs, guessed_outputs, default_input_literals)
|
flyte/types/_pickle.py
ADDED
|
@@ -0,0 +1,118 @@
|
|
|
1
|
+
import hashlib
|
|
2
|
+
import os
|
|
3
|
+
import typing
|
|
4
|
+
from typing import Type
|
|
5
|
+
|
|
6
|
+
import aiofiles
|
|
7
|
+
import cloudpickle
|
|
8
|
+
from flyteidl.core import literals_pb2, types_pb2
|
|
9
|
+
|
|
10
|
+
import flyte.storage as storage
|
|
11
|
+
|
|
12
|
+
from ._type_engine import TypeEngine, TypeTransformer
|
|
13
|
+
|
|
14
|
+
T = typing.TypeVar("T")


class FlytePickle(typing.Generic[T]):
    """
    This type is only used by flytekit internally. User should not use this type.
    Any type that flyte can't recognize will become FlytePickle
    """

    @classmethod
    def python_type(cls) -> typing.Type:
        return type(None)

    @classmethod
    def __class_getitem__(cls, python_type: typing.Type) -> typing.Type:
        # FlytePickle[None] degenerates to the plain FlytePickle class.
        if python_type is None:
            return cls

        class _SpecificFormatClass(FlytePickle):
            # Get the type engine to see this as kind of a generic
            __origin__ = FlytePickle

            @classmethod
            def python_type(cls) -> typing.Type:
                return python_type

        return _SpecificFormatClass

    @classmethod
    async def to_pickle(cls, python_val: typing.Any) -> str:
        """Pickle ``python_val`` to a local file and upload it; return the remote URI."""
        payload = cloudpickle.dumps(python_val)
        # Content-addressed local filename: the md5 digest of the pickled bytes.
        digest = hashlib.md5(payload).hexdigest()

        local_uri = storage.get_random_local_path(file_path_or_file_name=digest)
        os.makedirs(os.path.dirname(local_uri), exist_ok=True)
        async with aiofiles.open(local_uri, "w+b") as sink:
            await sink.write(payload)

        return await storage.put(str(local_uri))

    @classmethod
    async def from_pickle(cls, uri: str) -> typing.Any:
        """Deserialize the pickle at ``uri``, downloading it first when it is remote."""
        if storage.is_remote(uri):
            tmp_path = storage.get_random_local_path()
            await storage.get(uri, str(tmp_path), False)
            uri = str(tmp_path)
        async with aiofiles.open(uri, "rb") as src:
            return cloudpickle.loads(await src.read())
|
|
66
|
+
|
|
67
|
+
|
|
68
|
+
class FlytePickleTransformer(TypeTransformer[FlytePickle]):
    """Transformer that round-trips arbitrary Python objects as single pickled blobs."""

    PYTHON_PICKLE_FORMAT = "PythonPickle"

    def __init__(self):
        super().__init__(name="FlytePickle", t=FlytePickle)

    def assert_type(self, t: Type[T], v: T):
        # Every type can serialize to pickle, so we don't need to check the type here.
        ...

    async def to_python_value(self, lv: literals_pb2.Literal, expected_python_type: Type[T]) -> T:
        """Download (if needed) and unpickle the blob referenced by the literal."""
        uri = lv.scalar.blob.uri
        return await FlytePickle.from_pickle(uri)

    async def to_literal(
        self,
        python_val: T,
        python_type: Type[T],
        expected: types_pb2.LiteralType,
    ) -> literals_pb2.Literal:
        """Pickle ``python_val``, upload it, and wrap the URI in a single-blob literal.

        Raises:
            AssertionError: if ``python_val`` is None (None cannot be pickled here).
        """
        if python_val is None:
            raise AssertionError("Cannot pickle None Value.")
        meta = literals_pb2.BlobMetadata(
            type=types_pb2.BlobType(
                format=self.PYTHON_PICKLE_FORMAT, dimensionality=types_pb2.BlobType.BlobDimensionality.SINGLE
            )
        )
        remote_path = await FlytePickle.to_pickle(python_val)
        return literals_pb2.Literal(scalar=literals_pb2.Scalar(blob=literals_pb2.Blob(metadata=meta, uri=remote_path)))

    def guess_python_type(self, literal_type: types_pb2.LiteralType) -> typing.Type[FlytePickle[typing.Any]]:
        """Map a single-dimensional PythonPickle blob type back to FlytePickle.

        Raises:
            ValueError: if ``literal_type`` is not a pickle blob type.
        """
        # In proto3 a sub-message accessor never returns None, so `blob is not None`
        # was always true; HasField is the actual presence check. (An absent blob
        # would still have failed the format comparison, so behavior is unchanged.)
        if (
            literal_type.HasField("blob")
            and literal_type.blob.dimensionality == types_pb2.BlobType.BlobDimensionality.SINGLE
            and literal_type.blob.format == FlytePickleTransformer.PYTHON_PICKLE_FORMAT
        ):
            return FlytePickle

        raise ValueError(f"Transformer {self} cannot reverse {literal_type}")

    def get_literal_type(self, t: Type[T]) -> types_pb2.LiteralType:
        """Return the blob LiteralType for pickles, recording the original class name."""
        lt = types_pb2.LiteralType(
            blob=types_pb2.BlobType(
                format=self.PYTHON_PICKLE_FORMAT, dimensionality=types_pb2.BlobType.BlobDimensionality.SINGLE
            )
        )
        # BUG FIX: `lt.metadata = {...}` raises AttributeError — protobuf forbids
        # direct assignment to a composite (google.protobuf.Struct) field.
        # Struct.update() is the supported way to set metadata keys.
        lt.metadata.update({"python_class_name": str(t)})
        return lt
|
|
116
|
+
|
|
117
|
+
|
|
118
|
+
# Module import side effect: register the pickle transformer so the type engine
# can fall back to it for otherwise-unrecognized Python types.
TypeEngine.register(FlytePickleTransformer())
|