fal 0.14.0__py3-none-any.whl → 0.15.2__py3-none-any.whl
This diff shows the changes between two publicly released versions of the package, as they appear in their public registry, and is provided for informational purposes only.
This version of fal has been flagged as a potentially problematic release.
- fal/__init__.py +1 -13
- fal/_serialization.py +151 -121
- fal/api.py +106 -61
- fal/app.py +25 -9
- fal/auth/__init__.py +2 -3
- fal/auth/auth0.py +4 -2
- fal/auth/local.py +2 -1
- fal/cli.py +10 -7
- fal/exceptions/__init__.py +3 -3
- fal/exceptions/_base.py +1 -12
- fal/exceptions/auth.py +2 -4
- fal/exceptions/handlers.py +8 -19
- fal/flags.py +0 -2
- fal/logging/isolate.py +4 -4
- fal/sdk.py +40 -5
- fal/sync.py +7 -3
- fal/toolkit/__init__.py +0 -2
- fal/toolkit/exceptions.py +0 -5
- fal/toolkit/file/file.py +61 -50
- fal/toolkit/file/providers/fal.py +20 -7
- fal/toolkit/file/providers/gcp.py +0 -2
- fal/toolkit/file/providers/r2.py +0 -2
- fal/toolkit/file/types.py +0 -4
- fal/toolkit/image/image.py +11 -15
- fal/toolkit/optimize.py +0 -3
- fal/toolkit/utils/download_utils.py +7 -17
- fal/workflows.py +9 -3
- fal-0.15.2.dist-info/METADATA +119 -0
- {fal-0.14.0.dist-info → fal-0.15.2.dist-info}/RECORD +50 -51
- {fal-0.14.0.dist-info → fal-0.15.2.dist-info}/WHEEL +2 -1
- fal-0.15.2.dist-info/entry_points.txt +2 -0
- fal-0.15.2.dist-info/top_level.txt +2 -0
- fal/env.py +0 -3
- fal/toolkit/mainify.py +0 -13
- fal-0.14.0.dist-info/METADATA +0 -89
- fal-0.14.0.dist-info/entry_points.txt +0 -4
fal/__init__.py
CHANGED

```diff
@@ -1,8 +1,7 @@
 from __future__ import annotations
 
 from fal import apps  # noqa: F401
-from fal.api import FalServerlessHost, LocalHost, cached
-from fal.api import function
+from fal.api import FalServerlessHost, LocalHost, cached, function
 from fal.api import function as isolated  # noqa: F401
 from fal.app import App, endpoint, realtime, wrap_app  # noqa: F401
 from fal.sdk import FalServerlessKeyCredentials
@@ -24,14 +23,3 @@ __all__ = [
     "FalServerlessKeyCredentials",
     "sync_dir",
 ]
-
-
-# NOTE: This makes `import fal.dbt` import the `dbt-fal` module and `import fal` import the `fal` module
-# NOTE: taken from dbt-core: https://github.com/dbt-labs/dbt-core/blob/ac539fd5cf325cfb5315339077d03399d575f570/core/dbt/adapters/__init__.py#L1-L7
-# N.B.
-# This will add to the package’s __path__ all subdirectories of directories on sys.path named after the package which effectively combines both modules into a single namespace (dbt.adapters)
-# The matching statement is in plugins/postgres/dbt/adapters/__init__.py
-
-from pkgutil import extend_path  # noqa: E402
-
-__path__ = extend_path(__path__, __name__)
```
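The block removed from the end of `fal/__init__.py` was the pkgutil namespace shim that let the separate `dbt-fal` distribution graft its modules onto the `fal` package. For context, a minimal sketch of that generic idiom (the pattern itself, not fal-specific code):

```python
# Contents of a package __init__.py using the pkgutil namespace pattern.
# Any distribution that ships a directory with the same package name and
# this same footer gets merged into one importable namespace.
from pkgutil import extend_path

# extend_path scans every entry on sys.path for a subdirectory named after
# this package and appends it to __path__, so `import <pkg>.<submodule>`
# can resolve from any of the merged directories.
__path__ = extend_path(__path__, __name__)
```

With the shim gone, `fal` behaves as a regular single-distribution package and no longer merges in directories shipped by other distributions.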
fal/_serialization.py
CHANGED

```diff
@@ -1,102 +1,71 @@
 from __future__ import annotations
 
-
-from
+import pickle
+from typing import Any, Callable
 
-import
-from dill import _dill
+import cloudpickle
 
-from fal.toolkit import mainify
 
-
-#
-
-
+def _register_pickle_by_value(name) -> None:
+    # cloudpickle.register_pickle_by_value wants an imported module object,
+    # but there is really no reason to go through that complication, as
+    # it might be prone to errors.
+    cloudpickle.cloudpickle._PICKLE_BY_VALUE_MODULES.add(name)
 
 
-
-
-    from pydantic.fields import ModelField
+def include_package_from_path(raw_path: str) -> None:
+    from pathlib import Path
 
-    return ModelField(**kwargs)
-
-
-@mainify
-def _pydantic_make_private_field(kwargs):
-    from pydantic.fields import ModelPrivateAttr
-
-    return ModelPrivateAttr(**kwargs)
-
-
-# this allows us to record all the "isolated" function and then mainify everything in
-# module they exist
-@wraps(_dill._locate_function)
-def by_value_locator(obj, pickler=None, og_locator=_dill._locate_function):
-    module_name = getattr(obj, "__module__", None)
-    if module_name is not None:
-        # If it is coming from the same module, directly allow
-        # it to be pickled.
-        if module_name in _MODULES:
-            return False
-
-        package_name, *_ = module_name.partition(".")
-        # If it is coming from the same package, then do the same.
-        if package_name in _PACKAGES:
-            return False
-
-    og_result = og_locator(obj, pickler)
-    return og_result
-
-
-_dill._locate_function = by_value_locator
-
-
-def include_packages_from_path(raw_path: str):
     path = Path(raw_path).resolve()
     parent = path
     while (parent.parent / "__init__.py").exists():
         parent = parent.parent
 
     if parent != path:
-
+        _register_pickle_by_value(parent.name)
 
 
-def
+def include_modules_from(obj: Any) -> None:
     module_name = getattr(obj, "__module__", None)
     if not module_name:
-        return
+        return
+
+    if "." in module_name:
+        # Just include the whole package
+        package_name, *_ = module_name.partition(".")
+        _register_pickle_by_value(package_name)
+        return
 
-    _MODULES.add(module_name)
     if module_name == "__main__":
         # When the module is __main__, we need to recursively go up the
         # tree to locate the actual package name.
         import __main__
 
-
+        include_package_from_path(__main__.__file__)
+        return
 
-
-
-
+    _register_pickle_by_value(module_name)
+
+
+def _register(cls: Any, func: Callable) -> None:
+    cloudpickle.Pickler.dispatch[cls] = func
 
 
-
-def patch_pydantic_field_serialization():
+def _patch_pydantic_field_serialization() -> None:
     # Cythonized pydantic fields can't be serialized automatically, so we are
     # have a special case handling for them that unpacks it to a dictionary
     # and then reloads it on the other side.
-
-
+    # https://github.com/ray-project/ray/blob/842bbcf4236e41f58d25058b0482cd05bfe9e4da/python/ray/_private/pydantic_compat.py#L80
     try:
-
+        from pydantic.fields import ModelField, ModelPrivateAttr
     except ImportError:
         return
 
-
-
-
-
-
-    args = {
+    def create_model_field(kwargs: dict) -> ModelField:
+        return ModelField(**kwargs)
+
+    def pickle_model_field(field: ModelField) -> tuple[Callable, tuple]:
+        kwargs = {
             "name": field.name,
             # outer_type_ is the original type for ModelFields,
             # while type_ can be updated later with the nested type
```
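Everything in the new module funnels through `_register_pickle_by_value`, which inserts a module name directly into cloudpickle's private `_PICKLE_BY_VALUE_MODULES` set rather than going through the public API, since `cloudpickle.register_pickle_by_value` insists on receiving an already-imported module object. For comparison, a minimal sketch of the public-API route (`mypackage` is a stand-in for any importable user package, not part of fal):

```python
import cloudpickle

import mypackage  # stand-in: any importable user package

# Public API: takes the imported module object, not its name. Functions
# from a registered module are serialized "by value" (their code and
# captured globals travel inside the payload), so the receiving process
# can deserialize them without having mypackage installed.
cloudpickle.register_pickle_by_value(mypackage)

payload = cloudpickle.dumps(mypackage.some_helper)  # shipped by value
```

By default cloudpickle serializes module-level functions by reference (just the module and qualified name), which is why user code has to be registered explicitly before being sent to a remote worker.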
```diff
@@ -110,92 +79,153 @@ def patch_pydantic_field_serialization():
             "alias": field.alias,
             "field_info": field.field_info,
         }
-
-
-
-def
-
-
-) ->
-
+
+        return create_model_field, (kwargs,)
+
+    def create_private_attr(kwargs: dict) -> ModelPrivateAttr:
+        return ModelPrivateAttr(**kwargs)
+
+    def pickle_private_attr(field: ModelPrivateAttr) -> tuple[Callable, tuple]:
+        kwargs = {
             "default": field.default,
             "default_factory": field.default_factory,
         }
-
+
+        return create_private_attr, (kwargs,)
+
+    _register(ModelField, pickle_model_field)
+    _register(ModelPrivateAttr, pickle_private_attr)
 
 
-
-
-#
-#
-#
+def _patch_pydantic_model_serialization() -> None:
+    # If user has created new pydantic models in his namespace, we will try to pickle
+    # those by value, which means recreating class skeleton, which will stumble upon
+    # __pydantic_parent_namespace__ in its __dict__ and it may contain modules that
+    # happened to be imported in the namespace but are not actually used, resulting
+    # in pickling errors. Unfortunately this also means that `model_rebuid()` might
+    # not work.
     try:
-        import pydantic
+        import pydantic
     except ImportError:
         return
 
-    pydantic
+    # https://github.com/pydantic/pydantic/pull/2573
+    if not hasattr(pydantic, "__version__") or pydantic.__version__.startswith("1."):
+        return
+
+    backup = "_original_extract_class_dict"
+    if getattr(cloudpickle.cloudpickle, backup, None):
+        return
+
+    original = cloudpickle.cloudpickle._extract_class_dict
+
+    def patched(cls):
+        attr_name = "__pydantic_parent_namespace__"
+        if issubclass(cls, pydantic.BaseModel) and getattr(cls, attr_name, None):
+            setattr(cls, attr_name, None)
+
+        return original(cls)
+
+    cloudpickle.cloudpickle._extract_class_dict = patched
+    setattr(cloudpickle.cloudpickle, backup, original)
+
+
+def _patch_lru_cache() -> None:
+    # https://github.com/cloudpipe/cloudpickle/issues/178
+    # https://github.com/uqfoundation/dill/blob/70f569b0dd268d2b1e85c0f300951b11f53c5d53/dill/_dill.py#L1429
 
+    from functools import _lru_cache_wrapper as LRUCacheType
+    from functools import lru_cache
 
-
-
-    # Adapting tblib.pickling_support.install for dill.
-    from types import TracebackType
+    def create_lru_cache(func: Callable, kwargs: dict) -> LRUCacheType:
+        return lru_cache(**kwargs)(func)
 
-
-
-
-
-
-
+    def pickle_lru_cache(obj: LRUCacheType) -> tuple[Callable, tuple]:
+        if hasattr(obj, "cache_parameters"):
+            params = obj.cache_parameters()
+            kwargs = {
+                "maxsize": params["maxsize"],
+                "typed": params["typed"],
+            }
+        else:
+            kwargs = {"maxsize": obj.cache_info().maxsize}
 
-
-    def save_traceback(pickler, obj):
-        unpickle, args = pickle_traceback(obj)
-        pickler.save_reduce(unpickle, args, obj=obj)
+        return create_lru_cache, (obj.__wrapped__, kwargs)
 
-
-    def save_exception(pickler, obj):
-        unpickle, args = pickle_exception(obj)
-        pickler.save_reduce(unpickle, args, obj=obj)
+    _register(LRUCacheType, pickle_lru_cache)
 
-
-
+
+def _patch_lock() -> None:
+    # https://github.com/uqfoundation/dill/blob/70f569b0dd268d2b1e85c0f300951b11f53c5d53/dill/_dill.py#L1310
+    from _thread import LockType
+    from threading import Lock
+
+    def create_lock(locked: bool) -> Lock:
+        lock = Lock()
+        if locked and not lock.acquire(False):
+            raise pickle.UnpicklingError("Cannot acquire lock")
+        return lock
+
+    def pickle_lock(obj: LockType) -> tuple[Callable, tuple]:
+        return create_lock, (obj.locked(),)
+
+    _register(LockType, pickle_lock)
+
+
+def _patch_rlock() -> None:
+    # https://github.com/uqfoundation/dill/blob/70f569b0dd268d2b1e85c0f300951b11f53c5d53/dill/_dill.py#L1317
+    from _thread import RLock as RLockType  # type: ignore[attr-defined]
+
+    def create_rlock(count: int, owner: int) -> RLockType:
+        lock = RLockType()
+        if owner is not None:
+            lock._acquire_restore((count, owner))  # type: ignore[attr-defined]
+        if owner and not lock._is_owned():  # type: ignore[attr-defined]
+            raise pickle.UnpicklingError("Cannot acquire lock")
+        return lock
+
+    def pickle_rlock(obj: RLockType) -> tuple[Callable, tuple]:
+        r = obj.__repr__()
+        count = int(r.split('count=')[1].split()[0].rstrip('>'))
+        owner = int(r.split('owner=')[1].split()[0])
+
+        return create_rlock, (count, owner)
+
+    _register(RLockType, pickle_rlock)
 
 
-@mainify
 def _patch_console_thread_locals() -> None:
-    # NOTE: we __sometimes__ might have to serialize these
     from rich.console import ConsoleThreadLocals
 
-
-
-
+    def create_locals(kwargs: dict) -> ConsoleThreadLocals:
+        return ConsoleThreadLocals(**kwargs)
+
+    def pickle_locals(obj: ConsoleThreadLocals) -> tuple[Callable, tuple]:
+        kwargs = {
             "theme_stack": obj.theme_stack,
             "buffer": obj.buffer,
             "buffer_index": obj.buffer_index,
         }
+        return create_locals, (kwargs, )
 
-
-        return ConsoleThreadLocals(**kwargs)
+    _register(ConsoleThreadLocals, pickle_locals)
 
-        pickler.save_reduce(unpickle, (args,), obj=obj)
 
+def _patch_exceptions() -> None:
+    # Support chained exceptions
+    from tblib.pickling_support import install
 
-
-def patch_dill():
-    import dill
+    install()
 
-    dill.settings["recurse"] = True
 
-
-
-
+def patch_pickle() -> None:
+    _patch_pydantic_field_serialization()
+    _patch_pydantic_model_serialization()
+    _patch_lru_cache()
+    _patch_lock()
+    _patch_rlock()
     _patch_console_thread_locals()
+    _patch_exceptions()
 
+    _register_pickle_by_value("fal")
 
-@mainify
-def patch_pickle():
-    from tblib import pickling_support
-
-    pickling_support.install()
```