fal 1.46.0__py3-none-any.whl → 1.46.1__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release: this version of fal might be problematic.

fal/_fal_version.py CHANGED
@@ -28,7 +28,7 @@ version_tuple: VERSION_TUPLE
 commit_id: COMMIT_ID
 __commit_id__: COMMIT_ID
 
-__version__ = version = '1.46.0'
-__version_tuple__ = version_tuple = (1, 46, 0)
+__version__ = version = '1.46.1'
+__version_tuple__ = version_tuple = (1, 46, 1)
 
 __commit_id__ = commit_id = None
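
Note: the only functional change in this file is the version bump. As an illustrative aside (not part of this diff), the installed release can be confirmed at runtime with the standard metadata API:

# Illustrative check, not fal code: confirm the installed release.
from importlib.metadata import version  # stdlib since Python 3.8

print(version("fal"))  # expect '1.46.1' after upgrading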
fal/distributed/utils.py CHANGED
@@ -8,7 +8,7 @@ import warnings
 from collections.abc import Callable
 from io import BytesIO
 from pathlib import Path
-from typing import TYPE_CHECKING, Any, Optional, Union
+from typing import TYPE_CHECKING, Any, Dict, Optional, Tuple, Union
 
 if TYPE_CHECKING:
     import torch.multiprocessing as mp
@@ -271,8 +271,8 @@ def wrap_distributed_worker(
     master_port: int,
     timeout: int,
     cwd: Optional[Union[str, Path]],
-    args: tuple[Any],
-    kwargs: dict[str, Any],
+    args: Tuple[Any],
+    kwargs: Dict[str, Any],
 ) -> None:
     """
     Worker function for distributed training or inference.
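
This hunk trades the builtin generics tuple[Any] and dict[str, Any] for typing.Tuple and typing.Dict. Subscripting builtin types only became valid at runtime with PEP 585 in Python 3.9, and this wheel still declares Requires-Python: >=3.8, so the old annotations would raise TypeError at import time under 3.8. A minimal sketch of the failure mode, with a hypothetical function name:

from typing import Any, Dict, Tuple

# On Python 3.8 the following definition fails at import time, because
# signature annotations are evaluated when the def statement runs:
#     def wrap(args: tuple[Any], kwargs: dict[str, Any]) -> None: ...
#     TypeError: 'type' object is not subscriptable

# The typing aliases behave identically and work on 3.8:
def wrap(args: Tuple[Any, ...], kwargs: Dict[str, Any]) -> None:
    """Hypothetical stand-in for wrap_distributed_worker's signature."""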
fal/distributed/worker.py CHANGED
@@ -1,3 +1,5 @@
+from __future__ import annotations
+
 import asyncio
 import inspect
 import os
@@ -9,6 +11,7 @@ import traceback
 import warnings
 from collections.abc import AsyncIterator, Callable, Coroutine
 from concurrent.futures import Future
+from functools import partial
 from pathlib import Path
 from typing import TYPE_CHECKING, Any, Optional, Union
 
@@ -70,7 +73,7 @@ class DistributedWorker:
     # Public API
 
     @property
-    def device(self) -> "torch.device":
+    def device(self) -> torch.device:
         """
         :return: The device for the current worker.
         """
@@ -162,6 +165,16 @@ class DistributedWorker:
         self.loop.call_soon_threadsafe(self.loop.stop)
         self.thread.join(timeout=timeout)
 
+    async def _run_sync_in_executor(
+        self,
+        func: Callable[..., Any],
+        *args: Any,
+        **kwargs: Any,
+    ) -> Any:
+        """Run a synchronous function in the executor."""
+        loop = asyncio.get_running_loop()
+        return await loop.run_in_executor(None, partial(func, *args, **kwargs))
+
     def run_in_worker(
         self,
         func: Callable[..., Any],
@@ -174,7 +187,9 @@
         if inspect.iscoroutinefunction(func):
             coro = func(*args, **kwargs)
         else:
-            coro = asyncio.to_thread(func, *args, **kwargs)
+            # Using in place of asyncio.to_thread
+            # since it's not available in Python 3.8
+            coro = self._run_sync_in_executor(func, *args, **kwargs)
 
         return self.submit(coro)
 
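asyncio.to_thread was added in Python 3.9, so the new _run_sync_in_executor helper reproduces it for 3.8 with loop.run_in_executor plus functools.partial; the partial is needed because run_in_executor forwards only positional arguments. A standalone sketch of the same pattern, with hypothetical names:

import asyncio
import time
from functools import partial

def blocking_work(x: int, *, scale: int = 1) -> int:
    time.sleep(0.1)  # stands in for blocking sync work
    return x * scale

async def main() -> None:
    loop = asyncio.get_running_loop()
    # partial binds the keyword argument; None selects the
    # event loop's default ThreadPoolExecutor
    result = await loop.run_in_executor(None, partial(blocking_work, 6, scale=7))
    print(result)  # 42

asyncio.run(main())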
@@ -206,8 +221,8 @@
     A class to launch and manage distributed workers.
     """
 
-    zmq_socket: Optional["Socket[Any]"]
-    context: Optional["mp.ProcessContext"]
+    zmq_socket: Optional[Socket[Any]]
+    context: Optional[mp.ProcessContext]
     keepalive_timer: Optional[KeepAliveTimer]
 
     def __init__(
@@ -296,7 +311,7 @@
             f"Distributed processes are not running. Errors: {self.gather_errors()}"
         )
 
-    def get_zmq_socket(self) -> "Socket[Any]":
+    def get_zmq_socket(self) -> Socket[Any]:
         """
         Returns a ZeroMQ socket of the specified type.
         :param socket_type: The type of the ZeroMQ socket.
@@ -750,7 +765,7 @@
         assert rank == b"0", "Expected response from worker with rank 0"
         return distributed_deserialize(response)
 
-    async def __aenter__(self) -> "DistributedRunner":
+    async def __aenter__(self) -> DistributedRunner:
         """
         Enter the context manager.
         :return: The DistributedRunner instance.
fal-1.46.0.dist-info/METADATA → fal-1.46.1.dist-info/METADATA CHANGED
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: fal
-Version: 1.46.0
+Version: 1.46.1
 Summary: fal is an easy-to-use Serverless Python Framework
 Author: Features & Labels <support@fal.ai>
 Requires-Python: >=3.8
@@ -53,6 +53,7 @@ Requires-Dist: pytest-xdist; extra == "test"
 Requires-Dist: pytest-timeout; extra == "test"
 Requires-Dist: flaky; extra == "test"
 Requires-Dist: boto3; extra == "test"
+Requires-Dist: numpy; extra == "test"
 Provides-Extra: dev
 Requires-Dist: fal[docs,test]; extra == "dev"
 Requires-Dist: openapi-python-client<1,>=0.14.1; extra == "dev"
fal-1.46.0.dist-info/RECORD → fal-1.46.1.dist-info/RECORD CHANGED
@@ -1,6 +1,6 @@
 fal/__init__.py,sha256=wXs1G0gSc7ZK60-bHe-B2m0l_sA6TrFk4BxY0tMoLe8,784
 fal/__main__.py,sha256=4JMK66Wj4uLZTKbF-sT3LAxOsr6buig77PmOkJCRRxw,83
-fal/_fal_version.py,sha256=HSh8n5t3dT12FiqCeiBwPGGXi_9ERts2UyWRnVzpW5U,706
+fal/_fal_version.py,sha256=r7bDyLX7CxvhE0UPuE3CER_HLAjx7gKNT1btlAQda-M,706
 fal/_serialization.py,sha256=npXNsFJ5G7jzBeBIyVMH01Ww34mGY4XWhHpRbSrTtnQ,7598
 fal/_version.py,sha256=1BbTFnucNC_6ldKJ_ZoC722_UkW4S9aDBSW9L0fkKAw,2315
 fal/app.py,sha256=izlqdqfUtNxKW6oJerJYbVaUIZwxlO-DTtqFrcTleyI,28039
@@ -50,8 +50,8 @@ fal/console/__init__.py,sha256=lGPUuTqIM9IKTa1cyyA-MA2iZJKVHp2YydsITZVlb6g,148
 fal/console/icons.py,sha256=De9MfFaSkO2Lqfne13n3PrYfTXJVIzYZVqYn5BWsdrA,108
 fal/console/ux.py,sha256=KMQs3UHQvVHDxDQQqlot-WskVKoMQXOE3jiVkkfmIMY,356
 fal/distributed/__init__.py,sha256=jnLKLzTnF0GurjJnt7xD1-wB-oKWALZcZ1ZK0c6WI4g,134
-fal/distributed/utils.py,sha256=8Wl1WytTtZQw4Ja-95D2vM40P26eYZNZe-fI2KVX7kU,12648
-fal/distributed/worker.py,sha256=TReJBU1P1iXarKXVI8NvlXt7stMWxdvWIozJ2lZTF5w,26704
+fal/distributed/utils.py,sha256=b7zgYg2O9L_kMGup8Mc5x7QRx_gPwwELcmVAXo-oGlU,12661
+fal/distributed/worker.py,sha256=vrxqg4BeGICo_VNvTZWsFPMgGdkZ0WUVt56ljg2T3Zs,27195
 fal/exceptions/__init__.py,sha256=4hq-sy3dMZs6YxvbO_p6R-bK4Tzf7ubvA8AyUR0GVPo,349
 fal/exceptions/_base.py,sha256=PLSOHQs7lftDaRYDHKz9xkB6orQvynmUTi4DrdPnYMs,1797
 fal/exceptions/_cuda.py,sha256=L3qvDNaPTthp95IFSBI6pMt3YbRfn1H0inQkj_7NKF8,1719
@@ -152,8 +152,8 @@ openapi_fal_rest/models/workflow_node_type.py,sha256=-FzyeY2bxcNmizKbJI8joG7byRi
 openapi_fal_rest/models/workflow_schema.py,sha256=4K5gsv9u9pxx2ItkffoyHeNjBBYf6ur5bN4m_zePZNY,2019
 openapi_fal_rest/models/workflow_schema_input.py,sha256=2OkOXWHTNsCXHWS6EGDFzcJKkW5FIap-2gfO233EvZQ,1191
 openapi_fal_rest/models/workflow_schema_output.py,sha256=EblwSPAGfWfYVWw_WSSaBzQVju296is9o28rMBAd0mc,1196
-fal-1.46.0.dist-info/METADATA,sha256=bOq6HL6Lzc-yql9GTNRaTHcqlwMpuNyULT90kR4Rzo8,4185
-fal-1.46.0.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
-fal-1.46.0.dist-info/entry_points.txt,sha256=32zwTUC1U1E7nSTIGCoANQOQ3I7-qHG5wI6gsVz5pNU,37
-fal-1.46.0.dist-info/top_level.txt,sha256=r257X1L57oJL8_lM0tRrfGuXFwm66i1huwQygbpLmHw,21
-fal-1.46.0.dist-info/RECORD,,
+fal-1.46.1.dist-info/METADATA,sha256=wv_JLs46I6tnsjgaYQWkgWyAOc4lPNv_QyYdjqz8IFU,4223
+fal-1.46.1.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
+fal-1.46.1.dist-info/entry_points.txt,sha256=32zwTUC1U1E7nSTIGCoANQOQ3I7-qHG5wI6gsVz5pNU,37
+fal-1.46.1.dist-info/top_level.txt,sha256=r257X1L57oJL8_lM0tRrfGuXFwm66i1huwQygbpLmHw,21
+fal-1.46.1.dist-info/RECORD,,