xoscar 0.2.1__cp38-cp38-macosx_10_9_x86_64.whl → 0.3.2__cp38-cp38-macosx_10_9_x86_64.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of xoscar might be problematic. Click here for more details.

xoscar/__init__.py CHANGED
@@ -50,7 +50,6 @@ from ._utils import create_actor_ref
50
50
 
51
51
  # make sure methods are registered
52
52
  from .backends import indigen, test
53
- from .entrypoints import init_extension_entrypoints
54
53
  from . import _version
55
54
 
56
55
  del indigen, test
@@ -59,6 +58,3 @@ _T = TypeVar("_T")
59
58
  ActorRefType = Union[ActorRef, _T]
60
59
 
61
60
  __version__ = _version.get_versions()["version"]
62
-
63
- init_extension_entrypoints()
64
- del init_extension_entrypoints
Binary file
@@ -97,9 +97,9 @@ class DummyServer(Server):
97
97
  else tuple()
98
98
  )
99
99
 
100
- _address_to_instances: weakref.WeakValueDictionary[
101
- str, "DummyServer"
102
- ] = weakref.WeakValueDictionary()
100
+ _address_to_instances: weakref.WeakValueDictionary[str, "DummyServer"] = (
101
+ weakref.WeakValueDictionary()
102
+ )
103
103
  _channels: list[ChannelType]
104
104
  _tasks: list[asyncio.Task]
105
105
  scheme: str | None = "dummy"
@@ -26,7 +26,7 @@ from ..context import BaseActorContext
26
26
  from ..core import ActorRef, BufferRef, FileObjectRef, create_local_actor_ref
27
27
  from ..debug import debug_async_timeout, detect_cycle_send
28
28
  from ..errors import CannotCancelTask
29
- from ..utils import dataslots
29
+ from ..utils import dataslots, fix_all_zero_ip
30
30
  from .allocate_strategy import AddressSpecified, AllocateStrategy
31
31
  from .communication import Client, DummyClient, UCXClient
32
32
  from .core import ActorCaller
@@ -187,6 +187,7 @@ class IndigenActorContext(BaseActorContext):
187
187
 
188
188
  async def actor_ref(self, *args, **kwargs):
189
189
  actor_ref = create_actor_ref(*args, **kwargs)
190
+ connect_addr = actor_ref.address
190
191
  local_actor_ref = create_local_actor_ref(actor_ref.address, actor_ref.uid)
191
192
  if local_actor_ref is not None:
192
193
  return local_actor_ref
@@ -195,7 +196,10 @@ class IndigenActorContext(BaseActorContext):
195
196
  )
196
197
  future = await self._call(actor_ref.address, message, wait=False)
197
198
  result = await self._wait(future, actor_ref.address, message)
198
- return self._process_result_message(result)
199
+ res = self._process_result_message(result)
200
+ if res.address != connect_addr:
201
+ res.address = fix_all_zero_ip(res.address, connect_addr)
202
+ return res
199
203
 
200
204
  async def send(
201
205
  self,
xoscar/backends/core.py CHANGED
@@ -85,7 +85,8 @@ class ActorCaller:
85
85
  f"Remote server {client.dest_address} closed"
86
86
  ) from None
87
87
  future = self._client_to_message_futures[client].pop(message.message_id)
88
- future.set_result(message)
88
+ if not future.done():
89
+ future.set_result(message)
89
90
  except DeserializeMessageFailed as e:
90
91
  message_id = e.message_id
91
92
  future = self._client_to_message_futures[client].pop(message_id)
@@ -187,14 +187,19 @@ class MainActorPool(MainActorPoolBase):
187
187
  def start_pool_in_process():
188
188
  ctx = multiprocessing.get_context(method=start_method)
189
189
  status_queue = ctx.Queue()
190
+ main_pool_pid = os.getpid()
190
191
 
191
192
  with _suspend_init_main():
192
193
  process = ctx.Process(
193
194
  target=cls._start_sub_pool,
194
- args=(actor_pool_config, process_index, status_queue),
195
+ args=(
196
+ actor_pool_config,
197
+ process_index,
198
+ status_queue,
199
+ main_pool_pid,
200
+ ),
195
201
  name=f"IndigenActorPool{process_index}",
196
202
  )
197
- process.daemon = True
198
203
  process.start()
199
204
 
200
205
  # wait for sub actor pool to finish starting
@@ -209,15 +214,22 @@ class MainActorPool(MainActorPoolBase):
209
214
 
210
215
  @classmethod
211
216
  async def wait_sub_pools_ready(cls, create_pool_tasks: List[asyncio.Task]):
212
- processes = []
217
+ processes: list[multiprocessing.Process] = []
213
218
  ext_addresses = []
219
+ error = None
214
220
  for task in create_pool_tasks:
215
221
  process, status = await task
222
+ processes.append(process)
216
223
  if status.status == 1:
217
224
  # start sub pool failed
218
- raise status.error.with_traceback(status.traceback)
219
- processes.append(process)
220
- ext_addresses.append(status.external_addresses)
225
+ error = status.error.with_traceback(status.traceback)
226
+ else:
227
+ ext_addresses.append(status.external_addresses)
228
+ if error:
229
+ for p in processes:
230
+ # error happens, kill all subprocesses
231
+ p.kill()
232
+ raise error
221
233
  return processes, ext_addresses
222
234
 
223
235
  @classmethod
@@ -226,6 +238,7 @@ class MainActorPool(MainActorPoolBase):
226
238
  actor_config: ActorPoolConfig,
227
239
  process_index: int,
228
240
  status_queue: multiprocessing.Queue,
241
+ main_pool_pid: int,
229
242
  ):
230
243
  ensure_coverage()
231
244
 
@@ -259,7 +272,9 @@ class MainActorPool(MainActorPoolBase):
259
272
  else:
260
273
  asyncio.set_event_loop(asyncio.new_event_loop())
261
274
 
262
- coro = cls._create_sub_pool(actor_config, process_index, status_queue)
275
+ coro = cls._create_sub_pool(
276
+ actor_config, process_index, status_queue, main_pool_pid
277
+ )
263
278
  asyncio.run(coro)
264
279
 
265
280
  @classmethod
@@ -268,6 +283,7 @@ class MainActorPool(MainActorPoolBase):
268
283
  actor_config: ActorPoolConfig,
269
284
  process_index: int,
270
285
  status_queue: multiprocessing.Queue,
286
+ main_pool_pid: int,
271
287
  ):
272
288
  process_status = None
273
289
  try:
@@ -276,7 +292,11 @@ class MainActorPool(MainActorPoolBase):
276
292
  if env:
277
293
  os.environ.update(env)
278
294
  pool = await SubActorPool.create(
279
- {"actor_pool_config": actor_config, "process_index": process_index}
295
+ {
296
+ "actor_pool_config": actor_config,
297
+ "process_index": process_index,
298
+ "main_pool_pid": main_pool_pid,
299
+ }
280
300
  )
281
301
  external_addresses = cur_pool_config["external_address"]
282
302
  process_status = SubpoolStatus(
@@ -342,14 +362,14 @@ class MainActorPool(MainActorPoolBase):
342
362
  def start_pool_in_process():
343
363
  ctx = multiprocessing.get_context(method=start_method)
344
364
  status_queue = ctx.Queue()
365
+ main_pool_pid = os.getpid()
345
366
 
346
367
  with _suspend_init_main():
347
368
  process = ctx.Process(
348
369
  target=self._start_sub_pool,
349
- args=(self._config, process_index, status_queue),
370
+ args=(self._config, process_index, status_queue, main_pool_pid),
350
371
  name=f"IndigenActorPool{process_index}",
351
372
  )
352
- process.daemon = True
353
373
  process.start()
354
374
 
355
375
  # wait for sub actor pool to finish starting
xoscar/backends/pool.py CHANGED
@@ -27,11 +27,12 @@ import traceback
27
27
  from abc import ABC, ABCMeta, abstractmethod
28
28
  from typing import Any, Callable, Coroutine, Optional, Type, TypeVar
29
29
 
30
+ import psutil
31
+
30
32
  from .._utils import TypeDispatcher, create_actor_ref, to_binary
31
33
  from ..api import Actor
32
34
  from ..core import ActorRef, BufferRef, FileObjectRef, register_local_pool
33
35
  from ..debug import debug_async_timeout, record_message_trace
34
- from ..entrypoints import init_extension_entrypoints
35
36
  from ..errors import (
36
37
  ActorAlreadyExist,
37
38
  ActorNotExist,
@@ -186,8 +187,6 @@ class AbstractActorPool(ABC):
186
187
  self._asyncio_task_timeout_detector_task = (
187
188
  register_asyncio_task_timeout_detector()
188
189
  )
189
- # load third party extensions.
190
- init_extension_entrypoints()
191
190
  # init metrics
192
191
  metric_configs = self._config.get_metric_configs()
193
192
  metric_backend = metric_configs.get("backend")
@@ -821,7 +820,8 @@ SubProcessHandle = multiprocessing.Process
821
820
 
822
821
 
823
822
  class SubActorPoolBase(ActorPoolBase):
824
- __slots__ = ("_main_address",)
823
+ __slots__ = ("_main_address", "_watch_main_pool_task")
824
+ _watch_main_pool_task: Optional[asyncio.Task]
825
825
 
826
826
  def __init__(
827
827
  self,
@@ -834,6 +834,7 @@ class SubActorPoolBase(ActorPoolBase):
834
834
  config: ActorPoolConfig,
835
835
  servers: list[Server],
836
836
  main_address: str,
837
+ main_pool_pid: Optional[int],
837
838
  ):
838
839
  super().__init__(
839
840
  process_index,
@@ -846,6 +847,26 @@ class SubActorPoolBase(ActorPoolBase):
846
847
  servers,
847
848
  )
848
849
  self._main_address = main_address
850
+ if main_pool_pid:
851
+ self._watch_main_pool_task = asyncio.create_task(
852
+ self._watch_main_pool(main_pool_pid)
853
+ )
854
+ else:
855
+ self._watch_main_pool_task = None
856
+
857
+ async def _watch_main_pool(self, main_pool_pid: int):
858
+ main_process = psutil.Process(main_pool_pid)
859
+ while not self.stopped:
860
+ try:
861
+ await asyncio.to_thread(main_process.status)
862
+ await asyncio.sleep(0.1)
863
+ continue
864
+ except (psutil.NoSuchProcess, ProcessLookupError, asyncio.CancelledError):
865
+ # main pool died
866
+ break
867
+
868
+ if not self.stopped:
869
+ await self.stop()
849
870
 
850
871
  async def notify_main_pool_to_destroy(
851
872
  self, message: DestroyActorMessage
@@ -900,14 +921,22 @@ class SubActorPoolBase(ActorPoolBase):
900
921
 
901
922
  @staticmethod
902
923
  def _parse_config(config: dict, kw: dict) -> dict:
924
+ main_pool_pid = config.pop("main_pool_pid", None)
903
925
  kw = AbstractActorPool._parse_config(config, kw)
904
926
  pool_config: ActorPoolConfig = kw["config"]
905
927
  main_process_index = pool_config.get_process_indexes()[0]
906
928
  kw["main_address"] = pool_config.get_pool_config(main_process_index)[
907
929
  "external_address"
908
930
  ][0]
931
+ kw["main_pool_pid"] = main_pool_pid
909
932
  return kw
910
933
 
934
+ async def stop(self):
935
+ await super().stop()
936
+ if self._watch_main_pool_task:
937
+ self._watch_main_pool_task.cancel()
938
+ await self._watch_main_pool_task
939
+
911
940
 
912
941
  class MainActorPoolBase(ActorPoolBase):
913
942
  __slots__ = (
@@ -55,7 +55,7 @@ class TestMainActorPool(MainActorPool):
55
55
  status_queue: multiprocessing.Queue = multiprocessing.Queue()
56
56
  return (
57
57
  asyncio.create_task(
58
- cls._create_sub_pool(actor_pool_config, process_index, status_queue)
58
+ cls._create_sub_pool(actor_pool_config, process_index, status_queue, 0)
59
59
  ),
60
60
  status_queue,
61
61
  )
@@ -77,9 +77,14 @@ class TestMainActorPool(MainActorPool):
77
77
  actor_config: ActorPoolConfig,
78
78
  process_index: int,
79
79
  status_queue: multiprocessing.Queue,
80
+ main_pool_pid: int,
80
81
  ):
81
82
  pool: TestSubActorPool = await TestSubActorPool.create(
82
- {"actor_pool_config": actor_config, "process_index": process_index}
83
+ {
84
+ "actor_pool_config": actor_config,
85
+ "process_index": process_index,
86
+ "main_pool_pid": main_pool_pid,
87
+ }
83
88
  )
84
89
  await pool.start()
85
90
  status_queue.put(
@@ -55,30 +55,30 @@ class AllReduceAlgorithm(IntEnum):
55
55
 
56
56
 
57
57
  TypeMappingGloo: Dict[Type[np.dtype], "xp.GlooDataType_t"] = {
58
- np.int8: xp.GlooDataType_t.glooInt8,
59
- np.uint8: xp.GlooDataType_t.glooUint8,
60
- np.int32: xp.GlooDataType_t.glooInt32,
61
- np.uint32: xp.GlooDataType_t.glooUint32,
62
- np.int64: xp.GlooDataType_t.glooInt64,
63
- np.uint64: xp.GlooDataType_t.glooUint64,
64
- np.float16: xp.GlooDataType_t.glooFloat16,
65
- np.float32: xp.GlooDataType_t.glooFloat32,
66
- np.float64: xp.GlooDataType_t.glooFloat64,
58
+ np.int8: xp.GlooDataType_t.glooInt8, # type: ignore
59
+ np.uint8: xp.GlooDataType_t.glooUint8, # type: ignore
60
+ np.int32: xp.GlooDataType_t.glooInt32, # type: ignore
61
+ np.uint32: xp.GlooDataType_t.glooUint32, # type: ignore
62
+ np.int64: xp.GlooDataType_t.glooInt64, # type: ignore
63
+ np.uint64: xp.GlooDataType_t.glooUint64, # type: ignore
64
+ np.float16: xp.GlooDataType_t.glooFloat16, # type: ignore
65
+ np.float32: xp.GlooDataType_t.glooFloat32, # type: ignore
66
+ np.float64: xp.GlooDataType_t.glooFloat64, # type: ignore
67
67
  }
68
68
  cupy = lazy_import("cupy")
69
69
  if cupy is not None:
70
70
  from cupy.cuda import nccl
71
71
 
72
72
  TypeMappingNCCL: Dict[Type[np.dtype], int] = {
73
- np.int8: nccl.NCCL_INT8,
74
- np.uint8: nccl.NCCL_UINT8,
75
- np.int32: nccl.NCCL_INT32,
76
- np.uint32: nccl.NCCL_UINT32,
77
- np.int64: nccl.NCCL_INT64,
78
- np.uint64: nccl.NCCL_UINT64,
79
- np.float16: nccl.NCCL_FLOAT16,
80
- np.float32: nccl.NCCL_FLOAT32,
81
- np.float64: nccl.NCCL_FLOAT64,
73
+ np.int8: nccl.NCCL_INT8, # type: ignore
74
+ np.uint8: nccl.NCCL_UINT8, # type: ignore
75
+ np.int32: nccl.NCCL_INT32, # type: ignore
76
+ np.uint32: nccl.NCCL_UINT32, # type: ignore
77
+ np.int64: nccl.NCCL_INT64, # type: ignore
78
+ np.uint64: nccl.NCCL_UINT64, # type: ignore
79
+ np.float16: nccl.NCCL_FLOAT16, # type: ignore
80
+ np.float32: nccl.NCCL_FLOAT32, # type: ignore
81
+ np.float64: nccl.NCCL_FLOAT64, # type: ignore
82
82
  }
83
83
 
84
84
  ReduceOpMappingNCCL: Dict[CollectiveReduceOp, int] = {
Binary file
Binary file
xoscar/utils.py CHANGED
@@ -462,3 +462,41 @@ def is_windows():
462
462
 
463
463
  def is_linux():
464
464
  return sys.platform.startswith("linux")
465
+
466
+
467
+ def is_v4_zero_ip(ip_port_addr: str) -> bool:
468
+ return ip_port_addr.startswith("0.0.0.0:")
469
+
470
+
471
+ def is_v6_zero_ip(ip_port_addr: str) -> bool:
472
 + # tcp6 addr ":::123"; "::" means all zeros
473
+ arr = ip_port_addr.split(":")
474
+ if len(arr) <= 2: # Not tcp6 or udp6
475
+ return False
476
+ for part in arr[0:-1]:
477
+ if part != "":
478
+ if int(part, 16) != 0:
479
+ return False
480
+ return True
481
+
482
+
483
+ def fix_all_zero_ip(remote_addr: str, connect_addr: str) -> str:
484
+ """
485
 + Use connect_addr to fix the ActorRef.address returned by the remote server.
486
 + When the remote server listens on "0.0.0.0:port" or ":::port", it returns ActorRef.address set to that listening addr;
487
 + this address cannot be used by the client for subsequent interactions unless we fix it.
488
+ (client will treat 0.0.0.0 as 127.0.0.1)
489
+
490
 + NOTE: The server might return a different addr from a pool for load-balancing purposes.
491
+ """
492
+ if remote_addr == connect_addr:
493
+ return remote_addr
494
+ if not is_v4_zero_ip(remote_addr) and not is_v6_zero_ip(remote_addr):
495
+ # Remote server returns on non-zero ip
496
+ return remote_addr
497
+ if is_v4_zero_ip(connect_addr) or is_v6_zero_ip(connect_addr):
498
+ # Client connect to local server
499
+ return remote_addr
500
+ remote_port = remote_addr.split(":")[-1]
501
 + connect_ip = ":".join(connect_addr.split(":")[0:-1])  # Remove the port
502
+ return f"{connect_ip}:{remote_port}"
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.1
2
2
  Name: xoscar
3
- Version: 0.2.1
3
+ Version: 0.3.2
4
4
  Summary: Python actor framework for heterogeneous computing.
5
5
  Home-page: http://github.com/xorbitsai/xoscar
6
6
  Author: Qin Xuye
@@ -18,11 +18,12 @@ Classifier: Programming Language :: Python :: 3.11
18
18
  Classifier: Programming Language :: Python :: Implementation :: CPython
19
19
  Classifier: Topic :: Software Development :: Libraries
20
20
  Description-Content-Type: text/markdown
21
- Requires-Dist: numpy >=1.14.0
21
+ Requires-Dist: numpy <2.0.0,>=1.14.0
22
22
  Requires-Dist: pandas >=1.0.0
23
23
  Requires-Dist: cloudpickle >=1.5.0
24
24
  Requires-Dist: psutil >=5.9.0
25
25
  Requires-Dist: tblib >=1.7.0
26
+ Requires-Dist: packaging
26
27
  Requires-Dist: pickle5 ; python_version < "3.8"
27
28
  Requires-Dist: uvloop >=0.14.0 ; sys_platform != "win32"
28
29
  Requires-Dist: scipy >=1.0.0 ; sys_platform != "win32" or python_version >= "3.10"
@@ -1,7 +1,7 @@
1
- xoscar-0.2.1.dist-info/RECORD,,
2
- xoscar-0.2.1.dist-info/WHEEL,sha256=wt_0D7ETubDJ-d2If9gFyi_RXCxFIy3PzOgckP8MsJY,109
3
- xoscar-0.2.1.dist-info/top_level.txt,sha256=vYlqqY4Nys8Thm1hePIuUv8eQePdULVWMmt7lXtX_ZA,21
4
- xoscar-0.2.1.dist-info/METADATA,sha256=rG9DA6iIoOVLoyKK0z5tid9xhSVnWccijS9blRQyfSc,9214
1
+ xoscar-0.3.2.dist-info/RECORD,,
2
+ xoscar-0.3.2.dist-info/WHEEL,sha256=WRwPRH7mjFT-dTZ5lq85StrNRg9EtOV3GBZVL26Gb_o,109
3
+ xoscar-0.3.2.dist-info/top_level.txt,sha256=vYlqqY4Nys8Thm1hePIuUv8eQePdULVWMmt7lXtX_ZA,21
4
+ xoscar-0.3.2.dist-info/METADATA,sha256=Sw1MUXmjiJDvY37wnlzYTfiAyRYM3pqIMfefEvjPr94,9246
5
5
  xoscar/_utils.pyx,sha256=UR1FtYXAYKIdEWR9HulEpMbSOrkQWi6xGz63d4IQmG0,7059
6
6
  xoscar/backend.py,sha256=is436OPkZfSpQXaoqTRVta5eoye_pp45RFgCstAk2hU,1850
7
7
  xoscar/core.pxd,sha256=4lBq8J0kjcXcsGuvN7Kv4xcL5liHwTTFWlqyK7XAEnw,1280
@@ -10,17 +10,16 @@ xoscar/context.pxd,sha256=qKa0OyDPZtVymftSh447m-RzFZgmz8rGqQBa7qlauvc,725
10
10
  xoscar/batch.py,sha256=DpArS0L3WYJ_HVPG-6hSYEwoAFY1mY2-mlC4Jp5M_Dw,7872
11
11
  xoscar/nvutils.py,sha256=qmW4mKLU0WB2yCs198ccQOgLL02zB7Fsa-AotO3NOmg,20412
12
12
  xoscar/constants.py,sha256=Yn59lRIOvE1VFwyuZB5G2-gxYIyhIZ1rVovbdFAR2NM,759
13
- xoscar/__init__.py,sha256=9BapEEmHU9OlpDOIc_4LwXNHfauP1XDW0YRnAUKZp_8,1721
13
+ xoscar/__init__.py,sha256=0zX8kKaio3ZIrlzB79WybcravMJw1OxPWjDspTgJFyQ,1608
14
14
  xoscar/api.py,sha256=3hztPoOxg8A_mlhWyWgVP7FMXG0PATA1TP4Rbaj7A-g,13327
15
- xoscar/_utils.cpython-38-darwin.so,sha256=Gax7PtJkyx_-Au0YJ8IAzfep8KWa0ChQpNWczRNsQB4,191280
16
- xoscar/utils.py,sha256=TH81N2EWUDfAMdlkPYSh0juZS2EbdvvdhWx_6euQygk,14672
15
+ xoscar/_utils.cpython-38-darwin.so,sha256=dMAe_zrbClyGKdkK_fwghRRVcsmKpu-UiGW0GGszkNU,191288
16
+ xoscar/utils.py,sha256=TYp6wC8xx2AjKcoKt6Xk0bwhFeccBJKCK50YQE1XOV4,16076
17
17
  xoscar/debug.py,sha256=9Z8SgE2WaKYQcyDo-5-DxEJQ533v7kWjrvCd28pSx3E,5069
18
18
  xoscar/libcpp.pxd,sha256=DJqBxLFOKL4iRr9Kale5UH3rbvPRD1x5bTSOPHFpz9I,1147
19
- xoscar/entrypoints.py,sha256=t-PfnqYDyjzXbV-Z-hjaQxpf_m95eSx2saAsb-V2ODY,1642
20
19
  xoscar/context.pyx,sha256=8CdgPnWcE9eOp3N600WgDQ03MCi8P73eUOGcfV7Zksg,10942
21
- xoscar/context.cpython-38-darwin.so,sha256=eTBMnKqupkSQ1JZuVjfivBs7VNQKXI4LoljEjuBZTOg,231328
20
+ xoscar/context.cpython-38-darwin.so,sha256=ljX58QrPptyVgyTXRJwepSV5rL63PfNRCLOV8h1nftY,231336
22
21
  xoscar/errors.py,sha256=wBlQOKsXf0Fc4skN39tDie0YZT-VIAuLNRgoDl2pZcA,1241
23
- xoscar/core.cpython-38-darwin.so,sha256=RcQwQZrzCOUV99HLD7RLICUD7Uk1Zu_bR8ENQn0rtI0,500640
22
+ xoscar/core.cpython-38-darwin.so,sha256=Xfg8mJa09SQuBWGWIj2a7MOdHw8zJBQ9n_a-mnMuna0,500640
24
23
  xoscar/core.pyx,sha256=Aqc2i8Fetsd5wRAPF4kL0ddnBZn3E2HRNCvup79BbQc,21730
25
24
  xoscar/driver.py,sha256=498fowtJr6b3FE8FIOA_Tc1Vwx88nfZw7p0FxrML0h4,1372
26
25
  xoscar/profiling.py,sha256=BC5OF0HzSaXv8V7w-y-B8r5gV5DgxHFoTEIF6jCMioQ,8015
@@ -33,10 +32,10 @@ xoscar/metrics/backends/prometheus/__init__.py,sha256=h_JgzSqV5lP6vQ6XX_17kE4IY4
33
32
  xoscar/metrics/backends/prometheus/prometheus_metric.py,sha256=MxoMvVrg0pOkKpkjJ0PcAuEaaEJR2FZljmPrLjQ1-oc,2050
34
33
  xoscar/metrics/backends/console/console_metric.py,sha256=y5CCtH33j3AqI5_Uhwi4mgOcAhyhb4cWv_YvR6fxcbQ,2082
35
34
  xoscar/metrics/backends/console/__init__.py,sha256=h_JgzSqV5lP6vQ6XX_17kE4IY4BRnvKta_7VLQAL1ms,581
36
- xoscar/collective/xoscar_pygloo.cpython-38-darwin.so,sha256=j_7pbuNN3_v7myl1c2n2iwU292UtQkUs0z5a9byzjeI,1268160
35
+ xoscar/collective/xoscar_pygloo.cpython-38-darwin.so,sha256=ptd1C1I79zPHLwJUdBND2SQ8DNBK3Vq0YpOxsyhjAeY,1268616
37
36
  xoscar/collective/__init__.py,sha256=XsClIkO_3Jd8GDifYuAbZCmJLAo9ZqGvnjUn9iuogmU,774
38
37
  xoscar/collective/core.py,sha256=WfMJZloiRiqsLlIMhU4Pa47eo0jE-hoXdbTBwZPM6TM,23498
39
- xoscar/collective/common.py,sha256=b9JkCnXEl-SWkHbMtmVmyJG7RzqPw4IIpHJfMwruU2M,3273
38
+ xoscar/collective/common.py,sha256=INAnISbfnRicbbbDHTqbSr9ITb89ZphH5BUkSpEdXXU,3561
40
39
  xoscar/collective/utils.py,sha256=3S4qF4JEnAUD3RiWVBUj-ZptL83CBSwGYyVZyIasAsE,1178
41
40
  xoscar/collective/process_group.py,sha256=zy7LcIFnEcmrcxuECI89v0bQlUbSqQMkVyBw468WBnk,22599
42
41
  xoscar/serialization/exception.py,sha256=Jy8Lsk0z-VJyEUaWeuZIwkmxqaoB-nLKMa1D15Cl4js,1634
@@ -47,31 +46,31 @@ xoscar/serialization/numpy.py,sha256=5Kem87CvpJmzUMp3QHk4WeHU30FoQWTJJP2SwIcaQG0
47
46
  xoscar/serialization/cuda.py,sha256=iFUEnN4SiquBIhyieyOrfw3TnKnW-tU_vYgqOxO_DrA,3758
48
47
  xoscar/serialization/scipy.py,sha256=yOEi0NB8cqQ6e2UnCZ1w006RsB7T725tIL-DM_hNcsU,2482
49
48
  xoscar/serialization/aio.py,sha256=S9e3rHMBwqqKmJtDz7KzYAqWc8w9bttA0Dj83IBfEU0,4577
50
- xoscar/serialization/core.cpython-38-darwin.so,sha256=M7bL9_9CvkVqbQz8bdLzFm4b6997cxDHit4mZ660YiM,476160
49
+ xoscar/serialization/core.cpython-38-darwin.so,sha256=mi0BA8J8w5u3XjmfoFY_f1yvgnaxZsvfzzjQoUxEAKw,476160
51
50
  xoscar/serialization/core.pyx,sha256=E3xIKmdI2gn99JduR3yuU_YTm-lOyG0Tkc7fZVBWCho,30131
52
51
  xoscar/backends/config.py,sha256=EG26f0GwX_f4dAhwTW77RBjiK9h8R_3JrD-rBF1bAq8,4984
53
52
  xoscar/backends/allocate_strategy.py,sha256=tC1Nbq2tJohahUwd-zoRYHEDX65wyuX8tmeY45uWj_w,4845
54
- xoscar/backends/message.cpython-38-darwin.so,sha256=udOZBvJmAmUxvHoe7Va8dTxhaNpYLgqT5nR-fsl7bK0,418016
53
+ xoscar/backends/message.cpython-38-darwin.so,sha256=x0nu6jtsBT_fqOpHPwz8qWoXo7eNMLJ50pcxtVAIGHI,418016
55
54
  xoscar/backends/__init__.py,sha256=VHEBQcUWM5bj027W8EUf9PiJUAP7JoMrRw3Tsvy5ySw,643
56
- xoscar/backends/core.py,sha256=o6g3ZOW7PkGmiu-nNtp6I3Sd_2KkQDwOsKz-FdgRFs0,7390
57
- xoscar/backends/context.py,sha256=b4mDqcrA7uBsy9Rb5laxlbujCyj8GpBglpjkNcg-Mg0,15285
55
+ xoscar/backends/core.py,sha256=aHb3mMZ9vJe6pxg0P8kSOKvjXF1IaqgOgyhKVhHpNLM,7432
56
+ xoscar/backends/context.py,sha256=Vr_PibRxYCDQ_gYK7r-BOlw9TXw8VQbFsVTH7K7mHPk,15470
58
57
  xoscar/backends/router.py,sha256=mhSvM5KVfV882jricVcpyxAqHEvhS4zL6ivczC6fOTE,7746
59
58
  xoscar/backends/message.pyx,sha256=_rXcsWPcWu77Z_38rvjDBdQojpY5xJoaHQrt57_LVyo,17612
60
- xoscar/backends/pool.py,sha256=NqsCCL3LYqlDYrsZiI8hKeg4UpHom4jcsKGEhzueuWY,58156
59
+ xoscar/backends/pool.py,sha256=bvS1r31O01E8jTdoWOhSqcFymksNqO2nX3Fkqary8Ro,59149
61
60
  xoscar/backends/indigen/backend.py,sha256=znl_fZzWGEtLH8hZ9j9Kkf0fva25jEem2_KO7I1RVvc,1612
62
61
  xoscar/backends/indigen/__init__.py,sha256=tKHP5ClzedBRBpZsLRVErR3EUNbbDm4CY4u0rCFJr44,685
63
62
  xoscar/backends/indigen/driver.py,sha256=VGzkacYKykegW5qhCuhx01gdgBZEKJjNIyfNCnA6Nm8,952
64
- xoscar/backends/indigen/pool.py,sha256=Fa8rqH3_OkrwMRtO3UJWjDDB-u58ckmN1tOMRGjsX8I,16122
63
+ xoscar/backends/indigen/pool.py,sha256=3C1N2sbq02maUjl7jDhRkyYAoYmZD8hZBct6wxblq_Y,16709
65
64
  xoscar/backends/test/backend.py,sha256=nv9WFhH5Bbq4Q1HB9yfpciZBaeHT4IQAtzugBWESrUY,1263
66
65
  xoscar/backends/test/__init__.py,sha256=j2ZfD6prD9WjUxRUDC7Eq5Z7N7TkL6fFr59oNyc_vY4,682
67
- xoscar/backends/test/pool.py,sha256=43pMkebFAFLilvla5-AOS5W91shLycvfkbKwjdmlNWQ,7131
66
+ xoscar/backends/test/pool.py,sha256=TW4X6J-92Pti66103poQBNDBznX6CBD3RLOc_zixjTo,7257
68
67
  xoscar/backends/communication/ucx.py,sha256=eidp4l-YAzFMCYaeUcvpK4ecapg-92fXFKO-t_bBkTU,19267
69
68
  xoscar/backends/communication/__init__.py,sha256=tB05BlK63iWQnfJgRzKt4mFKRtmWUki5hUGSZQwAotc,1050
70
69
  xoscar/backends/communication/core.py,sha256=sJeE3foRIqVPXldzYpFKHDSsabfAIFBU4JuXY4OyklY,2130
71
70
  xoscar/backends/communication/utils.py,sha256=AmovE-hmWLXNCPwHafYuaRjOk8m42BUyT3XBqfXQRVI,3664
72
71
  xoscar/backends/communication/errors.py,sha256=V3CdBe2xX9Rwv32f2dH2Msc84yaUhlyerZ42-739o1Q,723
73
72
  xoscar/backends/communication/socket.py,sha256=VBPiesyjX8c3ECWn8kv8qGwK3xCBqh_CHPrNDapYH6w,11819
74
- xoscar/backends/communication/dummy.py,sha256=Xgn-gQ8bM3P9jzozQHGOk6gaNGnxckDJ2Su07y8wAhk,7796
73
+ xoscar/backends/communication/dummy.py,sha256=gaKPNiN4x2aGZV3IGaaa8eaweBVjRh8B19jU1B5t2yw,7798
75
74
  xoscar/backends/communication/base.py,sha256=0P4Tr35GSWpRp394e9jVWUUoKKa-gIk177eYPw1BnSU,7421
76
75
  xoscar/aio/__init__.py,sha256=4Rv9V_wDIKlg7VcJeo1GVlvobwskYb1jYXef-0GQOaY,809
77
76
  xoscar/aio/_threads.py,sha256=WE9_NZY3K9n5bAzXRbj1Bc4dxS-1m1erMfZsUu-ULU4,1313
@@ -1,5 +1,5 @@
1
1
  Wheel-Version: 1.0
2
- Generator: bdist_wheel (0.42.0)
2
+ Generator: bdist_wheel (0.43.0)
3
3
  Root-Is-Purelib: false
4
4
  Tag: cp38-cp38-macosx_10_9_x86_64
5
5
 
xoscar/entrypoints.py DELETED
@@ -1,42 +0,0 @@
1
- # Copyright 2022-2023 XProbe Inc.
2
- #
3
- # Licensed under the Apache License, Version 2.0 (the "License");
4
- # you may not use this file except in compliance with the License.
5
- # You may obtain a copy of the License at
6
- #
7
- # http://www.apache.org/licenses/LICENSE-2.0
8
- #
9
- # Unless required by applicable law or agreed to in writing, software
10
- # distributed under the License is distributed on an "AS IS" BASIS,
11
- # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12
- # See the License for the specific language governing permissions and
13
- # limitations under the License.
14
-
15
- import functools
16
- import logging
17
- import warnings
18
-
19
- logger = logging.getLogger(__name__)
20
-
21
-
22
- # from https://github.com/numba/numba/blob/master/numba/core/entrypoints.py
23
- # Must put this here to avoid extensions re-triggering initialization
24
- @functools.lru_cache(maxsize=None)
25
- def init_extension_entrypoints():
26
- """Execute all `xoscar_extensions` entry points with the name `init`
27
- If extensions have already been initialized, this function does nothing.
28
- """
29
- from pkg_resources import iter_entry_points # type: ignore
30
-
31
- for entry_point in iter_entry_points("xoscar_extensions", "init"):
32
- logger.info("Loading extension: %s", entry_point)
33
- try:
34
- func = entry_point.load()
35
- func()
36
- except Exception as e:
37
- msg = "Xoscar extension module '{}' failed to load due to '{}({})'."
38
- warnings.warn(
39
- msg.format(entry_point.module_name, type(e).__name__, str(e)),
40
- stacklevel=2,
41
- )
42
- logger.info("Extension loading failed for: %s", entry_point)