dask-cuda 25.2.0__py3-none-any.whl → 25.4.0__py3-none-any.whl

This diff shows the changes between two publicly released versions of the package, as they appear in their respective public registries. It is provided for informational purposes only.
dask_cuda/GIT_COMMIT ADDED
@@ -0,0 +1 @@
+ e9ebd92886e6f518af02faf8a2cdadeb700b25a9
dask_cuda/VERSION CHANGED
@@ -1 +1 @@
- 25.02.00
+ 25.04.00
dask_cuda/__init__.py CHANGED
@@ -5,8 +5,6 @@ if sys.platform != "linux":
 
  import dask
  import dask.utils
- import dask.dataframe.shuffle
- from .explicit_comms.dataframe.shuffle import patch_shuffle_expression
  from distributed.protocol.cuda import cuda_deserialize, cuda_serialize
  from distributed.protocol.serialize import dask_deserialize, dask_serialize
 
@@ -14,30 +12,43 @@ from ._version import __git_commit__, __version__
  from .cuda_worker import CUDAWorker
 
  from .local_cuda_cluster import LocalCUDACluster
- from .proxify_device_objects import proxify_decorator, unproxify_decorator
 
 
- # Monkey patching Dask to make use of explicit-comms when `DASK_EXPLICIT_COMMS=True`
- patch_shuffle_expression()
- # Monkey patching Dask to make use of proxify and unproxify in compatibility mode
- dask.dataframe.shuffle.shuffle_group = proxify_decorator(
-     dask.dataframe.shuffle.shuffle_group
- )
- dask.dataframe.core._concat = unproxify_decorator(dask.dataframe.core._concat)
-
-
- def _register_cudf_spill_aware():
-     import cudf
-
-     # Only enable Dask/cuDF spilling if cuDF spilling is disabled, see
-     # https://github.com/rapidsai/dask-cuda/issues/1363
-     if not cudf.get_option("spill"):
-         # This reproduces the implementation of `_register_cudf`, see
-         # https://github.com/dask/distributed/blob/40fcd65e991382a956c3b879e438be1b100dff97/distributed/protocol/__init__.py#L106-L115
-         from cudf.comm import serialize
-
-
- for registry in [cuda_serialize, cuda_deserialize, dask_serialize, dask_deserialize]:
-     for lib in ["cudf", "dask_cudf"]:
-         if lib in registry._lazy:
-             registry._lazy[lib] = _register_cudf_spill_aware
+ try:
+     import dask.dataframe as dask_dataframe
+ except ImportError:
+     # Dask DataFrame (optional) isn't installed
+     dask_dataframe = None
+
+
+ if dask_dataframe is not None:
+     from .explicit_comms.dataframe.shuffle import patch_shuffle_expression
+     from .proxify_device_objects import proxify_decorator, unproxify_decorator
+
+     # Monkey patching Dask to make use of explicit-comms when `DASK_EXPLICIT_COMMS=True`
+     patch_shuffle_expression()
+     # Monkey patching Dask to make use of proxify and unproxify in compatibility mode
+     dask_dataframe.shuffle.shuffle_group = proxify_decorator(
+         dask.dataframe.shuffle.shuffle_group
+     )
+     dask_dataframe.core._concat = unproxify_decorator(dask.dataframe.core._concat)
+
+     def _register_cudf_spill_aware():
+         import cudf
+
+         # Only enable Dask/cuDF spilling if cuDF spilling is disabled, see
+         # https://github.com/rapidsai/dask-cuda/issues/1363
+         if not cudf.get_option("spill"):
+             # This reproduces the implementation of `_register_cudf`, see
+             # https://github.com/dask/distributed/blob/40fcd65e991382a956c3b879e438be1b100dff97/distributed/protocol/__init__.py#L106-L115
+             from cudf.comm import serialize
+
+     for registry in [
+         cuda_serialize,
+         cuda_deserialize,
+         dask_serialize,
+         dask_deserialize,
+     ]:
+         for lib in ["cudf", "dask_cudf"]:
+             if lib in registry._lazy:
+                 registry._lazy[lib] = _register_cudf_spill_aware
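
Note: the upshot of this change is that `dask.dataframe` becomes an optional import for `dask_cuda`. A minimal, self-contained sketch of the same guard pattern; the `feature_available` helper is illustrative, not part of dask-cuda:

# Optional-dependency guard: bind the module if importable, else None.
try:
    import dask.dataframe as dask_dataframe
except ImportError:
    dask_dataframe = None


def feature_available() -> bool:
    # Hypothetical helper: report whether DataFrame-specific features
    # can be registered in this environment.
    return dask_dataframe is not None


if dask_dataframe is not None:
    # Registration that touches dask.dataframe lives behind the guard,
    # so a bare `import dask_cuda` never needs the optional extra.
    pass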
dask_cuda/explicit_comms/comms.py CHANGED
@@ -1,15 +1,21 @@
+ # Copyright (c) 2021-2025 NVIDIA CORPORATION.
  import asyncio
  import concurrent.futures
  import contextlib
  import time
  import uuid
+ import weakref
  from typing import Any, Dict, Hashable, Iterable, List, Optional
 
  import distributed.comm
+ from dask.tokenize import tokenize
  from distributed import Client, Worker, default_client, get_worker
  from distributed.comm.addressing import parse_address, parse_host_port, unparse_address
 
- _default_comms = None
+ # Mapping tokenize(client ID, [worker addresses]) to CommsContext
+ _comms_cache: weakref.WeakValueDictionary[
+     str, "CommsContext"
+ ] = weakref.WeakValueDictionary()
 
 
  def get_multi_lock_or_null_context(multi_lock_context, *args, **kwargs):
@@ -38,9 +44,10 @@ def get_multi_lock_or_null_context(multi_lock_context, *args, **kwargs):
 
 
  def default_comms(client: Optional[Client] = None) -> "CommsContext":
-     """Return the default comms object
+     """Return the default comms object for ``client``.
 
-     Creates a new default comms object if no one exist.
+     Creates a new default comms object if one does not already exist
+     for ``client``.
 
      Parameters
      ----------
@@ -52,11 +59,31 @@ def default_comms(client: Optional[Client] = None) -> "CommsContext":
      -------
      comms: CommsContext
          The default comms object
+
+     Notes
+     -----
+     There are some subtle points around explicit-comms and the lifecycle
+     of a Dask Cluster.
+
+     A :class:`CommsContext` establishes explicit communication channels
+     between the workers *at the time it's created*. If workers are added
+     or removed, they will not be included in the communication channels
+     with the other workers.
+
+     If you need to refresh the explicit communications channels, then
+     create a new :class:`CommsContext` object or call ``default_comms``
+     again after workers have been added to or removed from the cluster.
      """
-     global _default_comms
-     if _default_comms is None:
-         _default_comms = CommsContext(client=client)
-     return _default_comms
+     # Comms are unique to a {client, [workers]} pair, so we key our
+     # cache by the token of that.
+     client = client or default_client()
+     token = tokenize(client.id, list(client.scheduler_info()["workers"].keys()))
+     maybe_comms = _comms_cache.get(token)
+     if maybe_comms is None:
+         maybe_comms = CommsContext(client=client)
+         _comms_cache[token] = maybe_comms
+
+     return maybe_comms
 
 
  def worker_state(sessionId: Optional[int] = None) -> dict:
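
Note: the new `default_comms` replaces a single module-level global with a weak-value cache keyed by the cluster topology. A minimal, self-contained sketch of that caching scheme; the `Resource` class here is an illustrative stand-in for `CommsContext`:

import weakref

from dask.tokenize import tokenize


class Resource:
    """Stand-in for CommsContext; holds per-topology state."""

    def __init__(self, key):
        self.key = key


# Values are held weakly: once the last strong reference to a Resource
# goes away, its cache entry disappears automatically.
_cache: "weakref.WeakValueDictionary[str, Resource]" = weakref.WeakValueDictionary()


def get_resource(client_id: str, workers: list) -> Resource:
    # Same key scheme as default_comms: one entry per {client, [workers]}
    # pair, so a scaled cluster (new worker list) yields a fresh resource.
    token = tokenize(client_id, workers)
    res = _cache.get(token)
    if res is None:
        res = Resource(token)
        _cache[token] = res
    return res


r1 = get_resource("client-1", ["tcp://a:1", "tcp://b:2"])
r2 = get_resource("client-1", ["tcp://a:1", "tcp://b:2"])
assert r1 is r2  # same topology -> cached object
r3 = get_resource("client-1", ["tcp://a:1", "tcp://b:2", "tcp://c:3"])
assert r3 is not r1  # scaled cluster -> new object
del r1, r2, r3
assert len(_cache) == 0  # on CPython, refcounting empties the cache immediately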
dask_cuda/proxy_object.py CHANGED
@@ -11,10 +11,6 @@ from typing import TYPE_CHECKING, Any, Dict, Iterable, Optional, Tuple, Type, Un
  import pandas
 
  import dask
- import dask.array.core
- import dask.dataframe.backends
- import dask.dataframe.dispatch
- import dask.dataframe.utils
  import dask.utils
  import distributed.protocol
  import distributed.utils
@@ -30,6 +26,22 @@ if TYPE_CHECKING:
      from .proxify_host_file import ProxyManager
 
 
+ try:
+     import dask.dataframe as dask_dataframe
+     import dask.dataframe.backends
+     import dask.dataframe.dispatch
+     import dask.dataframe.utils
+ except ImportError:
+     dask_dataframe = None
+
+
+ try:
+     import dask.array as dask_array
+     import dask.array.core
+ except ImportError:
+     dask_array = None
+
+
  # List of attributes that should be copied to the proxy at creation, which makes
  # them accessible without deserialization of the proxied object
  _FIXED_ATTRS = ["name", "__len__"]
@@ -884,14 +896,6 @@ def obj_pxy_dask_deserialize(header, frames):
      return subclass(pxy)
 
 
- @dask.dataframe.dispatch.get_parallel_type.register(ProxyObject)
- def get_parallel_type_proxy_object(obj: ProxyObject):
-     # Notice, `get_parallel_type()` needs a instance not a type object
-     return dask.dataframe.dispatch.get_parallel_type(
-         obj.__class__.__new__(obj.__class__)
-     )
-
-
  def unproxify_input_wrapper(func):
      """Unproxify the input of `func`"""
@@ -904,26 +908,42 @@ def unproxify_input_wrapper(func):
      return wrapper
 
 
- # Register dispatch of ProxyObject on all known dispatch objects
- for dispatch in (
-     dask.dataframe.dispatch.hash_object_dispatch,
-     dask.dataframe.dispatch.make_meta_dispatch,
-     dask.dataframe.utils.make_scalar,
-     dask.dataframe.dispatch.group_split_dispatch,
-     dask.array.core.tensordot_lookup,
-     dask.array.core.einsum_lookup,
-     dask.array.core.concatenate_lookup,
- ):
-     dispatch.register(ProxyObject, unproxify_input_wrapper(dispatch))
-
- dask.dataframe.dispatch.concat_dispatch.register(
-     ProxyObject, unproxify_input_wrapper(dask.dataframe.dispatch.concat)
- )
-
-
- # We overwrite the Dask dispatch of Pandas objects in order to
- # deserialize all ProxyObjects before concatenating
- dask.dataframe.dispatch.concat_dispatch.register(
-     (pandas.DataFrame, pandas.Series, pandas.Index),
-     unproxify_input_wrapper(dask.dataframe.backends.concat_pandas),
- )
+ if dask_array is not None:
+
+     # Register dispatch of ProxyObject on all known dispatch objects
+     for dispatch in (
+         dask.array.core.tensordot_lookup,
+         dask.array.core.einsum_lookup,
+         dask.array.core.concatenate_lookup,
+     ):
+         dispatch.register(ProxyObject, unproxify_input_wrapper(dispatch))
+
+
+ if dask_dataframe is not None:
+
+     @dask.dataframe.dispatch.get_parallel_type.register(ProxyObject)
+     def get_parallel_type_proxy_object(obj: ProxyObject):
+         # Notice, `get_parallel_type()` needs a instance not a type object
+         return dask.dataframe.dispatch.get_parallel_type(
+             obj.__class__.__new__(obj.__class__)
+         )
+
+     # Register dispatch of ProxyObject on all known dispatch objects
+     for dispatch in (
+         dask.dataframe.dispatch.hash_object_dispatch,
+         dask.dataframe.dispatch.make_meta_dispatch,
+         dask.dataframe.utils.make_scalar,
+         dask.dataframe.dispatch.group_split_dispatch,
+     ):
+         dispatch.register(ProxyObject, unproxify_input_wrapper(dispatch))
+
+     dask.dataframe.dispatch.concat_dispatch.register(
+         ProxyObject, unproxify_input_wrapper(dask.dataframe.dispatch.concat)
+     )
+
+     # We overwrite the Dask dispatch of Pandas objects in order to
+     # deserialize all ProxyObjects before concatenating
+     dask.dataframe.dispatch.concat_dispatch.register(
+         (pandas.DataFrame, pandas.Series, pandas.Index),
+         unproxify_input_wrapper(dask.dataframe.backends.concat_pandas),
+     )
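
Note: the registrations above all follow one pattern: route the proxy type through an existing dispatcher after unwrapping it. A minimal runnable sketch of that pattern using `dask.utils.Dispatch`; the `Proxy` class and `describe` dispatch are illustrative, not dask-cuda APIs:

from dask.utils import Dispatch


class Proxy:
    """Illustrative stand-in for ProxyObject: wraps another value."""

    def __init__(self, wrapped):
        self.wrapped = wrapped


describe = Dispatch("describe")


@describe.register(int)
def _(x):
    return f"int: {x}"


def unproxify_input_wrapper(func):
    # Unwrap any Proxy arguments before delegating to the real handler.
    def wrapper(*args, **kwargs):
        args = [a.wrapped if isinstance(a, Proxy) else a for a in args]
        return func(*args, **kwargs)

    return wrapper


# Registering the dispatch itself as the Proxy handler means unwrapping
# re-dispatches on the wrapped object's type -- the same trick as
# `dispatch.register(ProxyObject, unproxify_input_wrapper(dispatch))`.
describe.register(Proxy, unproxify_input_wrapper(describe))

assert describe(5) == "int: 5"
assert describe(Proxy(5)) == "int: 5"  # unwrapped, then dispatched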
dask_cuda/tests/test_explicit_comms.py CHANGED
@@ -1,3 +1,5 @@
+ # Copyright (c) 2021-2025 NVIDIA CORPORATION.
+
  import asyncio
  import multiprocessing as mp
  import os
@@ -415,3 +417,116 @@ def test_lock_workers():
          p.join()
 
      assert all(p.exitcode == 0 for p in ps)
+
+
+ def test_create_destroy_create():
+     # https://github.com/rapidsai/dask-cuda/issues/1450
+     assert len(comms._comms_cache) == 0
+     with LocalCluster(n_workers=1) as cluster:
+         with Client(cluster) as client:
+             context = comms.default_comms()
+             scheduler_addresses_old = list(client.scheduler_info()["workers"].keys())
+             comms_addresses_old = list(comms.default_comms().worker_addresses)
+             assert comms.default_comms() is context
+             assert len(comms._comms_cache) == 1
+
+             # Add a worker, which should have a new comms object
+             cluster.scale(2)
+             client.wait_for_workers(2, timeout=5)
+             context2 = comms.default_comms()
+             assert context is not context2
+             assert len(comms._comms_cache) == 2
+
+     del context
+     del context2
+     assert len(comms._comms_cache) == 0
+     assert scheduler_addresses_old == comms_addresses_old
+
+     # A new cluster should have a new comms object. Previously, this failed
+     # because we referenced the old cluster's addresses.
+     with LocalCluster(n_workers=1) as cluster:
+         with Client(cluster) as client:
+             scheduler_addresses_new = list(client.scheduler_info()["workers"].keys())
+             comms_addresses_new = list(comms.default_comms().worker_addresses)
+
+             assert scheduler_addresses_new == comms_addresses_new
+
+
+ def test_scaled_cluster_gets_new_comms_context():
+     # Ensure that if we create a CommsContext, scale the cluster,
+     # and create a new CommsContext, then the new CommsContext
+     # should include the new worker.
+     # https://github.com/rapidsai/dask-cuda/issues/1450
+
+     name = "explicit-comms-shuffle"
+     ddf = dd.from_pandas(pd.DataFrame({"key": np.arange(10)}), npartitions=2)
+
+     with LocalCluster(n_workers=2) as cluster:
+         with Client(cluster) as client:
+             context_1 = comms.default_comms()
+
+             def check(dask_worker, session_id: int):
+                 has_state = hasattr(dask_worker, "_explicit_comm_state")
+                 has_state_for_session = (
+                     has_state and session_id in dask_worker._explicit_comm_state
+                 )
+                 if has_state_for_session:
+                     n_workers = dask_worker._explicit_comm_state[session_id]["nworkers"]
+                 else:
+                     n_workers = None
+                 return {
+                     "has_state": has_state,
+                     "has_state_for_session": has_state_for_session,
+                     "n_workers": n_workers,
+                 }
+
+             result_1 = client.run(check, session_id=context_1.sessionId)
+             expected_values = {
+                 "has_state": True,
+                 "has_state_for_session": True,
+                 "n_workers": 2,
+             }
+             expected_1 = {
+                 k: expected_values for k in client.scheduler_info()["workers"]
+             }
+             assert result_1 == expected_1
+
+             # Run a shuffle with the initial setup as a sanity test
+             with dask.config.set(explicit_comms=True):
+                 shuffled = ddf.shuffle(on="key", npartitions=4)
+                 assert any(name in str(key) for key in shuffled.dask)
+                 result = shuffled.compute()
+
+             with dask.config.set(explicit_comms=False):
+                 shuffled = ddf.shuffle(on="key", npartitions=4)
+                 expected = shuffled.compute()
+
+             assert_eq(result, expected)
+
+             # --- Scale the cluster ---
+             cluster.scale(3)
+             client.wait_for_workers(3, timeout=5)
+
+             context_2 = comms.default_comms()
+             result_2 = client.run(check, session_id=context_2.sessionId)
+             expected_values = {
+                 "has_state": True,
+                 "has_state_for_session": True,
+                 "n_workers": 3,
+             }
+             expected_2 = {
+                 k: expected_values for k in client.scheduler_info()["workers"]
+             }
+             assert result_2 == expected_2
+
+             # Run a shuffle with the new setup
+             with dask.config.set(explicit_comms=True):
+                 shuffled = ddf.shuffle(on="key", npartitions=4)
+                 assert any(name in str(key) for key in shuffled.dask)
+                 result = shuffled.compute()
+
+             with dask.config.set(explicit_comms=False):
+                 shuffled = ddf.shuffle(on="key", npartitions=4)
+                 expected = shuffled.compute()
+
+             assert_eq(result, expected)
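
Note: the explicit-comms path exercised by these tests is opt-in through Dask's config. A minimal usage sketch, assuming a distributed client is active and `dask_cuda` has been imported (which installs the shuffle patch):

import dask
import dask.dataframe as dd
import numpy as np
import pandas as pd
from distributed import Client, LocalCluster

import dask_cuda  # noqa: F401  # importing applies the shuffle patch

if __name__ == "__main__":
    with LocalCluster(n_workers=2) as cluster, Client(cluster):
        ddf = dd.from_pandas(pd.DataFrame({"key": np.arange(10)}), npartitions=2)
        with dask.config.set(explicit_comms=True):
            # The shuffle is routed through dask-cuda's explicit-comms
            # implementation instead of Dask's default shuffle.
            result = ddf.shuffle(on="key", npartitions=4).compute()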
dask_cuda/tests/test_initialize.py CHANGED
@@ -1,4 +1,5 @@
  import multiprocessing as mp
+ import sys
 
  import numpy
  import psutil
@@ -214,3 +215,38 @@ def test_initialize_ucx_all(protocol):
      p.start()
      p.join()
      assert not p.exitcode
+
+
+ def _test_dask_cuda_import():
+     # Check that importing `dask_cuda` does NOT
+     # require `dask.dataframe` or `dask.array`.
+
+     # Patch sys.modules so that `dask.dataframe`
+     # and `dask.array` cannot be found.
+     with pytest.MonkeyPatch.context() as monkeypatch:
+         for k in list(sys.modules):
+             if k.startswith("dask.dataframe") or k.startswith("dask.array"):
+                 monkeypatch.setitem(sys.modules, k, None)
+         monkeypatch.delitem(sys.modules, "dask_cuda")
+
+         # Check that top-level imports still succeed.
+         import dask_cuda  # noqa: F401
+         from dask_cuda import CUDAWorker  # noqa: F401
+         from dask_cuda import LocalCUDACluster
+
+         with LocalCUDACluster(
+             dashboard_address=None,
+             n_workers=1,
+             threads_per_worker=1,
+             processes=True,
+             worker_class=IncreasedCloseTimeoutNanny,
+         ) as cluster:
+             with Client(cluster) as client:
+                 client.run(lambda *args: None)
+
+
+ def test_dask_cuda_import():
+     p = mp.Process(target=_test_dask_cuda_import)
+     p.start()
+     p.join()
+     assert not p.exitcode
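
Note: the test above relies on an import-system detail worth calling out: setting `sys.modules[name] = None` makes any subsequent `import name` raise `ImportError`, simulating a missing optional dependency without uninstalling it. A standalone sketch, using `json` as an arbitrary stand-in module:

import sys

import pytest


def test_simulate_missing_module():
    with pytest.MonkeyPatch.context() as monkeypatch:
        # A None entry in sys.modules makes the import machinery
        # raise ImportError for that name.
        monkeypatch.setitem(sys.modules, "json", None)
        with pytest.raises(ImportError):
            import json  # noqa: F401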
dask_cuda-25.2.0.dist-info/METADATA → dask_cuda-25.4.0.dist-info/METADATA RENAMED
@@ -1,6 +1,6 @@
- Metadata-Version: 2.2
+ Metadata-Version: 2.4
  Name: dask-cuda
- Version: 25.2.0
+ Version: 25.4.0
  Summary: Utilities for Dask and CUDA interactions
  Author: NVIDIA Corporation
  License: Apache 2.0
@@ -23,13 +23,14 @@ Requires-Dist: numba<0.61.0a0,>=0.59.1
  Requires-Dist: numpy<3.0a0,>=1.23
  Requires-Dist: pandas>=1.3
  Requires-Dist: pynvml<13.0.0a0,>=12.0.0
- Requires-Dist: rapids-dask-dependency==25.2.*
+ Requires-Dist: rapids-dask-dependency==25.4.*
  Requires-Dist: zict>=2.0.0
  Provides-Extra: docs
  Requires-Dist: numpydoc>=1.1.0; extra == "docs"
  Requires-Dist: sphinx; extra == "docs"
  Requires-Dist: sphinx-click>=2.7.1; extra == "docs"
  Requires-Dist: sphinx-rtd-theme>=0.5.1; extra == "docs"
+ Dynamic: license-file
 
  Dask CUDA
  =========
dask_cuda-25.2.0.dist-info/RECORD → dask_cuda-25.4.0.dist-info/RECORD RENAMED
@@ -1,5 +1,6 @@
- dask_cuda/VERSION,sha256=gWnOsR7j8lHNsXJO_balY3FJzbDTto6xlQk1ItvppEY,8
- dask_cuda/__init__.py,sha256=YMnNzbZ1pDrsbgBc9ipsxBQyYn72IzoHAei4_imXHok,1665
+ dask_cuda/GIT_COMMIT,sha256=wbY8QunTBf6nZeA4ulUfzAdQWyE7hoxV330KmJ3VnjA,41
+ dask_cuda/VERSION,sha256=EM36MPurzJgotElKb8R7ZaIOF2woBA69gsVnmiyf-LY,8
+ dask_cuda/__init__.py,sha256=Wbc7R0voN4vsQkb7SKuVXH0YXuXtfnAxrupxfM4lT10,1933
  dask_cuda/_version.py,sha256=cHDO9AzNtxkCVhwYu7hL3H7RPAkQnxpKBjElOst3rkI,964
  dask_cuda/cli.py,sha256=cScVyNiA_l9uXeDgkIcmbcR4l4cH1_1shqSqsVmuHPE,17053
  dask_cuda/cuda_worker.py,sha256=rZ1ITG_ZCbuaMA9e8uSqCjU8Km4AMphGGrxpBPQG8xU,9477
@@ -13,7 +14,7 @@ dask_cuda/local_cuda_cluster.py,sha256=wqwKVRV6jT13sf9e-XsvbVBlTrnhmcbmHQBFPTFca
  dask_cuda/plugins.py,sha256=A2aT8HA6q_JhIEx6-XKcpbWEbl7aTg1GNoZQH8_vh00,7197
  dask_cuda/proxify_device_objects.py,sha256=99CD7LOE79YiQGJ12sYl_XImVhJXpFR4vG5utdkjTQo,8108
  dask_cuda/proxify_host_file.py,sha256=Wf5CFCC1JN5zmfvND3ls0M5FL01Y8VhHrk0xV3UQ9kk,30850
- dask_cuda/proxy_object.py,sha256=Zwn3mUIS_6NqNvPRTbcu6auXTQpEs8Tc-pc3_kfBBNY,29616
+ dask_cuda/proxy_object.py,sha256=mrCCGwS-mltcY8oddJEXnPL6rV2dBpGgsFypBVbxRsA,30150
  dask_cuda/utils.py,sha256=Goq-m78rYZ-bcJitg47N1h_PC4PDuzXG0CUVH7V8azU,25515
  dask_cuda/utils_test.py,sha256=WNMR0gic2tuP3pgygcR9g52NfyX8iGMOan6juXhpkCE,1694
  dask_cuda/worker_spec.py,sha256=7-Uq_e5q2SkTlsmctMcYLCa9_3RiiVHZLIN7ctfaFmE,4376
@@ -27,17 +28,17 @@ dask_cuda/benchmarks/local_cupy_map_overlap.py,sha256=YAllGFuG6MePfPL8gdZ-Ld7a44
  dask_cuda/benchmarks/read_parquet.py,sha256=spKu6RLWYngPZq9hnaoU0mz7INIaJnErfqjBG2wH8Zc,7614
  dask_cuda/benchmarks/utils.py,sha256=_x0XXL_F3W-fExpuQfTBwuK3WnrVuXQQepbnvjUqS9o,30075
  dask_cuda/explicit_comms/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
- dask_cuda/explicit_comms/comms.py,sha256=Su6PuNo68IyS-AwoqU4S9TmqWsLvUdNa0jot2hx8jQQ,10400
+ dask_cuda/explicit_comms/comms.py,sha256=uq-XPOH38dFcYS_13Vomj2ER6zxQz7DPeSM000mOVmY,11541
  dask_cuda/explicit_comms/dataframe/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  dask_cuda/explicit_comms/dataframe/shuffle.py,sha256=g9xDyFKmblEuevZt5Drh66uMLw-LUNOI8CIucDdACmY,21231
  dask_cuda/tests/test_cudf_builtin_spilling.py,sha256=qVN9J0Hdv66A9COFArLIdRriyyxEKpS3lEZGHbVHaq8,4903
  dask_cuda/tests/test_dask_cuda_worker.py,sha256=C1emlr47yGa3TdSSlAXJRzguY4bcH74htk21x9th7nQ,20556
  dask_cuda/tests/test_device_host_file.py,sha256=79ssUISo1YhsW_7HdwqPfsH2LRzS2bi5BjPym1Sdgqw,5882
  dask_cuda/tests/test_dgx.py,sha256=BPCF4ZvhrVKkT43OOFHdijuo-M34vW3V18C8rRH1HXg,7489
- dask_cuda/tests/test_explicit_comms.py,sha256=F4_84bx3ODO8Q6ERHXFJF31uXj4gehLwKQncT1evbuM,14895
+ dask_cuda/tests/test_explicit_comms.py,sha256=xnQjjUrd6RFd9CS99pVuWY1frfiMXzRv_fW4rk9opOk,19465
  dask_cuda/tests/test_from_array.py,sha256=okT1B6UqHmLxoy0uER0Ylm3UyOmi5BAXwJpTuTAw44I,601
  dask_cuda/tests/test_gds.py,sha256=j1Huud6UGm1fbkyRLQEz_ysrVw__5AimwSn_M-2GEvs,1513
- dask_cuda/tests/test_initialize.py,sha256=Rba59ZbljEm1yyN94_sWZPEE_f7hWln95aiBVc49pmY,6960
+ dask_cuda/tests/test_initialize.py,sha256=4Ovv_ClokKibPX6wfuaoQgN4eKCohagRFoE3s3D7Huk,8119
  dask_cuda/tests/test_local_cuda_cluster.py,sha256=Lc9QncyGwBwhaZPGBfreXJf3ZC9Zd8SjDc2fpeQ-BT0,19710
  dask_cuda/tests/test_proxify_host_file.py,sha256=LC3jjo_gbfhdIy1Zy_ynmgyv31HXFoBINCe1-XXZ4XU,18994
  dask_cuda/tests/test_proxy.py,sha256=U9uE-QesTwquNKzTReEKiYgoRgS_pfGW-A-gJNppHyg,23817
@@ -45,11 +46,11 @@ dask_cuda/tests/test_spill.py,sha256=CYMbp5HDBYlZ7T_n8RfSOZxaWFcAQKjprjRM7Wupcdw
  dask_cuda/tests/test_utils.py,sha256=PQI_oTONWnKSKlkQfEeK-vlmYa0-cPpDjDEbm74cNCE,9104
  dask_cuda/tests/test_version.py,sha256=vK2HjlRLX0nxwvRsYxBqhoZryBNZklzA-vdnyuWDxVg,365
  dask_cuda/tests/test_worker_spec.py,sha256=Bvu85vkqm6ZDAYPXKMJlI2pm9Uc5tiYKNtO4goXSw-I,2399
+ dask_cuda-25.4.0.dist-info/licenses/LICENSE,sha256=MjI3I-EgxfEvZlgjk82rgiFsZqSDXHFETd2QJ89UwDA,11348
  examples/ucx/client_initialize.py,sha256=YN3AXHF8btcMd6NicKKhKR9SXouAsK1foJhFspbOn70,1262
  examples/ucx/local_cuda_cluster.py,sha256=7xVY3EhwhkY2L4VZin_BiMCbrjhirDNChoC86KiETNc,1983
- dask_cuda-25.2.0.dist-info/LICENSE,sha256=MjI3I-EgxfEvZlgjk82rgiFsZqSDXHFETd2QJ89UwDA,11348
- dask_cuda-25.2.0.dist-info/METADATA,sha256=c6G1F5I_jeDlfXs42b9LSwtVc5HvUtfyEgZ6cLiw7fM,2272
- dask_cuda-25.2.0.dist-info/WHEEL,sha256=In9FTNxeP60KnTkGw7wk6mJPYd_dQSjEZmXdBdMCI-8,91
- dask_cuda-25.2.0.dist-info/entry_points.txt,sha256=UcRaKVEpywtxc6pF1VnfMB0UK4sJg7a8_NdZF67laPM,136
- dask_cuda-25.2.0.dist-info/top_level.txt,sha256=3kKxJxeM108fuYc_lwwlklP7YBU9IEmdmRAouzi397o,33
- dask_cuda-25.2.0.dist-info/RECORD,,
+ dask_cuda-25.4.0.dist-info/METADATA,sha256=udK2maTnpkUBnOOtTvGOwySUtJxnIo4rcIOmySPBuOk,2294
+ dask_cuda-25.4.0.dist-info/WHEEL,sha256=CmyFI0kx5cdEMTLiONQRbGQwjIoR1aIYB7eCAQ4KPJ0,91
+ dask_cuda-25.4.0.dist-info/entry_points.txt,sha256=UcRaKVEpywtxc6pF1VnfMB0UK4sJg7a8_NdZF67laPM,136
+ dask_cuda-25.4.0.dist-info/top_level.txt,sha256=3kKxJxeM108fuYc_lwwlklP7YBU9IEmdmRAouzi397o,33
+ dask_cuda-25.4.0.dist-info/RECORD,,
dask_cuda-25.2.0.dist-info/WHEEL → dask_cuda-25.4.0.dist-info/WHEEL RENAMED
@@ -1,5 +1,5 @@
  Wheel-Version: 1.0
- Generator: setuptools (75.8.0)
+ Generator: setuptools (78.1.0)
  Root-Is-Purelib: true
  Tag: py3-none-any