dask-cuda 25.8.0__py3-none-any.whl → 25.10.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (36)
  1. dask_cuda/GIT_COMMIT +1 -1
  2. dask_cuda/VERSION +1 -1
  3. dask_cuda/benchmarks/local_cudf_groupby.py +1 -1
  4. dask_cuda/benchmarks/local_cudf_merge.py +1 -1
  5. dask_cuda/benchmarks/local_cudf_shuffle.py +1 -1
  6. dask_cuda/benchmarks/local_cupy.py +1 -1
  7. dask_cuda/benchmarks/local_cupy_map_overlap.py +1 -1
  8. dask_cuda/benchmarks/utils.py +1 -1
  9. dask_cuda/cuda_worker.py +1 -1
  10. dask_cuda/get_device_memory_objects.py +1 -4
  11. dask_cuda/initialize.py +140 -121
  12. dask_cuda/local_cuda_cluster.py +10 -25
  13. dask_cuda/tests/test_cudf_builtin_spilling.py +3 -1
  14. dask_cuda/tests/test_dask_setup.py +193 -0
  15. dask_cuda/tests/test_dgx.py +16 -32
  16. dask_cuda/tests/test_explicit_comms.py +11 -10
  17. dask_cuda/tests/test_from_array.py +1 -5
  18. dask_cuda/tests/test_initialize.py +230 -41
  19. dask_cuda/tests/test_local_cuda_cluster.py +16 -62
  20. dask_cuda/tests/test_proxify_host_file.py +9 -4
  21. dask_cuda/tests/test_proxy.py +8 -8
  22. dask_cuda/tests/test_spill.py +3 -3
  23. dask_cuda/tests/test_utils.py +8 -23
  24. dask_cuda/tests/test_worker_spec.py +5 -2
  25. dask_cuda/utils.py +12 -66
  26. dask_cuda/utils_test.py +0 -13
  27. dask_cuda/worker_spec.py +7 -9
  28. {dask_cuda-25.8.0.dist-info → dask_cuda-25.10.0.dist-info}/METADATA +11 -4
  29. dask_cuda-25.10.0.dist-info/RECORD +63 -0
  30. shared-actions/check_nightly_success/check-nightly-success/check.py +1 -1
  31. dask_cuda/tests/test_rdd_ucx.py +0 -160
  32. dask_cuda-25.8.0.dist-info/RECORD +0 -63
  33. {dask_cuda-25.8.0.dist-info → dask_cuda-25.10.0.dist-info}/WHEEL +0 -0
  34. {dask_cuda-25.8.0.dist-info → dask_cuda-25.10.0.dist-info}/entry_points.txt +0 -0
  35. {dask_cuda-25.8.0.dist-info → dask_cuda-25.10.0.dist-info}/licenses/LICENSE +0 -0
  36. {dask_cuda-25.8.0.dist-info → dask_cuda-25.10.0.dist-info}/top_level.txt +0 -0
dask_cuda/tests/test_utils.py CHANGED
@@ -24,7 +24,6 @@ from dask_cuda.utils import (
     parse_device_memory_limit,
     unpack_bitmask,
 )
-from dask_cuda.utils_test import get_ucx_implementation
 
 
 @patch.dict(os.environ, {"CUDA_VISIBLE_DEVICES": "0,1,2"})
@@ -88,15 +87,11 @@ def test_get_device_total_memory():
     assert total_mem is None
 
 
-@pytest.mark.parametrize(
-    "protocol",
-    ["ucx", "ucx-old"],
-)
-def test_get_preload_options_default(protocol):
-    get_ucx_implementation(protocol)
+def test_get_preload_options_default():
+    pytest.importorskip("distributed_ucxx")
 
     opts = get_preload_options(
-        protocol=protocol,
+        protocol="ucx",
         create_cuda_context=True,
     )
 
@@ -106,18 +101,14 @@ def test_get_preload_options_default(protocol):
     assert opts["preload_argv"] == ["--create-cuda-context"]
 
 
-@pytest.mark.parametrize(
-    "protocol",
-    ["ucx", "ucx-old"],
-)
 @pytest.mark.parametrize("enable_tcp", [True, False])
 @pytest.mark.parametrize("enable_infiniband", [True, False])
 @pytest.mark.parametrize("enable_nvlink", [True, False])
-def test_get_preload_options(protocol, enable_tcp, enable_infiniband, enable_nvlink):
-    get_ucx_implementation(protocol)
+def test_get_preload_options(enable_tcp, enable_infiniband, enable_nvlink):
+    pytest.importorskip("distributed_ucxx")
 
     opts = get_preload_options(
-        protocol=protocol,
+        protocol="ucx",
         create_cuda_context=True,
         enable_tcp_over_ucx=enable_tcp,
         enable_infiniband=enable_infiniband,
@@ -137,17 +128,11 @@ def test_get_preload_options(protocol, enable_tcp, enable_infiniband, enable_nvl
     assert "--enable-nvlink" in opts["preload_argv"]
 
 
-@pytest.mark.parametrize(
-    "protocol",
-    ["ucx", "ucx-old"],
-)
 @pytest.mark.parametrize("enable_tcp_over_ucx", [True, False, None])
 @pytest.mark.parametrize("enable_nvlink", [True, False, None])
 @pytest.mark.parametrize("enable_infiniband", [True, False, None])
-def test_get_ucx_config(
-    protocol, enable_tcp_over_ucx, enable_infiniband, enable_nvlink
-):
-    get_ucx_implementation(protocol)
+def test_get_ucx_config(enable_tcp_over_ucx, enable_infiniband, enable_nvlink):
+    pytest.importorskip("distributed_ucxx")
 
     kwargs = {
         "enable_tcp_over_ucx": enable_tcp_over_ucx,
dask_cuda/tests/test_worker_spec.py CHANGED
@@ -31,7 +31,7 @@ def _check_env_value(spec, k, v):
 @pytest.mark.parametrize("num_devices", [1, 4])
 @pytest.mark.parametrize("cls", [Nanny])
 @pytest.mark.parametrize("interface", [None, "eth0", "enp1s0f0"])
-@pytest.mark.parametrize("protocol", [None, "tcp", "ucx", "ucx-old"])
+@pytest.mark.parametrize("protocol", [None, "tcp", "ucx"])
 @pytest.mark.parametrize("dashboard_address", [None, ":0", ":8787"])
 @pytest.mark.parametrize("threads_per_worker", [1, 8])
 @pytest.mark.parametrize("silence_logs", [False, True])
@@ -48,6 +48,9 @@ def test_worker_spec(
     enable_infiniband,
     enable_nvlink,
 ):
+    if protocol == "ucx":
+        pytest.importorskip("distributed_ucxx")
+
     def _test():
         return worker_spec(
             CUDA_VISIBLE_DEVICES=list(range(num_devices)),
@@ -61,7 +64,7 @@ def test_worker_spec(
             enable_nvlink=enable_nvlink,
         )
 
-    if (enable_infiniband or enable_nvlink) and protocol not in ("ucx", "ucx-old"):
+    if (enable_infiniband or enable_nvlink) and protocol != "ucx":
         with pytest.raises(
             TypeError, match="Enabling InfiniBand or NVLink requires protocol='ucx'"
         ):
dask_cuda/utils.py CHANGED
@@ -1,7 +1,6 @@
 # SPDX-FileCopyrightText: Copyright (c) 2019-2025, NVIDIA CORPORATION & AFFILIATES.
 # SPDX-License-Identifier: Apache-2.0
 
-import importlib
 import math
 import operator
 import os
@@ -19,7 +18,6 @@ import pynvml
 import toolz
 
 import dask
-import distributed  # noqa: required for dask.config.get("distributed.comm.ucx")
 from dask.config import canonical_name
 from dask.utils import format_bytes, parse_bytes
 from distributed import wait
@@ -266,17 +264,16 @@ def get_ucx_config(
     enable_infiniband=None,
     enable_nvlink=None,
     enable_rdmacm=None,
-    protocol=None,
 ):
-    ucx_config = dask.config.get("distributed.comm.ucx")
+    try:
+        import distributed_ucxx
+    except ImportError:
+        return None
 
-    # TODO: remove along with `protocol` kwarg when UCX-Py is removed, see
-    # https://github.com/rapidsai/dask-cuda/issues/1517
-    if protocol in ("ucx", "ucxx", "ucx-old"):
-        ucx_config[canonical_name("ucx-protocol", ucx_config)] = protocol
+    distributed_ucxx.config.setup_config()
+    ucx_config = dask.config.get("distributed-ucxx")
 
     ucx_config[canonical_name("create-cuda-context", ucx_config)] = True
-    ucx_config[canonical_name("reuse-endpoints", ucx_config)] = False
 
     # If any transport is explicitly disabled (`False`) by the user, others that
     # are not specified should be enabled (`True`). If transports are explicitly
@@ -358,11 +355,7 @@ def get_preload_options(
     if create_cuda_context:
         preload_options["preload_argv"].append("--create-cuda-context")
 
-    try:
-        _get_active_ucx_implementation_name(protocol)
-    except ValueError:
-        pass
-    else:
+    if protocol in ("ucx", "ucxx"):
         initialize_ucx_argv = []
         if enable_tcp_over_ucx:
             initialize_ucx_argv.append("--enable-tcp-over-ucx")
@@ -828,21 +821,12 @@ def get_worker_config(dask_worker):
         ret["device-memory-limit"] = dask_worker.data.device_buffer.n
 
     # using ucx ?
-    scheme, loc = parse_address(dask_worker.scheduler.address)
-    ret["protocol"] = scheme
-    try:
-        protocol = _get_active_ucx_implementation_name(scheme)
-    except ValueError:
-        pass
-    else:
-        if protocol == "ucxx":
-            import ucxx
-
-            ret["ucx-transports"] = ucxx.get_active_transports()
-        elif protocol == "ucx-old":
-            import ucp
+    protocol, loc = parse_address(dask_worker.scheduler.address)
+    ret["protocol"] = protocol
+    if protocol in ("ucx", "ucxx"):
+        import ucxx
 
-            ret["ucx-transports"] = ucp.get_active_transports()
+        ret["ucx-transports"] = ucxx.get_active_transports()
 
     # comm timeouts
     ret["distributed.comm.timeouts"] = dask.config.get("distributed.comm.timeouts")
@@ -988,41 +972,3 @@ class CommaSeparatedChoice(click.Choice):
                 choices_str = ", ".join(f"'{c}'" for c in self.choices)
                 self.fail(f"invalid choice(s): {v}. (choices are: {choices_str})")
         return values
-
-
-def _get_active_ucx_implementation_name(protocol):
-    """Get the name of active UCX implementation.
-
-    Determine what UCX implementation is being activated based on a series of
-    conditions. UCXX is selected if:
-    - The protocol is `"ucxx"`, or the protocol is `"ucx"` and the `distributed-ucxx`
-      package is installed.
-    UCX-Py is selected if:
-    - The protocol is `"ucx-old"`, or the protocol is `"ucx"` and the `distributed-ucxx`
-      package is not installed, in which case a `FutureWarning` is also raised.
-
-    Parameters
-    ----------
-    protocol: str
-        The communication protocol selected.
-
-    Returns
-    -------
-    The selected implementation type, either "ucxx" or "ucx-old".
-
-    Raises
-    ------
-    ValueError
-        If protocol is not a valid UCX protocol.
-    """
-    has_ucxx = importlib.util.find_spec("distributed_ucxx") is not None
-
-    if protocol == "ucxx" or (has_ucxx and protocol == "ucx"):
-        # With https://github.com/rapidsai/rapids-dask-dependency/pull/116,
-        # `protocol="ucx"` now points to UCXX (if distributed-ucxx is installed),
-        # thus call the UCXX initializer.
-        return "ucxx"
-    elif protocol in ("ucx", "ucx-old"):
-        return "ucx-old"
-    else:
-        raise ValueError("Protocol is neither UCXX nor UCX-Py")
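Taken together, the utils.py changes mean get_ucx_config no longer accepts a protocol keyword and no longer falls back to UCX-Py: it returns None when distributed_ucxx cannot be imported, and otherwise builds its result from the "distributed-ucxx" configuration namespace. A rough calling sketch of that new behavior follows; the keyword names are the ones kept by the diff, everything else here is illustrative only:

    from dask_cuda.utils import get_ucx_config

    # Returns None if the distributed_ucxx package is not installed.
    ucx_config = get_ucx_config(
        enable_infiniband=None,
        enable_nvlink=None,
        enable_rdmacm=None,
    )

    if ucx_config is None:
        print("UCXX backend unavailable; no UCX configuration generated")
    else:
        # Otherwise the returned mapping mirrors the "distributed-ucxx" config
        # section, with "create-cuda-context" forced to True.
        print(ucx_config)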
dask_cuda/utils_test.py CHANGED
@@ -6,8 +6,6 @@ from typing import Literal
 import distributed
 from distributed import Nanny, Worker
 
-from .utils import _get_active_ucx_implementation_name
-
 
 class MockWorker(Worker):
     """Mock Worker class preventing NVML from getting used by SystemMonitor.
@@ -48,14 +46,3 @@ class IncreasedCloseTimeoutNanny(Nanny):
         self, timeout: float = 30.0, reason: str = "nanny-close"
     ) -> Literal["OK"]:
         return await super().close(timeout=timeout, reason=reason)
-
-
-def get_ucx_implementation(protocol):
-    import pytest
-
-    protocol = _get_active_ucx_implementation_name(protocol)
-
-    if protocol == "ucxx":
-        return pytest.importorskip("ucxx")
-    else:
-        return pytest.importorskip("ucp")
dask_cuda/worker_spec.py CHANGED
@@ -9,7 +9,7 @@ from distributed.system import MEMORY_LIMIT
 from .initialize import initialize
 from .local_cuda_cluster import cuda_visible_devices
 from .plugins import CPUAffinity
-from .utils import _get_active_ucx_implementation_name, get_cpu_affinity, get_gpu_count
+from .utils import get_cpu_affinity, get_gpu_count
 
 
 def worker_spec(
@@ -84,14 +84,12 @@ def worker_spec(
             'preload_argv': ['--create-cuda-context']}}}
 
     """
-    if enable_tcp_over_ucx or enable_infiniband or enable_nvlink:
-        try:
-            _get_active_ucx_implementation_name(protocol)
-        except ValueError:
-            raise TypeError(
-                "Enabling InfiniBand or NVLink requires protocol='ucx', "
-                "protocol='ucxx' or protocol='ucx-old'"
-            ) from None
+    if (
+        enable_tcp_over_ucx or enable_infiniband or enable_nvlink
+    ) and protocol != "ucx":
+        raise TypeError(
+            "Enabling InfiniBand or NVLink requires protocol='ucx'"
+        ) from None
 
     if CUDA_VISIBLE_DEVICES is None:
         CUDA_VISIBLE_DEVICES = os.environ.get(
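The worker_spec validation is correspondingly simpler: requesting TCP-over-UCX, InfiniBand, or NVLink is now rejected for anything other than protocol="ucx". A small sketch of the failure path, which per the diff runs before any GPU discovery and so works even on a machine without GPUs; the call itself is illustrative, not taken from the diff:

    from dask_cuda.worker_spec import worker_spec

    try:
        # NVLink requested without protocol="ucx" now raises immediately.
        worker_spec(protocol="tcp", enable_nvlink=True)
    except TypeError as exc:
        print(exc)  # Enabling InfiniBand or NVLink requires protocol='ucx'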
{dask_cuda-25.8.0.dist-info → dask_cuda-25.10.0.dist-info}/METADATA CHANGED
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: dask-cuda
-Version: 25.8.0
+Version: 25.10.0
 Summary: Utilities for Dask and CUDA interactions
 Author: NVIDIA Corporation
 License: Apache-2.0
@@ -20,17 +20,24 @@ Requires-Python: >=3.10
 Description-Content-Type: text/markdown
 License-File: LICENSE
 Requires-Dist: click>=8.1
-Requires-Dist: numba<0.62.0a0,>=0.59.1
+Requires-Dist: cuda-core==0.3.*
+Requires-Dist: numba-cuda<0.20.0a0,>=0.19.1
 Requires-Dist: numpy<3.0a0,>=1.23
+Requires-Dist: nvidia-ml-py>=12
 Requires-Dist: pandas>=1.3
-Requires-Dist: pynvml<13.0.0a0,>=12.0.0
-Requires-Dist: rapids-dask-dependency==25.8.*
+Requires-Dist: rapids-dask-dependency==25.10.*
 Requires-Dist: zict>=2.0.0
 Provides-Extra: docs
 Requires-Dist: numpydoc>=1.1.0; extra == "docs"
 Requires-Dist: sphinx; extra == "docs"
 Requires-Dist: sphinx-click>=2.7.1; extra == "docs"
 Requires-Dist: sphinx-rtd-theme>=0.5.1; extra == "docs"
+Provides-Extra: cu12
+Requires-Dist: cuda-core[cu12]==0.3.*; extra == "cu12"
+Requires-Dist: numba-cuda[cu12]<0.20.0a0,>=0.19.1; extra == "cu12"
+Provides-Extra: cu13
+Requires-Dist: cuda-core[cu13]==0.3.*; extra == "cu13"
+Requires-Dist: numba-cuda[cu13]<0.20.0a0,>=0.19.1; extra == "cu13"
 Dynamic: license-file
 
 Dask CUDA
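On the packaging side, 25.10.0 swaps the numba and pynvml requirements for numba-cuda and nvidia-ml-py, adds cuda-core, and introduces cu12/cu13 extras that select CUDA-major-specific builds. A small, purely illustrative check of what an installed wheel declares, using only the standard library:

    from importlib.metadata import metadata, requires

    md = metadata("dask-cuda")
    print(md["Version"])  # 25.10.0 for this wheel

    for requirement in requires("dask-cuda") or []:
        # The cu12/cu13 extras appear with markers such as: extra == "cu12"
        print(requirement)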
dask_cuda-25.10.0.dist-info/RECORD ADDED
@@ -0,0 +1,63 @@
+dask_cuda/GIT_COMMIT,sha256=b9kAPQayKE7_CmJ29dDIOd-qHhFKihEapaSOkl84OFU,41
+dask_cuda/VERSION,sha256=qFUmAhDerTzlTeG7G1N8kkXh-gyZzDU9R0ARLQtoP_Y,8
+dask_cuda/__init__.py,sha256=Wbc7R0voN4vsQkb7SKuVXH0YXuXtfnAxrupxfM4lT10,1933
+dask_cuda/_compat.py,sha256=AG2lKGAtZitDPBjHeFDKLTN_B5HKodrhZ2kHlk1Z-D0,498
+dask_cuda/_version.py,sha256=cHDO9AzNtxkCVhwYu7hL3H7RPAkQnxpKBjElOst3rkI,964
+dask_cuda/cli.py,sha256=VRYuryhViVWkCH7H7fDDTMToSOC17nAUMIPbd3K2jRs,17490
+dask_cuda/cuda_worker.py,sha256=OwAi8Qv45a-KTBC2XJdRdd-iuId3vb0bTTONOuq9DEQ,8586
+dask_cuda/device_host_file.py,sha256=wTiTyYthUrR8l2WM7iV5lvjQDzeYxnnK7GUlPsHI6p4,11042
+dask_cuda/disk_io.py,sha256=IpD2hA-AjTw3SEu3w9pT9ELAUpKW0XcphPh8WwlKH70,6757
+dask_cuda/get_device_memory_objects.py,sha256=je10ygTAiifDxZupmyr_wzkbDuf9n3_saQGdDJAUwig,4483
+dask_cuda/initialize.py,sha256=DH8_zOmK81_kygijCvAVdvcBz9QvH7IwhLueNaRGuos,8035
+dask_cuda/is_device_object.py,sha256=x9klFdeQzLcug7wZMxN3GK2AS121tlDe-LQ2uznm5yo,1179
+dask_cuda/is_spillable_object.py,sha256=8gj6QgtKcmzrpQwy8rE-pS1R8tjaJOeD-Fzr6LumjJg,1596
+dask_cuda/local_cuda_cluster.py,sha256=ihf0jTUAyRuKJZPgmvvZ0ZLsgp-No9QuxGZzRBdQWNU,20024
+dask_cuda/plugins.py,sha256=u4gWQy8DgaTAsd59KCNXLG6zmdmGg5qhaI8ha2rMEFs,7085
+dask_cuda/proxify_device_objects.py,sha256=hGHK2gBkuaMYyrZqcitRGUiH_up56R1hsC7b_6YcCBU,8325
+dask_cuda/proxify_host_file.py,sha256=TbdszPvAgV4CRPMz99tumnCWwiCTqMBZqHQua3RRpHE,31031
+dask_cuda/proxy_object.py,sha256=eII-S0vdpFa6NLebpHFVQuJ2eyXJ5cBg8sucgFEa91g,30307
+dask_cuda/utils.py,sha256=981BxG2hOcjgr5FNMIdOsKTC_jyTi8D7HC3lHWkC8wM,31518
+dask_cuda/utils_test.py,sha256=U-_CRo7bkRJh5bDrCY9pGNKnU1PlJ4VA1gzrbh8Ubv4,1833
+dask_cuda/worker_common.py,sha256=uXoYZ1IZanAbHddfpL3NbVDV5WqCF0m94nOiqqvEnxc,7168
+dask_cuda/worker_spec.py,sha256=JNjY46iIR1OkanCfKBEcusVFgv33u2Lc2Pg7Lb30HfU,4531
+dask_cuda/benchmarks/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+dask_cuda/benchmarks/common.py,sha256=4AHgWkomo1RsPwH6eogz4vbE9vg_Dg-krScn9B1BRuw,7057
+dask_cuda/benchmarks/local_cudf_groupby.py,sha256=9jmSnP0A6Q8MR7j5nCGtNivw9S1jh1XD7lRQge9AtRQ,8635
+dask_cuda/benchmarks/local_cudf_merge.py,sha256=XCBaPxydH2DGzetp7cN3u5W3bnkOg_wfo3RQYz0GHDY,12330
+dask_cuda/benchmarks/local_cudf_shuffle.py,sha256=kqtA6eoC1r5TTEEDm48zuS8BoK05NYDSInpkfFcwvYQ,8375
+dask_cuda/benchmarks/local_cupy.py,sha256=jqq-wGsVmvZuKmtRu7smvRmnXL_jCksj083zCOKEBOA,10446
+dask_cuda/benchmarks/local_cupy_map_overlap.py,sha256=-6UoiH5PZgRRRClj6NoUTLCd4nvhAZMtj9wWK2tsqkA,6140
+dask_cuda/benchmarks/read_parquet.py,sha256=spKu6RLWYngPZq9hnaoU0mz7INIaJnErfqjBG2wH8Zc,7614
+dask_cuda/benchmarks/utils.py,sha256=o7RoUZnbHgib5KcR7-m86x3F6tIUGbgq_a45PMHURwg,30224
+dask_cuda/explicit_comms/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+dask_cuda/explicit_comms/comms.py,sha256=FbSDPQUz0gwQNa8EQMNEGEwWCM2KhuDk4CiNNjZy6qo,11611
+dask_cuda/explicit_comms/dataframe/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+dask_cuda/explicit_comms/dataframe/shuffle.py,sha256=vq-NBPYtKvKcfo7-eikPYaeWDf-V_DTHR5PVE_E-dyE,23863
+dask_cuda/tests/conftest.py,sha256=NjTnsgu7UewTpoTakt-2bpe0rtarwni25anjZPWbbb0,1501
+dask_cuda/tests/test_cudf_builtin_spilling.py,sha256=fhkLAyLBbDmS5pbyR1XbafS_OVW0Z6oVc3GerfPY0wg,5029
+dask_cuda/tests/test_dask_cuda_worker.py,sha256=iOkYAGSYfql7Wxxnb1hZJ7oedQyLmweXAHpQhckaTgY,23232
+dask_cuda/tests/test_dask_setup.py,sha256=gwu_RXIw3qMQExzUjIWzsClMavJEAQwZj9SgZCbF960,6741
+dask_cuda/tests/test_device_host_file.py,sha256=79ssUISo1YhsW_7HdwqPfsH2LRzS2bi5BjPym1Sdgqw,5882
+dask_cuda/tests/test_dgx.py,sha256=ZyzLelIbzELv_-gnh_CP6uedAVqfb5yzaXuT0GD-7WM,6700
+dask_cuda/tests/test_explicit_comms.py,sha256=FzK0wOaR3y6jMp-7Lj0xrhIhRRlpfGBEstBGqYbBMrg,20551
+dask_cuda/tests/test_from_array.py,sha256=AtGWl0Q9-u53hEGgGcwO65nZ0lCvybRIFHZP2p499EU,589
+dask_cuda/tests/test_gds.py,sha256=j1Huud6UGm1fbkyRLQEz_ysrVw__5AimwSn_M-2GEvs,1513
+dask_cuda/tests/test_initialize.py,sha256=fVlA8I1XnPMg3h5ARDKiVC8zzmYpTBNtDgiBgnazBwA,14751
+dask_cuda/tests/test_local_cuda_cluster.py,sha256=LDet1NCYCX2wNEhun1B9KflJhajUjwHgzE_QE-RY_1A,21819
+dask_cuda/tests/test_proxify_host_file.py,sha256=EdU75aZ_805ggK00wC46DMW5iJ7lcGtm9xgPoEHU_Jc,20123
+dask_cuda/tests/test_proxy.py,sha256=HyjwevaeJIsyqg9RpbzJsVp9VquDQfupHcanXSECzcs,23933
+dask_cuda/tests/test_spill.py,sha256=IzOOqb5hDYM2OntZZBavy1fwB6ShWmbmZw7zHE0w6Ug,15760
+dask_cuda/tests/test_utils.py,sha256=wrCRRN9zpk1NarYPLyB0da6axSSzfjsdFp6gufVFjO4,12018
+dask_cuda/tests/test_version.py,sha256=vK2HjlRLX0nxwvRsYxBqhoZryBNZklzA-vdnyuWDxVg,365
+dask_cuda/tests/test_worker_spec.py,sha256=gO0lLJqoPH_Ti6YYrj5k56h_ux-QW2Zrc_MuKPei-K4,2597
+dask_cuda-25.10.0.dist-info/licenses/LICENSE,sha256=MjI3I-EgxfEvZlgjk82rgiFsZqSDXHFETd2QJ89UwDA,11348
+examples/ucx/client_initialize.py,sha256=YN3AXHF8btcMd6NicKKhKR9SXouAsK1foJhFspbOn70,1262
+examples/ucx/local_cuda_cluster.py,sha256=7xVY3EhwhkY2L4VZin_BiMCbrjhirDNChoC86KiETNc,1983
+shared-actions/check_nightly_success/check-nightly-success/check.py,sha256=AK7qCq6OcXnoOhJSOUc_aIl4VA1uo6nK5nJtFoZwNJQ,5371
+shared-actions/telemetry-impls/summarize/bump_time.py,sha256=FXcGDqjCabsL6vlqd5RIV2PCWi2ns_ju8Ul0ERM21cA,2033
+shared-actions/telemetry-impls/summarize/send_trace.py,sha256=i30O_cKZ1OtXIbqaTE3R1JLJSw2XoYdmoVpM2Nc9wj8,16555
+dask_cuda-25.10.0.dist-info/METADATA,sha256=fYduQsSFn4GsVscPIj2spybnzVf0HXmh0cHF59E6LdE,2662
+dask_cuda-25.10.0.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
+dask_cuda-25.10.0.dist-info/entry_points.txt,sha256=UcRaKVEpywtxc6pF1VnfMB0UK4sJg7a8_NdZF67laPM,136
+dask_cuda-25.10.0.dist-info/top_level.txt,sha256=SaQxjNeXM9ZhEBjU6CY25OIOt2UojSREGiVxg9UIc08,63
+dask_cuda-25.10.0.dist-info/RECORD,,
shared-actions/check_nightly_success/check-nightly-success/check.py CHANGED
@@ -72,7 +72,7 @@ def main(
 
     for branch, branch_runs in branch_dict.items():
         # Only consider RAPIDS release branches, which have versions like
-        # '25.02' (RAPIDS) or '0.42' (ucxx, ucx-py).
+        # '25.10' (RAPIDS) or '0.46' (ucxx).
         if not re.match("branch-[0-9]{1,2}.[0-9]{2}", branch):
            continue
 
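Only the comment changes here; the branch filter itself is untouched. For reference, a standalone snippet showing which branch names that regex accepts (the sample names are made up):

    import re

    # Same pattern as check.py: RAPIDS-style release branches only.
    pattern = "branch-[0-9]{1,2}.[0-9]{2}"
    for name in ["branch-25.10", "branch-0.46", "main", "feature/foo"]:
        print(name, bool(re.match(pattern, name)))
    # branch-25.10 -> True, branch-0.46 -> True, the others -> False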
dask_cuda/tests/test_rdd_ucx.py DELETED
@@ -1,160 +0,0 @@
-# SPDX-FileCopyrightText: Copyright (c) 2025, NVIDIA CORPORATION & AFFILIATES.
-# SPDX-License-Identifier: Apache-2.0
-
-
-import importlib
-import io
-import multiprocessing as mp
-import sys
-
-import pytest
-
-from dask_cuda import LocalCUDACluster
-
-mp = mp.get_context("spawn")  # type: ignore
-
-
-def _has_distributed_ucxx() -> bool:
-    return bool(importlib.util.find_spec("distributed_ucxx"))
-
-
-def _test_protocol_ucx():
-    with LocalCUDACluster(protocol="ucx") as cluster:
-        assert cluster.scheduler_comm.address.startswith("ucx://")
-
-        if _has_distributed_ucxx():
-            import distributed_ucxx
-
-            assert all(
-                isinstance(batched_send.comm, distributed_ucxx.ucxx.UCXX)
-                for batched_send in cluster.scheduler.stream_comms.values()
-            )
-        else:
-            import rapids_dask_dependency
-
-            assert all(
-                isinstance(
-                    batched_send.comm,
-                    rapids_dask_dependency.patches.distributed.comm.__rdd_patch_ucx.UCX,
-                )
-                for batched_send in cluster.scheduler.stream_comms.values()
-            )
-
-
-def _test_protocol_ucxx():
-    if _has_distributed_ucxx():
-        with LocalCUDACluster(protocol="ucxx") as cluster:
-            assert cluster.scheduler_comm.address.startswith("ucxx://")
-            import distributed_ucxx
-
-            assert all(
-                isinstance(batched_send.comm, distributed_ucxx.ucxx.UCXX)
-                for batched_send in cluster.scheduler.stream_comms.values()
-            )
-    else:
-        with pytest.raises(RuntimeError, match="Cluster failed to start"):
-            LocalCUDACluster(protocol="ucxx")
-
-
-def _test_protocol_ucx_old():
-    with LocalCUDACluster(protocol="ucx-old") as cluster:
-        assert cluster.scheduler_comm.address.startswith("ucx-old://")
-
-        import rapids_dask_dependency
-
-        assert all(
-            isinstance(
-                batched_send.comm,
-                rapids_dask_dependency.patches.distributed.comm.__rdd_patch_ucx.UCX,
-            )
-            for batched_send in cluster.scheduler.stream_comms.values()
-        )
-
-
-def _run_test_with_output_capture(test_func_name, conn):
-    """Run a test function in a subprocess and capture stdout/stderr."""
-    # Redirect stdout and stderr to capture output
-    old_stdout = sys.stdout
-    old_stderr = sys.stderr
-    captured_output = io.StringIO()
-    sys.stdout = sys.stderr = captured_output
-
-    try:
-        # Import and run the test function
-        if test_func_name == "_test_protocol_ucx":
-            _test_protocol_ucx()
-        elif test_func_name == "_test_protocol_ucxx":
-            _test_protocol_ucxx()
-        elif test_func_name == "_test_protocol_ucx_old":
-            _test_protocol_ucx_old()
-        else:
-            raise ValueError(f"Unknown test function: {test_func_name}")
-
-        output = captured_output.getvalue()
-        conn.send((True, output))  # True = success
-    except Exception as e:
-        output = captured_output.getvalue()
-        output += f"\nException: {e}"
-        import traceback
-
-        output += f"\nTraceback:\n{traceback.format_exc()}"
-        conn.send((False, output))  # False = failure
-    finally:
-        # Restore original stdout/stderr
-        sys.stdout = old_stdout
-        sys.stderr = old_stderr
-        conn.close()
-
-
-@pytest.mark.parametrize("protocol", ["ucx", "ucxx", "ucx-old"])
-def test_rdd_protocol(protocol):
-    """Test rapids-dask-dependency protocol selection"""
-    if protocol == "ucx":
-        test_func_name = "_test_protocol_ucx"
-    elif protocol == "ucxx":
-        test_func_name = "_test_protocol_ucxx"
-    else:
-        test_func_name = "_test_protocol_ucx_old"
-
-    # Create a pipe for communication between parent and child processes
-    parent_conn, child_conn = mp.Pipe()
-    p = mp.Process(
-        target=_run_test_with_output_capture, args=(test_func_name, child_conn)
-    )
-
-    p.start()
-    p.join(timeout=60)
-
-    if p.is_alive():
-        p.kill()
-        p.close()
-        raise TimeoutError("Test process timed out")
-
-    # Get the result from the child process
-    success, output = parent_conn.recv()
-
-    # Check that the test passed
-    assert success, f"Test failed in subprocess. Output:\n{output}"
-
-    # For the ucx protocol, check if warnings are printed when distributed_ucxx is not
-    # available
-    if protocol == "ucx" and not _has_distributed_ucxx():
-        # Check if the warning about protocol='ucx' is printed
-        print(f"Output for {protocol} protocol:\n{output}")
-        assert (
-            "you have requested protocol='ucx'" in output
-        ), f"Expected warning not found in output: {output}"
-        assert (
-            "'distributed-ucxx' is not installed" in output
-        ), f"Expected warning about distributed-ucxx not found in output: {output}"
-    elif protocol == "ucx" and _has_distributed_ucxx():
-        # When distributed_ucxx is available, the warning should NOT be printed
-        assert "you have requested protocol='ucx'" not in output, (
-            "Warning should not be printed when distributed_ucxx is available: "
-            f"{output}"
-        )
-    elif protocol == "ucx-old":
-        # The ucx-old protocol should not generate warnings
-        assert (
-            "you have requested protocol='ucx'" not in output
-        ), f"Warning should not be printed for ucx-old protocol: {output}"
dask_cuda-25.8.0.dist-info/RECORD DELETED
@@ -1,63 +0,0 @@
-dask_cuda/GIT_COMMIT,sha256=RAxzl5uQmWcKe5o7s9iHwueNfKc91R_-kniXtpRqmng,41
-dask_cuda/VERSION,sha256=mZz9G1Ul4kEOksaMu07UE-AVtGzT1t91nQu3CK9KUAk,8
-dask_cuda/__init__.py,sha256=Wbc7R0voN4vsQkb7SKuVXH0YXuXtfnAxrupxfM4lT10,1933
-dask_cuda/_compat.py,sha256=AG2lKGAtZitDPBjHeFDKLTN_B5HKodrhZ2kHlk1Z-D0,498
-dask_cuda/_version.py,sha256=cHDO9AzNtxkCVhwYu7hL3H7RPAkQnxpKBjElOst3rkI,964
-dask_cuda/cli.py,sha256=VRYuryhViVWkCH7H7fDDTMToSOC17nAUMIPbd3K2jRs,17490
-dask_cuda/cuda_worker.py,sha256=7ZLZ3AY3l1fLumx2XynUOej5Sx6bwZQRomT4Nj9pbyA,8590
-dask_cuda/device_host_file.py,sha256=wTiTyYthUrR8l2WM7iV5lvjQDzeYxnnK7GUlPsHI6p4,11042
-dask_cuda/disk_io.py,sha256=IpD2hA-AjTw3SEu3w9pT9ELAUpKW0XcphPh8WwlKH70,6757
-dask_cuda/get_device_memory_objects.py,sha256=Nk0f5kv2f0e6JSd5hrwenAOARgF4NS21Zv_x3zF3ONI,4577
-dask_cuda/initialize.py,sha256=JBgXNzt52OzcusC89k0eVn2ivbCoCzZxOPNvQgj3YfE,7385
-dask_cuda/is_device_object.py,sha256=x9klFdeQzLcug7wZMxN3GK2AS121tlDe-LQ2uznm5yo,1179
-dask_cuda/is_spillable_object.py,sha256=8gj6QgtKcmzrpQwy8rE-pS1R8tjaJOeD-Fzr6LumjJg,1596
-dask_cuda/local_cuda_cluster.py,sha256=pocJgHbs8h2z_hfChU2_s7kwYKuYTgFZtmrEgYHjWwc,20735
-dask_cuda/plugins.py,sha256=u4gWQy8DgaTAsd59KCNXLG6zmdmGg5qhaI8ha2rMEFs,7085
-dask_cuda/proxify_device_objects.py,sha256=hGHK2gBkuaMYyrZqcitRGUiH_up56R1hsC7b_6YcCBU,8325
-dask_cuda/proxify_host_file.py,sha256=TbdszPvAgV4CRPMz99tumnCWwiCTqMBZqHQua3RRpHE,31031
-dask_cuda/proxy_object.py,sha256=eII-S0vdpFa6NLebpHFVQuJ2eyXJ5cBg8sucgFEa91g,30307
-dask_cuda/utils.py,sha256=smUv6DGJlHgXjfASVJMlRL1NgECiwwQ2zgn_FCWxMQ4,33454
-dask_cuda/utils_test.py,sha256=CKRMB5KUAg-7VSf21AU1TQoqGpJXm9ftgRV7mGIGQ3s,2132
-dask_cuda/worker_common.py,sha256=uXoYZ1IZanAbHddfpL3NbVDV5WqCF0m94nOiqqvEnxc,7168
-dask_cuda/worker_spec.py,sha256=cI4vS08gyrIU3PKJIjjWZNXChUm5Pv9LwaShPqYYMUQ,4698
-dask_cuda/benchmarks/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-dask_cuda/benchmarks/common.py,sha256=4AHgWkomo1RsPwH6eogz4vbE9vg_Dg-krScn9B1BRuw,7057
-dask_cuda/benchmarks/local_cudf_groupby.py,sha256=ehKOJqnHgQoEyNWuebIWdJP8f_bRiwBd4pax3hkQP_8,8646
-dask_cuda/benchmarks/local_cudf_merge.py,sha256=D2yXcEj1dLvfdd-ugV6kLgcUoiHzLmN3jNNs_BjaWME,12341
-dask_cuda/benchmarks/local_cudf_shuffle.py,sha256=MxUHzif_7TFPj96yE-Y3-U_NMXD6VJLNB1fB39LXgrk,8386
-dask_cuda/benchmarks/local_cupy.py,sha256=1riE9_hVkDxfSP2pWU0h2VF9HTXhF3C2ArKb19KMnVo,10457
-dask_cuda/benchmarks/local_cupy_map_overlap.py,sha256=27OtGOhmHIQhHSAmyOm0GgUX8u10npmwBo_q5fLAqUM,6151
-dask_cuda/benchmarks/read_parquet.py,sha256=spKu6RLWYngPZq9hnaoU0mz7INIaJnErfqjBG2wH8Zc,7614
-dask_cuda/benchmarks/utils.py,sha256=pl0VG8BQnIrVvpdVQtP8ybX1SOcv1E01jZzhkfOIPw4,30235
-dask_cuda/explicit_comms/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-dask_cuda/explicit_comms/comms.py,sha256=FbSDPQUz0gwQNa8EQMNEGEwWCM2KhuDk4CiNNjZy6qo,11611
-dask_cuda/explicit_comms/dataframe/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-dask_cuda/explicit_comms/dataframe/shuffle.py,sha256=vq-NBPYtKvKcfo7-eikPYaeWDf-V_DTHR5PVE_E-dyE,23863
-dask_cuda/tests/conftest.py,sha256=NjTnsgu7UewTpoTakt-2bpe0rtarwni25anjZPWbbb0,1501
-dask_cuda/tests/test_cudf_builtin_spilling.py,sha256=qVN9J0Hdv66A9COFArLIdRriyyxEKpS3lEZGHbVHaq8,4903
-dask_cuda/tests/test_dask_cuda_worker.py,sha256=iOkYAGSYfql7Wxxnb1hZJ7oedQyLmweXAHpQhckaTgY,23232
-dask_cuda/tests/test_device_host_file.py,sha256=79ssUISo1YhsW_7HdwqPfsH2LRzS2bi5BjPym1Sdgqw,5882
-dask_cuda/tests/test_dgx.py,sha256=Y7OgFlrq_BzoF4L8qoA5byKX1kDA1foaFzT-gEfhrsI,7294
-dask_cuda/tests/test_explicit_comms.py,sha256=ULrVhOcVavEWyZBxVSXDOFT_nznF8mRrU1M3fq_DUjo,20622
-dask_cuda/tests/test_from_array.py,sha256=-mXVwrw3MjB32vFfdUvxiwojqu9mEYNyjZjGPg1h1sE,733
-dask_cuda/tests/test_gds.py,sha256=j1Huud6UGm1fbkyRLQEz_ysrVw__5AimwSn_M-2GEvs,1513
-dask_cuda/tests/test_initialize.py,sha256=2Fm84sAN0A9Kxhm7PiWgrwppNCaJLA2yj65Y3LPOrNw,7542
-dask_cuda/tests/test_local_cuda_cluster.py,sha256=wYhY4OzQcYDtevL2peT6k5bAmZs2MDpQMrwPs2To7AY,23083
-dask_cuda/tests/test_proxify_host_file.py,sha256=6YPQia-V5tcVOy2ZNwL9ZpD6qKxKCcaNZZK-8f4aqpM,19996
-dask_cuda/tests/test_proxy.py,sha256=3uREA-d4y0lmGHKGJ5z9OiAYTBie1Y3bpB8djQRrluE,23996
-dask_cuda/tests/test_rdd_ucx.py,sha256=nMbEm7kxAATHfVyZOkfpexyk-ZKtCbLzvPHdspK877M,5433
-dask_cuda/tests/test_spill.py,sha256=QmBdFBvmZsDbOo0IAeUAJQfMyx-BBxokosch4gZHa00,15704
-dask_cuda/tests/test_utils.py,sha256=bTpiNQyKTxLHzHTLn-G0SWVgetq_tqUGq7rbafvdwgg,12297
-dask_cuda/tests/test_version.py,sha256=vK2HjlRLX0nxwvRsYxBqhoZryBNZklzA-vdnyuWDxVg,365
-dask_cuda/tests/test_worker_spec.py,sha256=dlZ4OIOl7CVm4euMtvCUif1QetCWcntSRdwzAmkH6ec,2550
-dask_cuda-25.8.0.dist-info/licenses/LICENSE,sha256=MjI3I-EgxfEvZlgjk82rgiFsZqSDXHFETd2QJ89UwDA,11348
-examples/ucx/client_initialize.py,sha256=YN3AXHF8btcMd6NicKKhKR9SXouAsK1foJhFspbOn70,1262
-examples/ucx/local_cuda_cluster.py,sha256=7xVY3EhwhkY2L4VZin_BiMCbrjhirDNChoC86KiETNc,1983
-shared-actions/check_nightly_success/check-nightly-success/check.py,sha256=lBhwgJALfUXdk4B9IhYf1AV0OUu0dQol3GtUe-CnUgY,5379
-shared-actions/telemetry-impls/summarize/bump_time.py,sha256=FXcGDqjCabsL6vlqd5RIV2PCWi2ns_ju8Ul0ERM21cA,2033
-shared-actions/telemetry-impls/summarize/send_trace.py,sha256=i30O_cKZ1OtXIbqaTE3R1JLJSw2XoYdmoVpM2Nc9wj8,16555
-dask_cuda-25.8.0.dist-info/METADATA,sha256=euY_vTYPba5yD4W8FtKDhCAjF4qs0ZU6-z5cs2lxYQg,2345
-dask_cuda-25.8.0.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
-dask_cuda-25.8.0.dist-info/entry_points.txt,sha256=UcRaKVEpywtxc6pF1VnfMB0UK4sJg7a8_NdZF67laPM,136
-dask_cuda-25.8.0.dist-info/top_level.txt,sha256=SaQxjNeXM9ZhEBjU6CY25OIOt2UojSREGiVxg9UIc08,63
-dask_cuda-25.8.0.dist-info/RECORD,,