dask-cuda 24.12.0__py3-none-any.whl → 25.2.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
dask_cuda/VERSION CHANGED
@@ -1 +1 @@
1
- 24.12.00
1
+ 25.02.00
dask_cuda/__init__.py CHANGED
@@ -5,46 +5,20 @@ if sys.platform != "linux":
5
5
 
6
6
  import dask
7
7
  import dask.utils
8
- import dask.dataframe.core
9
8
  import dask.dataframe.shuffle
10
- import dask.dataframe.multi
11
- import dask.bag.core
9
+ from .explicit_comms.dataframe.shuffle import patch_shuffle_expression
12
10
  from distributed.protocol.cuda import cuda_deserialize, cuda_serialize
13
11
  from distributed.protocol.serialize import dask_deserialize, dask_serialize
14
12
 
15
13
  from ._version import __git_commit__, __version__
16
14
  from .cuda_worker import CUDAWorker
17
- from .explicit_comms.dataframe.shuffle import (
18
- get_rearrange_by_column_wrapper,
19
- get_default_shuffle_method,
20
- )
15
+
21
16
  from .local_cuda_cluster import LocalCUDACluster
22
17
  from .proxify_device_objects import proxify_decorator, unproxify_decorator
23
18
 
24
19
 
25
- if dask.config.get("dataframe.query-planning", None) is not False and dask.config.get(
26
- "explicit-comms", False
27
- ):
28
- raise NotImplementedError(
29
- "The 'explicit-comms' config is not yet supported when "
30
- "query-planning is enabled in dask. Please use the shuffle "
31
- "API directly, or use the legacy dask-dataframe API "
32
- "(set the 'dataframe.query-planning' config to `False`"
33
- "before importing `dask.dataframe`).",
34
- )
35
-
36
-
37
20
  # Monkey patching Dask to make use of explicit-comms when `DASK_EXPLICIT_COMMS=True`
38
- dask.dataframe.shuffle.rearrange_by_column = get_rearrange_by_column_wrapper(
39
- dask.dataframe.shuffle.rearrange_by_column
40
- )
41
- # We have to replace all modules that imports Dask's `get_default_shuffle_method()`
42
- # TODO: introduce a shuffle-algorithm dispatcher in Dask so we don't need this hack
43
- dask.dataframe.shuffle.get_default_shuffle_method = get_default_shuffle_method
44
- dask.dataframe.multi.get_default_shuffle_method = get_default_shuffle_method
45
- dask.bag.core.get_default_shuffle_method = get_default_shuffle_method
46
-
47
-
21
+ patch_shuffle_expression()
48
22
  # Monkey patching Dask to make use of proxify and unproxify in compatibility mode
49
23
  dask.dataframe.shuffle.shuffle_group = proxify_decorator(
50
24
  dask.dataframe.shuffle.shuffle_group
@@ -246,7 +246,6 @@ def parse_args():
246
246
  return parse_benchmark_args(
247
247
  description="Distributed shuffle (dask/cudf) benchmark",
248
248
  args_list=special_args,
249
- check_explicit_comms=False,
250
249
  )
251
250
 
252
251
 
@@ -251,7 +251,6 @@ def parse_args():
251
251
  args = parse_benchmark_args(
252
252
  description="Parquet read benchmark",
253
253
  args_list=special_args,
254
- check_explicit_comms=False,
255
254
  )
256
255
  args.no_show_p2p_bandwidth = True
257
256
  return args
@@ -11,7 +11,6 @@ from typing import Any, Callable, Mapping, NamedTuple, Optional, Tuple
11
11
  import numpy as np
12
12
  import pandas as pd
13
13
 
14
- from dask import config
15
14
  from dask.distributed import Client, SSHCluster
16
15
  from dask.utils import format_bytes, format_time, parse_bytes
17
16
  from distributed.comm.addressing import get_address_host
@@ -52,7 +51,6 @@ def as_noop(dsk):
52
51
  def parse_benchmark_args(
53
52
  description="Generic dask-cuda Benchmark",
54
53
  args_list=[],
55
- check_explicit_comms=True,
56
54
  ):
57
55
  parser = argparse.ArgumentParser(description=description)
58
56
  worker_args = parser.add_argument_group(description="Worker configuration")
@@ -377,24 +375,6 @@ def parse_benchmark_args(
377
375
  if args.multi_node and len(args.hosts.split(",")) < 2:
378
376
  raise ValueError("--multi-node requires at least 2 hosts")
379
377
 
380
- # Raise error early if "explicit-comms" is not allowed
381
- if (
382
- check_explicit_comms
383
- and args.backend == "explicit-comms"
384
- and config.get(
385
- "dataframe.query-planning",
386
- None,
387
- )
388
- is not False
389
- ):
390
- raise NotImplementedError(
391
- "The 'explicit-comms' config is not yet supported when "
392
- "query-planning is enabled in dask. Please use the legacy "
393
- "dask-dataframe API by setting the following environment "
394
- "variable before executing:",
395
- " DASK_DATAFRAME__QUERY_PLANNING=False",
396
- )
397
-
398
378
  return args
399
379
 
400
380
 
@@ -1,8 +1,6 @@
1
1
  from __future__ import annotations
2
2
 
3
3
  import asyncio
4
- import functools
5
- import inspect
6
4
  from collections import defaultdict
7
5
  from math import ceil
8
6
  from operator import getitem
@@ -20,7 +18,7 @@ import distributed.worker
20
18
  from dask.base import tokenize
21
19
  from dask.dataframe import DataFrame, Series
22
20
  from dask.dataframe.core import _concat as dd_concat
23
- from dask.dataframe.shuffle import group_split_dispatch, hash_object_dispatch
21
+ from dask.dataframe.dispatch import group_split_dispatch, hash_object_dispatch
24
22
  from distributed import wait
25
23
  from distributed.protocol import nested_deserialize, to_serialize
26
24
  from distributed.worker import Worker
@@ -33,6 +31,20 @@ T = TypeVar("T")
33
31
  Proxify = Callable[[T], T]
34
32
 
35
33
 
34
+ try:
35
+ from dask.dataframe import dask_expr
36
+
37
+ except ImportError:
38
+ # TODO: Remove when pinned to dask>2024.12.1
39
+ import dask_expr
40
+
41
+ if not dd._dask_expr_enabled():
42
+ raise ValueError(
43
+ "The legacy DataFrame API is not supported in dask_cudf>24.12. "
44
+ "Please enable query-planning, or downgrade to dask_cudf<=24.12"
45
+ )
46
+
47
+
36
48
  def get_proxify(worker: Worker) -> Proxify:
37
49
  """Get function to proxify objects"""
38
50
  from dask_cuda.proxify_host_file import ProxifyHostFile
@@ -570,40 +582,48 @@ def _use_explicit_comms() -> bool:
570
582
  return False
571
583
 
572
584
 
573
- def get_rearrange_by_column_wrapper(func):
574
- """Returns a function wrapper that dispatch the shuffle to explicit-comms.
585
+ def patch_shuffle_expression() -> None:
586
+ """Patch Dask's Shuffle expression.
575
587
 
576
- Notice, this is monkey patched into Dask at dask_cuda import
588
+ Notice, this is monkey patched into Dask at dask_cuda
589
+ import, and it changes `Shuffle._lower` to lower into
590
+ an `ECShuffle` expression when the 'explicit-comms'
591
+ config is set to `True`.
577
592
  """
578
593
 
579
- func_sig = inspect.signature(func)
580
-
581
- @functools.wraps(func)
582
- def wrapper(*args, **kwargs):
583
- if _use_explicit_comms():
584
- # Convert `*args, **kwargs` to a dict of `keyword -> values`
585
- kw = func_sig.bind(*args, **kwargs)
586
- kw.apply_defaults()
587
- kw = kw.arguments
588
- # Notice, we only overwrite the default and the "tasks" shuffle
589
- # algorithm. The "disk" and "p2p" algorithm, we don't touch.
590
- if kw["shuffle_method"] in ("tasks", None):
591
- col = kw["col"]
592
- if isinstance(col, str):
593
- col = [col]
594
- return shuffle(kw["df"], col, kw["npartitions"], kw["ignore_index"])
595
- return func(*args, **kwargs)
596
-
597
- return wrapper
598
-
599
-
600
- def get_default_shuffle_method() -> str:
601
- """Return the default shuffle algorithm used by Dask
594
+ class ECShuffle(dask_expr._shuffle.TaskShuffle):
595
+ """Explicit-Comms Shuffle Expression."""
596
+
597
+ def _layer(self):
598
+ # Execute an explicit-comms shuffle
599
+ if not hasattr(self, "_ec_shuffled"):
600
+ on = self.partitioning_index
601
+ df = dask_expr.new_collection(self.frame)
602
+ self._ec_shuffled = shuffle(
603
+ df,
604
+ [on] if isinstance(on, str) else on,
605
+ self.npartitions_out,
606
+ self.ignore_index,
607
+ )
608
+ graph = self._ec_shuffled.dask.copy()
609
+ shuffled_name = self._ec_shuffled._name
610
+ for i in range(self.npartitions_out):
611
+ graph[(self._name, i)] = graph[(shuffled_name, i)]
612
+ return graph
613
+
614
+ _base_lower = dask_expr._shuffle.Shuffle._lower
615
+
616
+ def _patched_lower(self):
617
+ if self.method in (None, "tasks") and _use_explicit_comms():
618
+ return ECShuffle(
619
+ self.frame,
620
+ self.partitioning_index,
621
+ self.npartitions_out,
622
+ self.ignore_index,
623
+ self.options,
624
+ self.original_partitioning_index,
625
+ )
626
+ else:
627
+ return _base_lower(self)
602
628
 
603
- This changes the default shuffle algorithm from "p2p" to "tasks"
604
- when explicit comms is enabled.
605
- """
606
- ret = dask.config.get("dataframe.shuffle.algorithm", None)
607
- if ret is None and _use_explicit_comms():
608
- return "tasks"
609
- return dask.utils.get_default_shuffle_method()
629
+ dask_expr._shuffle.Shuffle._lower = _patched_lower
dask_cuda/plugins.py CHANGED
@@ -1,4 +1,5 @@
1
1
  import importlib
2
+ import logging
2
3
  import os
3
4
  from typing import Callable, Dict
4
5
 
@@ -12,7 +13,15 @@ class CPUAffinity(WorkerPlugin):
12
13
  self.cores = cores
13
14
 
14
15
  def setup(self, worker=None):
15
- os.sched_setaffinity(0, self.cores)
16
+ try:
17
+ os.sched_setaffinity(0, self.cores)
18
+ except Exception:
19
+ logger = logging.getLogger("distributed.worker")
20
+ logger.warning(
21
+ "Setting CPU affinity for GPU failed. Please refer to the following "
22
+ "link for troubleshooting information: "
23
+ "https://docs.rapids.ai/api/dask-cuda/nightly/troubleshooting/#setting-cpu-affinity-failure" # noqa: E501
24
+ )
16
25
 
17
26
 
18
27
  class CUDFSetup(WorkerPlugin):
dask_cuda/proxy_object.py CHANGED
@@ -12,7 +12,8 @@ import pandas
12
12
 
13
13
  import dask
14
14
  import dask.array.core
15
- import dask.dataframe.methods
15
+ import dask.dataframe.backends
16
+ import dask.dataframe.dispatch
16
17
  import dask.dataframe.utils
17
18
  import dask.utils
18
19
  import distributed.protocol
@@ -22,16 +23,6 @@ from distributed.protocol.compression import decompress
22
23
 
23
24
  from dask_cuda.disk_io import disk_read
24
25
 
25
- try:
26
- from dask.dataframe.backends import concat_pandas
27
- except ImportError:
28
- from dask.dataframe.methods import concat_pandas
29
-
30
- try:
31
- from dask.dataframe.dispatch import make_meta_dispatch as make_meta_dispatch
32
- except ImportError:
33
- from dask.dataframe.utils import make_meta as make_meta_dispatch
34
-
35
26
  from .disk_io import SpillToDiskFile
36
27
  from .is_device_object import is_device_object
37
28
 
@@ -893,10 +884,12 @@ def obj_pxy_dask_deserialize(header, frames):
893
884
  return subclass(pxy)
894
885
 
895
886
 
896
- @dask.dataframe.core.get_parallel_type.register(ProxyObject)
887
+ @dask.dataframe.dispatch.get_parallel_type.register(ProxyObject)
897
888
  def get_parallel_type_proxy_object(obj: ProxyObject):
898
889
  # Notice, `get_parallel_type()` needs a instance not a type object
899
- return dask.dataframe.core.get_parallel_type(obj.__class__.__new__(obj.__class__))
890
+ return dask.dataframe.dispatch.get_parallel_type(
891
+ obj.__class__.__new__(obj.__class__)
892
+ )
900
893
 
901
894
 
902
895
  def unproxify_input_wrapper(func):
@@ -913,24 +906,24 @@ def unproxify_input_wrapper(func):
913
906
 
914
907
  # Register dispatch of ProxyObject on all known dispatch objects
915
908
  for dispatch in (
916
- dask.dataframe.core.hash_object_dispatch,
917
- make_meta_dispatch,
909
+ dask.dataframe.dispatch.hash_object_dispatch,
910
+ dask.dataframe.dispatch.make_meta_dispatch,
918
911
  dask.dataframe.utils.make_scalar,
919
- dask.dataframe.core.group_split_dispatch,
912
+ dask.dataframe.dispatch.group_split_dispatch,
920
913
  dask.array.core.tensordot_lookup,
921
914
  dask.array.core.einsum_lookup,
922
915
  dask.array.core.concatenate_lookup,
923
916
  ):
924
917
  dispatch.register(ProxyObject, unproxify_input_wrapper(dispatch))
925
918
 
926
- dask.dataframe.methods.concat_dispatch.register(
927
- ProxyObject, unproxify_input_wrapper(dask.dataframe.methods.concat)
919
+ dask.dataframe.dispatch.concat_dispatch.register(
920
+ ProxyObject, unproxify_input_wrapper(dask.dataframe.dispatch.concat)
928
921
  )
929
922
 
930
923
 
931
924
  # We overwrite the Dask dispatch of Pandas objects in order to
932
925
  # deserialize all ProxyObjects before concatenating
933
- dask.dataframe.methods.concat_dispatch.register(
926
+ dask.dataframe.dispatch.concat_dispatch.register(
934
927
  (pandas.DataFrame, pandas.Series, pandas.Index),
935
- unproxify_input_wrapper(concat_pandas),
928
+ unproxify_input_wrapper(dask.dataframe.backends.concat_pandas),
936
929
  )
@@ -320,6 +320,7 @@ def test_unknown_argument():
320
320
  assert b"Scheduler address: --my-argument" in ret.stderr
321
321
 
322
322
 
323
+ @pytest.mark.xfail(reason="https://github.com/rapidsai/dask-cuda/issues/1441")
323
324
  @patch.dict(os.environ, {"CUDA_VISIBLE_DEVICES": "0"})
324
325
  def test_pre_import(loop): # noqa: F811
325
326
  module = None
@@ -25,16 +25,6 @@ from dask_cuda.utils_test import IncreasedCloseTimeoutNanny
25
25
  mp = mp.get_context("spawn") # type: ignore
26
26
  ucp = pytest.importorskip("ucp")
27
27
 
28
- QUERY_PLANNING_ON = dask.config.get("dataframe.query-planning", None) is not False
29
-
30
- # Skip these tests when dask-expr is active (for now)
31
- query_planning_skip = pytest.mark.skipif(
32
- QUERY_PLANNING_ON,
33
- reason=(
34
- "The 'explicit-comms' config is not supported "
35
- "when query planning is enabled."
36
- ),
37
- )
38
28
 
39
29
  # Set default shuffle method to "tasks"
40
30
  if dask.config.get("dataframe.shuffle.method", None) is None:
@@ -98,7 +88,6 @@ def _test_dataframe_merge_empty_partitions(nrows, npartitions):
98
88
  pd.testing.assert_frame_equal(got, expected)
99
89
 
100
90
 
101
- @query_planning_skip
102
91
  def test_dataframe_merge_empty_partitions():
103
92
  # Notice, we use more partitions than rows
104
93
  p = mp.Process(target=_test_dataframe_merge_empty_partitions, args=(2, 4))
@@ -250,7 +239,7 @@ def _test_dask_use_explicit_comms(in_cluster):
250
239
  ):
251
240
  dask.config.refresh() # Trigger re-read of the environment variables
252
241
  with pytest.raises(ValueError, match="explicit-comms-batchsize"):
253
- ddf.shuffle(on="key", npartitions=4)
242
+ ddf.shuffle(on="key", npartitions=4).dask
254
243
 
255
244
  if in_cluster:
256
245
  with LocalCluster(
@@ -267,7 +256,6 @@ def _test_dask_use_explicit_comms(in_cluster):
267
256
  check_shuffle()
268
257
 
269
258
 
270
- @query_planning_skip
271
259
  @pytest.mark.parametrize("in_cluster", [True, False])
272
260
  def test_dask_use_explicit_comms(in_cluster):
273
261
  def _timeout(process, function, timeout):
@@ -330,7 +318,6 @@ def _test_dataframe_shuffle_merge(backend, protocol, n_workers):
330
318
  assert_eq(got, expected)
331
319
 
332
320
 
333
- @query_planning_skip
334
321
  @pytest.mark.parametrize("nworkers", [1, 2, 4])
335
322
  @pytest.mark.parametrize("backend", ["pandas", "cudf"])
336
323
  @pytest.mark.parametrize("protocol", ["tcp", "ucx", "ucxx"])
@@ -504,27 +504,27 @@ def test_pandas():
504
504
  df1 = pandas.DataFrame({"a": range(10)})
505
505
  df2 = pandas.DataFrame({"a": range(10)})
506
506
 
507
- res = dask.dataframe.methods.concat([df1, df2])
508
- got = dask.dataframe.methods.concat([df1, df2])
507
+ res = dask.dataframe.dispatch.concat([df1, df2])
508
+ got = dask.dataframe.dispatch.concat([df1, df2])
509
509
  assert_frame_equal(res, got)
510
510
 
511
- got = dask.dataframe.methods.concat([proxy_object.asproxy(df1), df2])
511
+ got = dask.dataframe.dispatch.concat([proxy_object.asproxy(df1), df2])
512
512
  assert_frame_equal(res, got)
513
513
 
514
- got = dask.dataframe.methods.concat([df1, proxy_object.asproxy(df2)])
514
+ got = dask.dataframe.dispatch.concat([df1, proxy_object.asproxy(df2)])
515
515
  assert_frame_equal(res, got)
516
516
 
517
517
  df1 = pandas.Series(range(10))
518
518
  df2 = pandas.Series(range(10))
519
519
 
520
- res = dask.dataframe.methods.concat([df1, df2])
521
- got = dask.dataframe.methods.concat([df1, df2])
520
+ res = dask.dataframe.dispatch.concat([df1, df2])
521
+ got = dask.dataframe.dispatch.concat([df1, df2])
522
522
  assert all(res == got)
523
523
 
524
- got = dask.dataframe.methods.concat([proxy_object.asproxy(df1), df2])
524
+ got = dask.dataframe.dispatch.concat([proxy_object.asproxy(df1), df2])
525
525
  assert all(res == got)
526
526
 
527
- got = dask.dataframe.methods.concat([df1, proxy_object.asproxy(df2)])
527
+ got = dask.dataframe.dispatch.concat([df1, proxy_object.asproxy(df2)])
528
528
  assert all(res == got)
529
529
 
530
530
 
@@ -1,6 +1,7 @@
1
1
  import os
2
2
  from unittest.mock import patch
3
3
 
4
+ import pynvml
4
5
  import pytest
5
6
  from numba import cuda
6
7
 
@@ -197,7 +198,6 @@ def test_get_ucx_config(enable_tcp_over_ucx, enable_infiniband, enable_nvlink):
197
198
 
198
199
 
199
200
  def test_parse_visible_devices():
200
- pynvml = pytest.importorskip("pynvml")
201
201
  pynvml.nvmlInit()
202
202
  indices = []
203
203
  uuids = []
@@ -250,7 +250,6 @@ def test_parse_device_memory_limit():
250
250
 
251
251
 
252
252
  def test_parse_visible_mig_devices():
253
- pynvml = pytest.importorskip("pynvml")
254
253
  pynvml.nvmlInit()
255
254
  for index in range(get_gpu_count()):
256
255
  handle = pynvml.nvmlDeviceGetHandleByIndex(index)
@@ -1,6 +1,6 @@
1
- Metadata-Version: 2.1
1
+ Metadata-Version: 2.2
2
2
  Name: dask-cuda
3
- Version: 24.12.0
3
+ Version: 25.2.0
4
4
  Summary: Utilities for Dask and CUDA interactions
5
5
  Author: NVIDIA Corporation
6
6
  License: Apache 2.0
@@ -19,24 +19,17 @@ Requires-Python: >=3.10
19
19
  Description-Content-Type: text/markdown
20
20
  License-File: LICENSE
21
21
  Requires-Dist: click>=8.1
22
- Requires-Dist: numba>=0.57
22
+ Requires-Dist: numba<0.61.0a0,>=0.59.1
23
23
  Requires-Dist: numpy<3.0a0,>=1.23
24
24
  Requires-Dist: pandas>=1.3
25
- Requires-Dist: pynvml<12.0.0a0,>=11.0.0
26
- Requires-Dist: rapids-dask-dependency==24.12.*
25
+ Requires-Dist: pynvml<13.0.0a0,>=12.0.0
26
+ Requires-Dist: rapids-dask-dependency==25.2.*
27
27
  Requires-Dist: zict>=2.0.0
28
28
  Provides-Extra: docs
29
29
  Requires-Dist: numpydoc>=1.1.0; extra == "docs"
30
30
  Requires-Dist: sphinx; extra == "docs"
31
31
  Requires-Dist: sphinx-click>=2.7.1; extra == "docs"
32
32
  Requires-Dist: sphinx-rtd-theme>=0.5.1; extra == "docs"
33
- Provides-Extra: test
34
- Requires-Dist: cudf==24.12.*; extra == "test"
35
- Requires-Dist: dask-cudf==24.12.*; extra == "test"
36
- Requires-Dist: kvikio==24.12.*; extra == "test"
37
- Requires-Dist: pytest; extra == "test"
38
- Requires-Dist: pytest-cov; extra == "test"
39
- Requires-Dist: ucx-py==0.41.*; extra == "test"
40
33
 
41
34
  Dask CUDA
42
35
  =========
@@ -1,5 +1,5 @@
1
- dask_cuda/VERSION,sha256=NltZ4By82NzVjz00LGPhCXfkG4BB0JdUSXqlG8fiVuo,8
2
- dask_cuda/__init__.py,sha256=eOCH3Wj0A8X0qbNUoNA15dgxb2O-ZApha4QHq5EEVFw,2748
1
+ dask_cuda/VERSION,sha256=gWnOsR7j8lHNsXJO_balY3FJzbDTto6xlQk1ItvppEY,8
2
+ dask_cuda/__init__.py,sha256=YMnNzbZ1pDrsbgBc9ipsxBQyYn72IzoHAei4_imXHok,1665
3
3
  dask_cuda/_version.py,sha256=cHDO9AzNtxkCVhwYu7hL3H7RPAkQnxpKBjElOst3rkI,964
4
4
  dask_cuda/cli.py,sha256=cScVyNiA_l9uXeDgkIcmbcR4l4cH1_1shqSqsVmuHPE,17053
5
5
  dask_cuda/cuda_worker.py,sha256=rZ1ITG_ZCbuaMA9e8uSqCjU8Km4AMphGGrxpBPQG8xU,9477
@@ -10,10 +10,10 @@ dask_cuda/initialize.py,sha256=Gjcxs_c8DTafgsHe5-2mw4lJdOmbFJJAZVOnxA8lTjM,6462
10
10
  dask_cuda/is_device_object.py,sha256=CnajvbQiX0FzFzwft0MqK1OPomx3ZGDnDxT56wNjixw,1046
11
11
  dask_cuda/is_spillable_object.py,sha256=CddGmg0tuSpXh2m_TJSY6GRpnl1WRHt1CRcdWgHPzWA,1457
12
12
  dask_cuda/local_cuda_cluster.py,sha256=wqwKVRV6jT13sf9e-XsvbVBlTrnhmcbmHQBFPTFcayw,20335
13
- dask_cuda/plugins.py,sha256=yGHEurbYhL4jucQrmsxLfOyE5c3bSJdfs6GVwvDAeEA,6770
13
+ dask_cuda/plugins.py,sha256=A2aT8HA6q_JhIEx6-XKcpbWEbl7aTg1GNoZQH8_vh00,7197
14
14
  dask_cuda/proxify_device_objects.py,sha256=99CD7LOE79YiQGJ12sYl_XImVhJXpFR4vG5utdkjTQo,8108
15
15
  dask_cuda/proxify_host_file.py,sha256=Wf5CFCC1JN5zmfvND3ls0M5FL01Y8VhHrk0xV3UQ9kk,30850
16
- dask_cuda/proxy_object.py,sha256=bZq92kjgFB-ad_luSAFT_RItV3nssmiEk4OOSp34laU,29812
16
+ dask_cuda/proxy_object.py,sha256=Zwn3mUIS_6NqNvPRTbcu6auXTQpEs8Tc-pc3_kfBBNY,29616
17
17
  dask_cuda/utils.py,sha256=Goq-m78rYZ-bcJitg47N1h_PC4PDuzXG0CUVH7V8azU,25515
18
18
  dask_cuda/utils_test.py,sha256=WNMR0gic2tuP3pgygcR9g52NfyX8iGMOan6juXhpkCE,1694
19
19
  dask_cuda/worker_spec.py,sha256=7-Uq_e5q2SkTlsmctMcYLCa9_3RiiVHZLIN7ctfaFmE,4376
@@ -21,35 +21,35 @@ dask_cuda/benchmarks/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hS
21
21
  dask_cuda/benchmarks/common.py,sha256=YFhxBYkoxIV-2mddSbLwTbyg67U4zXDd2_fFq9oP3_A,6922
22
22
  dask_cuda/benchmarks/local_cudf_groupby.py,sha256=zrDiF-yBAUxVt9mWOTH5hUm-pb-XnVX-G9gvCEX7_GI,8512
23
23
  dask_cuda/benchmarks/local_cudf_merge.py,sha256=Q7lnZ87-O7j28hkS-i_5hMApTX8VsuI4ftZf2XAnp1E,12195
24
- dask_cuda/benchmarks/local_cudf_shuffle.py,sha256=8FjPFtiC-UqZcdPfocdMuzq_8TURAQWJlmhfcMWdo4w,8276
24
+ dask_cuda/benchmarks/local_cudf_shuffle.py,sha256=Ied7r_fdGuOJyikBVVkMaIX3niJIlF39C1Xk6IVwgo4,8240
25
25
  dask_cuda/benchmarks/local_cupy.py,sha256=RCxQJd88bn3vyMAJDPK3orUpxzvDZY957wOSYkfriq0,10323
26
26
  dask_cuda/benchmarks/local_cupy_map_overlap.py,sha256=YAllGFuG6MePfPL8gdZ-Ld7a44-G0eEaHZJWB4vFPdY,6017
27
- dask_cuda/benchmarks/read_parquet.py,sha256=TARcG-TS1NGcQWJmuAKtfmBmy5LAaLc3xgtKgAd1DaA,7650
28
- dask_cuda/benchmarks/utils.py,sha256=_NSWS5e8SzZ6vxDcEFo97Y8gs_e23Qqd-c3r83BA6PU,30748
27
+ dask_cuda/benchmarks/read_parquet.py,sha256=spKu6RLWYngPZq9hnaoU0mz7INIaJnErfqjBG2wH8Zc,7614
28
+ dask_cuda/benchmarks/utils.py,sha256=_x0XXL_F3W-fExpuQfTBwuK3WnrVuXQQepbnvjUqS9o,30075
29
29
  dask_cuda/explicit_comms/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
30
30
  dask_cuda/explicit_comms/comms.py,sha256=Su6PuNo68IyS-AwoqU4S9TmqWsLvUdNa0jot2hx8jQQ,10400
31
31
  dask_cuda/explicit_comms/dataframe/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
32
- dask_cuda/explicit_comms/dataframe/shuffle.py,sha256=4xfhfbTGa36YPs_ex1_fFhzfGMYJq-QkS5q0RwgeHh8,20645
32
+ dask_cuda/explicit_comms/dataframe/shuffle.py,sha256=g9xDyFKmblEuevZt5Drh66uMLw-LUNOI8CIucDdACmY,21231
33
33
  dask_cuda/tests/test_cudf_builtin_spilling.py,sha256=qVN9J0Hdv66A9COFArLIdRriyyxEKpS3lEZGHbVHaq8,4903
34
- dask_cuda/tests/test_dask_cuda_worker.py,sha256=6rroHvJAn5R3X9LwIcE8QrPxG1GO3PaxXVjhbdQ90Pw,20477
34
+ dask_cuda/tests/test_dask_cuda_worker.py,sha256=C1emlr47yGa3TdSSlAXJRzguY4bcH74htk21x9th7nQ,20556
35
35
  dask_cuda/tests/test_device_host_file.py,sha256=79ssUISo1YhsW_7HdwqPfsH2LRzS2bi5BjPym1Sdgqw,5882
36
36
  dask_cuda/tests/test_dgx.py,sha256=BPCF4ZvhrVKkT43OOFHdijuo-M34vW3V18C8rRH1HXg,7489
37
- dask_cuda/tests/test_explicit_comms.py,sha256=Pa5vVx63qWtScnVJuS31WESXIt2FPyTJVFO-0OUbbmU,15276
37
+ dask_cuda/tests/test_explicit_comms.py,sha256=F4_84bx3ODO8Q6ERHXFJF31uXj4gehLwKQncT1evbuM,14895
38
38
  dask_cuda/tests/test_from_array.py,sha256=okT1B6UqHmLxoy0uER0Ylm3UyOmi5BAXwJpTuTAw44I,601
39
39
  dask_cuda/tests/test_gds.py,sha256=j1Huud6UGm1fbkyRLQEz_ysrVw__5AimwSn_M-2GEvs,1513
40
40
  dask_cuda/tests/test_initialize.py,sha256=Rba59ZbljEm1yyN94_sWZPEE_f7hWln95aiBVc49pmY,6960
41
41
  dask_cuda/tests/test_local_cuda_cluster.py,sha256=Lc9QncyGwBwhaZPGBfreXJf3ZC9Zd8SjDc2fpeQ-BT0,19710
42
42
  dask_cuda/tests/test_proxify_host_file.py,sha256=LC3jjo_gbfhdIy1Zy_ynmgyv31HXFoBINCe1-XXZ4XU,18994
43
- dask_cuda/tests/test_proxy.py,sha256=51qsXGJBg_hwSMRsC_QvJBz4wVM0Bf8fbFmTUFA7HJE,23809
43
+ dask_cuda/tests/test_proxy.py,sha256=U9uE-QesTwquNKzTReEKiYgoRgS_pfGW-A-gJNppHyg,23817
44
44
  dask_cuda/tests/test_spill.py,sha256=CYMbp5HDBYlZ7T_n8RfSOZxaWFcAQKjprjRM7Wupcdw,13419
45
- dask_cuda/tests/test_utils.py,sha256=JRIwXfemc3lWSzLJX0VcvR1_0wB4yeoOTsw7kB6z6pU,9176
45
+ dask_cuda/tests/test_utils.py,sha256=PQI_oTONWnKSKlkQfEeK-vlmYa0-cPpDjDEbm74cNCE,9104
46
46
  dask_cuda/tests/test_version.py,sha256=vK2HjlRLX0nxwvRsYxBqhoZryBNZklzA-vdnyuWDxVg,365
47
47
  dask_cuda/tests/test_worker_spec.py,sha256=Bvu85vkqm6ZDAYPXKMJlI2pm9Uc5tiYKNtO4goXSw-I,2399
48
48
  examples/ucx/client_initialize.py,sha256=YN3AXHF8btcMd6NicKKhKR9SXouAsK1foJhFspbOn70,1262
49
49
  examples/ucx/local_cuda_cluster.py,sha256=7xVY3EhwhkY2L4VZin_BiMCbrjhirDNChoC86KiETNc,1983
50
- dask_cuda-24.12.0.dist-info/LICENSE,sha256=MjI3I-EgxfEvZlgjk82rgiFsZqSDXHFETd2QJ89UwDA,11348
51
- dask_cuda-24.12.0.dist-info/METADATA,sha256=qFewjmkl67EsxFm9VoMTmw_XOOK3savtnO9hK-Qwx-E,2557
52
- dask_cuda-24.12.0.dist-info/WHEEL,sha256=PZUExdf71Ui_so67QXpySuHtCi3-J3wvF4ORK6k_S8U,91
53
- dask_cuda-24.12.0.dist-info/entry_points.txt,sha256=UcRaKVEpywtxc6pF1VnfMB0UK4sJg7a8_NdZF67laPM,136
54
- dask_cuda-24.12.0.dist-info/top_level.txt,sha256=3kKxJxeM108fuYc_lwwlklP7YBU9IEmdmRAouzi397o,33
55
- dask_cuda-24.12.0.dist-info/RECORD,,
50
+ dask_cuda-25.2.0.dist-info/LICENSE,sha256=MjI3I-EgxfEvZlgjk82rgiFsZqSDXHFETd2QJ89UwDA,11348
51
+ dask_cuda-25.2.0.dist-info/METADATA,sha256=c6G1F5I_jeDlfXs42b9LSwtVc5HvUtfyEgZ6cLiw7fM,2272
52
+ dask_cuda-25.2.0.dist-info/WHEEL,sha256=In9FTNxeP60KnTkGw7wk6mJPYd_dQSjEZmXdBdMCI-8,91
53
+ dask_cuda-25.2.0.dist-info/entry_points.txt,sha256=UcRaKVEpywtxc6pF1VnfMB0UK4sJg7a8_NdZF67laPM,136
54
+ dask_cuda-25.2.0.dist-info/top_level.txt,sha256=3kKxJxeM108fuYc_lwwlklP7YBU9IEmdmRAouzi397o,33
55
+ dask_cuda-25.2.0.dist-info/RECORD,,
@@ -1,5 +1,5 @@
1
1
  Wheel-Version: 1.0
2
- Generator: setuptools (75.6.0)
2
+ Generator: setuptools (75.8.0)
3
3
  Root-Is-Purelib: true
4
4
  Tag: py3-none-any
5
5