scikit-learn-intelex 2024.5.0__py310-none-manylinux1_x86_64.whl → 2024.7.0__py310-none-manylinux1_x86_64.whl

This diff compares the contents of two publicly released versions of the package as they appear in their public registry. It is provided for informational purposes only.

Potentially problematic release: this version of scikit-learn-intelex might be problematic.

Files changed (73)
  1. {scikit_learn_intelex-2024.5.0.dist-info → scikit_learn_intelex-2024.7.0.dist-info}/METADATA +2 -2
  2. scikit_learn_intelex-2024.7.0.dist-info/RECORD +122 -0
  3. sklearnex/_config.py +3 -15
  4. sklearnex/_device_offload.py +9 -168
  5. sklearnex/basic_statistics/basic_statistics.py +127 -1
  6. sklearnex/basic_statistics/tests/test_basic_statistics.py +251 -0
  7. sklearnex/basic_statistics/tests/test_incremental_basic_statistics.py +1 -1
  8. sklearnex/cluster/dbscan.py +3 -1
  9. sklearnex/cluster/k_means.py +8 -0
  10. sklearnex/cluster/tests/test_dbscan.py +8 -6
  11. sklearnex/cluster/tests/test_kmeans.py +15 -3
  12. sklearnex/conftest.py +11 -1
  13. sklearnex/covariance/incremental_covariance.py +64 -13
  14. sklearnex/covariance/tests/test_incremental_covariance.py +35 -0
  15. sklearnex/decomposition/pca.py +25 -1
  16. sklearnex/decomposition/tests/test_pca.py +4 -2
  17. sklearnex/dispatcher.py +109 -1
  18. sklearnex/ensemble/_forest.py +121 -57
  19. sklearnex/ensemble/tests/test_forest.py +7 -0
  20. sklearnex/glob/dispatcher.py +16 -2
  21. sklearnex/linear_model/coordinate_descent.py +13 -0
  22. sklearnex/linear_model/incremental_linear.py +102 -25
  23. sklearnex/linear_model/linear.py +25 -39
  24. sklearnex/linear_model/logistic_regression.py +92 -74
  25. sklearnex/linear_model/ridge.py +7 -0
  26. sklearnex/linear_model/tests/test_incremental_linear.py +10 -10
  27. sklearnex/linear_model/tests/test_linear.py +30 -5
  28. sklearnex/linear_model/tests/test_logreg.py +45 -3
  29. sklearnex/manifold/t_sne.py +4 -0
  30. sklearnex/metrics/pairwise.py +5 -0
  31. sklearnex/metrics/ranking.py +3 -0
  32. sklearnex/model_selection/split.py +3 -0
  33. sklearnex/neighbors/_lof.py +9 -0
  34. sklearnex/neighbors/common.py +45 -1
  35. sklearnex/neighbors/knn_classification.py +1 -20
  36. sklearnex/neighbors/knn_regression.py +25 -20
  37. sklearnex/neighbors/knn_unsupervised.py +31 -7
  38. sklearnex/preview/__init__.py +1 -1
  39. sklearnex/preview/decomposition/__init__.py +19 -0
  40. sklearnex/preview/decomposition/incremental_pca.py +228 -0
  41. sklearnex/preview/decomposition/tests/test_incremental_pca.py +266 -0
  42. sklearnex/preview/linear_model/__init__.py +19 -0
  43. sklearnex/preview/linear_model/ridge.py +419 -0
  44. sklearnex/preview/linear_model/tests/test_ridge.py +102 -0
  45. sklearnex/spmd/basic_statistics/tests/test_basic_statistics_spmd.py +107 -0
  46. sklearnex/spmd/cluster/tests/test_dbscan_spmd.py +97 -0
  47. sklearnex/spmd/cluster/tests/test_kmeans_spmd.py +172 -0
  48. sklearnex/spmd/covariance/tests/test_covariance_spmd.py +107 -0
  49. sklearnex/spmd/decomposition/tests/test_pca_spmd.py +128 -0
  50. sklearnex/spmd/ensemble/tests/test_forest_spmd.py +265 -0
  51. sklearnex/spmd/linear_model/tests/test_linear_regression_spmd.py +145 -0
  52. sklearnex/spmd/linear_model/tests/test_logistic_regression_spmd.py +163 -0
  53. sklearnex/spmd/neighbors/tests/test_neighbors_spmd.py +288 -0
  54. sklearnex/svm/_common.py +163 -20
  55. sklearnex/svm/nusvc.py +40 -4
  56. sklearnex/svm/nusvr.py +31 -2
  57. sklearnex/svm/svc.py +40 -4
  58. sklearnex/svm/svr.py +31 -2
  59. sklearnex/svm/tests/test_svm.py +12 -20
  60. sklearnex/tests/_utils.py +185 -30
  61. sklearnex/tests/_utils_spmd.py +185 -0
  62. sklearnex/tests/test_common.py +54 -0
  63. sklearnex/tests/test_config.py +4 -0
  64. sklearnex/tests/test_memory_usage.py +185 -126
  65. sklearnex/tests/test_monkeypatch.py +12 -4
  66. sklearnex/tests/test_patching.py +21 -25
  67. sklearnex/tests/test_run_to_run_stability.py +295 -0
  68. sklearnex/utils/_namespace.py +1 -1
  69. scikit_learn_intelex-2024.5.0.dist-info/RECORD +0 -104
  70. sklearnex/tests/test_run_to_run_stability_tests.py +0 -428
  71. {scikit_learn_intelex-2024.5.0.dist-info → scikit_learn_intelex-2024.7.0.dist-info}/LICENSE.txt +0 -0
  72. {scikit_learn_intelex-2024.5.0.dist-info → scikit_learn_intelex-2024.7.0.dist-info}/WHEEL +0 -0
  73. {scikit_learn_intelex-2024.5.0.dist-info → scikit_learn_intelex-2024.7.0.dist-info}/top_level.txt +0 -0
sklearnex/dispatcher.py CHANGED
@@ -45,12 +45,15 @@ def get_patch_map_core(preview=False):
 
         if _is_new_patching_available():
             import sklearn.covariance as covariance_module
+            import sklearn.decomposition as decomposition_module
 
             # Preview classes for patching
             from .preview.cluster import KMeans as KMeans_sklearnex
             from .preview.covariance import (
                 EmpiricalCovariance as EmpiricalCovariance_sklearnex,
             )
+            from .preview.decomposition import IncrementalPCA as IncrementalPCA_sklearnex
+            from .preview.linear_model import Ridge as Ridge_sklearnex
 
             # Since the state of the lru_cache without preview cannot be
             # guaranteed to not have already enabled sklearnex algorithms
@@ -62,7 +65,7 @@ def get_patch_map_core(preview=False):
             sklearn_obj = mapping["kmeans"][0][1]
             mapping.pop("kmeans")
             mapping["kmeans"] = [
-                [(cluster_module, "kmeans", KMeans_sklearnex), sklearn_obj]
+                [(cluster_module, "KMeans", KMeans_sklearnex), sklearn_obj]
             ]
 
             # Covariance
@@ -76,6 +79,27 @@ def get_patch_map_core(preview=False):
                     None,
                 ]
             ]
+
+            # IncrementalPCA
+            mapping["incrementalpca"] = [
+                [
+                    (
+                        decomposition_module,
+                        "IncrementalPCA",
+                        IncrementalPCA_sklearnex,
+                    ),
+                    None,
+                ]
+            ]
+
+            # Ridge
+            linear_model_module, _, _ = mapping["ridge"][0][0]
+            sklearn_obj = mapping["ridge"][0][1]
+            mapping.pop("ridge")
+            mapping["ridge"] = [
+                [(linear_model_module, "Ridge", Ridge_sklearnex), sklearn_obj]
+            ]
+
         return mapping
 
     from daal4py.sklearn.monkeypatch.dispatcher import _get_map_of_algorithms
@@ -97,6 +121,9 @@ def get_patch_map_core(preview=False):
     import sklearn.decomposition as decomposition_module
     import sklearn.ensemble as ensemble_module
     import sklearn.linear_model as linear_model_module
+    import sklearn.manifold as manifold_module
+    import sklearn.metrics as metrics_module
+    import sklearn.model_selection as model_selection_module
     import sklearn.neighbors as neighbors_module
     import sklearn.svm as svm_module
 
@@ -124,11 +151,18 @@ def get_patch_map_core(preview=False):
     from .ensemble import ExtraTreesRegressor as ExtraTreesRegressor_sklearnex
     from .ensemble import RandomForestClassifier as RandomForestClassifier_sklearnex
     from .ensemble import RandomForestRegressor as RandomForestRegressor_sklearnex
+    from .linear_model import ElasticNet as ElasticNet_sklearnex
     from .linear_model import (
         IncrementalLinearRegression as IncrementalLinearRegression_sklearnex,
     )
+    from .linear_model import Lasso as Lasso_sklearnex
     from .linear_model import LinearRegression as LinearRegression_sklearnex
     from .linear_model import LogisticRegression as LogisticRegression_sklearnex
+    from .linear_model import Ridge as Ridge_sklearnex
+    from .manifold import TSNE as TSNE_sklearnex
+    from .metrics import pairwise_distances as pairwise_distances_sklearnex
+    from .metrics import roc_auc_score as roc_auc_score_sklearnex
+    from .model_selection import train_test_split as train_test_split_sklearnex
     from .neighbors import KNeighborsClassifier as KNeighborsClassifier_sklearnex
     from .neighbors import KNeighborsRegressor as KNeighborsRegressor_sklearnex
     from .neighbors import LocalOutlierFactor as LocalOutlierFactor_sklearnex
@@ -154,6 +188,32 @@ def get_patch_map_core(preview=False):
     mapping["nusvr"] = [[(svm_module, "NuSVR", NuSVR_sklearnex), None]]
     mapping["nusvc"] = [[(svm_module, "NuSVC", NuSVC_sklearnex), None]]
 
+    # ElasticNet
+    mapping.pop("elasticnet")
+    mapping["elasticnet"] = [
+        [
+            (
+                linear_model_module,
+                "ElasticNet",
+                ElasticNet_sklearnex,
+            ),
+            None,
+        ]
+    ]
+
+    # Lasso
+    mapping.pop("lasso")
+    mapping["lasso"] = [
+        [
+            (
+                linear_model_module,
+                "Lasso",
+                Lasso_sklearnex,
+            ),
+            None,
+        ]
+    ]
+
     # Linear Regression
     mapping.pop("linear")
     mapping.pop("linearregression")
@@ -187,6 +247,54 @@ def get_patch_map_core(preview=False):
     ]
     mapping["logisticregression"] = mapping["log_reg"]
 
+    # Ridge
+    mapping.pop("ridge")
+    mapping["ridge"] = [
+        [
+            (
+                linear_model_module,
+                "Ridge",
+                Ridge_sklearnex,
+            ),
+            None,
+        ]
+    ]
+
+    # manifold
+    mapping.pop("tsne")
+    mapping["tsne"] = [
+        [
+            (manifold_module, "TSNE", TSNE_sklearnex),
+            None,
+        ]
+    ]
+
+    # metrics
+    mapping.pop("distances")
+    mapping.pop("roc_auc_score")
+    mapping["distances"] = [
+        [
+            (metrics_module, "pairwise_distances", pairwise_distances_sklearnex),
+            None,
+        ]
+    ]
+    mapping["pairwise_distances"] = mapping["distances"]
+    mapping["roc_auc_score"] = [
+        [
+            (metrics_module, "roc_auc_score", roc_auc_score_sklearnex),
+            None,
+        ]
+    ]
+
+    # model_selection
+    mapping.pop("train_test_split")
+    mapping["train_test_split"] = [
+        [
+            (model_selection_module, "train_test_split", train_test_split_sklearnex),
+            None,
+        ]
+    ]
+
    # kNN
    mapping.pop("knn_classifier")
    mapping.pop("kneighborsclassifier")
sklearnex/ensemble/_forest.py CHANGED
@@ -29,7 +29,7 @@ from sklearn.ensemble._forest import ForestClassifier as sklearn_ForestClassifier
 from sklearn.ensemble._forest import ForestRegressor as sklearn_ForestRegressor
 from sklearn.ensemble._forest import _get_n_samples_bootstrap
 from sklearn.exceptions import DataConversionWarning
-from sklearn.metrics import accuracy_score
+from sklearn.metrics import accuracy_score, r2_score
 from sklearn.tree import (
     DecisionTreeClassifier,
     DecisionTreeRegressor,
@@ -38,7 +38,12 @@ from sklearn.tree import (
 )
 from sklearn.tree._tree import Tree
 from sklearn.utils import check_random_state, deprecated
-from sklearn.utils.validation import check_array, check_is_fitted
+from sklearn.utils.validation import (
+    _check_sample_weight,
+    check_array,
+    check_is_fitted,
+    check_X_y,
+)
 
 from daal4py.sklearn._n_jobs_support import control_n_jobs
 from daal4py.sklearn._utils import (
@@ -70,14 +75,15 @@ class BaseForest(ABC):
         X, y = self._validate_data(
             X,
             y,
-            multi_output=False,
+            multi_output=True,
             accept_sparse=False,
             dtype=[np.float64, np.float32],
             force_all_finite=False,
+            ensure_2d=True,
         )
 
         if sample_weight is not None:
-            sample_weight = self.check_sample_weight(sample_weight, X)
+            sample_weight = _check_sample_weight(sample_weight, X)
 
         if y.ndim == 2 and y.shape[1] == 1:
             warnings.warn(
@@ -97,8 +103,6 @@ class BaseForest(ABC):
 
         y, expanded_class_weight = self._validate_y_class_weight(y)
 
-        self.n_features_in_ = X.shape[1]
-
         if expanded_class_weight is not None:
             if sample_weight is not None:
                 sample_weight = sample_weight * expanded_class_weight
@@ -114,7 +118,9 @@ class BaseForest(ABC):
            "min_samples_split": self.min_samples_split,
            "min_samples_leaf": self.min_samples_leaf,
            "min_weight_fraction_leaf": self.min_weight_fraction_leaf,
-           "max_features": self.max_features,
+           "max_features": self._to_absolute_max_features(
+               self.max_features, self.n_features_in_
+           ),
            "max_leaf_nodes": self.max_leaf_nodes,
            "min_impurity_decrease": self.min_impurity_decrease,
            "bootstrap": self.bootstrap,
@@ -174,6 +180,45 @@ class BaseForest(ABC):
         self._validate_estimator()
         return self
 
+    def _to_absolute_max_features(self, max_features, n_features):
+        if max_features is None:
+            return n_features
+        if isinstance(max_features, str):
+            if max_features == "auto":
+                if not sklearn_check_version("1.3"):
+                    if sklearn_check_version("1.1"):
+                        warnings.warn(
+                            "`max_features='auto'` has been deprecated in 1.1 "
+                            "and will be removed in 1.3. To keep the past behaviour, "
+                            "explicitly set `max_features=1.0` or remove this "
+                            "parameter as it is also the default value for "
+                            "RandomForestRegressors and ExtraTreesRegressors.",
+                            FutureWarning,
+                        )
+                    return (
+                        max(1, int(np.sqrt(n_features)))
+                        if isinstance(self, ForestClassifier)
+                        else n_features
+                    )
+            if max_features == "sqrt":
+                return max(1, int(np.sqrt(n_features)))
+            if max_features == "log2":
+                return max(1, int(np.log2(n_features)))
+            allowed_string_values = (
+                '"sqrt" or "log2"'
+                if sklearn_check_version("1.3")
+                else '"auto", "sqrt" or "log2"'
+            )
+            raise ValueError(
+                "Invalid value for max_features. Allowed string "
+                f"values are {allowed_string_values}."
+            )
+        if isinstance(max_features, (numbers.Integral, np.integer)):
+            return max_features
+        if max_features > 0.0:
+            return max(1, int(max_features * n_features))
+        return 0
+
     def _check_parameters(self):
         if isinstance(self.min_samples_leaf, numbers.Integral):
             if not 1 <= self.min_samples_leaf:
@@ -249,38 +294,6 @@ class BaseForest(ABC):
                 "min_bin_size must be integral number but was " "%r" % self.min_bin_size
             )
 
-    def check_sample_weight(self, sample_weight, X, dtype=None):
-        n_samples = _num_samples(X)
-
-        if dtype is not None and dtype not in [np.float32, np.float64]:
-            dtype = np.float64
-
-        if sample_weight is None:
-            sample_weight = np.ones(n_samples, dtype=dtype)
-        elif isinstance(sample_weight, numbers.Number):
-            sample_weight = np.full(n_samples, sample_weight, dtype=dtype)
-        else:
-            if dtype is None:
-                dtype = [np.float64, np.float32]
-            sample_weight = check_array(
-                sample_weight,
-                accept_sparse=False,
-                ensure_2d=False,
-                dtype=dtype,
-                order="C",
-                force_all_finite=False,
-            )
-            if sample_weight.ndim != 1:
-                raise ValueError("Sample weights must be 1D array or scalar")
-
-            if sample_weight.shape != (n_samples,):
-                raise ValueError(
-                    "sample_weight.shape == {}, expected {}!".format(
-                        sample_weight.shape, (n_samples,)
-                    )
-                )
-        return sample_weight
-
     @property
     def estimators_(self):
         if hasattr(self, "_cached_estimators_"):
@@ -518,7 +531,7 @@ class ForestClassifier(sklearn_ForestClassifier, BaseForest):
         )
 
         if patching_status.get_status():
-            X, y = self._validate_data(
+            X, y = check_X_y(
                 X,
                 y,
                 multi_output=True,
@@ -738,6 +751,10 @@ class ForestClassifier(sklearn_ForestClassifier, BaseForest):
                     or self.estimator.__class__ == DecisionTreeClassifier,
                     "ExtraTrees only supported starting from oneDAL version 2023.1",
                 ),
+                (
+                    not self.oob_score,
+                    "oob_scores using r2 or accuracy not implemented.",
+                ),
                 (sample_weight is None, "sample_weight is not supported."),
             ]
         )
@@ -780,24 +797,43 @@ class ForestClassifier(sklearn_ForestClassifier, BaseForest):
         check_is_fitted(self, "_onedal_estimator")
 
         if sklearn_check_version("1.0"):
-            self._check_feature_names(X, reset=False)
-
-        X = check_array(
-            X,
-            dtype=[np.float64, np.float32],
-            force_all_finite=False,
-        ) # Warning, order of dtype matters
+            X = self._validate_data(
+                X,
+                dtype=[np.float64, np.float32],
+                force_all_finite=False,
+                reset=False,
+                ensure_2d=True,
+            )
+        else:
+            X = check_array(
+                X,
+                dtype=[np.float64, np.float32],
+                force_all_finite=False,
+            ) # Warning, order of dtype matters
+            self._check_n_features(X, reset=False)
 
         res = self._onedal_estimator.predict(X, queue=queue)
         return np.take(self.classes_, res.ravel().astype(np.int64, casting="unsafe"))
 
     def _onedal_predict_proba(self, X, queue=None):
-        X = check_array(X, dtype=[np.float64, np.float32], force_all_finite=False)
         check_is_fitted(self, "_onedal_estimator")
 
-        self._check_n_features(X, reset=False)
         if sklearn_check_version("1.0"):
-            self._check_feature_names(X, reset=False)
+            X = self._validate_data(
+                X,
+                dtype=[np.float64, np.float32],
+                force_all_finite=False,
+                reset=False,
+                ensure_2d=True,
+            )
+        else:
+            X = check_array(
+                X,
+                dtype=[np.float64, np.float32],
+                force_all_finite=False,
+            ) # Warning, order of dtype matters
+            self._check_n_features(X, reset=False)
+
         return self._onedal_estimator.predict_proba(X, queue=queue)
 
     def _onedal_score(self, X, y, sample_weight=None, queue=None):
@@ -914,7 +950,7 @@ class ForestRegressor(sklearn_ForestRegressor, BaseForest):
         )
 
         if patching_status.get_status():
-            X, y = self._validate_data(
+            X, y = check_X_y(
                 X,
                 y,
                 multi_output=True,
@@ -996,7 +1032,7 @@ class ForestRegressor(sklearn_ForestRegressor, BaseForest):
             ]
         )
 
-        elif method_name == "predict":
+        elif method_name in ["predict", "score"]:
            X = data[0]
 
            patching_status.and_conditions(
@@ -1046,11 +1082,12 @@ class ForestRegressor(sklearn_ForestRegressor, BaseForest):
                     or self.estimator.__class__ == DecisionTreeClassifier,
                     "ExtraTrees only supported starting from oneDAL version 2023.1",
                 ),
+                (not self.oob_score, "oob_score value is not sklearn conformant."),
                 (sample_weight is None, "sample_weight is not supported."),
             ]
         )
 
-        elif method_name == "predict":
+        elif method_name in ["predict", "score"]:
            X = data[0]
 
            patching_status.and_conditions(
@@ -1083,16 +1120,28 @@ class ForestRegressor(sklearn_ForestRegressor, BaseForest):
         return patching_status
 
     def _onedal_predict(self, X, queue=None):
-        X = check_array(
-            X, dtype=[np.float64, np.float32], force_all_finite=False
-        ) # Warning, order of dtype matters
         check_is_fitted(self, "_onedal_estimator")
 
         if sklearn_check_version("1.0"):
-            self._check_feature_names(X, reset=False)
+            X = self._validate_data(
+                X,
+                dtype=[np.float64, np.float32],
+                force_all_finite=False,
+                reset=False,
+                ensure_2d=True,
+            ) # Warning, order of dtype matters
+        else:
+            X = check_array(
+                X, dtype=[np.float64, np.float32], force_all_finite=False
+            ) # Warning, order of dtype matters
 
         return self._onedal_estimator.predict(X, queue=queue)
 
+    def _onedal_score(self, X, y, sample_weight=None, queue=None):
+        return r2_score(
+            y, self._onedal_predict(X, queue=queue), sample_weight=sample_weight
+        )
+
     def fit(self, X, y, sample_weight=None):
         dispatch(
             self,
@@ -1119,8 +1168,23 @@ class ForestRegressor(sklearn_ForestRegressor, BaseForest):
             X,
         )
 
+    @wrap_output_data
+    def score(self, X, y, sample_weight=None):
+        return dispatch(
+            self,
+            "score",
+            {
+                "onedal": self.__class__._onedal_score,
+                "sklearn": sklearn_ForestRegressor.score,
+            },
+            X,
+            y,
+            sample_weight=sample_weight,
+        )
+
     fit.__doc__ = sklearn_ForestRegressor.fit.__doc__
     predict.__doc__ = sklearn_ForestRegressor.predict.__doc__
+    score.__doc__ = sklearn_ForestRegressor.score.__doc__
 
 
 @control_n_jobs(decorated_methods=["fit", "predict", "predict_proba", "score"])
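For orientation, a small sketch (not part of the diff) of what the regressor changes mean for callers: score() now goes through dispatch() and, when the oneDAL conditions hold, is computed as r2_score over oneDAL predictions, while max_features strings are converted to absolute counts by the new _to_absolute_max_features helper. The data shapes and hyperparameters below are illustrative only.

import numpy as np
from sklearn.datasets import make_regression
from sklearnex.ensemble import RandomForestRegressor

X, y = make_regression(n_samples=200, n_features=8, random_state=0)

# max_features="sqrt" is resolved to max(1, int(np.sqrt(8))) == 2 candidate
# features per split before the parameters reach oneDAL.
est = RandomForestRegressor(max_features="sqrt", random_state=0).fit(X, y)

# R^2 via the new score() dispatch: oneDAL predictions plus sklearn's r2_score
# when supported, otherwise the stock sklearn scoring path.
print(est.score(X, y))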
sklearnex/ensemble/tests/test_forest.py CHANGED
@@ -14,6 +14,7 @@
 # limitations under the License.
 # ===============================================================================
 
+import numpy as np
 import pytest
 from numpy.testing import assert_allclose
 from sklearn.datasets import make_classification, make_regression
@@ -47,6 +48,8 @@ def test_sklearnex_import_rf_classifier(dataframe, queue):
 
 @pytest.mark.parametrize("dataframe,queue", get_dataframes_and_queues())
 def test_sklearnex_import_rf_regression(dataframe, queue):
+    if queue and queue.sycl_device.is_gpu:
+        pytest.skip("RF regressor predict for the GPU sycl_queue is buggy.")
     from sklearnex.ensemble import RandomForestRegressor
 
     X, y = make_regression(n_features=4, n_informative=2, random_state=0, shuffle=False)
@@ -67,6 +70,8 @@
 
 @pytest.mark.parametrize("dataframe,queue", get_dataframes_and_queues())
 def test_sklearnex_import_et_classifier(dataframe, queue):
+    if queue and queue.sycl_device.is_gpu:
+        pytest.skip("ET classifier predict for the GPU sycl_queue is buggy.")
     from sklearnex.ensemble import ExtraTreesClassifier
 
     X, y = make_classification(
@@ -88,6 +93,8 @@
 
 @pytest.mark.parametrize("dataframe,queue", get_dataframes_and_queues())
 def test_sklearnex_import_et_regression(dataframe, queue):
+    if queue and queue.sycl_device.is_gpu:
+        pytest.skip("ET regressor predict for the GPU sycl_queue is buggy.")
     from sklearnex.ensemble import ExtraTreesRegressor
 
     X, y = make_regression(n_features=1, random_state=0, shuffle=False)
sklearnex/glob/dispatcher.py CHANGED
@@ -17,18 +17,32 @@
 
 def get_patch_str(name=None, verbose=True):
     return f"""try:
+    # TEMP. FIX: sklearnex.patch_sklearn imports sklearn beforehand
+    # when it didn't initialized _threadpool_controller required for
+    # pairwise distances dispatching during imports.
+    # Manually setting and deleting _threadpool_controller during patch fixes it.
+    import sklearn
+    from threadpoolctl import ThreadpoolController
+    sklearn._threadpool_controller = ThreadpoolController()
     from sklearnex import patch_sklearn
     patch_sklearn(name={str(name)}, verbose={str(verbose)})
-    del patch_sklearn
+    del patch_sklearn, sklearn._threadpool_controller
 except ImportError:
     pass"""
 
 
 def get_patch_str_re():
     return r"""\ntry:
+    \# TEMP. FIX: sklearnex.patch_sklearn imports sklearn beforehand
+    \# when it didn't initialized _threadpool_controller required for
+    \# pairwise distances dispatching during imports.
+    \# Manually setting and deleting _threadpool_controller during patch fixes it.
+    import sklearn
+    from threadpoolctl import ThreadpoolController
+    sklearn._threadpool_controller = ThreadpoolController\(\)
     from sklearnex import patch_sklearn
     patch_sklearn\(name=.*, verbose=.*\)
-    del patch_sklearn
+    del patch_sklearn, sklearn._threadpool_controller
 except ImportError:
     pass\n"""
 
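The generated patch string above works around an import-order issue by giving sklearn a _threadpool_controller before patching. A standalone sketch of the same workaround is shown below; it simply mirrors the generated code and is not an additional public API.

import sklearn
from threadpoolctl import ThreadpoolController

# Attach the controller sklearn would normally create itself, so that
# pairwise-distances dispatching during patching does not fail.
sklearn._threadpool_controller = ThreadpoolController()
try:
    from sklearnex import patch_sklearn

    patch_sklearn(verbose=True)
finally:
    # Remove the temporary attribute again, as the generated code does.
    del sklearn._threadpool_controller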
sklearnex/linear_model/coordinate_descent.py CHANGED
@@ -15,3 +15,16 @@
 # ===============================================================================
 
 from daal4py.sklearn.linear_model import ElasticNet, Lasso
+from onedal._device_offload import support_usm_ndarray
+
+# Note: `sklearnex.linear_model.ElasticNet` only has functional
+# sycl GPU support. No GPU device will be offloaded.
+ElasticNet.fit = support_usm_ndarray(queue_param=False)(ElasticNet.fit)
+ElasticNet.predict = support_usm_ndarray(queue_param=False)(ElasticNet.predict)
+ElasticNet.score = support_usm_ndarray(queue_param=False)(ElasticNet.score)
+
+# Note: `sklearnex.linear_model.Lasso` only has functional
+# sycl GPU support. No GPU device will be offloaded.
+Lasso.fit = support_usm_ndarray(queue_param=False)(Lasso.fit)
+Lasso.predict = support_usm_ndarray(queue_param=False)(Lasso.predict)
+Lasso.score = support_usm_ndarray(queue_param=False)(Lasso.score)
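With the support_usm_ndarray wrappers above, ElasticNet and Lasso accept USM-backed inputs (e.g. dpctl tensors) even though, per the note in the diff, the computation itself stays on the CPU via daal4py. A hedged sketch follows, assuming dpctl is installed; the conversion behaviour described in the comments is inferred from that note rather than stated in this diff.

import dpctl.tensor as dpt
import numpy as np
from sklearnex.linear_model import Lasso

rng = np.random.default_rng(0)
X = dpt.asarray(rng.random((100, 5)))
y = dpt.asarray(rng.random(100))

# Accepted thanks to the support_usm_ndarray wrapper; per the note above,
# no GPU offload happens and the fit runs through the daal4py CPU backend.
model = Lasso(alpha=0.1).fit(X, y)
pred = model.predict(X)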