mlpack-4.6.1-cp38-cp38-win_amd64.whl → mlpack-4.6.2-cp38-cp38-win_amd64.whl

This diff shows the changes between publicly released package versions as they appear in their public registry, and is provided for informational purposes only.
Files changed (103)
  1. mlpack/__init__.py +3 -3
  2. mlpack/adaboost_classify.cp38-win_amd64.pyd +0 -0
  3. mlpack/adaboost_probabilities.cp38-win_amd64.pyd +0 -0
  4. mlpack/adaboost_train.cp38-win_amd64.pyd +0 -0
  5. mlpack/approx_kfn.cp38-win_amd64.pyd +0 -0
  6. mlpack/arma_numpy.cp38-win_amd64.pyd +0 -0
  7. mlpack/bayesian_linear_regression.cp38-win_amd64.pyd +0 -0
  8. mlpack/cf.cp38-win_amd64.pyd +0 -0
  9. mlpack/dbscan.cp38-win_amd64.pyd +0 -0
  10. mlpack/decision_tree.cp38-win_amd64.pyd +0 -0
  11. mlpack/det.cp38-win_amd64.pyd +0 -0
  12. mlpack/emst.cp38-win_amd64.pyd +0 -0
  13. mlpack/fastmks.cp38-win_amd64.pyd +0 -0
  14. mlpack/gmm_generate.cp38-win_amd64.pyd +0 -0
  15. mlpack/gmm_probability.cp38-win_amd64.pyd +0 -0
  16. mlpack/gmm_train.cp38-win_amd64.pyd +0 -0
  17. mlpack/hmm_generate.cp38-win_amd64.pyd +0 -0
  18. mlpack/hmm_loglik.cp38-win_amd64.pyd +0 -0
  19. mlpack/hmm_train.cp38-win_amd64.pyd +0 -0
  20. mlpack/hmm_viterbi.cp38-win_amd64.pyd +0 -0
  21. mlpack/hoeffding_tree.cp38-win_amd64.pyd +0 -0
  22. mlpack/image_converter.cp38-win_amd64.pyd +0 -0
  23. mlpack/include/mlpack/core/cv/k_fold_cv.hpp +21 -12
  24. mlpack/include/mlpack/core/cv/k_fold_cv_impl.hpp +49 -39
  25. mlpack/include/mlpack/core/data/detect_file_type_impl.hpp +9 -46
  26. mlpack/include/mlpack/core/data/save_impl.hpp +315 -315
  27. mlpack/include/mlpack/core/data/utilities.hpp +158 -158
  28. mlpack/include/mlpack/core/math/ccov.hpp +1 -0
  29. mlpack/include/mlpack/core/math/ccov_impl.hpp +4 -5
  30. mlpack/include/mlpack/core/math/make_alias.hpp +98 -3
  31. mlpack/include/mlpack/core/util/arma_traits.hpp +19 -2
  32. mlpack/include/mlpack/core/util/gitversion.hpp +1 -1
  33. mlpack/include/mlpack/core/util/sfinae_utility.hpp +24 -2
  34. mlpack/include/mlpack/core/util/version.hpp +1 -1
  35. mlpack/include/mlpack/methods/ann/dists/bernoulli_distribution_impl.hpp +1 -2
  36. mlpack/include/mlpack/methods/ann/init_rules/network_init.hpp +5 -5
  37. mlpack/include/mlpack/methods/ann/layer/batch_norm.hpp +3 -2
  38. mlpack/include/mlpack/methods/ann/layer/batch_norm_impl.hpp +19 -20
  39. mlpack/include/mlpack/methods/ann/layer/concat.hpp +1 -0
  40. mlpack/include/mlpack/methods/ann/layer/concat_impl.hpp +6 -7
  41. mlpack/include/mlpack/methods/ann/layer/convolution_impl.hpp +3 -3
  42. mlpack/include/mlpack/methods/ann/layer/grouped_convolution_impl.hpp +3 -3
  43. mlpack/include/mlpack/methods/ann/layer/linear3d.hpp +1 -0
  44. mlpack/include/mlpack/methods/ann/layer/linear3d_impl.hpp +11 -14
  45. mlpack/include/mlpack/methods/ann/layer/max_pooling.hpp +5 -4
  46. mlpack/include/mlpack/methods/ann/layer/max_pooling_impl.hpp +15 -14
  47. mlpack/include/mlpack/methods/ann/layer/mean_pooling.hpp +3 -2
  48. mlpack/include/mlpack/methods/ann/layer/mean_pooling_impl.hpp +14 -15
  49. mlpack/include/mlpack/methods/ann/layer/multihead_attention.hpp +6 -5
  50. mlpack/include/mlpack/methods/ann/layer/multihead_attention_impl.hpp +24 -25
  51. mlpack/include/mlpack/methods/ann/layer/nearest_interpolation.hpp +1 -0
  52. mlpack/include/mlpack/methods/ann/layer/nearest_interpolation_impl.hpp +4 -4
  53. mlpack/include/mlpack/methods/ann/layer/padding.hpp +1 -0
  54. mlpack/include/mlpack/methods/ann/layer/padding_impl.hpp +12 -13
  55. mlpack/include/mlpack/methods/ann/layer/recurrent_layer.hpp +3 -2
  56. mlpack/include/mlpack/methods/ann/loss_functions/cosine_embedding_loss_impl.hpp +5 -4
  57. mlpack/include/mlpack/methods/ann/rnn.hpp +19 -18
  58. mlpack/include/mlpack/methods/ann/rnn_impl.hpp +15 -15
  59. mlpack/include/mlpack/methods/bayesian_linear_regression/bayesian_linear_regression_impl.hpp +3 -8
  60. mlpack/include/mlpack/methods/decision_tree/fitness_functions/gini_gain.hpp +5 -8
  61. mlpack/include/mlpack/methods/decision_tree/fitness_functions/information_gain.hpp +5 -8
  62. mlpack/include/mlpack/methods/gmm/diagonal_gmm_impl.hpp +2 -1
  63. mlpack/include/mlpack/methods/gmm/eigenvalue_ratio_constraint.hpp +3 -3
  64. mlpack/include/mlpack/methods/gmm/gmm_impl.hpp +2 -1
  65. mlpack/include/mlpack/methods/hmm/hmm_impl.hpp +10 -5
  66. mlpack/include/mlpack/methods/random_forest/random_forest.hpp +57 -37
  67. mlpack/include/mlpack/methods/random_forest/random_forest_impl.hpp +69 -59
  68. mlpack/kde.cp38-win_amd64.pyd +0 -0
  69. mlpack/kernel_pca.cp38-win_amd64.pyd +0 -0
  70. mlpack/kfn.cp38-win_amd64.pyd +0 -0
  71. mlpack/kmeans.cp38-win_amd64.pyd +0 -0
  72. mlpack/knn.cp38-win_amd64.pyd +0 -0
  73. mlpack/krann.cp38-win_amd64.pyd +0 -0
  74. mlpack/lars.cp38-win_amd64.pyd +0 -0
  75. mlpack/linear_regression_predict.cp38-win_amd64.pyd +0 -0
  76. mlpack/linear_regression_train.cp38-win_amd64.pyd +0 -0
  77. mlpack/linear_svm.cp38-win_amd64.pyd +0 -0
  78. mlpack/lmnn.cp38-win_amd64.pyd +0 -0
  79. mlpack/local_coordinate_coding.cp38-win_amd64.pyd +0 -0
  80. mlpack/logistic_regression.cp38-win_amd64.pyd +0 -0
  81. mlpack/lsh.cp38-win_amd64.pyd +0 -0
  82. mlpack/mean_shift.cp38-win_amd64.pyd +0 -0
  83. mlpack/nbc.cp38-win_amd64.pyd +0 -0
  84. mlpack/nca.cp38-win_amd64.pyd +0 -0
  85. mlpack/nmf.cp38-win_amd64.pyd +0 -0
  86. mlpack/pca.cp38-win_amd64.pyd +0 -0
  87. mlpack/perceptron.cp38-win_amd64.pyd +0 -0
  88. mlpack/preprocess_binarize.cp38-win_amd64.pyd +0 -0
  89. mlpack/preprocess_describe.cp38-win_amd64.pyd +0 -0
  90. mlpack/preprocess_one_hot_encoding.cp38-win_amd64.pyd +0 -0
  91. mlpack/preprocess_scale.cp38-win_amd64.pyd +0 -0
  92. mlpack/preprocess_split.cp38-win_amd64.pyd +0 -0
  93. mlpack/radical.cp38-win_amd64.pyd +0 -0
  94. mlpack/random_forest.cp38-win_amd64.pyd +0 -0
  95. mlpack/softmax_regression.cp38-win_amd64.pyd +0 -0
  96. mlpack/sparse_coding.cp38-win_amd64.pyd +0 -0
  97. mlpack-4.6.2.dist-info/DELVEWHEEL +2 -0
  98. {mlpack-4.6.1.dist-info → mlpack-4.6.2.dist-info}/METADATA +2 -2
  99. {mlpack-4.6.1.dist-info → mlpack-4.6.2.dist-info}/RECORD +102 -102
  100. mlpack-4.6.1.dist-info/DELVEWHEEL +0 -2
  101. {mlpack-4.6.1.dist-info → mlpack-4.6.2.dist-info}/WHEEL +0 -0
  102. {mlpack-4.6.1.dist-info → mlpack-4.6.2.dist-info}/top_level.txt +0 -0
  103. mlpack.libs/{.load-order-mlpack-4.6.1 → .load-order-mlpack-4.6.2} +1 -1
mlpack/include/mlpack/methods/random_forest/random_forest_impl.hpp

@@ -46,7 +46,7 @@ template<
     bool UseBootstrap,
     typename BootstrapType
 >
-template<typename MatType>
+template<typename MatType, typename LabelsType>
 RandomForest<
     FitnessFunction,
     DimensionSelectionType,
@@ -55,7 +55,7 @@ RandomForest<
     UseBootstrap,
     BootstrapType
 >::RandomForest(const MatType& dataset,
-                const arma::Row<size_t>& labels,
+                const LabelsType& labels,
                 const size_t numClasses,
                 const size_t numTrees,
                 const size_t minimumLeafSize,
@@ -68,8 +68,8 @@ RandomForest<
   // Pass off work to the Train() method.
   data::DatasetInfo info; // Ignored.
   arma::rowvec weights; // Fake weights, not used.
-  Train<false, false>(dataset, info, labels, numClasses, weights, numTrees,
-      minimumLeafSize, minimumGainSplit, maximumDepth, false,
+  TrainInternal<false, false>(dataset, info, labels, numClasses, weights,
+      numTrees, minimumLeafSize, minimumGainSplit, maximumDepth, false,
       dimensionSelector, bootstrap);
 }
 
@@ -81,7 +81,7 @@ template<
     bool UseBootstrap,
     typename BootstrapType
 >
-template<typename MatType>
+template<typename MatType, typename LabelsType>
 RandomForest<
     FitnessFunction,
     DimensionSelectionType,
@@ -91,7 +91,7 @@ RandomForest<
     BootstrapType
 >::RandomForest(const MatType& dataset,
                 const data::DatasetInfo& datasetInfo,
-                const arma::Row<size_t>& labels,
+                const LabelsType& labels,
                 const size_t numClasses,
                 const size_t numTrees,
                 const size_t minimumLeafSize,
@@ -103,7 +103,7 @@ RandomForest<
 {
   // Pass off work to the Train() method.
   arma::rowvec weights; // Fake weights, not used.
-  Train<false, true>(dataset, datasetInfo, labels, numClasses, weights,
+  TrainInternal<false, true>(dataset, datasetInfo, labels, numClasses, weights,
       numTrees, minimumLeafSize, minimumGainSplit, maximumDepth, false,
       dimensionSelector, bootstrap);
 }
@@ -116,7 +116,7 @@ template<
     bool UseBootstrap,
     typename BootstrapType
 >
-template<typename MatType>
+template<typename MatType, typename LabelsType, typename WeightsType>
 RandomForest<
     FitnessFunction,
     DimensionSelectionType,
@@ -125,21 +125,23 @@ RandomForest<
     UseBootstrap,
     BootstrapType
 >::RandomForest(const MatType& dataset,
-                const arma::Row<size_t>& labels,
+                const LabelsType& labels,
                 const size_t numClasses,
-                const arma::rowvec& weights,
+                const WeightsType& weights,
                 const size_t numTrees,
                 const size_t minimumLeafSize,
                 const double minimumGainSplit,
                 const size_t maximumDepth,
                 DimensionSelectionType dimensionSelector,
-                BootstrapType bootstrap) :
+                BootstrapType bootstrap,
+                const std::enable_if_t<arma::is_arma_type<
+                    std::remove_reference_t<WeightsType>>::value>*) :
     avgGain(0.0)
 {
   // Pass off work to the Train() method.
   data::DatasetInfo info; // Ignored by Train().
-  Train<true, false>(dataset, info, labels, numClasses, weights, numTrees,
-      minimumLeafSize, minimumGainSplit, maximumDepth, false,
+  TrainInternal<true, false>(dataset, info, labels, numClasses, weights,
+      numTrees, minimumLeafSize, minimumGainSplit, maximumDepth, false,
       dimensionSelector, bootstrap);
 }
 
@@ -151,7 +153,7 @@ template<
     bool UseBootstrap,
     typename BootstrapType
 >
-template<typename MatType>
+template<typename MatType, typename LabelsType, typename WeightsType>
 RandomForest<
     FitnessFunction,
     DimensionSelectionType,
@@ -161,19 +163,21 @@ RandomForest<
     BootstrapType
 >::RandomForest(const MatType& dataset,
                 const data::DatasetInfo& datasetInfo,
-                const arma::Row<size_t>& labels,
+                const LabelsType& labels,
                 const size_t numClasses,
-                const arma::rowvec& weights,
+                const WeightsType& weights,
                 const size_t numTrees,
                 const size_t minimumLeafSize,
                 const double minimumGainSplit,
                 const size_t maximumDepth,
                 DimensionSelectionType dimensionSelector,
-                BootstrapType bootstrap) :
+                BootstrapType bootstrap,
+                const std::enable_if_t<arma::is_arma_type<
+                    std::remove_reference_t<WeightsType>>::value>*) :
     avgGain(0.0)
 {
   // Pass off work to the Train() method.
-  Train<true, true>(dataset, datasetInfo, labels, numClasses, weights,
+  TrainInternal<true, true>(dataset, datasetInfo, labels, numClasses, weights,
       numTrees, minimumLeafSize, minimumGainSplit, maximumDepth, false,
       dimensionSelector, bootstrap);
 }
@@ -186,7 +190,7 @@ template<
     bool UseBootstrap,
     typename BootstrapType
 >
-template<typename MatType>
+template<typename MatType, typename LabelsType>
 double RandomForest<
     FitnessFunction,
     DimensionSelectionType,
@@ -195,7 +199,7 @@ double RandomForest<
     UseBootstrap,
     BootstrapType
 >::Train(const MatType& dataset,
-         const arma::Row<size_t>& labels,
+         const LabelsType& labels,
          const size_t numClasses,
          const size_t numTrees,
          const size_t minimumLeafSize,
@@ -208,10 +212,9 @@ double RandomForest<
   // Pass off to Train().
   data::DatasetInfo datasetInfo; // Ignored by Train().
   arma::rowvec weights; // Ignored by Train().
-  return
-      Train<false, false>(dataset, datasetInfo, labels, numClasses, weights,
-      numTrees, minimumLeafSize, minimumGainSplit, maximumDepth, warmStart,
-      dimensionSelector, bootstrap);
+  return TrainInternal<false, false>(dataset, datasetInfo, labels, numClasses,
+      weights, numTrees, minimumLeafSize, minimumGainSplit, maximumDepth,
+      warmStart, dimensionSelector, bootstrap);
 }
 
 template<
@@ -222,7 +225,7 @@ template<
     bool UseBootstrap,
     typename BootstrapType
 >
-template<typename MatType>
+template<typename MatType, typename LabelsType>
 double RandomForest<
     FitnessFunction,
     DimensionSelectionType,
@@ -232,7 +235,7 @@ double RandomForest<
     BootstrapType
 >::Train(const MatType& dataset,
          const data::DatasetInfo& datasetInfo,
-         const arma::Row<size_t>& labels,
+         const LabelsType& labels,
          const size_t numClasses,
          const size_t numTrees,
          const size_t minimumLeafSize,
@@ -244,10 +247,9 @@ double RandomForest<
 {
   // Pass off to Train().
   arma::rowvec weights; // Ignored by Train().
-  return
-      Train<false, true>(dataset, datasetInfo, labels, numClasses, weights,
-      numTrees, minimumLeafSize, minimumGainSplit, maximumDepth, warmStart,
-      dimensionSelector, bootstrap);
+  return TrainInternal<false, true>(dataset, datasetInfo, labels, numClasses,
+      weights, numTrees, minimumLeafSize, minimumGainSplit, maximumDepth,
+      warmStart, dimensionSelector, bootstrap);
 }
 
 template<
@@ -258,7 +260,7 @@ template<
     bool UseBootstrap,
     typename BootstrapType
 >
-template<typename MatType>
+template<typename MatType, typename LabelsType, typename WeightsType>
 double RandomForest<
     FitnessFunction,
     DimensionSelectionType,
@@ -267,23 +269,24 @@ double RandomForest<
     UseBootstrap,
     BootstrapType
 >::Train(const MatType& dataset,
-         const arma::Row<size_t>& labels,
+         const LabelsType& labels,
          const size_t numClasses,
-         const arma::rowvec& weights,
+         const WeightsType& weights,
          const size_t numTrees,
          const size_t minimumLeafSize,
          const double minimumGainSplit,
          const size_t maximumDepth,
         const bool warmStart,
         DimensionSelectionType dimensionSelector,
-        BootstrapType bootstrap)
+        BootstrapType bootstrap,
+        const std::enable_if_t<arma::is_arma_type<
+            std::remove_reference_t<WeightsType>>::value>*)
 {
   // Pass off to Train().
   data::DatasetInfo datasetInfo; // Ignored by Train().
-  return
-      Train<true, false>(dataset, datasetInfo, labels, numClasses, weights,
-      numTrees, minimumLeafSize, minimumGainSplit, maximumDepth, warmStart,
-      dimensionSelector, bootstrap);
+  return TrainInternal<true, false>(dataset, datasetInfo, labels, numClasses,
+      weights, numTrees, minimumLeafSize, minimumGainSplit, maximumDepth,
+      warmStart, dimensionSelector, bootstrap);
 }
 
 template<
@@ -294,7 +297,7 @@ template<
     bool UseBootstrap,
     typename BootstrapType
 >
-template<typename MatType>
+template<typename MatType, typename LabelsType, typename WeightsType>
 double RandomForest<
     FitnessFunction,
     DimensionSelectionType,
@@ -304,22 +307,23 @@ double RandomForest<
     BootstrapType
 >::Train(const MatType& dataset,
          const data::DatasetInfo& datasetInfo,
-         const arma::Row<size_t>& labels,
+         const LabelsType& labels,
          const size_t numClasses,
-         const arma::rowvec& weights,
+         const WeightsType& weights,
          const size_t numTrees,
          const size_t minimumLeafSize,
          const double minimumGainSplit,
         const size_t maximumDepth,
         const bool warmStart,
         DimensionSelectionType dimensionSelector,
-        BootstrapType bootstrap)
+        BootstrapType bootstrap,
+        const std::enable_if_t<arma::is_arma_type<
+            std::remove_reference_t<WeightsType>>::value>*)
 {
   // Pass off to Train().
-  return
-      Train<true, true>(dataset, datasetInfo, labels, numClasses, weights,
-      numTrees, minimumLeafSize, minimumGainSplit, maximumDepth, warmStart,
-      dimensionSelector, bootstrap);
+  return TrainInternal<true, true>(dataset, datasetInfo, labels, numClasses,
+      weights, numTrees, minimumLeafSize, minimumGainSplit, maximumDepth,
+      warmStart, dimensionSelector, bootstrap);
 }
 
 template<
@@ -515,7 +519,13 @@ template<
     bool UseBootstrap,
     typename BootstrapType
 >
-template<bool UseWeights, bool UseDatasetInfo, typename MatType>
+template<
+    bool UseWeights,
+    bool UseDatasetInfo,
+    typename MatType,
+    typename LabelsType,
+    typename WeightsType
+>
 double RandomForest<
     FitnessFunction,
     DimensionSelectionType,
@@ -523,18 +533,18 @@ double RandomForest<
     CategoricalSplitType,
     UseBootstrap,
     BootstrapType
->::Train(const MatType& dataset,
-         const data::DatasetInfo& datasetInfo,
-         const arma::Row<size_t>& labels,
-         const size_t numClasses,
-         const arma::rowvec& weights,
-         const size_t numTrees,
-         const size_t minimumLeafSize,
-         const double minimumGainSplit,
-         const size_t maximumDepth,
-         const bool warmStart,
-         DimensionSelectionType& dimensionSelector,
-         BootstrapType& bootstrap)
+>::TrainInternal(const MatType& dataset,
+                 const data::DatasetInfo& datasetInfo,
+                 const LabelsType& labels,
+                 const size_t numClasses,
+                 const WeightsType& weights,
+                 const size_t numTrees,
+                 const size_t minimumLeafSize,
+                 const double minimumGainSplit,
+                 const size_t maximumDepth,
+                 const bool warmStart,
+                 DimensionSelectionType& dimensionSelector,
+                 BootstrapType& bootstrap)
 {
   // Reset the forest if we are not doing a warm-start.
   if (!warmStart)
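The random_forest_impl.hpp hunks above generalize the RandomForest constructors and Train() overloads from hard-coded arma::Row<size_t> labels and arma::rowvec weights to deduced LabelsType/WeightsType template parameters (the weighted overloads gain an enable_if guard on arma::is_arma_type so they do not collide with the unweighted ones), and route all overloads through the renamed private helper TrainInternal(). The following is a minimal usage sketch, not part of the diff, assuming mlpack 4.6.2's single-header <mlpack.hpp> and the default RandomForest<> template arguments; the toy data and parameter values are hypothetical. It simply shows that a caller using the same Armadillo types as before still compiles against the relaxed signatures.

```cpp
// Sketch only: hypothetical toy data, default RandomForest<> template args.
#include <mlpack.hpp>

int main()
{
  // 10-dimensional dataset with 200 points and 3 classes.
  arma::mat dataset(10, 200, arma::fill::randu);
  arma::Row<size_t> labels(200);
  for (size_t i = 0; i < labels.n_elem; ++i)
    labels[i] = i % 3;
  arma::rowvec weights(200, arma::fill::ones); // Per-instance weights.

  // Weighted constructor: LabelsType deduces to arma::Row<size_t> and
  // WeightsType to arma::rowvec; the enable_if guard only requires that the
  // weights argument is an Armadillo type.
  mlpack::RandomForest<> rf(dataset, labels, 3 /* numClasses */, weights,
      20 /* numTrees */);

  arma::Row<size_t> predictions;
  rf.Classify(dataset, predictions);

  return 0;
}
```

The apparent intent of the change is that other Armadillo label and weight representations (for example, a different element type) can now be passed directly and deduced, while TrainInternal() keeps the shared implementation out of the public Train() overload set.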
mlpack-4.6.2.dist-info/DELVEWHEEL

@@ -0,0 +1,2 @@
+Version: 1.10.0
+Arguments: ['C:\\Users\\VssAdministrator\\AppData\\Local\\Temp\\cibw-run-85089mfr\\cp38-win_amd64\\build\\venv\\Scripts\\delvewheel', 'repair', '--add-path', 'D:\\a\\1\\s/OpenBLAS-0.3.21/bin/', '-w', 'C:\\Users\\VssAdministrator\\AppData\\Local\\Temp\\cibw-run-85089mfr\\cp38-win_amd64\\repaired_wheel', 'C:\\Users\\VssAdministrator\\AppData\\Local\\Temp\\cibw-run-85089mfr\\cp38-win_amd64\\built_wheel\\mlpack-4.6.2-cp38-cp38-win_amd64.whl']
{mlpack-4.6.1.dist-info → mlpack-4.6.2.dist-info}/METADATA

@@ -1,12 +1,12 @@
 Metadata-Version: 2.1
 Name: mlpack
-Version: 4.6.1
+Version: 4.6.2
 Summary: a flexible, fast machine learning library
 Home-page: http://www.mlpack.org/
 Author: mlpack developers
 Author-email: mlpack@lists.mlpack.org
 License: BSD
-Project-URL: Documentation, http://www.mlpack.org/doc/mlpack-4.6.1/python.html
+Project-URL: Documentation, http://www.mlpack.org/doc/mlpack-4.6.2/python.html
 Project-URL: Source, https://github.com/mlpack/mlpack/
 Project-URL: Tracker, https://github.com/mlpack/mlpack/issues
 Keywords: machine learning,data mining,deep learning,optimization