wavetrainer 0.0.41__tar.gz → 0.0.43__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (68)
  1. {wavetrainer-0.0.41/wavetrainer.egg-info → wavetrainer-0.0.43}/PKG-INFO +1 -1
  2. {wavetrainer-0.0.41 → wavetrainer-0.0.43}/setup.py +1 -1
  3. {wavetrainer-0.0.41 → wavetrainer-0.0.43}/wavetrainer/__init__.py +1 -1
  4. {wavetrainer-0.0.41 → wavetrainer-0.0.43}/wavetrainer/model/xgboost/xgboost_model.py +5 -1
  5. {wavetrainer-0.0.41 → wavetrainer-0.0.43}/wavetrainer/selector/selector.py +13 -10
  6. {wavetrainer-0.0.41 → wavetrainer-0.0.43}/wavetrainer/trainer.py +6 -4
  7. {wavetrainer-0.0.41 → wavetrainer-0.0.43/wavetrainer.egg-info}/PKG-INFO +1 -1
  8. {wavetrainer-0.0.41 → wavetrainer-0.0.43}/LICENSE +0 -0
  9. {wavetrainer-0.0.41 → wavetrainer-0.0.43}/MANIFEST.in +0 -0
  10. {wavetrainer-0.0.41 → wavetrainer-0.0.43}/README.md +0 -0
  11. {wavetrainer-0.0.41 → wavetrainer-0.0.43}/requirements.txt +0 -0
  12. {wavetrainer-0.0.41 → wavetrainer-0.0.43}/setup.cfg +0 -0
  13. {wavetrainer-0.0.41 → wavetrainer-0.0.43}/tests/__init__.py +0 -0
  14. {wavetrainer-0.0.41 → wavetrainer-0.0.43}/tests/model/__init__.py +0 -0
  15. {wavetrainer-0.0.41 → wavetrainer-0.0.43}/tests/model/catboost_kwargs_test.py +0 -0
  16. {wavetrainer-0.0.41 → wavetrainer-0.0.43}/tests/trainer_test.py +0 -0
  17. {wavetrainer-0.0.41 → wavetrainer-0.0.43}/wavetrainer/calibrator/__init__.py +0 -0
  18. {wavetrainer-0.0.41 → wavetrainer-0.0.43}/wavetrainer/calibrator/calibrator.py +0 -0
  19. {wavetrainer-0.0.41 → wavetrainer-0.0.43}/wavetrainer/calibrator/calibrator_router.py +0 -0
  20. {wavetrainer-0.0.41 → wavetrainer-0.0.43}/wavetrainer/calibrator/mapie_calibrator.py +0 -0
  21. {wavetrainer-0.0.41 → wavetrainer-0.0.43}/wavetrainer/calibrator/vennabers_calibrator.py +0 -0
  22. {wavetrainer-0.0.41 → wavetrainer-0.0.43}/wavetrainer/create.py +0 -0
  23. {wavetrainer-0.0.41 → wavetrainer-0.0.43}/wavetrainer/exceptions.py +0 -0
  24. {wavetrainer-0.0.41 → wavetrainer-0.0.43}/wavetrainer/fit.py +0 -0
  25. {wavetrainer-0.0.41 → wavetrainer-0.0.43}/wavetrainer/model/__init__.py +0 -0
  26. {wavetrainer-0.0.41 → wavetrainer-0.0.43}/wavetrainer/model/catboost/__init__.py +0 -0
  27. {wavetrainer-0.0.41 → wavetrainer-0.0.43}/wavetrainer/model/catboost/catboost_classifier_wrap.py +0 -0
  28. {wavetrainer-0.0.41 → wavetrainer-0.0.43}/wavetrainer/model/catboost/catboost_kwargs.py +0 -0
  29. {wavetrainer-0.0.41 → wavetrainer-0.0.43}/wavetrainer/model/catboost/catboost_model.py +0 -0
  30. {wavetrainer-0.0.41 → wavetrainer-0.0.43}/wavetrainer/model/catboost/catboost_regressor_wrap.py +0 -0
  31. {wavetrainer-0.0.41 → wavetrainer-0.0.43}/wavetrainer/model/model.py +0 -0
  32. {wavetrainer-0.0.41 → wavetrainer-0.0.43}/wavetrainer/model/model_router.py +0 -0
  33. {wavetrainer-0.0.41 → wavetrainer-0.0.43}/wavetrainer/model/tabpfn/__init__.py +0 -0
  34. {wavetrainer-0.0.41 → wavetrainer-0.0.43}/wavetrainer/model/tabpfn/tabpfn_model.py +0 -0
  35. {wavetrainer-0.0.41 → wavetrainer-0.0.43}/wavetrainer/model/xgboost/__init__.py +0 -0
  36. {wavetrainer-0.0.41 → wavetrainer-0.0.43}/wavetrainer/model/xgboost/early_stopper.py +0 -0
  37. {wavetrainer-0.0.41 → wavetrainer-0.0.43}/wavetrainer/model/xgboost/xgboost_logger.py +0 -0
  38. {wavetrainer-0.0.41 → wavetrainer-0.0.43}/wavetrainer/model_type.py +0 -0
  39. {wavetrainer-0.0.41 → wavetrainer-0.0.43}/wavetrainer/params.py +0 -0
  40. {wavetrainer-0.0.41 → wavetrainer-0.0.43}/wavetrainer/reducer/__init__.py +0 -0
  41. {wavetrainer-0.0.41 → wavetrainer-0.0.43}/wavetrainer/reducer/base_selector_reducer.py +0 -0
  42. {wavetrainer-0.0.41 → wavetrainer-0.0.43}/wavetrainer/reducer/combined_reducer.py +0 -0
  43. {wavetrainer-0.0.41 → wavetrainer-0.0.43}/wavetrainer/reducer/constant_reducer.py +0 -0
  44. {wavetrainer-0.0.41 → wavetrainer-0.0.43}/wavetrainer/reducer/correlation_reducer.py +0 -0
  45. {wavetrainer-0.0.41 → wavetrainer-0.0.43}/wavetrainer/reducer/duplicate_reducer.py +0 -0
  46. {wavetrainer-0.0.41 → wavetrainer-0.0.43}/wavetrainer/reducer/non_categorical_numeric_columns.py +0 -0
  47. {wavetrainer-0.0.41 → wavetrainer-0.0.43}/wavetrainer/reducer/nonnumeric_reducer.py +0 -0
  48. {wavetrainer-0.0.41 → wavetrainer-0.0.43}/wavetrainer/reducer/reducer.py +0 -0
  49. {wavetrainer-0.0.41 → wavetrainer-0.0.43}/wavetrainer/reducer/select_by_single_feature_performance_reducer.py +0 -0
  50. {wavetrainer-0.0.41 → wavetrainer-0.0.43}/wavetrainer/reducer/smart_correlation_reducer.py +0 -0
  51. {wavetrainer-0.0.41 → wavetrainer-0.0.43}/wavetrainer/reducer/unseen_reducer.py +0 -0
  52. {wavetrainer-0.0.41 → wavetrainer-0.0.43}/wavetrainer/selector/__init__.py +0 -0
  53. {wavetrainer-0.0.41 → wavetrainer-0.0.43}/wavetrainer/weights/__init__.py +0 -0
  54. {wavetrainer-0.0.41 → wavetrainer-0.0.43}/wavetrainer/weights/class_weights.py +0 -0
  55. {wavetrainer-0.0.41 → wavetrainer-0.0.43}/wavetrainer/weights/combined_weights.py +0 -0
  56. {wavetrainer-0.0.41 → wavetrainer-0.0.43}/wavetrainer/weights/exponential_weights.py +0 -0
  57. {wavetrainer-0.0.41 → wavetrainer-0.0.43}/wavetrainer/weights/linear_weights.py +0 -0
  58. {wavetrainer-0.0.41 → wavetrainer-0.0.43}/wavetrainer/weights/noop_weights.py +0 -0
  59. {wavetrainer-0.0.41 → wavetrainer-0.0.43}/wavetrainer/weights/sigmoid_weights.py +0 -0
  60. {wavetrainer-0.0.41 → wavetrainer-0.0.43}/wavetrainer/weights/weights.py +0 -0
  61. {wavetrainer-0.0.41 → wavetrainer-0.0.43}/wavetrainer/weights/weights_router.py +0 -0
  62. {wavetrainer-0.0.41 → wavetrainer-0.0.43}/wavetrainer/windower/__init__.py +0 -0
  63. {wavetrainer-0.0.41 → wavetrainer-0.0.43}/wavetrainer/windower/windower.py +0 -0
  64. {wavetrainer-0.0.41 → wavetrainer-0.0.43}/wavetrainer.egg-info/SOURCES.txt +0 -0
  65. {wavetrainer-0.0.41 → wavetrainer-0.0.43}/wavetrainer.egg-info/dependency_links.txt +0 -0
  66. {wavetrainer-0.0.41 → wavetrainer-0.0.43}/wavetrainer.egg-info/not-zip-safe +0 -0
  67. {wavetrainer-0.0.41 → wavetrainer-0.0.43}/wavetrainer.egg-info/requires.txt +0 -0
  68. {wavetrainer-0.0.41 → wavetrainer-0.0.43}/wavetrainer.egg-info/top_level.txt +0 -0
@@ -1,6 +1,6 @@
  Metadata-Version: 2.1
  Name: wavetrainer
- Version: 0.0.41
+ Version: 0.0.43
  Summary: A library for automatically finding the optimal model within feature and hyperparameter space.
  Home-page: https://github.com/8W9aG/wavetrainer
  Author: Will Sackfield
@@ -23,7 +23,7 @@ def install_requires() -> typing.List[str]:
  
  setup(
      name='wavetrainer',
-     version='0.0.41',
+     version='0.0.43',
      description='A library for automatically finding the optimal model within feature and hyperparameter space.',
      long_description=long_description,
      long_description_content_type='text/markdown',
@@ -2,5 +2,5 @@
  
  from .create import create
  
- __VERSION__ = "0.0.41"
+ __VERSION__ = "0.0.43"
  __all__ = ("create",)
@@ -107,7 +107,11 @@ class XGBoostModel(Model):
      @property
      def feature_importances(self) -> dict[str, float]:
          bst = self._provide_xgboost()
-         return bst.get_booster().get_score(importance_type="weight")  # type: ignore
+         try:
+             return bst.get_booster().get_score(importance_type="weight")  # type: ignore
+         except XGBoostError as exc:
+             print(str(exc))
+             return {}
  
      def provide_estimator(self):
          return self._provide_xgboost()
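
The feature_importances change above wraps the booster query in a try/except so that an XGBoostError degrades to an empty mapping instead of propagating out of the property. A minimal standalone sketch of the same guard, assuming xgboost is installed and that XGBoostError is importable from xgboost.core (safe_feature_importances is a hypothetical helper name, not wavetrainer code):

# Standalone sketch of the new guard (hypothetical helper; assumes xgboost is
# installed and exposes XGBoostError from xgboost.core).
import numpy as np
from xgboost import XGBClassifier
from xgboost.core import XGBoostError


def safe_feature_importances(model: XGBClassifier) -> dict[str, float]:
    """Return per-feature weight scores, or {} if the booster cannot report them."""
    try:
        return model.get_booster().get_score(importance_type="weight")
    except XGBoostError as exc:
        # Mirrors the new behaviour: log the error and degrade to an empty dict.
        print(str(exc))
        return {}


X = np.random.rand(64, 4)
y = (X[:, 0] > 0.5).astype(int)
clf = XGBClassifier(n_estimators=5).fit(X, y)
print(safe_feature_importances(clf))  # e.g. {'f0': 10.0, 'f1': 3.0, ...}

Returning {} lets callers such as Selector.fit, which checks for empty importances, treat a model that cannot report scores as "no features ranked" rather than aborting the trial.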
@@ -1,6 +1,6 @@
  """The selector class."""
  
- # pylint: disable=too-many-locals
+ # pylint: disable=too-many-locals,line-too-long
  import functools
  import json
  import logging
@@ -59,12 +59,12 @@ class Selector(Params, Fit):
              raise ValueError("y is not a series.")
          if len(df.columns) <= 1:
              return self
-         n_features_to_select = max(1, int(len(df.columns) * self._feature_ratio))
-         steps = int((len(df.columns) - n_features_to_select) / self._steps)
+         print(
+             f"Performing feature selection with {self._steps} steps and a total ratio of {self._feature_ratio}"
+         )
          current_features = df.columns.values.tolist()
-         self._model.fit(df, y=y, w=w, eval_x=eval_x, eval_y=eval_y)
  
-         def set_current_features():
+         def set_current_features(required_features: int):
              nonlocal current_features
              feature_importances = self._model.feature_importances
              if not feature_importances:
@@ -80,24 +80,27 @@ class Selector(Params, Fit):
              )
              if not current_features:
                  current_features = [list(feature_importances.keys())[0]]
+             current_features = current_features[:required_features]
  
-         for i in range(steps):
+         n_features = len(current_features)
+         for i in range(self._steps):
              print(
                  f"Recursive Feature Elimination Step {i}, current features: {len(current_features)}"
              )
              ratio_diff = 1.0 - self._feature_ratio
-             ratio_step = ratio_diff / float(steps)
+             ratio_step = ratio_diff / float(self._steps)
              current_ratio = 1.0 - (ratio_step * i)
              n_features = max(1, int(len(df.columns) * current_ratio))
              if n_features >= len(current_features):
                  continue
-             set_current_features()
+ 
+             self._model.fit(df, y=y, w=w, eval_x=eval_x, eval_y=eval_y)
+             set_current_features(n_features)
              print(f"Reduced features to {len(current_features)}")
              df = df[current_features]
              if eval_x is not None:
                  eval_x = eval_x[current_features]
-         self._model.fit(df, y=y, w=w, eval_x=eval_x, eval_y=eval_y)
-         set_current_features()
+ 
          self._selector = current_features
  
          return self
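
The selector now runs a fixed number of elimination passes (self._steps) instead of deriving a step count from the column total: each pass interpolates a ratio between 1.0 and self._feature_ratio, refits the model on the surviving columns, and set_current_features truncates to the top required_features by importance. A toy reproduction of just the scheduling arithmetic (hypothetical function, not part of wavetrainer) makes the shrinking targets easy to see:

# Toy reproduction of the new elimination schedule; mirrors the loop in
# Selector.fit above without any model fitting.
def elimination_schedule(n_columns: int, feature_ratio: float, steps: int) -> list[int]:
    """Return the target feature count for each pass that actually trims features."""
    targets = []
    current = n_columns
    ratio_step = (1.0 - feature_ratio) / float(steps)
    for i in range(steps):
        current_ratio = 1.0 - (ratio_step * i)
        n_features = max(1, int(n_columns * current_ratio))
        if n_features >= current:
            continue  # step 0 (ratio 1.0) keeps everything, so it is skipped
        current = n_features
        targets.append(n_features)
    return targets


print(elimination_schedule(n_columns=100, feature_ratio=0.4, steps=6))
# [90, 80, 70, 60, 50]; the last pass uses ratio 1.0 - ratio_step * (steps - 1),
# so the final count stays one ratio_step above feature_ratio itself.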
@@ -281,13 +281,14 @@ class Trainer(Fit):
              x_test = selector.transform(x_test)
              print(f"Selection took {time.time() - start_selector}")
              start_train = time.time()
-             x_pred = model.fit_transform(
+             model.fit(
                  x_train,
                  y=y_train,
                  w=w,
                  eval_x=x_test if not no_evaluation else None,
                  eval_y=y_test if not no_evaluation else None,
              )
+             y_pred = model.transform(x_test)
              print(f"Training took {time.time() - start_train}")
  
              # Calibrate
@@ -295,13 +296,12 @@
              calibrator = CalibratorRouter(model)
              calibrator.set_options(trial, x)
              calibrator.fit(
-                 x_pred if calibrator.predictions_as_x(y_train) else x_train,
-                 y=y_train,
+                 y_pred if calibrator.predictions_as_x(y_test) else x_test,
+                 y=y_test,
              )
              print(f"Calibrating took {time.time() - start_calibrate}")
  
              # Output
-             y_pred = model.transform(x_test)
              cal_pred = calibrator.transform(
                  y_pred if calibrator.predictions_as_x(y_test) else x_test
              )
@@ -441,6 +441,8 @@ class Trainer(Fit):
                      if self._max_train_timeout is None
                      else self._max_train_timeout.total_seconds(),
                  )
+             else:
+                 break
  
          _fit(study.best_trial, test_df, test_series, True, test_idx, True)
          last_processed_dt = test_idx
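
In trainer.py the single fit_transform call is split into fit on the training split followed by transform on the held-out split, and the calibrator is now fitted on that held-out prediction/target pair instead of on the training data; the added else: break simply exits the enclosing loop when the branch that runs study.optimize is not taken. The sketch below uses hypothetical Dummy stand-ins rather than wavetrainer's real Model and CalibratorRouter, purely to show the reordered fit, transform, calibrate-on-test sequence:

# Hypothetical stand-ins used only to illustrate the new ordering; the real
# Model and CalibratorRouter come from wavetrainer itself.
class DummyModel:
    def fit(self, x, y=None, **kwargs):
        self.mean_ = sum(y) / len(y)
        return self

    def transform(self, x):
        return [self.mean_] * len(x)


class DummyCalibrator:
    def predictions_as_x(self, y):
        return True  # this calibrator consumes model predictions, not raw features

    def fit(self, x, y=None):
        self.offset_ = sum(y) / len(y) - sum(x) / len(x)
        return self

    def transform(self, x):
        return [value + self.offset_ for value in x]


x_train, y_train = [[0], [1], [2]], [0.0, 1.0, 2.0]
x_test, y_test = [[3], [4]], [3.0, 4.0]

model = DummyModel().fit(x_train, y=y_train)  # fit only on the training split
y_pred = model.transform(x_test)              # then predict the held-out split
calibrator = DummyCalibrator()
calibrator.fit(y_pred if calibrator.predictions_as_x(y_test) else x_test, y=y_test)
cal_pred = calibrator.transform(y_pred if calibrator.predictions_as_x(y_test) else x_test)
print(y_pred, cal_pred)  # [1.0, 1.0] [3.5, 3.5]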
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.1
2
2
  Name: wavetrainer
3
- Version: 0.0.41
3
+ Version: 0.0.43
4
4
  Summary: A library for automatically finding the optimal model within feature and hyperparameter space.
5
5
  Home-page: https://github.com/8W9aG/wavetrainer
6
6
  Author: Will Sackfield