wavetrainer 0.0.10__tar.gz → 0.0.12__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (57)
  1. {wavetrainer-0.0.10/wavetrainer.egg-info → wavetrainer-0.0.12}/PKG-INFO +1 -1
  2. {wavetrainer-0.0.10 → wavetrainer-0.0.12}/setup.py +1 -1
  3. {wavetrainer-0.0.10 → wavetrainer-0.0.12}/wavetrainer/__init__.py +1 -1
  4. {wavetrainer-0.0.10 → wavetrainer-0.0.12}/wavetrainer/create.py +6 -0
  5. {wavetrainer-0.0.10 → wavetrainer-0.0.12}/wavetrainer/trainer.py +20 -2
  6. {wavetrainer-0.0.10 → wavetrainer-0.0.12/wavetrainer.egg-info}/PKG-INFO +1 -1
  7. {wavetrainer-0.0.10 → wavetrainer-0.0.12}/LICENSE +0 -0
  8. {wavetrainer-0.0.10 → wavetrainer-0.0.12}/MANIFEST.in +0 -0
  9. {wavetrainer-0.0.10 → wavetrainer-0.0.12}/README.md +0 -0
  10. {wavetrainer-0.0.10 → wavetrainer-0.0.12}/requirements.txt +0 -0
  11. {wavetrainer-0.0.10 → wavetrainer-0.0.12}/setup.cfg +0 -0
  12. {wavetrainer-0.0.10 → wavetrainer-0.0.12}/tests/__init__.py +0 -0
  13. {wavetrainer-0.0.10 → wavetrainer-0.0.12}/tests/model/__init__.py +0 -0
  14. {wavetrainer-0.0.10 → wavetrainer-0.0.12}/tests/model/catboost_kwargs_test.py +0 -0
  15. {wavetrainer-0.0.10 → wavetrainer-0.0.12}/tests/trainer_test.py +0 -0
  16. {wavetrainer-0.0.10 → wavetrainer-0.0.12}/wavetrainer/calibrator/__init__.py +0 -0
  17. {wavetrainer-0.0.10 → wavetrainer-0.0.12}/wavetrainer/calibrator/calibrator.py +0 -0
  18. {wavetrainer-0.0.10 → wavetrainer-0.0.12}/wavetrainer/calibrator/calibrator_router.py +0 -0
  19. {wavetrainer-0.0.10 → wavetrainer-0.0.12}/wavetrainer/calibrator/mapie_calibrator.py +0 -0
  20. {wavetrainer-0.0.10 → wavetrainer-0.0.12}/wavetrainer/calibrator/vennabers_calibrator.py +0 -0
  21. {wavetrainer-0.0.10 → wavetrainer-0.0.12}/wavetrainer/exceptions.py +0 -0
  22. {wavetrainer-0.0.10 → wavetrainer-0.0.12}/wavetrainer/fit.py +0 -0
  23. {wavetrainer-0.0.10 → wavetrainer-0.0.12}/wavetrainer/model/__init__.py +0 -0
  24. {wavetrainer-0.0.10 → wavetrainer-0.0.12}/wavetrainer/model/catboost_classifier_wrap.py +0 -0
  25. {wavetrainer-0.0.10 → wavetrainer-0.0.12}/wavetrainer/model/catboost_kwargs.py +0 -0
  26. {wavetrainer-0.0.10 → wavetrainer-0.0.12}/wavetrainer/model/catboost_model.py +0 -0
  27. {wavetrainer-0.0.10 → wavetrainer-0.0.12}/wavetrainer/model/catboost_regressor_wrap.py +0 -0
  28. {wavetrainer-0.0.10 → wavetrainer-0.0.12}/wavetrainer/model/model.py +0 -0
  29. {wavetrainer-0.0.10 → wavetrainer-0.0.12}/wavetrainer/model/model_router.py +0 -0
  30. {wavetrainer-0.0.10 → wavetrainer-0.0.12}/wavetrainer/model_type.py +0 -0
  31. {wavetrainer-0.0.10 → wavetrainer-0.0.12}/wavetrainer/params.py +0 -0
  32. {wavetrainer-0.0.10 → wavetrainer-0.0.12}/wavetrainer/reducer/__init__.py +0 -0
  33. {wavetrainer-0.0.10 → wavetrainer-0.0.12}/wavetrainer/reducer/base_selector_reducer.py +0 -0
  34. {wavetrainer-0.0.10 → wavetrainer-0.0.12}/wavetrainer/reducer/combined_reducer.py +0 -0
  35. {wavetrainer-0.0.10 → wavetrainer-0.0.12}/wavetrainer/reducer/constant_reducer.py +0 -0
  36. {wavetrainer-0.0.10 → wavetrainer-0.0.12}/wavetrainer/reducer/correlation_reducer.py +0 -0
  37. {wavetrainer-0.0.10 → wavetrainer-0.0.12}/wavetrainer/reducer/duplicate_reducer.py +0 -0
  38. {wavetrainer-0.0.10 → wavetrainer-0.0.12}/wavetrainer/reducer/nonnumeric_reducer.py +0 -0
  39. {wavetrainer-0.0.10 → wavetrainer-0.0.12}/wavetrainer/reducer/reducer.py +0 -0
  40. {wavetrainer-0.0.10 → wavetrainer-0.0.12}/wavetrainer/selector/__init__.py +0 -0
  41. {wavetrainer-0.0.10 → wavetrainer-0.0.12}/wavetrainer/selector/selector.py +0 -0
  42. {wavetrainer-0.0.10 → wavetrainer-0.0.12}/wavetrainer/weights/__init__.py +0 -0
  43. {wavetrainer-0.0.10 → wavetrainer-0.0.12}/wavetrainer/weights/class_weights.py +0 -0
  44. {wavetrainer-0.0.10 → wavetrainer-0.0.12}/wavetrainer/weights/combined_weights.py +0 -0
  45. {wavetrainer-0.0.10 → wavetrainer-0.0.12}/wavetrainer/weights/exponential_weights.py +0 -0
  46. {wavetrainer-0.0.10 → wavetrainer-0.0.12}/wavetrainer/weights/linear_weights.py +0 -0
  47. {wavetrainer-0.0.10 → wavetrainer-0.0.12}/wavetrainer/weights/noop_weights.py +0 -0
  48. {wavetrainer-0.0.10 → wavetrainer-0.0.12}/wavetrainer/weights/sigmoid_weights.py +0 -0
  49. {wavetrainer-0.0.10 → wavetrainer-0.0.12}/wavetrainer/weights/weights.py +0 -0
  50. {wavetrainer-0.0.10 → wavetrainer-0.0.12}/wavetrainer/weights/weights_router.py +0 -0
  51. {wavetrainer-0.0.10 → wavetrainer-0.0.12}/wavetrainer/windower/__init__.py +0 -0
  52. {wavetrainer-0.0.10 → wavetrainer-0.0.12}/wavetrainer/windower/windower.py +0 -0
  53. {wavetrainer-0.0.10 → wavetrainer-0.0.12}/wavetrainer.egg-info/SOURCES.txt +0 -0
  54. {wavetrainer-0.0.10 → wavetrainer-0.0.12}/wavetrainer.egg-info/dependency_links.txt +0 -0
  55. {wavetrainer-0.0.10 → wavetrainer-0.0.12}/wavetrainer.egg-info/not-zip-safe +0 -0
  56. {wavetrainer-0.0.10 → wavetrainer-0.0.12}/wavetrainer.egg-info/requires.txt +0 -0
  57. {wavetrainer-0.0.10 → wavetrainer-0.0.12}/wavetrainer.egg-info/top_level.txt +0 -0
{wavetrainer-0.0.10/wavetrainer.egg-info → wavetrainer-0.0.12}/PKG-INFO

@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: wavetrainer
-Version: 0.0.10
+Version: 0.0.12
 Summary: A library for automatically finding the optimal model within feature and hyperparameter space.
 Home-page: https://github.com/8W9aG/wavetrainer
 Author: Will Sackfield
{wavetrainer-0.0.10 → wavetrainer-0.0.12}/setup.py

@@ -23,7 +23,7 @@ def install_requires() -> typing.List[str]:
 
 setup(
     name='wavetrainer',
-    version='0.0.10',
+    version='0.0.12',
     description='A library for automatically finding the optimal model within feature and hyperparameter space.',
     long_description=long_description,
     long_description_content_type='text/markdown',
{wavetrainer-0.0.10 → wavetrainer-0.0.12}/wavetrainer/__init__.py

@@ -2,5 +2,5 @@
 
 from .create import create
 
-__VERSION__ = "0.0.10"
+__VERSION__ = "0.0.12"
 __all__ = ("create",)
{wavetrainer-0.0.10 → wavetrainer-0.0.12}/wavetrainer/create.py

@@ -1,5 +1,7 @@
 """A function for creating a new trainer."""
 
+# pylint: disable=too-many-arguments,too-many-positional-arguments
+
 import datetime
 
 from .trainer import Trainer
@@ -11,6 +13,8 @@ def create(
     test_size: float | datetime.timedelta | None = None,
     validation_size: float | datetime.timedelta | None = None,
     dt_column: str | None = None,
+    max_train_timeout: datetime.timedelta | None = None,
+    cutoff_dt: datetime.datetime | None = None,
 ) -> Trainer:
     """Create a trainer."""
     return Trainer(
@@ -19,4 +23,6 @@ def create(
         test_size=test_size,
         validation_size=validation_size,
         dt_column=dt_column,
+        max_train_timeout=max_train_timeout,
+        cutoff_dt=cutoff_dt,
     )
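The two new keyword arguments are passed straight through from create() to Trainer. Below is a minimal sketch of a call site using them; the positional arguments before test_size are not shown in this diff, so the "..." placeholder and the example values are illustrative only, not wavetrainer documentation.

import datetime

import wavetrainer as wt

# Illustrative call site for the new 0.0.12 keyword arguments.  Arguments
# before `test_size` are elided from this diff, so `...` stands in for them.
trainer = wt.create(
    ...,
    test_size=0.15,
    validation_size=0.15,
    dt_column="date",
    # New: cap each optuna optimization pass (converted to seconds internally).
    max_train_timeout=datetime.timedelta(hours=1),
    # New: drop rows dated on or after this point before fitting.
    cutoff_dt=datetime.datetime(2024, 1, 1),
)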
{wavetrainer-0.0.10 → wavetrainer-0.0.12}/wavetrainer/trainer.py

@@ -41,7 +41,7 @@ _DT_COLUMN_KEY = "dt_column"
 class Trainer(Fit):
     """A class for training and predicting from an array of data."""
 
-    # pylint: disable=too-many-arguments,too-many-positional-arguments,too-many-statements,too-many-locals,too-many-branches
+    # pylint: disable=too-many-arguments,too-many-positional-arguments,too-many-statements,too-many-locals,too-many-branches,too-many-instance-attributes
 
     def __init__(
         self,
@@ -51,6 +51,8 @@ class Trainer(Fit):
         test_size: float | datetime.timedelta | None = None,
         validation_size: float | datetime.timedelta | None = None,
         dt_column: str | None = None,
+        max_train_timeout: datetime.timedelta | None = None,
+        cutoff_dt: datetime.datetime | None = None,
     ):
         tqdm.tqdm.pandas()
 
@@ -64,6 +66,8 @@ class Trainer(Fit):
             test_size = 0.15
         if validation_size is None:
             validation_size = 0.15
+        if cutoff_dt is None:
+            cutoff_dt = datetime.datetime.now()
 
         params_file = os.path.join(self._folder, _PARAMS_FILENAME)
         if walkforward_timedelta is None:
@@ -137,6 +141,8 @@ class Trainer(Fit):
         self._test_size = test_size
         self._validation_size = validation_size
         self._dt_column = dt_column
+        self._max_train_timeout = max_train_timeout
+        self._cutoff_dt = cutoff_dt
 
     def _provide_study(self, column: str) -> optuna.Study:
         storage_name = f"sqlite:///{self._folder}/{column}/{_STUDYDB_FILENAME}"
@@ -170,6 +176,8 @@ class Trainer(Fit):
             if self._dt_column is None
             else pd.DatetimeIndex(pd.to_datetime(df[self._dt_column]))
         )
+        df = df[dt_index < self._cutoff_dt]  # type: ignore
+        y = y.iloc[: len(df)]
 
         def _fit_column(y_series: pd.Series):
             column_dir = os.path.join(self._folder, str(y_series.name))
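The cutoff filter masks the feature frame by its datetime index and then truncates the target to the same length. Here is a standalone sketch of that behaviour with made-up data and names (not wavetrainer internals), assuming the features and target share row order:

import datetime

import pandas as pd

# Toy data standing in for the trainer's feature frame and target.
dt_index = pd.DatetimeIndex(["2023-01-01", "2023-06-01", "2024-06-01"])
df = pd.DataFrame({"feature": [1.0, 2.0, 3.0]}, index=dt_index)
y = pd.Series([0, 1, 1], index=dt_index)

cutoff_dt = datetime.datetime(2024, 1, 1)
df = df[dt_index < cutoff_dt]  # keep only rows dated strictly before the cutoff
y = y.iloc[: len(df)]          # truncate the target to match, assuming aligned row order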
@@ -185,6 +193,7 @@ class Trainer(Fit):
             save: bool,
             split_idx: datetime.datetime,
         ) -> float:
+            print(f"Beginning trial for: {split_idx.isoformat()}")
             trial.set_user_attr(_IDX_USR_ATTR_KEY, split_idx.isoformat())
 
             train_dt_index = dt_index[: len(x)]
@@ -289,7 +298,12 @@ class Trainer(Fit):
             initial_trials = max(self._trials - len(study.trials), 0)
             if initial_trials > 0:
                 study.optimize(
-                    test_objective, n_trials=initial_trials, show_progress_bar=True
+                    test_objective,
+                    n_trials=initial_trials,
+                    show_progress_bar=True,
+                    timeout=None
+                    if self._max_train_timeout is None
+                    else self._max_train_timeout.total_seconds(),
                 )
 
             train_len = len(df[dt_index < start_test_index])
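Optuna's Study.optimize takes its timeout in seconds, which is why the timedelta is converted with total_seconds(). The following self-contained sketch shows the same pattern with a toy objective; the objective and study here are illustrative, not taken from wavetrainer:

import datetime

import optuna

max_train_timeout = datetime.timedelta(minutes=30)  # illustrative value

def objective(trial: optuna.Trial) -> float:
    # Toy objective: minimise (x - 2)^2.
    x = trial.suggest_float("x", -10.0, 10.0)
    return (x - 2.0) ** 2

study = optuna.create_study()
study.optimize(
    objective,
    n_trials=100,
    # None disables the time limit; otherwise optuna stops starting new
    # trials once elapsed wall-clock time exceeds this many seconds.
    timeout=None if max_train_timeout is None else max_train_timeout.total_seconds(),
)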
@@ -319,6 +333,7 @@ class Trainer(Fit):
                     found = True
                     break
             if found:
+                last_processed_dt = test_dt
                 continue
 
             test_df = df.iloc[: train_len + count + test_len]
@@ -336,6 +351,9 @@ class Trainer(Fit):
                     validate_objctive, idx=test_idx, series=test_series
                 ),
                 n_trials=1,
+                timeout=None
+                if self._max_train_timeout is None
+                else self._max_train_timeout.total_seconds(),
             )
 
             _fit(study.best_trial, test_df, test_series, True, test_idx)
{wavetrainer-0.0.10 → wavetrainer-0.0.12/wavetrainer.egg-info}/PKG-INFO

@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: wavetrainer
-Version: 0.0.10
+Version: 0.0.12
 Summary: A library for automatically finding the optimal model within feature and hyperparameter space.
 Home-page: https://github.com/8W9aG/wavetrainer
 Author: Will Sackfield