wavetrainer 0.0.11__tar.gz → 0.0.12__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- {wavetrainer-0.0.11/wavetrainer.egg-info → wavetrainer-0.0.12}/PKG-INFO +1 -1
- {wavetrainer-0.0.11 → wavetrainer-0.0.12}/setup.py +1 -1
- {wavetrainer-0.0.11 → wavetrainer-0.0.12}/wavetrainer/__init__.py +1 -1
- {wavetrainer-0.0.11 → wavetrainer-0.0.12}/wavetrainer/create.py +2 -0
- {wavetrainer-0.0.11 → wavetrainer-0.0.12}/wavetrainer/trainer.py +9 -1
- {wavetrainer-0.0.11 → wavetrainer-0.0.12/wavetrainer.egg-info}/PKG-INFO +1 -1
- {wavetrainer-0.0.11 → wavetrainer-0.0.12}/LICENSE +0 -0
- {wavetrainer-0.0.11 → wavetrainer-0.0.12}/MANIFEST.in +0 -0
- {wavetrainer-0.0.11 → wavetrainer-0.0.12}/README.md +0 -0
- {wavetrainer-0.0.11 → wavetrainer-0.0.12}/requirements.txt +0 -0
- {wavetrainer-0.0.11 → wavetrainer-0.0.12}/setup.cfg +0 -0
- {wavetrainer-0.0.11 → wavetrainer-0.0.12}/tests/__init__.py +0 -0
- {wavetrainer-0.0.11 → wavetrainer-0.0.12}/tests/model/__init__.py +0 -0
- {wavetrainer-0.0.11 → wavetrainer-0.0.12}/tests/model/catboost_kwargs_test.py +0 -0
- {wavetrainer-0.0.11 → wavetrainer-0.0.12}/tests/trainer_test.py +0 -0
- {wavetrainer-0.0.11 → wavetrainer-0.0.12}/wavetrainer/calibrator/__init__.py +0 -0
- {wavetrainer-0.0.11 → wavetrainer-0.0.12}/wavetrainer/calibrator/calibrator.py +0 -0
- {wavetrainer-0.0.11 → wavetrainer-0.0.12}/wavetrainer/calibrator/calibrator_router.py +0 -0
- {wavetrainer-0.0.11 → wavetrainer-0.0.12}/wavetrainer/calibrator/mapie_calibrator.py +0 -0
- {wavetrainer-0.0.11 → wavetrainer-0.0.12}/wavetrainer/calibrator/vennabers_calibrator.py +0 -0
- {wavetrainer-0.0.11 → wavetrainer-0.0.12}/wavetrainer/exceptions.py +0 -0
- {wavetrainer-0.0.11 → wavetrainer-0.0.12}/wavetrainer/fit.py +0 -0
- {wavetrainer-0.0.11 → wavetrainer-0.0.12}/wavetrainer/model/__init__.py +0 -0
- {wavetrainer-0.0.11 → wavetrainer-0.0.12}/wavetrainer/model/catboost_classifier_wrap.py +0 -0
- {wavetrainer-0.0.11 → wavetrainer-0.0.12}/wavetrainer/model/catboost_kwargs.py +0 -0
- {wavetrainer-0.0.11 → wavetrainer-0.0.12}/wavetrainer/model/catboost_model.py +0 -0
- {wavetrainer-0.0.11 → wavetrainer-0.0.12}/wavetrainer/model/catboost_regressor_wrap.py +0 -0
- {wavetrainer-0.0.11 → wavetrainer-0.0.12}/wavetrainer/model/model.py +0 -0
- {wavetrainer-0.0.11 → wavetrainer-0.0.12}/wavetrainer/model/model_router.py +0 -0
- {wavetrainer-0.0.11 → wavetrainer-0.0.12}/wavetrainer/model_type.py +0 -0
- {wavetrainer-0.0.11 → wavetrainer-0.0.12}/wavetrainer/params.py +0 -0
- {wavetrainer-0.0.11 → wavetrainer-0.0.12}/wavetrainer/reducer/__init__.py +0 -0
- {wavetrainer-0.0.11 → wavetrainer-0.0.12}/wavetrainer/reducer/base_selector_reducer.py +0 -0
- {wavetrainer-0.0.11 → wavetrainer-0.0.12}/wavetrainer/reducer/combined_reducer.py +0 -0
- {wavetrainer-0.0.11 → wavetrainer-0.0.12}/wavetrainer/reducer/constant_reducer.py +0 -0
- {wavetrainer-0.0.11 → wavetrainer-0.0.12}/wavetrainer/reducer/correlation_reducer.py +0 -0
- {wavetrainer-0.0.11 → wavetrainer-0.0.12}/wavetrainer/reducer/duplicate_reducer.py +0 -0
- {wavetrainer-0.0.11 → wavetrainer-0.0.12}/wavetrainer/reducer/nonnumeric_reducer.py +0 -0
- {wavetrainer-0.0.11 → wavetrainer-0.0.12}/wavetrainer/reducer/reducer.py +0 -0
- {wavetrainer-0.0.11 → wavetrainer-0.0.12}/wavetrainer/selector/__init__.py +0 -0
- {wavetrainer-0.0.11 → wavetrainer-0.0.12}/wavetrainer/selector/selector.py +0 -0
- {wavetrainer-0.0.11 → wavetrainer-0.0.12}/wavetrainer/weights/__init__.py +0 -0
- {wavetrainer-0.0.11 → wavetrainer-0.0.12}/wavetrainer/weights/class_weights.py +0 -0
- {wavetrainer-0.0.11 → wavetrainer-0.0.12}/wavetrainer/weights/combined_weights.py +0 -0
- {wavetrainer-0.0.11 → wavetrainer-0.0.12}/wavetrainer/weights/exponential_weights.py +0 -0
- {wavetrainer-0.0.11 → wavetrainer-0.0.12}/wavetrainer/weights/linear_weights.py +0 -0
- {wavetrainer-0.0.11 → wavetrainer-0.0.12}/wavetrainer/weights/noop_weights.py +0 -0
- {wavetrainer-0.0.11 → wavetrainer-0.0.12}/wavetrainer/weights/sigmoid_weights.py +0 -0
- {wavetrainer-0.0.11 → wavetrainer-0.0.12}/wavetrainer/weights/weights.py +0 -0
- {wavetrainer-0.0.11 → wavetrainer-0.0.12}/wavetrainer/weights/weights_router.py +0 -0
- {wavetrainer-0.0.11 → wavetrainer-0.0.12}/wavetrainer/windower/__init__.py +0 -0
- {wavetrainer-0.0.11 → wavetrainer-0.0.12}/wavetrainer/windower/windower.py +0 -0
- {wavetrainer-0.0.11 → wavetrainer-0.0.12}/wavetrainer.egg-info/SOURCES.txt +0 -0
- {wavetrainer-0.0.11 → wavetrainer-0.0.12}/wavetrainer.egg-info/dependency_links.txt +0 -0
- {wavetrainer-0.0.11 → wavetrainer-0.0.12}/wavetrainer.egg-info/not-zip-safe +0 -0
- {wavetrainer-0.0.11 → wavetrainer-0.0.12}/wavetrainer.egg-info/requires.txt +0 -0
- {wavetrainer-0.0.11 → wavetrainer-0.0.12}/wavetrainer.egg-info/top_level.txt +0 -0
{wavetrainer-0.0.11 → wavetrainer-0.0.12}/setup.py

@@ -23,7 +23,7 @@ def install_requires() -> typing.List[str]:
 
 setup(
     name='wavetrainer',
-    version='0.0.11',
+    version='0.0.12',
     description='A library for automatically finding the optimal model within feature and hyperparameter space.',
     long_description=long_description,
     long_description_content_type='text/markdown',
{wavetrainer-0.0.11 → wavetrainer-0.0.12}/wavetrainer/create.py

@@ -14,6 +14,7 @@ def create(
     validation_size: float | datetime.timedelta | None = None,
     dt_column: str | None = None,
     max_train_timeout: datetime.timedelta | None = None,
+    cutoff_dt: datetime.datetime | None = None,
 ) -> Trainer:
     """Create a trainer."""
     return Trainer(
@@ -23,4 +24,5 @@ def create(
         validation_size=validation_size,
         dt_column=dt_column,
         max_train_timeout=max_train_timeout,
+        cutoff_dt=cutoff_dt,
     )
{wavetrainer-0.0.11 → wavetrainer-0.0.12}/wavetrainer/trainer.py

@@ -41,7 +41,7 @@ _DT_COLUMN_KEY = "dt_column"
 class Trainer(Fit):
     """A class for training and predicting from an array of data."""
 
-    # pylint: disable=too-many-arguments,too-many-positional-arguments,too-many-statements,too-many-locals,too-many-branches
+    # pylint: disable=too-many-arguments,too-many-positional-arguments,too-many-statements,too-many-locals,too-many-branches,too-many-instance-attributes
 
     def __init__(
         self,
@@ -52,6 +52,7 @@ class Trainer(Fit):
         validation_size: float | datetime.timedelta | None = None,
         dt_column: str | None = None,
         max_train_timeout: datetime.timedelta | None = None,
+        cutoff_dt: datetime.datetime | None = None,
     ):
         tqdm.tqdm.pandas()
 
@@ -65,6 +66,8 @@ class Trainer(Fit):
             test_size = 0.15
         if validation_size is None:
             validation_size = 0.15
+        if cutoff_dt is None:
+            cutoff_dt = datetime.datetime.now()
 
         params_file = os.path.join(self._folder, _PARAMS_FILENAME)
         if walkforward_timedelta is None:
@@ -139,6 +142,7 @@ class Trainer(Fit):
         self._validation_size = validation_size
         self._dt_column = dt_column
         self._max_train_timeout = max_train_timeout
+        self._cutoff_dt = cutoff_dt
 
     def _provide_study(self, column: str) -> optuna.Study:
         storage_name = f"sqlite:///{self._folder}/{column}/{_STUDYDB_FILENAME}"
@@ -172,6 +176,8 @@ class Trainer(Fit):
             if self._dt_column is None
             else pd.DatetimeIndex(pd.to_datetime(df[self._dt_column]))
         )
+        df = df[dt_index < self._cutoff_dt]  # type: ignore
+        y = y.iloc[: len(df)]
 
         def _fit_column(y_series: pd.Series):
             column_dir = os.path.join(self._folder, str(y_series.name))
@@ -187,6 +193,7 @@ class Trainer(Fit):
             save: bool,
             split_idx: datetime.datetime,
         ) -> float:
+            print(f"Beginning trial for: {split_idx.isoformat()}")
             trial.set_user_attr(_IDX_USR_ATTR_KEY, split_idx.isoformat())
 
             train_dt_index = dt_index[: len(x)]
@@ -326,6 +333,7 @@ class Trainer(Fit):
                     found = True
                     break
             if found:
+                last_processed_dt = test_dt
                 continue
 
            test_df = df.iloc[: train_len + count + test_len]