ins-pricing 0.4.4-py3-none-any.whl → 0.5.0-py3-none-any.whl
This diff compares the contents of two package versions publicly released to a supported registry. It is provided for informational purposes only and reflects the versions as they appear in that registry.
- ins_pricing/README.md +74 -56
- ins_pricing/__init__.py +142 -90
- ins_pricing/cli/BayesOpt_entry.py +52 -50
- ins_pricing/cli/BayesOpt_incremental.py +832 -898
- ins_pricing/cli/Explain_Run.py +31 -23
- ins_pricing/cli/Explain_entry.py +532 -579
- ins_pricing/cli/Pricing_Run.py +31 -23
- ins_pricing/cli/bayesopt_entry_runner.py +1440 -1438
- ins_pricing/cli/utils/cli_common.py +256 -256
- ins_pricing/cli/utils/cli_config.py +375 -375
- ins_pricing/cli/utils/import_resolver.py +382 -365
- ins_pricing/cli/utils/notebook_utils.py +340 -340
- ins_pricing/cli/watchdog_run.py +209 -201
- ins_pricing/frontend/README.md +573 -419
- ins_pricing/frontend/__init__.py +10 -10
- ins_pricing/frontend/config_builder.py +1 -0
- ins_pricing/frontend/example_workflows.py +1 -1
- ins_pricing/governance/__init__.py +20 -20
- ins_pricing/governance/release.py +159 -159
- ins_pricing/modelling/README.md +67 -0
- ins_pricing/modelling/__init__.py +147 -92
- ins_pricing/modelling/bayesopt/README.md +59 -0
- ins_pricing/modelling/{core/bayesopt → bayesopt}/__init__.py +64 -102
- ins_pricing/modelling/{core/bayesopt → bayesopt}/config_preprocess.py +562 -550
- ins_pricing/modelling/{core/bayesopt → bayesopt}/core.py +965 -962
- ins_pricing/modelling/{core/bayesopt → bayesopt}/model_explain_mixin.py +296 -296
- ins_pricing/modelling/{core/bayesopt → bayesopt}/model_plotting_mixin.py +482 -548
- ins_pricing/modelling/{core/bayesopt → bayesopt}/models/__init__.py +27 -27
- ins_pricing/modelling/{core/bayesopt → bayesopt}/models/model_ft_trainer.py +915 -913
- ins_pricing/modelling/{core/bayesopt → bayesopt}/models/model_gnn.py +788 -785
- ins_pricing/modelling/{core/bayesopt → bayesopt}/models/model_resn.py +448 -446
- ins_pricing/modelling/bayesopt/trainers/__init__.py +19 -0
- ins_pricing/modelling/{core/bayesopt → bayesopt}/trainers/trainer_base.py +1308 -1308
- ins_pricing/modelling/{core/bayesopt → bayesopt}/trainers/trainer_ft.py +3 -3
- ins_pricing/modelling/{core/bayesopt → bayesopt}/trainers/trainer_glm.py +197 -198
- ins_pricing/modelling/{core/bayesopt → bayesopt}/trainers/trainer_gnn.py +344 -344
- ins_pricing/modelling/{core/bayesopt → bayesopt}/trainers/trainer_resn.py +283 -283
- ins_pricing/modelling/{core/bayesopt → bayesopt}/trainers/trainer_xgb.py +346 -347
- ins_pricing/modelling/bayesopt/utils/__init__.py +67 -0
- ins_pricing/modelling/bayesopt/utils/constants.py +21 -0
- ins_pricing/modelling/bayesopt/utils/io_utils.py +7 -0
- ins_pricing/modelling/bayesopt/utils/losses.py +27 -0
- ins_pricing/modelling/bayesopt/utils/metrics_and_devices.py +17 -0
- ins_pricing/modelling/{core/bayesopt → bayesopt}/utils/torch_trainer_mixin.py +623 -623
- ins_pricing/modelling/{core/evaluation.py → evaluation.py} +113 -104
- ins_pricing/modelling/explain/__init__.py +55 -55
- ins_pricing/modelling/explain/metrics.py +27 -174
- ins_pricing/modelling/explain/permutation.py +237 -237
- ins_pricing/modelling/plotting/__init__.py +40 -36
- ins_pricing/modelling/plotting/compat.py +228 -0
- ins_pricing/modelling/plotting/curves.py +572 -572
- ins_pricing/modelling/plotting/diagnostics.py +163 -163
- ins_pricing/modelling/plotting/geo.py +362 -362
- ins_pricing/modelling/plotting/importance.py +121 -121
- ins_pricing/pricing/__init__.py +27 -27
- ins_pricing/production/__init__.py +35 -25
- ins_pricing/production/{predict.py → inference.py} +140 -57
- ins_pricing/production/monitoring.py +8 -21
- ins_pricing/reporting/__init__.py +11 -11
- ins_pricing/setup.py +1 -1
- ins_pricing/tests/production/test_inference.py +90 -0
- ins_pricing/utils/__init__.py +116 -83
- ins_pricing/utils/device.py +255 -255
- ins_pricing/utils/features.py +53 -0
- ins_pricing/utils/io.py +72 -0
- ins_pricing/{modelling/core/bayesopt/utils → utils}/losses.py +125 -129
- ins_pricing/utils/metrics.py +158 -24
- ins_pricing/utils/numerics.py +76 -0
- ins_pricing/utils/paths.py +9 -1
- {ins_pricing-0.4.4.dist-info → ins_pricing-0.5.0.dist-info}/METADATA +55 -35
- ins_pricing-0.5.0.dist-info/RECORD +131 -0
- ins_pricing/CHANGELOG.md +0 -272
- ins_pricing/RELEASE_NOTES_0.2.8.md +0 -344
- ins_pricing/docs/LOSS_FUNCTIONS.md +0 -78
- ins_pricing/docs/modelling/BayesOpt_USAGE.md +0 -945
- ins_pricing/docs/modelling/README.md +0 -34
- ins_pricing/frontend/QUICKSTART.md +0 -152
- ins_pricing/modelling/core/BayesOpt.py +0 -146
- ins_pricing/modelling/core/__init__.py +0 -1
- ins_pricing/modelling/core/bayesopt/PHASE2_REFACTORING_SUMMARY.md +0 -449
- ins_pricing/modelling/core/bayesopt/PHASE3_REFACTORING_SUMMARY.md +0 -406
- ins_pricing/modelling/core/bayesopt/REFACTORING_SUMMARY.md +0 -247
- ins_pricing/modelling/core/bayesopt/trainers/__init__.py +0 -19
- ins_pricing/modelling/core/bayesopt/utils/__init__.py +0 -86
- ins_pricing/modelling/core/bayesopt/utils/constants.py +0 -183
- ins_pricing/modelling/core/bayesopt/utils/io_utils.py +0 -126
- ins_pricing/modelling/core/bayesopt/utils/metrics_and_devices.py +0 -555
- ins_pricing/modelling/core/bayesopt/utils.py +0 -105
- ins_pricing/modelling/core/bayesopt/utils_backup.py +0 -1503
- ins_pricing/tests/production/test_predict.py +0 -233
- ins_pricing-0.4.4.dist-info/RECORD +0 -137
- /ins_pricing/modelling/{core/bayesopt → bayesopt}/config_components.py +0 -0
- /ins_pricing/modelling/{core/bayesopt → bayesopt}/models/model_ft_components.py +0 -0
- /ins_pricing/modelling/{core/bayesopt → bayesopt}/utils/distributed_utils.py +0 -0
- {ins_pricing-0.4.4.dist-info → ins_pricing-0.5.0.dist-info}/WHEEL +0 -0
- {ins_pricing-0.4.4.dist-info → ins_pricing-0.5.0.dist-info}/top_level.txt +0 -0
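
The rename entries above describe a package restructure: the `bayesopt` subpackage moves up one level (`modelling/core/bayesopt` → `modelling/bayesopt`), `modelling/core/evaluation.py` becomes `modelling/evaluation.py`, production scoring moves from `production/predict.py` to `production/inference.py`, and the legacy `modelling/core/BayesOpt.py` compatibility shim is deleted. The sketch below is only an illustration of what that implies for downstream imports; it assumes the public names (for example `BayesOptModel`) are unchanged by the move, which the file list alone does not confirm.

```python
# Hypothetical migration helper, not part of the package: prefer the 0.5.0
# module paths and fall back to the 0.4.4 layout if they are absent.
try:
    # Assumed 0.5.0 layout: the extra "core" level is gone and predict.py
    # has been renamed to inference.py.
    from ins_pricing.modelling.bayesopt.core import BayesOptModel
    from ins_pricing.production import inference as production_api
except ImportError:
    # 0.4.4 layout.
    from ins_pricing.modelling.core.bayesopt.core import BayesOptModel
    from ins_pricing.production import predict as production_api
```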
--- ins_pricing/docs/modelling/README.md
+++ /dev/null
@@ -1,34 +0,0 @@
-# ins_pricing
-
-This directory contains reusable production-grade tooling and training frameworks, with a focus on the BayesOpt series.
-
-Key contents:
-- `core/bayesopt/`: core subpackage (data preprocessing, trainers, models, plotting, explainability)
-- `plotting/`: standalone plotting helpers (lift/roc/importance/geo)
-- `explain/`: explainability helpers (Permutation/Integrated Gradients/SHAP)
-- `core/BayesOpt.py`: compatibility entry point for legacy imports
-- `cli/BayesOpt_entry.py`: batch training CLI
-- `cli/BayesOpt_incremental.py`: incremental training CLI
-- `cli/utils/cli_common.py` / `cli/utils/notebook_utils.py`: shared CLI and notebook utilities
-- `examples/modelling/config_template.json` / `examples/modelling/config_incremental_template.json`: config templates
-- `cli/Explain_entry.py` / `cli/Explain_Run.py`: explainability entry points (load trained models)
-- `examples/modelling/config_explain_template.json` / `examples/modelling/Explain_Run.ipynb`: explainability demo
-
-Note: `examples/modelling/` is kept in the repo only and is not shipped in the PyPI package.
-Migration note: CLI entry points now live under `cli/` and demo assets are under `examples/modelling/`.
-
-Common usage:
-- CLI: `python ins_pricing/cli/BayesOpt_entry.py --config-json ...`
-- Notebook: `from ins_pricing.bayesopt import BayesOptModel`
-
-Explainability (load trained models under `Results/model` and explain a validation set):
-- CLI: `python ins_pricing/cli/Explain_entry.py --config-json ins_pricing/examples/modelling/config_explain_template.json`
-- Notebook: open `ins_pricing/examples/modelling/Explain_Run.ipynb` and run it
-
-Notes:
-- Models load from `output_dir/model` by default (override with `explain.model_dir`).
-- Validation data can be specified via `explain.validation_path`.
-
-Operational notes:
-- Training outputs are written to `plot/`, `Results/`, and `model/` by default.
-- Keep large data and secrets outside the repo and use environment variables or `.env`.
--- ins_pricing/frontend/QUICKSTART.md
+++ /dev/null
@@ -1,152 +0,0 @@
-# Quick Start Guide
-
-Get started with the Insurance Pricing Model Training Frontend in 3 easy steps.
-
-## Prerequisites
-
-1. Install the `ins_pricing` package
-2. Install Gradio:
-```bash
-pip install gradio>=4.0.0
-```
-
-## Step 1: Launch the Application
-
-### On Windows:
-Double-click `start_app.bat` or run:
-```bash
-python -m ins_pricing.frontend.app
-```
-
-### On Linux/Mac:
-Run the shell script:
-```bash
-./start_app.sh
-```
-
-Or use Python directly:
-```bash
-python -m ins_pricing.frontend.app
-```
-
-The web interface will automatically open at `http://localhost:7860`
-
-## Step 2: Configure Your Model
-
-### Option A: Upload Existing Config (Recommended)
-1. Go to the **Configuration** tab
-2. Click **"Upload JSON Config File"**
-3. Select a config file (e.g., `config_xgb_direct.json` from `examples/`)
-4. Click **"Load Config"**
-
-### Option B: Manual Configuration
-1. Go to the **Configuration** tab
-2. Scroll to **"Manual Configuration"**
-3. Fill in the required fields:
-   - **Data Directory**: Path to your data folder
-   - **Model List**: Model name(s)
-   - **Target Column**: Your target variable
-   - **Weight Column**: Your weight variable
-   - **Feature List**: Comma-separated features
-   - **Categorical Features**: Comma-separated categorical features
-4. Adjust other settings as needed
-5. Click **"Build Configuration"**
-
-## Step 3: Run Training
-
-1. Switch to the **Run Task** tab
-2. Click **"Run Task"**
-3. Watch real-time logs appear below
-
-Training will start automatically and logs will update in real-time!
-
-## New Features
-
-### FT Two-Step Workflow
-
-For advanced FT-Transformer → XGB/ResN training:
-
-1. **Prepare Base Config**: Create or load a base configuration
-2. **Go to FT Two-Step Workflow tab**
-3. **Step 1 - FT Embedding Generation**:
-   - Configure DDP settings
-   - Click "Prepare Step 1 Config"
-   - Copy the config to Configuration tab
-   - Run it in "Run Task" tab
-4. **Step 2 - Train XGB/ResN**:
-   - After Step 1 completes, click "Prepare Step 2 Configs"
-   - Choose which models to train (XGB, ResN, or both)
-   - Copy the generated configs and run them
-
-### Open Results Folder
-
-- In the **Run Task** tab, click **"📁 Open Results Folder"**
-- Automatically opens the output directory in your file explorer
-- Works on Windows, macOS, and Linux
-
-## Example Configuration
-
-Here's a minimal example to get started:
-
-```json
-{
-  "data_dir": "./Data",
-  "model_list": ["od"],
-  "model_categories": ["bc"],
-  "target": "response",
-  "weight": "weights",
-  "feature_list": ["age", "gender", "region"],
-  "categorical_features": ["gender", "region"],
-  "runner": {
-    "mode": "entry",
-    "model_keys": ["xgb"],
-    "max_evals": 50
-  }
-}
-```
-
-Save this as `my_first_config.json` and upload it!
-
-## Tips
-
-- **Save Your Config**: After building a configuration, save it using the "Save Configuration" button for reuse
-- **Check Logs**: Training logs update in real-time - watch for errors or progress indicators
-- **GPU Usage**: Toggle "Use GPU" checkbox in Training Settings to enable/disable GPU acceleration
-- **Model Selection**: Specify which models to train in "Model Keys" (xgb, resn, ft, gnn)
-- **Open Results**: Use the "📁 Open Results Folder" button to quickly access output files
-- **FT Workflow**: Use the dedicated FT tab for automated two-step FT → XGB/ResN training
-
-## Troubleshooting
-
-**Problem**: Interface doesn't load
-- **Solution**: Check that port 7860 is not in use, or specify a different port
-
-**Problem**: Configuration validation fails
-- **Solution**: Ensure all required fields are filled and feature lists are properly formatted
-
-**Problem**: Training doesn't start
-- **Solution**: Verify data paths exist and configuration is valid
-
-**Problem**: Results folder won't open
-- **Solution**: Make sure the task has run at least once to create the output directory
-
-**Problem**: Step 2 configs fail to generate
-- **Solution**: Ensure Step 1 completed successfully and embedding files exist
-
-## Next Steps
-
-- Explore advanced options in the Configuration tab
-- Try the FT Two-Step Workflow for better model performance
-- Experiment with different model combinations (xgb, resn, ft)
-- Try different split strategies
-- Use the Explain mode for model interpretability
-- Check the full [README.md](README.md) for detailed documentation
-
-## Support
-
-For issues or questions, refer to:
-- Full documentation: [README.md](README.md)
-- Example configs: `ins_pricing/examples/`
-- Package documentation: `ins_pricing/docs/`
-
-Happy modeling!
--- ins_pricing/modelling/core/BayesOpt.py
+++ /dev/null
@@ -1,146 +0,0 @@
-from __future__ import annotations
-
-import torch
-
-try:
-    from .bayesopt.config_preprocess import (
-        BayesOptConfig,
-        DatasetPreprocessor,
-        OutputManager,
-        VersionManager,
-    )
-    from .bayesopt.core import BayesOptModel
-    from .bayesopt.models import (
-        FeatureTokenizer,
-        FTTransformerCore,
-        FTTransformerSklearn,
-        GraphNeuralNetSklearn,
-        MaskedTabularDataset,
-        ResBlock,
-        ResNetSequential,
-        ResNetSklearn,
-        ScaledTransformerEncoderLayer,
-        SimpleGraphLayer,
-        SimpleGNN,
-        TabularDataset,
-    )
-    from .bayesopt.trainers import (
-        FTTrainer,
-        GLMTrainer,
-        GNNTrainer,
-        ResNetTrainer,
-        TrainerBase,
-        XGBTrainer,
-        _xgb_cuda_available,
-    )
-    from .bayesopt.utils import (
-        EPS,
-        DistributedUtils,
-        IOUtils,
-        PlotUtils,
-        TorchTrainerMixin,
-        TrainingUtils,
-        compute_batch_size,
-        csv_to_dict,
-        ensure_parent_dir,
-        free_cuda,
-        infer_factor_and_cate_list,
-        plot_dlift_list,
-        plot_lift_list,
-        set_global_seed,
-        split_data,
-        tweedie_loss,
-    )
-except ImportError:  # pragma: no cover
-    from bayesopt.config_preprocess import (
-        BayesOptConfig,
-        DatasetPreprocessor,
-        OutputManager,
-        VersionManager,
-    )
-    from bayesopt.core import BayesOptModel
-    from bayesopt.models import (
-        FeatureTokenizer,
-        FTTransformerCore,
-        FTTransformerSklearn,
-        GraphNeuralNetSklearn,
-        MaskedTabularDataset,
-        ResBlock,
-        ResNetSequential,
-        ResNetSklearn,
-        ScaledTransformerEncoderLayer,
-        SimpleGraphLayer,
-        SimpleGNN,
-        TabularDataset,
-    )
-    from bayesopt.trainers import (
-        FTTrainer,
-        GLMTrainer,
-        GNNTrainer,
-        ResNetTrainer,
-        TrainerBase,
-        XGBTrainer,
-        _xgb_cuda_available,
-    )
-    from bayesopt.utils import (
-        EPS,
-        DistributedUtils,
-        IOUtils,
-        PlotUtils,
-        TorchTrainerMixin,
-        TrainingUtils,
-        compute_batch_size,
-        csv_to_dict,
-        ensure_parent_dir,
-        free_cuda,
-        infer_factor_and_cate_list,
-        plot_dlift_list,
-        plot_lift_list,
-        set_global_seed,
-        split_data,
-        tweedie_loss,
-    )
-
-__all__ = [
-    "BayesOptConfig",
-    "DatasetPreprocessor",
-    "OutputManager",
-    "VersionManager",
-    "BayesOptModel",
-    "FeatureTokenizer",
-    "FTTransformerCore",
-    "FTTransformerSklearn",
-    "GraphNeuralNetSklearn",
-    "MaskedTabularDataset",
-    "ResBlock",
-    "ResNetSequential",
-    "ResNetSklearn",
-    "ScaledTransformerEncoderLayer",
-    "SimpleGraphLayer",
-    "SimpleGNN",
-    "TabularDataset",
-    "FTTrainer",
-    "GLMTrainer",
-    "GNNTrainer",
-    "ResNetTrainer",
-    "TrainerBase",
-    "XGBTrainer",
-    "_xgb_cuda_available",
-    "EPS",
-    "DistributedUtils",
-    "IOUtils",
-    "PlotUtils",
-    "TorchTrainerMixin",
-    "TrainingUtils",
-    "compute_batch_size",
-    "csv_to_dict",
-    "ensure_parent_dir",
-    "free_cuda",
-    "infer_factor_and_cate_list",
-    "plot_dlift_list",
-    "plot_lift_list",
-    "set_global_seed",
-    "split_data",
-    "tweedie_loss",
-    "torch",
-]
--- ins_pricing/modelling/core/__init__.py
+++ /dev/null
@@ -1 +0,0 @@
-"""Core modelling modules (bayesopt + evaluation)."""