pg-sui 1.0.2.1-py3-none-any.whl → 1.6.8-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.


Files changed (112)
  1. {pg_sui-1.0.2.1.dist-info → pg_sui-1.6.8.dist-info}/METADATA +51 -70
  2. pg_sui-1.6.8.dist-info/RECORD +78 -0
  3. {pg_sui-1.0.2.1.dist-info → pg_sui-1.6.8.dist-info}/WHEEL +1 -1
  4. pg_sui-1.6.8.dist-info/entry_points.txt +4 -0
  5. pg_sui-1.6.8.dist-info/top_level.txt +1 -0
  6. pgsui/__init__.py +35 -54
  7. pgsui/_version.py +34 -0
  8. pgsui/cli.py +635 -0
  9. pgsui/data_processing/config.py +576 -0
  10. pgsui/data_processing/containers.py +1782 -0
  11. pgsui/data_processing/transformers.py +121 -1103
  12. pgsui/electron/app/__main__.py +5 -0
  13. pgsui/electron/app/icons/icons/1024x1024.png +0 -0
  14. pgsui/electron/app/icons/icons/128x128.png +0 -0
  15. pgsui/electron/app/icons/icons/16x16.png +0 -0
  16. pgsui/electron/app/icons/icons/24x24.png +0 -0
  17. pgsui/electron/app/icons/icons/256x256.png +0 -0
  18. pgsui/electron/app/icons/icons/32x32.png +0 -0
  19. pgsui/electron/app/icons/icons/48x48.png +0 -0
  20. pgsui/electron/app/icons/icons/512x512.png +0 -0
  21. pgsui/electron/app/icons/icons/64x64.png +0 -0
  22. pgsui/electron/app/icons/icons/icon.icns +0 -0
  23. pgsui/electron/app/icons/icons/icon.ico +0 -0
  24. pgsui/electron/app/main.js +189 -0
  25. pgsui/electron/app/package-lock.json +6893 -0
  26. pgsui/electron/app/package.json +50 -0
  27. pgsui/electron/app/preload.js +15 -0
  28. pgsui/electron/app/server.py +146 -0
  29. pgsui/electron/app/ui/logo.png +0 -0
  30. pgsui/electron/app/ui/renderer.js +130 -0
  31. pgsui/electron/app/ui/styles.css +59 -0
  32. pgsui/electron/app/ui/ui_shim.js +72 -0
  33. pgsui/electron/bootstrap.py +43 -0
  34. pgsui/electron/launch.py +59 -0
  35. pgsui/electron/package.json +14 -0
  36. pgsui/example_data/popmaps/{test.popmap → phylogen_nomx.popmap} +185 -99
  37. pgsui/example_data/vcf_files/phylogen_subset14K.vcf.gz +0 -0
  38. pgsui/example_data/vcf_files/phylogen_subset14K.vcf.gz.tbi +0 -0
  39. pgsui/impute/deterministic/imputers/allele_freq.py +691 -0
  40. pgsui/impute/deterministic/imputers/mode.py +679 -0
  41. pgsui/impute/deterministic/imputers/nmf.py +221 -0
  42. pgsui/impute/deterministic/imputers/phylo.py +971 -0
  43. pgsui/impute/deterministic/imputers/ref_allele.py +530 -0
  44. pgsui/impute/supervised/base.py +339 -0
  45. pgsui/impute/supervised/imputers/hist_gradient_boosting.py +293 -0
  46. pgsui/impute/supervised/imputers/random_forest.py +287 -0
  47. pgsui/impute/unsupervised/base.py +924 -0
  48. pgsui/impute/unsupervised/callbacks.py +89 -263
  49. pgsui/impute/unsupervised/imputers/autoencoder.py +972 -0
  50. pgsui/impute/unsupervised/imputers/nlpca.py +1264 -0
  51. pgsui/impute/unsupervised/imputers/ubp.py +1288 -0
  52. pgsui/impute/unsupervised/imputers/vae.py +957 -0
  53. pgsui/impute/unsupervised/loss_functions.py +158 -0
  54. pgsui/impute/unsupervised/models/autoencoder_model.py +208 -558
  55. pgsui/impute/unsupervised/models/nlpca_model.py +149 -468
  56. pgsui/impute/unsupervised/models/ubp_model.py +198 -1317
  57. pgsui/impute/unsupervised/models/vae_model.py +259 -618
  58. pgsui/impute/unsupervised/nn_scorers.py +215 -0
  59. pgsui/utils/classification_viz.py +591 -0
  60. pgsui/utils/misc.py +35 -480
  61. pgsui/utils/plotting.py +514 -824
  62. pgsui/utils/scorers.py +212 -438
  63. pg_sui-1.0.2.1.dist-info/RECORD +0 -75
  64. pg_sui-1.0.2.1.dist-info/top_level.txt +0 -3
  65. pgsui/example_data/phylip_files/test_n10.phy +0 -118
  66. pgsui/example_data/phylip_files/test_n100.phy +0 -118
  67. pgsui/example_data/phylip_files/test_n2.phy +0 -118
  68. pgsui/example_data/phylip_files/test_n500.phy +0 -118
  69. pgsui/example_data/structure_files/test.nopops.1row.10sites.str +0 -117
  70. pgsui/example_data/structure_files/test.nopops.2row.100sites.str +0 -234
  71. pgsui/example_data/structure_files/test.nopops.2row.10sites.str +0 -234
  72. pgsui/example_data/structure_files/test.nopops.2row.30sites.str +0 -234
  73. pgsui/example_data/structure_files/test.nopops.2row.allsites.str +0 -234
  74. pgsui/example_data/structure_files/test.pops.1row.10sites.str +0 -117
  75. pgsui/example_data/structure_files/test.pops.2row.10sites.str +0 -234
  76. pgsui/example_data/trees/test.iqtree +0 -376
  77. pgsui/example_data/trees/test.qmat +0 -5
  78. pgsui/example_data/trees/test.rate +0 -2033
  79. pgsui/example_data/trees/test.tre +0 -1
  80. pgsui/example_data/trees/test_n10.rate +0 -19
  81. pgsui/example_data/trees/test_n100.rate +0 -109
  82. pgsui/example_data/trees/test_n500.rate +0 -509
  83. pgsui/example_data/trees/test_siterates.txt +0 -2024
  84. pgsui/example_data/trees/test_siterates_n10.txt +0 -10
  85. pgsui/example_data/trees/test_siterates_n100.txt +0 -100
  86. pgsui/example_data/trees/test_siterates_n500.txt +0 -500
  87. pgsui/example_data/vcf_files/test.vcf +0 -244
  88. pgsui/example_data/vcf_files/test.vcf.gz +0 -0
  89. pgsui/example_data/vcf_files/test.vcf.gz.tbi +0 -0
  90. pgsui/impute/estimators.py +0 -735
  91. pgsui/impute/impute.py +0 -1486
  92. pgsui/impute/simple_imputers.py +0 -1439
  93. pgsui/impute/supervised/iterative_imputer_fixedparams.py +0 -785
  94. pgsui/impute/supervised/iterative_imputer_gridsearch.py +0 -1027
  95. pgsui/impute/unsupervised/keras_classifiers.py +0 -702
  96. pgsui/impute/unsupervised/models/in_development/cnn_model.py +0 -486
  97. pgsui/impute/unsupervised/neural_network_imputers.py +0 -1424
  98. pgsui/impute/unsupervised/neural_network_methods.py +0 -1549
  99. pgsui/pg_sui.py +0 -261
  100. pgsui/utils/sequence_tools.py +0 -407
  101. simulation/sim_benchmarks.py +0 -333
  102. simulation/sim_treeparams.py +0 -475
  103. test/__init__.py +0 -0
  104. test/pg_sui_simtest.py +0 -215
  105. test/pg_sui_testing.py +0 -523
  106. test/test.py +0 -297
  107. test/test_pgsui.py +0 -374
  108. test/test_tkc.py +0 -214
  109. {pg_sui-1.0.2.1.dist-info → pg_sui-1.6.8.dist-info/licenses}/LICENSE +0 -0
  110. /pgsui/{example_data/trees → electron/app}/__init__.py +0 -0
  111. /pgsui/impute/{unsupervised/models/in_development → supervised/imputers}/__init__.py +0 -0
  112. {simulation → pgsui/impute/unsupervised/imputers}/__init__.py +0 -0

pgsui/impute/supervised/base.py
@@ -0,0 +1,339 @@
+ import json
+ from pathlib import Path
+ from typing import Any, Dict, List
+
+ import matplotlib.pyplot as plt
+ import numpy as np
+ from sklearn.exceptions import NotFittedError
+ from sklearn.experimental import enable_iterative_imputer  # noqa
+ from sklearn.metrics import (
+     accuracy_score,
+     classification_report,
+     f1_score,
+     precision_score,
+     recall_score,
+ )
+ from snpio.utils.logging import LoggerManager
+
+ from pgsui.utils.classification_viz import ClassificationReportVisualizer
+
+
+ class BaseImputer:
+     """A base class for supervised, iterative imputer models.
+
+     This class provides a common framework and shared functionality for imputers that use scikit-learn's `IterativeImputer`. It is not intended for direct instantiation. Child classes should inherit from this class and provide a specific estimator model (e.g., RandomForest, GradientBoosting).
+
+     Notes:
+         - A hyperparameter tuning workflow using Optuna.
+         - Standardized data splitting, model training, and evaluation methods.
+         - Utilities for creating output directories and handling model state.
+     """
+
+     def __init__(self, verbose: bool = False, debug: bool = False) -> None:
+         """Initializes the BaseImputer class.
+
+         The constructor sets up logging and verbosity/debug settings. The class also contains methods that all supervised imputers share.
+
+         Note:
+             Inheriting child classes must define `self.prefix` before calling `super().__init__()`, as it is required for logger initialization.
+
+         Args:
+             verbose (bool): If True, enables detailed logging output. Defaults to False.
+             debug (bool): If True, enables debug mode. Defaults to False.
+         """
+         self.verbose = verbose
+         self.debug = debug
+
+         logman = LoggerManager(
+             __name__, prefix=self.prefix, verbose=self.verbose, debug=self.debug
+         )
+         self.logger = logman.get_logger()
+
+     def _create_model_directories(self, prefix: str, outdirs: List[str]) -> None:
+         """Creates the output directory structure for the imputer.
+
+         This method sets up a standardized folder hierarchy for saving models, plots, metrics, and optimization results, organized by the model's name.
+
+         Args:
+             prefix (str): The prefix for the main output directory.
+             outdirs (List[str]): A list of subdirectories to create (e.g., 'models', 'plots').
+         """
+         base_dir = Path(f"{prefix}_output") / "Supervised"
+         for d in outdirs:
+             subdir = base_dir / d / self.model_name
+             setattr(self, f"{d}_dir", subdir)
+             subdir.mkdir(parents=True, exist_ok=True)
+
+     def _make_class_reports(
+         self,
+         y_true: np.ndarray,
+         y_pred: np.ndarray,
+         metrics: Dict[str, float],
+         y_pred_proba: np.ndarray | None = None,
+         labels: List[str] = ["REF", "HET", "ALT"],
+     ) -> None:
+         """Generate and save detailed classification reports and visualizations.
+
+         3-class (zygosity) or 10-class (IUPAC) depending on `labels` length.
+
+         Args:
+             y_true (np.ndarray): True labels (1D array).
+             y_pred (np.ndarray): Predicted labels (1D array).
+             metrics (Dict[str, float]): Computed metrics.
+             y_pred_proba (np.ndarray | None): Predicted probabilities (2D array). Defaults to None.
+             labels (List[str], optional): Class label names
+                 (default: ["REF", "HET", "ALT"] for 3-class).
+         """
+         report_name = "zygosity" if len(labels) == 3 else "iupac"
+         middle = "IUPAC" if report_name == "iupac" else "Zygosity"
+
+         msg = f"{middle} Report (on {y_true.size} total genotypes)"
+         self.logger.info(msg)
+
+         if y_pred_proba is not None:
+             self.plotter_.plot_metrics(
+                 y_true,
+                 y_pred_proba,
+                 metrics,
+                 label_names=labels,
+                 prefix=report_name,
+             )
+
+         self.plotter_.plot_confusion_matrix(
+             y_true, y_pred, label_names=labels, prefix=report_name
+         )
+
+         self.logger.info(
+             "\n"
+             + classification_report(
+                 y_true,
+                 y_pred,
+                 labels=list(range(len(labels))),
+                 target_names=labels,
+                 zero_division=0,
+             )
+         )
+
+         report = classification_report(
+             y_true,
+             y_pred,
+             labels=list(range(len(labels))),
+             target_names=labels,
+             zero_division=0,
+             output_dict=True,
+         )
+
+         with open(self.metrics_dir / f"{report_name}_report.json", "w") as f:
+             json.dump(report, f, indent=4)
+
+         viz = ClassificationReportVisualizer(reset_kwargs=self.plotter_.param_dict)
+
+         plots = viz.plot_all(
+             report,
+             title_prefix=f"{self.model_name} {middle} Report",
+             show=getattr(self, "show_plots", False),
+             heatmap_classes_only=True,
+         )
+
+         for name, fig in plots.items():
+             fout = self.plots_dir / f"{report_name}_report_{name}.{self.plot_format}"
+             if hasattr(fig, "savefig"):
+                 fig.savefig(fout, dpi=300, facecolor="#111122")
+                 plt.close(fig)
+             else:
+                 fig.write_html(file=fout.with_suffix(".html"))
+
+         viz._reset_mpl_style()
+
+     def _evaluate_012_and_plot(self, y_true: np.ndarray, y_pred: np.ndarray) -> None:
+         """0/1/2 zygosity report & confusion matrix.
+
+         This method generates a classification report and confusion matrix for genotypes encoded as 0, 1, or 2. If the data is haploid, it treats genotypes 1 and 2 as equivalent (presence of the alternate allele).
+
+         Args:
+             y_true (np.ndarray): True genotypes (0/1/2) at the masked positions.
+             y_pred (np.ndarray): Predicted genotypes (0/1/2) at the masked positions.
+
+         Raises:
+             NotFittedError: If fit() has not been called.
+         """
+         labels = [0, 1, 2]
+         # Haploid parity: fold ALT (2) into ALT/Present (1)
+         if self.is_haploid_:
+             y_true[y_true == 2] = 1
+             y_pred[y_pred == 2] = 1
+             labels = [0, 1]
+
+         metrics = {
+             "n_masked_test": int(y_true.size),
+             "accuracy": accuracy_score(y_true, y_pred),
+             "f1": f1_score(
+                 y_true, y_pred, average="macro", labels=labels, zero_division=0
+             ),
+             "precision": precision_score(
+                 y_true, y_pred, average="macro", labels=labels, zero_division=0
+             ),
+             "recall": recall_score(
+                 y_true, y_pred, average="macro", labels=labels, zero_division=0
+             ),
+         }
+
+         metrics.update({f"zygosity_{k}": v for k, v in metrics.items()})
+
+         report_names = ["REF", "HET"] if self.is_haploid_ else ["REF", "HET", "ALT"]
+
+         self.logger.info(
+             f"\n{classification_report(y_true, y_pred, labels=labels, target_names=report_names, zero_division=0)}"
+         )
+
+         report = classification_report(
+             y_true,
+             y_pred,
+             labels=labels,
+             target_names=report_names,
+             zero_division=0,
+             output_dict=True,
+         )
+
+         viz = ClassificationReportVisualizer(reset_kwargs=self.plotter_.param_dict)
+
+         plots = viz.plot_all(
+             report,
+             title_prefix=f"{self.model_name} Zygosity Report",
+             show=getattr(self, "show_plots", False),
+             heatmap_classes_only=True,
+         )
+
+         for name, fig in plots.items():
+             fout = self.plots_dir / f"zygosity_report_{name}.{self.plot_format}"
+             if hasattr(fig, "savefig"):
+                 fig.savefig(fout, dpi=300, facecolor="#111122")
+                 plt.close(fig)
+             else:
+                 fig.write_html(file=fout.with_suffix(".html"))
+
+         viz._reset_mpl_style()
+
+         # Save JSON
+         self._save_report(report, suffix="zygosity")
+
+         # Confusion matrix
+         self.plotter_.plot_confusion_matrix(
+             y_true, y_pred, label_names=report_names, prefix="zygosity"
+         )
+
+     def _evaluate_iupac10_and_plot(
+         self, y_true: np.ndarray, y_pred: np.ndarray
+     ) -> None:
+         """10-class IUPAC report & confusion matrix.
+
+         This method generates a classification report and confusion matrix for genotypes encoded using the 10 IUPAC codes (0-9). The IUPAC codes represent various nucleotide combinations, including ambiguous bases.
+
+         Args:
+             y_true (np.ndarray): True genotypes (0-9) at the masked positions.
+             y_pred (np.ndarray): Predicted genotypes (0-9) at the masked positions.
+
+         Raises:
+             NotFittedError: If fit() has not been called.
+         """
+         labels_idx = list(range(10))
+         labels_names = ["A", "C", "G", "T", "W", "R", "M", "K", "Y", "S"]
+
+         metrics = {
+             "accuracy": accuracy_score(y_true, y_pred),
+             "f1": f1_score(
+                 y_true, y_pred, average="macro", labels=labels_idx, zero_division=0
+             ),
+             "precision": precision_score(
+                 y_true, y_pred, average="macro", labels=labels_idx, zero_division=0
+             ),
+             "recall": recall_score(
+                 y_true, y_pred, average="macro", labels=labels_idx, zero_division=0
+             ),
+         }
+         metrics.update({f"iupac_{k}": v for k, v in metrics.items()})
+
+         self.logger.info(
+             f"\n{classification_report(y_true, y_pred, labels=labels_idx, target_names=labels_names, zero_division=0)}"
+         )
+
+         report = classification_report(
+             y_true,
+             y_pred,
+             labels=labels_idx,
+             target_names=labels_names,
+             zero_division=0,
+             output_dict=True,
+         )
+
+         viz = ClassificationReportVisualizer(reset_kwargs=self.plotter_.param_dict)
+
+         plots = viz.plot_all(
+             report,
+             title_prefix=f"{self.model_name} IUPAC Report",
+             show=getattr(self, "show_plots", False),
+             heatmap_classes_only=True,
+         )
+
+         # Reset the style from Optuna's plotting.
+         plt.rcParams.update(self.plotter_.param_dict)
+
+         for name, fig in plots.items():
+             fout = self.plots_dir / f"iupac_report_{name}.{self.plot_format}"
+             if hasattr(fig, "savefig"):
+                 fig.savefig(fout, dpi=300, facecolor="#111122")
+                 plt.close(fig)
+             else:
+                 fig.write_html(file=fout.with_suffix(".html"))
+
+         # Reset the style
+         viz._reset_mpl_style()
+
+         # Save JSON
+         self._save_report(report, suffix="iupac")
+
+         # Confusion matrix
+         self.plotter_.plot_confusion_matrix(
+             y_true, y_pred, label_names=labels_names, prefix="iupac"
+         )
+
+     def _save_report(self, report_dict: Dict[str, float], suffix: str) -> None:
+         """Save classification report dictionary as a JSON file.
+
+         This method saves the provided classification report dictionary to a JSON file in the metrics directory, appending the specified suffix to the filename.
+
+         Args:
+             report_dict (Dict[str, float]): The classification report dictionary to save.
+             suffix (str): Suffix to append to the filename (e.g., 'zygosity' or 'iupac').
+
+         Raises:
+             NotFittedError: If fit() has not been called.
+         """
+         if not self.is_fit_:
+             msg = "No report to save. Ensure fit() has been called."
+             raise NotFittedError(msg)
+
+         out_fp = self.metrics_dir / f"classification_report_{suffix}.json"
+
+         with open(out_fp, "w") as f:
+             json.dump(report_dict, f, indent=4)
+
+         self.logger.info(f"{self.model_name} {suffix} report saved to {out_fp}.")
+
+     def _save_best_params(self, best_params: Dict[str, Any]) -> None:
+         """Save the best hyperparameters to a JSON file.
+
+         This method saves the best hyperparameters found during hyperparameter tuning to a JSON file in the parameters directory. The model-specific subdirectory created by `_create_model_directories()` keeps each run easy to identify.
+
+         Args:
+             best_params (Dict[str, Any]): A dictionary of the best hyperparameters to save.
+         """
+         if not hasattr(self, "parameters_dir"):
+             msg = "Attribute 'parameters_dir' not found. Ensure _create_model_directories() has been called."
+             self.logger.error(msg)
+             raise AttributeError(msg)
+
+         fout = self.parameters_dir / "best_parameters.json"
+
+         with open(fout, "w") as f:
+             json.dump(best_params, f, indent=4)
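
The `BaseImputer` docstrings above spell out a subclass contract: define `self.prefix` (and `self.model_name`) before calling `super().__init__()`, then create the output directories. The minimal sketch below illustrates that contract; the class name `ImputeExampleForest`, its constructor arguments, and the use of `RandomForestClassifier` are illustrative assumptions, not part of the published pg-sui API.

# Hypothetical subclass sketch; only BaseImputer's own attributes and methods
# are taken from the hunk above.
from sklearn.ensemble import RandomForestClassifier

from pgsui.impute.supervised.base import BaseImputer


class ImputeExampleForest(BaseImputer):
    """Toy subclass showing the contract BaseImputer expects."""

    def __init__(self, prefix: str = "pgsui", verbose: bool = False) -> None:
        # BaseImputer.__init__ builds its logger from self.prefix, so these
        # attributes must exist before super().__init__() is called.
        self.prefix = prefix
        self.model_name = "ImputeExampleForest"
        self.Model = RandomForestClassifier
        super().__init__(verbose=verbose, debug=False)

        # Creates e.g. <prefix>_output/Supervised/plots/ImputeExampleForest/
        # and sets self.models_dir, self.plots_dir, self.metrics_dir, self.parameters_dir.
        self._create_model_directories(
            self.prefix, ["models", "plots", "metrics", "parameters"]
        )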

pgsui/impute/supervised/imputers/hist_gradient_boosting.py
@@ -0,0 +1,293 @@
+ # Standard library
+ from __future__ import annotations
+
+ from typing import TYPE_CHECKING, Any, Dict, Generator, List
+
+ # Third-party
+ import numpy as np
+ from sklearn.ensemble import HistGradientBoostingClassifier
+ from sklearn.exceptions import NotFittedError
+ from sklearn.experimental import enable_iterative_imputer  # noqa
+ from sklearn.impute import IterativeImputer
+ from sklearn.model_selection import train_test_split
+
+ # Project
+ from snpio.analysis.genotype_encoder import GenotypeEncoder
+ from snpio.utils.logging import LoggerManager
+
+ from pgsui.data_processing.config import apply_dot_overrides, load_yaml_to_dataclass
+ from pgsui.data_processing.containers import (
+     HGBConfig,
+     _HGBParams,
+     _ImputerParams,
+     _SimParams,
+ )
+ from pgsui.data_processing.transformers import SimGenotypeDataTransformer
+ from pgsui.impute.supervised.base import BaseImputer
+ from pgsui.utils.plotting import Plotting
+ from pgsui.utils.scorers import Scorer
+
+ if TYPE_CHECKING:
+     from snpio.read_input.genotype_data import GenotypeData
+
+
+ def ensure_hgb_config(config: HGBConfig | Dict | str | None) -> HGBConfig:
+     """Resolve HGB configuration from dataclass, mapping, or YAML path."""
+
+     if config is None:
+         return HGBConfig()
+     if isinstance(config, HGBConfig):
+         return config
+     if isinstance(config, str):
+         return load_yaml_to_dataclass(
+             config, HGBConfig, preset_builder=HGBConfig.from_preset
+         )
+     if isinstance(config, dict):
+         payload = dict(config)
+         preset = payload.pop("preset", None)
+         base = HGBConfig.from_preset(preset) if preset else HGBConfig()
+
+         def _flatten(prefix: str, data: Dict[str, Any], out: Dict[str, Any]) -> None:
+             for key, value in data.items():
+                 dotted = f"{prefix}.{key}" if prefix else key
+                 if isinstance(value, dict):
+                     _flatten(dotted, value, out)
+                 else:
+                     out[dotted] = value
+
+         flat: Dict[str, Any] = {}
+         _flatten("", payload, flat)
+         return apply_dot_overrides(base, flat)
+
+     raise TypeError("config must be an HGBConfig, dict, YAML path, or None.")
+
+
+ class ImputeHistGradientBoosting(BaseImputer):
+     """Supervised HGB imputer driven by :class:`HGBConfig`."""
+
+     def __init__(
+         self,
+         genotype_data: "GenotypeData",
+         *,
+         config: HGBConfig | Dict | str | None = None,
+         overrides: Dict | None = None,
+     ) -> None:
+         self.model_name = "ImputeHistGradientBoosting"
+         self.Model = HistGradientBoostingClassifier
+
+         cfg = ensure_hgb_config(config)
+         if overrides:
+             cfg = cfg.apply_overrides(overrides)
+         self.cfg = cfg
+
+         self.genotype_data = genotype_data
+         self.pgenc = GenotypeEncoder(genotype_data)
+
+         self.prefix = cfg.io.prefix
+         self.seed = cfg.io.seed
+         self.n_jobs = cfg.io.n_jobs
+         self.verbose = cfg.io.verbose
+         self.debug = cfg.io.debug
+
+         super().__init__(verbose=self.verbose, debug=self.debug)
+
+         logman = LoggerManager(
+             __name__, prefix=self.prefix, verbose=self.verbose, debug=self.debug
+         )
+         self.logger = logman.get_logger()
+
+         self._create_model_directories(
+             self.prefix, ["models", "plots", "metrics", "optimize", "parameters"]
+         )
+
+         self.plot_format = cfg.plot.fmt
+         if self.plot_format.startswith("."):
+             self.plot_format = self.plot_format.lstrip(".")
+         self.plot_fontsize = cfg.plot.fontsize
+         self.title_fontsize = cfg.plot.fontsize
+         self.plot_dpi = cfg.plot.dpi
+         self.despine = cfg.plot.despine
+         self.show_plots = cfg.plot.show
+
+         self.validation_split = cfg.train.validation_split
+
+         class_weight = getattr(cfg.model, "class_weight", "balanced")
+         self.params = _HGBParams(
+             max_iter=cfg.model.n_estimators,
+             learning_rate=cfg.model.learning_rate,
+             max_depth=cfg.model.max_depth,
+             min_samples_leaf=cfg.model.min_samples_leaf,
+             max_features=cfg.model.max_features,
+             n_iter_no_change=cfg.model.n_iter_no_change,
+             tol=cfg.model.tol,
+             class_weight=class_weight,
+             random_state=self.seed,
+             verbose=self.debug,
+         )
+
+         self.imputer_params = _ImputerParams(
+             n_nearest_features=cfg.imputer.n_nearest_features,
+             max_iter=cfg.imputer.max_iter,
+             random_state=self.seed,
+             verbose=self.verbose,
+         )
+
+         self.sim_params = _SimParams(
+             prop_missing=cfg.sim.prop_missing,
+             strategy=cfg.sim.strategy,
+             missing_val=cfg.sim.missing_val,
+             het_boost=cfg.sim.het_boost,
+             seed=self.seed,
+         )
+
+         self.max_iter = cfg.imputer.max_iter
+         self.n_nearest_features = cfg.imputer.n_nearest_features
+
+         # Will be set in fit()
+         self.is_haploid_: bool | None = None
+         self.num_classes_: int | None = None
+         self.num_features_: int | None = None
+         self.models_: List[HistGradientBoostingClassifier | None] | None = None
+         self.is_fit_: bool = False
+
+     def fit(self) -> "BaseImputer":
+         """Fit the imputer on `self.genotype_data`; no arguments are required.
+
+         This method encodes the genotypes to 0/1/2, splits the samples into training and test sets, simulates additional missingness on the test set, and trains a scikit-learn `IterativeImputer` that uses a `HistGradientBoostingClassifier` as its estimator. The simulated-missing positions of the test set provide an unbiased evaluation of imputation accuracy.
+
+         Steps:
+             1) Encode to 0/1/2 with -9/-1 as missing.
+             2) Split samples into train/test sets.
+             3) Train the IterativeImputer on the training set (missing values converted to NaN).
+             4) Evaluate on the simulated-missing positions of the test set and generate reports and plots via `_evaluate_012_and_plot()` and `_evaluate_iupac10_and_plot()`.
+
+         Returns:
+             BaseImputer: self.
+         """
+         # Prepare utilities & metadata
+         self.scorers_ = Scorer(
+             prefix=self.prefix, average="macro", verbose=self.verbose, debug=self.debug
+         )
+
+         self.plotter_ = Plotting(
+             self.model_name,
+             prefix=self.prefix,
+             plot_format=self.plot_format,
+             plot_dpi=self.plot_dpi,
+             plot_fontsize=self.plot_fontsize,
+             title_fontsize=self.title_fontsize,
+             despine=self.despine,
+             show_plots=self.show_plots,
+             verbose=self.verbose,
+             debug=self.debug,
+         )
+
+         X_int = self.pgenc.genotypes_012
+         self.X012_ = X_int.astype(float)
+         self.X012_[self.X012_ < 0] = np.nan  # Ensure missing are NaN
+         self.is_haploid_ = np.count_nonzero(self.X012_ == 1) == 0
+         self.num_classes_ = 2 if self.is_haploid_ else 3
+         self.n_samples_, self.n_features_ = X_int.shape
+
+         # Split
+         X_train, X_test = train_test_split(
+             self.X012_,
+             test_size=self.validation_split,
+             random_state=self.seed,
+             shuffle=True,
+         )
+
+         # Simulate missing values on test set.
+         sim_transformer = SimGenotypeDataTransformer(**self.sim_params.to_dict())
+
+         X_test = np.nan_to_num(X_test, nan=-1)  # ensure missing are -1
+         sim_transformer.fit(X_test)
+         X_test_sim, missing_masks = sim_transformer.transform(X_test)
+         sim_mask = missing_masks["simulated"]
+         X_test_sim[X_test_sim < 0] = np.nan  # ensure missing are NaN
+
+         self.model_params_ = self.params.to_dict()
+         self.model_params_["random_state"] = self.seed
+
+         # Train IterativeImputer
+         est = self.Model(**self.model_params_)
+
+         self.imputer_ = IterativeImputer(estimator=est, **self.imputer_params.to_dict())
+
+         self.imputer_.fit(X_train)
+         self.is_fit_ = True
+
+         X_test_imputed = self.imputer_.transform(X_test_sim)
+
+         # Collect true and predicted values at the simulated-missing positions
+         y_true_flat = X_test[sim_mask].copy()
+         y_pred_flat = X_test_imputed[sim_mask].copy()
+
+         # Round and clip predictions to valid {0,1,2} or {0,1} if haploid.
+         if self.is_haploid_:
+             y_pred_flat = np.clip(np.rint(y_pred_flat), 0, 1).astype(int, copy=False)
+             y_true_flat = np.clip(np.rint(y_true_flat), 0, 1).astype(int, copy=False)
+         else:
+             y_pred_flat = np.clip(np.rint(y_pred_flat), 0, 2).astype(int, copy=False)
+             y_true_flat = np.clip(np.rint(y_true_flat), 0, 2).astype(int, copy=False)
+
+         # Evaluate (012 / zygosity)
+         self._evaluate_012_and_plot(y_true_flat.copy(), y_pred_flat.copy())
+
+         # Evaluate (IUPAC)
+         encodings_dict = {
+             "A": 0,
+             "C": 1,
+             "G": 2,
+             "T": 3,
+             "W": 4,
+             "R": 5,
+             "M": 6,
+             "K": 7,
+             "Y": 8,
+             "S": 9,
+             "N": -1,
+         }
+
+         y_true_iupac_tmp = self.pgenc.decode_012(y_true_flat)
+         y_pred_iupac_tmp = self.pgenc.decode_012(y_pred_flat)
+         y_true_iupac = self.pgenc.convert_int_iupac(
+             y_true_iupac_tmp, encodings_dict=encodings_dict
+         )
+         y_pred_iupac = self.pgenc.convert_int_iupac(
+             y_pred_iupac_tmp, encodings_dict=encodings_dict
+         )
+         self._evaluate_iupac10_and_plot(y_true_iupac, y_pred_iupac)
+
+         self.best_params_ = self.model_params_
+         self.best_params_.update(self.imputer_params.to_dict())
+         self.best_params_.update(self.sim_params.to_dict())
+         self._save_best_params(self.best_params_)
+
+         return self
+
+     def transform(self) -> np.ndarray:
+         """Impute all samples and return imputed genotypes.
+
+         This method applies the trained imputer to the entire dataset, filling in missing genotype values. Any values that remain missing after imputation are set to -9, and the imputed 0/1/2 genotypes are decoded back to their original representation.
+
+         Returns:
+             np.ndarray: (n_samples, n_loci) imputed genotypes decoded from the 0/1/2 encoding; values that could not be imputed are set to -9.
+
+         Raises:
+             NotFittedError: If fit() has not been called prior to transform().
+         """
+         if not self.is_fit_:
+             msg = "Imputer has not been fit; call fit() before transform()."
+             self.logger.error(msg)
+             raise NotFittedError(msg)
+
+         X = self.X012_.copy()
+         X_imp = self.imputer_.transform(X)
+
+         if np.any(X_imp < 0) or np.isnan(X_imp).any():
+             self.logger.warning("Some imputed values are still missing; setting to -9.")
+             X_imp[X_imp < 0] = -9
+             X_imp[np.isnan(X_imp)] = -9
+
+         return self.pgenc.decode_012(X_imp)
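
For completeness, here is a hedged usage sketch of `ImputeHistGradientBoosting` based only on the constructor, `fit()`, and `transform()` shown above. It assumes `genotype_data` is an already-loaded SNPio `GenotypeData` object, and the nested config keys mirror the `cfg.io.*`, `cfg.model.*`, `cfg.imputer.*`, and `cfg.sim.*` fields read in `__init__`; the full `HGBConfig` schema lives in `pgsui/data_processing/containers.py`, which is not part of this diff, so treat the exact keys as assumptions.

# Usage sketch, not a verbatim example from the package documentation.
from pgsui.impute.supervised.imputers.hist_gradient_boosting import (
    ImputeHistGradientBoosting,
)

# genotype_data = <a snpio GenotypeData object loaded elsewhere, e.g. from a VCF>

# Nested dict config; ensure_hgb_config() flattens it to dotted overrides
# (e.g. "model.learning_rate") and applies them on top of the default HGBConfig.
config = {
    "io": {"prefix": "hgb_run", "seed": 42, "verbose": True},
    "model": {"learning_rate": 0.1, "max_depth": 3},
    "imputer": {"max_iter": 10, "n_nearest_features": 10},
    "sim": {"prop_missing": 0.2},
}

imputer = ImputeHistGradientBoosting(genotype_data, config=config)
imputer.fit()  # trains the IterativeImputer and writes reports/plots under <prefix>_output/Supervised/
X_imputed = imputer.transform()  # imputes every sample; residual missing values become -9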