dragon-ml-toolbox 6.3.0__py3-none-any.whl → 6.4.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Note: this version of dragon-ml-toolbox has been flagged as a potentially problematic release.

@@ -1,6 +1,6 @@
  Metadata-Version: 2.4
  Name: dragon-ml-toolbox
- Version: 6.3.0
+ Version: 6.4.0
  Summary: A collection of tools for data science and machine learning projects.
  Author-email: Karl Loza <luigiloza@gmail.com>
  License-Expression: MIT
@@ -240,6 +240,7 @@ pip install "dragon-ml-toolbox[gui-torch,plot]"
  ```Bash
  custom_logger
  GUI_tools
+ ML_models
  ML_inference
  path_manager
  ```
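The new entry above documents that ML_models now ships alongside the other modules listed for the `[gui-torch,plot]` install. A minimal import check, assuming only the top-level package name `ml_tools` given in this wheel's top_level.txt and RECORD:

```python
# Sanity check that the newly listed module is importable.
# Assumes the wheel installs a single top-level package named ml_tools,
# as shown in top_level.txt and the RECORD diff below.
from ml_tools import ML_models, ML_inference

print(ML_models.__file__)
print(ML_inference.__file__)
```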
@@ -1,13 +1,13 @@
- dragon_ml_toolbox-6.3.0.dist-info/licenses/LICENSE,sha256=2uUFNy7D0TLgHim1K5s3DIJ4q_KvxEXVilnU20cWliY,1066
- dragon_ml_toolbox-6.3.0.dist-info/licenses/LICENSE-THIRD-PARTY.md,sha256=lY4_rJPnLnMu7YBQaY-_iz1JRDcLdQzNCyeLAF1glJY,1837
+ dragon_ml_toolbox-6.4.0.dist-info/licenses/LICENSE,sha256=2uUFNy7D0TLgHim1K5s3DIJ4q_KvxEXVilnU20cWliY,1066
+ dragon_ml_toolbox-6.4.0.dist-info/licenses/LICENSE-THIRD-PARTY.md,sha256=lY4_rJPnLnMu7YBQaY-_iz1JRDcLdQzNCyeLAF1glJY,1837
  ml_tools/ETL_engineering.py,sha256=4wwZXi9_U7xfCY70jGBaKniOeZ0m75ppxWpQBd_DmLc,39369
  ml_tools/GUI_tools.py,sha256=n4ZZ5kEjwK5rkOCFJE41HeLFfjhpJVLUSzk9Kd9Kr_0,45410
  ml_tools/MICE_imputation.py,sha256=oFHg-OytOzPYTzBR_wIRHhP71cMn3aupDeT59ABsXlQ,11576
  ml_tools/ML_callbacks.py,sha256=noedVMmHZ72Odbg28zqx5wkhhvX2v-jXicKE_NCAiqU,13838
  ml_tools/ML_datasetmaster.py,sha256=98dAfP-i7BjhGpmGSaxtuUOZeiUN_8KpjBwZEmPCpgk,35485
  ml_tools/ML_evaluation.py,sha256=-Z5fXQi2ou6l5Oyir06bO90SZIZVrjQfgoVAqKgSjks,13800
- ml_tools/ML_inference.py,sha256=B3gPb19DXwl4iHjYew5r5SlYLODdDFda90BlbpuUEqs,5928
- ml_tools/ML_models.py,sha256=4xEjtic5xrt_FD2bWgB_Bis13fDmy3UVUrP7N9cyMLQ,10375
+ ml_tools/ML_inference.py,sha256=62F5RPC19bTHXUMTjnj2KMMg-wJdhLdVZDw--xJyiwM,12715
+ ml_tools/ML_models.py,sha256=QBPlu5d6QCKh-rlUJOAR3qVdFgOFqEzRPv1jXvRdOsw,10380
  ml_tools/ML_optimization.py,sha256=GX-qZ2mCI3gWRCTP5w7lXrZpfGle3J_mE0O68seIoio,13475
  ml_tools/ML_trainer.py,sha256=1q_CDXuMfndRsPuNofUn2mg2TlhG6MYuGqjWxTDgN9c,15112
  ml_tools/PSO_optimization.py,sha256=9Y074d-B5h4Wvp9YPiy6KAeXM-Yv6Il3gWalKvOLVgo,22705
@@ -27,7 +27,7 @@ ml_tools/keys.py,sha256=HtPG8-MWh89C32A7eIlfuuA-DLwkxGkoDfwR2TGN9CQ,1074
  ml_tools/optimization_tools.py,sha256=EL5tgNFwRo-82pbRE1CFVy9noNhULD7wprWuKadPheg,5090
  ml_tools/path_manager.py,sha256=Z8e7w3MPqQaN8xmTnKuXZS6CIW59BFwwqGhGc00sdp4,13692
  ml_tools/utilities.py,sha256=LqXXTovaHbA5AOKRk6Ru6DgAPAM0wPfYU70kUjYBryo,19231
- dragon_ml_toolbox-6.3.0.dist-info/METADATA,sha256=5C-wcadr6Ej8vNO1nLsvSGMbtQw4xzxz6fCxMekvDKY,6728
- dragon_ml_toolbox-6.3.0.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
- dragon_ml_toolbox-6.3.0.dist-info/top_level.txt,sha256=wm-oxax3ciyez6VoO4zsFd-gSok2VipYXnbg3TH9PtU,9
- dragon_ml_toolbox-6.3.0.dist-info/RECORD,,
+ dragon_ml_toolbox-6.4.0.dist-info/METADATA,sha256=jzs_BIaUzjLYIMUOVOgDl2qkyO-Z7Q00rZLZDzkxBkQ,6738
+ dragon_ml_toolbox-6.4.0.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
+ dragon_ml_toolbox-6.4.0.dist-info/top_level.txt,sha256=wm-oxax3ciyez6VoO4zsFd-gSok2VipYXnbg3TH9PtU,9
+ dragon_ml_toolbox-6.4.0.dist-info/RECORD,,
ml_tools/ML_inference.py CHANGED
@@ -10,7 +10,9 @@ from .path_manager import make_fullpath
  from .keys import PyTorchInferenceKeys
  
  __all__ = [
-     "PyTorchInferenceHandler"
+     "PyTorchInferenceHandler",
+     "multi_inference_regression",
+     "multi_inference_classification"
  ]
  
  class PyTorchInferenceHandler:
@@ -131,10 +133,155 @@ class PyTorchInferenceHandler:
          else: # classification
              return {
                  PyTorchInferenceKeys.LABELS: tensor_results[PyTorchInferenceKeys.LABELS].item(),
-                 # Move tensor to CPU before converting to NumPy
+                 # Move tensor to CPU before converting to NumPy
                  PyTorchInferenceKeys.PROBABILITIES: tensor_results[PyTorchInferenceKeys.PROBABILITIES].cpu().numpy()
              }
- 
+ 
+ 
+ def multi_inference_regression(handlers: list[PyTorchInferenceHandler],
+                                feature_vector: Union[np.ndarray, torch.Tensor],
+                                output: Literal["numpy","torch"]="numpy") -> dict[str,Any]:
+     """
+     Performs regression inference using multiple models on a single feature vector.
+ 
+     This function iterates through a list of PyTorchInferenceHandler objects,
+     each configured for a different regression target. It runs a prediction for
+     each handler using the same input feature vector and returns the results
+     in a dictionary.
+ 
+     The function adapts its behavior based on the input dimensions:
+     - 1D input: Returns a dictionary mapping target ID to a single value.
+     - 2D input: Returns a dictionary mapping target ID to a list of values.
+ 
+     Args:
+         handlers (list[PyTorchInferenceHandler]): A list of initialized inference
+             handlers. Each handler must have a unique `target_id` and be configured with `task="regression"`.
+         feature_vector (Union[np.ndarray, torch.Tensor]): An input sample (1D) or a batch of samples (2D) to be fed into each regression model.
+         output (Literal["numpy", "torch"], optional): The desired format for the output predictions.
+             - "numpy": Returns predictions as Python scalars or NumPy arrays.
+             - "torch": Returns predictions as PyTorch tensors.
+ 
+     Returns:
+         (dict[str, Any]): A dictionary mapping each handler's `target_id` to its
+             predicted regression values.
+ 
+     Raises:
+         AttributeError: If any handler in the list is missing a `target_id`.
+         ValueError: If any handler's `task` is not 'regression' or if the input `feature_vector` is not 1D or 2D.
+     """
+     # check batch dimension
+     is_single_sample = feature_vector.ndim == 1
+ 
+     # Reshape a 1D vector to a 2D batch of one for uniform processing.
+     if is_single_sample:
+         feature_vector = feature_vector.reshape(1, -1)
+ 
+     # Validate that the input is a 2D tensor.
+     if feature_vector.ndim != 2:
+         raise ValueError("Input feature_vector must be a 1D or 2D array/tensor.")
+ 
+     results: dict[str,Any] = dict()
+     for handler in handlers:
+         # validation
+         if handler.target_id is None:
+             raise AttributeError("All inference handlers must have a 'target_id' attribute.")
+         if handler.task != "regression":
+             raise ValueError(
+                 f"Invalid task type: The handler for target_id '{handler.target_id}' "
+                 f"is for '{handler.task}', but only 'regression' tasks are supported."
+             )
+         # inference
+         if output == "numpy":
+             result = handler.predict_batch_numpy(feature_vector)[PyTorchInferenceKeys.PREDICTIONS]
+         else: # torch
+             result = handler.predict_batch(feature_vector)[PyTorchInferenceKeys.PREDICTIONS]
+ 
+         # Unpack single results and update result dictionary
+         # If the original input was 1D, extract the single prediction from the array.
+         if is_single_sample:
+             results[handler.target_id] = result[0]
+         else:
+             results[handler.target_id] = result
+ 
+     return results
+ 
+ 
+ def multi_inference_classification(
+         handlers: list[PyTorchInferenceHandler],
+         feature_vector: Union[np.ndarray, torch.Tensor],
+         output: Literal["numpy","torch"]="numpy"
+ ) -> tuple[dict[str, Any], dict[str, Any]]:
+     """
+     Performs classification inference on a single sample or a batch.
+ 
+     This function iterates through a list of PyTorchInferenceHandler objects,
+     each configured for a different classification target. It returns two
+     dictionaries: one for the predicted labels and one for the probabilities.
+ 
+     The function adapts its behavior based on the input dimensions:
+     - 1D input: The dictionaries map target ID to a single label and a single probability array.
+     - 2D input: The dictionaries map target ID to an array of labels and an array of probability arrays.
+ 
+     Args:
+         handlers (list[PyTorchInferenceHandler]): A list of initialized inference handlers. Each must have a unique `target_id` and be configured
+             with `task="classification"`.
+         feature_vector (Union[np.ndarray, torch.Tensor]): An input sample (1D)
+             or a batch of samples (2D) for prediction.
+         output (Literal["numpy", "torch"], optional): The desired format for the
+             output predictions.
+ 
+     Returns:
+         (tuple[dict[str, Any], dict[str, Any]]): A tuple containing two dictionaries:
+             1. A dictionary mapping `target_id` to the predicted label(s).
+             2. A dictionary mapping `target_id` to the prediction probabilities.
+ 
+     Raises:
+         AttributeError: If any handler in the list is missing a `target_id`.
+         ValueError: If any handler's `task` is not 'classification' or if the input `feature_vector` is not 1D or 2D.
+     """
+     # Store if the original input was a single sample
+     is_single_sample = feature_vector.ndim == 1
+ 
+     # Reshape a 1D vector to a 2D batch of one for uniform processing
+     if is_single_sample:
+         feature_vector = feature_vector.reshape(1, -1)
+ 
+     if feature_vector.ndim != 2:
+         raise ValueError("Input feature_vector must be a 1D or 2D array/tensor.")
+ 
+     # Initialize two dictionaries for results
+     labels_results: dict[str, Any] = dict()
+     probs_results: dict[str, Any] = dict()
+ 
+     for handler in handlers:
+         # Validation
+         if handler.target_id is None:
+             raise AttributeError("All inference handlers must have a 'target_id' attribute.")
+         if handler.task != "classification":
+             raise ValueError(
+                 f"Invalid task type: The handler for target_id '{handler.target_id}' "
+                 f"is for '{handler.task}', but this function only supports 'classification'."
+             )
+ 
+         # Always use the batch method to get both labels and probabilities
+         if output == "numpy":
+             result = handler.predict_batch_numpy(feature_vector)
+         else: # torch
+             result = handler.predict_batch(feature_vector)
+ 
+         labels = result[PyTorchInferenceKeys.LABELS]
+         probabilities = result[PyTorchInferenceKeys.PROBABILITIES]
+ 
+         # If the original input was 1D, unpack the single result from the batch array
+         if is_single_sample:
+             labels_results[handler.target_id] = labels[0]
+             probs_results[handler.target_id] = probabilities[0]
+         else:
+             labels_results[handler.target_id] = labels
+             probs_results[handler.target_id] = probabilities
+ 
+     return labels_results, probs_results
+ 
  
  def info():
      _script_info(__all__)
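For orientation, here is a hedged usage sketch of the two helpers added above. Only the helper signatures, return shapes, and `output` options come from this diff; the `PyTorchInferenceHandler` constructor arguments shown (`model`, `state_dict`, `task`, `target_id`) are illustrative assumptions.

```python
# Hedged sketch: the handler constructor kwargs below are assumed, not taken from this diff.
import numpy as np
import torch.nn as nn
from ml_tools.ML_inference import (
    PyTorchInferenceHandler,
    multi_inference_regression,
    multi_inference_classification,
)

n_features = 12
sample = np.random.rand(n_features).astype(np.float32)  # single 1D sample

# One handler per target; every handler consumes the same feature vector.
reg_handlers = [
    PyTorchInferenceHandler(model=nn.Linear(n_features, 1), state_dict="weights/target_a.pth",
                            task="regression", target_id="target_a"),
    PyTorchInferenceHandler(model=nn.Linear(n_features, 1), state_dict="weights/target_b.pth",
                            task="regression", target_id="target_b"),
]
clf_handlers = [
    PyTorchInferenceHandler(model=nn.Linear(n_features, 3), state_dict="weights/target_c.pth",
                            task="classification", target_id="target_c"),
]

# 1D input -> one value per target_id; a 2D batch would yield one array per target_id.
predictions = multi_inference_regression(reg_handlers, sample, output="numpy")

# Classification returns two dicts keyed by target_id: labels and probabilities.
labels, probabilities = multi_inference_classification(clf_handlers, sample, output="numpy")
```

Per the docstrings, passing a 2D batch instead of `sample` returns arrays rather than scalars under the same `target_id` keys.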
ml_tools/ML_models.py CHANGED
@@ -223,7 +223,7 @@ def save_architecture(model: nn.Module, directory: Union[str, Path], verbose: bo
          json.dump(config, f, indent=4)
  
      if verbose:
-         _LOGGER.info(f"✅ Architecture for '{model.__class__.__name__}' saved to '{path_dir}'")
+         _LOGGER.info(f"✅ Architecture for '{model.__class__.__name__}' saved to '{path_dir.name}'")
  
  
  def load_architecture(filepath: Union[str, Path], expected_model_class: type, verbose: bool=True) -> nn.Module:
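The only functional change in this hunk is that the success log now reports the directory name (`path_dir.name`) rather than the full path. For context, a hedged round-trip sketch of the two functions visible here; the saved JSON filename and the stand-in model are assumptions, only the two signatures appear in the diff.

```python
# Hedged sketch of the save/load round trip around the logging change above.
# The "architecture.json" filename is assumed, and a plain nn.Linear is only a
# stand-in: real usage presumably requires a model class defined in ML_models.
import torch.nn as nn
from ml_tools.ML_models import save_architecture, load_architecture

model = nn.Linear(10, 1)  # placeholder architecture

# After this release the log line reports only the folder name, e.g. 'saved_models'.
save_architecture(model, directory="saved_models", verbose=True)

# Rebuild the model from the saved config, checking it against the expected class.
restored = load_architecture("saved_models/architecture.json",
                             expected_model_class=nn.Linear,
                             verbose=True)
```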