dragon-ml-toolbox 6.3.0__py3-none-any.whl → 6.4.1__py3-none-any.whl

This diff shows the changes between two publicly released versions of this package as published to a supported registry. It is provided for informational purposes only and reflects the package contents exactly as they appear in the public registry.

Potentially problematic release.


This version of dragon-ml-toolbox has been flagged as potentially problematic; see the registry's advisory page for this release for more details.

@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.4
2
2
  Name: dragon-ml-toolbox
3
- Version: 6.3.0
3
+ Version: 6.4.1
4
4
  Summary: A collection of tools for data science and machine learning projects.
5
5
  Author-email: Karl Loza <luigiloza@gmail.com>
6
6
  License-Expression: MIT
@@ -240,6 +240,7 @@ pip install "dragon-ml-toolbox[gui-torch,plot]"
240
240
  ```Bash
241
241
  custom_logger
242
242
  GUI_tools
243
+ ML_models
243
244
  ML_inference
244
245
  path_manager
245
246
  ```
@@ -1,13 +1,13 @@
1
- dragon_ml_toolbox-6.3.0.dist-info/licenses/LICENSE,sha256=2uUFNy7D0TLgHim1K5s3DIJ4q_KvxEXVilnU20cWliY,1066
2
- dragon_ml_toolbox-6.3.0.dist-info/licenses/LICENSE-THIRD-PARTY.md,sha256=lY4_rJPnLnMu7YBQaY-_iz1JRDcLdQzNCyeLAF1glJY,1837
1
+ dragon_ml_toolbox-6.4.1.dist-info/licenses/LICENSE,sha256=2uUFNy7D0TLgHim1K5s3DIJ4q_KvxEXVilnU20cWliY,1066
2
+ dragon_ml_toolbox-6.4.1.dist-info/licenses/LICENSE-THIRD-PARTY.md,sha256=lY4_rJPnLnMu7YBQaY-_iz1JRDcLdQzNCyeLAF1glJY,1837
3
3
  ml_tools/ETL_engineering.py,sha256=4wwZXi9_U7xfCY70jGBaKniOeZ0m75ppxWpQBd_DmLc,39369
4
4
  ml_tools/GUI_tools.py,sha256=n4ZZ5kEjwK5rkOCFJE41HeLFfjhpJVLUSzk9Kd9Kr_0,45410
5
5
  ml_tools/MICE_imputation.py,sha256=oFHg-OytOzPYTzBR_wIRHhP71cMn3aupDeT59ABsXlQ,11576
6
6
  ml_tools/ML_callbacks.py,sha256=noedVMmHZ72Odbg28zqx5wkhhvX2v-jXicKE_NCAiqU,13838
7
7
  ml_tools/ML_datasetmaster.py,sha256=98dAfP-i7BjhGpmGSaxtuUOZeiUN_8KpjBwZEmPCpgk,35485
8
8
  ml_tools/ML_evaluation.py,sha256=-Z5fXQi2ou6l5Oyir06bO90SZIZVrjQfgoVAqKgSjks,13800
9
- ml_tools/ML_inference.py,sha256=B3gPb19DXwl4iHjYew5r5SlYLODdDFda90BlbpuUEqs,5928
10
- ml_tools/ML_models.py,sha256=4xEjtic5xrt_FD2bWgB_Bis13fDmy3UVUrP7N9cyMLQ,10375
9
+ ml_tools/ML_inference.py,sha256=xp3o2p-l9bUvLPoNatqudok0qz43Tl38pTtDowO5YFE,13554
10
+ ml_tools/ML_models.py,sha256=QBPlu5d6QCKh-rlUJOAR3qVdFgOFqEzRPv1jXvRdOsw,10380
11
11
  ml_tools/ML_optimization.py,sha256=GX-qZ2mCI3gWRCTP5w7lXrZpfGle3J_mE0O68seIoio,13475
12
12
  ml_tools/ML_trainer.py,sha256=1q_CDXuMfndRsPuNofUn2mg2TlhG6MYuGqjWxTDgN9c,15112
13
13
  ml_tools/PSO_optimization.py,sha256=9Y074d-B5h4Wvp9YPiy6KAeXM-Yv6Il3gWalKvOLVgo,22705
@@ -27,7 +27,7 @@ ml_tools/keys.py,sha256=HtPG8-MWh89C32A7eIlfuuA-DLwkxGkoDfwR2TGN9CQ,1074
27
27
  ml_tools/optimization_tools.py,sha256=EL5tgNFwRo-82pbRE1CFVy9noNhULD7wprWuKadPheg,5090
28
28
  ml_tools/path_manager.py,sha256=Z8e7w3MPqQaN8xmTnKuXZS6CIW59BFwwqGhGc00sdp4,13692
29
29
  ml_tools/utilities.py,sha256=LqXXTovaHbA5AOKRk6Ru6DgAPAM0wPfYU70kUjYBryo,19231
30
- dragon_ml_toolbox-6.3.0.dist-info/METADATA,sha256=5C-wcadr6Ej8vNO1nLsvSGMbtQw4xzxz6fCxMekvDKY,6728
31
- dragon_ml_toolbox-6.3.0.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
32
- dragon_ml_toolbox-6.3.0.dist-info/top_level.txt,sha256=wm-oxax3ciyez6VoO4zsFd-gSok2VipYXnbg3TH9PtU,9
33
- dragon_ml_toolbox-6.3.0.dist-info/RECORD,,
30
+ dragon_ml_toolbox-6.4.1.dist-info/METADATA,sha256=l9pA5HMky3zBNxje4m-fTGRGlaJdqhpBO4cH59SclGg,6738
31
+ dragon_ml_toolbox-6.4.1.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
32
+ dragon_ml_toolbox-6.4.1.dist-info/top_level.txt,sha256=wm-oxax3ciyez6VoO4zsFd-gSok2VipYXnbg3TH9PtU,9
33
+ dragon_ml_toolbox-6.4.1.dist-info/RECORD,,
ml_tools/ML_inference.py CHANGED
@@ -10,7 +10,9 @@ from .path_manager import make_fullpath
10
10
  from .keys import PyTorchInferenceKeys
11
11
 
12
12
  __all__ = [
13
- "PyTorchInferenceHandler"
13
+ "PyTorchInferenceHandler",
14
+ "multi_inference_regression",
15
+ "multi_inference_classification"
14
16
  ]
15
17
 
16
18
  class PyTorchInferenceHandler:
@@ -131,10 +133,172 @@ class PyTorchInferenceHandler:
131
133
  else: # classification
132
134
  return {
133
135
  PyTorchInferenceKeys.LABELS: tensor_results[PyTorchInferenceKeys.LABELS].item(),
134
- # Move tensor to CPU before converting to NumPy
136
+ # Move tensor to CPU before converting to NumPy
135
137
  PyTorchInferenceKeys.PROBABILITIES: tensor_results[PyTorchInferenceKeys.PROBABILITIES].cpu().numpy()
136
138
  }
137
-
139
+
140
+
141
+ def multi_inference_regression(handlers: list[PyTorchInferenceHandler],
142
+ feature_vector: Union[np.ndarray, torch.Tensor],
143
+ output: Literal["numpy","torch"]="numpy") -> dict[str,Any]:
144
+ """
145
+ Performs regression inference using multiple models on a single feature vector.
146
+
147
+ This function iterates through a list of PyTorchInferenceHandler objects,
148
+ each configured for a different regression target. It runs a prediction for
149
+ each handler using the same input feature vector and returns the results
150
+ in a dictionary.
151
+
152
+ The function adapts its behavior based on the input dimensions:
153
+ - 1D input: Returns a dictionary mapping target ID to a single value.
154
+ - 2D input: Returns a dictionary mapping target ID to a list of values.
155
+
156
+ Args:
157
+ handlers (list[PyTorchInferenceHandler]): A list of initialized inference
158
+ handlers. Each handler must have a unique `target_id` and be configured with `task="regression"`.
159
+ feature_vector (Union[np.ndarray, torch.Tensor]): An input sample (1D) or a batch of samples (2D) to be fed into each regression model.
160
+ output (Literal["numpy", "torch"], optional): The desired format for the output predictions.
161
+ - "numpy": Returns predictions as Python scalars or NumPy arrays.
162
+ - "torch": Returns predictions as PyTorch tensors.
163
+
164
+ Returns:
165
+ (dict[str, Any]): A dictionary mapping each handler's `target_id` to its
166
+ predicted regression values.
167
+
168
+ Raises:
169
+ AttributeError: If any handler in the list is missing a `target_id`.
170
+ ValueError: If any handler's `task` is not 'regression' or if the input `feature_vector` is not 1D or 2D.
171
+ """
172
+ # check batch dimension
173
+ is_single_sample = feature_vector.ndim == 1
174
+
175
+ # Reshape a 1D vector to a 2D batch of one for uniform processing.
176
+ if is_single_sample:
177
+ feature_vector = feature_vector.reshape(1, -1)
178
+
179
+ # Validate that the input is a 2D tensor.
180
+ if feature_vector.ndim != 2:
181
+ raise ValueError("Input feature_vector must be a 1D or 2D array/tensor.")
182
+
183
+ results: dict[str,Any] = dict()
184
+ for handler in handlers:
185
+ # validation
186
+ if handler.target_id is None:
187
+ raise AttributeError("All inference handlers must have a 'target_id' attribute.")
188
+ if handler.task != "regression":
189
+ raise ValueError(
190
+ f"Invalid task type: The handler for target_id '{handler.target_id}' "
191
+ f"is for '{handler.task}', but only 'regression' tasks are supported."
192
+ )
193
+
194
+ # inference
195
+ if output == "numpy":
196
+ # This path returns NumPy arrays or standard Python scalars
197
+ numpy_result = handler.predict_batch_numpy(feature_vector)[PyTorchInferenceKeys.PREDICTIONS]
198
+ if is_single_sample:
199
+ # For a single sample, convert the 1-element array to a Python scalar
200
+ results[handler.target_id] = numpy_result.item()
201
+ else:
202
+ # For a batch, return the full NumPy array of predictions
203
+ results[handler.target_id] = numpy_result
204
+
205
+ else: # output == "torch"
206
+ # This path returns PyTorch tensors on the model's device
207
+ torch_result = handler.predict_batch(feature_vector)[PyTorchInferenceKeys.PREDICTIONS]
208
+ if is_single_sample:
209
+ # For a single sample, return the 0-dim tensor
210
+ results[handler.target_id] = torch_result[0]
211
+ else:
212
+ # For a batch, return the full tensor of predictions
213
+ results[handler.target_id] = torch_result
214
+
215
+ return results
216
+
217
+
218
+ def multi_inference_classification(
219
+ handlers: list[PyTorchInferenceHandler],
220
+ feature_vector: Union[np.ndarray, torch.Tensor],
221
+ output: Literal["numpy","torch"]="numpy"
222
+ ) -> tuple[dict[str, Any], dict[str, Any]]:
223
+ """
224
+ Performs classification inference on a single sample or a batch.
225
+
226
+ This function iterates through a list of PyTorchInferenceHandler objects,
227
+ each configured for a different classification target. It returns two
228
+ dictionaries: one for the predicted labels and one for the probabilities.
229
+
230
+ The function adapts its behavior based on the input dimensions:
231
+ - 1D input: The dictionaries map target ID to a single label and a single probability array.
232
+ - 2D input: The dictionaries map target ID to an array of labels and an array of probability arrays.
233
+
234
+ Args:
235
+ handlers (list[PyTorchInferenceHandler]): A list of initialized inference handlers. Each must have a unique `target_id` and be configured
236
+ with `task="classification"`.
237
+ feature_vector (Union[np.ndarray, torch.Tensor]): An input sample (1D)
238
+ or a batch of samples (2D) for prediction.
239
+ output (Literal["numpy", "torch"], optional): The desired format for the
240
+ output predictions.
241
+
242
+ Returns:
243
+ (tuple[dict[str, Any], dict[str, Any]]): A tuple containing two dictionaries:
244
+ 1. A dictionary mapping `target_id` to the predicted label(s).
245
+ 2. A dictionary mapping `target_id` to the prediction probabilities.
246
+
247
+ Raises:
248
+ AttributeError: If any handler in the list is missing a `target_id`.
249
+ ValueError: If any handler's `task` is not 'classification' or if the input `feature_vector` is not 1D or 2D.
250
+ """
251
+ # Store if the original input was a single sample
252
+ is_single_sample = feature_vector.ndim == 1
253
+
254
+ # Reshape a 1D vector to a 2D batch of one for uniform processing
255
+ if is_single_sample:
256
+ feature_vector = feature_vector.reshape(1, -1)
257
+
258
+ if feature_vector.ndim != 2:
259
+ raise ValueError("Input feature_vector must be a 1D or 2D array/tensor.")
260
+
261
+ # Initialize two dictionaries for results
262
+ labels_results: dict[str, Any] = dict()
263
+ probs_results: dict[str, Any] = dict()
264
+
265
+ for handler in handlers:
266
+ # Validation
267
+ if handler.target_id is None:
268
+ raise AttributeError("All inference handlers must have a 'target_id' attribute.")
269
+ if handler.task != "classification":
270
+ raise ValueError(
271
+ f"Invalid task type: The handler for target_id '{handler.target_id}' "
272
+ f"is for '{handler.task}', but this function only supports 'classification'."
273
+ )
274
+
275
+ # Inference
276
+ if output == "numpy":
277
+ # predict_batch_numpy returns a dict of NumPy arrays
278
+ result = handler.predict_batch_numpy(feature_vector)
279
+ else: # torch
280
+ # predict_batch returns a dict of Torch tensors
281
+ result = handler.predict_batch(feature_vector)
282
+
283
+ labels = result[PyTorchInferenceKeys.LABELS]
284
+ probabilities = result[PyTorchInferenceKeys.PROBABILITIES]
285
+
286
+ if is_single_sample:
287
+ # For "numpy", convert the single label to a Python int scalar.
288
+ # For "torch", get the 0-dim tensor label.
289
+ if output == "numpy":
290
+ labels_results[handler.target_id] = labels.item()
291
+ else: # torch
292
+ labels_results[handler.target_id] = labels[0]
293
+
294
+ # The probabilities are an array/tensor of values
295
+ probs_results[handler.target_id] = probabilities[0]
296
+ else:
297
+ labels_results[handler.target_id] = labels
298
+ probs_results[handler.target_id] = probabilities
299
+
300
+ return labels_results, probs_results
301
+
138
302
 
139
303
  def info():
140
304
  _script_info(__all__)
ml_tools/ML_models.py CHANGED
@@ -223,7 +223,7 @@ def save_architecture(model: nn.Module, directory: Union[str, Path], verbose: bo
223
223
  json.dump(config, f, indent=4)
224
224
 
225
225
  if verbose:
226
- _LOGGER.info(f"✅ Architecture for '{model.__class__.__name__}' saved to '{path_dir}'")
226
+ _LOGGER.info(f"✅ Architecture for '{model.__class__.__name__}' saved to '{path_dir.name}'")
227
227
 
228
228
 
229
229
  def load_architecture(filepath: Union[str, Path], expected_model_class: type, verbose: bool=True) -> nn.Module: