stouputils 1.14.0__py3-none-any.whl → 1.14.2__py3-none-any.whl

This diff covers the content of publicly available package versions released to one of the supported registries. It is provided for informational purposes only and reflects the changes between the two versions as they appear in their respective public registries.
Files changed (108)
  1. stouputils/__init__.pyi +15 -0
  2. stouputils/_deprecated.pyi +12 -0
  3. stouputils/all_doctests.pyi +46 -0
  4. stouputils/applications/__init__.pyi +2 -0
  5. stouputils/applications/automatic_docs.py +3 -0
  6. stouputils/applications/automatic_docs.pyi +106 -0
  7. stouputils/applications/upscaler/__init__.pyi +3 -0
  8. stouputils/applications/upscaler/config.pyi +18 -0
  9. stouputils/applications/upscaler/image.pyi +109 -0
  10. stouputils/applications/upscaler/video.pyi +60 -0
  11. stouputils/archive.pyi +67 -0
  12. stouputils/backup.pyi +109 -0
  13. stouputils/collections.pyi +86 -0
  14. stouputils/continuous_delivery/__init__.pyi +5 -0
  15. stouputils/continuous_delivery/cd_utils.pyi +129 -0
  16. stouputils/continuous_delivery/github.pyi +162 -0
  17. stouputils/continuous_delivery/pypi.pyi +52 -0
  18. stouputils/continuous_delivery/pyproject.pyi +67 -0
  19. stouputils/continuous_delivery/stubs.pyi +39 -0
  20. stouputils/ctx.pyi +211 -0
  21. stouputils/data_science/config/get.py +51 -51
  22. stouputils/data_science/data_processing/image/__init__.py +66 -66
  23. stouputils/data_science/data_processing/image/auto_contrast.py +79 -79
  24. stouputils/data_science/data_processing/image/axis_flip.py +58 -58
  25. stouputils/data_science/data_processing/image/bias_field_correction.py +74 -74
  26. stouputils/data_science/data_processing/image/binary_threshold.py +73 -73
  27. stouputils/data_science/data_processing/image/blur.py +59 -59
  28. stouputils/data_science/data_processing/image/brightness.py +54 -54
  29. stouputils/data_science/data_processing/image/canny.py +110 -110
  30. stouputils/data_science/data_processing/image/clahe.py +92 -92
  31. stouputils/data_science/data_processing/image/common.py +30 -30
  32. stouputils/data_science/data_processing/image/contrast.py +53 -53
  33. stouputils/data_science/data_processing/image/curvature_flow_filter.py +74 -74
  34. stouputils/data_science/data_processing/image/denoise.py +378 -378
  35. stouputils/data_science/data_processing/image/histogram_equalization.py +123 -123
  36. stouputils/data_science/data_processing/image/invert.py +64 -64
  37. stouputils/data_science/data_processing/image/laplacian.py +60 -60
  38. stouputils/data_science/data_processing/image/median_blur.py +52 -52
  39. stouputils/data_science/data_processing/image/noise.py +59 -59
  40. stouputils/data_science/data_processing/image/normalize.py +65 -65
  41. stouputils/data_science/data_processing/image/random_erase.py +66 -66
  42. stouputils/data_science/data_processing/image/resize.py +69 -69
  43. stouputils/data_science/data_processing/image/rotation.py +80 -80
  44. stouputils/data_science/data_processing/image/salt_pepper.py +68 -68
  45. stouputils/data_science/data_processing/image/sharpening.py +55 -55
  46. stouputils/data_science/data_processing/image/shearing.py +64 -64
  47. stouputils/data_science/data_processing/image/threshold.py +64 -64
  48. stouputils/data_science/data_processing/image/translation.py +71 -71
  49. stouputils/data_science/data_processing/image/zoom.py +83 -83
  50. stouputils/data_science/data_processing/image_augmentation.py +118 -118
  51. stouputils/data_science/data_processing/image_preprocess.py +183 -183
  52. stouputils/data_science/data_processing/prosthesis_detection.py +359 -359
  53. stouputils/data_science/data_processing/technique.py +481 -481
  54. stouputils/data_science/dataset/__init__.py +45 -45
  55. stouputils/data_science/dataset/dataset.py +292 -292
  56. stouputils/data_science/dataset/dataset_loader.py +135 -135
  57. stouputils/data_science/dataset/grouping_strategy.py +296 -296
  58. stouputils/data_science/dataset/image_loader.py +100 -100
  59. stouputils/data_science/dataset/xy_tuple.py +696 -696
  60. stouputils/data_science/metric_dictionnary.py +106 -106
  61. stouputils/data_science/mlflow_utils.py +206 -206
  62. stouputils/data_science/models/abstract_model.py +149 -149
  63. stouputils/data_science/models/all.py +85 -85
  64. stouputils/data_science/models/keras/all.py +38 -38
  65. stouputils/data_science/models/keras/convnext.py +62 -62
  66. stouputils/data_science/models/keras/densenet.py +50 -50
  67. stouputils/data_science/models/keras/efficientnet.py +60 -60
  68. stouputils/data_science/models/keras/mobilenet.py +56 -56
  69. stouputils/data_science/models/keras/resnet.py +52 -52
  70. stouputils/data_science/models/keras/squeezenet.py +233 -233
  71. stouputils/data_science/models/keras/vgg.py +42 -42
  72. stouputils/data_science/models/keras/xception.py +38 -38
  73. stouputils/data_science/models/keras_utils/callbacks/__init__.py +20 -20
  74. stouputils/data_science/models/keras_utils/callbacks/colored_progress_bar.py +219 -219
  75. stouputils/data_science/models/keras_utils/callbacks/learning_rate_finder.py +148 -148
  76. stouputils/data_science/models/keras_utils/callbacks/model_checkpoint_v2.py +31 -31
  77. stouputils/data_science/models/keras_utils/callbacks/progressive_unfreezing.py +249 -249
  78. stouputils/data_science/models/keras_utils/callbacks/warmup_scheduler.py +66 -66
  79. stouputils/data_science/models/keras_utils/losses/__init__.py +12 -12
  80. stouputils/data_science/models/keras_utils/losses/next_generation_loss.py +56 -56
  81. stouputils/data_science/models/keras_utils/visualizations.py +416 -416
  82. stouputils/data_science/models/sandbox.py +116 -116
  83. stouputils/data_science/range_tuple.py +234 -234
  84. stouputils/data_science/utils.py +285 -285
  85. stouputils/decorators.pyi +242 -0
  86. stouputils/image.pyi +172 -0
  87. stouputils/installer/__init__.py +18 -18
  88. stouputils/installer/__init__.pyi +5 -0
  89. stouputils/installer/common.pyi +39 -0
  90. stouputils/installer/downloader.pyi +24 -0
  91. stouputils/installer/linux.py +144 -144
  92. stouputils/installer/linux.pyi +39 -0
  93. stouputils/installer/main.py +223 -223
  94. stouputils/installer/main.pyi +57 -0
  95. stouputils/installer/windows.py +136 -136
  96. stouputils/installer/windows.pyi +31 -0
  97. stouputils/io.pyi +213 -0
  98. stouputils/parallel.py +12 -10
  99. stouputils/parallel.pyi +211 -0
  100. stouputils/print.pyi +136 -0
  101. stouputils/py.typed +1 -1
  102. stouputils/stouputils/parallel.pyi +4 -4
  103. stouputils/version_pkg.pyi +15 -0
  104. {stouputils-1.14.0.dist-info → stouputils-1.14.2.dist-info}/METADATA +1 -1
  105. stouputils-1.14.2.dist-info/RECORD +171 -0
  106. stouputils-1.14.0.dist-info/RECORD +0 -140
  107. {stouputils-1.14.0.dist-info → stouputils-1.14.2.dist-info}/WHEEL +0 -0
  108. {stouputils-1.14.0.dist-info → stouputils-1.14.2.dist-info}/entry_points.txt +0 -0
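
Most of the new files above are .pyi type stubs added alongside existing modules, which is why they appear as pure additions (+N -0): a stub declares a module's public signatures and types without any runtime bodies. As an illustration only (this is not the actual contents of stouputils/parallel.pyi; the signatures are guessed from the call site in image_preprocess.py below), such a stub could look like:

# Illustrative sketch only, NOT the actual stouputils/parallel.pyi.
# A .pyi stub mirrors a module's public interface without implementations,
# so adding one shows up in a diff as pure additions (+N -0).
from collections.abc import Callable, Iterable
from typing import Any, TypeVar

R = TypeVar("R")

CPU_COUNT: int

def multiprocessing(
    func: Callable[..., R],
    args: Iterable[Any],
    use_starmap: bool = ...,
    desc: str = ...,
    max_workers: int = ...,
) -> list[R]: ...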
@@ -1,183 +1,183 @@
stouputils/data_science/data_processing/image_preprocess.py: the removed and re-added sides of this hunk are identical as rendered here, so the file content is shown once.

# Imports
import os
import shutil
from typing import Any

import cv2
import numpy as np
from numpy.typing import NDArray

from ...decorators import handle_error
from ...parallel import multiprocessing, CPU_COUNT
from ...print import warning, error
from ...io import clean_path, super_copy
from .technique import ProcessingTechnique


# Image dataset preprocessing class
class ImageDatasetPreprocess:
    """ Image dataset preprocessing class. Check the class constructor for more information. """

    # Class constructor (configuration)
    def __init__(self, techniques: list[ProcessingTechnique] | None = None) -> None:
        """ Initialize the image dataset preprocessing class with the given parameters.

        Args:
            techniques (list[ProcessingTechnique]): List of processing techniques to apply.
        """
        if techniques is None:
            techniques = []
        assert all(isinstance(x, ProcessingTechnique) for x in techniques), (
            "All techniques must be ProcessingTechnique objects"
        )
        self.techniques: list[ProcessingTechnique] = [x.deterministic(use_default=True) for x in techniques]

    @handle_error(message="Error while getting files recursively")
    def get_files_recursively(
        self,
        source: str,
        destination: str,
        extensions: tuple[str, ...] = (".jpg", ".jpeg", ".png", ".bmp", ".tiff", ".tif")
    ) -> dict[str, str]:
        """ Recursively get all files in a directory and their destinations.

        Args:
            source (str): Path to the source directory
            destination (str): Path to the destination directory
            extensions (tuple[str, ...]): Tuple of extensions to consider (e.g. (".jpg", ".png"))
        Returns:
            dict[str, str]: Dictionary mapping source paths to destination paths
        """
        files: dict[str, str] = {}

        if os.path.isfile(source) and source.endswith(extensions):
            files[source] = destination
        elif os.path.isdir(source):
            for item in os.listdir(source):
                item_path: str = f"{source}/{item}"
                item_dest: str = f"{destination}/{item}"
                files.update(self.get_files_recursively(item_path, item_dest, extensions))
        return files


    @handle_error(message="Error while getting queue of files to process")
    def get_queue(self, dataset_path: str, destination_path: str) -> list[tuple[str, str, list[ProcessingTechnique]]]:
        """ Get the queue of images to process with their techniques.

        This method converts the processing technique ranges to fixed values and builds a queue
        of files to process by recursively finding all images in the dataset path.

        Args:
            dataset_path (str): Path to the dataset directory
            destination_path (str): Path to the destination directory where processed images will be saved
        Returns:
            list[tuple[str, str, list[ProcessingTechnique]]]: Queue of (source_path, dest_path, techniques) tuples
        """
        # Convert technique ranges to fixed values
        self.techniques = [x.deterministic(use_default=True) for x in self.techniques]

        # Build queue by recursively finding all images and their destinations
        return [
            (path, dest, self.techniques)
            for path, dest in self.get_files_recursively(dataset_path, destination_path).items()
        ]


    @handle_error(message="Error while processing the dataset")
    def process_dataset(
        self,
        dataset_path: str,
        destination_path: str,
        max_workers: int = CPU_COUNT,
        ignore_confirmation: bool = False
    ) -> None:
        """ Preprocess the dataset by applying the given processing techniques to the images.

        Args:
            dataset_path (str): Path to the dataset
            destination_path (str): Path to the destination dataset
            max_workers (int): Number of workers to use (defaults to CPU_COUNT)
            ignore_confirmation (bool): If True, don't ask for confirmation
        """
        # Clean paths
        dataset_path = clean_path(dataset_path)
        destination_path = clean_path(destination_path)

        # If the destination folder exists, ask the user whether to delete it
        if os.path.isdir(destination_path):
            if not ignore_confirmation:
                warning(f"Destination folder '{destination_path}' already exists.\nDo you want to delete it? (y/N)")
                if input().lower() == "y":
                    shutil.rmtree(destination_path)
                else:
                    error("Aborting...", exit=False)
                    return
            else:
                warning(f"Destination folder '{destination_path}' already exists.\nDeleting it...")
                shutil.rmtree(destination_path)

        # Prepare the multiprocessing arguments (image path, destination path, techniques)
        queue: list[tuple[str, str, list[ProcessingTechnique]]] = self.get_queue(dataset_path, destination_path)

        # Apply the processing techniques in parallel
        splitted: list[str] = dataset_path.split('/')
        short_path: str = f".../{splitted[-1]}" if len(splitted) > 2 else dataset_path
        multiprocessing(
            self.apply_techniques,
            queue,
            use_starmap=True,
            desc=f"Processing dataset '{short_path}'",
            max_workers=max_workers
        )


    @staticmethod
    def apply_techniques(path: str, dest: str, techniques: list[ProcessingTechnique], use_padding: bool = True) -> None:
        """ Apply the processing techniques to the image.

        Args:
            path (str): Path to the image
            dest (str): Path to the destination image
            techniques (list[ProcessingTechnique]): List of processing techniques to apply
            use_padding (bool): If True, add padding to the image before applying techniques
        """
        if not techniques:
            super_copy(path, dest)
            return

        # Read the image
        img: NDArray[Any] = cv2.imread(path, cv2.IMREAD_UNCHANGED)

        if use_padding:
            # Add padding (to avoid cutting the image); assumes a 3-channel image
            previous_shape: tuple[int, ...] = img.shape[:2]
            padding: int = max(previous_shape[0], previous_shape[1]) // 2
            img = np.pad(  # pyright: ignore [reportUnknownMemberType]
                img,
                pad_width=((padding, padding), (padding, padding), (0, 0)),
                mode="constant",
                constant_values=0
            )

            # Compute the dividers that will be used to adjust technique parameters
            dividers: tuple[float, float] = (
                img.shape[0] / previous_shape[0],
                img.shape[1] / previous_shape[1]
            )
        else:
            dividers = (1.0, 1.0)
            padding = 0

        # Apply the processing techniques
        for technique in techniques:
            img = technique.apply(img, dividers)

        # Remove the padding
        if use_padding:
            img = img[padding:-padding, padding:-padding, :]

        # Save the image
        os.makedirs(os.path.dirname(dest), exist_ok=True)
        cv2.imwrite(dest, img)
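
For context, here is a minimal usage sketch of the class above, based only on the code shown in this diff. The import path follows the file's location in the wheel; since ProcessingTechnique's constructor is not shown here, the technique list is left empty, which makes apply_techniques fall back to plain copying via super_copy.

# Minimal usage sketch for ImageDatasetPreprocess (hedged: based only on the
# code shown above). With an empty technique list, each image is copied as-is.
from stouputils.data_science.data_processing.image_preprocess import ImageDatasetPreprocess

preprocessor = ImageDatasetPreprocess(techniques=[])
preprocessor.process_dataset(
    dataset_path="data/raw",            # searched recursively for image files
    destination_path="data/processed",  # mirrored directory tree for outputs
    ignore_confirmation=True,           # delete an existing destination without prompting
)

Note the padding arithmetic in apply_techniques: each side is padded by half the larger dimension, so a square H x H input becomes roughly 2H x 2H and dividers evaluates to about (2.0, 2.0), the factor by which each technique rescales its pixel-space parameters.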