stouputils 1.7.0.tar.gz → 1.7.2.tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- {stouputils-1.7.0 → stouputils-1.7.2}/PKG-INFO +1 -1
- {stouputils-1.7.0 → stouputils-1.7.2}/pyproject.toml +1 -1
- {stouputils-1.7.0 → stouputils-1.7.2}/stouputils/all_doctests.py +6 -2
- {stouputils-1.7.0 → stouputils-1.7.2}/stouputils/archive.py +3 -3
- {stouputils-1.7.0 → stouputils-1.7.2}/stouputils/collections.py +65 -7
- {stouputils-1.7.0 → stouputils-1.7.2}/stouputils/collections.pyi +13 -1
- {stouputils-1.7.0 → stouputils-1.7.2}/stouputils/continuous_delivery/cd_utils.py +7 -5
- {stouputils-1.7.0 → stouputils-1.7.2}/stouputils/continuous_delivery/github.py +6 -3
- {stouputils-1.7.0 → stouputils-1.7.2}/stouputils/continuous_delivery/pyproject.py +2 -2
- stouputils-1.7.2/stouputils/image.py +225 -0
- stouputils-1.7.2/stouputils/image.pyi +112 -0
- {stouputils-1.7.0 → stouputils-1.7.2}/stouputils/io.py +49 -34
- {stouputils-1.7.0 → stouputils-1.7.2}/stouputils/io.pyi +2 -0
- {stouputils-1.7.0 → stouputils-1.7.2}/stouputils/parallel.py +16 -9
- {stouputils-1.7.0 → stouputils-1.7.2}/stouputils/parallel.pyi +1 -1
- {stouputils-1.7.0 → stouputils-1.7.2}/stouputils/print.py +2 -3
- {stouputils-1.7.0 → stouputils-1.7.2}/stouputils/print.pyi +1 -1
- stouputils-1.7.0/stouputils/image.py +0 -95
- stouputils-1.7.0/stouputils/image.pyi +0 -44
- {stouputils-1.7.0 → stouputils-1.7.2}/.gitignore +0 -0
- {stouputils-1.7.0 → stouputils-1.7.2}/LICENSE +0 -0
- {stouputils-1.7.0 → stouputils-1.7.2}/README.md +0 -0
- {stouputils-1.7.0 → stouputils-1.7.2}/stouputils/__init__.py +0 -0
- {stouputils-1.7.0 → stouputils-1.7.2}/stouputils/__init__.pyi +0 -0
- {stouputils-1.7.0 → stouputils-1.7.2}/stouputils/__main__.py +0 -0
- {stouputils-1.7.0 → stouputils-1.7.2}/stouputils/all_doctests.pyi +0 -0
- {stouputils-1.7.0 → stouputils-1.7.2}/stouputils/applications/__init__.py +0 -0
- {stouputils-1.7.0 → stouputils-1.7.2}/stouputils/applications/__init__.pyi +0 -0
- {stouputils-1.7.0 → stouputils-1.7.2}/stouputils/applications/automatic_docs.py +0 -0
- {stouputils-1.7.0 → stouputils-1.7.2}/stouputils/applications/automatic_docs.pyi +0 -0
- {stouputils-1.7.0 → stouputils-1.7.2}/stouputils/applications/upscaler/__init__.py +0 -0
- {stouputils-1.7.0 → stouputils-1.7.2}/stouputils/applications/upscaler/__init__.pyi +0 -0
- {stouputils-1.7.0 → stouputils-1.7.2}/stouputils/applications/upscaler/config.py +0 -0
- {stouputils-1.7.0 → stouputils-1.7.2}/stouputils/applications/upscaler/config.pyi +0 -0
- {stouputils-1.7.0 → stouputils-1.7.2}/stouputils/applications/upscaler/image.py +0 -0
- {stouputils-1.7.0 → stouputils-1.7.2}/stouputils/applications/upscaler/image.pyi +0 -0
- {stouputils-1.7.0 → stouputils-1.7.2}/stouputils/applications/upscaler/video.py +0 -0
- {stouputils-1.7.0 → stouputils-1.7.2}/stouputils/applications/upscaler/video.pyi +0 -0
- {stouputils-1.7.0 → stouputils-1.7.2}/stouputils/archive.pyi +0 -0
- {stouputils-1.7.0 → stouputils-1.7.2}/stouputils/backup.py +0 -0
- {stouputils-1.7.0 → stouputils-1.7.2}/stouputils/backup.pyi +0 -0
- {stouputils-1.7.0 → stouputils-1.7.2}/stouputils/continuous_delivery/__init__.py +0 -0
- {stouputils-1.7.0 → stouputils-1.7.2}/stouputils/continuous_delivery/__init__.pyi +0 -0
- {stouputils-1.7.0 → stouputils-1.7.2}/stouputils/continuous_delivery/cd_utils.pyi +0 -0
- {stouputils-1.7.0 → stouputils-1.7.2}/stouputils/continuous_delivery/github.pyi +0 -0
- {stouputils-1.7.0 → stouputils-1.7.2}/stouputils/continuous_delivery/pypi.py +0 -0
- {stouputils-1.7.0 → stouputils-1.7.2}/stouputils/continuous_delivery/pypi.pyi +0 -0
- {stouputils-1.7.0 → stouputils-1.7.2}/stouputils/continuous_delivery/pyproject.pyi +0 -0
- {stouputils-1.7.0 → stouputils-1.7.2}/stouputils/continuous_delivery/stubs.py +0 -0
- {stouputils-1.7.0 → stouputils-1.7.2}/stouputils/continuous_delivery/stubs.pyi +0 -0
- {stouputils-1.7.0 → stouputils-1.7.2}/stouputils/ctx.py +0 -0
- {stouputils-1.7.0 → stouputils-1.7.2}/stouputils/ctx.pyi +0 -0
- {stouputils-1.7.0 → stouputils-1.7.2}/stouputils/data_science/config/get.py +0 -0
- {stouputils-1.7.0 → stouputils-1.7.2}/stouputils/data_science/config/set.py +0 -0
- {stouputils-1.7.0 → stouputils-1.7.2}/stouputils/data_science/data_processing/image/__init__.py +0 -0
- {stouputils-1.7.0 → stouputils-1.7.2}/stouputils/data_science/data_processing/image/auto_contrast.py +0 -0
- {stouputils-1.7.0 → stouputils-1.7.2}/stouputils/data_science/data_processing/image/axis_flip.py +0 -0
- {stouputils-1.7.0 → stouputils-1.7.2}/stouputils/data_science/data_processing/image/bias_field_correction.py +0 -0
- {stouputils-1.7.0 → stouputils-1.7.2}/stouputils/data_science/data_processing/image/binary_threshold.py +0 -0
- {stouputils-1.7.0 → stouputils-1.7.2}/stouputils/data_science/data_processing/image/blur.py +0 -0
- {stouputils-1.7.0 → stouputils-1.7.2}/stouputils/data_science/data_processing/image/brightness.py +0 -0
- {stouputils-1.7.0 → stouputils-1.7.2}/stouputils/data_science/data_processing/image/canny.py +0 -0
- {stouputils-1.7.0 → stouputils-1.7.2}/stouputils/data_science/data_processing/image/clahe.py +0 -0
- {stouputils-1.7.0 → stouputils-1.7.2}/stouputils/data_science/data_processing/image/common.py +0 -0
- {stouputils-1.7.0 → stouputils-1.7.2}/stouputils/data_science/data_processing/image/contrast.py +0 -0
- {stouputils-1.7.0 → stouputils-1.7.2}/stouputils/data_science/data_processing/image/curvature_flow_filter.py +0 -0
- {stouputils-1.7.0 → stouputils-1.7.2}/stouputils/data_science/data_processing/image/denoise.py +0 -0
- {stouputils-1.7.0 → stouputils-1.7.2}/stouputils/data_science/data_processing/image/histogram_equalization.py +0 -0
- {stouputils-1.7.0 → stouputils-1.7.2}/stouputils/data_science/data_processing/image/invert.py +0 -0
- {stouputils-1.7.0 → stouputils-1.7.2}/stouputils/data_science/data_processing/image/laplacian.py +0 -0
- {stouputils-1.7.0 → stouputils-1.7.2}/stouputils/data_science/data_processing/image/median_blur.py +0 -0
- {stouputils-1.7.0 → stouputils-1.7.2}/stouputils/data_science/data_processing/image/noise.py +0 -0
- {stouputils-1.7.0 → stouputils-1.7.2}/stouputils/data_science/data_processing/image/normalize.py +0 -0
- {stouputils-1.7.0 → stouputils-1.7.2}/stouputils/data_science/data_processing/image/random_erase.py +0 -0
- {stouputils-1.7.0 → stouputils-1.7.2}/stouputils/data_science/data_processing/image/resize.py +0 -0
- {stouputils-1.7.0 → stouputils-1.7.2}/stouputils/data_science/data_processing/image/rotation.py +0 -0
- {stouputils-1.7.0 → stouputils-1.7.2}/stouputils/data_science/data_processing/image/salt_pepper.py +0 -0
- {stouputils-1.7.0 → stouputils-1.7.2}/stouputils/data_science/data_processing/image/sharpening.py +0 -0
- {stouputils-1.7.0 → stouputils-1.7.2}/stouputils/data_science/data_processing/image/shearing.py +0 -0
- {stouputils-1.7.0 → stouputils-1.7.2}/stouputils/data_science/data_processing/image/threshold.py +0 -0
- {stouputils-1.7.0 → stouputils-1.7.2}/stouputils/data_science/data_processing/image/translation.py +0 -0
- {stouputils-1.7.0 → stouputils-1.7.2}/stouputils/data_science/data_processing/image/zoom.py +0 -0
- {stouputils-1.7.0 → stouputils-1.7.2}/stouputils/data_science/data_processing/image_augmentation.py +0 -0
- {stouputils-1.7.0 → stouputils-1.7.2}/stouputils/data_science/data_processing/image_preprocess.py +0 -0
- {stouputils-1.7.0 → stouputils-1.7.2}/stouputils/data_science/data_processing/prosthesis_detection.py +0 -0
- {stouputils-1.7.0 → stouputils-1.7.2}/stouputils/data_science/data_processing/technique.py +0 -0
- {stouputils-1.7.0 → stouputils-1.7.2}/stouputils/data_science/dataset/__init__.py +0 -0
- {stouputils-1.7.0 → stouputils-1.7.2}/stouputils/data_science/dataset/dataset.py +0 -0
- {stouputils-1.7.0 → stouputils-1.7.2}/stouputils/data_science/dataset/dataset_loader.py +0 -0
- {stouputils-1.7.0 → stouputils-1.7.2}/stouputils/data_science/dataset/grouping_strategy.py +0 -0
- {stouputils-1.7.0 → stouputils-1.7.2}/stouputils/data_science/dataset/image_loader.py +0 -0
- {stouputils-1.7.0 → stouputils-1.7.2}/stouputils/data_science/dataset/xy_tuple.py +0 -0
- {stouputils-1.7.0 → stouputils-1.7.2}/stouputils/data_science/metric_dictionnary.py +0 -0
- {stouputils-1.7.0 → stouputils-1.7.2}/stouputils/data_science/metric_utils.py +0 -0
- {stouputils-1.7.0 → stouputils-1.7.2}/stouputils/data_science/mlflow_utils.py +0 -0
- {stouputils-1.7.0 → stouputils-1.7.2}/stouputils/data_science/models/abstract_model.py +0 -0
- {stouputils-1.7.0 → stouputils-1.7.2}/stouputils/data_science/models/all.py +0 -0
- {stouputils-1.7.0 → stouputils-1.7.2}/stouputils/data_science/models/base_keras.py +0 -0
- {stouputils-1.7.0 → stouputils-1.7.2}/stouputils/data_science/models/keras/all.py +0 -0
- {stouputils-1.7.0 → stouputils-1.7.2}/stouputils/data_science/models/keras/convnext.py +0 -0
- {stouputils-1.7.0 → stouputils-1.7.2}/stouputils/data_science/models/keras/densenet.py +0 -0
- {stouputils-1.7.0 → stouputils-1.7.2}/stouputils/data_science/models/keras/efficientnet.py +0 -0
- {stouputils-1.7.0 → stouputils-1.7.2}/stouputils/data_science/models/keras/mobilenet.py +0 -0
- {stouputils-1.7.0 → stouputils-1.7.2}/stouputils/data_science/models/keras/resnet.py +0 -0
- {stouputils-1.7.0 → stouputils-1.7.2}/stouputils/data_science/models/keras/squeezenet.py +0 -0
- {stouputils-1.7.0 → stouputils-1.7.2}/stouputils/data_science/models/keras/vgg.py +0 -0
- {stouputils-1.7.0 → stouputils-1.7.2}/stouputils/data_science/models/keras/xception.py +0 -0
- {stouputils-1.7.0 → stouputils-1.7.2}/stouputils/data_science/models/keras_utils/callbacks/__init__.py +0 -0
- {stouputils-1.7.0 → stouputils-1.7.2}/stouputils/data_science/models/keras_utils/callbacks/colored_progress_bar.py +0 -0
- {stouputils-1.7.0 → stouputils-1.7.2}/stouputils/data_science/models/keras_utils/callbacks/learning_rate_finder.py +0 -0
- {stouputils-1.7.0 → stouputils-1.7.2}/stouputils/data_science/models/keras_utils/callbacks/model_checkpoint_v2.py +0 -0
- {stouputils-1.7.0 → stouputils-1.7.2}/stouputils/data_science/models/keras_utils/callbacks/progressive_unfreezing.py +0 -0
- {stouputils-1.7.0 → stouputils-1.7.2}/stouputils/data_science/models/keras_utils/callbacks/warmup_scheduler.py +0 -0
- {stouputils-1.7.0 → stouputils-1.7.2}/stouputils/data_science/models/keras_utils/losses/__init__.py +0 -0
- {stouputils-1.7.0 → stouputils-1.7.2}/stouputils/data_science/models/keras_utils/losses/next_generation_loss.py +0 -0
- {stouputils-1.7.0 → stouputils-1.7.2}/stouputils/data_science/models/keras_utils/visualizations.py +0 -0
- {stouputils-1.7.0 → stouputils-1.7.2}/stouputils/data_science/models/model_interface.py +0 -0
- {stouputils-1.7.0 → stouputils-1.7.2}/stouputils/data_science/models/sandbox.py +0 -0
- {stouputils-1.7.0 → stouputils-1.7.2}/stouputils/data_science/range_tuple.py +0 -0
- {stouputils-1.7.0 → stouputils-1.7.2}/stouputils/data_science/scripts/augment_dataset.py +0 -0
- {stouputils-1.7.0 → stouputils-1.7.2}/stouputils/data_science/scripts/exhaustive_process.py +0 -0
- {stouputils-1.7.0 → stouputils-1.7.2}/stouputils/data_science/scripts/preprocess_dataset.py +0 -0
- {stouputils-1.7.0 → stouputils-1.7.2}/stouputils/data_science/scripts/routine.py +0 -0
- {stouputils-1.7.0 → stouputils-1.7.2}/stouputils/data_science/utils.py +0 -0
- {stouputils-1.7.0 → stouputils-1.7.2}/stouputils/decorators.py +0 -0
- {stouputils-1.7.0 → stouputils-1.7.2}/stouputils/decorators.pyi +0 -0
- {stouputils-1.7.0 → stouputils-1.7.2}/stouputils/installer/__init__.py +0 -0
- {stouputils-1.7.0 → stouputils-1.7.2}/stouputils/installer/__init__.pyi +0 -0
- {stouputils-1.7.0 → stouputils-1.7.2}/stouputils/installer/common.py +0 -0
- {stouputils-1.7.0 → stouputils-1.7.2}/stouputils/installer/common.pyi +0 -0
- {stouputils-1.7.0 → stouputils-1.7.2}/stouputils/installer/downloader.py +0 -0
- {stouputils-1.7.0 → stouputils-1.7.2}/stouputils/installer/downloader.pyi +0 -0
- {stouputils-1.7.0 → stouputils-1.7.2}/stouputils/installer/linux.py +0 -0
- {stouputils-1.7.0 → stouputils-1.7.2}/stouputils/installer/linux.pyi +0 -0
- {stouputils-1.7.0 → stouputils-1.7.2}/stouputils/installer/main.py +0 -0
- {stouputils-1.7.0 → stouputils-1.7.2}/stouputils/installer/main.pyi +0 -0
- {stouputils-1.7.0 → stouputils-1.7.2}/stouputils/installer/windows.py +0 -0
- {stouputils-1.7.0 → stouputils-1.7.2}/stouputils/installer/windows.pyi +0 -0
- {stouputils-1.7.0 → stouputils-1.7.2}/stouputils/py.typed +0 -0
--- stouputils-1.7.0/PKG-INFO
+++ stouputils-1.7.2/PKG-INFO
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: stouputils
-Version: 1.7.0
+Version: 1.7.2
 Summary: Stouputils is a collection of utility modules designed to simplify and enhance the development process. It includes a range of tools for tasks such as execution of doctests, display utilities, decorators, as well as context managers, and many more.
 Project-URL: Homepage, https://github.com/Stoupy51/stouputils
 Project-URL: Issues, https://github.com/Stoupy51/stouputils/issues
--- stouputils-1.7.0/pyproject.toml
+++ stouputils-1.7.2/pyproject.toml
@@ -5,7 +5,7 @@ build-backend = "hatchling.build"
 
 [project]
 name = "stouputils"
-version = "1.7.0"
+version = "1.7.2"
 description = "Stouputils is a collection of utility modules designed to simplify and enhance the development process. It includes a range of tools for tasks such as execution of doctests, display utilities, decorators, as well as context managers, and many more."
 readme = "README.md"
 requires-python = ">=3.10"
--- stouputils-1.7.0/stouputils/all_doctests.py
+++ stouputils-1.7.2/stouputils/all_doctests.py
@@ -13,14 +13,17 @@ import importlib
 import os
 import pkgutil
 import sys
-from doctest import TestResults, testmod
 from types import ModuleType
+from typing import TYPE_CHECKING
 
 from . import decorators
 from .decorators import measure_time
 from .io import clean_path, relative_path
 from .print import error, info, progress, warning
 
+if TYPE_CHECKING:
+	from doctest import TestResults
+
 
 # Main program
 def launch_tests(root_dir: str, strict: bool = True) -> int:
@@ -140,7 +143,7 @@ def launch_tests(root_dir: str, strict: bool = True) -> int:
 	return total_failed
 
 
-def test_module_with_progress(module: ModuleType, separator: str) -> TestResults:
+def test_module_with_progress(module: ModuleType, separator: str) -> "TestResults":
 	""" Test a module with testmod and measure the time taken with progress printing.
 
 	Args:
@@ -149,6 +152,7 @@ def test_module_with_progress(module: ModuleType, separator: str) -> TestResults
 	Returns:
 		TestResults: The results of the tests
 	"""
+	from doctest import TestResults, testmod
 	@measure_time(progress, message=f"Testing module '{module.__name__}' {separator}took")
 	def internal() -> TestResults:
 		return testmod(m=module)
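
The pattern above repeats throughout 1.7.2: imports needed only for annotations move under an `if TYPE_CHECKING:` block, and imports needed only at call time move into the function bodies that use them, so importing the package no longer loads doctest, requests, yaml, toml, polars or zarr up front. A minimal sketch of the pattern (illustrative only, not code from the package; `run_suite` is a hypothetical name):

from typing import TYPE_CHECKING

if TYPE_CHECKING:
	# Evaluated by type checkers only, never at runtime
	from doctest import TestResults

def run_suite(module_name: str) -> "TestResults":
	# The real imports happen only when the function is actually called
	import importlib
	from doctest import testmod
	return testmod(m=importlib.import_module(module_name))
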
--- stouputils-1.7.0/stouputils/archive.py
+++ stouputils-1.7.2/stouputils/archive.py
@@ -41,9 +41,6 @@ def repair_zip_file(file_path: str, destination: str) -> bool:
 
 	> repair_zip_file("/path/to/source.zip", "/path/to/destination.zip")
 	"""
-	import struct
-	import zlib
-
 	# Check
 	if not os.path.exists(file_path):
 		raise FileNotFoundError(f"File '{file_path}' not found")
@@ -51,6 +48,9 @@ def repair_zip_file(file_path: str, destination: str) -> bool:
 	if dirname and not os.path.exists(dirname):
 		raise FileNotFoundError(f"Directory '{dirname}' not found")
 
+	import struct
+	import zlib
+
 	# Read the entire ZIP file into memory
 	with open(file_path, 'rb') as f:
 		data = f.read()
--- stouputils-1.7.0/stouputils/collections.py
+++ stouputils-1.7.2/stouputils/collections.py
@@ -14,11 +14,14 @@ import atexit
 import os
 import shutil
 import tempfile
-from typing import Any, Literal, TypeVar
+from typing import TYPE_CHECKING, Any, Literal, TypeVar
 
-
-
-
+# Lazy imports for typing
+if TYPE_CHECKING:
+	import numpy as np
+	import polars as pl
+	import zarr  # pyright: ignore[reportMissingTypeStubs]
+	from numpy.typing import NDArray
 
 # Typing
 T = TypeVar("T")
@@ -94,11 +97,62 @@ def sort_dict_keys(dictionary: dict[T, Any], order: list[T], reverse: bool = Fal
 	"""
 	return dict(sorted(dictionary.items(), key=lambda x: order.index(x[0]) if x[0] in order else len(order), reverse=reverse))
 
+def upsert_in_dataframe(
+	df: "pl.DataFrame",
+	new_entry: dict[str, Any],
+	primary_keys: dict[str, Any] | None = None
+) -> "pl.DataFrame":
+	""" Insert or update a row in the Polars DataFrame based on primary keys.
+
+	Args:
+		df (pl.DataFrame): The Polars DataFrame to update.
+		new_entry (dict[str, Any]): The new entry to insert or update.
+		primary_keys (dict[str, Any]): The primary keys to identify the row (default: empty).
+	Returns:
+		pl.DataFrame: The updated Polars DataFrame.
+	"""
+	# Imports
+	import polars as pl
+
+	# Create new DataFrame if file doesn't exist or is invalid
+	if df.is_empty():
+		return pl.DataFrame([new_entry])
+
+	# If no primary keys provided, return DataFrame with new entry appended
+	if not primary_keys:
+		new_row_df = pl.DataFrame([new_entry])
+		return pl.concat([df, new_row_df], how="diagonal_relaxed")
+
+	# Build mask based on primary keys
+	mask: pl.Expr = pl.lit(True)
+	for key, value in primary_keys.items():
+		if key in df.columns:
+			mask = mask & (df[key] == value)
+		else:
+			# Primary key column doesn't exist, so no match possible
+			mask = pl.lit(False)
+			break
+
+	# Insert or update row based on primary keys
+	if df.select(mask).to_series().any():
+		# Update existing row
+		for key, value in new_entry.items():
+			if key in df.columns:
+				df = df.with_columns(pl.when(mask).then(pl.lit(value)).otherwise(pl.col(key)).alias(key))
+			else:
+				# Add new column if it doesn't exist
+				df = df.with_columns(pl.when(mask).then(pl.lit(value)).otherwise(None).alias(key))
+		return df
+	else:
+		# Insert new row
+		new_row_df = pl.DataFrame([new_entry])
+		return pl.concat([df, new_row_df], how="diagonal_relaxed")
+
 def array_to_disk(
-	data: NDArray[Any] | zarr.Array,
+	data: "NDArray[Any] | zarr.Array",
 	delete_input: bool = True,
-	more_data: NDArray[Any] | zarr.Array | None = None
-) -> tuple[zarr.Array, str, int]:
+	more_data: "NDArray[Any] | zarr.Array | None" = None
+) -> tuple["zarr.Array", str, int]:
 	""" Easily handle large numpy arrays on disk using zarr for efficient storage and access.
 
 	Zarr provides a simpler and more efficient alternative to np.memmap with better compression
@@ -112,6 +166,7 @@ def array_to_disk(
 		tuple[zarr.Array, str, int]: The zarr array, the directory path, and the total size in bytes
 
 	Examples:
+		>>> import numpy as np
 		>>> data = np.random.rand(1000, 1000)
 		>>> zarr_array = array_to_disk(data)[0]
 		>>> zarr_array.shape
@@ -127,6 +182,9 @@ def array_to_disk(
 		for filename in filenames
 	)
 
+	# Imports
+	import zarr  # pyright: ignore[reportMissingTypeStubs]
+
 	# If data is already a zarr.Array and more_data is present, just append and return
 	if isinstance(data, zarr.Array) and more_data is not None:
 		original_size: int = data.shape[0]
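
A minimal usage sketch for the new upsert_in_dataframe, based on the hunk above (assumes polars is installed; the column names are made up for the example):

import polars as pl
from stouputils.collections import upsert_in_dataframe

df = pl.DataFrame([{"name": "alpha", "score": 1}])

# No row matches the primary key, so the entry is appended
df = upsert_in_dataframe(df, {"name": "beta", "score": 2}, primary_keys={"name": "beta"})

# A row matches, so it is updated in place; unknown columns ("rank") are added on the fly
df = upsert_in_dataframe(df, {"name": "alpha", "score": 10, "rank": 1}, primary_keys={"name": "alpha"})
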
--- stouputils-1.7.0/stouputils/collections.pyi
+++ stouputils-1.7.2/stouputils/collections.pyi
@@ -1,3 +1,4 @@
+import polars as pl
 import zarr
 from numpy.typing import NDArray as NDArray
 from typing import Any, Literal, TypeVar
@@ -49,7 +50,17 @@ def sort_dict_keys(dictionary: dict[T, Any], order: list[T], reverse: bool = Fal
 		>>> sort_dict_keys({'b': 2, 'a': 1, 'c': 3, 'd': 4}, order=["c", "b"])
 		{'c': 3, 'b': 2, 'a': 1, 'd': 4}
 	'''
-def array_to_disk(data: NDArray[Any] | zarr.Array, delete_input: bool = True, more_data: NDArray[Any] | zarr.Array | None = None) -> tuple[zarr.Array, str, int]:
+def upsert_in_dataframe(df: pl.DataFrame, new_entry: dict[str, Any], primary_keys: dict[str, Any] | None = None) -> pl.DataFrame:
+	""" Insert or update a row in the Polars DataFrame based on primary keys.
+
+	Args:
+		df (pl.DataFrame): The Polars DataFrame to update.
+		new_entry (dict[str, Any]): The new entry to insert or update.
+		primary_keys (dict[str, Any]): The primary keys to identify the row (default: empty).
+	Returns:
+		pl.DataFrame: The updated Polars DataFrame.
+	"""
+def array_to_disk(data: NDArray[Any] | zarr.Array, delete_input: bool = True, more_data: NDArray[Any] | zarr.Array | None = None) -> tuple['zarr.Array', str, int]:
 	""" Easily handle large numpy arrays on disk using zarr for efficient storage and access.
 
 	Zarr provides a simpler and more efficient alternative to np.memmap with better compression
@@ -63,6 +74,7 @@ def array_to_disk(data: NDArray[Any] | zarr.Array, delete_input: bool = True, mo
 		tuple[zarr.Array, str, int]: The zarr array, the directory path, and the total size in bytes
 
 	Examples:
+		>>> import numpy as np
 		>>> data = np.random.rand(1000, 1000)
 		>>> zarr_array = array_to_disk(data)[0]
 		>>> zarr_array.shape
--- stouputils-1.7.0/stouputils/continuous_delivery/cd_utils.py
+++ stouputils-1.7.2/stouputils/continuous_delivery/cd_utils.py
@@ -4,15 +4,15 @@ It is mainly used by the `stouputils.continuous_delivery.github` module.
 
 # Imports
 import os
-from typing import Any
-
-import requests
-import yaml
+from typing import TYPE_CHECKING, Any
 
 from ..decorators import handle_error
 from ..io import clean_path, super_json_load
 from ..print import warning
 
+if TYPE_CHECKING:
+	import requests
+
 
 # Load credentials from file
 @handle_error()
@@ -63,6 +63,7 @@ def load_credentials(credentials_path: str) -> dict[str, Any]:
 
 	# Else, load the file if it's a YAML file
 	elif credentials_path.endswith((".yml", ".yaml")):
+		import yaml
 		with open(credentials_path) as f:
 			return yaml.safe_load(f)
 
@@ -71,7 +72,7 @@ def load_credentials(credentials_path: str) -> dict[str, Any]:
 	raise ValueError("Credentials file must be .json or .yml format")
 
 # Handle a response
-def handle_response(response: requests.Response, error_message: str) -> None:
+def handle_response(response: "requests.Response", error_message: str) -> None:
 	""" Handle a response from the API by raising an error if the response is not successful (status code not in 200-299).
 
 	Args:
@@ -79,6 +80,7 @@ def handle_response(response: requests.Response, error_message: str) -> None:
 		error_message (str): The error message to raise if the response is not successful
 	"""
 	if response.status_code < 200 or response.status_code >= 300:
+		import requests
 		try:
 			raise ValueError(f"{error_message}, response code {response.status_code} with response {response.json()}")
 		except requests.exceptions.JSONDecodeError as e:
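
handle_response keeps its behaviour and only gains the deferred `import requests` (needed for the JSONDecodeError type in the except clause). A small sketch of a typical call, assuming requests is installed (the URL is illustrative):

import requests
from stouputils.continuous_delivery.cd_utils import handle_response

response = requests.get("https://api.github.com/repos/Stoupy51/stouputils")
handle_response(response, "Failed to fetch repository metadata")  # raises ValueError on non-2xx status codes
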
--- stouputils-1.7.0/stouputils/continuous_delivery/github.py
+++ stouputils-1.7.2/stouputils/continuous_delivery/github.py
@@ -9,15 +9,16 @@
 
 # Imports
 import os
-from typing import Any
-
-import requests
+from typing import TYPE_CHECKING, Any
 
 from ..decorators import handle_error, measure_time
 from ..io import clean_path
 from ..print import info, progress, warning
 from .cd_utils import clean_version, handle_response, version_to_float
 
+if TYPE_CHECKING:
+	import requests
+
 # Constants
 GITHUB_API_URL: str = "https://api.github.com"
 PROJECT_ENDPOINT: str = f"{GITHUB_API_URL}/repos"
@@ -483,6 +484,8 @@ def upload_to_github(credentials: dict[str, Any], github_config: dict[str, Any])
 		}
 	)
 	"""
+	import requests  # type: ignore # noqa: F401
+
 	# Validate credentials and configuration
 	owner, headers = validate_credentials(credentials)
 	project_name, version, build_folder, endswith = validate_config(github_config)
--- stouputils-1.7.0/stouputils/continuous_delivery/pyproject.py
+++ stouputils-1.7.2/stouputils/continuous_delivery/pyproject.py
@@ -17,8 +17,6 @@ writing, version management and TOML formatting capabilities.
 # Imports
 from typing import Any
 
-import toml
-
 from ..io import super_open
 
 
@@ -31,6 +29,7 @@ def read_pyproject(pyproject_path: str) -> dict[str, Any]:
 	Returns:
 		dict[str, Any]: The content of the pyproject.toml file.
 	"""
+	import toml
 	return toml.load(pyproject_path)
 
 
@@ -81,6 +80,7 @@ def format_toml_lists(content: str) -> str:
 
 def write_pyproject(pyproject_path: str, pyproject_content: dict[str, Any]) -> None:
 	""" Write to the pyproject.toml file with properly indented lists. """
+	import toml
 	content: str = "\n" + toml.dumps(pyproject_content) + "\n"
 	content = format_toml_lists(content)  # Apply formatting
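
Both pyproject helpers now import toml on first use instead of at module load. A minimal round-trip sketch (assumes the toml package is installed and a pyproject.toml file exists in the working directory):

from stouputils.continuous_delivery.pyproject import read_pyproject, write_pyproject

data = read_pyproject("pyproject.toml")   # the deferred `import toml` happens here
data["project"]["version"] = "1.7.2"      # e.g. bump the version field
write_pyproject("pyproject.toml", data)   # lists are re-indented by format_toml_lists
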
stouputils-1.7.2/stouputils/image.py (new file, 225 lines):

"""
This module provides little utilities for image processing.

- image_resize: Resize an image while preserving its aspect ratio by default.
- auto_crop: Automatically crop an image to remove zero/uniform regions.

See stouputils.data_science.data_processing for lots more image processing utilities.
"""

# Imports
from collections.abc import Callable
from typing import TYPE_CHECKING, Any, cast

if TYPE_CHECKING:
	import numpy as np
	from numpy.typing import NDArray
	from PIL import Image


# Functions
def image_resize(
	image: "Image.Image | NDArray[np.number]",
	max_result_size: int,
	resampling: "Image.Resampling | None" = None,
	min_or_max: Callable[[int, int], int] = max,
	return_type: type["Image.Image | NDArray[np.number]"] | str = "same",
	keep_aspect_ratio: bool = True,
) -> Any:
	""" Resize an image while preserving its aspect ratio by default.
	Scales the image so that its largest dimension equals max_result_size.

	Args:
		image (Image.Image | np.ndarray): The image to resize.
		max_result_size (int): Maximum size for the largest dimension.
		resampling (Image.Resampling | None): PIL resampling filter to use (default: Image.Resampling.LANCZOS).
		min_or_max (Callable): Function to use to get the minimum or maximum of the two ratios.
		return_type (type | str): Type of the return value (Image.Image, np.ndarray, or "same" to match input type).
		keep_aspect_ratio (bool): Whether to keep the aspect ratio.
	Returns:
		Image.Image | NDArray[np.number]: The resized image with preserved aspect ratio.
	Examples:
		>>> # Test with (height x width x channels) numpy array
		>>> import numpy as np
		>>> array = np.random.randint(0, 255, (100, 50, 3), dtype=np.uint8)
		>>> image_resize(array, 100).shape
		(100, 50, 3)
		>>> image_resize(array, 100, min_or_max=max).shape
		(100, 50, 3)
		>>> image_resize(array, 100, min_or_max=min).shape
		(200, 100, 3)

		>>> # Test with PIL Image
		>>> from PIL import Image
		>>> pil_image: Image.Image = Image.new('RGB', (200, 100))
		>>> image_resize(pil_image, 50).size
		(50, 25)
		>>> # Test with different return types
		>>> resized_array = image_resize(array, 50, return_type=np.ndarray)
		>>> isinstance(resized_array, np.ndarray)
		True
		>>> resized_array.shape
		(50, 25, 3)
		>>> # Test with different resampling methods
		>>> image_resize(pil_image, 50, resampling=Image.Resampling.NEAREST).size
		(50, 25)
	"""
	# Imports
	import numpy as np
	from PIL import Image

	# Set default resampling method if not provided
	if resampling is None:
		resampling = Image.Resampling.LANCZOS

	# Store original type for later conversion
	original_was_pil: bool = isinstance(image, Image.Image)

	# Convert numpy array to PIL Image if needed
	if isinstance(image, np.ndarray):
		image = Image.fromarray(image)

	if keep_aspect_ratio:

		# Get original image dimensions
		width: int = image.size[0]
		height: int = image.size[1]

		# Determine which dimension to use for scaling based on min_or_max function
		max_dimension: int = min_or_max(width, height)

		# Calculate scaling factor
		scale: float = max_result_size / max_dimension

		# Calculate new dimensions while preserving aspect ratio
		new_width: int = int(width * scale)
		new_height: int = int(height * scale)

		# Resize the image with the calculated dimensions
		new_image: Image.Image = image.resize((new_width, new_height), resampling)
	else:
		# If not keeping aspect ratio, resize to square with max_result_size
		new_image: Image.Image = image.resize((max_result_size, max_result_size), resampling)

	# Return the image in the requested format
	if return_type == "same":
		# Return same type as input
		if original_was_pil:
			return new_image
		else:
			return np.array(new_image)
	elif return_type == np.ndarray:
		return np.array(new_image)
	else:
		return new_image


def auto_crop(
	image: "Image.Image | NDArray[np.number]",
	mask: "NDArray[np.bool_] | None" = None,
	threshold: int | float | Callable[["NDArray[np.number]"], int | float] | None = None,
	return_type: type["Image.Image | NDArray[np.number]"] | str = "same",
	contiguous: bool = True,
) -> Any:
	""" Automatically crop an image to remove zero or uniform regions.

	This function crops the image to keep only the region where pixels are non-zero
	(or above a threshold). It can work with a mask or directly analyze the image.

	Args:
		image (Image.Image | NDArray): The image to crop.
		mask (NDArray[np.bool_] | None): Optional binary mask indicating regions to keep.
		threshold (int | float | Callable): Threshold value or function (default: np.min).
		return_type (type | str): Type of the return value (Image.Image, NDArray[np.number], or "same" to match input type).
		contiguous (bool): If True (default), crop to bounding box. If False, remove entire rows/columns with no content.
	Returns:
		Image.Image | NDArray[np.number]: The cropped image.

	Examples:
		>>> # Test with numpy array with zeros on edges
		>>> import numpy as np
		>>> array = np.zeros((100, 100, 3), dtype=np.uint8)
		>>> array[20:80, 30:70] = 255  # White rectangle in center
		>>> cropped = auto_crop(array, return_type=np.ndarray)
		>>> cropped.shape
		(60, 40, 3)

		>>> # Test with custom mask
		>>> mask = np.zeros((100, 100), dtype=bool)
		>>> mask[10:90, 10:90] = True
		>>> cropped_with_mask = auto_crop(array, mask=mask, return_type=np.ndarray)
		>>> cropped_with_mask.shape
		(80, 80, 3)

		>>> # Test with PIL Image
		>>> from PIL import Image
		>>> pil_image = Image.new('RGB', (100, 100), (0, 0, 0))
		>>> from PIL import ImageDraw
		>>> draw = ImageDraw.Draw(pil_image)
		>>> draw.rectangle([25, 25, 75, 75], fill=(255, 255, 255))
		>>> cropped_pil = auto_crop(pil_image)
		>>> cropped_pil.size
		(51, 51)

		>>> # Test with threshold
		>>> array_gray = np.ones((100, 100), dtype=np.uint8) * 10
		>>> array_gray[20:80, 30:70] = 255
		>>> cropped_threshold = auto_crop(array_gray, threshold=50, return_type=np.ndarray)
		>>> cropped_threshold.shape
		(60, 40)

		>>> # Test with callable threshold (using lambda to avoid min value)
		>>> array_gray2 = np.ones((100, 100), dtype=np.uint8) * 10
		>>> array_gray2[20:80, 30:70] = 255
		>>> cropped_max = auto_crop(array_gray2, threshold=lambda x: 50, return_type=np.ndarray)
		>>> cropped_max.shape
		(60, 40)

		>>> # Test with non-contiguous crop
		>>> array_sparse = np.zeros((100, 100, 3), dtype=np.uint8)
		>>> array_sparse[10, 10] = 255
		>>> array_sparse[50, 50] = 255
		>>> array_sparse[90, 90] = 255
		>>> cropped_contiguous = auto_crop(array_sparse, contiguous=True, return_type=np.ndarray)
		>>> cropped_contiguous.shape  # Bounding box from (10,10) to (90,90)
		(81, 81, 3)
		>>> cropped_non_contiguous = auto_crop(array_sparse, contiguous=False, return_type=np.ndarray)
		>>> cropped_non_contiguous.shape  # Only rows/cols 10, 50, 90
		(3, 3, 3)
	"""
	# Imports
	import numpy as np
	from PIL import Image

	# Convert to numpy array and store original type
	original_was_pil: bool = isinstance(image, Image.Image)
	image_array: NDArray[np.number] = np.array(image) if original_was_pil else image

	# Create mask if not provided
	if mask is None:
		if threshold is None:
			threshold = cast(Callable[["NDArray[np.number]"], int | float], np.min)
		threshold_value: int | float = threshold(image_array) if callable(threshold) else threshold
		mask = (image_array > threshold_value) if image_array.ndim == 2 else np.any(image_array > threshold_value, axis=2)

	# Find rows and columns with content
	rows_with_content: NDArray[np.bool_] = np.any(mask, axis=1)
	cols_with_content: NDArray[np.bool_] = np.any(mask, axis=0)

	# Return original if no content found
	if not (np.any(rows_with_content) and np.any(cols_with_content)):
		return image_array if return_type == np.ndarray else (image if original_was_pil else Image.fromarray(image_array))

	# Crop based on contiguous parameter
	if contiguous:
		row_idx, col_idx = np.where(rows_with_content)[0], np.where(cols_with_content)[0]
		cropped_array: NDArray[np.number] = image_array[row_idx[0]:row_idx[-1]+1, col_idx[0]:col_idx[-1]+1]
	else:
		ix = np.ix_(rows_with_content, cols_with_content, np.ones(image_array.shape[2], dtype=bool)) if image_array.ndim == 3 else np.ix_(rows_with_content, cols_with_content)
		cropped_array = image_array[ix]

	# Return in requested format
	if return_type == "same":
		return Image.fromarray(cropped_array) if original_was_pil else cropped_array
	return cropped_array if return_type == np.ndarray else Image.fromarray(cropped_array)
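
A short sketch combining the two new helpers (assumes numpy and Pillow are installed; the input array is synthetic):

import numpy as np
from stouputils.image import auto_crop, image_resize

frame = np.zeros((240, 320, 3), dtype=np.uint8)
frame[60:180, 80:240] = 255                    # content in the middle of a black frame

cropped = auto_crop(frame)                     # drops the all-black border -> shape (120, 160, 3)
thumb = image_resize(cropped, 64)              # largest side becomes 64 -> shape (48, 64, 3)
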
stouputils-1.7.2/stouputils/image.pyi (new file, 112 lines):

import numpy as np
from PIL import Image
from collections.abc import Callable
from numpy.typing import NDArray as NDArray
from typing import Any

def image_resize(image: Image.Image | NDArray[np.number], max_result_size: int, resampling: Image.Resampling | None = None, min_or_max: Callable[[int, int], int] = ..., return_type: type[Image.Image | NDArray[np.number]] | str = 'same', keep_aspect_ratio: bool = True) -> Any:
	''' Resize an image while preserving its aspect ratio by default.
	Scales the image so that its largest dimension equals max_result_size.

	Args:
		image (Image.Image | np.ndarray): The image to resize.
		max_result_size (int): Maximum size for the largest dimension.
		resampling (Image.Resampling | None): PIL resampling filter to use (default: Image.Resampling.LANCZOS).
		min_or_max (Callable): Function to use to get the minimum or maximum of the two ratios.
		return_type (type | str): Type of the return value (Image.Image, np.ndarray, or "same" to match input type).
		keep_aspect_ratio (bool): Whether to keep the aspect ratio.
	Returns:
		Image.Image | NDArray[np.number]: The resized image with preserved aspect ratio.
	Examples:
		>>> # Test with (height x width x channels) numpy array
		>>> import numpy as np
		>>> array = np.random.randint(0, 255, (100, 50, 3), dtype=np.uint8)
		>>> image_resize(array, 100).shape
		(100, 50, 3)
		>>> image_resize(array, 100, min_or_max=max).shape
		(100, 50, 3)
		>>> image_resize(array, 100, min_or_max=min).shape
		(200, 100, 3)

		>>> # Test with PIL Image
		>>> from PIL import Image
		>>> pil_image: Image.Image = Image.new('RGB', (200, 100))
		>>> image_resize(pil_image, 50).size
		(50, 25)
		>>> # Test with different return types
		>>> resized_array = image_resize(array, 50, return_type=np.ndarray)
		>>> isinstance(resized_array, np.ndarray)
		True
		>>> resized_array.shape
		(50, 25, 3)
		>>> # Test with different resampling methods
		>>> image_resize(pil_image, 50, resampling=Image.Resampling.NEAREST).size
		(50, 25)
	'''
def auto_crop(image: Image.Image | NDArray[np.number], mask: NDArray[np.bool_] | None = None, threshold: int | float | Callable[[NDArray[np.number]], int | float] | None = None, return_type: type[Image.Image | NDArray[np.number]] | str = 'same', contiguous: bool = True) -> Any:
	''' Automatically crop an image to remove zero or uniform regions.

	This function crops the image to keep only the region where pixels are non-zero
	(or above a threshold). It can work with a mask or directly analyze the image.

	Args:
		image (Image.Image | NDArray): The image to crop.
		mask (NDArray[np.bool_] | None): Optional binary mask indicating regions to keep.
		threshold (int | float | Callable): Threshold value or function (default: np.min).
		return_type (type | str): Type of the return value (Image.Image, NDArray[np.number], or "same" to match input type).
		contiguous (bool): If True (default), crop to bounding box. If False, remove entire rows/columns with no content.
	Returns:
		Image.Image | NDArray[np.number]: The cropped image.

	Examples:
		>>> # Test with numpy array with zeros on edges
		>>> import numpy as np
		>>> array = np.zeros((100, 100, 3), dtype=np.uint8)
		>>> array[20:80, 30:70] = 255  # White rectangle in center
		>>> cropped = auto_crop(array, return_type=np.ndarray)
		>>> cropped.shape
		(60, 40, 3)

		>>> # Test with custom mask
		>>> mask = np.zeros((100, 100), dtype=bool)
		>>> mask[10:90, 10:90] = True
		>>> cropped_with_mask = auto_crop(array, mask=mask, return_type=np.ndarray)
		>>> cropped_with_mask.shape
		(80, 80, 3)

		>>> # Test with PIL Image
		>>> from PIL import Image
		>>> pil_image = Image.new('RGB', (100, 100), (0, 0, 0))
		>>> from PIL import ImageDraw
		>>> draw = ImageDraw.Draw(pil_image)
		>>> draw.rectangle([25, 25, 75, 75], fill=(255, 255, 255))
		>>> cropped_pil = auto_crop(pil_image)
		>>> cropped_pil.size
		(51, 51)

		>>> # Test with threshold
		>>> array_gray = np.ones((100, 100), dtype=np.uint8) * 10
		>>> array_gray[20:80, 30:70] = 255
		>>> cropped_threshold = auto_crop(array_gray, threshold=50, return_type=np.ndarray)
		>>> cropped_threshold.shape
		(60, 40)

		>>> # Test with callable threshold (using lambda to avoid min value)
		>>> array_gray2 = np.ones((100, 100), dtype=np.uint8) * 10
		>>> array_gray2[20:80, 30:70] = 255
		>>> cropped_max = auto_crop(array_gray2, threshold=lambda x: 50, return_type=np.ndarray)
		>>> cropped_max.shape
		(60, 40)

		>>> # Test with non-contiguous crop
		>>> array_sparse = np.zeros((100, 100, 3), dtype=np.uint8)
		>>> array_sparse[10, 10] = 255
		>>> array_sparse[50, 50] = 255
		>>> array_sparse[90, 90] = 255
		>>> cropped_contiguous = auto_crop(array_sparse, contiguous=True, return_type=np.ndarray)
		>>> cropped_contiguous.shape  # Bounding box from (10,10) to (90,90)
		(81, 81, 3)
		>>> cropped_non_contiguous = auto_crop(array_sparse, contiguous=False, return_type=np.ndarray)
		>>> cropped_non_contiguous.shape  # Only rows/cols 10, 50, 90
		(3, 3, 3)
	'''