ColorCorrectionPipeline 1.1.1__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- colorcorrectionpipeline-1.1.1/LICENSE +21 -0
- colorcorrectionpipeline-1.1.1/PKG-INFO +249 -0
- colorcorrectionpipeline-1.1.1/README.md +233 -0
- colorcorrectionpipeline-1.1.1/pyproject.toml +24 -0
- colorcorrectionpipeline-1.1.1/setup.cfg +4 -0
- colorcorrectionpipeline-1.1.1/src/ColorCorrectionPipeline.egg-info/PKG-INFO +249 -0
- colorcorrectionpipeline-1.1.1/src/ColorCorrectionPipeline.egg-info/SOURCES.txt +19 -0
- colorcorrectionpipeline-1.1.1/src/ColorCorrectionPipeline.egg-info/dependency_links.txt +1 -0
- colorcorrectionpipeline-1.1.1/src/ColorCorrectionPipeline.egg-info/top_level.txt +8 -0
- colorcorrectionpipeline-1.1.1/src/Configs/configs.py +40 -0
- colorcorrectionpipeline-1.1.1/src/FFC/FF_correction.py +508 -0
- colorcorrectionpipeline-1.1.1/src/FFC/__init__.py +3 -0
- colorcorrectionpipeline-1.1.1/src/__init__.py +9 -0
- colorcorrectionpipeline-1.1.1/src/core.py +679 -0
- colorcorrectionpipeline-1.1.1/src/key_functions.py +2151 -0
- colorcorrectionpipeline-1.1.1/src/models.py +41 -0
- colorcorrectionpipeline-1.1.1/src/quick_run.py +104 -0
- colorcorrectionpipeline-1.1.1/src/utils/logger_.py +258 -0
- colorcorrectionpipeline-1.1.1/src/utils/metrics_.py +176 -0
- colorcorrectionpipeline-1.1.1/tests/test_core.py +160 -0
- colorcorrectionpipeline-1.1.1/tests/test_model.py +35 -0
|
@@ -0,0 +1,21 @@
|
|
|
1
|
+
MIT License
|
|
2
|
+
|
|
3
|
+
Copyright (c) 2025 Collins Wakholi
|
|
4
|
+
|
|
5
|
+
Permission is hereby granted, free of charge, to any person obtaining a copy
|
|
6
|
+
of this software and associated documentation files (the “Software”), to deal
|
|
7
|
+
in the Software without restriction, including without limitation the rights
|
|
8
|
+
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
|
9
|
+
copies of the Software, and to permit persons to whom the Software is
|
|
10
|
+
furnished to do so, subject to the following conditions:
|
|
11
|
+
|
|
12
|
+
The above copyright notice and this permission notice shall be included in all
|
|
13
|
+
copies or substantial portions of the Software.
|
|
14
|
+
|
|
15
|
+
THE SOFTWARE IS PROVIDED “AS IS”, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
|
16
|
+
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
|
17
|
+
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
|
18
|
+
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
|
19
|
+
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
|
20
|
+
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
|
21
|
+
SOFTWARE.
|
|
@@ -0,0 +1,249 @@
|
|
|
1
|
+
Metadata-Version: 2.4
|
|
2
|
+
Name: ColorCorrectionPipeline
|
|
3
|
+
Version: 1.1.1
|
|
4
|
+
Summary: A Stepwise color‐correction pipeline with flat‐field, gamma, white‐balance, and color‐correction stages.
|
|
5
|
+
Author-email: Collins Wakholi <wcoln@yahoo.com>, "Devin A. Rippner" <devinrippner@gmail.com>
|
|
6
|
+
License: LICENSE
|
|
7
|
+
Project-URL: Homepage, https://github.com/collinswakholi/ColorCorrectionPackage
|
|
8
|
+
Project-URL: Bug Tracker, https://github.com/collinswakholi/ColorCorrectionPackage/issues
|
|
9
|
+
Keywords: color,image-processing,flat-field,gamma-correction,white-balance,color-correction
|
|
10
|
+
Classifier: Programming Language :: Python :: 3
|
|
11
|
+
Classifier: License :: OSI Approved :: MIT License
|
|
12
|
+
Classifier: Operating System :: OS Independent
|
|
13
|
+
Description-Content-Type: text/markdown
|
|
14
|
+
License-File: LICENSE
|
|
15
|
+
Dynamic: license-file
|
|
16
|
+
|
|
17
|
+
# ColorCorrectionPipeline
|
|
18
|
+
|
|
19
|
+
A step-wise, end-to-end color‐correction pipeline for digital images.
|
|
20
|
+
This package combines flat-field correction (**FFC**), gamma correction (**GC**), white-balance (**WB**), and color-correction (**CC**) into a single, easy-to-use workflow. Once you “train” on an image containing a color-checker (and a white-field for FFC), you can apply the learned corrections to any new image (no chart required, as long as it was captured with the same camera, and in the same lighting conditions).
|
|
21
|
+
|
|
22
|
+
This package builds upon a previous package [ML_ColorCorrection_tool](https://github.com/collinswakholi/ML_ColorCorrection_tool) package.
|
|
23
|
+
|
|
24
|
+
---
|
|
25
|
+
## Features
|
|
26
|
+
|
|
27
|
+
- **Flat-Field Correction (FFC)**
|
|
28
|
+
Automatically detect or manually crops “white” background image. Fits an n-degree 2D surface to describe the light distribution in the FOV, extrapolates to full image.
|
|
29
|
+
|
|
30
|
+
- **Saturation Check / Extrapolation**
|
|
31
|
+
Identify and fix saturated patches on the chart before proceeding, ensuring accurate downstream corrections.
|
|
32
|
+
|
|
33
|
+
- **Gamma Correction (GC)**
|
|
34
|
+
Fits an optimum polynomial (up to configurable degree) mapping between measured neutral patch intensities and reference values, and applies it to the entire image.
|
|
35
|
+
|
|
36
|
+
- **White Balance (WB)**
|
|
37
|
+
Diagonal white-balance correction using the neutral patches of the color checker. Gets diagonal matrix and applies it to the entire image.
|
|
38
|
+
|
|
39
|
+
- **Color Correction (CC)**
|
|
40
|
+
Two methods:
|
|
41
|
+
- **Conventional (“conv”)**: configurable polynomial expansion with the Finlayson 2015 method, produces a 3xn matrix that can be applied to the entire image.
|
|
42
|
+
- **Custom (“ours”)**: uses ML with linear regression, pls regression, or neural networks, produces a model that can be applied to the entire image.
|
|
43
|
+
|
|
44
|
+
- **Predict on New Images**
|
|
45
|
+
Once models are saved, apply FFC → GC → WB → CC in sequence to any new photograph, no chart needed.
|
|
46
|
+
|
|
47
|
+
---
|
|
48
|
+
## Installation
|
|
49
|
+
|
|
50
|
+
### From PyPI
|
|
51
|
+
|
|
52
|
+
```bash
|
|
53
|
+
pip install ColorCorrectionPipeline
|
|
54
|
+
```
|
|
55
|
+
|
|
56
|
+
### From GitHub
|
|
57
|
+
|
|
58
|
+
```bash
|
|
59
|
+
git clone https://github.com/collinswakholi/ColorCorrectionPipeline.git
|
|
60
|
+
cd ColorCorrectionPipeline
|
|
61
|
+
pip install -e .
|
|
62
|
+
```
|
|
63
|
+
|
|
64
|
+
### Dependencies
|
|
65
|
+
The Dependencies (Automatically Installed, from `requirements.txt`) are:
|
|
66
|
+
- `numpy`
|
|
67
|
+
- `scipy`
|
|
68
|
+
- `scikit-learn`
|
|
69
|
+
- `torch`
|
|
70
|
+
- `opencv-python`
|
|
71
|
+
- `colour-science`
|
|
72
|
+
- `color-checker-detection`
|
|
73
|
+
- `ultralytics`
|
|
74
|
+
- `scikit-image`
|
|
75
|
+
- `plotly`
|
|
76
|
+
- `matplotlib`
|
|
77
|
+
- `pandas`
|
|
78
|
+
- `difflib`
|
|
79
|
+
- `glob2`
|
|
80
|
+
- `statsmodels`
|
|
81
|
+
- `seaborn`
|
|
82
|
+
- `pytest`
|
|
83
|
+
|
|
84
|
+
If you already have a `requirements.txt` file in your cloned repository, you can install them using `pip install -r requirements.txt`.
|
|
85
|
+
|
|
86
|
+
---
|
|
87
|
+
## Usage
|
|
88
|
+
Below is a simple example of how to use the package (found in `src/quick_run.py`):
|
|
89
|
+
```python
|
|
90
|
+
import os
|
|
91
|
+
import cv2
|
|
92
|
+
import numpy as np
|
|
93
|
+
import pandas as pd
|
|
94
|
+
|
|
95
|
+
from ColorCorrectionPipeline.core import ColorCorrection, Config
|
|
96
|
+
from ColorCorrectionPipeline.key_functions import to_float64
|
|
97
|
+
|
|
98
|
+
# ─────────────────────────────────────────────────────────────────────────────
|
|
99
|
+
# 1. File paths
|
|
100
|
+
# ─────────────────────────────────────────────────────────────────────────────
|
|
101
|
+
IMG_PATH = "Data/Images/Sample_1.JPG" # Image containing color checker
|
|
102
|
+
WHITE_PATH = "Data/Images/white.JPG" # Optional White background image for FFC
|
|
103
|
+
YOLO_MODEL_PATH = "Data/Models/plane_det_model_YOLO_512_n.pt" # Optional YOLO .pt
|
|
104
|
+
TEST_IMAGE_PATH = "Data/Images/Sample_2.JPG" # Optional New image for prediction
|
|
105
|
+
|
|
106
|
+
# Output directory (only used if config.save=True)
|
|
107
|
+
SAVE_PATH = os.path.join(os.getcwd(), "results")
|
|
108
|
+
|
|
109
|
+
|
|
110
|
+
# ─────────────────────────────────────────────────────────────────────────────
|
|
111
|
+
# 2. Load images and convert to RGB float64 in [0,1]
|
|
112
|
+
# ─────────────────────────────────────────────────────────────────────────────
|
|
113
|
+
img_bgr = cv2.imread(IMG_PATH)
|
|
114
|
+
img_rgb = to_float64(img_bgr[:, :, ::-1])# convert to RGB (64bit floats, 0-1, RGB)
|
|
115
|
+
|
|
116
|
+
white_bgr = cv2.imread(WHITE_PATH)
|
|
117
|
+
|
|
118
|
+
test_bgr = cv2.imread(TEST_IMAGE_PATH)
|
|
119
|
+
test_rgb = to_float64(test_bgr[:, :, ::-1])# convert to RGB (64bit floats, 0-1, RGB)
|
|
120
|
+
|
|
121
|
+
img_name = os.path.splitext(os.path.basename(IMG_PATH))[0]
|
|
122
|
+
|
|
123
|
+
|
|
124
|
+
# ─────────────────────────────────────────────────────────────────────────────
|
|
125
|
+
# 3. Configure per‐stage parameters
|
|
126
|
+
# ─────────────────────────────────────────────────────────────────────────────
|
|
127
|
+
|
|
128
|
+
ffc_kwargs = {
|
|
129
|
+
"model_path": YOLO_MODEL_PATH, # Optional, for automatic white plane ROI detection
|
|
130
|
+
"manual_crop": False, # Optional, for manual white plane ROI selection
|
|
131
|
+
"show": False, # Whether to show intermediate plots
|
|
132
|
+
"bins": 50, # Number of bins used for sampling the intesity profile of the white plane
|
|
133
|
+
"smooth_window": 5, # Window size for smoothing the intensity profile
|
|
134
|
+
"get_deltaE": True, # Whether to calculate and return deltaE (CIEDE2000)
|
|
135
|
+
"fit_method": "pls", # can be linear, nn, pls, or svm, default is linear
|
|
136
|
+
"interactions": True, # Whether to include interactions in the polynomial expansion
|
|
137
|
+
"max_iter": 1000, # Maximum number of iterations
|
|
138
|
+
"tol": 1e-8, # Tolerance for stopping criterion
|
|
139
|
+
"verbose": False, # Whether to print verbose output
|
|
140
|
+
"random_seed": 0, # Random seed
|
|
141
|
+
}
|
|
142
|
+
|
|
143
|
+
# Gamma Correction (GC) kwargs:
|
|
144
|
+
gc_kwargs = {
|
|
145
|
+
"max_degree": 5, # Maximum polynomial degree for fitting gamma profile
|
|
146
|
+
"show": False, # Whether to show intermediate plots
|
|
147
|
+
"get_deltaE": True, # Whether to calculate and return deltaE (CIEDE2000)
|
|
148
|
+
}
|
|
149
|
+
|
|
150
|
+
# White Balance (WB) kwargs:
|
|
151
|
+
wb_kwargs = {
|
|
152
|
+
"show": False, # Whether to show intermediate plots
|
|
153
|
+
"get_deltaE": True, # Whether to calculate and return deltaE (CIEDE2000)
|
|
154
|
+
}
|
|
155
|
+
|
|
156
|
+
# Color Correction (CC) kwargs:
|
|
157
|
+
cc_kwargs = {
|
|
158
|
+
'cc_method': 'ours', # method to use for color correction
|
|
159
|
+
'method': 'Finlayson 2015', # if cc_method is 'conv', this is the method
|
|
160
|
+
'mtd': 'nn', # if cc_method is 'ours', this is the method, linear, nn, pls
|
|
161
|
+
|
|
162
|
+
'degree': 2, # degree of polynomial to fit
|
|
163
|
+
'max_iterations': 10000, # max iterations for fitting
|
|
164
|
+
'random_state': 0, # random seed
|
|
165
|
+
'tol': 1e-8, # tolerance for fitting
|
|
166
|
+
'verbose': False, # whether to print verbose output
|
|
167
|
+
'param_search': False, # whether to use parameter search
|
|
168
|
+
'show': False, # whether to show plots
|
|
169
|
+
'get_deltaE': True, # whether to compute deltaE
|
|
170
|
+
'n_samples': 50, # number of samples to use for parameter search
|
|
171
|
+
|
|
172
|
+
# only if mtd == 'pls'
|
|
173
|
+
'ncomp': 1, # number of components to use
|
|
174
|
+
|
|
175
|
+
# only if mtd == 'nn'
|
|
176
|
+
'nlayers': 100, # number of layers to use
|
|
177
|
+
'hidden_layers': [64, 32, 16], # hidden layers for neural network
|
|
178
|
+
'learning_rate': 0.001, # learning rate for neural network
|
|
179
|
+
'batch_size': 16, # batch size for neural network
|
|
180
|
+
'patience': 10, # patience for early stopping
|
|
181
|
+
'dropout_rate': 0.2, # dropout rate for neural network
|
|
182
|
+
'optim_type': 'adam', # optimizer type for neural network
|
|
183
|
+
'use_batch_norm': True, # whether to use batch normalization
|
|
184
|
+
}
|
|
185
|
+
|
|
186
|
+
# ─────────────────────────────────────────────────────────────────────────────
|
|
187
|
+
# 4. Build Config and run the Training Pipeline
|
|
188
|
+
# ─────────────────────────────────────────────────────────────────────────────
|
|
189
|
+
config = Config(
|
|
190
|
+
do_ffc=True, # Change to False if you don't want to run FFC
|
|
191
|
+
do_gc=True, # Change to False if you don't want to run GC
|
|
192
|
+
do_wb=True, # Change to False if you don't want to run WB
|
|
193
|
+
do_cc=True, # Change to False if you don't want to run CC
|
|
194
|
+
save=False, # Change to True if you want to save models + CSVs
|
|
195
|
+
save_path=SAVE_PATH, # Directory for saving outputs (models & CSV)
|
|
196
|
+
check_saturation=True, # Change to False if you don't want to check if color chart patches are saturated
|
|
197
|
+
REF_ILLUMINANT=None, # Defaults to D65; supply np.ndarray if needed
|
|
198
|
+
FFC_kwargs=ffc_kwargs,
|
|
199
|
+
GC_kwargs=gc_kwargs,
|
|
200
|
+
WB_kwargs=wb_kwargs,
|
|
201
|
+
CC_kwargs=cc_kwargs,
|
|
202
|
+
)
|
|
203
|
+
|
|
204
|
+
cc = ColorCorrection() # Initialize ColorCorrection class
|
|
205
|
+
metrics, corrected_imgs, errors = cc.run(
|
|
206
|
+
Image=img_rgb,
|
|
207
|
+
White_Image=white_bgr, # Optional, you do not have to pass anything
|
|
208
|
+
name_=img_name,
|
|
209
|
+
config=config,
|
|
210
|
+
)
|
|
211
|
+
|
|
212
|
+
# Convert metrics (dict) → pandas.DataFrame for display
|
|
213
|
+
metrics_df = pd.DataFrame.from_dict(metrics)
|
|
214
|
+
print("Per-patch and summary metrics for each stage:\n", metrics_df.head())
|
|
215
|
+
|
|
216
|
+
# ─────────────────────────────────────────────────────────────────────────────
|
|
217
|
+
# 5. Predict on a New Image (no color-checker required)
|
|
218
|
+
# ─────────────────────────────────────────────────────────────────────────────
|
|
219
|
+
test_results = cc.predict_image(test_rgb, show=True)
|
|
220
|
+
```
|
|
221
|
+
|
|
222
|
+
### Assuming you have;
|
|
223
|
+
1. A photograph with a color checker chart: `Data/Images/Sample_1.JPG`,
|
|
224
|
+
2. An optional matching white-field image (for FFC): `Data/Images/white.JPG`,
|
|
225
|
+
3. A YOLO model for detecting the white plane (optional if you want automatic ROI): `Data/Models/plane_det_model_YOLO_512_n.pt`
|
|
226
|
+
4. Another optional image (no chart required) to test the learned corrections: `Data/Images/Sample_2.JPG`
|
|
227
|
+
|
|
228
|
+
## Sample Results
|
|
229
|
+
Before color correction:
|
|
230
|
+

|
|
231
|
+
|
|
232
|
+
Same images after color correction:
|
|
233
|
+

|
|
234
|
+
|
|
235
|
+
## References
|
|
236
|
+
A detailed study that led to this package can be found at: [Awaiting Publication](https://www.yet_to_publish.com).
|
|
237
|
+
|
|
238
|
+
key packages used:
|
|
239
|
+
- Colour-science package: [https://colour-science.org](https://colour-science.org)
|
|
240
|
+
- scikit-learn: [https://scikit-learn.org](https://scikit-learn.org)
|
|
241
|
+
- opencv-python: [https://pypi.org/project/opencv-python/](https://pypi.org/project/opencv-python/)
|
|
242
|
+
|
|
243
|
+
|
|
244
|
+
## Contributions
|
|
245
|
+
- [Collins Wakholi](https://github.com/collinswakholi)
|
|
246
|
+
- [Devin A. Rippner](https://github.com/daripp)
|
|
247
|
+
|
|
248
|
+
## Acknowledgements
|
|
249
|
+
I would like to gratefully acknowledge [Devin A. Rippner](https://github.com/daripp), [ORISE](https://orise.orau.gov/index.html), and the [USDA-ARS](https://www.ars.usda.gov) for their invaluable assistance and funding support in the development of this Repo. This project would not have been possible without their guidance and opportunities provided.
|
|
@@ -0,0 +1,233 @@
|
|
|
1
|
+
# ColorCorrectionPipeline
|
|
2
|
+
|
|
3
|
+
A step-wise, end-to-end color‐correction pipeline for digital images.
|
|
4
|
+
This package combines flat-field correction (**FFC**), gamma correction (**GC**), white-balance (**WB**), and color-correction (**CC**) into a single, easy-to-use workflow. Once you “train” on an image containing a color-checker (and a white-field for FFC), you can apply the learned corrections to any new image (no chart required, as long as it was captured with the same camera, and in the same lighting conditions).
|
|
5
|
+
|
|
6
|
+
This package builds upon a previous package [ML_ColorCorrection_tool](https://github.com/collinswakholi/ML_ColorCorrection_tool) package.
|
|
7
|
+
|
|
8
|
+
---
|
|
9
|
+
## Features
|
|
10
|
+
|
|
11
|
+
- **Flat-Field Correction (FFC)**
|
|
12
|
+
Automatically detect or manually crops “white” background image. Fits an n-degree 2D surface to describe the light distribution in the FOV, extrapolates to full image.
|
|
13
|
+
|
|
14
|
+
- **Saturation Check / Extrapolation**
|
|
15
|
+
Identify and fix saturated patches on the chart before proceeding, ensuring accurate downstream corrections.
|
|
16
|
+
|
|
17
|
+
- **Gamma Correction (GC)**
|
|
18
|
+
Fits an optimum polynomial (up to configurable degree) mapping between measured neutral patch intensities and reference values, and applies it to the entire image.
|
|
19
|
+
|
|
20
|
+
- **White Balance (WB)**
|
|
21
|
+
Diagonal white-balance correction using the neutral patches of the color checker. Gets diagonal matrix and applies it to the entire image.
|
|
22
|
+
|
|
23
|
+
- **Color Correction (CC)**
|
|
24
|
+
Two methods:
|
|
25
|
+
- **Conventional (“conv”)**: configurable polynomial expansion with the Finlayson 2015 method, produces a 3xn matrix that can be applied to the entire image.
|
|
26
|
+
- **Custom (“ours”)**: uses ML with linear regression, pls regression, or neural networks, produces a model that can be applied to the entire image.
|
|
27
|
+
|
|
28
|
+
- **Predict on New Images**
|
|
29
|
+
Once models are saved, apply FFC → GC → WB → CC in sequence to any new photograph, no chart needed.
|
|
30
|
+
|
|
31
|
+
---
|
|
32
|
+
## Installation
|
|
33
|
+
|
|
34
|
+
### From PyPI
|
|
35
|
+
|
|
36
|
+
```bash
|
|
37
|
+
pip install ColorCorrectionPipeline
|
|
38
|
+
```
|
|
39
|
+
|
|
40
|
+
### From GitHub
|
|
41
|
+
|
|
42
|
+
```bash
|
|
43
|
+
git clone https://github.com/collinswakholi/ColorCorrectionPipeline.git
|
|
44
|
+
cd ColorCorrectionPipeline
|
|
45
|
+
pip install -e .
|
|
46
|
+
```
|
|
47
|
+
|
|
48
|
+
### Dependencies
|
|
49
|
+
The Dependencies (Automatically Installed, from `requirements.txt`) are:
|
|
50
|
+
- `numpy`
|
|
51
|
+
- `scipy`
|
|
52
|
+
- `scikit-learn`
|
|
53
|
+
- `torch`
|
|
54
|
+
- `opencv-python`
|
|
55
|
+
- `colour-science`
|
|
56
|
+
- `color-checker-detection`
|
|
57
|
+
- `ultralytics`
|
|
58
|
+
- `scikit-image`
|
|
59
|
+
- `plotly`
|
|
60
|
+
- `matplotlib`
|
|
61
|
+
- `pandas`
|
|
62
|
+
- `difflib`
|
|
63
|
+
- `glob2`
|
|
64
|
+
- `statsmodels`
|
|
65
|
+
- `seaborn`
|
|
66
|
+
- `pytest`
|
|
67
|
+
|
|
68
|
+
If you already have a `requirements.txt` file in your cloned repository, you can install them using `pip install -r requirements.txt`.
|
|
69
|
+
|
|
70
|
+
---
|
|
71
|
+
## Usage
|
|
72
|
+
Below is a simple example of how to use the package (found in `src/quick_run.py`):
|
|
73
|
+
```python
|
|
74
|
+
import os
|
|
75
|
+
import cv2
|
|
76
|
+
import numpy as np
|
|
77
|
+
import pandas as pd
|
|
78
|
+
|
|
79
|
+
from ColorCorrectionPipeline.core import ColorCorrection, Config
|
|
80
|
+
from ColorCorrectionPipeline.key_functions import to_float64
|
|
81
|
+
|
|
82
|
+
# ─────────────────────────────────────────────────────────────────────────────
|
|
83
|
+
# 1. File paths
|
|
84
|
+
# ─────────────────────────────────────────────────────────────────────────────
|
|
85
|
+
IMG_PATH = "Data/Images/Sample_1.JPG" # Image containing color checker
|
|
86
|
+
WHITE_PATH = "Data/Images/white.JPG" # Optional White background image for FFC
|
|
87
|
+
YOLO_MODEL_PATH = "Data/Models/plane_det_model_YOLO_512_n.pt" # Optional YOLO .pt
|
|
88
|
+
TEST_IMAGE_PATH = "Data/Images/Sample_2.JPG" # Optional New image for prediction
|
|
89
|
+
|
|
90
|
+
# Output directory (only used if config.save=True)
|
|
91
|
+
SAVE_PATH = os.path.join(os.getcwd(), "results")
|
|
92
|
+
|
|
93
|
+
|
|
94
|
+
# ─────────────────────────────────────────────────────────────────────────────
|
|
95
|
+
# 2. Load images and convert to RGB float64 in [0,1]
|
|
96
|
+
# ─────────────────────────────────────────────────────────────────────────────
|
|
97
|
+
img_bgr = cv2.imread(IMG_PATH)
|
|
98
|
+
img_rgb = to_float64(img_bgr[:, :, ::-1])# convert to RGB (64bit floats, 0-1, RGB)
|
|
99
|
+
|
|
100
|
+
white_bgr = cv2.imread(WHITE_PATH)
|
|
101
|
+
|
|
102
|
+
test_bgr = cv2.imread(TEST_IMAGE_PATH)
|
|
103
|
+
test_rgb = to_float64(test_bgr[:, :, ::-1])# convert to RGB (64bit floats, 0-1, RGB)
|
|
104
|
+
|
|
105
|
+
img_name = os.path.splitext(os.path.basename(IMG_PATH))[0]
|
|
106
|
+
|
|
107
|
+
|
|
108
|
+
# ─────────────────────────────────────────────────────────────────────────────
|
|
109
|
+
# 3. Configure per‐stage parameters
|
|
110
|
+
# ─────────────────────────────────────────────────────────────────────────────
|
|
111
|
+
|
|
112
|
+
ffc_kwargs = {
|
|
113
|
+
"model_path": YOLO_MODEL_PATH, # Optional, for automatic white plane ROI detection
|
|
114
|
+
"manual_crop": False, # Optional, for manual white plane ROI selection
|
|
115
|
+
"show": False, # Whether to show intermediate plots
|
|
116
|
+
"bins": 50, # Number of bins used for sampling the intesity profile of the white plane
|
|
117
|
+
"smooth_window": 5, # Window size for smoothing the intensity profile
|
|
118
|
+
"get_deltaE": True, # Whether to calculate and return deltaE (CIEDE2000)
|
|
119
|
+
"fit_method": "pls", # can be linear, nn, pls, or svm, default is linear
|
|
120
|
+
"interactions": True, # Whether to include interactions in the polynomial expansion
|
|
121
|
+
"max_iter": 1000, # Maximum number of iterations
|
|
122
|
+
"tol": 1e-8, # Tolerance for stopping criterion
|
|
123
|
+
"verbose": False, # Whether to print verbose output
|
|
124
|
+
"random_seed": 0, # Random seed
|
|
125
|
+
}
|
|
126
|
+
|
|
127
|
+
# Gamma Correction (GC) kwargs:
|
|
128
|
+
gc_kwargs = {
|
|
129
|
+
"max_degree": 5, # Maximum polynomial degree for fitting gamma profile
|
|
130
|
+
"show": False, # Whether to show intermediate plots
|
|
131
|
+
"get_deltaE": True, # Whether to calculate and return deltaE (CIEDE2000)
|
|
132
|
+
}
|
|
133
|
+
|
|
134
|
+
# White Balance (WB) kwargs:
|
|
135
|
+
wb_kwargs = {
|
|
136
|
+
"show": False, # Whether to show intermediate plots
|
|
137
|
+
"get_deltaE": True, # Whether to calculate and return deltaE (CIEDE2000)
|
|
138
|
+
}
|
|
139
|
+
|
|
140
|
+
# Color Correction (CC) kwargs:
|
|
141
|
+
cc_kwargs = {
|
|
142
|
+
'cc_method': 'ours', # method to use for color correction
|
|
143
|
+
'method': 'Finlayson 2015', # if cc_method is 'conv', this is the method
|
|
144
|
+
'mtd': 'nn', # if cc_method is 'ours', this is the method, linear, nn, pls
|
|
145
|
+
|
|
146
|
+
'degree': 2, # degree of polynomial to fit
|
|
147
|
+
'max_iterations': 10000, # max iterations for fitting
|
|
148
|
+
'random_state': 0, # random seed
|
|
149
|
+
'tol': 1e-8, # tolerance for fitting
|
|
150
|
+
'verbose': False, # whether to print verbose output
|
|
151
|
+
'param_search': False, # whether to use parameter search
|
|
152
|
+
'show': False, # whether to show plots
|
|
153
|
+
'get_deltaE': True, # whether to compute deltaE
|
|
154
|
+
'n_samples': 50, # number of samples to use for parameter search
|
|
155
|
+
|
|
156
|
+
# only if mtd == 'pls'
|
|
157
|
+
'ncomp': 1, # number of components to use
|
|
158
|
+
|
|
159
|
+
# only if mtd == 'nn'
|
|
160
|
+
'nlayers': 100, # number of layers to use
|
|
161
|
+
'hidden_layers': [64, 32, 16], # hidden layers for neural network
|
|
162
|
+
'learning_rate': 0.001, # learning rate for neural network
|
|
163
|
+
'batch_size': 16, # batch size for neural network
|
|
164
|
+
'patience': 10, # patience for early stopping
|
|
165
|
+
'dropout_rate': 0.2, # dropout rate for neural network
|
|
166
|
+
'optim_type': 'adam', # optimizer type for neural network
|
|
167
|
+
'use_batch_norm': True, # whether to use batch normalization
|
|
168
|
+
}
|
|
169
|
+
|
|
170
|
+
# ─────────────────────────────────────────────────────────────────────────────
|
|
171
|
+
# 4. Build Config and run the Training Pipeline
|
|
172
|
+
# ─────────────────────────────────────────────────────────────────────────────
|
|
173
|
+
config = Config(
|
|
174
|
+
do_ffc=True, # Change to False if you don't want to run FFC
|
|
175
|
+
do_gc=True, # Change to False if you don't want to run GC
|
|
176
|
+
do_wb=True, # Change to False if you don't want to run WB
|
|
177
|
+
do_cc=True, # Change to False if you don't want to run CC
|
|
178
|
+
save=False, # Change to True if you want to save models + CSVs
|
|
179
|
+
save_path=SAVE_PATH, # Directory for saving outputs (models & CSV)
|
|
180
|
+
check_saturation=True, # Change to False if you don't want to check if color chart patches are saturated
|
|
181
|
+
REF_ILLUMINANT=None, # Defaults to D65; supply np.ndarray if needed
|
|
182
|
+
FFC_kwargs=ffc_kwargs,
|
|
183
|
+
GC_kwargs=gc_kwargs,
|
|
184
|
+
WB_kwargs=wb_kwargs,
|
|
185
|
+
CC_kwargs=cc_kwargs,
|
|
186
|
+
)
|
|
187
|
+
|
|
188
|
+
cc = ColorCorrection() # Initialize ColorCorrection class
|
|
189
|
+
metrics, corrected_imgs, errors = cc.run(
|
|
190
|
+
Image=img_rgb,
|
|
191
|
+
White_Image=white_bgr, # Optional, you do not have to pass anything
|
|
192
|
+
name_=img_name,
|
|
193
|
+
config=config,
|
|
194
|
+
)
|
|
195
|
+
|
|
196
|
+
# Convert metrics (dict) → pandas.DataFrame for display
|
|
197
|
+
metrics_df = pd.DataFrame.from_dict(metrics)
|
|
198
|
+
print("Per-patch and summary metrics for each stage:\n", metrics_df.head())
|
|
199
|
+
|
|
200
|
+
# ─────────────────────────────────────────────────────────────────────────────
|
|
201
|
+
# 5. Predict on a New Image (no color-checker required)
|
|
202
|
+
# ─────────────────────────────────────────────────────────────────────────────
|
|
203
|
+
test_results = cc.predict_image(test_rgb, show=True)
|
|
204
|
+
```
|
|
205
|
+
|
|
206
|
+
### Assuming you have;
|
|
207
|
+
1. A photograph with a color checker chart: `Data/Images/Sample_1.JPG`,
|
|
208
|
+
2. An optional matching white-field image (for FFC): `Data/Images/white.JPG`,
|
|
209
|
+
3. A YOLO model for detecting the white plane (optional if you want automatic ROI): `Data/Models/plane_det_model_YOLO_512_n.pt`
|
|
210
|
+
4. Another optional image (no chart required) to test the learned corrections: `Data/Images/Sample_2.JPG`
|
|
211
|
+
|
|
212
|
+
## Sample Results
|
|
213
|
+
Before color correction:
|
|
214
|
+

|
|
215
|
+
|
|
216
|
+
Same images after color correction:
|
|
217
|
+

|
|
218
|
+
|
|
219
|
+
## References
|
|
220
|
+
A detailed study that led to this package can be found at: [Awaiting Publication](https://www.yet_to_publish.com).
|
|
221
|
+
|
|
222
|
+
key packages used:
|
|
223
|
+
- Colour-science package: [https://colour-science.org](https://colour-science.org)
|
|
224
|
+
- scikit-learn: [https://scikit-learn.org](https://scikit-learn.org)
|
|
225
|
+
- opencv-python: [https://pypi.org/project/opencv-python/](https://pypi.org/project/opencv-python/)
|
|
226
|
+
|
|
227
|
+
|
|
228
|
+
## Contributions
|
|
229
|
+
- [Collins Wakholi](https://github.com/collinswakholi)
|
|
230
|
+
- [Devin A. Rippner](https://github.com/daripp)
|
|
231
|
+
|
|
232
|
+
## Acknowledgements
|
|
233
|
+
I would like to gratefully acknowledge [Devin A. Rippner](https://github.com/daripp), [ORISE](https://orise.orau.gov/index.html), and the [USDA-ARS](https://www.ars.usda.gov) for their invaluable assistance and funding support in the development of this Repo. This project would not have been possible without their guidance and opportunities provided.
|
|
@@ -0,0 +1,24 @@
|
|
|
1
|
+
[build-system]
|
|
2
|
+
requires = ["setuptools>=42", "wheel"]
|
|
3
|
+
build-backend = "setuptools.build_meta"
|
|
4
|
+
|
|
5
|
+
[project]
|
|
6
|
+
name = "ColorCorrectionPipeline"
|
|
7
|
+
version = "1.01.1"
|
|
8
|
+
description = "A Stepwise color‐correction pipeline with flat‐field, gamma, white‐balance, and color‐correction stages."
|
|
9
|
+
readme = "README.md"
|
|
10
|
+
license = { file = "LICENSE" }
|
|
11
|
+
authors = [
|
|
12
|
+
{ name = "Collins Wakholi", email = "wcoln@yahoo.com" },
|
|
13
|
+
{ name = "Devin A. Rippner", email = "devinrippner@gmail.com" },
|
|
14
|
+
]
|
|
15
|
+
keywords = ["color", "image-processing", "flat-field", "gamma-correction", "white-balance", "color-correction"]
|
|
16
|
+
classifiers = [
|
|
17
|
+
"Programming Language :: Python :: 3",
|
|
18
|
+
"License :: OSI Approved :: MIT License",
|
|
19
|
+
"Operating System :: OS Independent",
|
|
20
|
+
]
|
|
21
|
+
|
|
22
|
+
[project.urls]
|
|
23
|
+
Homepage = "https://github.com/collinswakholi/ColorCorrectionPackage"
|
|
24
|
+
"Bug Tracker" = "https://github.com/collinswakholi/ColorCorrectionPackage/issues"
|