sat-water 0.1.0__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (37) hide show
  1. sat_water-0.1.0/.github/workflows/ci.yml +110 -0
  2. sat_water-0.1.0/.github/workflows/publish-test.yml +65 -0
  3. sat_water-0.1.0/.github/workflows/publish.yml +61 -0
  4. sat_water-0.1.0/.gitignore +8 -0
  5. sat_water-0.1.0/.pre-commit-config.yaml +11 -0
  6. sat_water-0.1.0/LICENSE +21 -0
  7. sat_water-0.1.0/PKG-INFO +347 -0
  8. sat_water-0.1.0/README.md +298 -0
  9. sat_water-0.1.0/assets/results/history_resnet34.png +0 -0
  10. sat_water-0.1.0/assets/results/history_unet.png +0 -0
  11. sat_water-0.1.0/assets/results/historyresnet34(2).png +0 -0
  12. sat_water-0.1.0/assets/results/prediciton_resnet34(2).png +0 -0
  13. sat_water-0.1.0/assets/results/prediciton_resnet34.png +0 -0
  14. sat_water-0.1.0/assets/results/prediciton_test.png +0 -0
  15. sat_water-0.1.0/assets/results/prediciton_unet.png +0 -0
  16. sat_water-0.1.0/assets/results/satimage1.jpg +0 -0
  17. sat_water-0.1.0/assets/results/test2.jpg +0 -0
  18. sat_water-0.1.0/assets/results/test2.png +0 -0
  19. sat_water-0.1.0/pyproject.toml +70 -0
  20. sat_water-0.1.0/requirements.txt +93 -0
  21. sat_water-0.1.0/satwater/__init__.py +4 -0
  22. sat_water-0.1.0/satwater/builders.py +179 -0
  23. sat_water-0.1.0/satwater/inference.py +313 -0
  24. sat_water-0.1.0/satwater/models.py +229 -0
  25. sat_water-0.1.0/satwater/preprocess.py +161 -0
  26. sat_water-0.1.0/satwater/utils.py +45 -0
  27. sat_water-0.1.0/satwater/weights.py +176 -0
  28. sat_water-0.1.0/scripts/__init__.py +0 -0
  29. sat_water-0.1.0/scripts/predict.py +44 -0
  30. sat_water-0.1.0/scripts/train.py +80 -0
  31. sat_water-0.1.0/scripts/weights.py +203 -0
  32. sat_water-0.1.0/tests/__init__.py +9 -0
  33. sat_water-0.1.0/tests/conftest.py +18 -0
  34. sat_water-0.1.0/tests/test_base.py +18 -0
  35. sat_water-0.1.0/tests/test_inference.py +92 -0
  36. sat_water-0.1.0/tests/test_models.py +50 -0
  37. sat_water-0.1.0/tests/test_preprocess.py +91 -0
@@ -0,0 +1,110 @@
1
+ name: CI
2
+
3
+ on:
4
+ push:
5
+ pull_request:
6
+
7
+ jobs:
8
+ base:
9
+ name: base • py${{ matrix.python-version }} • ubuntu
10
+ runs-on: ubuntu-latest
11
+
12
+ strategy:
13
+ fail-fast: false
14
+ matrix:
15
+ python-version: ["3.9", "3.10", "3.11", "3.12"]
16
+
17
+ env:
18
+ MPLBACKEND: "Agg"
19
+
20
+ steps:
21
+ - uses: actions/checkout@v4
22
+
23
+ - name: Set up Python
24
+ uses: actions/setup-python@v5
25
+ with:
26
+ python-version: ${{ matrix.python-version }}
27
+ cache: pip
28
+
29
+ - name: Upgrade pip tooling
30
+ run: |
31
+ python -m pip install --upgrade pip setuptools wheel
32
+
33
+ - name: Install sat-water (no TF)
34
+ run: |
35
+ pip install -e .
36
+
37
+ - name: Install CI tools
38
+ run: |
39
+ pip install ruff black pytest
40
+
41
+ - name: Lint (ruff)
42
+ run: ruff check .
43
+
44
+ - name: Format check (black)
45
+ run: black --check .
46
+
47
+ - name: Run tests (base)
48
+ run: |
49
+ pytest -q -m "not tf"
50
+
51
+ tf:
52
+ name: tf • py${{ matrix.python-version }} • ubuntu
53
+ runs-on: ubuntu-latest
54
+ needs: base
55
+
56
+ strategy:
57
+ fail-fast: false
58
+ matrix:
59
+ python-version: ["3.10", "3.11"]
60
+
61
+ env:
62
+ TF_CPP_MIN_LOG_LEVEL: "2"
63
+ MPLBACKEND: "Agg"
64
+ SM_FRAMEWORK: "tf.keras"
65
+ TF_USE_LEGACY_KERAS: "1"
66
+
67
+ steps:
68
+ - uses: actions/checkout@v4
69
+
70
+ - name: Set up Python
71
+ uses: actions/setup-python@v5
72
+ with:
73
+ python-version: ${{ matrix.python-version }}
74
+ cache: pip
75
+
76
+ - name: Upgrade pip tooling
77
+ run: |
78
+ python -m pip install --upgrade pip setuptools wheel
79
+
80
+ - name: Install sat-water + TensorFlow extras
81
+ run: |
82
+ pip install -e ".[tf]"
83
+ pip install ruff black pytest
84
+
85
+ - name: Show versions (debug)
86
+ run: |
87
+ python -c "import sys; print(sys.version)"
88
+ python -c "import tensorflow as tf; print('TF:', tf.__version__)"
89
+ python -c "import keras; print('keras:', keras.__version__)"
90
+ python -c "import segmentation_models as sm; print('segmentation_models:', sm.__version__)"
91
+ pip freeze | sort
92
+
93
+ - name: Run tests (TF)
94
+ run: |
95
+ pytest -q -m "tf"
96
+
97
+ build:
98
+ name: Build dist
99
+ runs-on: ubuntu-latest
100
+ steps:
101
+ - uses: actions/checkout@v4
102
+
103
+ - uses: actions/setup-python@v5
104
+ with:
105
+ python-version: "3.11"
106
+
107
+ - name: Build
108
+ run: |
109
+ python -m pip install --upgrade pip build
110
+ python -m build
@@ -0,0 +1,65 @@
1
+ name: Publish to TestPyPI
2
+
3
+ on:
4
+ workflow_dispatch:
5
+
6
+ jobs:
7
+ build-and-publish:
8
+ runs-on: ubuntu-latest
9
+ permissions:
10
+ contents: read
11
+
12
+ steps:
13
+ - uses: actions/checkout@v4
14
+
15
+ - name: Set up Python
16
+ uses: actions/setup-python@v5
17
+ with:
18
+ python-version: "3.11"
19
+
20
+ - name: Install build tools
21
+ run: |
22
+ python -m pip install --upgrade pip
23
+ python -m pip install build twine
24
+
25
+ - name: Clean previous builds
26
+ run: |
27
+ rm -rf dist build *.egg-info
28
+
29
+ - name: Build dist
30
+ run: |
31
+ python -m build
32
+ ls -lah dist
33
+
34
+ - name: Twine check
35
+ run: |
36
+ twine check dist/*
37
+
38
+ - name: Inspect dist contents
39
+ run: |
40
+ ls -lah dist
41
+ python - <<'PY'
42
+ import glob, zipfile, tarfile
43
+ wheels = glob.glob("dist/*.whl")
44
+ sdists = glob.glob("dist/*.tar.gz")
45
+ for w in wheels:
46
+ print("\n--- WHEEL:", w)
47
+ with zipfile.ZipFile(w) as z:
48
+ for n in sorted(z.namelist()):
49
+ if n.startswith("satwater/") or n.endswith("METADATA"):
50
+ print(n)
51
+ for s in sdists:
52
+ print("\n--- SDIST:", s)
53
+ with tarfile.open(s, "r:gz") as t:
54
+ names = sorted(t.getnames())
55
+ for n in names:
56
+ if "/satwater/" in n or n.endswith("pyproject.toml") or n.endswith("README.md") or n.endswith("LICENSE"):
57
+ print(n)
58
+ PY
59
+
60
+ - name: Publish to TestPyPI
61
+ env:
62
+ TWINE_USERNAME: __token__
63
+ TWINE_PASSWORD: ${{ secrets.TESTPYPI_API_TOKEN }}
64
+ run: |
65
+ twine upload --repository-url https://test.pypi.org/legacy/ dist/*
@@ -0,0 +1,61 @@
1
+ name: Publish to PyPI
2
+
3
+ on:
4
+ release:
5
+ types: [published]
6
+ workflow_dispatch:
7
+
8
+ jobs:
9
+ build:
10
+ name: Build dist
11
+ runs-on: ubuntu-latest
12
+ permissions:
13
+ contents: read
14
+
15
+ steps:
16
+ - uses: actions/checkout@v4
17
+
18
+ - name: Set up Python
19
+ uses: actions/setup-python@v5
20
+ with:
21
+ python-version: "3.11"
22
+
23
+ - name: Install build tools
24
+ run: |
25
+ python -m pip install --upgrade pip
26
+ python -m pip install build twine
27
+
28
+ - name: Build
29
+ run: |
30
+ python -m build
31
+
32
+ - name: Twine check
33
+ run: |
34
+ twine check dist/*
35
+
36
+ - name: Upload dist artifacts
37
+ uses: actions/upload-artifact@v4
38
+ with:
39
+ name: dist
40
+ path: dist/*
41
+
42
+ publish:
43
+ name: Publish to PyPI
44
+ needs: build
45
+ runs-on: ubuntu-latest
46
+
47
+ permissions:
48
+ id-token: write
49
+ contents: read
50
+
51
+ steps:
52
+ - name: Download dist artifacts
53
+ uses: actions/download-artifact@v4
54
+ with:
55
+ name: dist
56
+ path: dist
57
+
58
+ - name: Publish (PyPI)
59
+ uses: pypa/gh-action-pypi-publish@release/v1
60
+ with:
61
+ packages-dir: dist
@@ -0,0 +1,8 @@
1
+ .DS_Store
2
+ env/
3
+ .ruff_cache
4
+ **__pycache__**
5
+ .pytest_cache/
6
+ dist/
7
+ main.py
8
+ examples/
@@ -0,0 +1,11 @@
1
+ repos:
2
+ - repo: https://github.com/astral-sh/ruff-pre-commit
3
+ rev: v0.6.9
4
+ hooks:
5
+ - id: ruff
6
+ args: [--fix]
7
+
8
+ - repo: https://github.com/psf/black
9
+ rev: 24.10.0
10
+ hooks:
11
+ - id: black
@@ -0,0 +1,21 @@
1
+ MIT License
2
+
3
+ Copyright (c) 2023 Oluwabusayo Alabi
4
+
5
+ Permission is hereby granted, free of charge, to any person obtaining a copy
6
+ of this software and associated documentation files (the "Software"), to deal
7
+ in the Software without restriction, including without limitation the rights
8
+ to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
9
+ copies of the Software, and to permit persons to whom the Software is
10
+ furnished to do so, subject to the following conditions:
11
+
12
+ The above copyright notice and this permission notice shall be included in all
13
+ copies or substantial portions of the Software.
14
+
15
+ THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16
+ IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17
+ FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
18
+ AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19
+ LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
20
+ OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
21
+ SOFTWARE.
@@ -0,0 +1,347 @@
1
+ Metadata-Version: 2.4
2
+ Name: sat-water
3
+ Version: 0.1.0
4
+ Summary: Satellite water body segmentation with pretrained weights hosted on Hugging Face.
5
+ Project-URL: Homepage, https://github.com/busayojee/sat-water
6
+ Project-URL: Repository, https://github.com/busayojee/sat-water
7
+ Author: Busayo Alabi
8
+ License: MIT License
9
+
10
+ Copyright (c) 2023 Oluwabusayo Alabi
11
+
12
+ Permission is hereby granted, free of charge, to any person obtaining a copy
13
+ of this software and associated documentation files (the "Software"), to deal
14
+ in the Software without restriction, including without limitation the rights
15
+ to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
16
+ copies of the Software, and to permit persons to whom the Software is
17
+ furnished to do so, subject to the following conditions:
18
+
19
+ The above copyright notice and this permission notice shall be included in all
20
+ copies or substantial portions of the Software.
21
+
22
+ THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
23
+ IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
24
+ FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
25
+ AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
26
+ LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
27
+ OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
28
+ SOFTWARE.
29
+ License-File: LICENSE
30
+ Keywords: computer-vision,satellite,segmentation,water
31
+ Classifier: License :: OSI Approved :: MIT License
32
+ Classifier: Operating System :: OS Independent
33
+ Classifier: Programming Language :: Python :: 3
34
+ Requires-Python: >=3.9
35
+ Requires-Dist: huggingface-hub>=0.20
36
+ Requires-Dist: matplotlib>=3.5
37
+ Requires-Dist: numpy>=1.23
38
+ Requires-Dist: pillow>=9
39
+ Provides-Extra: tf
40
+ Requires-Dist: efficientnet==1.0.0; extra == 'tf'
41
+ Requires-Dist: image-classifiers==1.0.0; extra == 'tf'
42
+ Requires-Dist: keras-applications<=1.0.8; extra == 'tf'
43
+ Requires-Dist: segmentation-models==1.0.1; extra == 'tf'
44
+ Requires-Dist: tensorflow-macos>=2.15; (platform_system == 'Darwin' and platform_machine == 'arm64') and extra == 'tf'
45
+ Requires-Dist: tensorflow-metal>=1.0; (platform_system == 'Darwin' and platform_machine == 'arm64') and extra == 'tf'
46
+ Requires-Dist: tensorflow>=2.15; (platform_system != 'Darwin') and extra == 'tf'
47
+ Requires-Dist: tf-keras>=2.16; extra == 'tf'
48
+ Description-Content-Type: text/markdown
49
+
50
+ # SEGMENTATION OF WATER BODIES FROM SATELLITE IMAGES (sat-water)
51
+
52
+ ## Introduction
53
+ Satellite imagery is a rich source of information, and the accurate segmentation of water bodies is crucial for understanding environmental patterns and changes over time. This project aims to provide a reliable and efficient tool for extracting water regions from raw satellite images.
54
+
55
+ This repository supports two workflows:
56
+ 1. **Library usage**: install with `pip` and run inference (pretrained weights downloaded on-demand).
57
+ 2. **Training workflow**: train your own models using the included preprocessing + training pipeline.
58
+
59
+ ---
60
+
61
+ ## Dataset
62
+
63
+ The dataset for this project can be obtained from [kaggle.com](https://www.kaggle.com/datasets/franciscoescobar/satellite-images-of-water-bodies). It consists of JPEG images of water bodies taken by satellites, together with their masks. More details about the dataset are provided on the website.
64
+
65
+ ---
66
+
67
+ ## Installation
68
+
69
+ ### As a library
70
+ ```bash
71
+ pip install sat-water
72
+ ```
73
+
74
+ To run inference/training you must install the TensorFlow extras:
75
+
76
+ ```bash
77
+ pip install "sat-water[tf]"
78
+ ```
79
+
80
+ ### From source (development)
81
+ ```bash
82
+ git clone https://github.com/busayojee/sat-water.git
83
+ cd sat-water
84
+ pip install -e .
85
+ ```
86
+
87
+ > Note: `sat-water` sets `TF_USE_LEGACY_KERAS=1` and `SM_FRAMEWORK=tf.keras` by default at import time to keep `segmentation-models` compatible.
88
+
89
+ ---
90
+
91
+ ## Pretrained models
92
+
93
+ Pretrained weights are hosted on Hugging Face and downloaded at inference time with SHA256 integrity verification.
94
+
95
+ Default weights repo:
96
+ - `busayojee/sat-water-weights`
97
+
98
+ Override weights source:
99
+ ```bash
100
+ export SATWATER_WEIGHTS_REPO="busayojee/sat-water-weights"
101
+ export SATWATER_WEIGHTS_REV="main"
102
+ ```
103
+
104
+ ### Available model keys
105
+
106
+ This project was trained with two model architectures: a <b>UNET</b> with no backbone, and a UNET with a <b>RESNET34</b> backbone. Of the latter, two variants were trained on different image sizes and with different hyperparameters.
107
+
108
+ | Model key | Architecture | Input size | Notes |
109
+ |---|---|---:|---|
110
+ | `resnet34_256` | UNet + ResNet34 backbone | 256×256 | Best speed/quality tradeoff |
111
+ | `resnet34_512` | UNet + ResNet34 backbone | 512×512 | Higher-res boundaries; slower |
112
+ | `unet` | UNet (no backbone) | 128×128 | Currently unavailable in weights repo |
113
+
114
+ ---
115
+
116
+ ## Quickstart (library inference)
117
+
118
+ ```python
119
+ from satwater.inference import segment_image
120
+
121
+ res = segment_image(
122
+ "path/to/image.jpg",
123
+ model="resnet34_512", # or "resnet34_256"
124
+ return_overlay=True,
125
+ show=False,
126
+ )
127
+
128
+ mask = res.masks["resnet34_512"] # (H, W, 1)
129
+ overlay = res.overlays["resnet34_512"] # (H, W, 3)
130
+ ```
131
+
132
+ ---
133
+
134
+ ## Inference API
135
+
136
+ `segment_image(...)` is the recommended entrypoint for package users.
137
+
138
+ ### Parameters (commonly used)
139
+
140
+ - `image_path` *(str)*: path to an input image (`.jpg`, `.png`, etc.)
141
+ - `model` *(str)*: one of `resnet34_256`, `resnet34_512` (and `unet` once available)
142
+ - `return_overlay` *(bool)*: whether to return an overlay image (original image + blended water mask)
143
+ - `show` *(bool)*: whether to display the result via matplotlib (useful in notebooks / local runs)
144
+
145
+ ### Weights source / versioning
146
+
147
+ - `repo_id` *(str, optional)*: Hugging Face repo containing weights (defaults to `SATWATER_WEIGHTS_REPO`)
148
+ - `revision` *(str, optional)*: branch / tag / commit (defaults to `SATWATER_WEIGHTS_REV`)
149
+ - `save_dir` *(str | Path | None, optional)*: output directory (if supported in your local version).
150
+ If you want saving, you can always do it manually from the returned arrays (example below).
151
+
152
+ #### Manual saving
153
+
154
+ ```python
155
+ from PIL import Image
156
+ import numpy as np
157
+
158
+ Image.fromarray((mask.squeeze(-1) * 255).astype(np.uint8)).save("mask.png")
159
+ Image.fromarray(overlay).save("overlay.png")
160
+ ```
161
+
162
+ ---
163
+
164
+ ## Training history (reference)
165
+
166
+ The plots below are from historical runs in this repository and are provided to show convergence behavior.
167
+
168
+ | UNet (baseline) | ResNet34-UNet (256×256) | ResNet34-UNet (512×512) |
169
+ |:--:|:--:|:--:|
170
+ | <img width="260" alt="UNet History" src="https://github.com/busayojee/sat-water/blob/main/assets/results/history_unet.png"> | <img width="260" alt="ResNet34 256 History" src="https://github.com/busayojee/sat-water/blob/main/assets/results/history_resnet34.png"> | <img width="260" alt="ResNet34 512 History" src="https://github.com/busayojee/sat-water/blob/main/assets/results/historyresnet34(2).png"> |
171
+
172
+ ---
173
+
174
+ ## Inference examples
175
+
176
+ Qualitative predictions produced by the three models.
177
+
178
+ | UNet | ResNet34-UNet (256×256) | ResNet34-UNet (512×512) |
179
+ |:--:|:--:|:--:|
180
+ | <img width="260" alt="UNet Prediction" src="https://github.com/busayojee/sat-water/blob/main/assets/results/prediciton_unet.png"> | <img width="260" alt="ResNet34 256 Prediction" src="https://github.com/busayojee/sat-water/blob/main/assets/results/prediciton_resnet34.png"> | <img width="260" alt="ResNet34 512 Prediction" src="https://github.com/busayojee/sat-water/blob/main/assets/results/prediciton_resnet34(2).png"> |
181
+
182
+ ---
183
+
184
+ ## Single test instance (end-to-end)
185
+
186
+ Using all models to predict a single test instance.
187
+
188
+ | Test Image | Prediction |
189
+ |:--:|:--:|
190
+ | <img width="300" alt="Test Image" src="https://github.com/busayojee/sat-water/blob/main/assets/results/test2.jpg"> | <img width="300" alt="Prediction" src="https://github.com/busayojee/sat-water/blob/main/assets/results/prediciton_test.png"> |
191
+
192
+ Label overlay of the best prediction (ResNet34-UNet 512×512 in that run):
193
+
194
+ <img width="320" alt="Overlay" src="https://github.com/busayojee/sat-water/blob/main/assets/results/test2.png">
195
+
196
+ ---
197
+
198
+ ## Train your own model
199
+
200
+ ### Preprocessing
201
+
202
+ ```python
203
+ from satwater.preprocess import Preprocess
204
+
205
+ train_ds, val_ds, test_ds = Preprocess.data_load(
206
+ dataset_dir="path/to/dataset",
207
+ masks_dir="/Masks",
208
+ images_dir="/Images",
209
+ split=(0.7, 0.2, 0.1),
210
+ shape=(256, 256),
211
+ batch_size=16,
212
+ channels=3,
213
+ )
214
+ ```
215
+
216
+ ### Training (UNet baseline)
217
+ ```python
218
+ from satwater.models import Unet
219
+
220
+ history = Unet.train(
221
+ train_ds,
222
+ val_ds,
223
+ shape=(128, 128, 3),
224
+ n_classes=2,
225
+ lr=1e-4,
226
+ loss=Unet.loss,
227
+ metrics=Unet.metrics,
228
+ name="unet",
229
+ )
230
+ ```
231
+
232
+ ### Training (ResNet34-UNet)
233
+ ```python
234
+ from satwater.models import BackboneModels
235
+
236
+ bm = BackboneModels("resnet34", train_ds, val_ds, test_ds, name="resnet34_256")
237
+ bm.build_model(n_classes=2, n=1, lr=1e-4)
238
+ history = bm.train()
239
+ ```
240
+
241
+ > For a 512×512 run, load a second dataset with `shape=(512, 512)` and use a different model name (e.g. `resnet34_512`) to keep artifacts separate.
242
+
243
+
244
+ ### Inference
245
+ To run inference for UNET:
246
+
247
+ ```
248
+ inference_u = Inference(model="path/to/model",name="unet")
249
+ inference_u.predict_ds(test_ds)
250
+ ```
251
+
252
+ For RESNET models 1 and 2:
253
+
254
+ ```
255
+ inference_r = Inference(model="path/to/model",name="resnet34")
256
+ inference_r.predict_ds(test_ds)
257
+
258
+ inference_r2 = Inference(model="path/to/model",name="resnet34(2)")
259
+ inference_r2.predict_ds(test_ds1)
260
+ ```
261
+
262
+ For all three models together:
263
+
264
+ ```
265
+ models={"unet":"path/to/model1", "resnet34":"path/to/model2", "resnet34(2)":"path/to/model3"}
266
+ inference_multiple = Inference(model=models)
267
+ inference_multiple.predict_ds(test_ds)
268
+ ```
269
+
270
+ ## CLI (optional)
271
+
272
+ If you included the `scripts/` folder in your package/repo, you can run the scripts directly.
273
+
274
+ ### Training CLI
275
+
276
+ UNet:
277
+
278
+ ```bash
279
+ python scripts/train.py --dataset path/to/dataset --image-folder /Images --mask-folder /Masks --shape 128,128,3 --batch-size 16 --split 0.2,0.1 --channels 3 --model unet --name unet --epochs 100 --lr 1e-4
280
+ ```
281
+
282
+ ResNet34-UNet (256):
283
+
284
+ ```bash
285
+ python scripts/train.py --dataset path/to/dataset --image-folder /Images --mask-folder /Masks --shape 256,256,3 --batch-size 8 --split 0.2,0.1 --channels 3 --model resnet34 --name resnet34_256 --epochs 100 --lr 1e-4
286
+ ```
287
+
288
+ ResNet34-UNet (512):
289
+
290
+ ```bash
291
+ python scripts/train.py --dataset path/to/dataset --image-folder /Images --mask-folder /Masks --shape 512,512,3 --batch-size 4 --split 0.2,0.1 --channels 3 --model resnet34(2) --name resnet34_512 --epochs 100 --lr 1e-4
292
+ ```
293
+
294
+ ### Inference CLI
295
+
296
+ Single model:
297
+
298
+ ```bash
299
+ python scripts/infer.py --image path/to/image.jpg --model path/to/model.keras --name unet --out prediction
300
+ ```
301
+
302
+ Multiple models:
303
+
304
+ ```bash
305
+ python scripts/infer.py --image path/to/image.jpg --models "unet=path/to/unet.keras,resnet34=path/to/resnet34.keras,resnet34(2)=path/to/resnet34_2.keras" --out prediction
306
+ ```
307
+
308
+ ### Upload weights to Hugging Face (optional)
309
+
310
+ ```bash
311
+ export HF_TOKEN="YOUR_HUGGINGFACE_TOKEN"
312
+
313
+ python scripts/weights.py --repo-id user/repo --hf-root weights --out-dir dist/weights --model unet=path/to/unet.keras@128,128,3 --model resnet34_256=path/to/resnet34_256.keras@256,256,3 --model resnet34_512=path/to/resnet34_512.keras@512,512,3
314
+ ```
315
+
316
+ ---
317
+
318
+ ## Contributing
319
+
320
+ Contributions are welcome — especially around:
321
+ - adding/refreshing pretrained weights (including UNet)
322
+ - improving inference UX (CLI, batch inference, better overlays)
323
+ - expanding tests and CI matrix
324
+ - model evaluation and benchmarking on additional datasets
325
+
326
+ ### How to contribute
327
+ 1. Fork the repo
328
+ 2. Create a feature branch:
329
+ ```bash
330
+ git checkout -b feat/my-change
331
+ ```
332
+ 3. Run checks locally:
333
+ ```bash
334
+ pytest -q
335
+ ruff check .
336
+ ruff format .
337
+ ```
338
+ 4. Open a pull request with a short summary + screenshots (if changing inference output)
339
+
340
+ If you’re reporting a bug, please include:
341
+ - OS + Python version
342
+ - TensorFlow version
343
+ - full traceback + a minimal repro snippet
344
+
345
+ ---
346
+
347
+