coralnet-toolbox 0.0.72__py2.py3-none-any.whl → 0.0.74__py2.py3-none-any.whl
This diff compares the contents of two publicly released versions of the package, as published to a supported registry. It is provided for informational purposes only and reflects the packages exactly as they appear in their public registry.
- coralnet_toolbox/Annotations/QtAnnotation.py +28 -69
- coralnet_toolbox/Annotations/QtMaskAnnotation.py +408 -0
- coralnet_toolbox/Annotations/QtMultiPolygonAnnotation.py +72 -56
- coralnet_toolbox/Annotations/QtPatchAnnotation.py +165 -216
- coralnet_toolbox/Annotations/QtPolygonAnnotation.py +497 -353
- coralnet_toolbox/Annotations/QtRectangleAnnotation.py +126 -116
- coralnet_toolbox/AutoDistill/QtDeployModel.py +23 -12
- coralnet_toolbox/CoralNet/QtDownload.py +2 -1
- coralnet_toolbox/Explorer/QtDataItem.py +1 -1
- coralnet_toolbox/Explorer/QtExplorer.py +159 -17
- coralnet_toolbox/Explorer/QtSettingsWidgets.py +160 -86
- coralnet_toolbox/IO/QtExportTagLabAnnotations.py +30 -10
- coralnet_toolbox/IO/QtImportTagLabAnnotations.py +21 -15
- coralnet_toolbox/IO/QtOpenProject.py +46 -78
- coralnet_toolbox/IO/QtSaveProject.py +18 -43
- coralnet_toolbox/MachineLearning/DeployModel/QtDetect.py +22 -11
- coralnet_toolbox/MachineLearning/DeployModel/QtSegment.py +22 -10
- coralnet_toolbox/MachineLearning/ExportDataset/QtBase.py +61 -24
- coralnet_toolbox/MachineLearning/ExportDataset/QtClassify.py +5 -1
- coralnet_toolbox/MachineLearning/ExportDataset/QtDetect.py +19 -6
- coralnet_toolbox/MachineLearning/ExportDataset/QtSegment.py +21 -8
- coralnet_toolbox/MachineLearning/ImportDataset/QtBase.py +42 -22
- coralnet_toolbox/MachineLearning/VideoInference/QtBase.py +0 -4
- coralnet_toolbox/QtAnnotationWindow.py +42 -14
- coralnet_toolbox/QtEventFilter.py +19 -2
- coralnet_toolbox/QtImageWindow.py +134 -86
- coralnet_toolbox/QtLabelWindow.py +14 -2
- coralnet_toolbox/QtMainWindow.py +122 -9
- coralnet_toolbox/QtProgressBar.py +52 -27
- coralnet_toolbox/Rasters/QtRaster.py +59 -7
- coralnet_toolbox/Rasters/RasterTableModel.py +42 -14
- coralnet_toolbox/SAM/QtBatchInference.py +0 -2
- coralnet_toolbox/SAM/QtDeployGenerator.py +22 -11
- coralnet_toolbox/SAM/QtDeployPredictor.py +10 -0
- coralnet_toolbox/SeeAnything/QtBatchInference.py +19 -221
- coralnet_toolbox/SeeAnything/QtDeployGenerator.py +1634 -0
- coralnet_toolbox/SeeAnything/QtDeployPredictor.py +107 -154
- coralnet_toolbox/SeeAnything/QtTrainModel.py +115 -45
- coralnet_toolbox/SeeAnything/__init__.py +2 -0
- coralnet_toolbox/Tools/QtCutSubTool.py +18 -2
- coralnet_toolbox/Tools/QtResizeSubTool.py +19 -2
- coralnet_toolbox/Tools/QtSAMTool.py +222 -57
- coralnet_toolbox/Tools/QtSeeAnythingTool.py +223 -55
- coralnet_toolbox/Tools/QtSelectSubTool.py +6 -4
- coralnet_toolbox/Tools/QtSelectTool.py +27 -3
- coralnet_toolbox/Tools/QtSubtractSubTool.py +66 -0
- coralnet_toolbox/Tools/QtWorkAreaTool.py +25 -13
- coralnet_toolbox/Tools/__init__.py +2 -0
- coralnet_toolbox/__init__.py +1 -1
- coralnet_toolbox/utilities.py +137 -47
- coralnet_toolbox-0.0.74.dist-info/METADATA +375 -0
- {coralnet_toolbox-0.0.72.dist-info → coralnet_toolbox-0.0.74.dist-info}/RECORD +56 -53
- coralnet_toolbox-0.0.72.dist-info/METADATA +0 -341
- {coralnet_toolbox-0.0.72.dist-info → coralnet_toolbox-0.0.74.dist-info}/WHEEL +0 -0
- {coralnet_toolbox-0.0.72.dist-info → coralnet_toolbox-0.0.74.dist-info}/entry_points.txt +0 -0
- {coralnet_toolbox-0.0.72.dist-info → coralnet_toolbox-0.0.74.dist-info}/licenses/LICENSE.txt +0 -0
- {coralnet_toolbox-0.0.72.dist-info → coralnet_toolbox-0.0.74.dist-info}/top_level.txt +0 -0
coralnet_toolbox/utilities.py
CHANGED
@@ -9,14 +9,14 @@ import requests
 import traceback
 from functools import lru_cache
 
+import cv2
 import torch
 import numpy as np
 
 import rasterio
 from rasterio.windows import Window
 
-from shapely.
-from shapely.geometry import Polygon, MultiPolygon, LineString, GeometryCollection
+from shapely.geometry import Polygon
 
 from PyQt5.QtCore import Qt
 from PyQt5.QtGui import QImage
@@ -109,6 +109,19 @@ def rasterio_to_qimage(rasterio_src, longest_edge=None):
         QImage: Scaled image
     """
     try:
+        # Check if the dataset is closed
+        if not rasterio_src or getattr(rasterio_src, 'closed', True):
+            # Attempt to reopen the dataset if we can get the path
+            if hasattr(rasterio_src, 'name'):
+                try:
+                    rasterio_src = rasterio.open(rasterio_src.name)
+                except Exception as reopen_error:
+                    print(f"Error reopening dataset: {str(reopen_error)}")
+                    return QImage()
+            else:
+                print("Cannot read from closed dataset without path information")
+                return QImage()
+
         # Get the original size of the image
         original_width = rasterio_src.width
         original_height = rasterio_src.height
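The same closed-dataset guard is added to `rasterio_to_qimage`, `rasterio_to_cropped_image`, `rasterio_to_numpy`, and `work_area_to_numpy` below, each with its own fallback return value. As a standalone illustration of why reopening via `.name` works, here is a minimal sketch; the `example.tif` path is hypothetical:

```python
import rasterio

src = rasterio.open("example.tif")  # hypothetical raster on disk
src.close()

# A closed DatasetReader raises on read, but it keeps its 'name'
# attribute (the original path), so it can be reopened transparently.
if getattr(src, 'closed', True):
    if hasattr(src, 'name'):
        src = rasterio.open(src.name)
    else:
        raise RuntimeError("Cannot read from closed dataset without path information")

print(src.read(1).shape)  # reads succeed again
src.close()
```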
@@ -190,7 +203,7 @@ def rasterio_to_qimage(rasterio_src, longest_edge=None):
         # Transpose to height, width, channels format
         image = np.transpose(image, (1, 2, 0))
 
-        # Convert to uint8 if not already
+        # Convert to uint8 if image is not already
         if image.dtype != np.uint8:
             if image.max() > 0:  # Avoid division by zero
                 image = image.astype(float) * (255.0 / image.max())
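Only the comment wording changes here; the surrounding context lines rescale non-`uint8` bands into the 8-bit range. Isolated as a NumPy sketch (the final `astype(np.uint8)` cast is assumed from code outside the hunk):

```python
import numpy as np

# e.g. a 16-bit band scaled into the 8-bit range
band = np.array([[0, 1000], [2000, 4000]], dtype=np.uint16)

if band.dtype != np.uint8:
    if band.max() > 0:  # avoid division by zero
        band = band.astype(float) * (255.0 / band.max())
    band = band.astype(np.uint8)  # assumed final cast

print(band)  # [[  0  63] [127 255]]
```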
@@ -222,6 +235,19 @@ def rasterio_to_cropped_image(rasterio_src, window):
         QImage: Cropped image as a QImage
     """
     try:
+        # Check if the dataset is closed
+        if not rasterio_src or getattr(rasterio_src, 'closed', True):
+            # Attempt to reopen the dataset if we can get the path
+            if hasattr(rasterio_src, 'name'):
+                try:
+                    rasterio_src = rasterio.open(rasterio_src.name)
+                except Exception as reopen_error:
+                    print(f"Error reopening dataset: {str(reopen_error)}")
+                    return QImage()
+            else:
+                print("Cannot read from closed dataset without path information")
+                return QImage()
+
         # Check for single-band image with colormap
         has_colormap = False
         if rasterio_src.count == 1:
@@ -305,6 +331,19 @@ def rasterio_to_numpy(rasterio_src, longest_edge=None):
         numpy.ndarray: Image as a numpy array in format (h, w, c) for RGB or (h, w) for grayscale
     """
     try:
+        # Check if the dataset is closed
+        if not rasterio_src or getattr(rasterio_src, 'closed', True):
+            # Attempt to reopen the dataset if we can get the path
+            if hasattr(rasterio_src, 'name'):
+                try:
+                    rasterio_src = rasterio.open(rasterio_src.name)
+                except Exception as reopen_error:
+                    print(f"Error reopening dataset: {str(reopen_error)}")
+                    return np.zeros((100, 100, 3), dtype=np.uint8)
+            else:
+                print("Cannot read from closed dataset without path information")
+                return np.zeros((100, 100, 3), dtype=np.uint8)
+
         # Get the original size of the image
         original_width = rasterio_src.width
         original_height = rasterio_src.height
@@ -412,6 +451,19 @@ def work_area_to_numpy(rasterio_src, work_area):
     """
     if not rasterio_src:
         return None
+
+    # Check if the dataset is closed
+    if getattr(rasterio_src, 'closed', True):
+        # Attempt to reopen the dataset if we can get the path
+        if hasattr(rasterio_src, 'name'):
+            try:
+                rasterio_src = rasterio.open(rasterio_src.name)
+            except Exception as reopen_error:
+                print(f"Error reopening dataset: {str(reopen_error)}")
+                return None
+        else:
+            print("Cannot read from closed dataset without path information")
+            return None
 
     # If we got a WorkArea object, use its rect
     if hasattr(work_area, 'rect'):
@@ -542,50 +594,6 @@ def scale_pixmap(pixmap, max_size):
     return scaled_pixmap
 
 
-def attempt_download_asset(app, asset_name, asset_url):
-    """
-    Attempt to download an asset from the given URL.
-
-    :param app:
-    :param asset_name:
-    :param asset_url:
-    :return:
-    """
-    # Create a progress dialog
-    progress_dialog = ProgressBar(app, title=f"Downloading {asset_name}")
-
-    try:
-        # Get the asset name
-        asset_name = os.path.basename(asset_name)
-        asset_path = os.path.join(os.getcwd(), asset_name)
-
-        if os.path.exists(asset_path):
-            return
-
-        # Download the asset
-        response = requests.get(asset_url, stream=True)
-        total_size = int(response.headers.get('content-length', 0))
-        block_size = 1024  # 1 Kibibyte
-
-        # Initialize the progress bar
-        progress_dialog.start_progress(total_size // block_size)
-        progress_dialog.show()
-
-        with open(asset_path, 'wb') as f:
-            for data in response.iter_content(block_size):
-                if progress_dialog.wasCanceled():
-                    raise Exception("Download canceled by user")
-                f.write(data)
-                progress_dialog.update_progress()
-
-    except Exception as e:
-        QMessageBox.critical(app, "Error", f"Failed to download {asset_name}.\n{e}")
-
-    # Close the progress dialog
-    progress_dialog.set_value(progress_dialog.max_value)
-    progress_dialog.close()
-
-
 def simplify_polygon(xy_points, simplify_tolerance=0.1):
     """
     Filter a list of points to keep only the largest polygon and simplify it.
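`attempt_download_asset` is unchanged apart from being moved below the polygon helpers (it reappears verbatim in the next hunk). For `simplify_polygon`, only the signature and docstring are visible in this diff; a hypothetical usage sketch, assuming it accepts and returns a list of (x, y) points:

```python
from coralnet_toolbox.utilities import simplify_polygon

# A noisy outline with nearly collinear, redundant vertices.
points = [(0, 0), (25, 0.2), (50, 0.1), (100, 0), (100, 100), (0, 100)]

# Tolerance is in coordinate units; larger values remove more vertices.
simplified = simplify_polygon(points, simplify_tolerance=1.0)
print(len(points), "->", len(simplified))
```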
@@ -666,6 +674,88 @@ def densify_polygon(xy_points):
     return xy_points.tolist() if isinstance(xy_points, np.ndarray) else xy_points
 
 
+def polygonize_mask_with_holes(mask_tensor):
+    """
+    Converts a boolean mask tensor to an exterior polygon and a list of interior hole polygons.
+
+    Args:
+        mask_tensor (torch.Tensor): A 2D boolean tensor from the prediction results.
+
+    Returns:
+        A tuple containing:
+        - exterior (list of tuples): The (x, y) vertices of the outer boundary.
+        - holes (list of lists of tuples): A list where each element is a list of (x, y) vertices for a hole.
+    """
+    # Convert the tensor to a NumPy array format that OpenCV can use
+    mask_np = mask_tensor.squeeze().cpu().numpy().astype(np.uint8)
+
+    # Find all contours and their hierarchy
+    # cv2.RETR_CCOMP organizes contours into a two-level hierarchy: external boundaries and holes inside them.
+    contours, hierarchy = cv2.findContours(mask_np, cv2.RETR_CCOMP, cv2.CHAIN_APPROX_SIMPLE)
+
+    if not contours or hierarchy is None:
+        return [], []
+
+    exterior = []
+    holes = []
+
+    # Process the hierarchy to separate the exterior from the holes
+    for i, contour in enumerate(contours):
+        # An external contour's parent in the hierarchy is -1
+        if hierarchy[0][i][3] == -1:
+            # Squeeze to convert from [[x, y]] to [x, y] format
+            exterior = contour.squeeze(axis=1).tolist()
+        else:
+            # Any other contour is treated as a hole
+            holes.append(contour.squeeze(axis=1).tolist())
+
+    return exterior, holes
+
+
+def attempt_download_asset(app, asset_name, asset_url):
+    """
+    Attempt to download an asset from the given URL.
+
+    :param app:
+    :param asset_name:
+    :param asset_url:
+    :return:
+    """
+    # Create a progress dialog
+    progress_dialog = ProgressBar(app, title=f"Downloading {asset_name}")
+
+    try:
+        # Get the asset name
+        asset_name = os.path.basename(asset_name)
+        asset_path = os.path.join(os.getcwd(), asset_name)
+
+        if os.path.exists(asset_path):
+            return
+
+        # Download the asset
+        response = requests.get(asset_url, stream=True)
+        total_size = int(response.headers.get('content-length', 0))
+        block_size = 1024  # 1 Kibibyte
+
+        # Initialize the progress bar
+        progress_dialog.start_progress(total_size // block_size)
+        progress_dialog.show()
+
+        with open(asset_path, 'wb') as f:
+            for data in response.iter_content(block_size):
+                if progress_dialog.wasCanceled():
+                    raise Exception("Download canceled by user")
+                f.write(data)
+                progress_dialog.update_progress()
+
+    except Exception as e:
+        QMessageBox.critical(app, "Error", f"Failed to download {asset_name}.\n{e}")
+
+    # Close the progress dialog
+    progress_dialog.set_value(progress_dialog.max_value)
+    progress_dialog.close()
+
+
 def console_user(error_msg, parent=None):
     """
     Display an error message to the user via both terminal and GUI dialog.
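A quick sanity check of the new `polygonize_mask_with_holes` helper, using a synthetic donut-shaped mask (requires only `torch`, `numpy`, and the `cv2` import added above):

```python
import numpy as np
import torch

from coralnet_toolbox.utilities import polygonize_mask_with_holes

# A 100x100 boolean mask: a filled square with a square hole cut out.
mask = np.zeros((100, 100), dtype=bool)
mask[10:90, 10:90] = True
mask[40:60, 40:60] = False

exterior, holes = polygonize_mask_with_holes(torch.from_numpy(mask))
print(f"exterior vertices: {len(exterior)}, holes: {len(holes)}")  # expect 1 hole
```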
coralnet_toolbox-0.0.74.dist-info/METADATA
ADDED
@@ -0,0 +1,375 @@
+Metadata-Version: 2.4
+Name: coralnet-toolbox
+Version: 0.0.74
+Summary: Tools for annotating and developing ML models for benthic imagery
+Author-email: Jordan Pierce <jordan.pierce@noaa.gov>
+License: MIT License
+Project-URL: Homepage, https://github.com/Jordan-Pierce/CoralNet-Toolbox
+Keywords: CoralNet-Toolbox
+Classifier: Intended Audience :: Developers
+Classifier: License :: OSI Approved :: MIT License
+Classifier: Natural Language :: English
+Classifier: Programming Language :: Python :: 3.10
+Requires-Python: <3.11,>=3.10
+Description-Content-Type: text/markdown
+License-File: LICENSE.txt
+Requires-Dist: PyQt5>=5.15.11
+Requires-Dist: pyqtdarktheme
+Requires-Dist: pyqtgraph
+Requires-Dist: ultralytics>=8.3.152
+Requires-Dist: lap>=0.5.12
+Requires-Dist: open-clip-torch>=2.20.0
+Requires-Dist: supervision>=0.24.0
+Requires-Dist: scikit-learn
+Requires-Dist: umap-learn
+Requires-Dist: faiss-cpu
+Requires-Dist: pycocotools
+Requires-Dist: ujson
+Requires-Dist: timm==0.9.2
+Requires-Dist: autodistill
+Requires-Dist: transformers>=4.5.0
+Requires-Dist: x-segment-anything>=0.0.8
+Requires-Dist: yolo-tiling>=0.0.19
+Requires-Dist: roboflow
+Requires-Dist: rasterio
+Requires-Dist: requests
+Requires-Dist: selenium>=4.15.2
+Requires-Dist: beautifulsoup4>=4.12.2
+Requires-Dist: webdriver_manager
+Requires-Dist: dill
+Requires-Dist: seaborn
+Provides-Extra: all
+Requires-Dist: coralnet-toolbox[extra]; extra == "all"
+Dynamic: license-file
+
+# CoralNet-Toolbox 🪸🧰
+
+<div align="center">
+<p>
+<img src="https://raw.githubusercontent.com/Jordan-Pierce/CoralNet-Toolbox/refs/heads/main/figures/CoralNet_Toolbox.png" alt="CoralNet-Toolbox" width="400">
+</p>
+
+<h3>🌊 <em>Empowering Coral Reef Research with AI-Powered Annotation Tools</em> 🌊</h3>
+
+<p><strong>An unofficial toolkit to supercharge your <a href="https://coralnet.ucsd.edu/">CoralNet</a> workflows with cutting-edge computer vision</strong></p>
+</div>
+
+---
+
+## 📊 Project Stats
+
+<div align="center">
+
+[](https://pypi.org/project/CoralNet-Toolbox)
+[](https://pypi.python.org/pypi/CoralNet-Toolbox)
+[](https://pepy.tech/project/coralnet-toolbox)
+
+[](https://pypi.org/project/CoralNet-Toolbox)
+[](https://pypi.org/project/CoralNet-Toolbox)
+[](https://pypi.org/project/CoralNet-Toolbox)
+[](https://pypi.org/project/CoralNet-Toolbox)
+
+</div>
+
+---
+
+## ✨ Why CoralNet-Toolbox?
+
+<div align="center">
+
+| 🎯 **Smart Annotation** | 🤖 **AI-Powered** | 🚀 **Complete Pipeline** |
+|:---:|:---:|:---:|
+| Create patches, rectangles, and polygons with intelligent assistance | Leverage SAM, YOLO, and foundation models | From data collection to deployment |
+| *Precision meets efficiency* | *Cutting-edge AI at your fingertips* | *End-to-end workflow automation* |
+
+</div>
+
+---
+
+## ⚡ Quick Start
+
+Get up and running in seconds:
+
+```bash
+# 💻 Installation
+pip install coralnet-toolbox
+
+# 🚀 Launch
+coralnet-toolbox
+```
+
+> **🎉 That's it!** The toolbox will open and you're ready to start annotating!
+
+For a complete installation guide (including CUDA setup), see the [Installation Documentation](https://jordan-pierce.github.io/CoralNet-Toolbox/installation).
+
+---
+
+## 📚 Documentation Hub
+
+<div align="center">
+
+| 📖 Guide | 🎯 Purpose | 🔗 Link |
+|:---:|:---:|:---:|
+| **Overview** | Get the big picture | [📋 Read More](https://jordan-pierce.github.io/CoralNet-Toolbox/overview) |
+| **Installation** | Detailed setup instructions | [⚙️ Setup Guide](https://jordan-pierce.github.io/CoralNet-Toolbox/installation) |
+| **Usage** | Learn the tools | [🛠️ User Manual](https://jordan-pierce.github.io/CoralNet-Toolbox/usage) |
+| **Classification** | Community tutorial | [🧠 AI Tutorial](https://jordan-pierce.github.io/CoralNet-Toolbox/classify) |
+
+</div>
+
+---
+
+## 🎥 Video Demonstrations
+
+<details open>
+<summary><h3>📺 <strong>Watch the Complete Tutorial Series</strong></h3></summary>
+
+<div align="center">
+<a href="https://youtube.com/playlist?list=PLG5z9IbwhS5NQT3B2jrg3hxQgilDeZak9&feature=shared">
+<img src="https://raw.githubusercontent.com/Jordan-Pierce/CoralNet-Toolbox/refs/heads/main/figures/toolbox_qt.PNG" alt="Video Tutorial Series" width="80%" style="border-radius: 10px; box-shadow: 0 4px 8px rgba(0,0,0,0.1);">
+</a>
+
+<p><strong>🎬 Complete playlist covering all major features and workflows</strong></p>
+</div>
+
+</details>
+
+---
+
+## 🤖 AI Model Arsenal
+
+The toolbox integrates state-of-the-art models for efficient annotation workflows:
+
+<div align="center">
+
+### 🏋️ **Trainable Models**
+
+| **YOLO Family** | **Versions Available** |
+|:---:|:---:|
+| 🦾 **Legacy** | YOLOv3 • YOLOv4 • YOLOv5 |
+| 🚀 **Modern** | YOLOv6 • YOLOv7 • YOLOv8 |
+| ⚡ **Latest** | YOLOv9 • YOLOv10 • YOLO11 • YOLO12 |
+
+*Powered by the [Ultralytics](https://ultralytics.com/) ecosystem*
+
+</div>
+
+<div align="center">
+
+### 🎯 **Segment Anything Models**
+
+| **Model** | **Specialty** | **Use Case** |
+|:---:|:---:|:---:|
+| 🪸 **SAM** | General segmentation | High-quality masks |
+| 🌊 **CoralSCOP** | Coral-specific | Marine biology focus |
+| ⚡ **FastSAM** | Speed optimized | Real-time annotation |
+| 📱 **MobileSAM** | Mobile-friendly | Edge deployment |
+| ✂️ **EdgeSAM** | Efficient | Resource-constrained |
+| 🔍 **RepViT-SAM** | Vision transformers | Advanced features |
+
+*Powered by our [xSAM](https://Github.com/Jordan-Pierce/xSAM) integration*
+
+</div>
+
+<div align="center">
+
+### 👁️ **Visual Prompting & Foundation Models**
+
+| **Framework** | **Models** | **Capability** |
+|:---:|:---:|:---:|
+| **YOLOE** | See Anything | Visual prompt detection |
+| **AutoDistill** | Grounding DINO • OWLViT • OmDetTurbo | Zero-shot detection |
+
+</div>
+
+---
+
+## 🛠️ Feature Showcase
+
+<div align="center">
+
+### 📝 **Core Annotation Tools**
+
+| <img src="https://raw.githubusercontent.com/Jordan-Pierce/CoralNet-Toolbox/refs/heads/main/figures/tools/Patches.gif" alt="Patch Annotation" width="250" style="border-radius: 8px;"/><br>**🎯 Patch Annotation** | <img src="https://raw.githubusercontent.com/Jordan-Pierce/CoralNet-Toolbox/refs/heads/main/figures/tools/Rectangles.gif" alt="Rectangle Annotation" width="250" style="border-radius: 8px;"/><br>**📐 Rectangle Annotation** | <img src="https://raw.githubusercontent.com/Jordan-Pierce/CoralNet-Toolbox/refs/heads/main/figures/tools/Polygons.gif" alt="Polygon Annotation" width="250" style="border-radius: 8px;"/><br>**🔷 Multi-Polygon Annotation** |
+|:---:|:---:|:---:|
+
+### 🤖 **AI-Powered Analysis**
+
+| <img src="https://raw.githubusercontent.com/Jordan-Pierce/CoralNet-Toolbox/refs/heads/main/figures/tools/Classification.gif" alt="Classification" width="250" style="border-radius: 8px;"/><br>**🧠 Image Classification** | <img src="https://raw.githubusercontent.com/Jordan-Pierce/CoralNet-Toolbox/refs/heads/main/figures/tools/Object_Detection.gif" alt="Object Detection" width="250" style="border-radius: 8px;"/><br>**🎯 Object Detection** | <img src="https://raw.githubusercontent.com/Jordan-Pierce/CoralNet-Toolbox/refs/heads/main/figures/tools/Instance_Segmentation.gif" alt="Instance Segmentation" width="250" style="border-radius: 8px;"/><br>**🎭 Instance Segmentation** |
+|:---:|:---:|:---:|
+
+### 🔬 **Advanced Capabilities**
+
+| <img src="https://raw.githubusercontent.com/Jordan-Pierce/CoralNet-Toolbox/refs/heads/main/figures/tools/Segment_Anything.gif" alt="SAM" width="250" style="border-radius: 8px;"/><br>**🪸 Segment Anything (SAM)** | <img src="https://raw.githubusercontent.com/Jordan-Pierce/CoralNet-Toolbox/refs/heads/main/figures/tools/Classifying_Polygons.gif" alt="Polygon Classification" width="250" style="border-radius: 8px;"/><br>**🔍 Polygon Classification** | <img src="https://raw.githubusercontent.com/Jordan-Pierce/CoralNet-Toolbox/refs/heads/main/figures/tools/Work_Areas.gif" alt="Work Areas" width="250" style="border-radius: 8px;"/><br>**📍 Region-based Detection** |
+|:---:|:---:|:---:|
+
+### ✂️ **Editing & Processing Tools**
+
+| <img src="https://raw.githubusercontent.com/Jordan-Pierce/CoralNet-Toolbox/refs/heads/main/figures/tools/Cut.gif" alt="Cut Tool" width="250" style="border-radius: 8px;"/><br>**✂️ Cut** | <img src="https://raw.githubusercontent.com/Jordan-Pierce/CoralNet-Toolbox/refs/heads/main/figures/tools/Combine.gif" alt="Combine Tool" width="250" style="border-radius: 8px;"/><br>**🔗 Combine** | <img src="https://raw.githubusercontent.com/Jordan-Pierce/CoralNet-Toolbox/refs/heads/main/figures/tools/Simplify.gif" alt="Simplify Tool" width="250" style="border-radius: 8px;"/><br>**🎨 Simplify** |
+|:---:|:---:|:---:|
+
+### 🌟 **Specialized Features**
+
+| <img src="https://raw.githubusercontent.com/Jordan-Pierce/CoralNet-Toolbox/refs/heads/main/figures/tools/See_Anything.gif" alt="YOLOE" width="400" style="border-radius: 8px;"/><br>**👁️ See Anything (YOLOE)** | <img src="https://raw.githubusercontent.com/Jordan-Pierce/CoralNet-Toolbox/refs/heads/main/figures/tools/Classifying_Orthomosaics.gif" alt="LAI Classification" width="400" style="border-radius: 8px;"/><br>**🗺️ LAI Classification** |
+|:---:|:---:|
+
+### 📊 **Analysis & Exploration**
+
+| <img src="https://raw.githubusercontent.com/Jordan-Pierce/CoralNet-Toolbox/refs/heads/main/figures/tools/Analytics.gif" alt="Video Analysis" width="400" style="border-radius: 8px;"/><br>**🎬 Video Inference & Analytics** | <img src="https://raw.githubusercontent.com/Jordan-Pierce/CoralNet-Toolbox/refs/heads/main/figures/tools/Explorer.gif" alt="Data Explorer" width="400" style="border-radius: 8px;"/><br>**🔍 Data Explorer & Clustering** |
+|:---:|:---:|
+
+</div>
+
+---
+
+## 🔧 Complete Workflow Pipeline
+
+### 📥 **Data Input**
+- **🔥 CoralNet Download**: Retrieve source data and annotations
+- **🎬 Video Processing**: Extract frames from video files
+- **📸 Image Import**: Support for various image formats
+
+### ✏️ **Annotation & Labeling**
+- **👆 Manual Annotation**: Intuitive point, rectangle, and polygon tools
+- **🤖 AI-Assisted**: SAM, YOLO, and visual prompting models
+- **📐 Precision Editing**: Cut, combine, subtract, and simplify shapes
+
+### 🧠 **Machine Learning**
+- **🔬 Hyperparameter Tuning**: Optimize training conditions
+- **🚀 Model Training**: Build custom classifiers and detectors
+- **⚡ Model Optimization**: Production-ready deployment
+
+### 📊 **Analysis & Export**
+- **📈 Performance Evaluation**: Comprehensive model metrics
+- **🎯 Batch Inference**: Process multiple images automatically
+- **🎥 Video Analysis**: Real-time processing with analytics
+- **📋 Multi-format Export**: CoralNet, Viscore, TagLab, GeoJSON
+
+---
+
+## 📋 Roadmap
+
+*See the current tickets and planned features on the [GitHub Issues Page](https://github.com/Jordan-Pierce/CoralNet-Toolbox/issues)*
+
+---
+
+## 💻 Installation Guide
+
+### 🐍 **Step 1: Environment Setup**
+```bash
+# Create a dedicated environment (recommended)
+conda create --name coralnet10 python=3.10 -y
+conda activate coralnet10
+```
+
+### ⚡ **Step 2: Fast Installation with UV**
+```bash
+# Install UV for faster package management
+pip install uv
+
+# Install CoralNet-Toolbox
+uv pip install coralnet-toolbox
+```
+
+> **Fallback**: If UV fails, use regular pip: `pip install coralnet-toolbox`
+
+### 🚀 **Step 3: GPU Acceleration (Optional)**
+For CUDA-enabled systems:
+```bash
+# Example for CUDA 11.8
+conda install nvidia/label/cuda-11.8.0::cuda-nvcc -y
+conda install nvidia/label/cuda-11.8.0::cuda-toolkit -y
+
+# Install PyTorch with CUDA support
+uv pip install torch torchvision --index-url https://download.pytorch.org/whl/cu118 --upgrade
+```
+
+### 🏃‍♂️ **Step 4: Launch**
+```bash
+coralnet-toolbox
+```
+
+### 🎯 **GPU Status Indicators**
+- **🐢** CPU only
+- **🐇** Single GPU
+- **🚀** Multiple GPUs
+- **🍎** Mac Metal (Apple Silicon)
+
+*Click the icon in the bottom-left to see available devices*
+
+### 🔄 **Upgrading**
+```bash
+# When updates are available
+uv pip install -U coralnet-toolbox==[latest_version]
+```
+
+---
+
+### 🏗️ **Repository Structure**
+
+<div align="center">
+
+</div>
+
+---
+
+## 🌊 Success Stories
+
+> **Using CoralNet-Toolbox in your research?**
+>
+> We'd love to feature your work! Share your success stories to help others learn and get inspired.
+
+---
+
+## 🌍 About CoralNet
+
+<div align="center">
+<p><em>🪸 Protecting our oceans, one annotation at a time 🪸</em></p>
+</div>
+
+Coral reefs are among Earth's most biodiverse ecosystems, supporting marine life and coastal communities worldwide. However, they face unprecedented threats from climate change, pollution, and human activities.
+
+**[CoralNet](https://coralnet.ucsd.edu/)** is a revolutionary platform enabling researchers to:
+- Upload and analyze coral reef photographs
+- Create detailed species annotations
+- Build AI-powered classification models
+- Collaborate with the global research community
+
+The **CoralNet-Toolbox** extends this mission by providing advanced AI tools that accelerate research and improve annotation quality.
+
+---
+
+## 📄 Citation
+
+If you use CoralNet-Toolbox in your research, please cite:
+
+```bibtex
+@misc{CoralNet-Toolbox,
+  author = {Pierce, Jordan and Battista, Tim},
+  title = {CoralNet-Toolbox: AI-Powered Tools for Coral Reef Research},
+  year = {2025},
+  howpublished = {\url{https://github.com/Jordan-Pierce/CoralNet-Toolbox}},
+  note = {GitHub repository}
+}
+```
+
+---
+
+## ⚖️ Legal & Licensing
+
+<div align="center">
+
+### ⚠️ **Disclaimer**
+*This is a scientific product and not official communication of NOAA or the US Department of Commerce. All code is provided 'as is' - users assume responsibility for its use.*
+
+### 📋 **License**
+*Software created by US Government employees is not subject to copyright in the United States (17 U.S.C. §105). The Department of Commerce reserves rights to seek copyright protection in other countries.*
+
+</div>
+
+---
+
+<div align="center">
+<p><strong>🌊 Built with ❤️ for coral reef conservation 🌊</strong></p>
+<p><em>Empowering researchers • Protecting ecosystems • Advancing science</em></p>
+</div>
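Finally, the `coralnet_toolbox/__init__.py` change listed above is `+1 -1`, consistent with a version-string bump; assuming the package exposes `__version__` (not shown in this diff), a post-upgrade sanity check could be:

```python
import coralnet_toolbox

# The __init__.py diff changed a single line; the version bump is the
# usual suspect, so this is a reasonable post-install check.
print(coralnet_toolbox.__version__)  # expected: '0.0.74'
```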