coralnet-toolbox 0.0.74__py2.py3-none-any.whl → 0.0.76__py2.py3-none-any.whl

This diff compares the contents of two publicly released versions of the package, as they appear in their public registry. It is provided for informational purposes only.
Files changed (49)
  1. coralnet_toolbox/Annotations/QtPolygonAnnotation.py +57 -12
  2. coralnet_toolbox/Annotations/QtRectangleAnnotation.py +44 -14
  3. coralnet_toolbox/Explorer/QtDataItem.py +52 -22
  4. coralnet_toolbox/Explorer/QtExplorer.py +277 -1600
  5. coralnet_toolbox/Explorer/QtSettingsWidgets.py +101 -15
  6. coralnet_toolbox/Explorer/QtViewers.py +1568 -0
  7. coralnet_toolbox/Explorer/transformer_models.py +70 -0
  8. coralnet_toolbox/Explorer/yolo_models.py +112 -0
  9. coralnet_toolbox/IO/QtExportMaskAnnotations.py +538 -403
  10. coralnet_toolbox/Icons/system_monitor.png +0 -0
  11. coralnet_toolbox/MachineLearning/ImportDataset/QtBase.py +239 -147
  12. coralnet_toolbox/MachineLearning/VideoInference/YOLO3D/run.py +102 -16
  13. coralnet_toolbox/QtAnnotationWindow.py +16 -10
  14. coralnet_toolbox/QtEventFilter.py +4 -4
  15. coralnet_toolbox/QtImageWindow.py +3 -7
  16. coralnet_toolbox/QtMainWindow.py +104 -64
  17. coralnet_toolbox/QtProgressBar.py +1 -0
  18. coralnet_toolbox/QtSystemMonitor.py +370 -0
  19. coralnet_toolbox/Rasters/RasterTableModel.py +20 -0
  20. coralnet_toolbox/Results/ConvertResults.py +14 -8
  21. coralnet_toolbox/Results/ResultsProcessor.py +3 -2
  22. coralnet_toolbox/SAM/QtDeployGenerator.py +2 -5
  23. coralnet_toolbox/SAM/QtDeployPredictor.py +11 -3
  24. coralnet_toolbox/SeeAnything/QtDeployGenerator.py +146 -116
  25. coralnet_toolbox/SeeAnything/QtDeployPredictor.py +55 -9
  26. coralnet_toolbox/Tile/QtTileBatchInference.py +4 -4
  27. coralnet_toolbox/Tools/QtPolygonTool.py +42 -3
  28. coralnet_toolbox/Tools/QtRectangleTool.py +30 -0
  29. coralnet_toolbox/Tools/QtSAMTool.py +140 -91
  30. coralnet_toolbox/Transformers/Models/GroundingDINO.py +72 -0
  31. coralnet_toolbox/Transformers/Models/OWLViT.py +72 -0
  32. coralnet_toolbox/Transformers/Models/OmDetTurbo.py +68 -0
  33. coralnet_toolbox/Transformers/Models/QtBase.py +120 -0
  34. coralnet_toolbox/{AutoDistill → Transformers}/Models/__init__.py +1 -1
  35. coralnet_toolbox/{AutoDistill → Transformers}/QtBatchInference.py +15 -15
  36. coralnet_toolbox/{AutoDistill → Transformers}/QtDeployModel.py +18 -16
  37. coralnet_toolbox/{AutoDistill → Transformers}/__init__.py +1 -1
  38. coralnet_toolbox/__init__.py +1 -1
  39. coralnet_toolbox/utilities.py +21 -15
  40. {coralnet_toolbox-0.0.74.dist-info → coralnet_toolbox-0.0.76.dist-info}/METADATA +13 -10
  41. {coralnet_toolbox-0.0.74.dist-info → coralnet_toolbox-0.0.76.dist-info}/RECORD +45 -40
  42. coralnet_toolbox/AutoDistill/Models/GroundingDINO.py +0 -81
  43. coralnet_toolbox/AutoDistill/Models/OWLViT.py +0 -76
  44. coralnet_toolbox/AutoDistill/Models/OmDetTurbo.py +0 -75
  45. coralnet_toolbox/AutoDistill/Models/QtBase.py +0 -112
  46. {coralnet_toolbox-0.0.74.dist-info → coralnet_toolbox-0.0.76.dist-info}/WHEEL +0 -0
  47. {coralnet_toolbox-0.0.74.dist-info → coralnet_toolbox-0.0.76.dist-info}/entry_points.txt +0 -0
  48. {coralnet_toolbox-0.0.74.dist-info → coralnet_toolbox-0.0.76.dist-info}/licenses/LICENSE.txt +0 -0
  49. {coralnet_toolbox-0.0.74.dist-info → coralnet_toolbox-0.0.76.dist-info}/top_level.txt +0 -0

coralnet_toolbox/{AutoDistill → Transformers}/QtBatchInference.py

@@ -13,14 +13,14 @@ from PyQt5.QtWidgets import (QApplication, QMessageBox, QCheckBox, QVBoxLayout,
 
 
 class BatchInferenceDialog(QDialog):
-    """Dialog for performing batch inference on images using AutoDistill."""
+    """Dialog for performing batch inference on images using Transformers."""
 
     def __init__(self, main_window, parent=None):
         super().__init__(parent)
         self.main_window = main_window
         self.image_window = main_window.image_window
         self.annotation_window = main_window.annotation_window
-        self.deploy_model_dialog = main_window.auto_distill_deploy_model_dialog
+        self.deploy_model_dialog = main_window.transformers_deploy_model_dialog
         self.loaded_models = self.deploy_model_dialog.loaded_model
 
         self.setWindowTitle("Batch Inference")
@@ -64,25 +64,25 @@ class BatchInferenceDialog(QDialog):
         self.image_options_group = QButtonGroup(self)
 
         # Create image selection options
-        self.apply_filtered = QCheckBox("▼ Apply to filtered images")
-        self.apply_prev = QCheckBox("↑ Apply to previous images")
-        self.apply_next = QCheckBox("↓ Apply to next images")
-        self.apply_all = QCheckBox("↕ Apply to all images")
+        self.apply_filtered_checkbox = QCheckBox("▼ Apply to filtered images")
+        self.apply_prev_checkbox = QCheckBox("↑ Apply to previous images")
+        self.apply_next_checkbox = QCheckBox("↓ Apply to next images")
+        self.apply_all_checkbox = QCheckBox("↕ Apply to all images")
 
         # Add options to button group
-        self.image_options_group.addButton(self.apply_filtered)
-        self.image_options_group.addButton(self.apply_prev)
-        self.image_options_group.addButton(self.apply_next)
-        self.image_options_group.addButton(self.apply_all)
+        self.image_options_group.addButton(self.apply_filtered_checkbox)
+        self.image_options_group.addButton(self.apply_prev_checkbox)
+        self.image_options_group.addButton(self.apply_next_checkbox)
+        self.image_options_group.addButton(self.apply_all_checkbox)
 
         # Make selections exclusive
         self.image_options_group.setExclusive(True)
 
         # Default selection
-        self.apply_all.setChecked(True)
+        self.apply_all_checkbox.setChecked(True)
 
         # Add widgets to layout
-        layout.addWidget(self.apply_filtered)
-        layout.addWidget(self.apply_prev)
-        layout.addWidget(self.apply_next)
-        layout.addWidget(self.apply_all)
+        layout.addWidget(self.apply_filtered_checkbox)
+        layout.addWidget(self.apply_prev_checkbox)
+        layout.addWidget(self.apply_next_checkbox)
+        layout.addWidget(self.apply_all_checkbox)
 
         group_box.setLayout(layout)
         self.layout.addWidget(group_box)
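
The renamed checkboxes above keep the existing pattern of placing QCheckBox widgets into an exclusive QButtonGroup so that they behave like radio buttons. A minimal, standalone sketch of that Qt pattern (the widget names and labels here are illustrative, not taken from the toolbox):

```python
import sys
from PyQt5.QtWidgets import (QApplication, QButtonGroup, QCheckBox,
                             QVBoxLayout, QWidget)

app = QApplication(sys.argv)
panel = QWidget()
layout = QVBoxLayout(panel)

# An exclusive QButtonGroup unchecks the other boxes when one is checked,
# which is how the dialog keeps its "filtered / previous / next / all"
# image-selection options mutually exclusive.
group = QButtonGroup(panel)
group.setExclusive(True)

for text in ("Apply to filtered images", "Apply to all images"):
    box = QCheckBox(text)
    group.addButton(box)
    layout.addWidget(box)

panel.show()
sys.exit(app.exec_())
```
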

coralnet_toolbox/{AutoDistill → Transformers}/QtDeployModel.py

@@ -18,6 +18,7 @@ from PyQt5.QtWidgets import (QApplication, QComboBox, QDialog,
 from coralnet_toolbox.QtProgressBar import ProgressBar
 
 from coralnet_toolbox.Results import ResultsProcessor
+from coralnet_toolbox.Results import ConvertResults
 from coralnet_toolbox.Results import MapResults
 
 from coralnet_toolbox.utilities import rasterio_open
@@ -33,13 +34,13 @@ from coralnet_toolbox.Icons import get_icon
 
 class DeployModelDialog(QDialog):
     """
-    Dialog for deploying and managing AutoDistill models.
+    Dialog for deploying and managing Transformers models.
     Allows users to load, configure, and deactivate models, as well as make predictions on images.
     """
 
     def __init__(self, main_window, parent=None):
         """
-        Initialize the AutoDistillDeployModelDialog.
+        Initialize the TransformersDeployModelDialog.
 
         Args:
             main_window: The main application window.
@@ -52,7 +53,7 @@ class DeployModelDialog(QDialog):
         self.annotation_window = main_window.annotation_window
 
         self.setWindowIcon(get_icon("coral.png"))
-        self.setWindowTitle("AutoDistill Deploy Model (Ctrl + 6)")
+        self.setWindowTitle("Transformers Deploy Model (Ctrl + 6)")
         self.resize(400, 325)
 
         # Initialize variables
@@ -66,6 +67,8 @@
         self.ontology = None
         self.class_mapping = {}
         self.ontology_pairs = []
+
+        self.task = 'detect'
 
         # Create the layout
         self.layout = QVBoxLayout(self)
@@ -422,8 +425,6 @@
         progress_bar.close()
         # Restore cursor
         QApplication.restoreOverrideCursor()
-        # Exit the dialog box
-        self.accept()
 
     def load_new_model(self, model_name):
         """
@@ -433,8 +434,17 @@
             model_name: Name of the model to load.
             uncertainty_thresh: Threshold for uncertainty.
         """
+
+        # Clear the model
+        self.loaded_model = None
+        self.model_name = None
+
+        # Clear cache
+        gc.collect()
+        torch.cuda.empty_cache()
+
         if "GroundingDINO" in model_name:
-            from coralnet_toolbox.AutoDistill.Models.GroundingDINO import GroundingDINOModel
+            from coralnet_toolbox.Transformers.Models.GroundingDINO import GroundingDINOModel
 
             model = model_name.split("-")[1].strip()
             self.model_name = model_name
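
The new lines above release any previously loaded model before the next one is imported and constructed. A small sketch of that release step in isolation (the function name is hypothetical; the gc and torch imports are assumed to already be present at module level in QtDeployModel.py):

```python
import gc

import torch


def release_current_model(dialog):
    """Drop the dialog's references to the loaded model and free cached memory."""
    dialog.loaded_model = None       # drop the Python-side reference
    dialog.model_name = None

    gc.collect()                     # reclaim unreferenced objects
    if torch.cuda.is_available():    # guard added here; the diff calls empty_cache() unconditionally
        torch.cuda.empty_cache()     # return cached CUDA allocations to the driver
```
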
@@ -443,14 +453,14 @@ class DeployModelDialog(QDialog):
                                                    device=self.main_window.device)
 
         elif "OmDetTurbo" in model_name:
-            from coralnet_toolbox.AutoDistill.Models.OmDetTurbo import OmDetTurboModel
+            from coralnet_toolbox.Transformers.Models.OmDetTurbo import OmDetTurboModel
 
             self.model_name = model_name
             self.loaded_model = OmDetTurboModel(ontology=self.ontology,
                                                 device=self.main_window.device)
 
         elif "OWLViT" in model_name:
-            from coralnet_toolbox.AutoDistill.Models.OWLViT import OWLViTModel
+            from coralnet_toolbox.Transformers.Models.OWLViT import OWLViTModel
 
             self.model_name = model_name
             self.loaded_model = OWLViTModel(ontology=self.ontology,
@@ -495,7 +505,6 @@
                 continue
 
             results = self._apply_model(inputs)
-            results = self._update_results(results_processor, results, inputs, image_path)
             results = self._apply_sam(results, image_path)
             self._process_results(results_processor, results, image_path)
 
@@ -553,13 +562,6 @@
 
         return results_list
 
-    def _update_results(self, results_processor, results, inputs, image_path):
-        """Update the results to match Ultralytics format."""
-        return [results_processor.from_supervision(results,
-                                                   inputs,
-                                                   image_path,
-                                                   self.class_mapping)]
-
     def _apply_sam(self, results_list, image_path):
         """Apply SAM to the results if needed."""
         # Check if SAM model is deployed and loaded

coralnet_toolbox/{AutoDistill → Transformers}/__init__.py

@@ -1,4 +1,4 @@
-# coralnet_toolbox/AutoDistill/__init__.py
+# coralnet_toolbox/Transformers/__init__.py
 
 from .QtDeployModel import DeployModelDialog
 from .QtBatchInference import BatchInferenceDialog
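
With the package-level __init__.py renamed, downstream imports move from the old AutoDistill namespace to the new Transformers one. A minimal sketch of the before/after import paths, using the dialog classes re-exported above:

```python
# 0.0.74 and earlier: the zero-shot dialogs lived under AutoDistill
# from coralnet_toolbox.AutoDistill import DeployModelDialog, BatchInferenceDialog

# 0.0.76: the same dialogs are re-exported from the Transformers package
from coralnet_toolbox.Transformers import BatchInferenceDialog, DeployModelDialog
```
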

coralnet_toolbox/__init__.py

@@ -1,6 +1,6 @@
 """Top-level package for CoralNet-Toolbox."""
 
-__version__ = "0.0.74"
+__version__ = "0.0.76"
 __author__ = "Jordan Pierce"
 __email__ = "jordan.pierce@noaa.gov"
 __credits__ = "National Center for Coastal and Ocean Sciences (NCCOS)"

coralnet_toolbox/utilities.py

@@ -30,21 +30,6 @@ from coralnet_toolbox.QtProgressBar import ProgressBar
 # ----------------------------------------------------------------------------------------------------------------------
 
 
-def get_available_device():
-    """
-    Get available devices
-
-    :return:
-    """
-    devices = ['cpu',]
-    if torch.cuda.is_available():
-        for i in range(torch.cuda.device_count()):
-            devices.append(f'cuda:{i}')
-    if torch.backends.mps.is_available():
-        devices.append('mps')
-    return devices
-
-
 @lru_cache(maxsize=32)
 def rasterio_open(image_path):
     """
@@ -571,6 +556,27 @@ def pixmap_to_numpy(pixmap):
     return numpy_array
 
 
+def pixmap_to_pil(pixmap):
+    """
+    Convert a QPixmap to a PIL Image.
+
+    :param pixmap: QPixmap to convert
+    :return: PIL Image in RGB format
+    """
+    from PIL import Image
+
+    # Convert pixmap to numpy array first
+    image_np = pixmap_to_numpy(pixmap)
+
+    # Convert numpy array to PIL Image
+    if len(image_np.shape) == 2:  # Grayscale
+        pil_image = Image.fromarray(image_np, mode='L').convert('RGB')
+    else:  # RGB
+        pil_image = Image.fromarray(image_np, mode='RGB')
+
+    return pil_image
+
+
 def scale_pixmap(pixmap, max_size):
     """Scale pixmap and graphic if they exceed max dimension while preserving aspect ratio"""
     width = pixmap.width()
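
A hedged usage sketch of the new pixmap_to_pil helper (the image path and the standalone QApplication are illustrative; inside the toolbox the pixmap would come from an already-loaded raster):

```python
import sys

from PyQt5.QtGui import QPixmap
from PyQt5.QtWidgets import QApplication

from coralnet_toolbox.utilities import pixmap_to_pil

app = QApplication(sys.argv)         # QPixmap needs a Qt application instance
pixmap = QPixmap("example.png")      # illustrative path
pil_image = pixmap_to_pil(pixmap)    # PIL.Image in RGB mode, per the docstring above
print(pil_image.size, pil_image.mode)
```
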

{coralnet_toolbox-0.0.74.dist-info → coralnet_toolbox-0.0.76.dist-info}/METADATA

@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: coralnet-toolbox
-Version: 0.0.74
+Version: 0.0.76
 Summary: Tools for annotating and developing ML models for benthic imagery
 Author-email: Jordan Pierce <jordan.pierce@noaa.gov>
 License: MIT License
@@ -16,7 +16,7 @@ License-File: LICENSE.txt
 Requires-Dist: PyQt5>=5.15.11
 Requires-Dist: pyqtdarktheme
 Requires-Dist: pyqtgraph
-Requires-Dist: ultralytics>=8.3.152
+Requires-Dist: ultralytics>=8.3.191
 Requires-Dist: lap>=0.5.12
 Requires-Dist: open-clip-torch>=2.20.0
 Requires-Dist: supervision>=0.24.0
@@ -27,7 +27,8 @@ Requires-Dist: pycocotools
 Requires-Dist: ujson
 Requires-Dist: timm==0.9.2
 Requires-Dist: autodistill
-Requires-Dist: transformers>=4.5.0
+Requires-Dist: transformers>=4.56.0
+Requires-Dist: hf_xet
 Requires-Dist: x-segment-anything>=0.0.8
 Requires-Dist: yolo-tiling>=0.0.19
 Requires-Dist: roboflow
@@ -38,6 +39,8 @@ Requires-Dist: beautifulsoup4>=4.12.2
 Requires-Dist: webdriver_manager
 Requires-Dist: dill
 Requires-Dist: seaborn
+Requires-Dist: GPUtil
+Requires-Dist: psutil
 Provides-Extra: all
 Requires-Dist: coralnet-toolbox[extra]; extra == "all"
 Dynamic: license-file
@@ -113,6 +116,7 @@ For a complete installation guide (including CUDA setup), see the [Installation
 | **Overview** | Get the big picture | [📋 Read More](https://jordan-pierce.github.io/CoralNet-Toolbox/overview) |
 | **Installation** | Detailed setup instructions | [⚙️ Setup Guide](https://jordan-pierce.github.io/CoralNet-Toolbox/installation) |
 | **Usage** | Learn the tools | [🛠️ User Manual](https://jordan-pierce.github.io/CoralNet-Toolbox/usage) |
+| **Hot Keys** | Keyboard shortcuts | [⌨️ Shortcuts](https://jordan-pierce.github.io/CoralNet-Toolbox/hot-keys) |
 | **Classification** | Community tutorial | [🧠 AI Tutorial](https://jordan-pierce.github.io/CoralNet-Toolbox/classify) |
 
 </div>
@@ -178,7 +182,7 @@ The toolbox integrates state-of-the-art models for efficient annotation workflow
 | **Framework** | **Models** | **Capability** |
 |:---:|:---:|:---:|
 | **YOLOE** | See Anything | Visual prompt detection |
-| **AutoDistill** | Grounding DINO • OWLViT • OmDetTurbo | Zero-shot detection |
+| **Transformers** | Grounding DINO • OWLViT • OmDetTurbo | Zero-shot detection |
 
 </div>
 
@@ -276,12 +280,9 @@ uv pip install coralnet-toolbox
 ### 🚀 **Step 3: GPU Acceleration (Optional)**
 For CUDA-enabled systems:
 ```bash
-# Example for CUDA 11.8
-conda install nvidia/label/cuda-11.8.0::cuda-nvcc -y
-conda install nvidia/label/cuda-11.8.0::cuda-toolkit -y
-
+# Example for CUDA 12.9
 # Install PyTorch with CUDA support
-uv pip install torch torchvision --index-url https://download.pytorch.org/whl/cu118 --upgrade
+uv pip install torch torchvision --index-url https://download.pytorch.org/whl/cu129 --upgrade
 ```
 
 ### 🏃‍♂️ **Step 4: Launch**
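
After installing the cu129 wheels, a quick sanity check (assumed, not part of the README) that PyTorch actually sees the GPU:

```python
import torch

print(torch.__version__)            # should report a +cu129 build if the CUDA wheel installed
print(torch.cuda.is_available())    # True when a CUDA device is visible
```
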
@@ -308,7 +309,9 @@ uv pip install -U coralnet-toolbox==[latest_version]
 ### 🏗️ **Repository Structure**
 
 <div align="center">
-![Visualization of the codebase](./diagram.svg)
+<a href="https://raw.githubusercontent.com/Jordan-Pierce/CoralNet-Toolbox/refs/heads/main/diagram.svg">
+  <img src="https://raw.githubusercontent.com/Jordan-Pierce/CoralNet-Toolbox/refs/heads/main/diagram.svg" alt="Visualization of the codebase" width="80%">
+</a>
 </div>
 
 ---

{coralnet_toolbox-0.0.74.dist-info → coralnet_toolbox-0.0.76.dist-info}/RECORD

@@ -1,30 +1,23 @@
-coralnet_toolbox/QtAnnotationWindow.py,sha256=WOfi4BhXt3rtH0PnJZ0tDPpjn-wq66n8MoMqKFg-T2M,40727
+coralnet_toolbox/QtAnnotationWindow.py,sha256=YpE4e5gu0EcTqtzRNUcFqZl5SIjDaPK_FWxtFavfm4k,40950
 coralnet_toolbox/QtConfidenceWindow.py,sha256=L5hR23uW91GpqnsNS9R1XF3zCTe2aU7w0iDoQMV0oyE,16190
-coralnet_toolbox/QtEventFilter.py,sha256=qL3-FzQ2iZX1yITJb2mjn9_Q09nNNqKsgx-oCz4aT9Y,7717
-coralnet_toolbox/QtImageWindow.py,sha256=OagopE-6RNULQ7EMlIpY1Ex5Fv9oGvLjGiTdvTSS11s,51542
+coralnet_toolbox/QtEventFilter.py,sha256=utGSwtKlKl-3qepN6aaVQm4sL513bfuJpA3-3VTqnDU,7701
+coralnet_toolbox/QtImageWindow.py,sha256=B_38VR_0QARyo05UEvInmQKd3N74Dfb0g_QDeGCTUuY,51291
 coralnet_toolbox/QtLabelWindow.py,sha256=O-mLtE6ycuuGloYYZX0Z9JYZtsBMmspqNeJWslrjfFc,51419
-coralnet_toolbox/QtMainWindow.py,sha256=ctwSZiPql-nafxNacifHdZtNnIY-u-xvhKWptC7aBTk,122154
+coralnet_toolbox/QtMainWindow.py,sha256=4Znd0fezebQ-cWOKHjHTPVfj_DPH45A7tnUfxUEiqOQ,123615
 coralnet_toolbox/QtPatchSampling.py,sha256=Ehj06auBGfQwIruLNYQjF8eFOCpl8G72p42UXXb2mUo,29013
-coralnet_toolbox/QtProgressBar.py,sha256=kFcq0ftj4Hljesev-jwWMLVc_OOjlOwQrGc4r2yGMAs,7877
+coralnet_toolbox/QtProgressBar.py,sha256=L9V1dD9NQ5K-IK2NhJtESoieWGd1ULLsACEXuUDE4Ck,7922
+coralnet_toolbox/QtSystemMonitor.py,sha256=KSsUZ11GcHPH0Q7Pl8TUsFSB_jnWzNqGmtnf7IYYXpQ,16567
 coralnet_toolbox/QtWorkArea.py,sha256=YXRvHQKpWUtWyv_o9lZ8rmxfm28dUOG9pmMUeimDhQ4,13578
-coralnet_toolbox/__init__.py,sha256=-Nkdnt5uSinCh5WRxHXkS1YzGoDzWz665VAoGrCk-IQ,207
+coralnet_toolbox/__init__.py,sha256=VV5fKKSOZQ99ek8daqcG9QSiYWU3u7JvjF5L9yq8tfA,207
 coralnet_toolbox/main.py,sha256=6j2B_1reC_KDmqvq1C0fB-UeSEm8eeJOozp2f4XXMLQ,1573
-coralnet_toolbox/utilities.py,sha256=apbVfWlySSUG0BMSVzpAUWwasQeFsI6afuOHpUSu-nI,30961
+coralnet_toolbox/utilities.py,sha256=Up6_z0n-8p7KccFLgBvlSnhfgX8B_mVTSmIfajpkkug,31173
 coralnet_toolbox/Annotations/QtAnnotation.py,sha256=4KxqDe_WPMGK18OYHZ1NqWzV87ARv8MnLAQdHFXo-Yg,27527
 coralnet_toolbox/Annotations/QtMaskAnnotation.py,sha256=Hs8p-Lxv4OYqcx4Y7dayer1KxTaX-G0kuQe-W2JuGIE,16269
 coralnet_toolbox/Annotations/QtMultiPolygonAnnotation.py,sha256=u0vPQPeaBCEdw5lMFygpBbjDKDSjvtV7Jkj03MDfj8M,16463
 coralnet_toolbox/Annotations/QtPatchAnnotation.py,sha256=huoCsPloSA5uCZxG7pwIoO02GcHFDwW8-VohLxbCVnY,16608
-coralnet_toolbox/Annotations/QtPolygonAnnotation.py,sha256=D8VajJ1pBtb5NOSYIspAxonkiVWn0yuWSNo8vRjreSw,33371
-coralnet_toolbox/Annotations/QtRectangleAnnotation.py,sha256=FnKDHRNg_v9pUEqhdyvbGBGSxtNww3AZU_rulYU0878,21357
+coralnet_toolbox/Annotations/QtPolygonAnnotation.py,sha256=NYzca916tfZDOxbP9TBC5CaFEoiMiZec8QkwHtpfGLQ,35198
+coralnet_toolbox/Annotations/QtRectangleAnnotation.py,sha256=F49Cc3MyPPHQp-qhfjsFACE5ZRwT4Hsq0jDi8nt9iVg,22492
 coralnet_toolbox/Annotations/__init__.py,sha256=bpMldC70tT_lzMrOdBNDkEhG9dCX3tXEBd48IrcUg3E,419
-coralnet_toolbox/AutoDistill/QtBatchInference.py,sha256=k871aW3XRX8kc4BDaS1aipbPh9WOZxgmilF2c4KOdVA,5646
-coralnet_toolbox/AutoDistill/QtDeployModel.py,sha256=Fycm7wuydUfr1E2CUy00ridiI2JaNDZqAeoVB_HVydY,25923
-coralnet_toolbox/AutoDistill/__init__.py,sha256=-cJSCr3HSVcybbkvdSZY_zz9EDLESq9A3gisHu3gIgM,206
-coralnet_toolbox/AutoDistill/Models/GroundingDINO.py,sha256=xG20nLOrKjtzRhZznIIdwFXxBJ7RCeQ7h1z0V0J6trE,2781
-coralnet_toolbox/AutoDistill/Models/OWLViT.py,sha256=disVxSQ80sS4SVYdwrQocFP_LN6YDQQhzfeORWe4veU,2572
-coralnet_toolbox/AutoDistill/Models/OmDetTurbo.py,sha256=i2k9C0U8CzojKvv58CE_4wvquvR_JHUHRCe93Yzb5QQ,2526
-coralnet_toolbox/AutoDistill/Models/QtBase.py,sha256=P9dzGgzOZJZr-hQltAIswWqUyfaUP40GcXc_X11GOv8,4220
-coralnet_toolbox/AutoDistill/Models/__init__.py,sha256=3woEIkWjoNLlZhNijnyAwAimsBoy2AGCt_tks3Y4q6M,259
 coralnet_toolbox/BreakTime/QtBreakout.py,sha256=KYlhLMHF_5HVkjR8JDjbNu8CB6SHsEpECAywXqWVw10,54763
 coralnet_toolbox/BreakTime/QtSnake.py,sha256=XxmV64A_1avYf1uC_fXQpOZV3kCetz3CqboQsFwSIJk,22398
 coralnet_toolbox/BreakTime/__init__.py,sha256=7d_CMXp7T872NV-a6xaGU4oq5wjWAWGyrnd-YD3BDJo,150
@@ -36,16 +29,19 @@ coralnet_toolbox/Common/QtUpdateImagePaths.py,sha256=_hJYx6hXdAOfH_m77f75AQduQ0W
 coralnet_toolbox/CoralNet/QtAuthenticate.py,sha256=Y__iY0Kcosz6AOV7dlJBwiB6Hte40wHahHe-OmRngZA,13267
 coralnet_toolbox/CoralNet/QtDownload.py,sha256=YGu_-4SCmTw8TJC6qSuZr6OGfesKHOgy1nWMioE1ECI,48552
 coralnet_toolbox/CoralNet/__init__.py,sha256=ILkAZh6mlAK1UaCCZjCB9JZxd-oY4cIgfnIC8UgjjIU,188
-coralnet_toolbox/Explorer/QtDataItem.py,sha256=mRcQ0GlriEjhbFV_khbuFNMEJhIyFxD_uIC5v1L03cw,14885
-coralnet_toolbox/Explorer/QtExplorer.py,sha256=mj09zkRFAw6AgepxXa0A2qTtR3iWlgCyphy5Bf2FoDU,138517
+coralnet_toolbox/Explorer/QtDataItem.py,sha256=_Qb7R32N07H1UcPTjm0CMtSWJbwQYdwYFuvCfae24h4,15772
+coralnet_toolbox/Explorer/QtExplorer.py,sha256=ffFlw1LKFd0Wh4zCvcYaXL2uyb54o_IyIQGC2SveGiE,79950
 coralnet_toolbox/Explorer/QtFeatureStore.py,sha256=3VwGezs1stmu65Z4ZQpvY27rGEIJq_prERWkFwMATBo,7378
-coralnet_toolbox/Explorer/QtSettingsWidgets.py,sha256=g4ZHpNVj5fTt6TmPBJqS-7NGTLGQm-dkt5iubbU6VyY,31474
+coralnet_toolbox/Explorer/QtSettingsWidgets.py,sha256=wwgMje5hga6GpJsAwFYXqd3G_Hew0oKmtu3tEr061Hs,36042
+coralnet_toolbox/Explorer/QtViewers.py,sha256=2qtzxSQNsRoHYUdm2t046QCUJd51pKOts5DNtgMRmUY,69408
 coralnet_toolbox/Explorer/__init__.py,sha256=wZPhf2oaUUyIQ2WK48Aj-4q1ENIZG2dGl1HF_mjhI6w,116
+coralnet_toolbox/Explorer/transformer_models.py,sha256=yNgGoYEIWcSgCnCOSKdAatG4M6GRbtf4pVmUWGPxvWo,2878
+coralnet_toolbox/Explorer/yolo_models.py,sha256=GicZrypDE699gut5KEW68Ui_KiTk4Ojt1uRkyDWJVI8,3473
 coralnet_toolbox/IO/QtExportAnnotations.py,sha256=xeaS0BukC3cpkBIGT9DXRqHmvHhp-vOU47h6EoANpNg,4474
 coralnet_toolbox/IO/QtExportCoralNetAnnotations.py,sha256=4royhF63EmeOlSIBX389EUjjvE-SF44_maW6qm52mdA,2778
 coralnet_toolbox/IO/QtExportGeoJSONAnnotations.py,sha256=9HkHjQTRtH4VnYa50c5pyqQz30R_6gIH5i3xFF6kDWI,27759
 coralnet_toolbox/IO/QtExportLabels.py,sha256=Vsav0wd1EK4g065aEWvxyNuvvM9BFB7UXxz6IJzwVBU,2588
-coralnet_toolbox/IO/QtExportMaskAnnotations.py,sha256=U-2sQyA154La5-QCovkJkrz_2-2bmuZytTw05VKinCE,24268
+coralnet_toolbox/IO/QtExportMaskAnnotations.py,sha256=7prXGsFFXjF1dvaybEqmwBO-N1th0c-s8GsedmyB_RQ,32651
 coralnet_toolbox/IO/QtExportTagLabAnnotations.py,sha256=JL4r1a6_PUjCzWQjMxOzxtkF2gyqIttpD14OxEcW-dA,11330
 coralnet_toolbox/IO/QtExportTagLabLabels.py,sha256=e6OL8UNtLRAJrovfs1cxVz0k2bHuJXdVmO-A0OVpgSk,3164
 coralnet_toolbox/IO/QtExportViscoreAnnotations.py,sha256=AUTzVB-N9uwlQPSds74YXyPVZzEHph7HDq01R88OBJY,19166
@@ -90,6 +86,7 @@ coralnet_toolbox/Icons/rocket.png,sha256=iMlRGlrNBS_dNBD2XIpN4RSrphCGbw_Ds1AYJ01
 coralnet_toolbox/Icons/select.png,sha256=twnMIO9ylQYjvyGnAR28V6K3ds6xpArZQTrvf0uxS6g,1896
 coralnet_toolbox/Icons/settings.png,sha256=rklROt3oKrfEk_qwN9J-JwvKok08iOkZy3OD4oNsLJQ,1376
 coralnet_toolbox/Icons/snake.png,sha256=cwcekSkXwDi_fhtTU48u7FN4bIybbY53cWK0n7-IN9A,2361
+coralnet_toolbox/Icons/system_monitor.png,sha256=ui6377kyFMHLnbfSFiE5NAJVnC16tku4RDi7Rv5vJ-0,739
 coralnet_toolbox/Icons/target.png,sha256=jzb-S_sXWT8MfbvefhDNsuTdAZgV2nGf1ieawaCkByM,1702
 coralnet_toolbox/Icons/tile.png,sha256=WiXKBpWVBfPv7gC8dnkc_gW3wuLQmLUyxYMWEM-G9ZU,382
 coralnet_toolbox/Icons/transparent.png,sha256=ZkuGkVzh6zLVNau1Wj166-TtUlbCRqJObGt4vxMxnLk,1098
@@ -140,7 +137,7 @@ coralnet_toolbox/MachineLearning/ExportDataset/QtClassify.py,sha256=5LB8m2zJ24hj
 coralnet_toolbox/MachineLearning/ExportDataset/QtDetect.py,sha256=ptZ0rUoZ1Tc0RGjKuXU15ZTM87m3gO8vLu6I7w5PVgs,6669
 coralnet_toolbox/MachineLearning/ExportDataset/QtSegment.py,sha256=7sDczfciPPbGgMdb6D9pZn27DHs_Spg1bd-bc9_hI7Y,6696
 coralnet_toolbox/MachineLearning/ExportDataset/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-coralnet_toolbox/MachineLearning/ImportDataset/QtBase.py,sha256=2dy9KirW-LlN0SILlq1LGvAYGutlSnWqVHnPFeJN32Q,25261
+coralnet_toolbox/MachineLearning/ImportDataset/QtBase.py,sha256=BQ_o5l_QHPm2dFD1jUkQxaQJ8ya1zysg3_14J7wei8E,29369
 coralnet_toolbox/MachineLearning/ImportDataset/QtDetect.py,sha256=1YQFAgfuPUUZ18fXbvs4GP9Mrp_-9kfeDdmJHEA5e7I,1121
 coralnet_toolbox/MachineLearning/ImportDataset/QtSegment.py,sha256=D4bef57dCQa4nJWf8cUphrUILvbfT-a34C-rgpxi814,1163
 coralnet_toolbox/MachineLearning/ImportDataset/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
@@ -173,28 +170,28 @@ coralnet_toolbox/MachineLearning/VideoInference/YOLO3D/bbox3d_utils.py,sha256=Vw
 coralnet_toolbox/MachineLearning/VideoInference/YOLO3D/depth_model.py,sha256=WQkGmmJwqOC2N__H_76Kegl143vp50Gs63dW54f9E1E,8845
 coralnet_toolbox/MachineLearning/VideoInference/YOLO3D/detection_model.py,sha256=-rNedm35hv3fGBZyvMHb_mBm0Hjh9MfVHHrbFjxifiE,11453
 coralnet_toolbox/MachineLearning/VideoInference/YOLO3D/load_camera_params.py,sha256=0eduzfUuT_C852D9LWykMN1tGf962lk6Q2qjiJyxcQA,4430
-coralnet_toolbox/MachineLearning/VideoInference/YOLO3D/run.py,sha256=Aumo6RkwJ1wzAloCuhNjgyFnb4z5YeRlME8N_6IdfCU,23463
+coralnet_toolbox/MachineLearning/VideoInference/YOLO3D/run.py,sha256=eOXf7o9M9OB4T1BpvqBe3cx00VVTscw-YCCCHOOI2X8,26912
 coralnet_toolbox/Rasters/ImageFilter.py,sha256=EhH1YoVjC29ER4qU4t9xwybeX9E012fLq04iPFfbaz4,7928
 coralnet_toolbox/Rasters/QtRaster.py,sha256=qWqhKiZbBnv0JnCQvPXx6fQYENgSA7sp3vBUj4fdnhA,18435
 coralnet_toolbox/Rasters/RasterManager.py,sha256=GuTeuty3x4k8DWZ34FQ4BTNwRfjWu0lEhv3NfF2t5u4,6534
-coralnet_toolbox/Rasters/RasterTableModel.py,sha256=vgSCjzqK2u_9DyRjz2ecZAUmIh9eDfcPWl8vglGg4Sc,14546
+coralnet_toolbox/Rasters/RasterTableModel.py,sha256=8ebirBkTUSy5Rdsoq10sqzDQBoYCH_Hu40dPiUhtlzc,15311
 coralnet_toolbox/Rasters/__init__.py,sha256=Pi88uDQbtWxwHfJFdlsvbkwGNhtlyM_013l8bbJlFfw,428
 coralnet_toolbox/Results/CombineResults.py,sha256=QrHyKhMrjNDtQ98PQabUflHhyv_8KXTGqU30tw9amV8,4523
-coralnet_toolbox/Results/ConvertResults.py,sha256=uh7tfJxDiH_cJHxWsr8KQQOcsfzC59-k6PQDVtl3ZaY,6015
+coralnet_toolbox/Results/ConvertResults.py,sha256=f9L1C7JQMVt7x2eSG4CAqpUDRhhc6eQZvTaqCiGUWmk,6310
 coralnet_toolbox/Results/MapResults.py,sha256=sDieekB93RVKvD-9mW1zRsHOs85shYVIQklt9-kFJZI,8088
 coralnet_toolbox/Results/Masks.py,sha256=C-zoobRaWXP_QdGcL7ZgSxytHOBdHIBUbQuGnoMZthE,5183
-coralnet_toolbox/Results/ResultsProcessor.py,sha256=q_UZNYggpZyY4_P6RpDLbY1ygNH49GXlP1s9ZFI3yF0,17261
+coralnet_toolbox/Results/ResultsProcessor.py,sha256=WPyq4nETrnk9WpMaeFw0rlQRFqZkzAYz_qttT3_jaoA,17332
 coralnet_toolbox/Results/__init__.py,sha256=WPdlq8aXzjrdQo5T3UqFh7jxge33iNEHiSRAmm0eJuw,630
 coralnet_toolbox/SAM/QtBatchInference.py,sha256=UyuYLfPF4JrOmmuMOzshbKDEEribV669d9LURmuu6gg,6866
-coralnet_toolbox/SAM/QtDeployGenerator.py,sha256=1zzzLuN6Dqe0tVoc3KosaFfYh39HeJ1OCOH0GvqRZDg,26550
-coralnet_toolbox/SAM/QtDeployPredictor.py,sha256=m8ZmK5ZVcNHz-i7sYkW7jCa8ZAgEMtN12S21h4is-OM,23966
+coralnet_toolbox/SAM/QtDeployGenerator.py,sha256=UtJH1ZQ9g_Wqa4s09PBMCrmNRpqUZZaTMpE7lfH_Xq0,26491
+coralnet_toolbox/SAM/QtDeployPredictor.py,sha256=7jOFFnNqY7Ylr1IKjTI_YSrPYgCxHpI4ZBda7Kp806g,24413
 coralnet_toolbox/SAM/__init__.py,sha256=Zxd75pFMrt5DfSmNNVSsQeCucIQ2rVaEiS0hT_OVIMM,293
 coralnet_toolbox/SeeAnything/QtBatchInference.py,sha256=k3aftVzva84yATB4Su5DSI0lhkHDggUg3mVAx4AHmjw,7134
-coralnet_toolbox/SeeAnything/QtDeployGenerator.py,sha256=OvzCDYpsT2RlfR03JBUANaQt5B7ffP6jlk0qcbYC_cw,67836
-coralnet_toolbox/SeeAnything/QtDeployPredictor.py,sha256=0hjtxrKMN26XyRMoUxFKhnmcBY9vnhfndBdAhXrqGHs,24765
+coralnet_toolbox/SeeAnything/QtDeployGenerator.py,sha256=5wG2XKhHzdNXjmG3u5Npc7TGBfFLMWPHzYQ12NlZvd0,68578
+coralnet_toolbox/SeeAnything/QtDeployPredictor.py,sha256=sfaaJoDFM2ntdqD0CXsTSHRZIuCRfviVg4vqhG0sGdI,26804
 coralnet_toolbox/SeeAnything/QtTrainModel.py,sha256=dQ6ZkeIr1migU-edGO-gQMENVP4o7WJsIANlSVhFK8k,28031
 coralnet_toolbox/SeeAnything/__init__.py,sha256=4OgG9-aQ6_RZ942-Ift_q-kkp14kObMT4lDIIx9YSxQ,366
-coralnet_toolbox/Tile/QtTileBatchInference.py,sha256=chSo-TOwpnAgIQpTetZnbhWQIBzbDJO01xAQ3z0-ubk,10730
+coralnet_toolbox/Tile/QtTileBatchInference.py,sha256=oo7NeqEwwmxMEGBMo5v2SCnyQwJYq9pg-9yj3G_-vaM,10727
 coralnet_toolbox/Tile/QtTileCreation.py,sha256=Cw6q0ZVXx0hU4uMuXA3OZ3_5bKu6oQgnlMFwaXSZbS0,39963
 coralnet_toolbox/Tile/__init__.py,sha256=BlV-1bO9u-olfNAIvukUMPRzlw8dx-ayjrjMRQ-bSsk,463
 coralnet_toolbox/Tile/TileDataset/QtBase.py,sha256=5zWtnNVbril0i3aVFv5q0VyMYcFP0Qb09ylqQ4smBXw,18623
@@ -206,10 +203,10 @@ coralnet_toolbox/Tools/QtCutSubTool.py,sha256=3mO8VLrS_sKuEukGQljF_gX8PMdf1za-dx
 coralnet_toolbox/Tools/QtMoveSubTool.py,sha256=K5uXEGGl8nwKvGI0DBGZ8ILZsxWf22HVSfyLr0hKhuE,2165
 coralnet_toolbox/Tools/QtPanTool.py,sha256=q0g5Ryse6mIZ_Ss4qJw5NNwgoLuQQBIyQTXNFL643-s,3062
 coralnet_toolbox/Tools/QtPatchTool.py,sha256=57vFeR2jQ_VQRlMEIC_mH8NigUqOlVvmhaVkXDvd_Gw,5574
-coralnet_toolbox/Tools/QtPolygonTool.py,sha256=yxnkwK3rb52pWCq7a3iAABhHUSS_a3vkL7G7Ev0uLDA,9174
-coralnet_toolbox/Tools/QtRectangleTool.py,sha256=gYOOsn1WRHLG0YzkKmmM7OzLpuLNh8GWIZ4MloXoLDc,7218
+coralnet_toolbox/Tools/QtPolygonTool.py,sha256=mQB2NuGGUmo7W4Pf8DJI9PDa3PW7Hu2nOMf4uTyWAq8,11236
+coralnet_toolbox/Tools/QtRectangleTool.py,sha256=VJWKktDiDN1TUZATRSe_1kjppFjV0AMYDZ8bK78dCXc,8672
 coralnet_toolbox/Tools/QtResizeSubTool.py,sha256=cWJDx8PEtxoCLQKuyEyZ6ccBzFKau9j1djrSSpDgaq8,6524
-coralnet_toolbox/Tools/QtSAMTool.py,sha256=PKSQU0gBsbI5rDCHRs46fUsS8QDkaqYT7aF8CEreNBQ,34068
+coralnet_toolbox/Tools/QtSAMTool.py,sha256=c719q573SRDxr5mLxNB7K5YNvZnWQYcFxzdm_I4f08Y,36529
 coralnet_toolbox/Tools/QtSeeAnythingTool.py,sha256=ZjqaM0At7MCB5R0wgibph75FkH2ZIePfRnXQUQko6wE,38515
 coralnet_toolbox/Tools/QtSelectSubTool.py,sha256=_FIjLhnEVY19Q87jhRKXGdghNfMBxxy_sECAIUo0BZA,3294
 coralnet_toolbox/Tools/QtSelectTool.py,sha256=qAXRIGmjdzWjaH6GwhvlQSodZuFa6OnyckzNVfVDG2w,20983
@@ -219,9 +216,17 @@ coralnet_toolbox/Tools/QtTool.py,sha256=2MCjT151gYBN8KbsK0GX4WOrEg1uw3oeSkp7Elw1
 coralnet_toolbox/Tools/QtWorkAreaTool.py,sha256=ApsIiEbkyWFWKW6qnFPPnL_Wgs2xa_Edif5kZU5_n8M,22733
 coralnet_toolbox/Tools/QtZoomTool.py,sha256=F9CAoABv1jxcUS7dyIh1FYjgjOXYRI1xtBPNIR1g62o,4041
 coralnet_toolbox/Tools/__init__.py,sha256=UYStZw1eA_yJ07IVli1MYSvk0pSCs1aS169LcQo630s,867
-coralnet_toolbox-0.0.74.dist-info/licenses/LICENSE.txt,sha256=AURacZ_G_PZKqqPQ9VB9Sqegblk67RNgWSGAYKwXXMY,521
-coralnet_toolbox-0.0.74.dist-info/METADATA,sha256=MQd5g1S0o1V9ZkhTUe1Ky13Jk_7bK2hn4CqLL4YoFcw,15083
-coralnet_toolbox-0.0.74.dist-info/WHEEL,sha256=JNWh1Fm1UdwIQV075glCn4MVuCRs0sotJIq-J6rbxCU,109
-coralnet_toolbox-0.0.74.dist-info/entry_points.txt,sha256=oEeMoDlJ_2lq95quOeDHIx9hZpubUlSo80OLtgbcrbM,63
-coralnet_toolbox-0.0.74.dist-info/top_level.txt,sha256=SMWPh4_9JfB8zVpPOOvjucV2_B_hvWW7bNWmMjG0LsY,17
-coralnet_toolbox-0.0.74.dist-info/RECORD,,
+coralnet_toolbox/Transformers/QtBatchInference.py,sha256=Adry1H-oIMV6Ppo8yRJRx79oeG1yUthT5jqszj7EJ20,5764
+coralnet_toolbox/Transformers/QtDeployModel.py,sha256=oH82XtP07f0n6fUhlPADUbFujTJt0ppSvitX15KeuaQ,25669
+coralnet_toolbox/Transformers/__init__.py,sha256=Oef7mKgwlK_hi5ZtXlRTvpyKhf98JPwBELWE3zjrp9U,207
+coralnet_toolbox/Transformers/Models/GroundingDINO.py,sha256=V77tapTLsXtTISbqsV9ZSGYgkJQTI1RKsT95QagYhqk,2747
+coralnet_toolbox/Transformers/Models/OWLViT.py,sha256=l9R9XKN7grw6gF7EC9DWxF5sUsApLfi0WO-zj6pVVHU,2781
+coralnet_toolbox/Transformers/Models/OmDetTurbo.py,sha256=vaXaQNqBCvnEFcPMt6x_EJI-gf-Wy3eYZPak7a527WY,2592
+coralnet_toolbox/Transformers/Models/QtBase.py,sha256=AYGTOxopOXYrHSDWAenvyxAAgqHbQs3zIox-c4BX9YQ,4533
+coralnet_toolbox/Transformers/Models/__init__.py,sha256=icJnQkt2vZksubEJuih0sT0q2vLR_Y-12WuTGquvxt8,260
+coralnet_toolbox-0.0.76.dist-info/licenses/LICENSE.txt,sha256=AURacZ_G_PZKqqPQ9VB9Sqegblk67RNgWSGAYKwXXMY,521
+coralnet_toolbox-0.0.76.dist-info/METADATA,sha256=MlZwkwAUGZWKcCzQVqIpHkuoenuz2arqnEIzviPsFps,15381
+coralnet_toolbox-0.0.76.dist-info/WHEEL,sha256=JNWh1Fm1UdwIQV075glCn4MVuCRs0sotJIq-J6rbxCU,109
+coralnet_toolbox-0.0.76.dist-info/entry_points.txt,sha256=oEeMoDlJ_2lq95quOeDHIx9hZpubUlSo80OLtgbcrbM,63
+coralnet_toolbox-0.0.76.dist-info/top_level.txt,sha256=SMWPh4_9JfB8zVpPOOvjucV2_B_hvWW7bNWmMjG0LsY,17
+coralnet_toolbox-0.0.76.dist-info/RECORD,,

coralnet_toolbox/AutoDistill/Models/GroundingDINO.py (deleted)

@@ -1,81 +0,0 @@
-from dataclasses import dataclass
-
-import cv2
-import numpy as np
-
-import supervision as sv
-
-from transformers import AutoProcessor, AutoModelForZeroShotObjectDetection
-
-from autodistill.detection import CaptionOntology
-from autodistill.helpers import load_image
-
-from coralnet_toolbox.AutoDistill.Models.QtBase import QtBaseModel
-
-
-# ----------------------------------------------------------------------------------------------------------------------
-# Classes
-# ----------------------------------------------------------------------------------------------------------------------
-
-
-@dataclass
-class GroundingDINOModel(QtBaseModel):
-    def __init__(self, ontology: CaptionOntology, model="SwinB", device: str = "cpu"):
-        super().__init__(ontology, device)
-
-        if model == "SwinB":
-            model_name = "IDEA-Research/grounding-dino-base"
-        else:
-            model_name = "IDEA-Research/grounding-dino-tiny"
-
-        self.processor = AutoProcessor.from_pretrained(model_name, use_fast=True)
-        self.model = AutoModelForZeroShotObjectDetection.from_pretrained(model_name).to(self.device)
-
-    def _process_predictions(self, image, texts, class_idx_mapper, confidence):
-        """Process model predictions for a single image."""
-        inputs = self.processor(text=texts, images=image, return_tensors="pt").to(self.device)
-        outputs = self.model(**inputs)
-
-        results = self.processor.post_process_grounded_object_detection(
-            outputs,
-            inputs.input_ids,
-            box_threshold=confidence,
-            text_threshold=confidence,
-            target_sizes=[image.shape[:2]],
-        )[0]
-
-        boxes, scores, labels = (
-            results["boxes"],
-            results["scores"],
-            results["text_labels"],
-        )
-
-        final_boxes, final_scores, final_labels = [], [], []
-
-        for box, score, label in zip(boxes, scores, labels):
-            try:
-                box = box.detach().cpu().numpy().astype(int).tolist()
-                score = score.item()
-                # Grounding Dino issues...
-                label = class_idx_mapper[label.split(" ")[0]]
-
-                # Amplify scores
-                if score < confidence:
-                    continue
-
-                final_boxes.append(box)
-                final_scores.append(score)
-                final_labels.append(label)
-
-            except Exception as e:
-                print(f"Error: Issue converting predictions:\n{e}")
-                continue
-
-        if len(final_boxes) == 0:
-            return None
-
-        return sv.Detections(
-            xyxy=np.array(final_boxes),
-            class_id=np.array(final_labels),
-            confidence=np.array(final_scores)
-        )

coralnet_toolbox/AutoDistill/Models/OWLViT.py (deleted)

@@ -1,76 +0,0 @@
-from dataclasses import dataclass
-
-import cv2
-import numpy as np
-
-import supervision as sv
-
-from transformers import OwlViTForObjectDetection, OwlViTProcessor
-
-from autodistill.detection import CaptionOntology
-from autodistill.helpers import load_image
-
-from coralnet_toolbox.AutoDistill.Models.QtBase import QtBaseModel
-
-
-# ----------------------------------------------------------------------------------------------------------------------
-# Classes
-# ----------------------------------------------------------------------------------------------------------------------
-
-
-@dataclass
-class OWLViTModel(QtBaseModel):
-    def __init__(self, ontology: CaptionOntology, device: str = "cpu"):
-        super().__init__(ontology, device)
-
-        model_name = "google/owlvit-base-patch32"
-        self.processor = OwlViTProcessor.from_pretrained(model_name, use_fast=True)
-        self.model = OwlViTForObjectDetection.from_pretrained(model_name).to(self.device)
-
-    def _process_predictions(self, image, texts, class_idx_mapper, confidence):
-        """Process model predictions for a single image."""
-        inputs = self.processor(text=texts, images=image, return_tensors="pt").to(self.device)
-        outputs = self.model(**inputs)
-
-        results = self.processor.post_process_object_detection(
-            outputs,
-            threshold=confidence,
-            target_sizes=[image.shape[:2]]
-        )[0]
-
-        boxes, scores, labels = (
-            results["boxes"],
-            results["scores"],
-            results["labels"],
-        )
-
-        final_boxes, final_scores, final_labels = [], [], []
-
-        for box, score, label in zip(boxes, scores, labels):
-            try:
-                box = box.detach().cpu().numpy().astype(int).tolist()
-                score = score.item()
-                label_index = label.item()
-                class_label = texts[label_index]
-                label = class_idx_mapper[class_label]
-
-                # Filter by confidence
-                if score < confidence:
-                    continue
-
-                final_boxes.append(box)
-                final_scores.append(score)
-                final_labels.append(label)
-
-            except Exception as e:
-                print(f"Error: Issue converting predictions:\n{e}")
-                continue
-
-        if len(final_boxes) == 0:
-            return None
-
-        return sv.Detections(
-            xyxy=np.array(final_boxes),
-            class_id=np.array(final_labels),
-            confidence=np.array(final_scores)
-        )