neuro-sam 0.1.5__py3-none-any.whl → 0.1.7__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- neuro_sam/napari_utils/punet_widget.py +5 -11
- neuro_sam/napari_utils/segmentation_model.py +16 -7
- neuro_sam/napari_utils/segmentation_module.py +1 -4
- neuro_sam/plugin.py +5 -3
- neuro_sam/punet/punet_inference.py +1 -1
- neuro_sam/utils.py +90 -0
- {neuro_sam-0.1.5.dist-info → neuro_sam-0.1.7.dist-info}/METADATA +2 -1
- {neuro_sam-0.1.5.dist-info → neuro_sam-0.1.7.dist-info}/RECORD +12 -11
- {neuro_sam-0.1.5.dist-info → neuro_sam-0.1.7.dist-info}/entry_points.txt +1 -0
- {neuro_sam-0.1.5.dist-info → neuro_sam-0.1.7.dist-info}/WHEEL +0 -0
- {neuro_sam-0.1.5.dist-info → neuro_sam-0.1.7.dist-info}/licenses/LICENSE +0 -0
- {neuro_sam-0.1.5.dist-info → neuro_sam-0.1.7.dist-info}/top_level.txt +0 -0
neuro_sam/napari_utils/punet_widget.py CHANGED

@@ -15,14 +15,8 @@ import napari
 from napari.qt.threading import thread_worker
 
 # Import the model class
-#
-try:
-    from neuro_sam.punet.punet_inference import run_inference_volume
-except ImportError:
-    # Fallback if running from a different context, try to append path
-    import sys
-    sys.path.append(os.path.join(os.path.dirname(__file__), '..', 'punet'))
-    from neuro_sam.punet.punet_inference import run_inference_volume
+# Assumes installed package structure
+from neuro_sam.punet.punet_inference import run_inference_volume
 
 
 
@@ -197,13 +191,13 @@ class PunetSpineSegmentationWidget(QWidget):
     def _segmentation_worker(self, vol, params):
         import traceback
         try:
+            # Import the refactored inference function from the package
             # Import the refactored inference function from the package
             try:
                 from neuro_sam.punet.punet_inference import run_inference_volume
             except ImportError:
-
-
-                from neuro_sam.punet_inference import run_inference_volume
+                # Fallback should not be needed with proper package execution
+                raise ImportError("Could not import run_inference_volume from neuro_sam.punet.punet_inference")
 
             yield "Starting inference..."
 
neuro_sam/napari_utils/segmentation_model.py CHANGED

@@ -8,22 +8,33 @@ from scipy.ndimage import label
 from matplotlib.path import Path
 
 
+from neuro_sam.utils import get_weights_path
+
 class DendriteSegmenter:
     """Class for segmenting dendrites from 3D image volumes using SAM2 with overlapping patches"""
 
-    def __init__(self, model_path=
+    def __init__(self, model_path=None, config_path="sam2.1_hiera_s.yaml", weights_path=None, device="cuda"):
         """
         Initialize the dendrite segmenter with overlapping patches.
 
         Args:
-            model_path: Path to SAM2 model checkpoint
+            model_path: Path to SAM2 model checkpoint (auto-downloaded if None)
             config_path: Path to model configuration
-            weights_path: Path to trained weights
+            weights_path: Path to trained weights (auto-downloaded if None)
             device: Device to run the model on (cpu or cuda)
         """
-        self.model_path = model_path
+        if model_path is None:
+            self.model_path = get_weights_path("sam2.1_hiera_small.pt")
+        else:
+            self.model_path = model_path
+
         self.config_path = config_path
-        self.weights_path = weights_path
+
+        if weights_path is None:
+            self.weights_path = get_weights_path("dendrite_model.torch")
+        else:
+            self.weights_path = weights_path
+
         self.device = device
         self.predictor = None
 

@@ -35,8 +46,6 @@ class DendriteSegmenter:
 
         # Try importing first to catch import errors
         try:
-            import sys
-            sys.path.append('./Train-SAMv2')
             from sam2.build_sam import build_sam2
             from sam2.sam2_image_predictor import SAM2ImagePredictor
             print("Successfully imported SAM2 modules")
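For reference, a minimal usage sketch of the new constructor defaults (illustrative, not part of the diff; the import path is inferred from the file location, and load_model() is the same method the widget calls in the next hunk):

# Illustrative sketch only: with model_path and weights_path left as None,
# the segmenter resolves both checkpoints through neuro_sam.utils.get_weights_path,
# which checks ./checkpoints, then ~/.neuro_sam/checkpoints, then downloads.
from neuro_sam.napari_utils.segmentation_model import DendriteSegmenter

segmenter = DendriteSegmenter(device="cuda")  # or device="cpu"
success = segmenter.load_model()              # same call SegmentationWidget makes below
print("SAM2 checkpoint:", segmenter.model_path)
print("Dendrite weights:", segmenter.weights_path)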
neuro_sam/napari_utils/segmentation_module.py CHANGED

@@ -349,11 +349,8 @@ class SegmentationWidget(QWidget):
         # Initialize segmenter if not already done
         if self.segmenter is None:
             self.segmenter = DendriteSegmenter(
-                model_path="./Train-SAMv2/checkpoints/sam2.1_hiera_small.pt",
-                config_path="sam2.1_hiera_s.yaml",
-                weights_path="./Train-SAMv2/results/samv2_dendrite/dendrite_model.torch",
                 device=device
-            )
+            )  # Paths are now handled automatically by default args
 
         # Load the model
         success = self.segmenter.load_model()
neuro_sam/plugin.py CHANGED

@@ -247,11 +247,13 @@ def main():
     else:
         # Try to load a default benchmark image
        try:
-
-
+            from neuro_sam.utils import get_weights_path
+            default_path = get_weights_path('DeepD3_Benchmark.tif')
+            print(f"No image path provided, loading default: {default_path}")
             spacing_xyz = (args.x_spacing, args.y_spacing, args.z_spacing)
             viewer = run_neuro_sam(image_path=default_path, spacing_xyz=spacing_xyz)
-        except
+        except Exception as e:
+            print(f"Failed to load default image: {e}")
             sys.exit(1)
 
     print("\nStarted NeuroSAM with anisotropic scaling support!")
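A hedged sketch of the new fallback in main(): with no image path supplied, the benchmark volume is fetched through the shared utility instead of a hard-coded local file. The console-script name lives in entry_points.txt, which this diff does not show, so main() is invoked directly here and argparse-style argv handling is assumed:

# Illustrative sketch only: exercising the default-image fallback.
import sys
from neuro_sam import plugin

sys.argv = ["neuro-sam"]  # hypothetical invocation with no image argument
plugin.main()             # downloads DeepD3_Benchmark.tif on first run, then opens napari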
neuro_sam/utils.py ADDED

@@ -0,0 +1,90 @@
+import os
+import sys
+import requests
+from pathlib import Path
+from tqdm import tqdm
+
+# Constants for Model URLs
+WEIGHTS_URLS = {
+    "DeepD3_Benchmark.tif": "https://github.com/nipunarora8/Neuro-SAM/releases/download/weights/DeepD3_Benchmark.tif",
+    "dendrite_model.torch": "https://github.com/nipunarora8/Neuro-SAM/releases/download/weights/dendrite_model.torch",
+    "sam2.1_hiera_small.pt": "https://github.com/nipunarora8/Neuro-SAM/releases/download/weights/sam2.1_hiera_small.pt",
+    "punet_best.pth": "https://github.com/nipunarora8/Neuro-SAM/releases/download/weights/punet_best.pth"
+}
+
+def get_weights_dir():
+    """Get the directory where weights are stored (~/.neuro_sam/checkpoints)."""
+    weights_dir = Path.home() / ".neuro_sam" / "checkpoints"
+    weights_dir.mkdir(parents=True, exist_ok=True)
+    return weights_dir
+
+def download_file(url, dest_path):
+    """Download a file from a URL to a destination path with a progress bar."""
+    print(f"Downloading {url} to {dest_path}...")
+    try:
+        response = requests.get(url, stream=True)
+        response.raise_for_status()
+        total_size = int(response.headers.get('content-length', 0))
+
+        block_size = 1024  # 1 Kibibyte
+        t = tqdm(total=total_size, unit='iB', unit_scale=True)
+
+        with open(dest_path, 'wb') as file:
+            for data in response.iter_content(block_size):
+                t.update(len(data))
+                file.write(data)
+        t.close()
+
+        if total_size != 0 and t.n != total_size:
+            print("ERROR, something went wrong with the download")
+            return False
+
+        print(f"Download complete: {dest_path}")
+        return True
+    except Exception as e:
+        print(f"Error downloading file: {e}")
+        if os.path.exists(dest_path):
+            os.remove(dest_path)
+        return False
+
+def get_weights_path(filename, url=None):
+    """
+    Get the path to a weights file.
+    Checks local 'checkpoints' folder first, then ~/.neuro_sam/checkpoints.
+    If not found, downloads it.
+    """
+    # 1. Check local checkpoints folder (development mode)
+    local_path = Path("checkpoints") / filename
+    if local_path.exists():
+        return str(local_path.absolute())
+
+    # 2. Check global cache directory
+    weights_dir = get_weights_dir()
+    cache_path = weights_dir / filename
+
+    if cache_path.exists():
+        return str(cache_path.absolute())
+
+    # 3. Download if not found
+    if url is None:
+        url = WEIGHTS_URLS.get(filename)
+
+    if url:
+        print(f"Weights file {filename} not found locally. Downloading...")
+        success = download_file(url, cache_path)
+        if success:
+            return str(cache_path.absolute())
+        else:
+            raise RuntimeError(f"Failed to download {filename}")
+    else:
+        raise FileNotFoundError(f"Weights file {filename} not found and no URL provided.")
+
+def download_all_models():
+    """Download all known models to the cache directory."""
+    print("Downloading all Neuro-SAM models...")
+    for filename, url in WEIGHTS_URLS.items():
+        try:
+            get_weights_path(filename, url)
+        except Exception as e:
+            print(f"Failed to process {filename}: {e}")
+    print("All downloads processed.")
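For reference, a short usage sketch of the new utility module (illustrative; it only relies on the functions defined above):

# Illustrative sketch only: checkpoint resolution order is ./checkpoints/<name>,
# then ~/.neuro_sam/checkpoints/<name>, then a download from WEIGHTS_URLS.
from neuro_sam.utils import get_weights_path, download_all_models

sam_ckpt = get_weights_path("sam2.1_hiera_small.pt")  # downloaded on first use
punet_ckpt = get_weights_path("punet_best.pth")
print(sam_ckpt, punet_ckpt)

# Prefetch every known model, e.g. before working offline:
download_all_models()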
{neuro_sam-0.1.5.dist-info → neuro_sam-0.1.7.dist-info}/METADATA CHANGED

@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: neuro-sam
-Version: 0.1.5
+Version: 0.1.7
 Summary: Neuro-SAM: Foundation Models for Dendrite and Dendritic Spine Segmentation
 Author-email: Nipun Arora <nipunarora8@yahoo.com>
 License: MIT License
@@ -54,6 +54,7 @@ Requires-Dist: numba
 Requires-Dist: PyQt5
 Requires-Dist: opencv-python-headless
 Requires-Dist: matplotlib
+Requires-Dist: requests
 Dynamic: license-file
 
 <div align="center">
{neuro_sam-0.1.5.dist-info → neuro_sam-0.1.7.dist-info}/RECORD CHANGED

@@ -1,5 +1,6 @@
 neuro_sam/__init__.py,sha256=0pbMqpI_nQyhP0_pfTaIg97FVEcFkS5w8gQrsMiBcG4,34
-neuro_sam/plugin.py,sha256=
+neuro_sam/plugin.py,sha256=qFEdbEFEBgs0RSQj4zdK3cmgnCxjTmz9_HLi7LI__Tk,10235
+neuro_sam/utils.py,sha256=epdMrxYBO__sVZk3Twk9rHHOtswAu-wmqUektdQNP-s,3307
 neuro_sam/brightest_path_lib/__init__.py,sha256=vU3VvX26D2c9B26Lid09uThzKMQJYPx622SkPhxNlDI,123
 neuro_sam/brightest_path_lib/connected_componen.py,sha256=x_kjDGZ_8U2ks9kZJOOyM8_ow84UugAsPUByI-NlXFk,12734
 neuro_sam/brightest_path_lib/algorithm/__init__.py,sha256=XFYxFyx97FG7lK3_j7uA8GgunpfkOcAo2NIJU0GUn40,170
@@ -30,19 +31,19 @@ neuro_sam/napari_utils/color_utils.py,sha256=Hf5R8f0rh7b9CY1VT72o3tLGfGnnjRREkX8
 neuro_sam/napari_utils/contrasting_color_system.py,sha256=a-lt_3zJLDL9YyIdWJhFDGMYzBb6yH85cV7BNCabbdI,6771
 neuro_sam/napari_utils/main_widget.py,sha256=yahfPLwmhBt_hImpRykIObzfMwbVZvVJTEKKzMZ11bw,48588
 neuro_sam/napari_utils/path_tracing_module.py,sha256=0mMAtrMmtgK_ujMzaWzIguYVDPr8nfzalaTAwgF3NaQ,44062
-neuro_sam/napari_utils/punet_widget.py,sha256=
-neuro_sam/napari_utils/segmentation_model.py,sha256=
-neuro_sam/napari_utils/segmentation_module.py,sha256=
+neuro_sam/napari_utils/punet_widget.py,sha256=FfnC6V_FErczkaQP5y3rp1YBMWPVx6YMI4TxEHah_Vo,16862
+neuro_sam/napari_utils/segmentation_model.py,sha256=mHXVjksqEcxHRH5KWp5-hXLEnRHgGhwPUxyUkV8eJGM,34141
+neuro_sam/napari_utils/segmentation_module.py,sha256=iObM5k8VkARtB_rcqAQGzKJ-PmaAKLeFJD14_Jy6xhs,28732
 neuro_sam/napari_utils/visualization_module.py,sha256=JtZlBoKlfIwVLa2Sqg7b2KTr07fNlAcwR0M7fHsn2oM,24723
 neuro_sam/punet/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 neuro_sam/punet/deepd3_model.py,sha256=nGVEqzCPz_E4cFA6QmknW2CffDcjxH7VsdYAyTdAtY0,7509
 neuro_sam/punet/prob_unet_deepd3.py,sha256=syXNleUVrfYtmVveN9G461oAhumxsijsavps8in4VRw,14698
 neuro_sam/punet/prob_unet_with_tversky.py,sha256=2dBbO_BEHbhYWBXW7rXQX6s2DnqoTgBKkgk6VkgN-Ds,12845
-neuro_sam/punet/punet_inference.py,sha256=
+neuro_sam/punet/punet_inference.py,sha256=v5ufB2Zz5WfgfFZ5-rDjBEobpr5gy-HKPPWZpCALV7A,8033
 neuro_sam/punet/run_inference.py,sha256=c9ATKWJvhOzNEaww_sUCI5fFS1q0bQ4GYUwNUqxWcwA,5312
 neuro_sam/punet/unet_blocks.py,sha256=ZRNKay9P3OnJ0PmtKXw_iSgUyRE1DkkGefGXwSbYZGY,3171
 neuro_sam/punet/utils.py,sha256=ibwcpkqqZ3_3Afz2VYxzplz8_8FWQ5qYQqjJiKS8hIo,1786
-neuro_sam-0.1.
+neuro_sam-0.1.7.dist-info/licenses/LICENSE,sha256=akmTIN8IuZn3Y7UK_8qVQnyKDWSDcVUwB8RPGNXCojw,1068
 sam2/__init__.py,sha256=uHyh6VzVS4F2box0rPDpN5UmOVKeQNK0CIaTKG9JQZ4,395
 sam2/automatic_mask_generator.py,sha256=Zt8mbb4UQSMFrjOY8OwbshswOpMhaxAtdn5sTuXUw9c,18461
 sam2/benchmark.py,sha256=m3o1BriIQuwJAx-3zQ_B0_7YLhN84G28oQSV5sGA3ak,2811
@@ -86,8 +87,8 @@ sam2/utils/__init__.py,sha256=NL2AacVHZOe41zp4kF2-ZGcUCi9zFwh1Eo9spNjN0Ko,197
 sam2/utils/amg.py,sha256=t7MwkOKvcuBNu4FcjzKv9BpO0av5Zo9itZ8b3WQMpdg,12842
 sam2/utils/misc.py,sha256=AWAMAcFhzQedcQb7HU2oRc-RqjGrK87K-MsVG21tIKI,13090
 sam2/utils/transforms.py,sha256=ujpk9GAMYvIJIGpt87QOP88TPtrjL61liDG7DCptEUY,4885
-neuro_sam-0.1.
-neuro_sam-0.1.
-neuro_sam-0.1.
-neuro_sam-0.1.
-neuro_sam-0.1.
+neuro_sam-0.1.7.dist-info/METADATA,sha256=42Pp29GxNSpY1GeNvzeQ3XJyDDsEiMxsCLSSV2Ib-FA,9642
+neuro_sam-0.1.7.dist-info/WHEEL,sha256=wUyA8OaulRlbfwMtmQsvNngGrxQHAvkKcvRmdizlJi0,92
+neuro_sam-0.1.7.dist-info/entry_points.txt,sha256=a1JXEgiM_QOPJdV8zvcIS60WAE62MeqgIVY2oSx81FY,162
+neuro_sam-0.1.7.dist-info/top_level.txt,sha256=yPbWxFcw79sErTk8zohihUHMK9LL31i3bXir2MrS4OQ,15
+neuro_sam-0.1.7.dist-info/RECORD,,
{neuro_sam-0.1.5.dist-info → neuro_sam-0.1.7.dist-info}/WHEEL: file without changes
{neuro_sam-0.1.5.dist-info → neuro_sam-0.1.7.dist-info}/licenses/LICENSE: file without changes
{neuro_sam-0.1.5.dist-info → neuro_sam-0.1.7.dist-info}/top_level.txt: file without changes