microlive 1.0.13__py3-none-any.whl → 1.0.15__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- microlive/__init__.py +1 -1
- microlive/data/models/spot_detection_cnn.pth +0 -0
- microlive/gui/app.py +290 -27
- microlive/microscopy.py +54 -17
- microlive/pipelines/pipeline_FRAP.py +192 -21
- microlive/utils/__init__.py +11 -0
- microlive/utils/model_downloader.py +293 -0
- {microlive-1.0.13.dist-info → microlive-1.0.15.dist-info}/METADATA +1 -1
- {microlive-1.0.13.dist-info → microlive-1.0.15.dist-info}/RECORD +12 -10
- {microlive-1.0.13.dist-info → microlive-1.0.15.dist-info}/WHEEL +0 -0
- {microlive-1.0.13.dist-info → microlive-1.0.15.dist-info}/entry_points.txt +0 -0
- {microlive-1.0.13.dist-info → microlive-1.0.15.dist-info}/licenses/LICENSE +0 -0
microlive/__init__.py
CHANGED
@@ -23,7 +23,7 @@ Authors:
 Nathan L. Nowling, Brian Munsky, Ning Zhao
 """

-__version__ = "1.0.13"
+__version__ = "1.0.15"
 __author__ = "Luis U. Aguilera, William S. Raymond, Rhiannon M. Sears, Nathan L. Nowling, Brian Munsky, Ning Zhao"

 # Package name (for backward compatibility)
microlive/data/models/spot_detection_cnn.pth
CHANGED
Binary file
microlive/gui/app.py
CHANGED
@@ -9054,7 +9054,7 @@ class GUI(QMainWindow):
 # Cluster radius
 self.cluster_radius_input = QSpinBox()
 self.cluster_radius_input.setMinimum(100)
-self.cluster_radius_input.setMaximum(
+self.cluster_radius_input.setMaximum(6000)
 self.cluster_radius_input.setValue(self.cluster_radius_nm)
 self.cluster_radius_input.valueChanged.connect(self.update_cluster_radius)
 params_layout.addRow("Cluster radius (nm):", self.cluster_radius_input)
@@ -11701,9 +11701,13 @@ class GUI(QMainWindow):
 layout = QVBoxLayout(self.coloc_verify_distance_widget)
 layout.setContentsMargins(10, 5, 10, 5)

-# Info label
-info_label = QLabel(
+# Info label explaining what is displayed
+info_label = QLabel(
+    "Review unique particle tracks. Each row shows a time-averaged crop. "
+    "A track is marked colocalized (✓) if ANY frame is within the distance threshold."
+)
 info_label.setStyleSheet("font-style: italic; color: #999;")
+info_label.setWordWrap(True)
 layout.addWidget(info_label)

 # Top bar with stats and buttons
@@ -12553,6 +12557,9 @@ class GUI(QMainWindow):
     channels=(ch1, ch2)
 )

+# Reset sorted flag so Sort button can be used
+self._verify_visual_sorted = False
+
 # Update stats label
 self._update_verify_visual_stats()

@@ -12570,20 +12577,76 @@ class GUI(QMainWindow):
     )

 def sort_verify_visual(self):
-    """Sort Verify Visual results by prediction value (lowest to highest)."""
+    """Sort Verify Visual results by prediction value (lowest to highest for review)."""
     if not hasattr(self, 'verify_visual_checkboxes') or len(self.verify_visual_checkboxes) == 0:
+        QMessageBox.information(self, "No Data", "No spots to sort. Please click Populate first.")
+        return
+
+    if not hasattr(self, 'colocalization_results') or not self.colocalization_results:
+        QMessageBox.warning(self, "No Results", "No colocalization results available.")
         return

-
+    results = self.colocalization_results
+    values = results.get('prediction_values_vector')
+    mean_crop = results.get('mean_crop_filtered')
+    crop_size = results.get('crop_size', 15)
+    flag_vector = results.get('flag_vector')
+    ch1 = results.get('ch1_index', 0)
+    ch2 = results.get('ch2_index', 1)
+
     if values is None or len(values) == 0:
         QMessageBox.information(self, "Cannot Sort", "No prediction values available for sorting.")
         return

-
-
-
-
-
+    if mean_crop is None:
+        QMessageBox.warning(self, "No Data", "Crop data not available for sorting.")
+        return
+
+    # Check if already sorted (compare to original order)
+    if hasattr(self, '_verify_visual_sorted') and self._verify_visual_sorted:
+        QMessageBox.information(self, "Already Sorted", "Spots are already sorted by prediction value.")
+        return
+
+    # Get current checkbox states before sorting
+    current_states = [chk.isChecked() for chk in self.verify_visual_checkboxes]
+
+    # Create sorted indices (ascending by prediction value - uncertain first)
+    num_spots = len(values)
+    sorted_indices = np.argsort(values)
+
+    # Re-order checkbox states to match new sort order
+    sorted_states = [current_states[i] if i < len(current_states) else False for i in sorted_indices]
+
+    # Re-order crops - each spot is crop_size rows in the mean_crop array
+    num_crop_spots = mean_crop.shape[0] // crop_size
+    if num_crop_spots < num_spots:
+        num_spots = num_crop_spots
+        sorted_indices = sorted_indices[:num_spots]
+
+    sorted_crop = np.zeros_like(mean_crop[:num_spots*crop_size])
+    for new_idx, old_idx in enumerate(sorted_indices[:num_spots]):
+        if old_idx < num_crop_spots:
+            sorted_crop[new_idx*crop_size:(new_idx+1)*crop_size] = \
+                mean_crop[old_idx*crop_size:(old_idx+1)*crop_size]
+
+    # Re-create verification crops with sorted data
+    self._create_verification_crops(
+        scroll_area=self.verify_visual_scroll_area,
+        checkboxes_list_attr='verify_visual_checkboxes',
+        mean_crop=sorted_crop,
+        crop_size=crop_size,
+        flag_vector=sorted_states,  # Use previously checked states after reorder
+        stats_label=self.verify_visual_stats_label,
+        num_channels=2,
+        channels=(ch1, ch2)
+    )
+
+    # Mark as sorted
+    self._verify_visual_sorted = True
+    self._verify_visual_sort_indices = sorted_indices
+
+    # Update stats
+    self._update_verify_visual_stats()

 def cleanup_verify_visual(self):
     """Clear all checkboxes in Verify Visual subtab."""
@@ -12630,7 +12693,11 @@ class GUI(QMainWindow):
 # === Verify Distance Subtab Methods ===

 def populate_verify_distance(self):
-    """Populate the Verify Distance subtab with Distance colocalization results.
+    """Populate the Verify Distance subtab with Distance colocalization results.
+
+    Calculates and stores the minimum distance from each reference channel spot
+    to its nearest partner in the target channel for sorting purposes.
+    """
     if not hasattr(self, 'distance_coloc_results') or not self.distance_coloc_results:
         QMessageBox.warning(self, "No Results",
                             "Please run Distance colocalization first.")
@@ -12641,8 +12708,10 @@ class GUI(QMainWindow):
 ch0 = results.get('channel_0', 0)
 ch1 = results.get('channel_1', 1)
 df_coloc = results.get('df_colocalized', pd.DataFrame())
+df_ch1_all = results.get('df_ch1_all', pd.DataFrame())
 threshold_px = results.get('threshold_distance_px', 2.0)
 threshold_nm = results.get('threshold_distance_nm', 130.0)
+use_3d = results.get('use_3d', False)

 # We need to create crops from tracking data
 if not hasattr(self, 'df_tracking') or self.df_tracking.empty:
@@ -12684,28 +12753,129 @@ class GUI(QMainWindow):

 num_spots = mean_crop.shape[0] // crop_size

-#
-#
-
+# Build set of colocalized coordinates for matching
+# Use a tolerance-based approach instead of exact coordinate matching
+coloc_coords_array = np.empty((0, 4))  # z, y, x, cell_id
 if not df_coloc.empty:
-
-
-
-
+    if 'z' in df_coloc.columns and use_3d:
+        coloc_coords_array = df_coloc[['z', 'y', 'x', 'cell_id']].values
+    else:
+        # Add dummy z=0 for 2D matching
+        coloc_coords_array = np.column_stack([
+            np.zeros(len(df_coloc)),
+            df_coloc['y'].values,
+            df_coloc['x'].values,
+            df_coloc['cell_id'].values
+        ])
+
+# Calculate minimum distances for each spot in ch0 to nearest spot in ch1
+# This will be used for sorting (ascending = closest to threshold = most uncertain)
+distance_values = []
 flag_vector = []
-
+
+# Get ch1 coordinates for distance calculation
+ch1_coords = None
+if not df_ch1_all.empty and 'x' in df_ch1_all.columns and 'y' in df_ch1_all.columns:
+    if use_3d and 'z' in df_ch1_all.columns:
+        ch1_coords = df_ch1_all[['z', 'y', 'x']].values
+    else:
+        ch1_coords = df_ch1_all[['y', 'x']].values
+
+# Get anisotropic scaling for 3D
+voxel_z_nm = results.get('voxel_z_nm', 300.0)
+voxel_xy_nm = results.get('voxel_xy_nm', 130.0)
+z_scale = voxel_z_nm / voxel_xy_nm if use_3d and voxel_xy_nm > 0 else 1.0
+
+# Use the same particle column identification as CropArray
+# This ensures our iteration matches the crop order
+df_ch0_copy = df_ch0.copy()
+if 'unique_particle' in df_ch0_copy.columns:
+    particle_col = 'unique_particle'
+elif 'cell_id' in df_ch0_copy.columns:
+    if 'spot_type' in df_ch0_copy.columns:
+        df_ch0_copy['unique_particle'] = (
+            df_ch0_copy['cell_id'].astype(str) + '_' +
+            df_ch0_copy['spot_type'].astype(str) + '_' +
+            df_ch0_copy['particle'].astype(str)
+        )
+    else:
+        df_ch0_copy['unique_particle'] = (
+            df_ch0_copy['cell_id'].astype(str) + '_' +
+            df_ch0_copy['particle'].astype(str)
+        )
+    particle_col = 'unique_particle'
+else:
+    particle_col = 'particle'
+
+# Helper function to check if a spot coordinate is in the colocalized set
+def is_coord_colocalized(z, y, x, cell_id, coloc_arr, tolerance=1.0):
+    """Check if a spot is in the colocalized set using coordinate tolerance."""
+    if len(coloc_arr) == 0:
+        return False
+    # Filter by cell_id first for efficiency
+    cell_mask = coloc_arr[:, 3].astype(int) == int(cell_id)
+    cell_coloc = coloc_arr[cell_mask]
+    if len(cell_coloc) == 0:
+        return False
+    # Check distance to each colocalized spot
+    for cz, cy, cx, _ in cell_coloc:
+        dist_xy = np.sqrt((x - cx)**2 + (y - cy)**2)
+        dist_z = abs(z - cz) if use_3d else 0
+        if dist_xy <= tolerance and dist_z <= tolerance:
+            return True
+    return False
+
+# Iterate unique particles in the same order as CropArray
+unique_particles = df_ch0_copy[particle_col].unique()
+
+for i, particle_id in enumerate(unique_particles):
     if i >= num_spots:
         break
-
-
+
+    df_particle = df_ch0_copy[df_ch0_copy[particle_col] == particle_id]
+
+    # Check if ANY observation of this particle is colocalized
+    is_coloc = False
+    min_dist_all = threshold_px * 10.0  # Large default
+
+    for _, row in df_particle.iterrows():
+        x_val, y_val = row['x'], row['y']
+        z_val = row.get('z', 0)
+        cell_id = row.get('cell_id', 0)
+
+        # Check if this observation is in the colocalized set
+        if len(coloc_coords_array) > 0:
+            if is_coord_colocalized(z_val, y_val, x_val, cell_id, coloc_coords_array, tolerance=1.0):
+                is_coloc = True
+
+        # Calculate minimum distance to any ch1 spot for this observation
+        if ch1_coords is not None and len(ch1_coords) > 0:
+            if use_3d and ch1_coords.shape[1] == 3:
+                spot_coord = np.array([[z_val * z_scale, y_val, x_val]])
+                ch1_scaled = ch1_coords.copy().astype(float)
+                ch1_scaled[:, 0] = ch1_scaled[:, 0] * z_scale  # Scale Z
+            else:
+                spot_coord = np.array([[y_val, x_val]])
+                ch1_scaled = ch1_coords
+
+            from scipy.spatial.distance import cdist
+            distances = cdist(spot_coord, ch1_scaled, metric='euclidean')
+            obs_min_dist = float(np.min(distances))
+            if obs_min_dist < min_dist_all:
+                min_dist_all = obs_min_dist
+
+    flag_vector.append(is_coloc)
+    distance_values.append(min_dist_all)

-# Pad
+# Pad vectors if needed (shouldn't happen, but just in case)
 while len(flag_vector) < num_spots:
     flag_vector.append(False)
+    distance_values.append(threshold_px * 10.0)

-# Store for later use
+# Store for later use (sorting, etc.)
 self.verify_distance_mean_crop = mean_crop
 self.verify_distance_crop_size = crop_size
+self.verify_distance_values = np.array(distance_values)  # For sorting by distance

 # Create spot crops with checkboxes
 self._create_verification_crops(
@@ -12719,6 +12889,9 @@ class GUI(QMainWindow):
     channels=(ch0, ch1)
 )

+# Reset sorted flag so Sort button can be used
+self._verify_distance_sorted = False
+
 # Update stats label
 self._update_verify_distance_stats()

@@ -12743,9 +12916,88 @@ class GUI(QMainWindow):
     )

 def sort_verify_distance(self):
-    """Sort Verify Distance results
-
-
+    """Sort Verify Distance results by distance value (ascending - closest to threshold first).
+
+    Similar to Visual method's certainty-based sorting, but uses the measured
+    distance to nearest partner. Spots with distances closest to the colocalization
+    threshold are shown first as they represent the most uncertain classifications.
+    """
+    if not hasattr(self, 'verify_distance_checkboxes') or len(self.verify_distance_checkboxes) == 0:
+        QMessageBox.information(self, "No Data", "No spots to sort. Please click Populate first.")
+        return
+
+    if not hasattr(self, 'verify_distance_mean_crop') or self.verify_distance_mean_crop is None:
+        QMessageBox.warning(self, "No Data", "Crop data not available for sorting.")
+        return
+
+    # Check if distance values are available
+    if not hasattr(self, 'verify_distance_values') or self.verify_distance_values is None:
+        QMessageBox.warning(self, "No Distance Data",
+                            "Distance values not available. Please re-run Populate.")
+        return
+
+    # Check if already sorted
+    if hasattr(self, '_verify_distance_sorted') and self._verify_distance_sorted:
+        QMessageBox.information(self, "Already Sorted", "Spots are already sorted by distance value.")
+        return
+
+    mean_crop = self.verify_distance_mean_crop
+    crop_size = self.verify_distance_crop_size
+    distance_values = self.verify_distance_values
+
+    # Get current checkbox states before sorting
+    current_states = [chk.isChecked() for chk in self.verify_distance_checkboxes]
+    num_spots = len(current_states)
+
+    # Sort ascending by distance (closest to threshold = most uncertain first)
+    # This matches the Visual method's approach of showing uncertain cases first
+    sorted_indices = np.argsort(distance_values)
+
+    # Re-order states and distances
+    sorted_states = [current_states[i] if i < len(current_states) else False for i in sorted_indices]
+    sorted_distances = distance_values[sorted_indices]
+
+    # Re-order crops
+    num_crop_spots = mean_crop.shape[0] // crop_size
+    if num_crop_spots < num_spots:
+        num_spots = num_crop_spots
+        sorted_indices = sorted_indices[:num_spots]
+
+    sorted_crop = np.zeros_like(mean_crop[:num_spots*crop_size])
+    for new_idx, old_idx in enumerate(sorted_indices[:num_spots]):
+        if old_idx < num_crop_spots:
+            sorted_crop[new_idx*crop_size:(new_idx+1)*crop_size] = \
+                mean_crop[old_idx*crop_size:(old_idx+1)*crop_size]
+
+    # Get channels from distance results
+    results = self.distance_coloc_results if hasattr(self, 'distance_coloc_results') else {}
+    ch0 = results.get('channel_0', 0)
+    ch1 = results.get('channel_1', 1)
+    image = self.corrected_image if self.corrected_image is not None else self.image_stack
+    num_channels = image.shape[-1] if image is not None and image.ndim == 5 else 1
+
+    # Re-create verification crops with sorted data
+    self._create_verification_crops(
+        scroll_area=self.verify_distance_scroll_area,
+        checkboxes_list_attr='verify_distance_checkboxes',
+        mean_crop=sorted_crop,
+        crop_size=crop_size,
+        flag_vector=sorted_states,
+        stats_label=self.verify_distance_stats_label,
+        num_channels=num_channels,
+        channels=(ch0, ch1)
+    )
+
+    # Update stored data after sorting for consistency
+    self.verify_distance_mean_crop = sorted_crop
+    self.verify_distance_values = sorted_distances
+    self._verify_distance_sort_indices = sorted_indices  # Store for reference
+
+    # Mark as sorted
+    self._verify_distance_sorted = True
+
+    # Update stats
+    self._update_verify_distance_stats()

 def cleanup_verify_distance(self):
     """Clear all checkboxes in Verify Distance subtab."""
@@ -12833,7 +13085,12 @@ class GUI(QMainWindow):

 # Checkbox
 chk = QCheckBox(f"Spot {i+1}")
-
+# Safely get the flag value (handle numpy arrays, lists, etc.)
+try:
+    flag_val = bool(flag_vector[i]) if i < len(flag_vector) else False
+except (TypeError, IndexError):
+    flag_val = False
+chk.setChecked(flag_val)
 chk.setSizePolicy(QSizePolicy.Fixed, QSizePolicy.Fixed)

 # Connect to stats update
@@ -15475,6 +15732,7 @@ class GUI(QMainWindow):
 self.verify_visual_checkboxes = []
 if hasattr(self, 'verify_visual_stats_label'):
     self.verify_visual_stats_label.setText("Run Visual colocalization first, then click Populate")
+self._verify_visual_sorted = False

 # Reset Verify Distance
 if hasattr(self, 'verify_distance_scroll_area'):
@@ -15483,6 +15741,11 @@ class GUI(QMainWindow):
 self.verify_distance_checkboxes = []
 if hasattr(self, 'verify_distance_stats_label'):
     self.verify_distance_stats_label.setText("Run Distance colocalization first, then click Populate")
+# Reset stored distance data for sorting
+self.verify_distance_mean_crop = None
+self.verify_distance_crop_size = None
+self.verify_distance_values = None
+self._verify_distance_sorted = False

 def reset_cellpose_tab(self):
     """Reset Cellpose tab state, masks, and UI controls to defaults."""
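Both new Sort buttons (Verify Visual and Verify Distance) use the same idea: rank spots by an uncertainty score with np.argsort and move whole crop blocks plus their checkbox states into that order. Below is a minimal standalone sketch of that reordering step; the function name and inputs are illustrative and are not part of the package API.

import numpy as np

def sort_crops_by_score(mean_crop, scores, crop_size, states):
    """Reorder stacked crops so the lowest (most uncertain) scores come first."""
    num_spots = min(len(scores), mean_crop.shape[0] // crop_size)
    order = np.argsort(scores)[:num_spots]  # ascending: most uncertain spots first
    sorted_crop = np.zeros_like(mean_crop[:num_spots * crop_size])
    for new_idx, old_idx in enumerate(order):
        # each spot occupies crop_size consecutive rows, so whole row blocks are moved
        sorted_crop[new_idx * crop_size:(new_idx + 1) * crop_size] = \
            mean_crop[old_idx * crop_size:(old_idx + 1) * crop_size]
    sorted_states = [states[i] if i < len(states) else False for i in order]
    return sorted_crop, sorted_states, order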
microlive/microscopy.py
CHANGED
@@ -1423,6 +1423,13 @@ class Intensity():
 optimize_spot_size: Search for optimal spot size (5-11 px). Slower. Defaults to False.
 allow_subpixel_repositioning: Search ±2px for better center. Defaults to False.
 fast_gaussian_fit: Use moment-based (fast) vs full Gaussian fit. Defaults to True.
+snr_method: Method for calculating signal-to-noise ratio. Options:
+    - 'peak' (default): Uses the maximum pixel value in the spot region as signal.
+      This is the standard definition of SNR: (max_spot - mean_bg) / std_bg.
+      Recommended for most applications.
+    - 'disk_doughnut': Uses mean disk intensity as signal instead of peak value.
+      Calculates SNR as: (mean_disk - mean_bg) / std_bg.
+      More robust when data is very noisy or spots are dim.

 Attributes:
     number_spots: Number of spots to measure.
@@ -1431,7 +1438,7 @@ class Intensity():

 def __init__(self, original_image, spot_size=5, array_spot_location_z_y_x=None,
              use_max_projection=False, optimize_spot_size=False, allow_subpixel_repositioning=False,
-             fast_gaussian_fit=True):
+             fast_gaussian_fit=True, snr_method='peak'):
     self.original_image = original_image
     if array_spot_location_z_y_x is None:
         self.array_spot_location_z_y_x = np.array([[0, 0, 0]])
@@ -1449,6 +1456,7 @@
     self.optimize_spot_size = optimize_spot_size
     self.allow_subpixel_repositioning = allow_subpixel_repositioning
     self.fast_gaussian_fit = fast_gaussian_fit
+    self.snr_method = snr_method

 def two_dimensional_gaussian(self, xy, amplitude, x0, y0, sigma_x, sigma_y, offset):
     """Evaluate 2D Gaussian at given coordinates."""
@@ -1551,11 +1559,23 @@
 def calculate_intensity(self):
     """Calculate intensity metrics for all spots across all channels.

+    The signal-to-noise ratio (SNR) calculation method is controlled by the
+    `snr_method` parameter set during class initialization:
+
+    - **'peak'** (default): Uses the maximum pixel value in the spot region
+      as the signal. This is the standard definition of SNR commonly used
+      in microscopy: SNR = (max_spot - mean_background) / std_background.
+
+    - **'disk_doughnut'**: Uses the mean disk intensity as signal instead
+      of the peak value. This method is more robust for very noisy data
+      or dim spots where the maximum value may be unreliable due to noise.
+      SNR = (mean_disk - mean_background) / std_background.
+
     Returns:
         tuple: 8-element tuple of arrays, each with shape [N_spots, N_channels]:
             - intensities: Background-subtracted intensity (disk - doughnut mean).
             - intensities_std: Standard deviation within disk region.
-            - intensities_snr: Signal-to-noise ratio (
+            - intensities_snr: Signal-to-noise ratio (calculation depends on snr_method).
             - intensities_background_mean: Mean background from doughnut.
             - intensities_background_std: Std of background from doughnut.
             - psfs_amplitude: PSF peak amplitude from Gaussian fit (or NaN).
@@ -1577,11 +1597,31 @@
     donut_values = tem_img[~np.isnan(tem_img)].astype('uint16')
     return donut_values

-def signal_to_noise_ratio(values_disk, values_donut):
-
+def signal_to_noise_ratio(values_disk, values_donut, snr_method='peak'):
+    """Calculate signal-to-noise ratio for a spot.
+
+    Args:
+        values_disk: Pixel values in the spot disk region.
+        values_donut: Pixel values in the background doughnut region.
+        snr_method: 'peak' uses max pixel value as signal (standard),
+            'disk_doughnut' uses mean disk intensity (robust for noisy data).
+
+    Returns:
+        tuple: (SNR, mean_background, std_background)
+    """
     mean_donut = np.mean(values_donut.astype(float))
     std_donut = np.std(values_donut.astype(float))
-
+
+    if snr_method == 'peak':
+        # Standard SNR: use peak (maximum) pixel value as signal
+        max_disk = np.max(values_disk.astype(float))
+        signal = max_disk - mean_donut
+    else:
+        # disk_doughnut method: use mean disk intensity as signal
+        mean_disk = np.mean(values_disk.astype(float))
+        signal = mean_disk - mean_donut
+
+    SNR = signal / std_donut if std_donut > 0 else 0
     return SNR, mean_donut, std_donut

 def disk_donut(values_disk, values_donut, spot_size):
@@ -1749,7 +1789,7 @@
 # Use the updated integer positions for the final intensity crop
 crop_disk_and_donut = return_crop(frame_data[:,:,i], current_x_int, current_y_int, spot_range=crop_range)
 values_donut = return_donut(crop_disk_and_donut, spot_size=best_size)
-intensities_snr[sp,i], intensities_background_mean[sp,i], intensities_background_std[sp,i] = signal_to_noise_ratio(values_disk, values_donut)
+intensities_snr[sp,i], intensities_background_mean[sp,i], intensities_background_std[sp,i] = signal_to_noise_ratio(values_disk, values_donut, self.snr_method)
 # disk_donut calculation
 intensities[sp,i], intensities_std[sp,i] = disk_donut(values_disk, values_donut, spot_size=best_size)
 intensities_total[sp,i] = np.sum(values_disk)
@@ -1914,14 +1954,9 @@ class Cellpose():
 # Initialize Cellpose model
 if self.pretrained_model is None:
     if self.model_type == 'cyto3':
-
-
-
-            logging.debug('Cellpose: Model cyto3 loaded')
-        except Exception as e:
-            logging.warning(f"Failed to load cyto3 model: {e}. Falling back to 'cyto2'.")
-            self.model_type = 'cyto2'
-            model = models.Cellpose(gpu=use_gpu, model_type='cyto2')
+        model = denoise.CellposeDenoiseModel(gpu=use_gpu, model_type="cyto3",
+                                             restore_type="denoise_cyto3")
+        logging.debug('Cellpose: Model cyto3 loaded')
     else:
         model = models.Cellpose(gpu=use_gpu, model_type=self.model_type)  # model_type = 'cyto' or 'nuclei'
 else:
@@ -3913,7 +3948,8 @@ class BigFISH():

 # Select isolated spots (cluster_id < 0) and set cluster_size to 1
 spots_no_clusters = clusters_and_spots_big_fish[clusters_and_spots_big_fish[:,-1] < 0].copy()
-spots_no_clusters
+if len(spots_no_clusters) > 0:
+    spots_no_clusters[:,-1] = 1  # Replace cluster_id with cluster_size=1

 # Select cluster centroids with cluster_size > 1
 clusters_no_spots = clusters[clusters[:,-2] > 1]
@@ -4977,6 +5013,7 @@ class DataProcessing():
     self.fast_gaussian_fit = fast_gaussian_fit
     # This number represent the number of columns that doesnt change with the number of color channels in the image
     self.NUMBER_OF_CONSTANT_COLUMNS_IN_DATAFRAME = 18
+
 def get_dataframe(self):
     '''
     This method extracts data from the class SpotDetection and returns the data as a dataframe.
@@ -5127,7 +5164,7 @@
 array_spots_nuc[:,10:13] = spots_nuc[:,:3] # populating coord
 array_spots_nuc[:,13] = 1 # is_nuc
 array_spots_nuc[:,14] = 0 # is_cluster
-array_spots_nuc[:,15] =
+array_spots_nuc[:,15] = spots_nuc[:,3] # cluster_size (use actual detected value)
 array_spots_nuc[:,16] = spot_type # spot_type
 array_spots_nuc[:,17] = is_cell_in_border # is_cell_fragmented

@@ -5136,7 +5173,7 @@
 array_spots_cytosol_only[:,10:13] = spots_cytosol_only[:,:3] # populating coord
 array_spots_cytosol_only[:,13] = 0 # is_nuc
 array_spots_cytosol_only[:,14] = 0 # is_cluster
-array_spots_cytosol_only[:,15] =
+array_spots_cytosol_only[:,15] = spots_cytosol_only[:,3] # cluster_size (use actual detected value)
 array_spots_cytosol_only[:,16] = spot_type # spot_type
 array_spots_cytosol_only[:,17] = is_cell_in_border # is_cell_fragmented
 if (detected_cyto_clusters == True): #(detected_cyto == True) and
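The new snr_method option only changes the numerator of the SNR computed above. The short sketch below shows the two variants on synthetic pixel values; it is a standalone stand-in for the nested helper inside Intensity.calculate_intensity, not the package function itself.

import numpy as np

def snr(values_disk, values_donut, snr_method='peak'):
    mean_bg = np.mean(values_donut.astype(float))
    std_bg = np.std(values_donut.astype(float))
    if snr_method == 'peak':
        signal = np.max(values_disk.astype(float)) - mean_bg   # peak pixel as signal
    else:  # 'disk_doughnut'
        signal = np.mean(values_disk.astype(float)) - mean_bg  # mean disk as signal
    return signal / std_bg if std_bg > 0 else 0

rng = np.random.default_rng(0)
disk = rng.normal(120, 10, size=25)    # synthetic bright spot pixels
donut = rng.normal(100, 10, size=40)   # synthetic background ring pixels
print(snr(disk, donut, 'peak'), snr(disk, donut, 'disk_doughnut'))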
microlive/pipelines/pipeline_FRAP.py
CHANGED
@@ -1,7 +1,14 @@
-"""Pipeline module for MicroLive.
+"""Pipeline module for MicroLive FRAP analysis.

-This module is part of the microlive package
+This module is part of the microlive package and provides functions for
+Fluorescence Recovery After Photobleaching (FRAP) analysis.
+
+The pipeline uses a pretrained Cellpose model for nuclei segmentation that
+is automatically downloaded from GitHub on first use.
 """
+import os
+import traceback
+
 from microlive.imports import *

 from skimage.feature import canny
@@ -12,6 +19,95 @@ from skimage.morphology import binary_opening, binary_closing
 from skimage.measure import label, regionprops
 from skimage.transform import hough_circle, hough_circle_peaks

+# Import model downloader with graceful fallback
+try:
+    from microlive.utils.model_downloader import get_frap_nuclei_model_path
+    _HAS_MODEL_DOWNLOADER = True
+except ImportError:
+    _HAS_MODEL_DOWNLOADER = False
+
+import logging
+logger = logging.getLogger(__name__)
+
+
+# =============================================================================
+# GPU Detection and MPS Compatibility (aligned with microscopy.py)
+# =============================================================================
+
+class PatchMPSFloat64:
+    """
+    Context manager to safely monkeypatch torch.zeros on MPS devices
+    to force float32 instead of float64 (which is not supported).
+
+    Copied from microlive.microscopy for self-contained FRAP pipeline.
+    """
+    def __init__(self):
+        self.original_zeros = torch.zeros
+        self.is_mps = torch.backends.mps.is_available() and torch.backends.mps.is_built()
+
+    def __enter__(self):
+        if not self.is_mps:
+            return
+
+        def patched_zeros(*args, **kwargs):
+            # Check if device is MPS (either string or torch.device)
+            device = kwargs.get('device', None)
+            is_target_device = False
+            if device is not None:
+                if isinstance(device, str) and 'mps' in device:
+                    is_target_device = True
+                elif isinstance(device, torch.device) and device.type == 'mps':
+                    is_target_device = True
+
+            # Check if dtype is float64/double
+            dtype = kwargs.get('dtype', None)
+            is_target_dtype = (dtype == torch.float64 or dtype == torch.double)
+
+            if is_target_device and is_target_dtype:
+                kwargs['dtype'] = torch.float32
+
+            return self.original_zeros(*args, **kwargs)
+
+        torch.zeros = patched_zeros
+
+    def __exit__(self, exc_type, exc_val, exc_tb):
+        if self.is_mps:
+            torch.zeros = self.original_zeros
+
+
+def _detect_gpu():
+    """
+    Detect available GPU (CUDA or MPS) for Cellpose.
+
+    Returns:
+        bool: True if GPU is available (CUDA or MPS), False otherwise.
+    """
+    os.environ['PYTORCH_ENABLE_MPS_FALLBACK'] = '1'
+    return torch.cuda.is_available() or torch.backends.mps.is_available()
+
+
+def _get_frap_nuclei_model():
+    """
+    Get the path to the pretrained FRAP nuclei segmentation model.
+
+    Downloads from GitHub on first use, caches locally in ~/.microlive/models/.
+    Returns None if download fails, allowing fallback to default Cellpose model.
+
+    Returns:
+        str or None: Path to the model file, or None if unavailable.
+    """
+    if not _HAS_MODEL_DOWNLOADER:
+        logger.debug("Model downloader not available, using default nuclei model")
+        return None
+
+    try:
+        model_path = get_frap_nuclei_model_path()
+        logger.info(f"Using pretrained FRAP nuclei model: {model_path}")
+        return model_path
+    except Exception as e:
+        logger.warning(f"Could not load FRAP nuclei model: {e}. Using default.")
+        return None
+
 def read_lif_files_in_folder(folder_path):
     # create funtion that read all the .lif files in a folder and return the list of images
     list_folders = list(folder_path.glob('*.lif'))
@@ -160,43 +256,111 @@ def find_frap_coordinates(image_TXY, frap_time, stable_FRAP_channel, min_diamete
 return None, None


-def segment_image(image_TXY, step_size=5, pretrained_model_segmentation=
+def segment_image(image_TXY, step_size=5, pretrained_model_segmentation='auto', frap_time=None, pixel_dilation_pseudo_cytosol=10,stable_FRAP_channel=0,min_diameter=10):
+    """
+    Segment nuclei in FRAP image stack using Cellpose.
+
+    Args:
+        image_TXY: 3D image array (Time, X, Y).
+        step_size: Number of frames between segmentations.
+        pretrained_model_segmentation: Model to use:
+            - 'auto' (default): Auto-download and use FRAP-optimized model from GitHub
+            - None or 'nuclei': Use default Cellpose nuclei model
+            - str path: Use custom pretrained model at given path
+        frap_time: Frame index of FRAP event.
+        pixel_dilation_pseudo_cytosol: Pixels to dilate for pseudo-cytosol.
+        stable_FRAP_channel: Channel index for stable signal.
+        min_diameter: Minimum ROI diameter in pixels.
+
+    Returns:
+        Tuple of (masks_TXY, background_mask, pseudo_cytosol_masks_TXY).
+    """
     num_pixels_to_dilate = 1
-
-
+
+    # GPU detection (aligned with microscopy.py)
+    use_gpu = _detect_gpu()
+    logger.debug(f"FRAP Pipeline: GPU available = {use_gpu}")
+
+    # Ensure image is float32 for MPS compatibility
+    image_TXY = image_TXY.astype(np.float32)
+
+    # Determine which model to use
+    if pretrained_model_segmentation == 'auto':
+        # Auto-download FRAP-optimized model from GitHub
+        pretrained_model_segmentation = _get_frap_nuclei_model()
+
+    # Helper function to run Cellpose with error handling
+    def _run_cellpose_eval(model, image, model_type_fallback=None, **kwargs):
+        """Run Cellpose evaluation with MPS error handling and CPU fallback."""
+        nonlocal use_gpu
+        try:
+            with PatchMPSFloat64():
+                return model.eval(image, **kwargs)[0]
+        except RuntimeError as e:
+            if "sparse" in str(e) and torch.backends.mps.is_available():
+                logger.warning(f"MPS sparse error detected: {e}. Retrying with resample=False.")
+                try:
+                    kwargs['resample'] = False
+                    with PatchMPSFloat64():
+                        return model.eval(image, **kwargs)[0]
+                except RuntimeError as e2:
+                    logger.warning(f"MPS error persisted: {e2}. Falling back to CPU.")
+                    # Reinitialize model on CPU
+                    if model_type_fallback is not None:
+                        model = models.CellposeModel(gpu=False, model_type=model_type_fallback)
+                    else:
+                        model = models.CellposeModel(gpu=False, pretrained_model=pretrained_model_segmentation)
+                    use_gpu = False
+                    kwargs.pop('resample', None)  # Reset resample
+                    return model.eval(image, **kwargs)[0]
+            else:
+                logger.error(f"Cellpose RuntimeError: {e}")
+                logger.error(traceback.format_exc())
+                return np.zeros(image.shape[:2], dtype=np.uint16)
+        except Exception as e:
+            logger.error(f"Cellpose error: {e}")
+            logger.error(traceback.format_exc())
+            return np.zeros(image.shape[:2], dtype=np.uint16)
+
+    # Initialize models
+    if pretrained_model_segmentation is not None and pretrained_model_segmentation != 'nuclei':
+        logger.info(f"Using pretrained model for nuclei segmentation")
         model_nucleus = models.CellposeModel(
             gpu=use_gpu,
             pretrained_model=pretrained_model_segmentation
         )
     else:
+        logger.info("Using default Cellpose nuclei model")
         model_nucleus = models.CellposeModel(
             gpu=use_gpu,
             model_type='nuclei'
         )
-
-
+    model_cyto = models.CellposeModel(gpu=use_gpu, model_type='cyto2')
+
     num_steps = (image_TXY.shape[0] + step_size - 1) // step_size
     list_masks = []
     list_selected_mask_id = []
     list_selected_masks = []
     list_masks_cyto = []
+
     # If frap_time is provided, segment the FRAP images and select the mask with maximum intensity change
     if frap_time is not None:
         # Ensure frap_time is within valid range
         if frap_time < 1 or frap_time >= image_TXY.shape[0] - 1:
             raise ValueError("frap_time must be within the range of the image stack.")
         # Segment the image at frap_time
-
-
+        masks_frap = _run_cellpose_eval(
+            model_nucleus,
             image_TXY[frap_time],
-
+            model_type_fallback='nuclei',
+            channels=[0, 0],
             normalize=True,
             flow_threshold=1,
             diameter=150,
             min_size=50
-        )
+        )
         # remove all the maks that are touching the border
-        masks_frap =remove_border_masks(masks_frap,min_size=50)
+        masks_frap = remove_border_masks(masks_frap, min_size=50)
         # Get unique mask labels (excluding background)
         mask_labels = np.unique(masks_frap)
         mask_labels = mask_labels[mask_labels != 0]
@@ -210,13 +374,10 @@ def segment_image(image_TXY, step_size=5, pretrained_model_segmentation=None, fr
     selected_mask_frap = binary_dilation(selected_mask_frap, iterations=num_pixels_to_dilate).astype('int')
     break
 else:
-    #selected_mask_id_frap = None
     selected_mask_frap = None
 else:
-    #selected_mask_id_frap = None
     selected_mask_frap = None
 else:
-    #selected_mask_id_frap = None
     selected_mask_frap = None
 if selected_mask_frap is None:
     return None, None, None
@@ -224,19 +385,29 @@ def segment_image(image_TXY, step_size=5, pretrained_model_segmentation=None, fr
 for step in range(num_steps):
     i = step * step_size
     # Detecting masks in i-th frame
-    masks =
+    masks = _run_cellpose_eval(
+        model_nucleus,
         image_TXY[i],
-
+        model_type_fallback='nuclei',
+        channels=[0, 0],
         normalize=True,
         flow_threshold=1,
         diameter=150,
         min_size=50
-    )
+    )
     list_masks.append(masks)
-    masks =remove_border_masks(masks,min_size=50)
+    masks = remove_border_masks(masks, min_size=50)
     # Detect cytosol masks only every `step_size` frames
     if step % 2 == 0:
-        masks_cyto =
+        masks_cyto = _run_cellpose_eval(
+            model_cyto,
+            image_TXY[i],
+            model_type_fallback='cyto2',
+            normalize=True,
+            flow_threshold=0.5,
+            diameter=250,
+            min_size=100
+        )
         list_masks_cyto.append(masks_cyto)
     if frap_time is None:
         # Selecting the mask that is in the center of the image
@@ -311,7 +482,7 @@ def segment_image(image_TXY, step_size=5, pretrained_model_segmentation=None, fr



-def create_image_arrays(list_concatenated_images, selected_image=0, FRAP_channel_to_quantify=0,pretrained_model_segmentation=
+def create_image_arrays(list_concatenated_images, selected_image=0, FRAP_channel_to_quantify=0,pretrained_model_segmentation='auto',frap_time=None, starting_changing_frame=40, step_size_increase=5,min_diameter=10):
 image_TZXYC = list_concatenated_images[selected_image] # shape (T Z Y X C)
 print('Image with shape (T Z Y X C):\n ' ,list_concatenated_images[selected_image].shape) # TZYXC
 print('Original Image pixel ', 'min: {:.2f}, max: {:.2f}, mean: {:.2f}, std: {:.2f}'.format(np.min(image_TZXYC), np.max(image_TZXYC), np.mean(image_TZXYC), np.std(image_TZXYC)) )
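A hedged sketch of how the updated segment_image signature could be invoked once a single-channel (T, X, Y) stack is in memory. The random array below is only a placeholder for a real bleached stack, and with pretrained_model_segmentation='auto' the FRAP nuclei model is fetched from GitHub on first use.

import numpy as np
from microlive.pipelines.pipeline_FRAP import segment_image

image_TXY = np.random.rand(60, 256, 256).astype(np.float32)  # placeholder for real FRAP data
masks_TXY, background_mask, pseudo_cyto_masks = segment_image(
    image_TXY,
    step_size=5,
    pretrained_model_segmentation='auto',  # auto-download the FRAP-optimized Cellpose model
    frap_time=20,                          # frame index of the bleach event
)
# segment_image returns (None, None, None) when no suitable nucleus mask is found.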
microlive/utils/__init__.py
CHANGED
@@ -2,6 +2,12 @@

 from .device import get_device, is_gpu_available, get_device_info, check_gpu_status
 from .resources import get_icon_path, get_model_path
+from .model_downloader import (
+    get_frap_nuclei_model_path,
+    cache_model,
+    list_cached_models,
+    MODEL_DIR,
+)

 __all__ = [
     "get_device",
@@ -10,4 +16,9 @@ __all__ = [
     "check_gpu_status",
     "get_icon_path",
     "get_model_path",
+    # Model downloader
+    "get_frap_nuclei_model_path",
+    "cache_model",
+    "list_cached_models",
+    "MODEL_DIR",
 ]
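The re-exports above make the downloader usable straight from microlive.utils; a small illustrative snippet (the print statements are only for inspection):

from microlive.utils import get_frap_nuclei_model_path, list_cached_models, MODEL_DIR

print(MODEL_DIR)              # default cache directory: ~/.microlive/models
print(list_cached_models())   # cache status per known model
model_path = get_frap_nuclei_model_path()  # triggers a download on first call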
microlive/utils/model_downloader.py
ADDED
@@ -0,0 +1,293 @@
+"""
+Model download utilities for MicroLive.
+
+This module provides functions to download and cache pretrained models from
+the MicroLive GitHub repository. It follows the same patterns used by Cellpose
+for robust model provisioning.
+
+Models are downloaded on first use and cached locally in ~/.microlive/models/
+to avoid repeated downloads.
+"""
+
+import os
+import ssl
+import shutil
+import tempfile
+import logging
+from pathlib import Path
+from urllib.request import urlopen
+from urllib.error import URLError, HTTPError
+
+logger = logging.getLogger(__name__)
+
+# =============================================================================
+# Configuration
+# =============================================================================
+
+# Base URL for raw GitHub content
+_GITHUB_RAW_BASE = "https://raw.githubusercontent.com/ningzhaoAnschutz/microlive/main"
+
+# Model URLs - Add new models here
+MODEL_URLS = {
+    "frap_nuclei": f"{_GITHUB_RAW_BASE}/modeling/cellpose_models/cellpose_models/FRAP_nuclei_model/models/cellpose_1728581750.581418",
+}
+
+# Local cache directory (similar to Cellpose's ~/.cellpose/models/)
+_MODEL_DIR_ENV = os.environ.get("MICROLIVE_LOCAL_MODELS_PATH")
+_MODEL_DIR_DEFAULT = Path.home() / ".microlive" / "models"
+MODEL_DIR = Path(_MODEL_DIR_ENV) if _MODEL_DIR_ENV else _MODEL_DIR_DEFAULT
+
+
+# =============================================================================
+# Download Utilities (adapted from Cellpose)
+# =============================================================================
+
+def download_url_to_file(url: str, dst: str, progress: bool = True) -> None:
+    """
+    Download object at the given URL to a local path.
+
+    Adapted from Cellpose/torch implementation for robustness.
+
+    Args:
+        url: URL of the object to download.
+        dst: Full path where object will be saved.
+        progress: Whether to display a progress bar. Default: True.
+
+    Raises:
+        HTTPError: If the server returns an error status.
+        URLError: If the URL cannot be reached.
+    """
+    try:
+        from tqdm import tqdm
+        HAS_TQDM = True
+    except ImportError:
+        HAS_TQDM = False
+        progress = False
+
+    file_size = None
+
+    # Handle SSL certificate verification issues
+    ssl_context = ssl.create_default_context()
+    ssl_context.check_hostname = False
+    ssl_context.verify_mode = ssl.CERT_NONE
+
+    try:
+        u = urlopen(url, context=ssl_context)
+    except URLError as e:
+        raise URLError(f"Failed to connect to {url}: {e}")
+
+    meta = u.info()
+    if hasattr(meta, "getheaders"):
+        content_length = meta.getheaders("Content-Length")
+    else:
+        content_length = meta.get_all("Content-Length")
+
+    if content_length is not None and len(content_length) > 0:
+        file_size = int(content_length[0])
+
+    # Save to temp file first, then move (atomic operation)
+    dst = os.path.expanduser(dst)
+    dst_dir = os.path.dirname(dst)
+    os.makedirs(dst_dir, exist_ok=True)
+
+    f = tempfile.NamedTemporaryFile(delete=False, dir=dst_dir)
+    try:
+        if HAS_TQDM and progress:
+            with tqdm(total=file_size, disable=not progress, unit="B",
+                      unit_scale=True, unit_divisor=1024,
+                      desc=f"Downloading {Path(dst).name}") as pbar:
+                while True:
+                    buffer = u.read(8192)
+                    if len(buffer) == 0:
+                        break
+                    f.write(buffer)
+                    pbar.update(len(buffer))
+        else:
+            # Simple download without progress bar
+            while True:
+                buffer = u.read(8192)
+                if len(buffer) == 0:
+                    break
+                f.write(buffer)
+
+        f.close()
+        shutil.move(f.name, dst)
+        logger.info(f"Successfully downloaded model to {dst}")
+
+    except Exception as e:
+        f.close()
+        if os.path.exists(f.name):
+            os.remove(f.name)
+        raise RuntimeError(f"Download failed: {e}")
+    finally:
+        if os.path.exists(f.name):
+            try:
+                os.remove(f.name)
+            except OSError:
+                pass
+
+
+# =============================================================================
+# Model Cache Functions
+# =============================================================================
+
+def get_model_path(model_name: str) -> Path:
+    """
+    Get the local cache path for a model.
+
+    Args:
+        model_name: Name of the model (e.g., "frap_nuclei").
+
+    Returns:
+        Path to the cached model file.
+    """
+    return MODEL_DIR / model_name
+
+
+def is_model_cached(model_name: str) -> bool:
+    """
+    Check if a model is already cached locally.
+
+    Args:
+        model_name: Name of the model.
+
+    Returns:
+        True if the model exists locally, False otherwise.
+    """
+    return get_model_path(model_name).exists()
+
+
+def cache_model(model_name: str, force_download: bool = False) -> str:
+    """
+    Ensure a model is cached locally, downloading if necessary.
+
+    This function follows the Cellpose pattern:
+    1. Check if model exists in local cache
+    2. If not (or force_download=True), download from GitHub
+    3. Return the local path
+
+    Args:
+        model_name: Name of the model (must be in MODEL_URLS).
+        force_download: If True, re-download even if cached.
+
+    Returns:
+        String path to the cached model file.
+
+    Raises:
+        ValueError: If model_name is not recognized.
+        RuntimeError: If download fails.
+    """
+    if model_name not in MODEL_URLS:
+        available = ", ".join(MODEL_URLS.keys())
+        raise ValueError(f"Unknown model '{model_name}'. Available: {available}")
+
+    MODEL_DIR.mkdir(parents=True, exist_ok=True)
+    cached_file = get_model_path(model_name)
+
+    if not cached_file.exists() or force_download:
+        url = MODEL_URLS[model_name]
+        logger.info(f"Downloading model '{model_name}' from {url}")
+        print(f"Downloading MicroLive model '{model_name}' (first time only)...")
+
+        try:
+            download_url_to_file(url, str(cached_file), progress=True)
+        except (HTTPError, URLError) as e:
+            raise RuntimeError(
+                f"Failed to download model '{model_name}' from GitHub. "
+                f"Error: {e}\n\n"
+                f"If this persists, you can manually download from:\n"
+                f"  {url}\n"
+                f"And place it at:\n"
+                f"  {cached_file}"
+            )
+    else:
+        logger.debug(f"Model '{model_name}' already cached at {cached_file}")
+
+    return str(cached_file)
+
+
+# =============================================================================
+# Convenience Functions for Specific Models
+# =============================================================================
+
+def get_frap_nuclei_model_path() -> str:
+    """
+    Get the path to the FRAP nuclei segmentation model.
+
+    Downloads the model from GitHub if not already cached locally.
+    The model is stored in ~/.microlive/models/frap_nuclei
+
+    Returns:
+        String path to the FRAP nuclei model file.
+
+    Example:
+        >>> from microlive.utils.model_downloader import get_frap_nuclei_model_path
+        >>> model_path = get_frap_nuclei_model_path()
+        >>> # Use with Cellpose
+        >>> from cellpose import models
+        >>> model = models.CellposeModel(pretrained_model=model_path)
+    """
+    return cache_model("frap_nuclei")
+
+
+# =============================================================================
+# Verification and Diagnostics
+# =============================================================================
+
+def verify_model_integrity(model_name: str) -> bool:
+    """
+    Verify that a cached model file exists and has non-zero size.
+
+    Args:
+        model_name: Name of the model to verify.
+
+    Returns:
+        True if the model file exists and is valid.
+    """
+    model_path = get_model_path(model_name)
+    if not model_path.exists():
+        return False
+
+    # Check file size (should be > 1MB for a real model)
+    size_bytes = model_path.stat().st_size
+    if size_bytes < 1_000_000:
+        logger.warning(f"Model file seems too small ({size_bytes} bytes): {model_path}")
+        return False
+
+    return True
+
+
+def list_cached_models() -> dict:
+    """
+    List all cached models and their status.
+
+    Returns:
+        Dictionary mapping model names to their cache status and size.
+    """
+    result = {}
+    for name in MODEL_URLS:
+        path = get_model_path(name)
+        if path.exists():
+            size_mb = path.stat().st_size / (1024 * 1024)
+            result[name] = {"cached": True, "size_mb": round(size_mb, 2), "path": str(path)}
+        else:
+            result[name] = {"cached": False, "size_mb": 0, "path": str(path)}
+    return result
+
+
+def clear_model_cache(model_name: str = None) -> None:
+    """
+    Clear cached models.
+
+    Args:
+        model_name: Specific model to clear, or None to clear all.
+    """
+    if model_name:
+        path = get_model_path(model_name)
+        if path.exists():
+            path.unlink()
+            logger.info(f"Cleared cached model: {model_name}")
+    else:
+        if MODEL_DIR.exists():
+            shutil.rmtree(MODEL_DIR)
+            logger.info(f"Cleared all cached models from {MODEL_DIR}")
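A sketch of the caching workflow defined in this new module, assuming network access. Note that MICROLIVE_LOCAL_MODELS_PATH must be set before the module is imported, because MODEL_DIR is resolved at import time.

import os
os.environ["MICROLIVE_LOCAL_MODELS_PATH"] = "/tmp/microlive_models"  # optional cache override, set before import

from microlive.utils.model_downloader import (
    cache_model, verify_model_integrity, clear_model_cache,
)

path = cache_model("frap_nuclei", force_download=True)  # download even if cached
print(path, verify_model_integrity("frap_nuclei"))      # path and a basic size check
clear_model_cache("frap_nuclei")                        # remove just this cached model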
{microlive-1.0.13.dist-info → microlive-1.0.15.dist-info}/METADATA
CHANGED
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: microlive
-Version: 1.0.13
+Version: 1.0.15
 Summary: Live-cell microscopy image analysis and single-molecule measurements
 Project-URL: Homepage, https://github.com/ningzhaoAnschutz/microlive
 Project-URL: Documentation, https://github.com/ningzhaoAnschutz/microlive/blob/main/docs/user_guide.md
{microlive-1.0.13.dist-info → microlive-1.0.15.dist-info}/RECORD
CHANGED
@@ -1,26 +1,28 @@
-microlive/__init__.py,sha256=
+microlive/__init__.py,sha256=7MuUee2Gl8qB6BxHVh-WCCBShmt7ZNkKgYRLAvTVT9A,1385
 microlive/imports.py,sha256=VAAMavSLIKO0LooadTXfCdZiv8LQbV_wITeIv8IHwxM,7531
-microlive/microscopy.py,sha256=
+microlive/microscopy.py,sha256=OFqf0JXJW4-2cLHvXnwwp_SfMFsUXwp5lDKbkCRR4ok,710841
 microlive/ml_spot_detection.py,sha256=pVbOSGNJ0WWMuPRML42rFwvjKVZ0B1fJux1179OIbAg,10603
 microlive/data/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 microlive/data/icons/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 microlive/data/icons/icon_micro.png,sha256=b5tFv4E6vUmLwYmYeM4PJuxLV_XqEzN14ueolekTFW0,370236
 microlive/data/models/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 microlive/gui/__init__.py,sha256=tB-CdDC7x5OwYFAQxLOUvfVnUThaXKXVRsB68YP0Y6Q,28
-microlive/gui/app.py,sha256=
+microlive/gui/app.py,sha256=bloU7fOVHB09SCtpmRMtoPzEwE64zGgI2SVRsQD6Hug,800266
 microlive/gui/main.py,sha256=b66W_2V-pclGKOozfs75pwrCGbL_jkVU3kFt8RFMZIc,2520
 microlive/gui/micro_mac.command,sha256=TkxYOO_5A2AiNJMz3_--1geBYfl77THpOLFZnV4J2ac,444
 microlive/gui/micro_windows.bat,sha256=DJUKPhDbCO4HToLwSMT-QTYRe9Kr1wn5A2Ijy2klIrw,773
 microlive/pipelines/__init__.py,sha256=VimchYrIWalFs_edRmjR1zBHIg2CcpRceZoRmB1e8kA,764
-microlive/pipelines/pipeline_FRAP.py,sha256=
+microlive/pipelines/pipeline_FRAP.py,sha256=jBGzb7m3RzbuKtmD-KCrpSZCbypuLHeUacm88-XlUUU,62691
 microlive/pipelines/pipeline_folding_efficiency.py,sha256=0PTogfXHRtO2kXOeQXb5-VBb46DQsj6namGVEkMGI0g,22550
 microlive/pipelines/pipeline_particle_tracking.py,sha256=euPTLH6O9I66HkUb4Izah8ZF_aOdQLRyyR8vo1jSkFA,28245
 microlive/pipelines/pipeline_spot_detection_no_tracking.py,sha256=t-p1xCQvThnVKMJZgk3Xhk3k6cvp1VgwTJ0ZIbfzNG0,19087
-microlive/utils/__init__.py,sha256=
+microlive/utils/__init__.py,sha256=metAf2zPS8w23d8dyM7-ld1ovrOKBdx3y3zu5IVrzIg,564
 microlive/utils/device.py,sha256=tcPMU8UiXL-DuGwhudUgrbjW1lgIK_EUKIOeOn0U6q4,2533
+microlive/utils/model_downloader.py,sha256=EruviTEh75YBekpznn1RZ1Nj8lnDmeC4TKEnFLOow6Y,9448
 microlive/utils/resources.py,sha256=Jz7kPI75xMLCBJMyX7Y_3ixKi_UgydfQkF0BlFtLCKs,1753
-microlive
-microlive-1.0.
-microlive-1.0.
-microlive-1.0.
-microlive-1.0.
+microlive/data/models/spot_detection_cnn.pth,sha256=Np7vpPJIbKQmuKY0Hx-4IkeEDsnks_QEgs7TqaYgZmI,8468580
+microlive-1.0.15.dist-info/METADATA,sha256=s3CJducpiRGKEiKt6iodAbjo72wZGO1fOQBBU6Jkmb0,12434
+microlive-1.0.15.dist-info/WHEEL,sha256=WLgqFyCfm_KASv4WHyYy0P3pM_m7J5L9k2skdKLirC8,87
+microlive-1.0.15.dist-info/entry_points.txt,sha256=Zqp2vixyD8lngcfEmOi8fkCj7vPhesz5xlGBI-EubRw,54
+microlive-1.0.15.dist-info/licenses/LICENSE,sha256=ixuiBLtpoK3iv89l7ylKkg9rs2GzF9ukPH7ynZYzK5s,35148
+microlive-1.0.15.dist-info/RECORD,,

{microlive-1.0.13.dist-info → microlive-1.0.15.dist-info}/WHEEL
File without changes

{microlive-1.0.13.dist-info → microlive-1.0.15.dist-info}/entry_points.txt
File without changes

{microlive-1.0.13.dist-info → microlive-1.0.15.dist-info}/licenses/LICENSE
File without changes