napari-tmidas 0.2.1__py3-none-any.whl → 0.2.4__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (56)
  1. napari_tmidas/__init__.py +35 -5
  2. napari_tmidas/_crop_anything.py +1458 -499
  3. napari_tmidas/_env_manager.py +76 -0
  4. napari_tmidas/_file_conversion.py +1646 -1131
  5. napari_tmidas/_file_selector.py +1464 -223
  6. napari_tmidas/_label_inspection.py +83 -8
  7. napari_tmidas/_processing_worker.py +309 -0
  8. napari_tmidas/_reader.py +6 -10
  9. napari_tmidas/_registry.py +15 -14
  10. napari_tmidas/_roi_colocalization.py +1221 -84
  11. napari_tmidas/_tests/test_crop_anything.py +123 -0
  12. napari_tmidas/_tests/test_env_manager.py +89 -0
  13. napari_tmidas/_tests/test_file_selector.py +90 -0
  14. napari_tmidas/_tests/test_grid_view_overlay.py +193 -0
  15. napari_tmidas/_tests/test_init.py +98 -0
  16. napari_tmidas/_tests/test_intensity_label_filter.py +222 -0
  17. napari_tmidas/_tests/test_label_inspection.py +86 -0
  18. napari_tmidas/_tests/test_processing_basic.py +500 -0
  19. napari_tmidas/_tests/test_processing_worker.py +142 -0
  20. napari_tmidas/_tests/test_regionprops_analysis.py +547 -0
  21. napari_tmidas/_tests/test_registry.py +135 -0
  22. napari_tmidas/_tests/test_scipy_filters.py +168 -0
  23. napari_tmidas/_tests/test_skimage_filters.py +259 -0
  24. napari_tmidas/_tests/test_split_channels.py +217 -0
  25. napari_tmidas/_tests/test_spotiflow.py +87 -0
  26. napari_tmidas/_tests/test_tyx_display_fix.py +142 -0
  27. napari_tmidas/_tests/test_ui_utils.py +68 -0
  28. napari_tmidas/_tests/test_widget.py +30 -0
  29. napari_tmidas/_tests/test_windows_basic.py +66 -0
  30. napari_tmidas/_ui_utils.py +57 -0
  31. napari_tmidas/_version.py +16 -3
  32. napari_tmidas/_widget.py +41 -4
  33. napari_tmidas/processing_functions/basic.py +557 -20
  34. napari_tmidas/processing_functions/careamics_env_manager.py +72 -99
  35. napari_tmidas/processing_functions/cellpose_env_manager.py +415 -112
  36. napari_tmidas/processing_functions/cellpose_segmentation.py +132 -191
  37. napari_tmidas/processing_functions/colocalization.py +513 -56
  38. napari_tmidas/processing_functions/grid_view_overlay.py +703 -0
  39. napari_tmidas/processing_functions/intensity_label_filter.py +422 -0
  40. napari_tmidas/processing_functions/regionprops_analysis.py +1280 -0
  41. napari_tmidas/processing_functions/sam2_env_manager.py +53 -69
  42. napari_tmidas/processing_functions/sam2_mp4.py +274 -195
  43. napari_tmidas/processing_functions/scipy_filters.py +403 -8
  44. napari_tmidas/processing_functions/skimage_filters.py +424 -212
  45. napari_tmidas/processing_functions/spotiflow_detection.py +949 -0
  46. napari_tmidas/processing_functions/spotiflow_env_manager.py +591 -0
  47. napari_tmidas/processing_functions/timepoint_merger.py +334 -86
  48. napari_tmidas/processing_functions/trackastra_tracking.py +24 -5
  49. {napari_tmidas-0.2.1.dist-info → napari_tmidas-0.2.4.dist-info}/METADATA +92 -39
  50. napari_tmidas-0.2.4.dist-info/RECORD +63 -0
  51. napari_tmidas/_tests/__init__.py +0 -0
  52. napari_tmidas-0.2.1.dist-info/RECORD +0 -38
  53. {napari_tmidas-0.2.1.dist-info → napari_tmidas-0.2.4.dist-info}/WHEEL +0 -0
  54. {napari_tmidas-0.2.1.dist-info → napari_tmidas-0.2.4.dist-info}/entry_points.txt +0 -0
  55. {napari_tmidas-0.2.1.dist-info → napari_tmidas-0.2.4.dist-info}/licenses/LICENSE +0 -0
  56. {napari_tmidas-0.2.1.dist-info → napari_tmidas-0.2.4.dist-info}/top_level.txt +0 -0
@@ -0,0 +1,422 @@
+# processing_functions/intensity_label_filter.py
+"""
+Processing functions for filtering labels based on intensity using k-medoids clustering.
+"""
+import inspect
+from pathlib import Path
+from typing import Dict
+
+import numpy as np
+from skimage import measure
+
+from napari_tmidas._registry import BatchProcessingRegistry
+
+
+def _convert_semantic_to_instance(image: np.ndarray) -> np.ndarray:
+    """
+    Convert semantic labels (where all objects have the same value) to instance labels.
+
+    Parameters
+    ----------
+    image : np.ndarray
+        Label image that may contain semantic labels
+
+    Returns
+    -------
+    np.ndarray
+        Image with instance labels (each connected component gets unique label)
+    """
+    if image is None or np.all(image == 0):
+        return image
+
+    # Get unique non-zero values
+    unique_labels = np.unique(image[image != 0])
+
+    # If there's only one unique non-zero value, it's definitely semantic
+    if len(unique_labels) == 1:
+        # Single semantic label - convert to instance labels
+        mask = image > 0
+        return measure.label(mask, connectivity=None)
+    else:
+        # Multiple labels - already instance labels
+        return image
+
+
+# Lazy imports for optional dependencies
+try:
+    from sklearn_extra.cluster import KMedoids
+
+    _HAS_KMEDOIDS = True
+except ImportError:
+    KMedoids = None
+    _HAS_KMEDOIDS = False
+    print(
+        "scikit-learn-extra not available. Install with: pip install scikit-learn-extra"
+    )
+
+try:
+    import pandas as pd
+
+    _HAS_PANDAS = True
+except ImportError:
+    pd = None
+    _HAS_PANDAS = False
+
+
+def _calculate_label_mean_intensities(
+    label_image: np.ndarray, intensity_image: np.ndarray
+) -> Dict[int, float]:
+    """
+    Calculate mean intensity for each label.
+
+    Parameters
+    ----------
+    label_image : np.ndarray
+        Label image with integer labels
+    intensity_image : np.ndarray
+        Intensity image corresponding to the label image
+
+    Returns
+    -------
+    Dict[int, float]
+        Dictionary mapping label IDs to mean intensities
+    """
+    # Use regionprops to calculate mean intensity for each label
+    props = measure.regionprops_table(
+        label_image, intensity_image, properties=["label", "intensity_mean"]
+    )
+
+    return dict(zip(props["label"], props["intensity_mean"]))
+
+
+def _cluster_intensities(
+    intensities: np.ndarray, n_clusters: int
+) -> tuple[np.ndarray, np.ndarray, float]:
+    """
+    Cluster intensities using k-medoids and determine threshold.
+
+    Parameters
+    ----------
+    intensities : np.ndarray
+        Array of intensity values to cluster
+    n_clusters : int
+        Number of clusters (2 or 3)
+
+    Returns
+    -------
+    tuple[np.ndarray, np.ndarray, float]
+        Cluster labels, cluster centers (medoids), and threshold value
+    """
+    if not _HAS_KMEDOIDS:
+        raise ImportError(
+            "scikit-learn-extra is required for k-medoids clustering. "
+            "Install with: pip install scikit-learn-extra"
+        )
+
+    # Reshape for sklearn
+    X = intensities.reshape(-1, 1)
+
+    # Perform k-medoids clustering
+    kmedoids = KMedoids(n_clusters=n_clusters, random_state=42, method="pam")
+    cluster_labels = kmedoids.fit_predict(X)
+    medoids = kmedoids.cluster_centers_.flatten()
+
+    # Sort medoids to identify clusters from low to high intensity
+    sorted_indices = np.argsort(medoids)
+    sorted_medoids = medoids[sorted_indices]
+
+    # Create mapping from old cluster labels to sorted cluster labels
+    label_mapping = {
+        old_label: new_label
+        for new_label, old_label in enumerate(sorted_indices)
+    }
+    sorted_labels = np.array(
+        [label_mapping[label] for label in cluster_labels]
+    )
+
+    # Determine threshold between lowest and second-lowest clusters
+    # Use midpoint between the two lowest cluster centers
+    threshold = (sorted_medoids[0] + sorted_medoids[1]) / 2.0
+
+    return sorted_labels, sorted_medoids, threshold
+
+
+def _get_intensity_filename(
+    label_filename: str, label_suffix: str = "_convpaint_labels_filtered.tif"
+) -> str:
+    """
+    Convert label filename to intensity filename by removing suffix.
+
+    Parameters
+    ----------
+    label_filename : str
+        Filename of the label image
+    label_suffix : str
+        Suffix to remove from label filename (default: "_convpaint_labels_filtered.tif")
+
+    Returns
+    -------
+    str
+        Intensity image filename
+    """
+    if label_filename.endswith(label_suffix):
+        # Remove the label suffix and add .tif
+        base_name = label_filename[: -len(label_suffix)]
+        return base_name + ".tif"
+    else:
+        # If suffix doesn't match, assume same filename
+        return label_filename
+
+
+def _filter_labels_by_threshold(
+    label_image: np.ndarray,
+    label_intensities: Dict[int, float],
+    threshold: float,
+) -> np.ndarray:
+    """
+    Filter labels based on intensity threshold.
+
+    Parameters
+    ----------
+    label_image : np.ndarray
+        Label image with integer labels
+    label_intensities : Dict[int, float]
+        Dictionary mapping label IDs to mean intensities
+    threshold : float
+        Intensity threshold - labels below this are removed
+
+    Returns
+    -------
+    np.ndarray
+        Filtered label image with same dtype as input
+    """
+    filtered_image = label_image.copy()
+
+    # Remove labels with intensity below threshold
+    for label_id, intensity in label_intensities.items():
+        if intensity < threshold:
+            filtered_image[label_image == label_id] = 0
+
+    return filtered_image
+
+
+@BatchProcessingRegistry.register(
+    name="Filter Labels by Intensity (K-medoids)",
+    suffix="_intensity_filtered",
+    description="Filter out labels with low intensity using k-medoids clustering. Finds corresponding intensity image in same folder. Choose 2 clusters for simple low/high separation, or 3 clusters when you have distinct noise/signal/strong-signal populations.",
+    parameters={
+        "n_clusters": {
+            "type": int,
+            "default": 2,
+            "description": "Number of clusters (2 or 3). Use 2 for simple low/high separation, 3 for noise/diffuse/strong separation.",
+        },
+        "save_stats": {
+            "type": bool,
+            "default": True,
+            "description": "Save clustering statistics to CSV file",
+        },
+    },
+)
+def filter_labels_by_intensity(
+    image: np.ndarray,
+    n_clusters: int = 2,
+    save_stats: bool = True,
+) -> np.ndarray:
+    """
+    Filter labels based on intensity using k-medoids clustering.
+
+    This function processes pairs of label and intensity images in the same folder.
+    For each label image, it finds the corresponding intensity image (removes
+    "_convpaint_labels_filtered.tif" suffix from label filename to find intensity file),
+    calculates mean intensity per label, performs k-medoids clustering to identify
+    intensity groups, and filters out labels in the low intensity cluster.
+
+    Use n_clusters=2 for simple separation (bad vs. good signal).
+    Use n_clusters=3 when you have distinct populations (noise, diffuse signal, strong signal).
+
+    Parameters
+    ----------
+    image : np.ndarray
+        Label image with integer labels
+    n_clusters : int
+        Number of clusters (2 or 3)
+    save_stats : bool
+        Whether to save clustering statistics to CSV
+
+    Returns
+    -------
+    np.ndarray
+        Filtered label image with low-intensity labels removed
+    """
+    # Extract current filepath from call stack
+    current_filepath = None
+    for frame_info in inspect.stack():
+        frame_locals = frame_info.frame.f_locals
+        if "filepath" in frame_locals:
+            current_filepath = frame_locals["filepath"]
+            break
+
+    if current_filepath is None:
+        raise ValueError(
+            "Could not determine current file path from call stack"
+        )
+
+    if n_clusters not in [2, 3]:
+        raise ValueError(f"n_clusters must be 2 or 3, got {n_clusters}")
+
+    # Convert semantic labels to instance labels if needed
+    original_dtype = image.dtype
+    image = _convert_semantic_to_instance(image)
+
+    # Check if we actually have any labels after conversion
+    unique_labels = np.unique(image[image != 0])
+    if len(unique_labels) == 0:
+        print("⚠️ No labels found in image, returning empty image")
+        return np.zeros_like(image)
+
+    print(f"📋 Found {len(unique_labels)} labels in the image")
+
+    # Find corresponding intensity image in same folder
+    label_path = Path(current_filepath)
+    label_filename = label_path.name
+    intensity_filename = _get_intensity_filename(label_filename)
+    intensity_path = label_path.parent / intensity_filename
+
+    if not intensity_path.exists():
+        print(
+            f"⚠️ No corresponding intensity image found for {label_filename}"
+        )
+        print(f" Expected: {intensity_filename}")
+        print(f" Full path: {intensity_path}")
+        print(" Skipping this file...")
+        return image  # Return original image unchanged
+
+    # Load intensity image directly with tifffile
+    try:
+        import tifffile
+
+        intensity_image = tifffile.imread(str(intensity_path))
+    except (FileNotFoundError, OSError) as e:
+        print(f"⚠️ Could not read intensity image: {intensity_path}")
+        print(f" Error: {e}")
+        print(" Skipping this file...")
+        return image  # Return original if can't read intensity image
+
+    # Validate dimensions match
+    if image.shape != intensity_image.shape:
+        raise ValueError(
+            f"Label and intensity images must have same shape. "
+            f"Label: {image.shape}, Intensity: {intensity_image.shape}"
+        )
+
+    # Calculate mean intensity for each label
+    label_intensities = _calculate_label_mean_intensities(
+        image, intensity_image
+    )
+
+    if len(label_intensities) == 0:
+        print(f"⚠️ No labels found in {label_filename}, returning empty image")
+        return np.zeros_like(image)
+
+    # Perform k-medoids clustering
+    intensities = np.array(list(label_intensities.values()))
+    cluster_labels, medoids, threshold = _cluster_intensities(
+        intensities, n_clusters=n_clusters
+    )
+
+    # Print results based on number of clusters
+    print(f"📊 {label_filename}:")
+    print(f" Total labels: {len(label_intensities)}")
+
+    if n_clusters == 2:
+        n_low = np.sum(cluster_labels == 0)
+        n_high = np.sum(cluster_labels == 1)
+        print(
+            f" Low intensity cluster: {n_low} labels (medoid: {medoids[0]:.2f})"
+        )
+        print(
+            f" High intensity cluster: {n_high} labels (medoid: {medoids[1]:.2f})"
+        )
+        print(f" Threshold: {threshold:.2f}")
+        print(f" Keeping {n_high} labels, removing {n_low} labels")
+
+        # Save statistics if requested
+        if save_stats and _HAS_PANDAS:
+            stats = {
+                "filename": label_filename,
+                "n_clusters": n_clusters,
+                "total_labels": len(label_intensities),
+                "low_cluster_count": n_low,
+                "high_cluster_count": n_high,
+                "low_cluster_medoid": medoids[0],
+                "high_cluster_medoid": medoids[1],
+                "threshold": threshold,
+            }
+
+            stats_dir = (
+                Path(current_filepath).parent / "intensity_filter_stats"
+            )
+            stats_dir.mkdir(exist_ok=True)
+            stats_file = stats_dir / "clustering_stats.csv"
+
+            df = pd.DataFrame([stats])
+            if stats_file.exists():
+                df.to_csv(stats_file, mode="a", header=False, index=False)
+            else:
+                df.to_csv(stats_file, index=False)
+
+    else:  # n_clusters == 3
+        n_low = np.sum(cluster_labels == 0)
+        n_medium = np.sum(cluster_labels == 1)
+        n_high = np.sum(cluster_labels == 2)
+        print(
+            f" Low intensity cluster: {n_low} labels (medoid: {medoids[0]:.2f})"
+        )
+        print(
+            f" Medium intensity cluster: {n_medium} labels (medoid: {medoids[1]:.2f})"
+        )
+        print(
+            f" High intensity cluster: {n_high} labels (medoid: {medoids[2]:.2f})"
+        )
+        print(f" Threshold: {threshold:.2f}")
+        print(
+            f" Keeping {n_medium + n_high} labels, removing {n_low} labels"
+        )
+
+        # Save statistics if requested
+        if save_stats and _HAS_PANDAS:
+            stats = {
+                "filename": label_filename,
+                "n_clusters": n_clusters,
+                "total_labels": len(label_intensities),
+                "low_cluster_count": n_low,
+                "medium_cluster_count": n_medium,
+                "high_cluster_count": n_high,
+                "low_cluster_medoid": medoids[0],
+                "medium_cluster_medoid": medoids[1],
+                "high_cluster_medoid": medoids[2],
+                "threshold": threshold,
+            }
+
+            stats_dir = (
+                Path(current_filepath).parent / "intensity_filter_stats"
+            )
+            stats_dir.mkdir(exist_ok=True)
+            stats_file = stats_dir / "clustering_stats.csv"
+
+            df = pd.DataFrame([stats])
+            if stats_file.exists():
+                df.to_csv(stats_file, mode="a", header=False, index=False)
+            else:
+                df.to_csv(stats_file, index=False)
+
+    # Filter labels
+    filtered_image = _filter_labels_by_threshold(
+        image, label_intensities, threshold
+    )
+
+    # Convert back to original dtype
+    if filtered_image.dtype != original_dtype:
+        filtered_image = filtered_image.astype(original_dtype)
+
+    return filtered_image
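
For readers skimming the new intensity_label_filter.py module above: the core decision happens in _cluster_intensities, which clusters the per-label mean intensities with k-medoids, sorts the medoids, and sets the cut-off at the midpoint between the two lowest medoids. Below is a minimal sketch of that step in isolation, assuming scikit-learn-extra is installed; the sample intensity values are hypothetical and not taken from the package.

import numpy as np
from sklearn_extra.cluster import KMedoids

# Hypothetical per-label mean intensities: three dim labels, three bright ones
mean_intensities = np.array([3.2, 4.1, 2.7, 48.5, 52.0, 50.3])

# Cluster the 1-D intensities with k-medoids (PAM), as the module does
kmedoids = KMedoids(n_clusters=2, random_state=42, method="pam")
kmedoids.fit(mean_intensities.reshape(-1, 1))
medoids = np.sort(kmedoids.cluster_centers_.flatten())

# Cut-off = midpoint between the two lowest medoids; labels whose mean
# intensity falls below it are zeroed out of the label image
threshold = (medoids[0] + medoids[1]) / 2.0
keep = mean_intensities >= threshold
print(f"threshold={threshold:.2f}, keeping {keep.sum()} of {keep.size} labels")

In the registered batch function itself, the matching intensity image is located by stripping the "_convpaint_labels_filtered.tif" suffix from the label filename, so a label file named sample_convpaint_labels_filtered.tif is paired with sample.tif in the same folder. Note also that filter_labels_by_intensity looks up a "filepath" variable in a caller's frame (set by the batch processing pipeline), so it is not intended to be called directly on bare arrays.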