nettracer3d 1.2.9.tar.gz → 1.3.4.tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (39)
  1. {nettracer3d-1.2.9/src/nettracer3d.egg-info → nettracer3d-1.3.4}/PKG-INFO +6 -3
  2. {nettracer3d-1.2.9 → nettracer3d-1.3.4}/README.md +4 -2
  3. {nettracer3d-1.2.9 → nettracer3d-1.3.4}/pyproject.toml +2 -1
  4. nettracer3d-1.3.4/src/nettracer3d/branch_stitcher.py +528 -0
  5. nettracer3d-1.3.4/src/nettracer3d/endpoint_joiner.py +286 -0
  6. {nettracer3d-1.2.9 → nettracer3d-1.3.4}/src/nettracer3d/filaments.py +348 -106
  7. nettracer3d-1.3.4/src/nettracer3d/histos.py +946 -0
  8. {nettracer3d-1.2.9 → nettracer3d-1.3.4}/src/nettracer3d/nettracer.py +220 -62
  9. {nettracer3d-1.2.9 → nettracer3d-1.3.4}/src/nettracer3d/nettracer_gui.py +2163 -2729
  10. {nettracer3d-1.2.9 → nettracer3d-1.3.4}/src/nettracer3d/network_analysis.py +16 -4
  11. nettracer3d-1.3.4/src/nettracer3d/network_graph_widget.py +2275 -0
  12. nettracer3d-1.3.4/src/nettracer3d/painting.py +409 -0
  13. {nettracer3d-1.2.9 → nettracer3d-1.3.4}/src/nettracer3d/simple_network.py +4 -4
  14. {nettracer3d-1.2.9 → nettracer3d-1.3.4}/src/nettracer3d/tutorial.py +77 -26
  15. {nettracer3d-1.2.9 → nettracer3d-1.3.4/src/nettracer3d.egg-info}/PKG-INFO +6 -3
  16. {nettracer3d-1.2.9 → nettracer3d-1.3.4}/src/nettracer3d.egg-info/SOURCES.txt +3 -0
  17. {nettracer3d-1.2.9 → nettracer3d-1.3.4}/src/nettracer3d.egg-info/requires.txt +1 -0
  18. nettracer3d-1.2.9/src/nettracer3d/branch_stitcher.py +0 -425
  19. nettracer3d-1.2.9/src/nettracer3d/painting.py +0 -549
  20. {nettracer3d-1.2.9 → nettracer3d-1.3.4}/LICENSE +0 -0
  21. {nettracer3d-1.2.9 → nettracer3d-1.3.4}/setup.cfg +0 -0
  22. {nettracer3d-1.2.9 → nettracer3d-1.3.4}/src/nettracer3d/__init__.py +0 -0
  23. {nettracer3d-1.2.9 → nettracer3d-1.3.4}/src/nettracer3d/cellpose_manager.py +0 -0
  24. {nettracer3d-1.2.9 → nettracer3d-1.3.4}/src/nettracer3d/community_extractor.py +0 -0
  25. {nettracer3d-1.2.9 → nettracer3d-1.3.4}/src/nettracer3d/excelotron.py +0 -0
  26. {nettracer3d-1.2.9 → nettracer3d-1.3.4}/src/nettracer3d/modularity.py +0 -0
  27. {nettracer3d-1.2.9 → nettracer3d-1.3.4}/src/nettracer3d/morphology.py +0 -0
  28. {nettracer3d-1.2.9 → nettracer3d-1.3.4}/src/nettracer3d/neighborhoods.py +0 -0
  29. {nettracer3d-1.2.9 → nettracer3d-1.3.4}/src/nettracer3d/network_draw.py +0 -0
  30. {nettracer3d-1.2.9 → nettracer3d-1.3.4}/src/nettracer3d/node_draw.py +0 -0
  31. {nettracer3d-1.2.9 → nettracer3d-1.3.4}/src/nettracer3d/proximity.py +0 -0
  32. {nettracer3d-1.2.9 → nettracer3d-1.3.4}/src/nettracer3d/run.py +0 -0
  33. {nettracer3d-1.2.9 → nettracer3d-1.3.4}/src/nettracer3d/segmenter.py +0 -0
  34. {nettracer3d-1.2.9 → nettracer3d-1.3.4}/src/nettracer3d/segmenter_GPU.py +0 -0
  35. {nettracer3d-1.2.9 → nettracer3d-1.3.4}/src/nettracer3d/smart_dilate.py +0 -0
  36. {nettracer3d-1.2.9 → nettracer3d-1.3.4}/src/nettracer3d/stats.py +0 -0
  37. {nettracer3d-1.2.9 → nettracer3d-1.3.4}/src/nettracer3d.egg-info/dependency_links.txt +0 -0
  38. {nettracer3d-1.2.9 → nettracer3d-1.3.4}/src/nettracer3d.egg-info/entry_points.txt +0 -0
  39. {nettracer3d-1.2.9 → nettracer3d-1.3.4}/src/nettracer3d.egg-info/top_level.txt +0 -0

{nettracer3d-1.2.9/src/nettracer3d.egg-info → nettracer3d-1.3.4}/PKG-INFO

@@ -1,6 +1,6 @@
  Metadata-Version: 2.4
  Name: nettracer3d
- Version: 1.2.9
+ Version: 1.3.4
  Summary: Scripts for intializing and analyzing networks from segmentations of three dimensional images.
  Author-email: Liam McLaughlin <liamm@wustl.edu>
  Project-URL: Documentation, https://nettracer3d.readthedocs.io/en/latest/
@@ -24,6 +24,7 @@ Requires-Dist: pandas
  Requires-Dist: tifffile
  Requires-Dist: qtrangeslider
  Requires-Dist: PyQt6
+ Requires-Dist: pyqtgraph
  Requires-Dist: scikit-learn
  Requires-Dist: setuptools
  Requires-Dist: umap-learn
@@ -154,7 +155,9 @@ NetTracer3D is freely available for academic and nonprofit use and can obtained

  NetTracer3D was developed by Liam McLaughlin while working under Dr. Sanjay Jain at Washington University School of Medicine.

- -- Version 1.2.9 Updates --
+ -- Version 1.3.4 Updates --

- * Some minor adjustments.
+ * Added option to view network in a concentric-shell like manner
+ * Added way to batch compute the histogram statistics
+ * Updated the slice refresh rate to be much faster


{nettracer3d-1.2.9 → nettracer3d-1.3.4}/README.md

@@ -104,7 +104,9 @@ NetTracer3D is freely available for academic and nonprofit use and can obtained

  NetTracer3D was developed by Liam McLaughlin while working under Dr. Sanjay Jain at Washington University School of Medicine.

- -- Version 1.2.9 Updates --
+ -- Version 1.3.4 Updates --

- * Some minor adjustments.
+ * Added option to view network in a concentric-shell like manner
+ * Added way to batch compute the histogram statistics
+ * Updated the slice refresh rate to be much faster


{nettracer3d-1.2.9 → nettracer3d-1.3.4}/pyproject.toml

@@ -1,6 +1,6 @@
  [project]
  name = "nettracer3d"
- version = "1.2.9"
+ version = "1.3.4"
  authors = [
    { name="Liam McLaughlin", email="liamm@wustl.edu" },
  ]
@@ -19,6 +19,7 @@ dependencies = [
  "tifffile",
  "qtrangeslider",
  "PyQt6",
+ "pyqtgraph",
  "scikit-learn",
  "setuptools",
  "umap-learn"]

nettracer3d-1.3.4/src/nettracer3d/branch_stitcher.py (new file)

@@ -0,0 +1,528 @@
+ import numpy as np
+ import networkx as nx
+ from scipy.spatial import cKDTree
+ from collections import deque
+ from . import smart_dilate as sdl
+
+
+ class VesselDenoiser:
+     """
+     Denoise vessel segmentations using graph-based geometric features
+     IMPROVED: Uses skeleton topology to compute endpoint directions
+     """
+
+     def __init__(self,
+                  score_thresh = 2,
+                  xy_scale = 1,
+                  z_scale = 1,
+                  trace_length = 10):
+         self.score_thresh = score_thresh
+         self.xy_scale = xy_scale
+         self.z_scale = z_scale
+         self.trace_length = trace_length  # How far to trace from endpoint
+
+     def _build_skeleton_graph(self, skeleton):
+         """
+         Build a graph from skeleton where nodes are voxel coordinates
+         and edges connect 26-connected neighbors
+         """
+         skeleton_coords = np.argwhere(skeleton)
+         if len(skeleton_coords) == 0:
+             return None, None
+
+         # Map coordinate tuple -> node index
+         coord_to_idx = {tuple(c): i for i, c in enumerate(skeleton_coords)}
+
+         # Build graph
+         skel_graph = nx.Graph()
+         for i, c in enumerate(skeleton_coords):
+             skel_graph.add_node(i, pos=c)
+
+         # 26-connected neighborhood
+         nbr_offsets = [(dz, dy, dx)
+                        for dz in (-1, 0, 1)
+                        for dy in (-1, 0, 1)
+                        for dx in (-1, 0, 1)
+                        if not (dz == dy == dx == 0)]
+
+         # Add edges
+         for i, c in enumerate(skeleton_coords):
+             cz, cy, cx = c
+             for dz, dy, dx in nbr_offsets:
+                 nb = (cz + dz, cy + dy, cx + dx)
+                 j = coord_to_idx.get(nb)
+                 if j is not None and j > i:
+                     skel_graph.add_edge(i, j)
+
+         return skel_graph, coord_to_idx
+
+     def select_kernel_points_topology(self, data, skeleton):
+         """
+         Returns only skeleton endpoints (degree=1 nodes)
+         """
+         skel_graph, coord_to_idx = self._build_skeleton_graph(skeleton)
+
+         if skel_graph is None:
+             return np.array([]), None, None
+
+         # Get degree per node
+         deg = dict(skel_graph.degree())
+
+         # ONLY keep endpoints (degree=1)
+         endpoints = [i for i, d in deg.items() if d == 1]
+
+         # Get coordinates
+         skeleton_coords = np.argwhere(skeleton)
+         kernel_coords = np.array([skeleton_coords[i] for i in endpoints])
+
+         return kernel_coords, skel_graph, coord_to_idx
+
+     def _compute_endpoint_direction(self, skel_graph, endpoint_idx, trace_length=None):
+         """
+         Compute direction by tracing along skeleton from endpoint.
+         Returns direction vector pointing INTO the skeleton (away from endpoint).
+
+         Parameters:
+         -----------
+         skel_graph : networkx.Graph
+             Skeleton graph with node positions
+         endpoint_idx : int
+             Node index of the endpoint
+         trace_length : int
+             How many steps to trace along skeleton
+
+         Returns:
+         --------
+         direction : ndarray
+             Normalized direction vector pointing into skeleton from endpoint
+         """
+         if trace_length is None:
+             trace_length = self.trace_length
+
+         # Get endpoint position
+         endpoint_pos = skel_graph.nodes[endpoint_idx]['pos']
+
+         # BFS from endpoint to collect positions along skeleton path
+         visited = {endpoint_idx}
+         queue = deque([endpoint_idx])
+         path_positions = []
+
+         while queue and len(path_positions) < trace_length:
+             current = queue.popleft()
+
+             # Get neighbors
+             for neighbor in skel_graph.neighbors(current):
+                 if neighbor not in visited:
+                     visited.add(neighbor)
+                     queue.append(neighbor)
+
+                     # Add this position to path
+                     neighbor_pos = skel_graph.nodes[neighbor]['pos']
+                     path_positions.append(neighbor_pos)
+
+                     if len(path_positions) >= trace_length:
+                         break
+
+         # If we couldn't trace far enough, use what we have
+         if len(path_positions) == 0:
+             # Isolated endpoint, return arbitrary direction
+             return np.array([0., 0., 1.])
+
+         # Compute direction as average vector from endpoint to traced positions
+         # This gives us the direction the skeleton is "extending" from the endpoint
+         path_positions = np.array(path_positions)
+
+         # Weight more distant points more heavily (they better represent overall direction)
+         weights = np.linspace(1.0, 2.0, len(path_positions))
+         weights = weights / weights.sum()
+
+         # Weighted average position along the path
+         weighted_target = np.sum(path_positions * weights[:, None], axis=0)
+
+         # Direction from endpoint toward this position
+         direction = weighted_target - endpoint_pos
+
+         # Normalize
+         norm = np.linalg.norm(direction)
+         if norm < 1e-10:
+             return np.array([0., 0., 1.])
+
+         return direction / norm
+
+     def extract_kernel_features(self, skeleton, distance_map, kernel_pos,
+                                 skel_graph, coord_to_idx, endpoint_idx):
+         """Extract geometric features for a kernel at a skeleton endpoint"""
+         z, y, x = kernel_pos
+
+         features = {}
+
+         # Vessel radius at this point
+         features['radius'] = distance_map[z, y, x]
+
+         # Direction vector using topology-based tracing
+         features['direction'] = self._compute_endpoint_direction(
+             skel_graph, endpoint_idx, self.trace_length
+         )
+
+         # Position
+         features['pos'] = np.array(kernel_pos)
+
+         # All kernels are endpoints
+         features['is_endpoint'] = True
+
+         return features
+
+     def group_endpoints_by_vertex(self, skeleton_points, verts):
+         """
+         Group endpoints by which vertex (labeled blob) they belong to
+
+         Returns:
+         --------
+         vertex_to_endpoints : dict
+             Dictionary mapping vertex_label -> [list of endpoint indices]
+         """
+         vertex_to_endpoints = {}
+
+         for idx, pos in enumerate(skeleton_points):
+             z, y, x = pos.astype(int)
+             vertex_label = int(verts[z, y, x])
+
+             # Skip if endpoint is not in any vertex (label=0)
+             if vertex_label == 0:
+                 continue
+
+             if vertex_label not in vertex_to_endpoints:
+                 vertex_to_endpoints[vertex_label] = []
+
+             vertex_to_endpoints[vertex_label].append(idx)
+
+         return vertex_to_endpoints
+
+     def compute_edge_features(self, feat_i, feat_j):
+         """
+         Compute features for potential connection between two endpoints.
+         IMPROVED: Uses proper directional alignment (not abs value).
+
+         Two endpoints should connect if:
+         - Their skeletons are pointing TOWARD each other (negative dot product of directions)
+         - They have similar radii
+         - The connection vector aligns with both skeleton directions
+         """
+         features = {}
+
+         # Vector from endpoint i to endpoint j
+         pos_diff = feat_j['pos'] - feat_i['pos']
+         features['distance'] = np.linalg.norm(pos_diff)
+
+         if features['distance'] < 1e-10:
+             # Same point, shouldn't happen
+             features['connection_vector'] = np.array([0., 0., 1.])
+         else:
+             features['connection_vector'] = pos_diff / features['distance']
+
+         # Radius similarity
+         r_i, r_j = feat_i['radius'], feat_j['radius']
+         features['radius_diff'] = abs(r_i - r_j)
+         features['radius_ratio'] = min(r_i, r_j) / (max(r_i, r_j) + 1e-10)
+         features['mean_radius'] = (r_i + r_j) / 2.0
+
+         # CRITICAL: Check if skeletons point toward each other
+         # If both directions point into their skeletons (away from endpoints),
+         # they should point in OPPOSITE directions across the gap
+         dir_i = feat_i['direction']
+         dir_j = feat_j['direction']
+         connection_vec = features['connection_vector']
+
+         # How well does endpoint i's skeleton direction align with the gap vector?
+         # (positive = pointing toward j)
+         align_i = np.dot(dir_i, connection_vec)
+
+         # How well does endpoint j's skeleton direction align AGAINST the gap vector?
+         # (negative = pointing toward i)
+         align_j = np.dot(dir_j, connection_vec)
+
+         # With directions pointing INTO the skeletons, a good connection has
+         # align_i negative (branch i extends away from j) and align_j positive
+         # (branch j extends away from i), so align_i - align_j is large and
+         # negative; score_connection rewards its negation
+         features['approach_score'] = align_i - align_j
+
+         # Individual alignment scores (for diagnostics)
+         features['align_i'] = align_i
+         features['align_j'] = align_j
+
+         # How parallel/antiparallel are the two skeleton directions?
+         # -1 = pointing toward each other (good for connection)
+         # +1 = pointing in same direction (bad, parallel branches)
+         features['direction_similarity'] = np.dot(dir_i, dir_j)
+
+         return features
+
+     def score_connection(self, edge_features):
+         """
+         Score potential connection between two endpoints.
+         FIXED: Directions point INTO skeletons (away from endpoints)
+         """
+         score = 0.0
+
+         # For good connections when directions point INTO skeletons:
+         # - align_i should be NEGATIVE (skeleton i extends away from j)
+         # - align_j should be POSITIVE (skeleton j extends away from i)
+         # - Both skeletons extend away from the gap (good!)
+
+         # HARD REJECT: If skeletons point in same direction (parallel branches)
+         if edge_features['direction_similarity'] > 0.7:
+             return -999
+
+         # HARD REJECT: If both skeletons extend TOWARD the gap (diverging structure)
+         # This means: align_i > 0 and align_j < 0 (both point at gap = fork/divergence)
+         if edge_features['align_i'] > 0.3 and edge_features['align_j'] < -0.3:
+             return -999
+
+         # HARD REJECT: If either skeleton extends the wrong way
+         # align_i should be negative, align_j should be positive
+         if edge_features['align_i'] > 0.3 or edge_features['align_j'] < -0.3:
+             return -999
+
+         # Base similarity scoring
+         score += edge_features['radius_ratio'] * 15.0
+
+         # REWARD: Skeletons extending away from each other across gap
+         # When directions point into skeletons:
+         # Good connection has align_i < 0 and align_j > 0
+         # So we want to MAXIMIZE: -align_i + align_j (both terms positive)
+         extension_score = (-edge_features['align_i'] + edge_features['align_j'])
+         score += extension_score * 10.0
+
+         # REWARD: Skeletons pointing in opposite directions (antiparallel)
+         # direction_similarity should be negative
+         antiparallel_bonus = max(0, -edge_features['direction_similarity']) * 5.0
+         score += antiparallel_bonus
+
+         # SIZE BONUS: Reward large, well-matched vessels
+         if edge_features['radius_ratio'] > 0.7 and extension_score > 1.0:
+             mean_radius = edge_features['mean_radius']
+             score += mean_radius * 1.5
+
+         return score
+
+     def connect_vertices_across_gaps(self, skeleton_points, kernel_features,
+                                      labeled_skeleton, vertex_to_endpoints, verbose=False):
+         """
+         Connect vertices by finding best endpoint pair across each vertex.
+         Each vertex makes at most one connection.
+         """
+         # Initialize label dictionary: label -> label (identity mapping)
+         unique_labels = np.unique(labeled_skeleton[labeled_skeleton > 0])
+         label_dict = {int(label): int(label) for label in unique_labels}
+
+         # Map endpoint index to its skeleton label
+         endpoint_to_label = {}
+         for idx, pos in enumerate(skeleton_points):
+             z, y, x = pos.astype(int)
+             label = int(labeled_skeleton[z, y, x])
+             endpoint_to_label[idx] = label
+
+         # Find root label (union-find helper)
+         def find_root(label):
+             root = label
+             while label_dict[root] != root:
+                 root = label_dict[root]
+             return root
+
+         # Iterate through each vertex
+         for vertex_label, endpoint_indices in vertex_to_endpoints.items():
+             if len(endpoint_indices) < 2:
+                 continue
+
+             if verbose and len(endpoint_indices) > 0:
+                 print(f"\nVertex {vertex_label}: {len(endpoint_indices)} endpoints")
+
+             # Find best pair of endpoints to connect
+             best_i = None
+             best_j = None
+             best_score = -np.inf
+
+             # Try all pairs of endpoints within this vertex
+             for i in range(len(endpoint_indices)):
+                 for j in range(i + 1, len(endpoint_indices)):
+                     idx_i = endpoint_indices[i]
+                     idx_j = endpoint_indices[j]
+
+                     feat_i = kernel_features[idx_i]
+                     feat_j = kernel_features[idx_j]
+
+                     label_i = endpoint_to_label[idx_i]
+                     label_j = endpoint_to_label[idx_j]
+
+                     root_i = find_root(label_i)
+                     root_j = find_root(label_j)
+
+                     # Skip if already unified
+                     if root_i == root_j:
+                         continue
+
+                     # Compute edge features
+                     edge_feat = self.compute_edge_features(feat_i, feat_j)
+
+                     # Score this connection
+                     score = self.score_connection(edge_feat)
+                     #print(score)
+
+                     if verbose and score > -900:
+                         print(f" Pair {idx_i}-{idx_j}: score={score:.2f}, "
+                               f"approach={edge_feat['approach_score']:.2f}, "
+                               f"dir_sim={edge_feat['direction_similarity']:.2f}")
+
+                     # Apply threshold
+                     if score > self.score_thresh and score > best_score:
+                         best_score = score
+                         best_i = idx_i
+                         best_j = idx_j
+
+             # Make the best connection for this vertex
+             if best_i is not None and best_j is not None:
+                 label_i = endpoint_to_label[best_i]
+                 label_j = endpoint_to_label[best_j]
+
+                 root_i = find_root(label_i)
+                 root_j = find_root(label_j)
+
+                 # Unify labels
+                 if root_i < root_j:
+                     label_dict[root_j] = root_i
+                     unified_label = root_i
+                 else:
+                     label_dict[root_i] = root_j
+                     unified_label = root_j
+
+                 if verbose:
+                     feat_i = kernel_features[best_i]
+                     feat_j = kernel_features[best_j]
+                     print(f" ✓ Connected labels {label_i} <-> {label_j} (unified as {unified_label})")
+                     print(f" Score: {best_score:.2f} | Radii: {feat_i['radius']:.1f}, {feat_j['radius']:.1f}")
+
+         return label_dict
+
+     def denoise(self, data, skeleton, labeled_skeleton, verts, verbose=False):
+         """
+         Main pipeline: unify skeleton labels by connecting endpoints at vertices
+         """
+         if verbose:
+             print("Starting skeleton label unification (IMPROVED VERSION)...")
+             print(f"Initial unique labels: {len(np.unique(labeled_skeleton[labeled_skeleton > 0]))}")
+
+         # Compute distance transform
+         if verbose:
+             print("Computing distance transform...")
+         distance_map = sdl.compute_distance_transform_distance(data, fast_dil = True)
+
+         # Extract endpoints and build skeleton graph
+         if verbose:
+             print("Extracting skeleton endpoints and building graph...")
+         kernel_points, skel_graph, coord_to_idx = self.select_kernel_points_topology(data, skeleton)
+
+         if verbose:
+             print(f"Found {len(kernel_points)} endpoints")
+
+         if len(kernel_points) == 0:
+             # No endpoints, return identity mapping
+             unique_labels = np.unique(labeled_skeleton[labeled_skeleton > 0])
+             return {int(label): int(label) for label in unique_labels}
+
+         # Group endpoints by vertex
+         if verbose:
+             print("Grouping endpoints by vertex...")
+         vertex_to_endpoints = self.group_endpoints_by_vertex(kernel_points, verts)
+
+         if verbose:
+             print(f"Found {len(vertex_to_endpoints)} vertices with endpoints")
+             vertices_with_multiple = sum(1 for v in vertex_to_endpoints.values() if len(v) >= 2)
+             print(f" {vertices_with_multiple} vertices have 2+ endpoints (connection candidates)")
+
+         # Extract features for each endpoint
+         if verbose:
+             print("Extracting endpoint features with topology-based directions...")
+
+         # Create reverse mapping: position -> node index in graph
+         skeleton_coords = np.argwhere(skeleton)
+         kernel_features = []
+
+         for pt in kernel_points:
+             # Find this endpoint in the graph
+             pt_tuple = tuple(pt)
+             endpoint_idx = coord_to_idx.get(pt_tuple)
+
+             if endpoint_idx is None:
+                 # Shouldn't happen, but handle gracefully
+                 print(f"Warning: Endpoint {pt} not found in graph")
+                 continue
+
+             feat = self.extract_kernel_features(
+                 skeleton, distance_map, pt, skel_graph, coord_to_idx, endpoint_idx
+             )
+             kernel_features.append(feat)
+
+         # Connect vertices
+         if verbose:
+             print("Connecting endpoints at vertices...")
+         label_dict = self.connect_vertices_across_gaps(
+             kernel_points, kernel_features, labeled_skeleton,
+             vertex_to_endpoints, verbose
+         )
+
+         # Compress label dictionary
+         if verbose:
+             print("\nCompressing label mappings...")
+         for label in list(label_dict.keys()):
+             root = label
+             while label_dict[root] != root:
+                 root = label_dict[root]
+             label_dict[label] = root
+
+         # Count final unified components
+         final_labels = set(label_dict.values())
+         if verbose:
+             print(f"Final unified labels: {len(final_labels)}")
+             print(f"Reduced from {len(label_dict)} to {len(final_labels)} components")
+
+         return label_dict
+
+
+ def trace(data, labeled_skeleton, verts, score_thresh=10, xy_scale=1, z_scale=1,
+           trace_length=10, verbose=False):
+     """
+     Trace and unify skeleton labels using vertex-based endpoint grouping.
+     IMPROVED: Uses topology-based direction calculation.
+
+     Parameters:
+     -----------
+     trace_length : int
+         How many voxels to trace from each endpoint to determine direction
+     """
+     skeleton = (labeled_skeleton > 0).astype(np.uint8)
+
+     # Create denoiser with trace_length parameter
+     denoiser = VesselDenoiser(
+         score_thresh=score_thresh,
+         xy_scale=xy_scale,
+         z_scale=z_scale,
+         trace_length=trace_length
+     )
+
+     # Run label unification
+     label_dict = denoiser.denoise(data, skeleton, labeled_skeleton, verts, verbose=verbose)
+
+     # Apply unified labels
+     max_label = np.max(labeled_skeleton)
+     label_map = np.arange(max_label + 1)
+
+     for old_label, new_label in label_dict.items():
+         label_map[old_label] = new_label
+
+     relabeled_skeleton = label_map[labeled_skeleton]
+
+     return relabeled_skeleton
+
+
+ if __name__ == "__main__":
+     print("Improved branch stitcher with topology-based direction calculation")
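
To make the sign conventions in compute_edge_features and score_connection concrete, here is a small worked example that re-applies the same dot-product formulas to made-up endpoint data (the positions and directions are illustrative assumptions, not values from the package):

import numpy as np

# Two branch tips facing each other across a 5-voxel gap along x ((z, y, x) ordering).
pos_i = np.array([0.0, 0.0, 0.0])
pos_j = np.array([0.0, 0.0, 5.0])
dir_i = np.array([0.0, 0.0, -1.0])   # skeleton i extends away from the gap
dir_j = np.array([0.0, 0.0, 1.0])    # skeleton j extends away from the gap

connection_vec = (pos_j - pos_i) / np.linalg.norm(pos_j - pos_i)

align_i = np.dot(dir_i, connection_vec)        # -1.0: branch i points away from j
align_j = np.dot(dir_j, connection_vec)        # +1.0: branch j points away from i
direction_similarity = np.dot(dir_i, dir_j)    # -1.0: antiparallel, passes the hard rejects

extension_score = -align_i + align_j           # 2.0: the maximal reward term in score_connection
print(align_i, align_j, direction_similarity, extension_score)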
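
Below is a minimal usage sketch of the module's trace() entry point. The input arrays and file names are assumptions for illustration (a binary vessel mask, an integer-labeled skeleton, and an integer-labeled vertex image on the same voxel grid); they are not taken from the package documentation.

import numpy as np
from nettracer3d import branch_stitcher

data = np.load("vessel_mask.npy")                   # binary vessel segmentation (Z, Y, X) - hypothetical file
labeled_skeleton = np.load("skeleton_labels.npy")   # labeled skeleton branches - hypothetical file
verts = np.load("vertex_labels.npy")                # labeled junction/vertex blobs - hypothetical file

# Unify branch labels whose endpoints meet at a shared vertex and whose geometry
# (radius match, antiparallel directions) scores above score_thresh.
relabeled = branch_stitcher.trace(
    data,
    labeled_skeleton,
    verts,
    score_thresh=10,    # default from the source above
    trace_length=10,    # voxels traced from each endpoint to estimate its direction
    verbose=True,
)
print(np.unique(relabeled).size, "labels after stitching")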