nettracer3d 0.7.7__tar.gz → 0.7.9__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.


Files changed (28)
  1. {nettracer3d-0.7.7/src/nettracer3d.egg-info → nettracer3d-0.7.9}/PKG-INFO +3 -3
  2. {nettracer3d-0.7.7 → nettracer3d-0.7.9}/README.md +2 -2
  3. {nettracer3d-0.7.7 → nettracer3d-0.7.9}/pyproject.toml +1 -1
  4. {nettracer3d-0.7.7 → nettracer3d-0.7.9}/src/nettracer3d/excelotron.py +51 -1
  5. {nettracer3d-0.7.7 → nettracer3d-0.7.9}/src/nettracer3d/neighborhoods.py +131 -0
  6. {nettracer3d-0.7.7 → nettracer3d-0.7.9}/src/nettracer3d/nettracer.py +88 -2
  7. {nettracer3d-0.7.7 → nettracer3d-0.7.9}/src/nettracer3d/nettracer_gui.py +484 -127
  8. {nettracer3d-0.7.7 → nettracer3d-0.7.9}/src/nettracer3d/proximity.py +1 -1
  9. {nettracer3d-0.7.7 → nettracer3d-0.7.9/src/nettracer3d.egg-info}/PKG-INFO +3 -3
  10. {nettracer3d-0.7.7 → nettracer3d-0.7.9}/LICENSE +0 -0
  11. {nettracer3d-0.7.7 → nettracer3d-0.7.9}/setup.cfg +0 -0
  12. {nettracer3d-0.7.7 → nettracer3d-0.7.9}/src/nettracer3d/__init__.py +0 -0
  13. {nettracer3d-0.7.7 → nettracer3d-0.7.9}/src/nettracer3d/community_extractor.py +0 -0
  14. {nettracer3d-0.7.7 → nettracer3d-0.7.9}/src/nettracer3d/modularity.py +0 -0
  15. {nettracer3d-0.7.7 → nettracer3d-0.7.9}/src/nettracer3d/morphology.py +0 -0
  16. {nettracer3d-0.7.7 → nettracer3d-0.7.9}/src/nettracer3d/network_analysis.py +0 -0
  17. {nettracer3d-0.7.7 → nettracer3d-0.7.9}/src/nettracer3d/network_draw.py +0 -0
  18. {nettracer3d-0.7.7 → nettracer3d-0.7.9}/src/nettracer3d/node_draw.py +0 -0
  19. {nettracer3d-0.7.7 → nettracer3d-0.7.9}/src/nettracer3d/run.py +0 -0
  20. {nettracer3d-0.7.7 → nettracer3d-0.7.9}/src/nettracer3d/segmenter.py +0 -0
  21. {nettracer3d-0.7.7 → nettracer3d-0.7.9}/src/nettracer3d/segmenter_GPU.py +0 -0
  22. {nettracer3d-0.7.7 → nettracer3d-0.7.9}/src/nettracer3d/simple_network.py +0 -0
  23. {nettracer3d-0.7.7 → nettracer3d-0.7.9}/src/nettracer3d/smart_dilate.py +0 -0
  24. {nettracer3d-0.7.7 → nettracer3d-0.7.9}/src/nettracer3d.egg-info/SOURCES.txt +0 -0
  25. {nettracer3d-0.7.7 → nettracer3d-0.7.9}/src/nettracer3d.egg-info/dependency_links.txt +0 -0
  26. {nettracer3d-0.7.7 → nettracer3d-0.7.9}/src/nettracer3d.egg-info/entry_points.txt +0 -0
  27. {nettracer3d-0.7.7 → nettracer3d-0.7.9}/src/nettracer3d.egg-info/requires.txt +0 -0
  28. {nettracer3d-0.7.7 → nettracer3d-0.7.9}/src/nettracer3d.egg-info/top_level.txt +0 -0
PKG-INFO
@@ -1,6 +1,6 @@
  Metadata-Version: 2.4
  Name: nettracer3d
- Version: 0.7.7
+ Version: 0.7.9
  Summary: Scripts for intializing and analyzing networks from segmentations of three dimensional images.
  Author-email: Liam McLaughlin <liamm@wustl.edu>
  Project-URL: Documentation, https://nettracer3d.readthedocs.io/en/latest/
@@ -73,6 +73,6 @@ NetTracer3D is free to use/fork for academic/nonprofit use so long as citation i

  NetTracer3D was developed by Liam McLaughlin while working under Dr. Sanjay Jain at Washington University School of Medicine.

- -- Version 0.7.7 Updates --
+ -- Version 0.7.9 Updates --

- * See documentation once updated
+ * The GPU segmenter was being imported regardless of GPU status, causing the program to fail without cupy (which should be optional), fixed that.
README.md
@@ -34,6 +34,6 @@ NetTracer3D is free to use/fork for academic/nonprofit use so long as citation i

  NetTracer3D was developed by Liam McLaughlin while working under Dr. Sanjay Jain at Washington University School of Medicine.

- -- Version 0.7.7 Updates --
+ -- Version 0.7.9 Updates --

- * See documentation once updated
+ * The GPU segmenter was being imported regardless of GPU status, causing the program to fail without cupy (which should be optional), fixed that.
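
The changelog line above refers to making cupy optional again. As a rough, hypothetical sketch of the kind of guarded import such a fix implies (not nettracer3d's actual code; only the segmenter / segmenter_GPU module names come from this package, and HAS_GPU / segmenter_backend are illustrative names):

# Hypothetical sketch of a guarded optional GPU import.
try:
    import cupy  # optional GPU dependency
    from nettracer3d import segmenter_GPU as segmenter_backend
    HAS_GPU = True
except ImportError:
    from nettracer3d import segmenter as segmenter_backend  # CPU fallback
    HAS_GPU = False
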
pyproject.toml
@@ -1,6 +1,6 @@
  [project]
  name = "nettracer3d"
- version = "0.7.7"
+ version = "0.7.9"
  authors = [
  { name="Liam McLaughlin", email="liamm@wustl.edu" },
  ]
src/nettracer3d/excelotron.py
@@ -1599,7 +1599,13 @@ class ExcelToDictGUI(QMainWindow):
  remapped_data = self.identity_remap_widget.get_remapped_identities(filtered_data)
  result_dict[key_name] = remapped_data
  elif key_name == 'Numerical IDs':
- # Apply same filtering to Numerical IDs
+
+ # Check if user actually dropped a numerical IDs column
+ if widget_id not in self.dict_columns or 'data' not in self.dict_columns[widget_id]:
+ # Auto-generate sequential IDs and assign to column_data
+ column_data = np.array(list(range(1, len(self.df) + 1)))
+
+ # Now use the exact same logic as if user provided the data
  identity_column_data = None
  # Find the identity column data
  for other_widget_id in self.dict_columns:
@@ -1620,11 +1626,55 @@
  result_dict[key_name] = filtered_numerical_ids
  else:
  result_dict[key_name] = column_data.tolist()
+
+
  else:
  result_dict[key_name] = column_data.tolist()
  else:
  result_dict[key_name] = column_data.tolist()
  break
+
+ for i in range(self.dict_layout.count()):
+ item = self.dict_layout.itemAt(i)
+ if item and item.widget() and hasattr(item.widget(), 'widget_id'):
+ widget = item.widget()
+ widget_id = widget.widget_id
+ key_name = widget.header_input.text().strip()
+
+ # Skip if already processed (has dropped data) or no key name
+ if widget_id in self.dict_columns or not key_name:
+ continue
+
+ # Handle auto-generation for Node Identities template
+ if property_name == 'Node Identities' and key_name == 'Numerical IDs':
+
+ # Find the identity column data
+ identity_column_data = None
+ for other_widget_id in self.dict_columns:
+ for j in range(self.dict_layout.count()):
+ item_j = self.dict_layout.itemAt(j)
+ if item_j and item_j.widget() and hasattr(item_j.widget(), 'widget_id'):
+ if item_j.widget().widget_id == other_widget_id:
+ other_key_name = item_j.widget().header_input.text().strip()
+ if other_key_name == 'Identity Column':
+ identity_column_data = self.dict_columns[other_widget_id]['data']
+ break
+ if identity_column_data is not None:
+ break
+
+ if identity_column_data is not None:
+ # Auto-generate sequential IDs
+ auto_generated_ids = np.array(list(range(1, len(self.df) + 1)))
+
+ filtered_indices = self.identity_remap_widget.get_filtered_indices(identity_column_data.tolist())
+
+ filtered_numerical_ids = [auto_generated_ids[i] for i in filtered_indices]
+
+ result_dict[key_name] = filtered_numerical_ids
+ else:
+ # Fallback: generate sequential IDs for all rows
+ result_dict[key_name] = list(range(1, len(self.df) + 1))
+

  if not result_dict:
  QMessageBox.warning(self, "Warning", "No valid dictionary keys defined")
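
In short, this excelotron.py change auto-generates sequential 1..N "Numerical IDs" when the user never dropped a column for them, then applies the same row filter the Identity Column went through. A minimal standalone sketch of that idea with made-up data (the names below are illustrative, not the GUI's own variables):

import numpy as np

df_len = 5                              # stand-in for len(self.df)
auto_ids = np.arange(1, df_len + 1)     # auto-generated IDs: [1 2 3 4 5]
filtered_indices = [0, 2, 4]            # example rows kept by the identity filter
filtered_ids = [int(auto_ids[i]) for i in filtered_indices]
print(filtered_ids)                     # [1, 3, 5]
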
src/nettracer3d/neighborhoods.py
@@ -202,6 +202,137 @@ def visualize_cluster_composition_umap(cluster_data: Dict[int, np.ndarray],

  return embedding

+ def create_community_heatmap(community_intensity, node_community, node_centroids, is_3d=True,
+ figsize=(12, 8), point_size=50, alpha=0.7, colorbar_label="Community Intensity"):
+ """
+ Create a 2D or 3D heatmap showing nodes colored by their community intensities.
+
+ Parameters:
+ -----------
+ community_intensity : dict
+ Dictionary mapping community IDs to intensity values
+ Keys can be np.int64 or regular ints
+
+ node_community : dict
+ Dictionary mapping node IDs to community IDs
+
+ node_centroids : dict
+ Dictionary mapping node IDs to centroids
+ Centroids should be [Z, Y, X] for 3D or [1, Y, X] for pseudo-3D
+
+ is_3d : bool, default=True
+ If True, create 3D plot. If False, create 2D plot.
+
+ figsize : tuple, default=(12, 8)
+ Figure size (width, height)
+
+ point_size : int, default=50
+ Size of scatter plot points
+
+ alpha : float, default=0.7
+ Transparency of points (0-1)
+
+ colorbar_label : str, default="Community Intensity"
+ Label for the colorbar
+
+ Returns:
+ --------
+ fig, ax : matplotlib figure and axis objects
+ """
+
+ # Convert numpy int64 keys to regular ints for consistency
+ community_intensity_clean = {}
+ for k, v in community_intensity.items():
+ if hasattr(k, 'item'): # numpy scalar
+ community_intensity_clean[k.item()] = v
+ else:
+ community_intensity_clean[k] = v
+
+ # Prepare data for plotting
+ node_positions = []
+ node_intensities = []
+
+ for node_id, centroid in node_centroids.items():
+ try:
+ # Get community for this node
+ community_id = node_community[node_id]
+
+ # Convert community_id to regular int if it's numpy
+ if hasattr(community_id, 'item'):
+ community_id = community_id.item()
+
+ # Get intensity for this community
+ intensity = community_intensity_clean[community_id]
+
+ node_positions.append(centroid)
+ node_intensities.append(intensity)
+ except:
+ pass
+
+ # Convert to numpy arrays
+ positions = np.array(node_positions)
+ intensities = np.array(node_intensities)
+
+ # Determine min and max intensities for color scaling
+ min_intensity = np.min(intensities)
+ max_intensity = np.max(intensities)
+
+ # Create figure
+ fig = plt.figure(figsize=figsize)
+
+ if is_3d:
+ # 3D plot
+ ax = fig.add_subplot(111, projection='3d')
+
+ # Extract coordinates (assuming [Z, Y, X] format)
+ z_coords = positions[:, 0]
+ y_coords = positions[:, 1]
+ x_coords = positions[:, 2]
+
+ # Create scatter plot
+ scatter = ax.scatter(x_coords, y_coords, z_coords,
+ c=intensities, s=point_size, alpha=alpha,
+ cmap='RdBu_r', vmin=min_intensity, vmax=max_intensity)
+
+ ax.set_xlabel('X')
+ ax.set_ylabel('Y')
+ ax.set_zlabel('Z')
+ ax.set_title('3D Community Intensity Heatmap')
+
+ else:
+ # 2D plot (using Y, X coordinates, ignoring Z/first dimension)
+ ax = fig.add_subplot(111)
+
+ # Extract Y, X coordinates
+ y_coords = positions[:, 1]
+ x_coords = positions[:, 2]
+
+ # Create scatter plot
+ scatter = ax.scatter(x_coords, y_coords,
+ c=intensities, s=point_size, alpha=alpha,
+ cmap='RdBu_r', vmin=min_intensity, vmax=max_intensity)
+
+ ax.set_xlabel('X')
+ ax.set_ylabel('Y')
+ ax.set_title('2D Community Intensity Heatmap')
+ ax.grid(True, alpha=0.3)
+
+ # Set origin to top-left (invert Y-axis)
+ ax.invert_yaxis()
+
+ # Add colorbar
+ cbar = plt.colorbar(scatter, ax=ax, shrink=0.8)
+ cbar.set_label(colorbar_label)
+
+ # Add text annotations for min/max values
+ cbar.ax.text(1.05, 0, f'Min: {min_intensity:.3f}\n(Blue)',
+ transform=cbar.ax.transAxes, va='bottom')
+ cbar.ax.text(1.05, 1, f'Max: {max_intensity:.3f}\n(Red)',
+ transform=cbar.ax.transAxes, va='top')
+
+ plt.tight_layout()
+ plt.show()
+


  # Example usage:
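
For context, a call of the new neighborhoods.create_community_heatmap with the inputs its docstring describes might look like the sketch below (assuming nettracer3d and its plotting dependencies are installed); the dictionaries are made-up example data, not package output:

from nettracer3d import neighborhoods

community_intensity = {1: 0.8, 2: -0.3}                                # intensity per community
node_community = {10: 1, 11: 1, 12: 2}                                 # community per node
node_centroids = {10: [4, 20, 35], 11: [5, 22, 30], 12: [9, 50, 12]}   # [Z, Y, X] per node

neighborhoods.create_community_heatmap(
    community_intensity, node_community, node_centroids,
    is_3d=True, point_size=80, colorbar_label="log density",
)
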
src/nettracer3d/nettracer.py
@@ -3831,6 +3831,41 @@ class Network_3D:
  self._nodes = self._nodes.astype(np.uint16)


+ def com_by_size(self):
+ """Reassign communities based on size, starting with 1 for largest."""
+
+ from collections import Counter
+
+ # Convert all community values to regular ints (handles numpy scalars)
+ clean_communities = {
+ node: comm.item() if hasattr(comm, 'item') else comm
+ for node, comm in self.communities.items()
+ }
+
+ # Count community sizes and create mapping in one go
+ community_sizes = Counter(clean_communities.values())
+
+ # Create old->new mapping: sort by size (desc), then by community ID for ties
+ old_to_new = {
+ old_comm: new_comm
+ for new_comm, (old_comm, _) in enumerate(
+ sorted(community_sizes.items(), key=lambda x: (-x[1], x[0])),
+ start=1
+ )
+ }
+
+ # Apply mapping
+ self.communities = {
+ node: old_to_new[comm]
+ for node, comm in clean_communities.items()
+ }
+
+
+
+
+
+
+
  def com_to_node(self, targets = None):

  def invert_dict(d):
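
The relabeling com_by_size performs can be reproduced with the same Counter-and-sort recipe shown in the hunk above; the node-to-community dictionary below is made-up example data:

from collections import Counter

communities = {10: 7, 11: 7, 12: 7, 13: 9, 14: 3, 15: 3}   # node -> old community id
sizes = Counter(communities.values())                       # {7: 3, 3: 2, 9: 1}
old_to_new = {
    old: new
    for new, (old, _) in enumerate(sorted(sizes.items(), key=lambda x: (-x[1], x[0])), start=1)
}
print({node: old_to_new[c] for node, c in communities.items()})
# {10: 1, 11: 1, 12: 1, 13: 3, 14: 2, 15: 2}  (largest community becomes 1)
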
@@ -4982,7 +5017,7 @@

  return array

- def community_cells(self, size = 32):
+ def community_cells(self, size = 32, xy_scale = 1, z_scale = 1):

  def invert_dict(d):
  inverted = {}
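
The new xy_scale / z_scale parameters let the partition cells be anisotropic; the body (next hunk) computes size_x = int(size * xy_scale) and size_z = int(size * z_scale) and passes a (size_z, size_x, size_x) tuple when they differ. With made-up scale values:

size, xy_scale, z_scale = 32, 1, 0.5
size_x, size_z = int(size * xy_scale), int(size * z_scale)
print(size_x, size_z)   # 32 16 -> partition_objects_into_cells(..., (16, 32, 32)) is used
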
@@ -4991,10 +5026,61 @@
  inverted[value] = key
  return inverted

- com_dict = proximity.partition_objects_into_cells(self.node_centroids, size)
+ size_x = int(size * xy_scale)
+ size_z = int(size * z_scale)
+
+ if size_x == size_z:
+
+ com_dict = proximity.partition_objects_into_cells(self.node_centroids, size_x)
+
+ else:
+
+ com_dict = proximity.partition_objects_into_cells(self.node_centroids, (size_z, size_x, size_x))

  self.communities = invert_dict(com_dict)

+ def community_heatmap(self, num_nodes = None, is3d = True):
+
+ import math
+
+ def invert_dict(d):
+ inverted = {}
+ for key, value in d.items():
+ inverted.setdefault(value, []).append(key)
+ return inverted
+
+ if num_nodes == None:
+
+ try:
+ num_nodes = len(self.network.nodes())
+ except:
+ try:
+ num_nodes = len(self.node_centroids.keys())
+ except:
+ try:
+ num_nodes = len(self.node_identities.keys())
+ except:
+ try:
+ unique = np.unique(self.nodes)
+ num_nodes = len(unique)
+ if unique[0] == 0:
+ num_nodes -= 1
+ except:
+ return
+
+ coms = invert_dict(self.communities)
+
+ rand_dens = num_nodes / len(coms.keys())
+
+ heat_dict = {}
+
+ for com, nodes in coms.items():
+ heat_dict[com] = math.log(len(nodes)/rand_dens)
+
+ from . import neighborhoods
+ neighborhoods.create_community_heatmap(heat_dict, self.communities, self.node_centroids, is_3d=is3d)
+
+ return heat_dict


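
The heat value assigned per community above is the natural log of how much denser that community is than a uniform split (rand_dens = num_nodes / number of communities). A quick worked check with made-up numbers, plus an assumed call against an existing Network_3D object (my_network is hypothetical):

import math

num_nodes, num_communities = 100, 20
rand_dens = num_nodes / num_communities   # 5.0 nodes per community if uniform
print(math.log(10 / rand_dens))           # ~0.693: a 10-node community is twice as dense
print(math.log(2 / rand_dens))            # ~-0.916: a 2-node community is sparser than uniform

# Assumed usage, requires communities and node_centroids to be populated:
# heat = my_network.community_heatmap(is3d=False)
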