nettracer3d 1.1.1__tar.gz → 1.1.6__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release: this version of nettracer3d might be problematic.
- {nettracer3d-1.1.1/src/nettracer3d.egg-info → nettracer3d-1.1.6}/PKG-INFO +3 -3
- {nettracer3d-1.1.1 → nettracer3d-1.1.6}/README.md +2 -2
- {nettracer3d-1.1.1 → nettracer3d-1.1.6}/pyproject.toml +2 -2
- {nettracer3d-1.1.1 → nettracer3d-1.1.6}/src/nettracer3d/morphology.py +9 -4
- {nettracer3d-1.1.1 → nettracer3d-1.1.6}/src/nettracer3d/neighborhoods.py +86 -63
- {nettracer3d-1.1.1 → nettracer3d-1.1.6}/src/nettracer3d/nettracer.py +264 -27
- {nettracer3d-1.1.1 → nettracer3d-1.1.6}/src/nettracer3d/nettracer_gui.py +525 -162
- {nettracer3d-1.1.1 → nettracer3d-1.1.6}/src/nettracer3d/network_draw.py +9 -3
- {nettracer3d-1.1.1 → nettracer3d-1.1.6}/src/nettracer3d/node_draw.py +41 -58
- {nettracer3d-1.1.1 → nettracer3d-1.1.6/src/nettracer3d.egg-info}/PKG-INFO +3 -3
- {nettracer3d-1.1.1 → nettracer3d-1.1.6}/LICENSE +0 -0
- {nettracer3d-1.1.1 → nettracer3d-1.1.6}/setup.cfg +0 -0
- {nettracer3d-1.1.1 → nettracer3d-1.1.6}/src/nettracer3d/__init__.py +0 -0
- {nettracer3d-1.1.1 → nettracer3d-1.1.6}/src/nettracer3d/cellpose_manager.py +0 -0
- {nettracer3d-1.1.1 → nettracer3d-1.1.6}/src/nettracer3d/community_extractor.py +0 -0
- {nettracer3d-1.1.1 → nettracer3d-1.1.6}/src/nettracer3d/excelotron.py +0 -0
- {nettracer3d-1.1.1 → nettracer3d-1.1.6}/src/nettracer3d/modularity.py +0 -0
- {nettracer3d-1.1.1 → nettracer3d-1.1.6}/src/nettracer3d/network_analysis.py +0 -0
- {nettracer3d-1.1.1 → nettracer3d-1.1.6}/src/nettracer3d/painting.py +0 -0
- {nettracer3d-1.1.1 → nettracer3d-1.1.6}/src/nettracer3d/proximity.py +0 -0
- {nettracer3d-1.1.1 → nettracer3d-1.1.6}/src/nettracer3d/run.py +0 -0
- {nettracer3d-1.1.1 → nettracer3d-1.1.6}/src/nettracer3d/segmenter.py +0 -0
- {nettracer3d-1.1.1 → nettracer3d-1.1.6}/src/nettracer3d/segmenter_GPU.py +0 -0
- {nettracer3d-1.1.1 → nettracer3d-1.1.6}/src/nettracer3d/simple_network.py +0 -0
- {nettracer3d-1.1.1 → nettracer3d-1.1.6}/src/nettracer3d/smart_dilate.py +0 -0
- {nettracer3d-1.1.1 → nettracer3d-1.1.6}/src/nettracer3d/stats.py +0 -0
- {nettracer3d-1.1.1 → nettracer3d-1.1.6}/src/nettracer3d.egg-info/SOURCES.txt +0 -0
- {nettracer3d-1.1.1 → nettracer3d-1.1.6}/src/nettracer3d.egg-info/dependency_links.txt +0 -0
- {nettracer3d-1.1.1 → nettracer3d-1.1.6}/src/nettracer3d.egg-info/entry_points.txt +0 -0
- {nettracer3d-1.1.1 → nettracer3d-1.1.6}/src/nettracer3d.egg-info/requires.txt +0 -0
- {nettracer3d-1.1.1 → nettracer3d-1.1.6}/src/nettracer3d.egg-info/top_level.txt +0 -0
PKG-INFO
@@ -1,6 +1,6 @@
  Metadata-Version: 2.4
  Name: nettracer3d
- Version: 1.1.1
+ Version: 1.1.6
  Summary: Scripts for intializing and analyzing networks from segmentations of three dimensional images.
  Author-email: Liam McLaughlin <liamm@wustl.edu>
  Project-URL: Documentation, https://nettracer3d.readthedocs.io/en/latest/
@@ -110,6 +110,6 @@ McLaughlin, L., Zhang, B., Sharma, S. et al. Three dimensional multiscalar neuro

  NetTracer3D was developed by Liam McLaughlin while working under Dr. Sanjay Jain at Washington University School of Medicine.

- -- Version 1.1.
+ -- Version 1.1.6 Updates --

- *
+ * Some adjustments
README.md
@@ -65,6 +65,6 @@ McLaughlin, L., Zhang, B., Sharma, S. et al. Three dimensional multiscalar neuro

  NetTracer3D was developed by Liam McLaughlin while working under Dr. Sanjay Jain at Washington University School of Medicine.

- -- Version 1.1.
+ -- Version 1.1.6 Updates --

- *
+ * Some adjustments
pyproject.toml
@@ -1,6 +1,6 @@
  [project]
  name = "nettracer3d"
- version = "1.1.1"
+ version = "1.1.6"
  authors = [
    { name="Liam McLaughlin", email="liamm@wustl.edu" },
  ]
@@ -37,7 +37,7 @@ classifiers = [
  # GPU options (choose one)
  CUDA11 = ["cupy-cuda11x"]
  CUDA12 = ["cupy-cuda12x"]
- cupy = ["cupy"]
+ cupy = ["cupy"]

  # Features
  cellpose = ["cellpose[GUI]"]
src/nettracer3d/morphology.py
@@ -205,15 +205,20 @@ def quantify_edge_node(nodes, edges, search = 0, xy_scale = 1, z_scale = 1, core

  # Helper methods for counting the lens of skeletons:

- def calculate_skeleton_lengths(skeleton_binary, xy_scale=1.0, z_scale=1.0):
+ def calculate_skeleton_lengths(skeleton_binary, xy_scale=1.0, z_scale=1.0, skeleton_coords = None):
      """
      Calculate total length of all skeletons in a 3D binary image.

      skeleton_binary: 3D boolean array where True = skeleton voxel
      xy_scale, z_scale: physical units per voxel
      """
-
-     skeleton_coords
+
+     if skeleton_coords is None:
+         # Find all skeleton voxels
+         skeleton_coords = np.argwhere(skeleton_binary)
+         shape = skeleton_binary.shape
+     else:
+         shape = skeleton_binary #Very professional stuff

      if len(skeleton_coords) == 0:
          return 0.0
@@ -222,7 +227,7 @@ def calculate_skeleton_lengths(skeleton_binary, xy_scale=1.0, z_scale=1.0):
      coord_to_idx = {tuple(coord): idx for idx, coord in enumerate(skeleton_coords)}

      # Build adjacency graph
-     adjacency_list = build_adjacency_graph(skeleton_coords, coord_to_idx,
+     adjacency_list = build_adjacency_graph(skeleton_coords, coord_to_idx, shape)

      # Calculate lengths using scaled distances
      total_length = calculate_graph_length(skeleton_coords, adjacency_list, xy_scale, z_scale)
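In short, the change above lets callers reuse precomputed skeleton coordinates instead of re-running np.argwhere; in that case the first argument is repurposed to carry the volume shape used for the adjacency graph. Below is a minimal usage sketch assuming the function is imported from nettracer3d.morphology; the import path and the toy array are illustrative assumptions, not taken from the diff.

    import numpy as np
    from nettracer3d import morphology  # assumed import path

    # Toy skeleton: a straight 5-voxel line along the y-axis.
    skeleton = np.zeros((3, 5, 3), dtype=bool)
    skeleton[1, :, 1] = True

    # Default path: coordinates are found internally with np.argwhere.
    length = morphology.calculate_skeleton_lengths(skeleton, xy_scale=0.5, z_scale=1.0)

    # New optional path in 1.1.6: pass precomputed coordinates and, per the
    # else-branch in the hunk above, pass the volume shape in place of the array.
    coords = np.argwhere(skeleton)
    length_again = morphology.calculate_skeleton_lengths(
        skeleton.shape, xy_scale=0.5, z_scale=1.0, skeleton_coords=coords
    )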
src/nettracer3d/neighborhoods.py (file name inferred from the changed-file list; not shown in this extract)
@@ -1128,7 +1128,7 @@ def create_node_heatmap(node_intensity, node_centroids, shape=None, is_3d=True,

  def create_violin_plots(data_dict, graph_title="Violin Plots"):
      """
-     Create violin plots from dictionary data with distinct colors.
+     Create violin plots from dictionary data with distinct colors and IQR lines.

      Parameters:
      data_dict (dict): Dictionary where keys are column headers (strings) and
@@ -1140,110 +1140,133 @@ def create_violin_plots(data_dict, graph_title="Violin Plots"):
          return

      # Prepare data
+     data_dict = dict(sorted(data_dict.items()))
      labels = list(data_dict.keys())
      data_lists = list(data_dict.values())

-     # Generate colors
+     # Generate colors
      try:
-
-         mock_community_dict = {i: i+1 for i in range(len(labels))} # No outliers for simplicity
-
-         # Get distinct colors
-         n_colors = len(labels)
-         colors_rgb = community_extractor.generate_distinct_colors(n_colors)
-
-         # Sort by data size for consistent color assignment (like community sizes)
-         data_sizes = [(i, len(data_lists[i])) for i in range(len(data_lists))]
-         sorted_indices = sorted(data_sizes, key=lambda x: (-x[1], x[0]))
-
-         # Create color mapping
-         colors = []
-         for i, _ in sorted_indices:
-             color_idx = sorted_indices.index((i, _))
-             if color_idx < len(colors_rgb):
-                 # Convert RGB (0-255) to matplotlib format (0-1)
-                 rgb_normalized = tuple(c/255.0 for c in colors_rgb[color_idx])
-                 colors.append(rgb_normalized)
-             else:
-                 colors.append('gray') # Fallback color
-
-         # Reorder colors to match original label order
-         final_colors = ['gray'] * len(labels)
-         for idx, (original_idx, _) in enumerate(sorted_indices):
-             final_colors[original_idx] = colors[idx]
-
+         final_colors = generate_distinct_colors(len(labels))
      except Exception as e:
          print(f"Color generation failed, using default colors: {e}")
-         # Fallback to default matplotlib colors
          final_colors = plt.cm.Set3(np.linspace(0, 1, len(labels)))

-     # Create the plot
      fig, ax = plt.subplots(figsize=(max(8, len(labels) * 1.5), 6))

      # Create violin plots
-     violin_parts = ax.violinplot(
-
+     violin_parts = ax.violinplot(
+         data_lists, positions=range(len(labels)),
+         showmeans=False, showmedians=True, showextrema=True
+     )

-     # Color
+     # Color violins
      for i, pc in enumerate(violin_parts['bodies']):
          if i < len(final_colors):
              pc.set_facecolor(final_colors[i])
              pc.set_alpha(0.7)

-     # Color
+     # Color other violin parts
      for partname in ('cbars', 'cmins', 'cmaxes', 'cmedians'):
          if partname in violin_parts:
              violin_parts[partname].set_edgecolor('black')
              violin_parts[partname].set_linewidth(1)
-
-     # Add data points as scatter plot overlay with much lower transparency
-     """
-     for i, data in enumerate(data_lists):
-         y = data
-         # Add some jitter to x positions for better visibility
-         x = np.random.normal(i, 0.04, size=len(y))
-         ax.scatter(x, y, alpha=0.2, s=15, color='black', edgecolors='none', zorder=3) # No borders, more transparent
-     """

-     #
+     # Set y-limits using percentiles to reduce extreme outlier influence
      all_data = [val for sublist in data_lists for val in sublist]
      if all_data:
-
-
-         y_max = np.percentile(all_data, 95) # 95th percentile
-
-         # Add some padding
+         y_min = np.percentile(all_data, 5)
+         y_max = np.percentile(all_data, 95)
          y_range = y_max - y_min
          y_padding = y_range * 0.15
          ax.set_ylim(y_min - y_padding, y_max + y_padding)

-     # Add IQR and median text annotations
+     # Add IQR and median text annotations and dotted IQR lines
      for i, data in enumerate(data_lists):
          if len(data) > 0:
              q1, median, q3 = np.percentile(data, [25, 50, 75])
              iqr = q3 - q1
+
+             # Add dotted green lines for IQR
+             ax.hlines(
+                 [q1, q3],
+                 i - 0.25, i + 0.25,
+                 colors='green',
+                 linestyles='dotted',
+                 linewidth=1.5,
+                 zorder=3,
+                 label='IQR (25th–75th)' if i == 0 else None # Add label once
+             )

-             #
+             # Text annotation below the violins
              y_min_current = ax.get_ylim()[0]
              y_text = y_min_current - (ax.get_ylim()[1] - ax.get_ylim()[0]) * 0.15
-
-
-
-
+             ax.text(
+                 i, y_text, f'Median: {median:.2f}\nIQR: {iqr:.2f}',
+                 ha='center', fontsize=8,
+                 bbox=dict(boxstyle='round,pad=0.3', facecolor='white', alpha=0.8)
+             )

-     # Customize
+     # Customize appearance
      ax.set_xticks(range(len(labels)))
      ax.set_xticklabels(labels, rotation=45, ha='right')
      ax.set_title(graph_title, fontsize=14, fontweight='bold')
      ax.set_ylabel('Normalized Values (Z-score-like)', fontsize=12)
      ax.grid(True, alpha=0.3)

-     # Add
-     ax.axhline(y=0, color='red', linestyle='--', alpha=0.5, linewidth=1,
-                label='Identity Centerpoint')
+     # Add baseline
+     ax.axhline(y=0, color='red', linestyle='--', alpha=0.5, linewidth=1, label='Identity Centerpoint')
      ax.legend(loc='upper right')

-
-     plt.subplots_adjust(bottom=0.2) # Extra space for bottom text
+     plt.subplots_adjust(bottom=0.2)
      plt.tight_layout()
-     plt.show()
+     plt.show()
+
+     # --- Outlier Detection ---
+     outliers_info = []
+     non_outlier_data = []
+
+     for i, data in enumerate(data_lists):
+         if len(data) > 0:
+             q1, median, q3 = np.percentile(data, [25, 50, 75])
+             iqr = q3 - q1
+             lower_bound = q1 - 1.5 * iqr
+             upper_bound = q3 + 1.5 * iqr
+
+             outliers = [val for val in data if val < lower_bound or val > upper_bound]
+             non_outliers = [val for val in data if lower_bound <= val <= upper_bound]
+
+             outliers_info.append({
+                 'label': labels[i],
+                 'outliers': outliers,
+                 'lower_bound': lower_bound,
+                 'upper_bound': upper_bound,
+                 'total_count': len(data)
+             })
+             non_outlier_data.append(non_outliers)
+         else:
+             outliers_info.append({
+                 'label': labels[i],
+                 'outliers': [],
+                 'lower_bound': None,
+                 'upper_bound': None,
+                 'total_count': 0
+             })
+             non_outlier_data.append([])
+
+     print("\n" + "="*60)
+     print("OUTLIER DETECTION SUMMARY")
+     print("="*60)
+     total_outliers = 0
+     for info in outliers_info:
+         n_outliers = len(info['outliers'])
+         total_outliers += n_outliers
+         if n_outliers > 0:
+             print(f"{info['label']}: {n_outliers} outliers out of {info['total_count']} points "
+                   f"({n_outliers/info['total_count']*100:.1f}%)")
+             print(f"  Outlier Removed Range: [{info['lower_bound']:.2f}, {info['upper_bound']:.2f}]")
+     if total_outliers == 0:
+         print("No outliers detected in any dataset.")
+     else:
+         print(f"\nTotal outliers across all datasets: {total_outliers}")
+     print("="*60 + "\n")
+
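For reference, a minimal sketch of how the reworked plotting helper might be called. The module path (neighborhoods, inferred from the changed-file list) and the sample data are assumptions, not taken from the diff; the function expects a dict mapping labels to lists of numeric values.

    import numpy as np
    from nettracer3d.neighborhoods import create_violin_plots  # assumed location

    rng = np.random.default_rng(0)
    data = {
        "Region A": rng.normal(0.0, 1.0, 200).tolist(),
        "Region B": rng.normal(0.5, 1.5, 150).tolist(),
        "Region C": rng.normal(-0.3, 0.8, 300).tolist(),
    }

    # Draws sorted, colored violins with median/IQR annotations and dotted IQR
    # lines, then prints the new outlier-detection summary to the console.
    create_violin_plots(data, graph_title="Neighborhood comparison")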