nettracer3d 1.2.7__py3-none-any.whl → 1.3.6__py3-none-any.whl
This diff compares the contents of two publicly released versions of the package as they appear in their public registry. It is provided for informational purposes only.
- nettracer3d/branch_stitcher.py +245 -142
- nettracer3d/community_extractor.py +3 -2
- nettracer3d/endpoint_joiner.py +286 -0
- nettracer3d/filaments.py +348 -106
- nettracer3d/histos.py +1182 -0
- nettracer3d/modularity.py +14 -96
- nettracer3d/neighborhoods.py +3 -2
- nettracer3d/nettracer.py +296 -82
- nettracer3d/nettracer_gui.py +2275 -2770
- nettracer3d/network_analysis.py +28 -9
- nettracer3d/network_graph_widget.py +2267 -0
- nettracer3d/painting.py +158 -298
- nettracer3d/segmenter.py +1 -1
- nettracer3d/segmenter_GPU.py +0 -1
- nettracer3d/simple_network.py +4 -4
- nettracer3d/smart_dilate.py +19 -7
- nettracer3d/tutorial.py +77 -26
- {nettracer3d-1.2.7.dist-info → nettracer3d-1.3.6.dist-info}/METADATA +50 -18
- nettracer3d-1.3.6.dist-info/RECORD +32 -0
- {nettracer3d-1.2.7.dist-info → nettracer3d-1.3.6.dist-info}/WHEEL +1 -1
- nettracer3d-1.2.7.dist-info/RECORD +0 -29
- {nettracer3d-1.2.7.dist-info → nettracer3d-1.3.6.dist-info}/entry_points.txt +0 -0
- {nettracer3d-1.2.7.dist-info → nettracer3d-1.3.6.dist-info}/licenses/LICENSE +0 -0
- {nettracer3d-1.2.7.dist-info → nettracer3d-1.3.6.dist-info}/top_level.txt +0 -0
nettracer3d/modularity.py
CHANGED
@@ -6,6 +6,7 @@ import matplotlib.colors as mcolors
import os
from . import network_analysis
from . import simple_network
+from . import nettracer as n3d
import numpy as np
import itertools

@@ -230,9 +231,9 @@ def show_communities_flex(G, master_list, normalized_weights, geo_info=None, geo



-def community_partition(
+def community_partition(G, weighted = False, style = 0, dostats = True, seed = None):

-def calculate_network_stats(G, communities):
+def calculate_network_stats(G, unweighted_G, communities):
"""
Calculate comprehensive network statistics for the graph and its communities.

@@ -283,7 +284,7 @@ def community_partition(master_list, weighted = False, style = 0, dostats = True

# Per-community statistics
for i, com in enumerate(communities):
-subgraph =
+subgraph = unweighted_G.subgraph(com)

# Basic community metrics
stats[f'Community {i+1} Density'] = nx.density(subgraph)
@@ -302,7 +303,7 @@ def community_partition(master_list, weighted = False, style = 0, dostats = True

try:
# Global network metrics
-stats['Global Clustering Coefficient'] = nx.average_clustering(
+stats['Global Clustering Coefficient'] = nx.average_clustering(unweighted_G)
except:
pass
try:
@@ -340,7 +341,7 @@ def community_partition(master_list, weighted = False, style = 0, dostats = True

return stats

-def calculate_louvain_network_stats(G, partition, seed):
+def calculate_louvain_network_stats(G, unweighted_G, partition, seed):
"""
Calculate comprehensive network statistics for the graph using Louvain community detection.

@@ -371,7 +372,7 @@ def community_partition(master_list, weighted = False, style = 0, dostats = True
if len(connected_components) > 1:
for i, component in enumerate(connected_components):
subgraph = G.subgraph(component)
-subgraph_partition = nx.community.louvain_communities(
+subgraph_partition = list(nx.community.louvain_communities(subgraph, seed = seed))
modularity = community.modularity(subgraph, subgraph_partition)
num_nodes = len(component)
stats[f'Modularity of component with {num_nodes} nodes'] = modularity
@@ -396,7 +397,7 @@ def community_partition(master_list, weighted = False, style = 0, dostats = True

# Global network metrics
try:
-stats['Global Clustering Coefficient'] = nx.average_clustering(
+stats['Global Clustering Coefficient'] = nx.average_clustering(unweighted_G)
except:
pass
try:
@@ -435,7 +436,7 @@ def community_partition(master_list, weighted = False, style = 0, dostats = True
try:
# Per-community statistics
for i, com in enumerate(communities):
-subgraph =
+subgraph = unweighted_G.subgraph(com)

# Basic community metrics
stats[f'Community {i+1} Density'] = nx.density(subgraph)
@@ -451,52 +452,16 @@ def community_partition(master_list, weighted = False, style = 0, dostats = True
if nx.is_connected(subgraph):
stats[f'Community {i+1} Avg Path Length'] = nx.average_shortest_path_length(subgraph)
except:
-import traceback
pass

return stats

stats = {}
-
+unweighted_G = G
if weighted:
-G
-edge_weights = get_edge_weights(G)
-
-if style == 1 and weighted:
-G = nx.Graph()
-
-# Find the maximum and minimum edge weights
-max_weight = max(weight for edge, weight in edge_weights.items())
-min_weight = min(weight for edge, weight in edge_weights.items())
-
-if max_weight > 1:
-# Normalize edge weights to the range [0.1, 1.0]
-normalized_weights = {edge: 0.1 + 0.9 * ((weight - min_weight) / (max_weight - min_weight)) for edge, weight in edge_weights.items()}
-else:
-normalized_weights = {edge: 0.1 for edge, weight in edge_weights.items()}
-
-# Add edges to the graph with normalized weights
-for edge, normalized_weight in normalized_weights.items():
-G.add_edge(edge[0], edge[1], weight=normalized_weight)
-
-# Replace Louvain with NetworkX's implementation
-communities = list(nx.community.louvain_communities(G, weight='weight', seed = seed))
-
-# Convert to the same format as community_louvain.best_partition
-output = {}
-for i, com in enumerate(communities):
-for node in com:
-output[node] = i + 1
+G = n3d.convert_to_multigraph(G)

-
-stats = calculate_louvain_network_stats(G, communities, seed)
-
-return output, normalized_weights, stats
-
-elif style == 1:
-edges = list(zip(master_list[0], master_list[1]))
-G = nx.Graph()
-G.add_edges_from(edges)
+if style == 1:

# Louvain with NetworkX's implementation
communities = list(nx.community.louvain_communities(G, seed = seed))
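The hunk above replaces the old in-place rebuilding of a weighted graph with a single conversion step, while keeping an untouched reference for topology statistics. Below is a minimal sketch of that pattern under the assumptions visible in this diff only: n3d is nettracer3d.nettracer (per the import hunk above), convert_to_multigraph is merely assumed to return a graph that Louvain can consume, and partition_sketch is a hypothetical stand-in, not a nettracer3d function.

import networkx as nx
from nettracer3d import nettracer as n3d  # provides convert_to_multigraph, per the import hunk above

def partition_sketch(G, weighted=False, seed=None):
    # Keep the plain graph for density/clustering-style statistics ...
    unweighted_G = G
    # ... and hand a converted copy to community detection when weights matter.
    if weighted:
        G = n3d.convert_to_multigraph(G)
    communities = list(nx.community.louvain_communities(G, seed=seed))
    stats = {'Global Clustering Coefficient': nx.average_clustering(unweighted_G)}
    return communities, stats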
@@ -508,60 +473,13 @@ def community_partition(master_list, weighted = False, style = 0, dostats = True
output[node] = i + 1

if dostats:
-stats = calculate_louvain_network_stats(G, communities, seed)
+stats = calculate_louvain_network_stats(G, unweighted_G, communities, seed)

return output, None, stats

-elif style == 0 and weighted:
-
-G = nx.Graph()
-
-# Find the maximum and minimum edge weights
-max_weight = max(weight for edge, weight in edge_weights.items())
-min_weight = min(weight for edge, weight in edge_weights.items())
-
-if max_weight > 1:
-# Normalize edge weights to the range [0.1, 1.0]
-normalized_weights = {edge: 0.1 + 0.9 * ((weight - min_weight) / (max_weight - min_weight)) for edge, weight in edge_weights.items()}
-else:
-normalized_weights = {edge: 0.1 for edge, weight in edge_weights.items()}
-
-# Add edges to the graph with normalized weights
-for edge, normalized_weight in normalized_weights.items():
-G.add_edge(edge[0], edge[1], weight=normalized_weight)
-
-if seed is not None:
-import random
-import numpy as np
-# Set seeds
-random.seed(seed)
-np.random.seed(seed)
-
-# Detect communities using label propagation
-communities = list(community.label_propagation_communities(G))
-output = {}
-for i, com in enumerate(communities):
-for node in com:
-output[node] = i + 1
-
-if dostats:
-
-stats = calculate_network_stats(G, communities)
-
-return output, normalized_weights, stats
-
elif style == 0:


-edges = list(zip(master_list[0], master_list[1]))
-
-# Create a graph
-G = nx.Graph()
-
-# Add edges from the DataFrame
-G.add_edges_from(edges)
-
-
# Detect communities using label propagation

if seed is not None:
@@ -579,7 +497,7 @@ def community_partition(master_list, weighted = False, style = 0, dostats = True

if dostats:

-stats = calculate_network_stats(G, communities)
+stats = calculate_network_stats(G, unweighted_G, communities)

return output, None, stats

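Taken together, the modularity.py hunks change the public call shape: community_partition now receives a prebuilt networkx graph rather than a master_list of paired node columns, and the stats helpers take the untouched graph as an extra argument. The following usage sketch is inferred from the signature and return statements above, not from nettracer3d documentation; the example graph is invented.

import networkx as nx
from nettracer3d import modularity

G = nx.Graph()
G.add_edges_from([(1, 2), (2, 3), (3, 1), (4, 5), (5, 6)])

# style=1 -> Louvain, style=0 -> label propagation, per the branches above
partition, _, stats = modularity.community_partition(G, weighted=False, style=1, dostats=True, seed=42)
print(partition)                                   # {node: community id, starting at 1}
print(stats.get('Global Clustering Coefficient'))  # one of the stats keys seen in the hunks above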
nettracer3d/neighborhoods.py
CHANGED
@@ -10,6 +10,7 @@ from sklearn.neighbors import NearestNeighbors
import matplotlib.colors as mcolors
from collections import Counter
from . import community_extractor
+import random


import os
@@ -418,7 +419,7 @@ def visualize_cluster_composition_umap(cluster_data: Dict[int, np.ndarray],
non_outlier_neighborhoods = {node: neighborhood for node, neighborhood in original_communities.items() if neighborhood != 0}

# Get neighborhoods excluding outliers
-unique_neighborhoods = set(non_outlier_neighborhoods.values()) if non_outlier_neighborhoods else
+unique_neighborhoods = sorted(set(non_outlier_neighborhoods.values())) if non_outlier_neighborhoods else list()

# Generate colors for non-outlier neighborhoods only (same as assign_community_colors)
colors = community_extractor.generate_distinct_colors(len(unique_neighborhoods)) if unique_neighborhoods else []
@@ -427,7 +428,7 @@ def visualize_cluster_composition_umap(cluster_data: Dict[int, np.ndarray],
# Use the ORIGINAL node counts from original_communities
if non_outlier_neighborhoods:
neighborhood_sizes = Counter(non_outlier_neighborhoods.values())
-sorted_neighborhoods =
+sorted_neighborhoods = random.Random(42).sample(list(unique_neighborhoods), len(unique_neighborhoods))
neighborhood_to_color = {neighborhood: colors[i] for i, neighborhood in enumerate(sorted_neighborhoods)}
else:
neighborhood_to_color = {}
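Both neighborhoods.py hunks target run-to-run reproducibility of the color assignment: the unique neighborhood IDs are materialized in sorted order and then shuffled with a fixed-seed random.Random(42) instead of relying on set iteration order. A standalone sketch of that ordering logic follows; the neighborhood data and the RGB tuples standing in for generate_distinct_colors are invented for illustration.

import random

non_outlier_neighborhoods = {"cell_1": 2, "cell_2": 1, "cell_3": 2, "cell_4": 3}

unique_neighborhoods = sorted(set(non_outlier_neighborhoods.values()))  # [1, 2, 3], stable order
colors = [(1.0, 0.0, 0.0), (0.0, 1.0, 0.0), (0.0, 0.0, 1.0)]            # stand-in palette

# Seeded shuffle: identical color mapping on every run, unlike iterating a raw set
sorted_neighborhoods = random.Random(42).sample(unique_neighborhoods, len(unique_neighborhoods))
neighborhood_to_color = {n: colors[i] for i, n in enumerate(sorted_neighborhoods)}
print(neighborhood_to_color)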