nettracer3d 1.3.1__py3-none-any.whl → 1.3.6__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release: this version of nettracer3d might be problematic.
- nettracer3d/community_extractor.py +3 -2
- nettracer3d/endpoint_joiner.py +286 -0
- nettracer3d/filaments.py +348 -106
- nettracer3d/histos.py +1182 -0
- nettracer3d/modularity.py +14 -96
- nettracer3d/neighborhoods.py +3 -2
- nettracer3d/nettracer.py +91 -50
- nettracer3d/nettracer_gui.py +359 -803
- nettracer3d/network_analysis.py +12 -5
- nettracer3d/network_graph_widget.py +302 -101
- nettracer3d/segmenter.py +1 -1
- nettracer3d/segmenter_GPU.py +0 -1
- nettracer3d/tutorial.py +41 -25
- {nettracer3d-1.3.1.dist-info → nettracer3d-1.3.6.dist-info}/METADATA +4 -6
- nettracer3d-1.3.6.dist-info/RECORD +32 -0
- {nettracer3d-1.3.1.dist-info → nettracer3d-1.3.6.dist-info}/WHEEL +1 -1
- nettracer3d-1.3.1.dist-info/RECORD +0 -30
- {nettracer3d-1.3.1.dist-info → nettracer3d-1.3.6.dist-info}/entry_points.txt +0 -0
- {nettracer3d-1.3.1.dist-info → nettracer3d-1.3.6.dist-info}/licenses/LICENSE +0 -0
- {nettracer3d-1.3.1.dist-info → nettracer3d-1.3.6.dist-info}/top_level.txt +0 -0
nettracer3d/modularity.py
CHANGED
@@ -6,6 +6,7 @@ import matplotlib.colors as mcolors
 import os
 from . import network_analysis
 from . import simple_network
+from . import nettracer as n3d
 import numpy as np
 import itertools
 
@@ -230,9 +231,9 @@ def show_communities_flex(G, master_list, normalized_weights, geo_info=None, geo
 
 
 
-def community_partition(master_list, weighted = False, style = 0, dostats = True, seed = None):
+def community_partition(G, weighted = False, style = 0, dostats = True, seed = None):
 
-    def calculate_network_stats(G, communities):
+    def calculate_network_stats(G, unweighted_G, communities):
         """
         Calculate comprehensive network statistics for the graph and its communities.
 
@@ -283,7 +284,7 @@ def community_partition(master_list, weighted = False, style = 0, dostats = True
 
         # Per-community statistics
         for i, com in enumerate(communities):
-            subgraph = G.subgraph(com)
+            subgraph = unweighted_G.subgraph(com)
 
             # Basic community metrics
             stats[f'Community {i+1} Density'] = nx.density(subgraph)
@@ -302,7 +303,7 @@ def community_partition(master_list, weighted = False, style = 0, dostats = True
 
         try:
             # Global network metrics
-            stats['Global Clustering Coefficient'] = nx.average_clustering(G)
+            stats['Global Clustering Coefficient'] = nx.average_clustering(unweighted_G)
         except:
             pass
         try:
@@ -340,7 +341,7 @@ def community_partition(master_list, weighted = False, style = 0, dostats = True
 
         return stats
 
-    def calculate_louvain_network_stats(G, partition, seed):
+    def calculate_louvain_network_stats(G, unweighted_G, partition, seed):
         """
         Calculate comprehensive network statistics for the graph using Louvain community detection.
 
@@ -371,7 +372,7 @@ def community_partition(master_list, weighted = False, style = 0, dostats = True
         if len(connected_components) > 1:
             for i, component in enumerate(connected_components):
                 subgraph = G.subgraph(component)
-                subgraph_partition = nx.community.louvain_communities(subgraph, seed = seed)
+                subgraph_partition = list(nx.community.louvain_communities(subgraph, seed = seed))
                 modularity = community.modularity(subgraph, subgraph_partition)
                 num_nodes = len(component)
                 stats[f'Modularity of component with {num_nodes} nodes'] = modularity
@@ -396,7 +397,7 @@ def community_partition(master_list, weighted = False, style = 0, dostats = True
 
         # Global network metrics
         try:
-            stats['Global Clustering Coefficient'] = nx.average_clustering(G)
+            stats['Global Clustering Coefficient'] = nx.average_clustering(unweighted_G)
         except:
             pass
         try:
@@ -435,7 +436,7 @@ def community_partition(master_list, weighted = False, style = 0, dostats = True
         try:
             # Per-community statistics
             for i, com in enumerate(communities):
-                subgraph = G.subgraph(com)
+                subgraph = unweighted_G.subgraph(com)
 
                 # Basic community metrics
                 stats[f'Community {i+1} Density'] = nx.density(subgraph)
@@ -451,52 +452,16 @@ def community_partition(master_list, weighted = False, style = 0, dostats = True
                 if nx.is_connected(subgraph):
                     stats[f'Community {i+1} Avg Path Length'] = nx.average_shortest_path_length(subgraph)
         except:
-            import traceback
             pass
 
         return stats
 
     stats = {}
-
+    unweighted_G = G
     if weighted:
-        G
-        edge_weights = get_edge_weights(G)
-
-        if style == 1 and weighted:
-            G = nx.Graph()
-
-            # Find the maximum and minimum edge weights
-            max_weight = max(weight for edge, weight in edge_weights.items())
-            min_weight = min(weight for edge, weight in edge_weights.items())
-
-            if max_weight > 1:
-                # Normalize edge weights to the range [0.1, 1.0]
-                normalized_weights = {edge: 0.1 + 0.9 * ((weight - min_weight) / (max_weight - min_weight)) for edge, weight in edge_weights.items()}
-            else:
-                normalized_weights = {edge: 0.1 for edge, weight in edge_weights.items()}
-
-            # Add edges to the graph with normalized weights
-            for edge, normalized_weight in normalized_weights.items():
-                G.add_edge(edge[0], edge[1], weight=normalized_weight)
-
-            # Replace Louvain with NetworkX's implementation
-            communities = list(nx.community.louvain_communities(G, weight='weight', seed = seed))
-
-            # Convert to the same format as community_louvain.best_partition
-            output = {}
-            for i, com in enumerate(communities):
-                for node in com:
-                    output[node] = i + 1
+        G = n3d.convert_to_multigraph(G)
 
-
-        stats = calculate_louvain_network_stats(G, communities, seed)
-
-        return output, normalized_weights, stats
-
-    elif style == 1:
-        edges = list(zip(master_list[0], master_list[1]))
-        G = nx.Graph()
-        G.add_edges_from(edges)
+    if style == 1:
 
         # Louvain with NetworkX's implementation
         communities = list(nx.community.louvain_communities(G, seed = seed))
@@ -508,60 +473,13 @@ def community_partition(master_list, weighted = False, style = 0, dostats = True
                 output[node] = i + 1
 
         if dostats:
-            stats = calculate_louvain_network_stats(G, communities, seed)
+            stats = calculate_louvain_network_stats(G, unweighted_G, communities, seed)
 
         return output, None, stats
 
-    elif style == 0 and weighted:
-
-        G = nx.Graph()
-
-        # Find the maximum and minimum edge weights
-        max_weight = max(weight for edge, weight in edge_weights.items())
-        min_weight = min(weight for edge, weight in edge_weights.items())
-
-        if max_weight > 1:
-            # Normalize edge weights to the range [0.1, 1.0]
-            normalized_weights = {edge: 0.1 + 0.9 * ((weight - min_weight) / (max_weight - min_weight)) for edge, weight in edge_weights.items()}
-        else:
-            normalized_weights = {edge: 0.1 for edge, weight in edge_weights.items()}
-
-        # Add edges to the graph with normalized weights
-        for edge, normalized_weight in normalized_weights.items():
-            G.add_edge(edge[0], edge[1], weight=normalized_weight)
-
-        if seed is not None:
-            import random
-            import numpy as np
-            # Set seeds
-            random.seed(seed)
-            np.random.seed(seed)
-
-        # Detect communities using label propagation
-        communities = list(community.label_propagation_communities(G))
-        output = {}
-        for i, com in enumerate(communities):
-            for node in com:
-                output[node] = i + 1
-
-        if dostats:
-
-            stats = calculate_network_stats(G, communities)
-
-        return output, normalized_weights, stats
-
     elif style == 0:
 
 
-        edges = list(zip(master_list[0], master_list[1]))
-
-        # Create a graph
-        G = nx.Graph()
-
-        # Add edges from the DataFrame
-        G.add_edges_from(edges)
-
-
         # Detect communities using label propagation
 
         if seed is not None:
@@ -579,7 +497,7 @@ def community_partition(master_list, weighted = False, style = 0, dostats = True
 
         if dostats:
 
-            stats = calculate_network_stats(G, communities)
+            stats = calculate_network_stats(G, unweighted_G, communities)
 
         return output, None, stats
 
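Note on this file's changes: community_partition no longer takes a master_list and rebuilds a graph in each branch; it now receives the NetworkX graph directly, keeps an unweighted reference (unweighted_G) for clustering-style metrics, and, when weighted=True, expands edge weights into parallel edges via nettracer.convert_to_multigraph before running Louvain or label propagation. A minimal sketch of the new call pattern, assuming the module layout shown in this diff (the toy graph is illustrative, not from the package):

    import networkx as nx
    from nettracer3d import modularity

    G = nx.Graph()
    G.add_edge(1, 2, weight=3)
    G.add_edge(2, 3, weight=1)

    # style=1 -> Louvain; weighted=True expands weights into parallel edges first
    partition, _, stats = modularity.community_partition(
        G, weighted=True, style=1, dostats=True, seed=42
    )
    print(partition)  # maps each node to a 1-indexed community id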
nettracer3d/neighborhoods.py
CHANGED
@@ -10,6 +10,7 @@ from sklearn.neighbors import NearestNeighbors
 import matplotlib.colors as mcolors
 from collections import Counter
 from . import community_extractor
+import random
 
 
 import os
@@ -418,7 +419,7 @@ def visualize_cluster_composition_umap(cluster_data: Dict[int, np.ndarray],
     non_outlier_neighborhoods = {node: neighborhood for node, neighborhood in original_communities.items() if neighborhood != 0}
 
     # Get neighborhoods excluding outliers
-    unique_neighborhoods = set(non_outlier_neighborhoods.values()) if non_outlier_neighborhoods else
+    unique_neighborhoods = sorted(set(non_outlier_neighborhoods.values())) if non_outlier_neighborhoods else list()
 
     # Generate colors for non-outlier neighborhoods only (same as assign_community_colors)
     colors = community_extractor.generate_distinct_colors(len(unique_neighborhoods)) if unique_neighborhoods else []
@@ -427,7 +428,7 @@ def visualize_cluster_composition_umap(cluster_data: Dict[int, np.ndarray],
     # Use the ORIGINAL node counts from original_communities
     if non_outlier_neighborhoods:
         neighborhood_sizes = Counter(non_outlier_neighborhoods.values())
-        sorted_neighborhoods =
+        sorted_neighborhoods = random.Random(42).sample(list(unique_neighborhoods), len(unique_neighborhoods))
         neighborhood_to_color = {neighborhood: colors[i] for i, neighborhood in enumerate(sorted_neighborhoods)}
     else:
         neighborhood_to_color = {}
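Both edits here replace nondeterministic choices with reproducible ones: neighborhood ids are sorted before colors are generated, and the color ordering comes from a fixed-seed random.Random(42) shuffle, so repeated runs color the UMAP composition plot identically. The pattern in isolation (standalone sketch, not package code):

    import random

    neighborhoods = {3, 1, 2}                  # set iteration order is not guaranteed
    ordered = sorted(neighborhoods)            # [1, 2, 3] on every run
    shuffled = random.Random(42).sample(ordered, len(ordered))  # same permutation every run
    colors = ['red', 'green', 'blue']
    neighborhood_to_color = {n: colors[i] for i, n in enumerate(shuffled)}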
nettracer3d/nettracer.py
CHANGED
@@ -1315,21 +1315,29 @@ def z_project(array3d, method='max'):
     Returns:
         numpy.ndarray: 2D projected array with shape (Y, X)
     """
-    if not isinstance(array3d, np.ndarray):
-        raise ValueError("Input must be a 3D numpy array")
-
-    if method == 'max':
-        return np.max(array3d, axis=0)
-    elif method == 'mean':
-        return np.mean(array3d, axis=0)
-    elif method == 'min':
-        return np.min(array3d, axis=0)
-    elif method == 'sum':
-        return np.sum(array3d, axis=0)
-    elif method == 'std':
-        return np.std(array3d, axis=0)
+    #if not isinstance(array3d, np.ndarray):
+    #    raise ValueError("Input must be a 3D numpy array")
+
+
+    if len(array3d.shape) == 3:
+        if method == 'max':
+            return np.max(array3d, axis=0)
+        elif method == 'mean':
+            return np.mean(array3d, axis=0)
+        elif method == 'min':
+            return np.min(array3d, axis=0)
+        elif method == 'sum':
+            return np.sum(array3d, axis=0)
+        elif method == 'std':
+            return np.std(array3d, axis=0)
+        else:
+            raise ValueError("Method must be one of: 'max', 'mean', 'min', 'sum', 'std'")
     else:
-        raise ValueError("Method must be one of: 'max', 'mean', 'min', 'sum', 'std'")
+        array_list = []
+        for i in range(array3d.shape[-1]):
+            array_list.append(z_project(array3d[:, :, :, i], method = method))
+        return np.stack(array_list, axis=-1)
+
 
 def fill_holes_3d(array, head_on = False, fill_borders = True):
     def process_slice(slice_2d, border_threshold=0.08, fill_borders = True):
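The rewritten z_project disables the strict isinstance check (now commented out) and adds a multichannel branch: a 4D array shaped (Z, Y, X, C) is projected channel by channel through a recursive call over the last axis, then restacked. Usage sketch, assuming the function is importable from nettracer3d.nettracer as in this diff:

    import numpy as np
    from nettracer3d.nettracer import z_project

    vol = np.random.rand(10, 64, 64)          # (Z, Y, X)
    proj = z_project(vol, method='max')       # -> (64, 64)

    multi = np.random.rand(10, 64, 64, 3)     # (Z, Y, X, C)
    proj_c = z_project(multi, method='mean')  # -> (64, 64, 3), one projection per channel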
@@ -1631,24 +1639,6 @@ def hash_inners(search_region, inner_edges, GPU = False):
 
     return inner_edges
 
-def hash_inners_old(search_region, inner_edges, GPU = True):
-    """Internal method used to help sort out inner edge connections. The inner edges of the array will not differentiate between what nodes they contact if those nodes themselves directly touch each other.
-    This method allows these elements to be efficiently seperated from each other. Originally this was implemented using the gaussian blur because i didn't yet realize skimage could do the same more efficiently."""
-
-    print("Performing gaussian blur to hash inner edges.")
-
-    blurred_search = smart_dilate.gaussian(search_region, GPU = GPU)
-
-    borders = binarize((blurred_search - search_region)) #By subtracting the original image from the guassian blurred version, we set all non-border regions to 0
-
-    del blurred_search
-
-    inner_edges = inner_edges * borders #And as a result, we can mask out only 'inner edges' that themselves exist within borders
-
-    inner_edges = dilate_3D_old(inner_edges, 3, 3, 3) #Not sure if dilating is necessary. Want to ensure that the inner edge pieces still overlap with the proper nodes after the masking.
-
-    return inner_edges
-
 
 def dilate_2D(array, search, scaling = 1):
 
@@ -2308,6 +2298,51 @@ def binarize(arrayimage, directory = None):
 
     return arrayimage.astype(np.uint8)
 
+def convert_to_multigraph(G, weight_attr='weight'):
+    """
+    Convert weighted graph to MultiGraph by creating parallel edges.
+
+    Args:
+        G: NetworkX Graph with edge weights representing multiplicity
+        weight_attr: Name of the weight attribute (default: 'weight')
+
+    Returns:
+        MultiGraph with parallel edges instead of weights
+
+    Note:
+        - Weights are rounded to integers
+        - Original node/edge attributes are preserved on first edge
+        - Directed graphs become MultiDiGraphs
+    """
+
+    MG = nx.MultiGraph()
+
+    # Copy nodes with all their attributes
+    MG.add_nodes_from(G.nodes(data=True))
+
+    # Convert weighted edges to multiple parallel edges
+    for u, v, data in G.edges(data=True):
+        # Get weight (default to 1 if missing)
+        weight = data.get(weight_attr, 1)
+
+        # Round to integer for number of parallel edges
+        num_edges = int(round(weight))
+
+        if num_edges < 1:
+            num_edges = 1  # At least one edge
+
+        # Create parallel edges
+        for i in range(num_edges):
+            # First edge gets all the original attributes (except weight)
+            if i == 0:
+                edge_data = {k: v for k, v in data.items() if k != weight_attr}
+                MG.add_edge(u, v, **edge_data)
+            else:
+                # Subsequent parallel edges are simple
+                MG.add_edge(u, v)
+
+    return MG
+
 def dilate(arrayimage, amount, xy_scale = 1, z_scale = 1, directory = None, fast_dil = False, recursive = False, dilate_xy = None, dilate_z = None):
     """
     Can be used to dilate a binary image in 3D. Dilated output will be saved to the active directory if none is specified. Note that dilation is done with single-instance kernels and not iterations, and therefore
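convert_to_multigraph is the new helper the modularity rewrite depends on: each edge weight is rounded to an integer n (minimum 1) and replaced by n parallel edges in an nx.MultiGraph, so weight-agnostic community algorithms see multiplicity as extra edges. A short demonstration of the documented behavior (toy graph, not package data):

    import networkx as nx
    from nettracer3d.nettracer import convert_to_multigraph

    G = nx.Graph()
    G.add_edge('a', 'b', weight=3)
    G.add_edge('b', 'c')                 # missing weight defaults to 1

    MG = convert_to_multigraph(G)
    print(MG.number_of_edges('a', 'b'))  # 3 parallel edges
    print(MG.number_of_edges('b', 'c'))  # 1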
@@ -4261,27 +4296,30 @@ class Network_3D:
         else:
             outer_edges = dilate_3D_old(outer_edges)
 
-        labelled_edges, num_edge = ndimage.label(outer_edges)
+        #labelled_edges, num_edge = ndimage.label(outer_edges)
 
-
-        inner_edges = hash_inners(self._search_region, binary_edges, GPU = GPU)
+        inner_edges = hash_inners(self._search_region, binary_edges, GPU = GPU)
 
-
+        del binary_edges
+
+        outer_edges = (inner_edges > 0) | (outer_edges > 0)
 
-        inner_labels, num_edge = ndimage.label(inner_edges)
+        #inner_labels, num_edge = ndimage.label(inner_edges)
 
-
+        del inner_edges
 
-
+        outer_edges, num_edge = ndimage.label(outer_edges)
 
-
+        #labelled_edges = combine_edges(labelled_edges, inner_labels)
 
-        if num_edge < 256:
-            labelled_edges = labelled_edges.astype(np.uint8)
-        elif num_edge < 65536:
-            labelled_edges = labelled_edges.astype(np.uint16)
+        #num_edge = np.max(labelled_edges)
 
-
+        #if num_edge < 256:
+        #    labelled_edges = labelled_edges.astype(np.uint8)
+        #elif num_edge < 65536:
+        #    labelled_edges = labelled_edges.astype(np.uint16)
+
+        self._edges = outer_edges
 
     def label_nodes(self):
         """
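This hunk simplifies edge labeling in Network_3D: rather than labeling outer and inner edges separately and then combining the results (the now commented-out path), the inner and outer binary masks are OR-ed into one mask, intermediates are freed with del, and scipy's ndimage.label runs once; the dtype downcasting block is likewise commented out. The merged-labeling pattern in isolation (toy arrays, not package data):

    import numpy as np
    from scipy import ndimage

    inner = np.zeros((5, 5, 5), dtype=np.uint8)
    outer = np.zeros((5, 5, 5), dtype=np.uint8)
    inner[1, 1, 1] = 1
    outer[3, 3, 3] = 1

    merged = (inner > 0) | (outer > 0)         # single boolean mask
    labeled, num_edge = ndimage.label(merged)  # one labeling pass
    print(num_edge)                            # 2 separate components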
@@ -4748,7 +4786,7 @@ class Network_3D:
         Sets the communities attribute by splitting the network into communities
         """
 
-        self._communities, self.normalized_weights, stats = modularity.community_partition(self._network_lists, weighted = weighted, style = style, dostats = dostats, seed = seed)
+        self._communities, self.normalized_weights, stats = modularity.community_partition(self._network, weighted = weighted, style = style, dostats = dostats, seed = seed)
 
         return stats
 
@@ -4764,6 +4802,8 @@ class Network_3D:
         self._network = network_analysis.open_network(self._network_lists)
 
 
+
+
     def rescale(self, array, directory = None):
         """
         Scale a downsampled overlay or extracted image object back to the size that is present in either a Network_3D's node or edge properties.
@@ -5530,7 +5570,8 @@ class Network_3D:
         Returns:
             dict: Dictionary containing various network statistics
         """
-        G = self._network
+        G_unweighted = self._network
+        G = convert_to_multigraph(self._network)
         stats = {}
 
         # Basic graph properties
@@ -5561,13 +5602,13 @@ class Network_3D:
         try:
             stats['avg_betweenness_centrality'] = np.mean(list(nx.betweenness_centrality(G).values()))
             stats['avg_closeness_centrality'] = np.mean(list(nx.closeness_centrality(G).values()))
-            stats['avg_eigenvector_centrality'] = np.mean(list(nx.eigenvector_centrality(G, max_iter=1000).values()))
+            stats['avg_eigenvector_centrality'] = np.mean(list(nx.eigenvector_centrality(G_unweighted, max_iter=1000).values()))
         except:
             stats['centrality_measures'] = "Failed to compute - graph might be too large or disconnected"
 
         # Clustering and transitivity
-        stats['avg_clustering_coefficient'] = nx.average_clustering(G)
-        stats['transitivity'] = nx.transitivity(G)
+        stats['avg_clustering_coefficient'] = nx.average_clustering(G_unweighted)
+        stats['transitivity'] = nx.transitivity(G_unweighted)
 
         # Path lengths
         if nx.is_connected(G):
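The statistics method now keeps two views of the network: G_unweighted, the original simple graph, for measures NetworkX does not implement on multigraphs (eigenvector centrality, average clustering, transitivity), and G, the convert_to_multigraph expansion, for degree- and path-based measures where parallel edges should count. A minimal illustration of the split (toy graph; attribute names follow the diff):

    import networkx as nx
    from nettracer3d.nettracer import convert_to_multigraph

    net = nx.Graph()
    net.add_edge(1, 2, weight=2)
    net.add_edge(2, 3, weight=1)

    G_unweighted = net                   # simple graph: clustering, transitivity, eigenvector
    G = convert_to_multigraph(net)       # multigraph: parallel edges inflate degrees

    print(nx.transitivity(G_unweighted))                         # defined for simple graphs
    print(sum(d for _, d in G.degree()) / G.number_of_nodes())   # avg degree counts parallel edges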