sgtlib 3.3.9__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- StructuralGT/__init__.py +31 -0
- StructuralGT/apps/__init__.py +0 -0
- StructuralGT/apps/cli_main.py +258 -0
- StructuralGT/apps/gui_main.py +69 -0
- StructuralGT/apps/gui_mcw/__init__.py +0 -0
- StructuralGT/apps/gui_mcw/checkbox_model.py +91 -0
- StructuralGT/apps/gui_mcw/controller.py +1073 -0
- StructuralGT/apps/gui_mcw/image_provider.py +74 -0
- StructuralGT/apps/gui_mcw/imagegrid_model.py +75 -0
- StructuralGT/apps/gui_mcw/qthread_worker.py +102 -0
- StructuralGT/apps/gui_mcw/table_model.py +79 -0
- StructuralGT/apps/gui_mcw/tree_model.py +154 -0
- StructuralGT/apps/sgt_qml/CenterMainContent.qml +19 -0
- StructuralGT/apps/sgt_qml/LeftContent.qml +48 -0
- StructuralGT/apps/sgt_qml/MainWindow.qml +762 -0
- StructuralGT/apps/sgt_qml/RightLoggingPanel.qml +125 -0
- StructuralGT/apps/sgt_qml/assets/icons/.DS_Store +0 -0
- StructuralGT/apps/sgt_qml/assets/icons/back_icon.png +0 -0
- StructuralGT/apps/sgt_qml/assets/icons/brightness_icon.png +0 -0
- StructuralGT/apps/sgt_qml/assets/icons/cancel_icon.png +0 -0
- StructuralGT/apps/sgt_qml/assets/icons/crop_icon.png +0 -0
- StructuralGT/apps/sgt_qml/assets/icons/edit_icon.png +0 -0
- StructuralGT/apps/sgt_qml/assets/icons/graph_icon.png +0 -0
- StructuralGT/apps/sgt_qml/assets/icons/hide_panel.png +0 -0
- StructuralGT/apps/sgt_qml/assets/icons/next_icon.png +0 -0
- StructuralGT/apps/sgt_qml/assets/icons/notify_icon.png +0 -0
- StructuralGT/apps/sgt_qml/assets/icons/rescale_icon.png +0 -0
- StructuralGT/apps/sgt_qml/assets/icons/show_panel.png +0 -0
- StructuralGT/apps/sgt_qml/assets/icons/square_icon.png +0 -0
- StructuralGT/apps/sgt_qml/assets/icons/undo_icon.png +0 -0
- StructuralGT/apps/sgt_qml/components/ImageFilters.qml +82 -0
- StructuralGT/apps/sgt_qml/components/ImageProperties.qml +112 -0
- StructuralGT/apps/sgt_qml/components/ProjectNav.qml +127 -0
- StructuralGT/apps/sgt_qml/widgets/BinaryFilterWidget.qml +151 -0
- StructuralGT/apps/sgt_qml/widgets/BrightnessControlWidget.qml +103 -0
- StructuralGT/apps/sgt_qml/widgets/CreateProjectWidget.qml +112 -0
- StructuralGT/apps/sgt_qml/widgets/GTWidget.qml +94 -0
- StructuralGT/apps/sgt_qml/widgets/GraphComputeWidget.qml +77 -0
- StructuralGT/apps/sgt_qml/widgets/GraphExtractWidget.qml +175 -0
- StructuralGT/apps/sgt_qml/widgets/GraphPropertyWidget.qml +77 -0
- StructuralGT/apps/sgt_qml/widgets/ImageFilterWidget.qml +137 -0
- StructuralGT/apps/sgt_qml/widgets/ImagePropertyWidget.qml +78 -0
- StructuralGT/apps/sgt_qml/widgets/ImageViewWidget.qml +585 -0
- StructuralGT/apps/sgt_qml/widgets/MenuBarWidget.qml +137 -0
- StructuralGT/apps/sgt_qml/widgets/MicroscopyPropertyWidget.qml +80 -0
- StructuralGT/apps/sgt_qml/widgets/ProjectWidget.qml +141 -0
- StructuralGT/apps/sgt_qml/widgets/RescaleControlWidget.qml +83 -0
- StructuralGT/apps/sgt_qml/widgets/RibbonWidget.qml +406 -0
- StructuralGT/apps/sgt_qml/widgets/StatusBarWidget.qml +173 -0
- StructuralGT/compute/__init__.py +0 -0
- StructuralGT/compute/c_lang/include/sgt_base.h +21 -0
- StructuralGT/compute/graph_analyzer.py +1499 -0
- StructuralGT/entrypoints.py +49 -0
- StructuralGT/imaging/__init__.py +0 -0
- StructuralGT/imaging/base_image.py +403 -0
- StructuralGT/imaging/image_processor.py +780 -0
- StructuralGT/modules.py +29 -0
- StructuralGT/networks/__init__.py +0 -0
- StructuralGT/networks/fiber_network.py +490 -0
- StructuralGT/networks/graph_skeleton.py +425 -0
- StructuralGT/networks/sknw_mod.py +199 -0
- StructuralGT/utils/__init__.py +0 -0
- StructuralGT/utils/config_loader.py +244 -0
- StructuralGT/utils/configs.ini +97 -0
- StructuralGT/utils/progress_update.py +67 -0
- StructuralGT/utils/sgt_utils.py +291 -0
- sgtlib-3.3.9.dist-info/METADATA +789 -0
- sgtlib-3.3.9.dist-info/RECORD +72 -0
- sgtlib-3.3.9.dist-info/WHEEL +5 -0
- sgtlib-3.3.9.dist-info/entry_points.txt +3 -0
- sgtlib-3.3.9.dist-info/licenses/LICENSE +674 -0
- sgtlib-3.3.9.dist-info/top_level.txt +1 -0
@@ -0,0 +1,1499 @@
# SPDX-License-Identifier: GNU GPL v3

"""
Compute graph theory metrics
"""

import os
import cv2
import math
import time
import datetime
import itertools
import logging
import multiprocessing
import numpy as np
import scipy as sp
import pandas as pd
import networkx as nx
import matplotlib.table as tbl
import matplotlib.pyplot as plt
from collections import defaultdict
from statistics import stdev, StatisticsError
from matplotlib.backends.backend_pdf import PdfPages

from networkx.algorithms.centrality import betweenness_centrality, closeness_centrality, eigenvector_centrality
from networkx.algorithms import average_node_connectivity, global_efficiency, clustering
from networkx.algorithms import degree_assortativity_coefficient
from networkx.algorithms.flow import maximum_flow
from networkx.algorithms.distance_measures import diameter, periphery
from networkx.algorithms.wiener import wiener_index

from .c_lang import sgt_c_module as sgt
from ..utils.progress_update import ProgressUpdate
from ..networks.fiber_network import FiberNetworkBuilder
from ..imaging.image_processor import ImageProcessor
from ..utils.config_loader import load_gtc_configs
from ..utils.sgt_utils import get_num_cores, AbortException, write_txt_file

logger = logging.getLogger("SGT App")

# We use the CPU because CuPy still generates errors that are yet to be resolved.
COMPUTING_DEVICE = "CPU"
"""
try:
    import sys
    import cupy as cp

    # Check for a GPU
    test = cp.cuda.Device(0).compute_capability
    # Check for CUDA_PATH in the environment variables
    cuda_path = os.getenv("CUDA_PATH")
    print(cuda_path)
    if cuda_path:
        xp = cp  # Use CuPy for GPU
        COMPUTING_DEVICE = "GPU"
        logging.info("Using GPU with CuPy!", extra={'user': 'SGT Logs'})
    else:
        logging.info(
            "Please add CUDA_PATH to the system environment variables OR install the 'NVIDIA GPU Computing Toolkit'\nvia: https://developer.nvidia.com/cuda-downloads",
            extra={'user': 'SGT Logs'})
        raise ImportError("Please add CUDA_PATH to the system environment variables.")
except (ImportError, NameError, AttributeError):
    xp = np  # Fall back to NumPy for CPU
    logging.info("Using CPU with NumPy!", extra={'user': 'SGT Logs'})
except cp.cuda.runtime.CUDARuntimeError:
    xp = np  # Fall back to NumPy for CPU
    logging.info("Using CPU with NumPy!", extra={'user': 'SGT Logs'})
"""

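# A minimal sketch (not the shipped behavior) of the NumPy/CuPy dispatch that the
# disabled block above aims for; `xp` aliases whichever array module is available:
#
#     try:
#         import cupy as xp  # GPU-backed arrays
#     except ImportError:
#         import numpy as xp  # CPU fallback
#
# Downstream array code would then call `xp.linalg.eig(...)`, etc., unchanged.
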
class GraphAnalyzer(ProgressUpdate):
    """
    A class that computes all the user-selected graph theory metrics and writes the results to a PDF file.

    Args:
        imp: ImageProcessor object.
        allow_multiprocessing: whether to allow multiprocessing computations.
    """

    def __init__(self, imp: ImageProcessor, allow_multiprocessing: bool = True, use_igraph: bool = True):
        """
        A class that computes all the user-selected graph theory metrics and writes the results to a PDF file.

        :param imp: ImageProcessor object.
        :param allow_multiprocessing: Whether to allow multiprocessing computations.
        :param use_igraph: Whether to use the igraph C library module.

        >>> i_path = "path/to/image"
        >>> o_dir = ""
        >>>
        >>> ntwk_obj = ImageProcessor(i_path, o_dir)
        >>> metrics_obj = GraphAnalyzer(ntwk_obj)
        >>> metrics_obj.run_analyzer()
        """
        super().__init__()
        self.configs: dict = load_gtc_configs(imp.config_file)  # graph theory computation parameters and options
        self.props: list = []
        self.allow_mp: bool = allow_multiprocessing
        self.use_igraph: bool = use_igraph
        self.ntwk_p: ImageProcessor = imp
        self.plot_figures: list | None = None
        self.scaling_data = None
        self.output_df: pd.DataFrame | None = None
        self.weighted_output_df: pd.DataFrame | None = None
        self.histogram_data = {"degree_distribution": [0], "clustering_coefficients": [0],
                               "betweenness_distribution": [0], "closeness_distribution": [0],
                               "eigenvector_distribution": [0], "ohms_distribution": [0],
                               "percolation_distribution": [], "weighted_degree_distribution": [0],
                               "weighted_clustering_coefficients": [0], "weighted_betweenness_distribution": [0],
                               "currentflow_distribution": [0], "weighted_closeness_distribution": [0],
                               "weighted_eigenvector_distribution": [0], "weighted_percolation_distribution": [0]}

    def track_img_progress(self, value, msg):
        self.update_status([value, msg])

    def run_analyzer(self):
        """
        Execute the functions that apply the image filters and extract the graph from the processed image.
        """

        # 1. Get the graph extracted from the selected images
        sel_batch = self.ntwk_p.get_selected_batch()
        graph_obj = sel_batch.graph_obj

        # 2. Apply image filters and extract the graph (only if this has not been executed yet)
        if graph_obj.nx_giant_graph is None:
            self.ntwk_p.add_listener(self.track_img_progress)
            self.ntwk_p.apply_img_filters()  # Apply image filters
            self.ntwk_p.build_graph_network()  # Extract the graph from the binary image
            self.ntwk_p.remove_listener(self.track_img_progress)
            self.abort = self.ntwk_p.abort
            if not self.abort:
                self.update_status([100, "Graph successfully extracted!"])
            sel_batch = self.ntwk_p.get_selected_batch()
            graph_obj = sel_batch.graph_obj

        if self.abort:
            return

        # 3a. Compute un-weighted GT parameters
        self.output_df = self.compute_gt_metrics(graph_obj.nx_giant_graph)

        # 3b. Compute scaling scatter plots
        if self.configs["compute_scaling_behavior"]["value"] == 1:
            self.scaling_data = self.compute_scaling_data(full_img_df=self.output_df.copy())

        if self.abort:
            self.update_status([-1, "Problem encountered while computing un-weighted GT parameters."])
            return

        # 4. Compute weighted GT parameters (skipped if the graph is a MultiGraph)
        self.weighted_output_df = self.compute_weighted_gt_metrics(graph_obj)

        if self.abort:
            self.update_status([-1, "Problem encountered while computing weighted GT parameters."])
            return

        # 5. Save the computed GT metrics into props
        self.get_compute_props()

        # 6. Generate the results as a PDF
        self.plot_figures = self.generate_pdf_output(graph_obj)

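    # A hedged usage sketch (not part of the shipped API): `run_analyzer()` leaves the
    # result figures in `self.plot_figures`, and the imported `PdfPages` backend can
    # write them out. The output path below is illustrative only:
    #
    #     metrics_obj = GraphAnalyzer(ntwk_obj)
    #     metrics_obj.run_analyzer()
    #     with PdfPages("sgt_results.pdf") as pdf:
    #         for fig in (metrics_obj.plot_figures or []):
    #             pdf.savefig(fig)
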
    def compute_gt_metrics(self, graph: nx.Graph = None, save_histogram: bool = True):
        """
        Compute un-weighted graph theory metrics.

        :param graph: NetworkX graph object.
        :param save_histogram: Whether to save the histogram data.

        :return: A Pandas DataFrame containing the un-weighted graph theory metrics.
        """

        if graph is None:
            return None

        self.update_status([1, "Performing un-weighted analysis..."])

        opt_gtc = self.configs
        data_dict = {"parameter": [], "value": []}

        node_count = int(nx.number_of_nodes(graph))
        edge_count = int(nx.number_of_edges(graph))

        data_dict["parameter"].append("Number of nodes")
        data_dict["value"].append(node_count)

        data_dict["parameter"].append("Number of edges")
        data_dict["value"].append(edge_count)

        """
        # length of edges
        length_arr = np.array(list(nx.get_edge_attributes(graph, 'length').values()))
        data_dict["parameter"].append('Average length (nm)')
        data_dict["value"].append(round(np.average(length_arr), 3))
        data_dict["parameter"].append('Median length (nm)')
        data_dict["value"].append(round(np.median(length_arr), 3))

        # width of edges
        width_arr = np.array(list(nx.get_edge_attributes(graph, 'width').values()))
        data_dict["parameter"].append('Average width (nm)')
        data_dict["value"].append(round(np.average(width_arr), 3))
        data_dict["parameter"].append('Median width (nm)')
        data_dict["value"].append(round(np.median(width_arr), 3))
        """

        # angle of edges (inbound and outbound)
        angle_arr = np.array(list(nx.get_edge_attributes(graph, 'angle').values()))
        data_dict["parameter"].append('Average edge angle (degrees)')
        data_dict["value"].append(round(np.average(angle_arr), 3))
        data_dict["parameter"].append('Median edge angle (degrees)')
        data_dict["value"].append(round(np.median(angle_arr), 3))

        if graph.number_of_nodes() <= 0:
            self.update_status([-1, "Problem with the graph (change the filter and graph options)."])
            return None

        # creating the degree histogram
        if opt_gtc["display_degree_histogram"]["value"] == 1:
            self.update_status([5, "Computing graph degree..."])
            deg_distribution_1 = dict(nx.degree(graph))
            deg_distribution = np.array(list(deg_distribution_1.values()), dtype=float)
            if save_histogram:
                self.histogram_data["degree_distribution"] = deg_distribution
            data_dict["parameter"].append("Average degree")
            data_dict["value"].append(round(np.average(deg_distribution), 5))

        connected_graph = None
        if (opt_gtc["compute_network_diameter"]["value"] == 1) or (
                opt_gtc["compute_avg_node_connectivity"]["value"] == 1):
            try:
                connected_graph = nx.is_connected(graph)
            except nx.exception.NetworkXPointlessConcept:
                pass

        # calculating the network diameter
        if opt_gtc["compute_network_diameter"]["value"] == 1:
            self.update_status([10, "Computing network diameter..."])
            if connected_graph:
                dia = int(diameter(graph))
            else:
                dia = np.nan
            data_dict["parameter"].append("Network diameter")
            data_dict["value"].append(dia)

        # calculating the average nodal connectivity
        if opt_gtc["compute_avg_node_connectivity"]["value"] == 1:
            if self.abort:
                self.update_status([-1, "Task aborted."])
                return None
            self.update_status([15, "Computing node connectivity..."])
            if connected_graph:
                if self.use_igraph:
                    # use the igraph library in C
                    self.update_status([15, "Using igraph library..."])
                    avg_node_con = self.igraph_average_node_connectivity(graph)
                else:
                    # use the NetworkX library in Python
                    self.update_status([15, "Using NetworkX library..."])
                    if self.allow_mp:  # multiprocessing
                        avg_node_con = self.average_node_connectivity(graph)
                    else:
                        avg_node_con = average_node_connectivity(graph)
                avg_node_con = round(avg_node_con, 5)
            else:
                avg_node_con = np.nan
            data_dict["parameter"].append("Average node connectivity")
            data_dict["value"].append(avg_node_con)

        # calculating the graph density
        if opt_gtc["compute_graph_density"]["value"] == 1:
            self.update_status([20, "Computing graph density..."])
            g_density = nx.density(graph)
            g_density = round(g_density, 5)
            data_dict["parameter"].append("Graph density")
            data_dict["value"].append(g_density)

        # calculating the global efficiency
        if opt_gtc["compute_global_efficiency"]["value"] == 1:
            if self.abort:
                self.update_status([-1, "Task aborted."])
                return None
            self.update_status([25, "Computing global efficiency..."])
            g_eff = global_efficiency(graph)
            g_eff = round(g_eff, 5)
            data_dict["parameter"].append("Global efficiency")
            data_dict["value"].append(g_eff)

        if opt_gtc["compute_wiener_index"]["value"] == 1:
            self.update_status([30, "Computing Wiener index..."])
            w_index = wiener_index(graph)
            w_index = round(w_index, 1)
            data_dict["parameter"].append("Wiener Index")
            data_dict["value"].append(w_index)

        # calculating the assortativity coefficient
        if opt_gtc["compute_assortativity_coef"]["value"] == 1:
            self.update_status([35, "Computing assortativity coefficient..."])
            a_coef = degree_assortativity_coefficient(graph)
            a_coef = round(a_coef, 5)
            data_dict["parameter"].append("Assortativity coefficient")
            data_dict["value"].append(a_coef)

        # calculating the clustering coefficients
        if opt_gtc["compute_avg_clustering_coef"]["value"] == 1:
            self.update_status([40, "Computing clustering coefficients..."])
            coefficients_1 = clustering(graph)
            cl_coefficients = np.array(list(coefficients_1.values()), dtype=float)
            if save_histogram:
                self.histogram_data["clustering_coefficients"] = cl_coefficients
            data_dict["parameter"].append("Average clustering coefficient")
            data_dict["value"].append(round(np.average(cl_coefficients), 5))

        # calculating the betweenness centrality histogram
        if opt_gtc["display_betweenness_centrality_histogram"]["value"] == 1:
            self.update_status([45, "Computing betweenness centrality..."])
            b_distribution_1 = betweenness_centrality(graph)
            b_distribution = np.array(list(b_distribution_1.values()), dtype=float)
            if save_histogram:
                self.histogram_data["betweenness_distribution"] = b_distribution
            data_dict["parameter"].append("Average betweenness centrality")
            data_dict["value"].append(round(np.average(b_distribution), 5))

        # calculating the eigenvector centrality
        if opt_gtc["display_eigenvector_centrality_histogram"]["value"] == 1:
            self.update_status([50, "Computing eigenvector centrality..."])
            try:
                e_vecs_1 = eigenvector_centrality(graph, max_iter=100)
            except nx.exception.PowerIterationFailedConvergence:
                e_vecs_1 = eigenvector_centrality(graph, max_iter=10000)
            e_vecs = np.array(list(e_vecs_1.values()), dtype=float)
            if save_histogram:
                self.histogram_data["eigenvector_distribution"] = e_vecs
            data_dict["parameter"].append("Average eigenvector centrality")
            data_dict["value"].append(round(np.average(e_vecs), 5))

        # calculating the closeness centrality
        if opt_gtc["display_closeness_centrality_histogram"]["value"] == 1:
            self.update_status([55, "Computing closeness centrality..."])
            close_distribution_1 = closeness_centrality(graph)
            close_distribution = np.array(list(close_distribution_1.values()), dtype=float)
            if save_histogram:
                self.histogram_data["closeness_distribution"] = close_distribution
            data_dict["parameter"].append("Average closeness centrality")
            data_dict["value"].append(round(np.average(close_distribution), 5))

        # calculating the Ohms centrality
        if opt_gtc["display_ohms_histogram"]["value"] == 1:
            self.update_status([60, "Computing Ohms centrality..."])
            o_distribution_1, res = self.compute_ohms_centrality(graph)
            o_distribution = np.array(list(o_distribution_1.values()), dtype=float)
            if save_histogram:
                self.histogram_data["ohms_distribution"] = o_distribution
            data_dict["parameter"].append("Average Ohms centrality")
            data_dict["value"].append(round(np.average(o_distribution), 5))
            data_dict["parameter"].append("Ohms centrality -- avg. area " + r"($m^2$)")
            data_dict["value"].append(round(res['avg area'], 5))
            data_dict["parameter"].append("Ohms centrality -- avg. length (m)")
            data_dict["value"].append(round(res['avg length'], 5))
            data_dict["parameter"].append("Ohms centrality -- avg. width (m)")
            data_dict["value"].append(round(res['avg width'], 5))
            data_dict["parameter"].append("Ohms centrality -- g shape coeff.")
            data_dict["value"].append(round(res['g shape'], 5))
            data_dict["parameter"].append("Ohms centrality -- conductivity (S/m)")
            data_dict["value"].append(round(res['conductivity'], 5))

        return pd.DataFrame(data_dict)

    def compute_weighted_gt_metrics(self, graph_obj: FiberNetworkBuilder = None, save_histogram: bool = True):
        """
        Compute weighted graph theory metrics.

        :param graph_obj: FiberNetworkBuilder object.
        :param save_histogram: Whether to save the histogram data.

        :return: A Pandas DataFrame containing the weighted graph theory metrics.
        """
        if graph_obj is None:
            return None

        if not graph_obj.configs["has_weights"]["value"]:
            return None

        self.update_status([70, "Performing weighted analysis..."])

        graph = graph_obj.nx_giant_graph
        opt_gtc = self.configs
        wt_type = graph_obj.get_weight_type()
        weight_type = FiberNetworkBuilder.get_weight_options().get(wt_type)
        data_dict = {"parameter": [], "value": []}

        if graph.number_of_nodes() <= 0:
            self.update_status([-1, "Problem with the graph (change the filter and graph options)."])
            return None

        if opt_gtc["display_degree_histogram"]["value"] == 1:
            self.update_status([72, "Computing weighted graph degree..."])
            deg_distribution_1 = dict(nx.degree(graph, weight='weight'))
            deg_distribution = np.array(list(deg_distribution_1.values()), dtype=float)
            if save_histogram:
                self.histogram_data["weighted_degree_distribution"] = deg_distribution
            data_dict["parameter"].append(f"{weight_type}-weighted average degree")
            data_dict["value"].append(round(np.average(deg_distribution), 5))

        if opt_gtc["compute_wiener_index"]["value"] == 1:
            self.update_status([74, "Computing weighted Wiener index..."])
            w_index = wiener_index(graph, weight='length')
            w_index = round(w_index, 1)
            data_dict["parameter"].append("Length-weighted Wiener Index")
            data_dict["value"].append(w_index)

        if opt_gtc["compute_avg_node_connectivity"]["value"] == 1:
            self.update_status([76, "Computing weighted node connectivity..."])
            connected_graph = nx.is_connected(graph)
            if connected_graph:
                max_flow = float(0)
                p = periphery(graph)
                q = len(p) - 1
                for s in range(0, q - 1):
                    for t in range(s + 1, q):
                        flow_value = maximum_flow(graph, p[s], p[t], capacity='weight')[0]
                        if flow_value > max_flow:
                            max_flow = flow_value
                max_flow = round(max_flow, 5)
            else:
                max_flow = np.nan
            data_dict["parameter"].append("Max flow between periphery")
            data_dict["value"].append(max_flow)

        if opt_gtc["compute_assortativity_coef"]["value"] == 1:
            self.update_status([78, "Computing weighted assortativity..."])
            a_coef = degree_assortativity_coefficient(graph, weight='width')
            a_coef = round(a_coef, 5)
            data_dict["parameter"].append("Width-weighted assortativity coefficient")
            data_dict["value"].append(a_coef)

        if opt_gtc["display_betweenness_centrality_histogram"]["value"] == 1:
            self.update_status([80, "Computing weighted betweenness centrality..."])
            b_distribution_1 = betweenness_centrality(graph, weight='weight')
            b_distribution = np.array(list(b_distribution_1.values()), dtype=float)
            if save_histogram:
                self.histogram_data["weighted_betweenness_distribution"] = b_distribution
            data_dict["parameter"].append(f"{weight_type}-weighted betweenness centrality")
            data_dict["value"].append(round(np.average(b_distribution), 5))

        if opt_gtc["display_closeness_centrality_histogram"]["value"] == 1:
            self.update_status([82, "Computing weighted closeness centrality..."])
            close_distribution_1 = closeness_centrality(graph, distance='length')
            close_distribution = np.array(list(close_distribution_1.values()), dtype=float)
            if save_histogram:
                self.histogram_data["weighted_closeness_distribution"] = close_distribution
            data_dict["parameter"].append("Length-weighted average closeness centrality")
            data_dict["value"].append(round(np.average(close_distribution), 5))

        if opt_gtc["display_eigenvector_centrality_histogram"]["value"] == 1:
            if self.abort:
                self.update_status([-1, "Task aborted."])
                return None
            self.update_status([84, "Computing weighted eigenvector centrality..."])
            try:
                e_vecs_1 = eigenvector_centrality(graph, max_iter=100, weight='weight')
            except nx.exception.PowerIterationFailedConvergence:
                e_vecs_1 = eigenvector_centrality(graph, max_iter=10000, weight='weight')
            e_vecs = np.array(list(e_vecs_1.values()), dtype=float)
            if save_histogram:
                self.histogram_data["weighted_eigenvector_distribution"] = e_vecs
            data_dict["parameter"].append(f"{weight_type}-weighted average eigenvector centrality")
            data_dict["value"].append(round(np.average(e_vecs), 5))

        # calculate the cross-sectional area of edges
        wt_type = graph_obj.get_weight_type()
        if wt_type == 'AREA':
            self.update_status([86, "Computing average (edge) cross-sectional area..."])
            temp_distribution = []
            for (s, e) in graph.edges():
                temp_distribution.append(graph[s][e]['weight'])
            a_distribution = np.array(temp_distribution, dtype=float)
            ae_val = np.average(a_distribution)
            ae_val = round(ae_val, 5)
            data_dict["parameter"].append("Average edge cross-sectional area (nm\u00b2)")
            data_dict["value"].append(ae_val)

        return pd.DataFrame(data_dict)

    def compute_scaling_data(self, full_img_df: pd.DataFrame = None):
        """
        Compute GT metrics on square patches of increasing size so that the scaling
        behaviour of each parameter can be plotted.

        :param full_img_df: GT metrics of the entire image, appended as the largest patch size.
        :return: A nested dict mapping each GT parameter to {patch height: [values]}.
        """
        self.update_status([0, "Computing scaling behaviour..."])
        self.ntwk_p.add_listener(self.track_img_progress)
        # TO-DO: get these values from the configs
        graph_groups = self.ntwk_p.build_graph_from_patches(num_square_filters=10, patch_count_per_filter=10)
        self.ntwk_p.remove_listener(self.track_img_progress)

        sorted_plt_data = defaultdict(lambda: defaultdict(list))
        num_patches = 1
        for (h, w), nx_graphs in graph_groups.items():
            num_patches = len(nx_graphs)
            for nx_graph in nx_graphs:
                temp_df = self.compute_gt_metrics(nx_graph, save_histogram=False)
                if temp_df is None:
                    # Skip the problematic graph
                    continue

                for _, row in temp_df.iterrows():
                    x_param = row["parameter"]
                    y_value = row["value"]
                    if ' edge angle' in x_param:  # Skip this parameter
                        continue
                    sorted_plt_data[x_param][h].append(y_value)

        # Include the computed GT metrics of the entire image
        if full_img_df is not None:
            # Get the full image dimensions
            sel_batch = self.ntwk_p.get_selected_batch()
            h, w = sel_batch.images[0].img_bin.shape
            for _ in range(num_patches):
                for _, row in full_img_df.iterrows():
                    x_param = row["parameter"]
                    y_value = row["value"]
                    sorted_plt_data[x_param][h].append(y_value)
        return sorted_plt_data

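    # The structure returned by `compute_scaling_data` (a sketch for orientation; the
    # patch sizes and values below are illustrative only):
    #
    #     sorted_plt_data["Average degree"] == {64: [v1, v2, ...], 128: [...], ...}
    #
    # i.e. each GT parameter maps patch height -> list of per-patch values, which the
    # scaling plots later fit against patch size.
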
    def compute_ohms_centrality(self, nx_graph: nx.Graph):
        r"""
        Computes the Ohms centrality value for each node, based on the actual pixel width and length of its edges in meters.

        :param nx_graph: NetworkX graph object.

        :return: Ohms centrality distribution and a dict of summary statistics.
        """
        ohms_dict = {}
        lst_area = []
        lst_len = []
        lst_width = []

        sel_batch = self.ntwk_p.get_selected_batch()
        sel_images = self.ntwk_p.get_selected_images(sel_batch)
        px_sizes = np.array([img.configs["pixel_width"]["value"] for img in sel_images])
        rho_dims = np.array([img.configs["resistivity"]["value"] for img in sel_images])

        px_size = float(np.average(px_sizes.astype(float)))
        rho_dim = float(np.average(rho_dims.astype(float)))
        pixel_dim = px_size  # * (10 ** 9) to convert to nanometers
        g_shape = 1

        b_dict = betweenness_centrality(nx_graph)
        lst_nodes = list(nx_graph.nodes())
        for n in lst_nodes:
            # compute the Ohms centrality value for each node
            b_val = float(b_dict[n])
            if b_val == 0:
                ohms_val = 0
            else:
                connected_nodes = nx_graph[n]  # all nodes connected to node n
                arr_len = []
                arr_dia = []
                for idx, val in connected_nodes.items():
                    arr_len.append(val['length'])
                    arr_dia.append(val['width'])
                arr_len = np.array(arr_len, dtype=float)
                arr_dia = np.array(arr_dia, dtype=float)

                pix_width = float(np.average(arr_dia))
                pix_length = np.sum(arr_len)
                length = pix_length * pixel_dim
                width = pix_width * pixel_dim
                # area = math.pi * 89.6 * (width * 0.5) ** 2
                area = g_shape * (width * width)
                ohms_val = ((b_val * length * rho_dim) / area)
                lst_len.append(length)
                lst_area.append(area)
                lst_width.append(width)
            ohms_dict[n] = ohms_val
        avg_area = np.average(np.array(lst_area, dtype=float))
        med_area = np.median(np.array(lst_area, dtype=float))
        avg_len = np.average(np.array(lst_len, dtype=float))
        med_len = np.median(np.array(lst_len, dtype=float))
        avg_width = np.average(np.array(lst_width, dtype=float))
        med_width = np.median(np.array(lst_width, dtype=float))
        res = {
            'avg area': avg_area, 'med area': med_area,
            'avg length': avg_len, 'med length': med_len,
            'avg width': avg_width, 'med width': med_width,
            'g shape': g_shape, 'conductivity': (1 / rho_dim)}

        return ohms_dict, res

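    # How the Ohms centrality above is assembled (Pouillet's law, as used in the loop
    # body): an edge bundle of resistivity rho, length L and cross-section A has
    # resistance R = rho * L / A, with A approximated as g_shape * width**2; the node
    # score then scales that resistance by the node's betweenness:
    #
    #     ohms_val = betweenness * (rho * L) / (g_shape * width**2)
    #
    # so highly trafficked, long, thin pathways score highest.
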
    def average_node_connectivity(self, nx_graph: nx.Graph, flow_func=None):
        r"""Returns the average connectivity of a graph G.

        The average connectivity `\bar{\kappa}` of a graph G is the average
        of the local node connectivity over all pairs of its nodes.

        https://networkx.org/documentation/stable/_modules/networkx/algorithms/connectivity/connectivity.html#average_node_connectivity

        Parameters
        ----------
        :param nx_graph: NetworkX graph object.
        :param flow_func: A function for computing the maximum flow between a pair of nodes.
            The function has to accept at least three parameters: a Digraph,
            a source node, and a target node, and it must return a residual network
            that follows NetworkX conventions (see :meth:`maximum_flow` for
            details). If flow_func is None, the default maximum flow function
            (:meth:`edmonds_karp`) is used. See :meth:`local_node_connectivity`
            for details. The choice of the default function may change from
            version to version and should not be relied on. Default value: None.

        Returns
        -------
        K : float
            Average node connectivity

        References
        ----------
        [1] Beineke, L., O. Oellermann, and R. Pippert (2002). The average
            connectivity of a graph. Discrete Mathematics 252(1-3), 31-45.
            https://www.sciencedirect.com/science/article/pii/S0012365X01001807

        """

        if nx_graph.is_directed():
            iter_func = itertools.permutations
        else:
            iter_func = itertools.combinations

        # Reuse the auxiliary digraph and the residual network
        a_digraph = nx.algorithms.connectivity.build_auxiliary_node_connectivity(nx_graph)
        r_network = nx.algorithms.flow.build_residual_network(a_digraph, "capacity")

        num, den = 0, 0
        with multiprocessing.Pool() as pool:
            items = [(nx_graph, u, v, flow_func, a_digraph, r_network) for u, v in iter_func(nx_graph, 2)]
            for n in pool.starmap(nx.algorithms.connectivity.local_node_connectivity, items):
                num += n
                den += 1
                if self.abort:
                    self.update_status([-1, "Task aborted."])
                    return 0
        if den == 0:
            return 0
        return num / den

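    # Formula implemented by both connectivity helpers (above and below): for an
    # undirected graph with n nodes,
    #
    #     kappa_bar(G) = (1 / C(n, 2)) * sum over node pairs {u, v} of kappa(u, v)
    #
    # where kappa(u, v) is the local node connectivity; the pool version simply
    # distributes the per-pair max-flow computations across worker processes.
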
    def igraph_average_node_connectivity(self, nx_graph: nx.Graph):
        r"""Returns the average connectivity of a graph G.

        The average connectivity of a graph G is the average of the local node
        connectivity over all pairs of the graph's nodes.

        Parameters
        ----------
        :param nx_graph: NetworkX graph object.
        """

        cpu_count = get_num_cores()
        num_threads = cpu_count if nx.number_of_nodes(nx_graph) < 2000 else cpu_count * 2
        anc = 0

        try:
            filename, output_location = self.ntwk_p.get_filenames()
            g_filename = filename + "_graph.txt"
            graph_file = os.path.join(output_location, g_filename)
            nx.write_edgelist(nx_graph, graph_file, data=False)
            anc = sgt.compute_anc(graph_file, num_threads, self.allow_mp)
        except Exception as err:
            logging.exception("Computing ANC Error: %s", err, extra={'user': 'SGT Logs'})
        return anc

    def compute_graph_conductance(self, graph_obj):
        """
        Computes the graph conductance through an approach based on eigenvectors or spectral frequency.
        Implements ideas proposed in: https://doi.org/10.1016/j.procs.2013.09.311.

        Conductance can be closely approximated via eigenvalue computation,
        a fact which has been well known and well used in the graph theory community.

        The Laplacian matrix of a directed graph is by definition generally non-symmetric,
        while, e.g., traditional spectral clustering is primarily developed for undirected
        graphs with symmetric adjacency and Laplacian matrices. A trivial approach to applying the
        techniques requiring symmetry is to turn the original directed graph into an
        undirected graph and build the Laplacian matrix for the latter.

        We need to remove isolated nodes (to avoid a singular adjacency matrix).
        The degree of a node is the number of edges incident to that node;
        when a node has a degree of zero, no edges are connected to it.
        In other words, the node is isolated from the rest of the graph.

        :param graph_obj: FiberNetworkBuilder object.
        """
        self.update_status([101, "Computing graph conductance..."])
        # Make a copy of the graph
        graph = graph_obj.nx_giant_graph.copy()
        weighted = graph_obj.configs["has_weights"]["value"]

        # Note: our graph is (mostly) a directed graph, meaning that it is
        # (asymmetric) with self-looping nodes.

        # 1. Remove self-looping edges from the graph; they cause zero values in the degree matrix.
        # 1a. Get the adjacency matrix
        adj_mat = nx.adjacency_matrix(graph).todense()

        # 1b. Remove (self-loops) non-zero diagonal values in the adjacency matrix
        np.fill_diagonal(adj_mat, 0)

        # 1c. Create the new graph
        giant_graph = nx.from_numpy_array(adj_mat)

        # 2a. Identify isolated nodes
        isolated_nodes = list(nx.isolates(giant_graph))

        # 2b. Remove isolated nodes
        giant_graph.remove_nodes_from(isolated_nodes)

        # 3. Check the connectivity of the cleaned graph and keep its
        #    largest connected component.
        connected_components = list(nx.connected_components(giant_graph))
        if not connected_components:  # In case the graph is empty
            return np.nan, np.nan
        sub_graphs = [giant_graph.subgraph(c).copy() for c in connected_components]

        giant_graph = max(sub_graphs, key=lambda g: g.number_of_nodes())

        # 4. Compute the normalized Laplacian matrix
        if weighted:
            norm_laplacian_matrix = nx.normalized_laplacian_matrix(giant_graph, weight='weight').toarray()
        else:
            norm_laplacian_matrix = nx.normalized_laplacian_matrix(giant_graph).toarray()

        # 5. Compute the eigenvalues
        # e_vals, _ = xp.linalg.eig(norm_laplacian_matrix)  # GPU/CuPy alternative
        e_vals = sp.linalg.eigvals(norm_laplacian_matrix)

        # 6. Approximate the conductance using the 2nd smallest eigenvalue:
        #    compute the minimum and maximum values of the graph conductance.
        sorted_vals = np.array(e_vals.real)
        sorted_vals.sort()
        try:
            # Maximum conductance
            val_max = math.sqrt(2 * sorted_vals[1])
        except ValueError:
            val_max = np.nan
        # Minimum graph conductance
        val_min = sorted_vals[1] / 2

        return val_max, val_min

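    # The bounds returned above are the two sides of the Cheeger inequality for the
    # normalized Laplacian: with lambda_2 its second-smallest eigenvalue,
    #
    #     lambda_2 / 2  <=  h(G)  <=  sqrt(2 * lambda_2)
    #
    # so `val_min` and `val_max` bracket the true conductance h(G) without having to
    # enumerate cuts.
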
    def get_compute_props(self):
        """
        Retrieves the computed graph theory parameters and stores them in a list.

        Returns: a list of computed GT parameters.
        """
        self.props = []
        # 1. Un-weighted parameters
        if self.output_df is None:
            return
        param_df = self.output_df.copy()
        self.props.append(['UN-WEIGHTED', 'PARAMETERS'])
        for _, row in param_df.iterrows():
            x_param = row["parameter"]
            y_value = row["value"]
            self.props.append([x_param, y_value])

        # 2. Weighted parameters
        if self.weighted_output_df is None:
            return
        param_df = self.weighted_output_df.copy()
        self.props.append(['WEIGHTED', 'PARAMETERS'])
        for _, row in param_df.iterrows():
            x_param = row["parameter"]
            y_value = row["value"]
            self.props.append([x_param, y_value])

    def generate_pdf_output(self, graph_obj: FiberNetworkBuilder):
        """
        Generate the results as graphs and plots to be written into a PDF file.

        :param graph_obj: FiberNetworkBuilder object.

        :return: A list of result figures.
        """

        self.update_status([90, "Generating PDF GT output..."])
        opt_gtc = self.configs
        out_figs = []

        sel_batch = self.ntwk_p.get_selected_batch()
        sel_images = self.ntwk_p.get_selected_images(sel_batch)
        img_3d = [img.img_2d for img in sel_images]
        img_3d = np.asarray(img_3d)

        def plot_gt_results():
            """
            Create a table of weighted and un-weighted graph theory results.

            :return: Matplotlib figures of the un-weighted and weighted graph theory results.
            """

            opt_gte = graph_obj.configs
            data = self.output_df
            w_data = self.weighted_output_df

            plt_fig = plt.Figure(figsize=(8.5, 11), dpi=300)
            ax = plt_fig.add_subplot(1, 1, 1)
            ax.set_axis_off()
            ax.set_title("Unweighted GT parameters")
            col_width = [2 / 3, 1 / 3]
            tab_1 = tbl.table(ax, cellText=data.values[:, :], loc='upper center', colWidths=col_width, cellLoc='left')
            tab_1.scale(1, 1.5)

            if opt_gte["has_weights"]["value"] == 1 and w_data is not None:
                plt_fig_wt = plt.Figure(figsize=(8.5, 11), dpi=300)
                ax = plt_fig_wt.add_subplot(1, 1, 1)
                ax.set_axis_off()
                ax.set_title("Weighted GT parameters")
                tab_2 = tbl.table(ax, cellText=w_data.values[:, :], loc='upper center', colWidths=col_width,
                                  cellLoc='left')
                tab_2.scale(1, 1.5)
            else:
                plt_fig_wt = None
            return plt_fig, plt_fig_wt

        def plot_bin_images():
            """
            Create plot figures of the original, processed, and binary images.

            :return: A list of Matplotlib figures.
            """

            plt_figs = []
            is_3d = len(sel_images) > 1

            for i, img in enumerate(sel_images):
                opt_img = img.configs
                raw_img = img.img_2d
                filtered_img = img.img_mod
                img_bin = img.img_bin

                img_histogram = cv2.calcHist([filtered_img], [0], None, [256], [0, 256])

                plt_fig = plt.Figure(figsize=(8.5, 8.5), dpi=400)
                ax_1 = plt_fig.add_subplot(2, 2, 1)
                ax_2 = plt_fig.add_subplot(2, 2, 2)
                ax_3 = plt_fig.add_subplot(2, 2, 3)
                ax_4 = plt_fig.add_subplot(2, 2, 4)

                ax_1.set_title(f"Frame {i}: Original Image" if is_3d else "Original Image")
                ax_1.set_axis_off()
                ax_1.imshow(raw_img, cmap='gray')

                ax_2.set_title(f"Frame {i}: Processed Image" if is_3d else "Processed Image")
                ax_2.set_axis_off()
                ax_2.imshow(filtered_img, cmap='gray')

                ax_3.set_title(f"Frame {i}: Binary Image" if is_3d else "Binary Image")
                ax_3.set_axis_off()
                ax_3.imshow(img_bin, cmap='gray')

                ax_4.set_title(f"Frame {i}: Histogram of Processed Image" if is_3d else "Histogram of Processed Image")
                ax_4.set(yticks=[], xlabel='Pixel values', ylabel='Counts')
                ax_4.plot(img_histogram)
                if opt_img["threshold_type"]["value"] == 0:
                    thresh_arr = np.array(
                        [[int(opt_img["global_threshold_value"]["value"]),
                          int(opt_img["global_threshold_value"]["value"])],
                         [0, max(img_histogram)]], dtype='object')
                    ax_4.plot(thresh_arr[0], thresh_arr[1], ls='--', color='black')
                elif opt_img["threshold_type"]["value"] == 2:
                    otsu_val = opt_img["otsu"]["value"]
                    thresh_arr = np.array([[otsu_val, otsu_val],
                                           [0, max(img_histogram)]], dtype='object')
                    ax_4.plot(thresh_arr[0], thresh_arr[1], ls='--', color='black')
                plt_figs.append(plt_fig)
            return plt_figs

        def plot_run_configs():
            """
            Create a page (as a figure) that shows the user-selected parameters and options.

            :return: A Matplotlib figure object.
            """

            plt_fig = plt.Figure(figsize=(8.5, 8.5), dpi=300)
            ax = plt_fig.add_subplot(1, 1, 1)
            ax.set_axis_off()
            ax.set_title("Run Info")

            # similar to the start of the CSV file, this gathers all the relevant settings to display in the PDF
            _, filename = os.path.split(self.ntwk_p.img_path)
            now = datetime.datetime.now()

            run_info = ""
            run_info += filename + "\n"
            run_info += now.strftime("%Y-%m-%d %H:%M:%S") + "\n----------------------------\n\n"

            # Image configs
            sel_img_batch = self.ntwk_p.get_selected_batch()
            run_info += sel_img_batch.images[0].get_config_info()  # configs of the first image
            run_info += "\n\n"

            # Graph configs
            run_info += graph_obj.get_config_info()
            run_info += "\n\n"

            ax.text(0.5, 0.5, run_info, horizontalalignment='center', verticalalignment='center')
            return plt_fig

        def plot_histograms():
            """
            Create plot figures of the graph theory histograms selected by the user.

            :return: A list of Matplotlib figures.
            """

            opt_gte = graph_obj.configs
            plt_figs = []

            def plot_distribution_histogram(ax: plt.Axes, title: str, distribution: list, x_label: str,
                                            plt_bins: np.ndarray = None, y_label: str = 'Counts'):
                """
                Create a histogram from a distribution dataset.

                :param ax: Plot axis.
                :param title: Title text.
                :param distribution: Dataset to be plotted.
                :param x_label: X-label title text.
                :param plt_bins: Bin dataset.
                :param y_label: Y-label title text.
                """
                font_1 = {'fontsize': 9}
                if plt_bins is None:
                    plt_bins = np.linspace(min(distribution), max(distribution), 50)
                try:
                    std_val = str(round(stdev(distribution), 3))
                except StatisticsError:
                    std_val = "N/A"
                hist_title = title + std_val
                ax.set_title(hist_title, fontdict=font_1)
                ax.set(xlabel=x_label, ylabel=y_label)
                ax.hist(distribution, bins=plt_bins)

            # Degree and Closeness
            plt_fig = plt.Figure(figsize=(8.5, 11), dpi=300)
            if opt_gtc["display_degree_histogram"]["value"] == 1:
                deg_distribution = self.histogram_data["degree_distribution"]
                bins = np.arange(0.5, max(deg_distribution) + 1.5, 1)
                deg_title = r'Degree Distribution: $\sigma$='
                ax_1 = plt_fig.add_subplot(2, 1, 1)
                plot_distribution_histogram(ax_1, deg_title, deg_distribution, 'Degree', plt_bins=bins)

            if opt_gtc["display_closeness_centrality_histogram"]["value"] == 1:
                clo_distribution = self.histogram_data["closeness_distribution"]
                cc_title = r"Closeness Centrality: $\sigma$="
                ax_2 = plt_fig.add_subplot(2, 1, 2)
                plot_distribution_histogram(ax_2, cc_title, clo_distribution, 'Closeness value')
            plt_figs.append(plt_fig)

            # Betweenness, Clustering, Eigenvector and Ohms
            plt_fig = plt.Figure(figsize=(8.5, 11), dpi=300)
            if opt_gtc["display_betweenness_centrality_histogram"]["value"] == 1:
                bet_distribution = self.histogram_data["betweenness_distribution"]
                bc_title = r"Betweenness Centrality: $\sigma$="
                ax_1 = plt_fig.add_subplot(2, 2, 1)
                plot_distribution_histogram(ax_1, bc_title, bet_distribution, 'Betweenness value')

            if opt_gtc["compute_avg_clustering_coef"]["value"] == 1:
                cluster_coefs = self.histogram_data["clustering_coefficients"]
                clu_title = r"Clustering Coefficients: $\sigma$="
                ax_2 = plt_fig.add_subplot(2, 2, 2)
                plot_distribution_histogram(ax_2, clu_title, cluster_coefs, 'Clust. Coeff.')

            if opt_gtc["display_ohms_histogram"]["value"] == 1:
                ohm_distribution = self.histogram_data["ohms_distribution"]
                oh_title = r"Ohms Centrality: $\sigma$="
                ax_3 = plt_fig.add_subplot(2, 2, 3)
                plot_distribution_histogram(ax_3, oh_title, ohm_distribution, 'Ohms value')

            if opt_gtc["display_eigenvector_centrality_histogram"]["value"] == 1:
                eig_distribution = self.histogram_data["eigenvector_distribution"]
                ec_title = r"Eigenvector Centrality: $\sigma$="
                ax_4 = plt_fig.add_subplot(2, 2, 4)
                plot_distribution_histogram(ax_4, ec_title, eig_distribution, 'Eigenvector value')
            plt_figs.append(plt_fig)

            # weighted histograms
            if opt_gte["has_weights"]["value"] == 1:
                wt_type = graph_obj.get_weight_type()
                weight_type = FiberNetworkBuilder.get_weight_options().get(wt_type)

                # degree, betweenness, closeness and eigenvector
                plt_fig = plt.Figure(figsize=(8.5, 11), dpi=300)
                if opt_gtc["display_degree_histogram"]["value"] == 1:
                    w_deg_distribution = self.histogram_data["weighted_degree_distribution"]
                    bins = np.arange(0.5, max(w_deg_distribution) + 1.5, 1)
                    w_deg_title = r"Weighted Degree: $\sigma$="
                    ax_1 = plt_fig.add_subplot(2, 2, 1)
                    plot_distribution_histogram(ax_1, w_deg_title, w_deg_distribution, 'Degree', plt_bins=bins)

                if opt_gtc["display_betweenness_centrality_histogram"]["value"] == 1:
                    w_bet_distribution = self.histogram_data["weighted_betweenness_distribution"]
                    w_bt_title = weight_type + r"-Weighted Betweenness: $\sigma$="
                    ax_2 = plt_fig.add_subplot(2, 2, 2)
                    plot_distribution_histogram(ax_2, w_bt_title, w_bet_distribution, 'Betweenness value')

                if opt_gtc["display_closeness_centrality_histogram"]["value"] == 1:
                    w_clo_distribution = self.histogram_data["weighted_closeness_distribution"]
                    w_clo_title = r"Length-Weighted Closeness: $\sigma$="
                    ax_3 = plt_fig.add_subplot(2, 2, 3)
                    plot_distribution_histogram(ax_3, w_clo_title, w_clo_distribution, 'Closeness value')

                if opt_gtc["display_eigenvector_centrality_histogram"]["value"] == 1:
                    w_eig_distribution = self.histogram_data["weighted_eigenvector_distribution"]
                    w_ec_title = weight_type + r"-Weighted Eigenvector Cent.: $\sigma$="
                    ax_4 = plt_fig.add_subplot(2, 2, 4)
                    plot_distribution_histogram(ax_4, w_ec_title, w_eig_distribution, 'Eigenvector value')
                plt_figs.append(plt_fig)

            return plt_figs

def plot_heatmaps():
|
1032
|
+
"""
|
1033
|
+
Create plot figures of graph theory heatmaps.
|
1034
|
+
|
1035
|
+
:return: A list of Matplotlib figures.
|
1036
|
+
"""
|
1037
|
+
|
1038
|
+
sz = 30
|
1039
|
+
lw = 1.5
|
1040
|
+
plt_figs = []
|
1041
|
+
opt_gte = graph_obj.configs
|
1042
|
+
wt_type = graph_obj.get_weight_type()
|
1043
|
+
weight_type = FiberNetworkBuilder.get_weight_options().get(wt_type)
|
1044
|
+
|
1045
|
+
def plot_distribution_heatmap(distribution: list, title: str, size: float, line_width: float):
|
1046
|
+
"""
|
1047
|
+
Create a heatmap from a distribution.
|
1048
|
+
|
1049
|
+
:param distribution: Dataset to be plotted.
|
1050
|
+
:param title: Title of the plot figure.
|
1051
|
+
:param size: Size of the scatter items.
|
1052
|
+
:param line_width: Size of the plot line-width.
|
1053
|
+
:return: Histogram plot figure.
|
1054
|
+
"""
|
1055
|
+
nx_graph = graph_obj.nx_giant_graph
|
1056
|
+
fig_grp = FiberNetworkBuilder.plot_graph_edges(img_3d, nx_graph, node_distribution_data=distribution,
|
1057
|
+
plot_nodes=True, line_width=line_width,
|
1058
|
+
node_marker_size=size)
|
1059
|
+
|
1060
|
+
plt_fig_inner = fig_grp[0]
|
1061
|
+
plt_fig_inner.set_size_inches(8.5, 8.5)
|
1062
|
+
plt_fig_inner.set_dpi(400)
|
1063
|
+
plt_ax = plt_fig_inner.axes[0]
|
1064
|
+
plt_ax.set_title(title, fontdict={'fontsize': 9})
|
1065
|
+
                plt_ax.set_position([0.05, 0.05, 0.75, 0.75])

                return plt_fig_inner

            if opt_gtc["display_degree_histogram"]["value"] == 1:
                deg_distribution = self.histogram_data["degree_distribution"]
                plt_fig = plot_distribution_heatmap(deg_distribution, 'Degree Heatmap', sz, lw)
                plt_figs.append(plt_fig)
            if (opt_gtc["display_degree_histogram"]["value"] == 1) and (opt_gte["has_weights"]["value"] == 1):
                w_deg_distribution = self.histogram_data["weighted_degree_distribution"]
                plt_title = 'Weighted Degree Heatmap'
                plt_fig = plot_distribution_heatmap(w_deg_distribution, plt_title, sz, lw)
                plt_figs.append(plt_fig)
            if opt_gtc["compute_avg_clustering_coef"]["value"] == 1:
                cluster_coefs = self.histogram_data["clustering_coefficients"]
                plt_title = 'Clustering Coefficient Heatmap'
                plt_fig = plot_distribution_heatmap(cluster_coefs, plt_title, sz, lw)
                plt_figs.append(plt_fig)
            if opt_gtc["display_betweenness_centrality_histogram"]["value"] == 1:
                bet_distribution = self.histogram_data["betweenness_distribution"]
                plt_title = 'Betweenness Centrality Heatmap'
                plt_fig = plot_distribution_heatmap(bet_distribution, plt_title, sz, lw)
                plt_figs.append(plt_fig)
            if (opt_gtc["display_betweenness_centrality_histogram"]["value"] == 1) and (
                    opt_gte["has_weights"]["value"] == 1):
                w_bet_distribution = self.histogram_data["weighted_betweenness_distribution"]
                plt_title = f'{weight_type}-Weighted Betweenness Centrality Heatmap'
                plt_fig = plot_distribution_heatmap(w_bet_distribution, plt_title, sz, lw)
                plt_figs.append(plt_fig)
            if opt_gtc["display_closeness_centrality_histogram"]["value"] == 1:
                clo_distribution = self.histogram_data["closeness_distribution"]
                plt_title = 'Closeness Centrality Heatmap'
                plt_fig = plot_distribution_heatmap(clo_distribution, plt_title, sz, lw)
                plt_figs.append(plt_fig)
            if (opt_gtc["display_closeness_centrality_histogram"]["value"] == 1) and (
                    opt_gte["has_weights"]["value"] == 1):
                w_clo_distribution = self.histogram_data["weighted_closeness_distribution"]
                plt_title = 'Length-Weighted Closeness Centrality Heatmap'
                plt_fig = plot_distribution_heatmap(w_clo_distribution, plt_title, sz, lw)
                plt_figs.append(plt_fig)
            if opt_gtc["display_eigenvector_centrality_histogram"]["value"] == 1:
                eig_distribution = self.histogram_data["eigenvector_distribution"]
                plt_title = 'Eigenvector Centrality Heatmap'
                plt_fig = plot_distribution_heatmap(eig_distribution, plt_title, sz, lw)
                plt_figs.append(plt_fig)
            if (opt_gtc["display_eigenvector_centrality_histogram"]["value"] == 1) and (
                    opt_gte["has_weights"]["value"] == 1):
                w_eig_distribution = self.histogram_data["weighted_eigenvector_distribution"]
                plt_title = f'{weight_type}-Weighted Eigenvector Centrality Heatmap'
                plt_fig = plot_distribution_heatmap(w_eig_distribution, plt_title, sz, lw)
                plt_figs.append(plt_fig)
            if opt_gtc["display_ohms_histogram"]["value"] == 1:
                ohm_distribution = self.histogram_data["ohms_distribution"]
                plt_title = 'Ohms Centrality Heatmap'
                plt_fig = plot_distribution_heatmap(ohm_distribution, plt_title, sz, lw)
                plt_figs.append(plt_fig)
            return plt_figs

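Each branch above pulls a per-node metric array out of self.histogram_data and hands it to plot_distribution_heatmap. For orientation, here is a minimal, self-contained sketch (illustrative only, not package source) of how such node-level distributions are typically computed with networkx:

    import networkx as nx

    # Build a small random graph and collect the per-node values that a
    # distribution heatmap would be drawn from.
    G = nx.erdos_renyi_graph(n=100, p=0.05, seed=1)
    degree_distribution = [d for _, d in G.degree()]
    betweenness_distribution = list(nx.betweenness_centrality(G).values())
    clustering_coefficients = list(nx.clustering(G).values())
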
        # 1. Plot the original, processed, and binary images, as well as the
        #    histogram of pixel grayscale values.
        figs = plot_bin_images()
        out_figs.extend(figs)

        # 2a. Plot graph nodes.
        fig = graph_obj.plot_graph_network(image_arr=img_3d, plot_nodes=True, a4_size=True)
        if fig is not None:
            out_figs.append(fig)

        # 2b. Plot graph edges.
        fig = graph_obj.plot_graph_network(image_arr=img_3d, a4_size=True)
        if fig is not None:
            out_figs.append(fig)

        # 3a. Display all the GT calculations in a table (on the entire page).
        fig, fig_wt = plot_gt_results()
        out_figs.append(fig)
        if fig_wt:
            out_figs.append(fig_wt)

        # 3b. Display the scaling behavior of the GT results.
        figs = GraphAnalyzer.plot_scaling_behavior(self.scaling_data)
        out_figs.extend(figs)

        # 4. Display histograms.
        self.update_status([92, "Generating histograms..."])
        figs = plot_histograms()
        out_figs.extend(figs)

        # 5. Display heatmaps.
        if opt_gtc["display_heatmaps"]["value"] == 1:
            self.update_status([95, "Generating heatmaps..."])
            figs = plot_heatmaps()
            out_figs.extend(figs)

        # 6. Display run information.
        fig = plot_run_configs()
        out_figs.append(fig)
        return out_figs

    @staticmethod
    def plot_scaling_behavior(scaling_data: defaultdict = None):
        """
        Plot the scaling behavior of GT parameters against node count, fitting
        power-law, truncated power-law, and log-normal models to the averaged data.

        Args:
            scaling_data (defaultdict): Parameter names mapped to their samples, one list per box size.

        Returns:
            list: Matplotlib figures with the fitted scaling plots.
        """

        # Define our 'best-fit' models.
        def power_law_model(x, a, k):
            """
            A best-fit model that follows the power-law distribution y = a * x^(-k),
            where a and k are fitting parameters.

            Args:
                x (np.array): Array of x values
                a (float): fitting parameter
                k (float): fitting parameter
            """
            return a * x ** (-k)
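This model is what motivates the log-log linear regression performed further down in this method: taking log10 of y = a * x^(-k) gives log10(y) = log10(a) - k * log10(x), a straight line whose slope estimates -k. A quick self-contained check (illustrative, not package code):

    import numpy as np

    # On noiseless power-law data, the log-log slope recovers -k
    # and the intercept recovers log10(a).
    a, k = 2.0, 1.5
    x = np.linspace(1.0, 100.0, 50)
    y = a * x ** (-k)
    slope, intercept = np.polyfit(np.log10(x), np.log10(y), deg=1)
    assert np.isclose(slope, -k) and np.isclose(10 ** intercept, a)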

        def truncated_power_law_model(x, a, k, c):
            """
            A best-fit model that follows the truncated power-law distribution
            y = a * x^(-k) * exp(-c * x), where a, k and c are fitting parameters.

            https://en.wikipedia.org/wiki/Power_law#Power_law_with_exponential_cutoff

            Args:
                x (np.array): Array of x values
                a (float): fitting parameter
                k (float): fitting parameter
                c (float): cut-off fitting parameter
            """
            return a * (x ** (-k)) * np.exp(-c * x)

        def lognormal_model(x, mu, sigma, a):
            """
            Log-normal model (Y depends on X, X is log-normal).

            Args:
                x (np.array): Array of x values
                mu (float): fitting parameter
                sigma (float): fitting parameter
                a (float): amplitude fitting parameter

            Returns:
                np.array: Scaled log-normal density a * pdf(x) evaluated at each x.
            """
            return a * (1 / (x * sigma * np.sqrt(2 * np.pi))) * np.exp(-((np.log(x) - mu) ** 2) / (2 * sigma ** 2))
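Note that lognormal_model is an amplitude-scaled log-normal density rather than a normalized pdf. Under that reading it matches scipy's parameterization with s = sigma and scale = exp(mu); a small self-contained check (illustrative, not package code):

    import numpy as np
    from scipy import stats

    # The hand-written density above equals scipy's lognorm pdf, up to the
    # amplitude factor a.
    x = np.linspace(0.5, 10.0, 20)
    mu, sigma, a = 1.0, 0.5, 3.0
    manual = a * (1 / (x * sigma * np.sqrt(2 * np.pi))) * np.exp(-((np.log(x) - mu) ** 2) / (2 * sigma ** 2))
    library = a * stats.lognorm.pdf(x, s=sigma, scale=np.exp(mu))
    assert np.allclose(manual, library)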

        def plot_axis(subplot_num, plt_type="", plot_err=True):
            """
            Add the next subplot to the current 2x2 figure page and, optionally,
            draw the averaged data with error bars.

            Args:
                subplot_num (int): Index of the last used subplot slot.
                plt_type (str): Prefix for the subplot title (e.g., the fit type).
                plot_err (bool): Whether to draw the data with error bars.

            Returns:
                tuple: (axis, incremented subplot index)
            """
            subplot_num += 1
            axis = fig.add_subplot(2, 2, subplot_num)
            if plot_err:
                axis.errorbar(x_avg, y_avg, xerr=x_err, yerr=y_err, label='Data', color='b', capsize=4,
                              marker='s', markersize=4, linewidth=1, linestyle='-')
            axis.set_title(f"{plt_type}\nNodes vs {y_title}", fontsize=10)
            axis.set(xlabel='No. of Nodes', ylabel=f'{param_name}')
            return axis, subplot_num

        # Initialize plot figures
        figs = []
        if scaling_data is None:
            return figs

        # Plot scaling behavior
        i = 0
        x_label = None
        x_avg, x_err = np.nan, np.nan
        fig = plt.Figure(figsize=(8.5, 11), dpi=300)
        for param_name, plt_dict in scaling_data.items():
            # Retrieve plot data
            box_labels = sorted(plt_dict.keys())  # Optional: sort heights
            y_lst = [plt_dict[h] for h in box_labels]  # shape: (n_boxes, n_samples)

            # Pad rows with NaN so they all have the same length
            max_len = max(len(row) for row in y_lst)
            padded_lst = [row + [np.nan] * (max_len - len(row)) for row in y_lst]

            # Convert to a NumPy array and average across samples
            y_values = np.array(padded_lst).T
            y_avg = np.nanmean(y_values, axis=0)
            y_err = np.nanstd(y_values, axis=0, ddof=1) / np.sqrt(y_values.shape[0])
            if np.any(np.isnan(y_avg)):
                continue

            # The first parameter (node count) supplies the x-axis for all others
            if x_label is None:
                x_label = param_name
                x_avg = y_avg
                x_err = y_err
            else:
                # 1. Transform to log-log scale
                log_x = np.log10(x_avg)
                log_y = np.log10(y_avg)
                x_fit = np.linspace(min(x_avg), max(x_avg), 100)
                y_title = param_name.split('(')[0] if '(' in param_name else param_name

                # 2a. Perform linear regression in log-log scale
                try:
                    slope, intercept, r_value, p_value, std_err = sp.stats.linregress(log_x, log_y)
                    # Compute the line of best fit
                    log_y_fit = slope * log_x + intercept

                    # 3a. Plot data (log-log scale with the line of best fit)
                    ax, i = plot_axis(i, "Log-Log Plot of", plot_err=False)
                    ax.plot(log_x, log_y, label='Data', color='b', marker='s', markersize=3)
                    ax.plot(log_x, log_y_fit, label=f'Fit: slope={slope:.2f}, $R^2$={r_value ** 2:.3f}', color='r')
                    ax.legend()
                except Exception as e:
                    print(f"Log-Log Error: {e}")

                # 2b. Compute the line of best fit according to our power-law model
                try:
                    init_params = [1.0, 1.0]  # initial guess for [a, k]
                    optimal_params: np.ndarray = sp.optimize.curve_fit(power_law_model, x_avg, y_avg, p0=init_params)[0]
                    a_fit, k_fit = float(optimal_params[0]), float(optimal_params[1])
                    # Generate points for the best-fit curve
                    y_fit_pwr = power_law_model(x_fit, a_fit, k_fit)

                    # 3b. Plot data (power-law best fit)
                    ax, i = plot_axis(i, "Power Law Fit and Plot of")
                    ax.plot(x_fit, y_fit_pwr, label=f'Fit: $y = ax^{{-k}}$\n$a={a_fit:.2f}, k={k_fit:.2f}$',
                            color='red')
                    ax.legend()
                except Exception as e:
                    print(f"Power Law Error: {e}")

                # 2c. Compute the line of best fit according to our truncated power-law model
                try:
                    init_params_cutoff = [1.0, 1.0, 0.1]
                    opt_params_cutoff: np.ndarray = \
                        sp.optimize.curve_fit(truncated_power_law_model, x_avg, y_avg, p0=init_params_cutoff)[0]
                    a_fit_cut, k_fit_cut, c_fit_cut = \
                        float(opt_params_cutoff[0]), float(opt_params_cutoff[1]), float(opt_params_cutoff[2])
                    # Generate points for the best-fit curve
                    y_fit_cut = truncated_power_law_model(x_fit, a_fit_cut, k_fit_cut, c_fit_cut)
                    print(f"Fitted parameters: a={a_fit_cut:.2f}, k={k_fit_cut:.2f}, c={c_fit_cut:.2f}")

                    # 3c. Plot data (truncated power-law best fit)
                    ax, i = plot_axis(i, "Truncated Power Law Fit and Plot of")
                    ax.plot(x_fit, y_fit_cut,
                            label=f'Fit: $y = ax^{{-k}}*exp(-c*x)$\n$a={a_fit_cut:.2f}, k={k_fit_cut:.2f}, c={c_fit_cut:.2f}$',
                            color='red')
                    ax.legend()
                except Exception as e:
                    print(f"Truncated Power Law Error: {e}")

                # 2d. Compute the best fit, assuming log-normal dependence on X
                try:
                    init_params_log = [1.0, 1.0, 10]
                    opt_params_log: np.ndarray = \
                        sp.optimize.curve_fit(lognormal_model, x_avg, y_avg, p0=init_params_log,
                                              bounds=([0, 0, 0], [np.inf, np.inf, np.inf]), maxfev=1000)[0]
                    mu_fit, sigma_fit, a_log_fit = \
                        float(opt_params_log[0]), float(opt_params_log[1]), float(opt_params_log[2])
                    # Generate predicted points for the best-fit curve
                    y_fit_ln = lognormal_model(x_fit, mu_fit, sigma_fit, a_log_fit)

                    # 3d. Plot data (log-normal best fit)
                    ax, i = plot_axis(i, "Log-Normal Fit and Plot of")
                    ax.plot(x_fit, y_fit_ln,
                            label=f'Fit: log-normal shape\n$\\mu={mu_fit:.2f}$, $\\sigma={sigma_fit:.2f}$',
                            color='red')
                    ax.legend()
                except Exception as e:
                    print(f"Log Normal Dependence Error: {e}")

            # Start a new figure page once the current one has been filled
            if i > 0:
                figs.append(fig)
                fig = plt.Figure(figsize=(8.5, 11), dpi=300)
                i = 0

        if i <= 4:
            figs.append(fig)
        return figs
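A minimal usage sketch for the method above (illustrative; `analyzer` is a stand-in for a GraphAnalyzer instance whose analysis run has populated scaling_data):

    # Render each returned figure page to its own image file.
    figs = GraphAnalyzer.plot_scaling_behavior(analyzer.scaling_data)
    for idx, f in enumerate(figs):
        f.savefig(f"scaling_page_{idx}.png", dpi=300)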

    @staticmethod
    def write_to_pdf(sgt_obj, update_func=None):
        """
        Write results to a PDF file.

        Args:
            sgt_obj: StructuralGT object with calculated GT parameters
            update_func: Callable for progress updates (e.g., update_func(percentage, message))

        Returns:
            True if the PDF file is written successfully, otherwise False
        """
        try:
            if update_func:
                update_func(98, "Writing PDF...")

            filename, output_location = sgt_obj.ntwk_p.get_filenames()
            pdf_filename = filename + "_SGT_results.pdf"
            pdf_file = os.path.join(output_location, pdf_filename)

            if not sgt_obj.plot_figures:
                raise ValueError("No figures available to write to PDF.")

            with PdfPages(pdf_file) as pdf:
                for fig in sgt_obj.plot_figures:
                    pdf.savefig(fig)

            # if update_func:
            #     update_func(100, "GT PDF successfully generated!")
            return True
        except Exception as err:
            logging.exception("PDF Write Error: %s", err, extra={'user': 'SGT Logs'})
            if update_func:
                update_func(-1, "Error occurred while trying to write to PDF.")
            return False
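As the code above shows, update_func is a plain two-argument callable taking a percentage and a message, with -1 signaling an error. A minimal sketch (illustrative; `sgt_obj` is a stand-in for an already analyzed StructuralGT object):

    def on_progress(percentage, message):
        # -1 is used by the analyzer methods to signal an error state.
        print(f"[{percentage}%] {message}")

    ok = GraphAnalyzer.write_to_pdf(sgt_obj, update_func=on_progress)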

    @staticmethod
    def safe_run_analyzer(sgt_obj, update_func):
        """
        Safely compute GT metrics without raising exceptions or crashing the app.

        Args:
            sgt_obj: StructuralGT object with calculated GT parameters
            update_func: Callable for progress updates (e.g., update_func(percentage, message))
        """
        try:
            # Add listeners
            sgt_obj.add_listener(update_func)

            sgt_obj.run_analyzer()
            if sgt_obj.abort:
                raise AbortException("Process aborted")

            # Cleanup - remove listeners
            sgt_obj.remove_listener(update_func)
            return True, sgt_obj
        except AbortException:
            update_func(-1, "Task aborted by user or a fatal error occurred!")
            sgt_obj.remove_listener(update_func)
            return False, None
        except Exception as err:
            update_func(-1, "Error encountered! Try again")
            logging.exception("Error: %s", err, extra={'user': 'SGT Logs'})
            # Clean up listeners before exiting
            sgt_obj.remove_listener(update_func)
            return False, None
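A minimal usage sketch (illustrative; `on_progress` is the two-argument callback shown earlier), mirroring how safe_run_multi_analyzer below chains the analyzer with the PDF writer:

    ok, analyzed = GraphAnalyzer.safe_run_analyzer(sgt_obj, on_progress)
    if ok:
        GraphAnalyzer.write_to_pdf(analyzed, update_func=on_progress)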

    @staticmethod
    def safe_run_multi_analyzer(sgt_objs, update_func):
        """
        Safely compute GT metrics of multiple images without raising exceptions or crashing the app.

        Args:
            sgt_objs: Dictionary of StructuralGT objects, keyed by image
            update_func: Callable for progress updates (e.g., update_func(percentage, message))
        """
        try:
            i = 0
            keys_list = list(sgt_objs.keys())
            for key in keys_list:
                sgt_obj = sgt_objs[key]

                status_msg = f"Analyzing Image: {(i + 1)} / {len(sgt_objs)}"
                update_func(101, status_msg)

                start = time.time()
                success, new_sgt = GraphAnalyzer.safe_run_analyzer(sgt_obj, update_func)
                if success:
                    GraphAnalyzer.write_to_pdf(new_sgt, update_func)
                end = time.time()

                i += 1
                num_cores = get_num_cores()
                sel_batch = sgt_obj.ntwk_p.get_selected_batch()
                graph_obj = sel_batch.graph_obj
                output = status_msg + "\n" + f"Run-time: {end - start} seconds\n"
                output += f"Number of cores: {num_cores}\n"
                output += f"Results generated for: {sgt_obj.ntwk_p.img_path}\n"
                output += f"Node Count: {graph_obj.nx_giant_graph.number_of_nodes()}\n"
                output += f"Edge Count: {graph_obj.nx_giant_graph.number_of_edges()}\n"
                filename, out_dir = sgt_obj.ntwk_p.get_filenames()
                out_file = os.path.join(out_dir, filename + '-v2_results.txt')
                write_txt_file(output, out_file)
                logging.info(output, extra={'user': 'SGT Logs'})
            return sgt_objs
        except AbortException:
            update_func(-1, "Task aborted by user or a fatal error occurred!")
            return None
        except Exception as err:
            update_func(-1, "Error encountered! Try again")
            logging.exception("Error: %s", err, extra={'user': 'SGT Logs'})
            return None
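
A minimal batch sketch (illustrative; the analyzer objects and file names are hypothetical):

    # Two hypothetical analyzers keyed by image name; None signals an
    # aborted or failed batch.
    sgt_objs = {"sample_01.tif": analyzer_a, "sample_02.tif": analyzer_b}
    results = GraphAnalyzer.safe_run_multi_analyzer(sgt_objs, on_progress)
    if results is None:
        print("Batch run aborted or failed.")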