mediml 0.9.9__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- MEDiml/MEDscan.py +1696 -0
- MEDiml/__init__.py +21 -0
- MEDiml/biomarkers/BatchExtractor.py +806 -0
- MEDiml/biomarkers/BatchExtractorTexturalFilters.py +840 -0
- MEDiml/biomarkers/__init__.py +16 -0
- MEDiml/biomarkers/diagnostics.py +125 -0
- MEDiml/biomarkers/get_oriented_bound_box.py +158 -0
- MEDiml/biomarkers/glcm.py +1602 -0
- MEDiml/biomarkers/gldzm.py +523 -0
- MEDiml/biomarkers/glrlm.py +1315 -0
- MEDiml/biomarkers/glszm.py +555 -0
- MEDiml/biomarkers/int_vol_hist.py +527 -0
- MEDiml/biomarkers/intensity_histogram.py +615 -0
- MEDiml/biomarkers/local_intensity.py +89 -0
- MEDiml/biomarkers/morph.py +1756 -0
- MEDiml/biomarkers/ngldm.py +780 -0
- MEDiml/biomarkers/ngtdm.py +414 -0
- MEDiml/biomarkers/stats.py +373 -0
- MEDiml/biomarkers/utils.py +389 -0
- MEDiml/filters/TexturalFilter.py +299 -0
- MEDiml/filters/__init__.py +9 -0
- MEDiml/filters/apply_filter.py +134 -0
- MEDiml/filters/gabor.py +215 -0
- MEDiml/filters/laws.py +283 -0
- MEDiml/filters/log.py +147 -0
- MEDiml/filters/mean.py +121 -0
- MEDiml/filters/textural_filters_kernels.py +1738 -0
- MEDiml/filters/utils.py +107 -0
- MEDiml/filters/wavelet.py +237 -0
- MEDiml/learning/DataCleaner.py +198 -0
- MEDiml/learning/DesignExperiment.py +480 -0
- MEDiml/learning/FSR.py +667 -0
- MEDiml/learning/Normalization.py +112 -0
- MEDiml/learning/RadiomicsLearner.py +714 -0
- MEDiml/learning/Results.py +2237 -0
- MEDiml/learning/Stats.py +694 -0
- MEDiml/learning/__init__.py +10 -0
- MEDiml/learning/cleaning_utils.py +107 -0
- MEDiml/learning/ml_utils.py +1015 -0
- MEDiml/processing/__init__.py +6 -0
- MEDiml/processing/compute_suv_map.py +121 -0
- MEDiml/processing/discretisation.py +149 -0
- MEDiml/processing/interpolation.py +275 -0
- MEDiml/processing/resegmentation.py +66 -0
- MEDiml/processing/segmentation.py +912 -0
- MEDiml/utils/__init__.py +25 -0
- MEDiml/utils/batch_patients.py +45 -0
- MEDiml/utils/create_radiomics_table.py +131 -0
- MEDiml/utils/data_frame_export.py +42 -0
- MEDiml/utils/find_process_names.py +16 -0
- MEDiml/utils/get_file_paths.py +34 -0
- MEDiml/utils/get_full_rad_names.py +21 -0
- MEDiml/utils/get_institutions_from_ids.py +16 -0
- MEDiml/utils/get_patient_id_from_scan_name.py +22 -0
- MEDiml/utils/get_patient_names.py +26 -0
- MEDiml/utils/get_radiomic_names.py +27 -0
- MEDiml/utils/get_scan_name_from_rad_name.py +22 -0
- MEDiml/utils/image_reader_SITK.py +37 -0
- MEDiml/utils/image_volume_obj.py +22 -0
- MEDiml/utils/imref.py +340 -0
- MEDiml/utils/initialize_features_names.py +62 -0
- MEDiml/utils/inpolygon.py +159 -0
- MEDiml/utils/interp3.py +43 -0
- MEDiml/utils/json_utils.py +78 -0
- MEDiml/utils/mode.py +31 -0
- MEDiml/utils/parse_contour_string.py +58 -0
- MEDiml/utils/save_MEDscan.py +30 -0
- MEDiml/utils/strfind.py +32 -0
- MEDiml/utils/textureTools.py +188 -0
- MEDiml/utils/texture_features_names.py +115 -0
- MEDiml/utils/write_radiomics_csv.py +47 -0
- MEDiml/wrangling/DataManager.py +1724 -0
- MEDiml/wrangling/ProcessDICOM.py +512 -0
- MEDiml/wrangling/__init__.py +3 -0
- mediml-0.9.9.dist-info/LICENSE.md +674 -0
- mediml-0.9.9.dist-info/METADATA +232 -0
- mediml-0.9.9.dist-info/RECORD +78 -0
- mediml-0.9.9.dist-info/WHEEL +4 -0
|
@@ -0,0 +1,1315 @@
|
|
|
1
|
+
#!/usr/bin/env python3
|
|
2
|
+
# -*- coding: utf-8 -*-
|
|
3
|
+
|
|
4
|
+
|
|
5
|
+
from copy import deepcopy
|
|
6
|
+
from typing import Dict, List, Union
|
|
7
|
+
|
|
8
|
+
import numpy as np
|
|
9
|
+
import pandas as pd
|
|
10
|
+
|
|
11
|
+
from ..utils.textureTools import (coord2index, get_neighbour_direction,
|
|
12
|
+
is_list_all_none)
|
|
13
|
+
|
|
14
|
+
|
|
15
|
+
def extract_all(vol: np.ndarray,
                dist_correction: Union[bool, str]=None,
                merge_method: str="vol_merge") -> Dict:
    """Computes glrlm features.

    These features refer to the Grey Level Run Length Matrix family in
    the `IBSI1 reference manual <https://arxiv.org/pdf/1612.07003.pdf>`__.

    Args:
        vol (ndarray): 3D volume, isotropically resampled, quantized
            (e.g. n_g = 32, levels = [1, ..., n_g]), with NaNs outside the region
            of interest.
        dist_correction (Union[bool, str], optional): Set this variable to true in order to use
            discretization length difference corrections as used
            by the `Institute of Physics and Engineering in
            Medicine <https://doi.org/10.1088/0031-9155/60/14/5471>`__.
            Set this variable to false to replicate IBSI results.
            Or use string and specify the norm for distance weighting.
            Weighting is only performed if this argument is
            "manhattan", "euclidean" or "chebyshev".
        merge_method (str, optional): merging method which determines how features are
            calculated. One of "average", "slice_merge", "dir_merge" and "vol_merge".
            Note that not all combinations of spatial and merge method are valid.

    Returns:
        Dict: Dict of the glrlm features.

    Todo:
        * Enable calculation of RLM features using different spatial methods (2d, 2.5d, 3d)
        * Enable calculation of RLM features using different RLM distance settings
        * Enable calculation of RLM features for different merge methods (average, slice_merge, dir_merge, vol_merge)
        * Provide the range of discretised intensities from a calling function and pass to get_rlm_features.
        * Test if dist_correction works as expected.
    """
    # Thin wrapper: ``dist_correction`` maps directly onto the distance
    # weighting norm used while building the run length matrices.
    rlm_features = get_rlm_features(vol=vol,
                                    merge_method=merge_method,
                                    dist_weight_norm=dist_correction)

    return rlm_features
|
|
59
|
+
|
|
60
|
+
def get_rlm_features(vol: np.ndarray,
                     glrlm_spatial_method: str="3d",
                     merge_method: str="vol_merge",
                     dist_weight_norm: Union[bool, str]=None) -> Dict:
    """Extract run length matrix-based features from the intensity roi mask.

    Note:
        This code was adapted from the in-house radiomics software created at
        OncoRay, Dresden, Germany.

    Args:
        vol (ndarray): volume with discretised intensities as 3D numpy array (x, y, z).
        glrlm_spatial_method (str, optional): spatial method which determines the way
            run length matrices are calculated and how features are determined.
            Must be "2d", "2.5d" or "3d" (a list of these values is also accepted).
        merge_method (str, optional): merging method which determines how features are
            calculated. One of "average", "slice_merge", "dir_merge" and "vol_merge".
            Note that not all combinations of spatial and merge method are valid.
        dist_weight_norm (Union[bool, str], optional): norm for distance weighting. Weighting is only
            performed if this argument is either "manhattan", "euclidean",
            "chebyshev", or ``True`` (treated as "euclidean").

    Returns:
        Dict: Dict of the length matrix features.
    """
    # Accept scalars as well as lists for both method arguments.
    if type(glrlm_spatial_method) is not list:
        glrlm_spatial_method = [glrlm_spatial_method]

    if type(merge_method) is not list:
        merge_method = [merge_method]

    # A bare ``True`` selects euclidean distance weighting; ``False`` disables it.
    if type(dist_weight_norm) is bool:
        if dist_weight_norm:
            dist_weight_norm = "euclidean"

    # Get the roi in tabular format: one row per voxel with its intensity,
    # coordinates and a flag marking whether it lies inside the ROI (finite).
    img_dims = vol.shape
    index_id = np.arange(start=0, stop=vol.size)
    coords = np.unravel_index(indices=index_id, shape=img_dims)  # Convert flat index into coordinate
    df_img = pd.DataFrame({"index_id": index_id,
                           "g": np.ravel(vol),
                           "x": coords[0],
                           "y": coords[1],
                           "z": coords[2],
                           "roi_int_mask": np.ravel(np.isfinite(vol))})

    # Generate an empty feature list
    feat_list = []

    # Iterate over spatial arrangements
    for ii_spatial in glrlm_spatial_method:
        # Initiate list of rlm objects
        rlm_list = []

        # Perform 2D analysis
        if ii_spatial.lower() in ["2d", "2.5d"]:
            # The set of in-plane neighbour directions is slice-independent:
            # compute it once instead of once per slice.
            nbrs = get_neighbour_direction(d=1,
                                           distance="chebyshev",
                                           centre=False,
                                           complete=False,
                                           dim3=False)

            # Iterate over slices, one matrix per (slice, direction) pair
            for ii_slice in np.arange(0, img_dims[2]):
                for ii_direction in np.arange(0, np.shape(nbrs)[1]):
                    rlm_list += [RunLengthMatrix(direction=nbrs[:, ii_direction],
                                                 direction_id=ii_direction,
                                                 spatial_method=ii_spatial.lower(),
                                                 img_slice=ii_slice)]

        # Perform 3D analysis
        if ii_spatial.lower() == "3d":
            # Get neighbour direction and iterate over neighbours
            nbrs = get_neighbour_direction(d=1,
                                           distance="chebyshev",
                                           centre=False,
                                           complete=False,
                                           dim3=True)

            for ii_direction in np.arange(0, np.shape(nbrs)[1]):
                # Add rlm matrices to list
                rlm_list += [RunLengthMatrix(direction=nbrs[:, ii_direction],
                                             direction_id=ii_direction,
                                             spatial_method=ii_spatial.lower())]

        # Calculate run length matrices
        for rlm in rlm_list:
            rlm.calculate_rlm_matrix(df_img=df_img,
                                     img_dims=img_dims,
                                     dist_weight_norm=dist_weight_norm)

        # Merge matrices according to each requested method.
        # BUG FIX: the original loop ``for merge_method in merge_method`` rebound
        # the list to its last (string) element, so a second spatial method would
        # have iterated over that string's characters instead of the methods.
        for ii_merge in merge_method:
            upd_list = combine_rlm_matrices(rlm_list=rlm_list,
                                            merge_method=ii_merge,
                                            spatial_method=ii_spatial.lower())

            # Skip if no matrices are available (due to illegal combinations of
            # merge and spatial methods)
            if upd_list is None:
                continue

            # Calculate features per (merged) matrix
            feat_run_list = []
            for rlm in upd_list:
                feat_run_list += [rlm.calculate_rlm_features()]

            # Average feature values across the merged matrices
            feat_list += [pd.concat(feat_run_list, axis=0).mean(axis=0, skipna=True).to_frame().transpose()]

    # Merge feature tables into a single dictionary
    df_feat = pd.concat(feat_list, axis=1).to_dict(orient="records")[0]

    return df_feat
|
|
175
|
+
|
|
176
|
+
def get_matrix(vol: np.ndarray,
               glrlm_spatial_method: str="3d",
               merge_method: str="vol_merge",
               dist_weight_norm: Union[bool, str]=None) -> List:
    """Compute the merged run length matrices from the intensity roi mask.

    Note:
        This code was adapted from the in-house radiomics software created at
        OncoRay, Dresden, Germany.

    Args:
        vol (ndarray): volume with discretised intensities as 3D numpy array (x, y, z).
        glrlm_spatial_method (str, optional): spatial method which determines the way
            run length matrices are calculated and how features are determined.
            Must be "2d", "2.5d" or "3d" (a list of these values is also accepted).
        merge_method (str, optional): merging method which determines how features are
            calculated. One of "average", "slice_merge", "dir_merge" and "vol_merge".
            Note that not all combinations of spatial and merge method are valid.
        dist_weight_norm (Union[bool, str], optional): norm for distance weighting. Weighting is only
            performed if this argument is either "manhattan", "euclidean",
            "chebyshev", or ``True`` (treated as "euclidean").

    Returns:
        List: List of merged RunLengthMatrix objects for the last processed
        combination of spatial and merge method (or None for an illegal
        combination).
    """
    # Accept scalars as well as lists for both method arguments.
    if type(glrlm_spatial_method) is not list:
        glrlm_spatial_method = [glrlm_spatial_method]

    if type(merge_method) is not list:
        merge_method = [merge_method]

    # A bare ``True`` selects euclidean distance weighting; ``False`` disables it.
    if type(dist_weight_norm) is bool:
        if dist_weight_norm:
            dist_weight_norm = "euclidean"

    # Get the roi in tabular format: one row per voxel with its intensity,
    # coordinates and a flag marking whether it lies inside the ROI (finite).
    img_dims = vol.shape
    index_id = np.arange(start=0, stop=vol.size)
    coords = np.unravel_index(indices=index_id, shape=img_dims)  # Convert flat index into coordinate
    df_img = pd.DataFrame({"index_id": index_id,
                           "g": np.ravel(vol),
                           "x": coords[0],
                           "y": coords[1],
                           "z": coords[2],
                           "roi_int_mask": np.ravel(np.isfinite(vol))})

    # Iterate over spatial arrangements
    for ii_spatial in glrlm_spatial_method:
        # Initiate list of rlm objects
        rlm_list = []

        # Perform 2D analysis
        if ii_spatial.lower() in ["2d", "2.5d"]:
            # The set of in-plane neighbour directions is slice-independent:
            # compute it once instead of once per slice.
            nbrs = get_neighbour_direction(d=1,
                                           distance="chebyshev",
                                           centre=False,
                                           complete=False,
                                           dim3=False)

            # Iterate over slices, one matrix per (slice, direction) pair
            for ii_slice in np.arange(0, img_dims[2]):
                for ii_direction in np.arange(0, np.shape(nbrs)[1]):
                    rlm_list += [RunLengthMatrix(direction=nbrs[:, ii_direction],
                                                 direction_id=ii_direction,
                                                 spatial_method=ii_spatial.lower(),
                                                 img_slice=ii_slice)]

        # Perform 3D analysis
        if ii_spatial.lower() == "3d":
            # Get neighbour direction and iterate over neighbours
            nbrs = get_neighbour_direction(d=1,
                                           distance="chebyshev",
                                           centre=False,
                                           complete=False,
                                           dim3=True)

            for ii_direction in np.arange(0, np.shape(nbrs)[1]):
                # Add rlm matrices to list
                rlm_list += [RunLengthMatrix(direction=nbrs[:, ii_direction],
                                             direction_id=ii_direction,
                                             spatial_method=ii_spatial.lower())]

        # Calculate run length matrices
        for rlm in rlm_list:
            rlm.calculate_rlm_matrix(df_img=df_img,
                                     img_dims=img_dims,
                                     dist_weight_norm=dist_weight_norm)

        # Merge matrices according to the given method.
        # BUG FIX: the original loop ``for merge_method in merge_method``
        # rebound the list to its last (string) element; use a distinct name.
        for ii_merge in merge_method:
            upd_list = combine_rlm_matrices(rlm_list=rlm_list,
                                            merge_method=ii_merge,
                                            spatial_method=ii_spatial.lower())

    # NOTE(review): only the result of the last spatial/merge combination is
    # returned; callers are expected to pass single-valued method arguments.
    return upd_list
|
|
273
|
+
|
|
274
|
+
def combine_rlm_matrices(rlm_list: list,
                         merge_method: str,
                         spatial_method: str)-> List:
    """Merges run length matrices prior to feature calculation.

    Note:
        This code was adapted from the in-house radiomics software created at
        OncoRay, Dresden, Germany.

    Args:
        rlm_list (List): List of RunLengthMatrix objects.
        merge_method (str): Merging method which determines how features are calculated.
            One of "average", "slice_merge", "dir_merge" and "vol_merge". Note that not all
            combinations of spatial and merge method are valid.
        spatial_method (str): Spatial method which determines the way run length
            matrices are calculated and how features are determined. One of "2d", "2.5d"
            or "3d".

    Returns:
        List[RunLengthMatrix]: List of one or more merged RunLengthMatrix objects,
        or None for an invalid combination of merge and spatial method.
    """
    # Initiate empty list
    use_list = []

    # For averaging features over directions, keep the original run length
    # matrices and only tag the copies with the merge method.
    if merge_method == "average" and spatial_method in ["2d", "3d"]:
        # Make copy of rlm_list
        for rlm in rlm_list:
            use_list += [rlm._copy()]

        # Set merge method to average
        for rlm in use_list:
            rlm.merge_method = "average"

    # Merge rlms within each slice
    elif merge_method == "slice_merge" and spatial_method == "2d":
        # Find slice ids.
        # BUG FIX: RunLengthMatrix stores the slice index as ``img_slice``;
        # the original read ``rlm.slice``, which raised AttributeError.
        slice_id = []
        for rlm in rlm_list:
            slice_id += [rlm.img_slice]

        # Iterate over unique slice ids
        for ii_slice in np.unique(slice_id):
            # BUG FIX: np.squeeze collapses a single match into a 0-d array,
            # which is not iterable; keep the 1-d index array from np.where.
            slice_rlm_id = np.where(np.asarray(slice_id) == ii_slice)[0]

            # Select all matrices within the slice
            sel_matrix_list = []
            for rlm_id in slice_rlm_id:
                sel_matrix_list += [rlm_list[rlm_id].matrix]

            # Check if any matrix has been created for the currently selected slice
            if is_list_all_none(sel_matrix_list):
                # No matrix was created: add an empty placeholder
                use_list += [RunLengthMatrix(direction=None,
                                             direction_id=None,
                                             spatial_method=spatial_method,
                                             img_slice=ii_slice,
                                             merge_method=merge_method,
                                             matrix=None,
                                             n_v=0.0)]
            else:
                # Merge matrices within the slice by summing run counts
                merge_rlm = pd.concat(sel_matrix_list, axis=0)
                merge_rlm = merge_rlm.groupby(by=["i", "r"]).sum().reset_index()

                # Update the number of voxels within the merged slice
                merge_n_v = 0.0
                for rlm_id in slice_rlm_id:
                    merge_n_v += rlm_list[rlm_id].n_v

                # Create new run length matrix
                use_list += [RunLengthMatrix(direction=None,
                                             direction_id=None,
                                             spatial_method=spatial_method,
                                             img_slice=ii_slice,
                                             merge_method=merge_method,
                                             matrix=merge_rlm,
                                             n_v=merge_n_v)]

    # Merge rlms sharing a direction across all slices
    elif merge_method == "dir_merge" and spatial_method == "2.5d":
        # Find direction ids
        dir_id = []
        for rlm in rlm_list:
            dir_id += [rlm.direction_id]

        # Iterate over unique direction ids
        for ii_dir in np.unique(dir_id):
            # BUG FIX: see slice_merge branch — avoid 0-d arrays from np.squeeze.
            dir_rlm_id = np.where(np.asarray(dir_id) == ii_dir)[0]

            # Select all matrices with the same direction
            sel_matrix_list = []
            for rlm_id in dir_rlm_id:
                sel_matrix_list += [rlm_list[rlm_id].matrix]

            # Check if any matrix has been created for the currently selected direction
            if is_list_all_none(sel_matrix_list):
                # No matrix was created: add an empty placeholder
                use_list += [RunLengthMatrix(direction=rlm_list[dir_rlm_id[0]].direction,
                                             direction_id=ii_dir,
                                             spatial_method=spatial_method,
                                             img_slice=None,
                                             merge_method=merge_method,
                                             matrix=None,
                                             n_v=0.0)]
            else:
                # Merge matrices with the same direction
                merge_rlm = pd.concat(sel_matrix_list, axis=0)
                merge_rlm = merge_rlm.groupby(by=["i", "r"]).sum().reset_index()

                # Update the number of voxels for the merged direction
                merge_n_v = 0.0
                for rlm_id in dir_rlm_id:
                    merge_n_v += rlm_list[rlm_id].n_v

                # Create new run length matrix
                use_list += [RunLengthMatrix(direction=rlm_list[dir_rlm_id[0]].direction,
                                             direction_id=ii_dir,
                                             spatial_method=spatial_method,
                                             img_slice=None,
                                             merge_method=merge_method,
                                             matrix=merge_rlm,
                                             n_v=merge_n_v)]

    # Merge all rlms into a single representation
    elif merge_method == "vol_merge" and spatial_method in ["2.5d", "3d"]:
        # Select all matrices
        sel_matrix_list = []
        for rlm_id in np.arange(len(rlm_list)):
            sel_matrix_list += [rlm_list[rlm_id].matrix]

        # Check if any matrix has been created
        if is_list_all_none(sel_matrix_list):
            # No matrix was created: add an empty placeholder
            use_list += [RunLengthMatrix(direction=None,
                                         direction_id=None,
                                         spatial_method=spatial_method,
                                         img_slice=None,
                                         merge_method=merge_method,
                                         matrix=None,
                                         n_v=0.0)]
        else:
            # Merge run length matrices
            merge_rlm = pd.concat(sel_matrix_list, axis=0)
            merge_rlm = merge_rlm.groupby(by=["i", "r"]).sum().reset_index()

            # Update the number of voxels
            merge_n_v = 0.0
            for rlm_id in np.arange(len(rlm_list)):
                merge_n_v += rlm_list[rlm_id].n_v

            # Create new run length matrix
            use_list += [RunLengthMatrix(direction=None,
                                         direction_id=None,
                                         spatial_method=spatial_method,
                                         img_slice=None,
                                         merge_method=merge_method,
                                         matrix=merge_rlm,
                                         n_v=merge_n_v)]

    else:
        # Invalid combination of merge and spatial method
        use_list = None

    # Return the new rlm list to the calling function
    return use_list
|
|
439
|
+
|
|
440
|
+
class RunLengthMatrix:
|
|
441
|
+
"""Class that contains a single run length matrix.
|
|
442
|
+
|
|
443
|
+
Note:
|
|
444
|
+
This code was adapted from the in-house radiomics software created at
|
|
445
|
+
OncoRay, Dresden, Germany.
|
|
446
|
+
|
|
447
|
+
Args:
|
|
448
|
+
direction (ndarray): Direction along which neighbouring voxels are found.
|
|
449
|
+
direction_id (int): Direction index to identify unique direction vectors.
|
|
450
|
+
spatial_method (str): Spatial method used to calculate the co-occurrence
|
|
451
|
+
matrix: "2d", "2.5d" or "3d".
|
|
452
|
+
img_slice (ndarray, optional): Corresponding slice index (only if the
|
|
453
|
+
co-occurrence matrix corresponds to a 2d image slice).
|
|
454
|
+
merge_method (str, optional): Method for merging the co-occurrence matrix
|
|
455
|
+
with other co-occurrence matrices.
|
|
456
|
+
matrix (pandas.DataFrame, optional): The actual co-occurrence matrix in
|
|
457
|
+
sparse format (row, column, count).
|
|
458
|
+
n_v (int, optional): The number of voxels in the volume.
|
|
459
|
+
|
|
460
|
+
Attributes:
|
|
461
|
+
direction (ndarray): Direction along which neighbouring voxels are found.
|
|
462
|
+
direction_id (int): Direction index to identify unique direction vectors.
|
|
463
|
+
spatial_method (str): Spatial method used to calculate the co-occurrence
|
|
464
|
+
matrix: "2d", "2.5d" or "3d".
|
|
465
|
+
img_slice (ndarray): Corresponding slice index (only if the co-occurrence
|
|
466
|
+
matrix corresponds to a 2d image slice).
|
|
467
|
+
merge_method (str): Method for merging the co-occurrence matrix with other
|
|
468
|
+
co-occurrence matrices.
|
|
469
|
+
matrix (pandas.DataFrame): The actual co-occurrence matrix in sparse format
|
|
470
|
+
(row, column, count).
|
|
471
|
+
n_v (int): The number of voxels in the volume.
|
|
472
|
+
"""
|
|
473
|
+
|
|
474
|
+
def __init__(self,
|
|
475
|
+
direction: np.ndarray,
|
|
476
|
+
direction_id: int,
|
|
477
|
+
spatial_method: str,
|
|
478
|
+
img_slice: np.ndarray=None,
|
|
479
|
+
merge_method: str=None,
|
|
480
|
+
matrix: pd.DataFrame=None,
|
|
481
|
+
n_v: int=None) -> None:
|
|
482
|
+
"""
|
|
483
|
+
Initialising function for a new run length matrix
|
|
484
|
+
"""
|
|
485
|
+
|
|
486
|
+
# Direction and slice for which the current matrix is extracted
|
|
487
|
+
self.direction = direction
|
|
488
|
+
self.direction_id = direction_id
|
|
489
|
+
self.img_slice = img_slice
|
|
490
|
+
|
|
491
|
+
# Spatial analysis method (2d, 2.5d, 3d) and merge method (average, slice_merge, dir_merge, vol_merge)
|
|
492
|
+
self.spatial_method = spatial_method
|
|
493
|
+
|
|
494
|
+
# Place holders
|
|
495
|
+
self.merge_method = merge_method
|
|
496
|
+
self.matrix = matrix
|
|
497
|
+
self.n_v = n_v
|
|
498
|
+
|
|
499
|
+
def _copy(self):
|
|
500
|
+
"""Returns a copy of the RunLengthMatrix object."""
|
|
501
|
+
|
|
502
|
+
return deepcopy(self)
|
|
503
|
+
|
|
504
|
+
def _set_empty(self):
|
|
505
|
+
"""Creates an empty RunLengthMatrix"""
|
|
506
|
+
self.n_v = 0
|
|
507
|
+
self.matrix = None
|
|
508
|
+
|
|
509
|
+
    def calculate_rlm_matrix(self,
                             df_img: pd.DataFrame,
                             img_dims: np.ndarray,
                             dist_weight_norm: str) -> None:
        """Function that calculates a run length matrix for the settings provided
        during initialisation and the input image.

        Args:
            df_img (pandas.DataFrame): Data table containing image intensities, x, y and z coordinates,
                and mask labels corresponding to voxels in the volume.
            img_dims (ndarray, List[float]): Dimensions of the image volume.
            dist_weight_norm (str): Norm for distance weighting. Weighting is only
                performed if this parameter is either "manhattan", "euclidean" or "chebyshev";
                any other value silently disables weighting.

        Returns:
            None. Assigns the created image table (rlm matrix, sparse
            (i, r, n) format) to the `matrix` attribute.

        Raises:
            ValueError:
                If `self.spatial_method` is not "2d", "2.5d" or "3d".
        """
        # No voxel table at all: record an empty matrix and stop.
        if df_img is None:
            self._set_empty()
            return

        # Check if the roi contains any masked voxels. If this is not the case, don't construct the glrlm.
        if not np.any(df_img.roi_int_mask):
            self._set_empty()
            return

        # Create local copies of the image table. For 2d/2.5d, keep only the
        # voxels of this object's slice and re-index them as a standalone
        # single-slice volume (z forced to 0).
        if self.spatial_method == "3d":
            df_rlm = deepcopy(df_img)
        elif self.spatial_method in ["2d", "2.5d"]:
            df_rlm = deepcopy(df_img[df_img.z == self.img_slice])
            df_rlm["index_id"] = np.arange(0, len(df_rlm))
            df_rlm["z"] = 0
            df_rlm = df_rlm.reset_index(drop=True)
        else:
            raise ValueError("The spatial method for grey level run length matrices \
                should be one of \"2d\", \"2.5d\" or \"3d\".")

        # Set grey level of voxels outside ROI to NaN so they break runs.
        df_rlm.loc[df_rlm.roi_int_mask == False, "g"] = np.nan

        # Set the number of voxels
        self.n_v = np.sum(df_rlm.roi_int_mask.values)

        # Canonicalise the direction: flip it if its flat-index step would be
        # negative, so runs are always traced with a positive index stride.
        if (self.direction[2] + self.direction[1] * img_dims[2] + self.direction[0] * img_dims[2] * img_dims[1]) >= 0:
            curr_dir = self.direction
        else:
            curr_dir = - self.direction

        # Step size in the flattened (row-major) voxel index for one move
        # along the current direction.
        ind_update = curr_dir[2] + curr_dir[1] * img_dims[2] + curr_dir[0] * img_dims[2] * img_dims[1]

        # The flat-index step is also the number of interleaved index
        # sequences ("segments") that together cover all voxels.
        n_seg = ind_update  # Number of segments

        # A zero step means the direction cannot produce any runs.
        if n_seg == 0:
            self._set_empty()
            return

        seg_len = (len(df_rlm) - 1) // ind_update + 1  # Nominal segment length
        trans_seg_len = np.tile([seg_len - 1], reps=n_seg)  # Initial segment length for transitions (nominal length-1)
        full_len_trans = n_seg - n_seg*seg_len + len(df_rlm)  # Number of full segments
        trans_seg_len[0:full_len_trans] += 1  # Update full segments

        # Create the transition vector: the order in which voxels are visited
        # so that consecutive entries are `ind_update` apart in the flat index.
        trans_vec = np.tile(np.arange(start=0, stop=len(df_rlm), step=ind_update), reps=ind_update)
        trans_vec += np.repeat(np.arange(start=0, stop=n_seg), repeats=seg_len)
        trans_vec = trans_vec[trans_vec < len(df_rlm)]

        # Flat index of the voxel one step along the direction from each voxel;
        # coord2index returns a negative value for out-of-volume coordinates.
        to_index = coord2index(x=df_rlm.x.values + curr_dir[0],
                               y=df_rlm.y.values + curr_dir[1],
                               z=df_rlm.z.values + curr_dir[2],
                               dims=img_dims)

        # Positions (in traversal order) whose next step leaves the volume:
        # these terminate a run.
        end_ind = np.nonzero(to_index[trans_vec] < 0)[0]  # Find transitions that form an endpoints

        # Get an interspersed array of intensities. Runs are broken up by np.nan
        # (both at volume edges and, via the NaN grey levels, outside the ROI).
        intensities = np.insert(df_rlm.g.values[trans_vec], end_ind + 1, np.nan)

        # Run-length encode the intensity sequence: start/end index per run.
        rle_end = np.array(np.append(np.where(intensities[1:] != intensities[:-1]), len(intensities) - 1))
        rle_start = np.cumsum(np.append(0, np.diff(np.append(-1, rle_end))))[:-1]

        # Build the sparse matrix: grey level ``i``, run length ``r`` and,
        # after grouping, the count ``n`` of runs with that (i, r) pair.
        # NaN runs (separators / non-ROI voxels) are dropped.
        df_rltable = pd.DataFrame({"i": intensities[rle_start],
                                   "r": rle_end - rle_start + 1})
        df_rltable = df_rltable.loc[~np.isnan(df_rltable.i), :]
        df_rltable = df_rltable.groupby(by=["i", "r"]).size().reset_index(name="n")

        # Optional distance weighting: divide run counts by the norm of the
        # (original, unflipped) direction vector.
        if dist_weight_norm in ["manhattan", "euclidean", "chebyshev"]:
            if dist_weight_norm == "manhattan":
                weight = sum(abs(self.direction))
            elif dist_weight_norm == "euclidean":
                weight = np.sqrt(sum(np.power(self.direction, 2.0)))
            elif dist_weight_norm == "chebyshev":
                weight = np.max(abs(self.direction))
            df_rltable.n /= weight

        # Add matrix to object
        self.matrix = df_rltable
|
|
619
|
+
|
|
620
|
+
def calculate_rlm_features(self) -> pd.DataFrame:
    """Computes run length matrix features for the current run length matrix.

    Features follow the IBSI1 definitions for the grey level run length
    matrix (GLRLM) family and are returned as a single-row data frame.

    Returns:
        pandas.DataFrame: Single-row data frame with one column per feature.
            All values are NaN when no valid matrix is available.
    """
    # Create feature table, initialised to NaN.
    feat_names = ["Frlm_sre",
                  "Frlm_lre",
                  "Frlm_lgre",
                  "Frlm_hgre",
                  "Frlm_srlge",
                  "Frlm_srhge",
                  "Frlm_lrlge",
                  "Frlm_lrhge",
                  "Frlm_glnu",
                  "Frlm_glnu_norm",
                  "Frlm_rlnu",
                  "Frlm_rlnu_norm",
                  "Frlm_r_perc",
                  "Frlm_gl_var",
                  "Frlm_rl_var",
                  "Frlm_rl_entr"]

    df_feat = pd.DataFrame(np.full(shape=(1, len(feat_names)), fill_value=np.nan))
    df_feat.columns = feat_names

    # Don't return data for empty slices or slices without a good matrix.
    # (Both "no matrix" and "empty matrix" yield the same NaN table.)
    if self.matrix is None or len(self.matrix) == 0:
        return df_feat

    # Create local copy of the run length matrix and set column names:
    # i = grey level, j = run length, rij = run count.
    df_rij = deepcopy(self.matrix)
    df_rij.columns = ["i", "j", "rij"]

    # Marginal sum over run lengths for each grey level.
    # NOTE: use the built-in "sum" aggregation; passing np.sum to .agg()
    # is deprecated in recent pandas releases.
    df_ri = df_rij.groupby(by="i")["rij"].sum().reset_index().rename(columns={"rij": "ri"})

    # Marginal sum over grey levels for each run length.
    df_rj = df_rij.groupby(by="j")["rij"].sum().reset_index().rename(columns={"rij": "rj"})

    # Constant definitions
    n_s = np.sum(df_rij.rij) * 1.0  # Number of runs
    n_v = self.n_v * 1.0  # Number of voxels

    ##############################################
    ######          glrlm features          ######
    ##############################################
    # Short runs emphasis
    df_feat.loc[0, "Frlm_sre"] = np.sum(df_rj.rj / df_rj.j ** 2.0) / n_s

    # Long runs emphasis
    df_feat.loc[0, "Frlm_lre"] = np.sum(df_rj.rj * df_rj.j ** 2.0) / n_s

    # Grey level non-uniformity
    df_feat.loc[0, "Frlm_glnu"] = np.sum(df_ri.ri ** 2.0) / n_s

    # Grey level non-uniformity, normalised
    df_feat.loc[0, "Frlm_glnu_norm"] = np.sum(df_ri.ri ** 2.0) / n_s ** 2.0

    # Run length non-uniformity
    df_feat.loc[0, "Frlm_rlnu"] = np.sum(df_rj.rj ** 2.0) / n_s

    # Run length non-uniformity, normalised
    df_feat.loc[0, "Frlm_rlnu_norm"] = np.sum(df_rj.rj ** 2.0) / n_s ** 2.0

    # Run percentage
    df_feat.loc[0, "Frlm_r_perc"] = n_s / n_v

    # Low grey level run emphasis
    df_feat.loc[0, "Frlm_lgre"] = np.sum(df_ri.ri / df_ri.i ** 2.0) / n_s

    # High grey level run emphasis
    df_feat.loc[0, "Frlm_hgre"] = np.sum(df_ri.ri * df_ri.i ** 2.0) / n_s

    # Short run low grey level emphasis
    df_feat.loc[0, "Frlm_srlge"] = np.sum(df_rij.rij / (df_rij.i * df_rij.j) ** 2.0) / n_s

    # Short run high grey level emphasis
    df_feat.loc[0, "Frlm_srhge"] = np.sum(df_rij.rij * df_rij.i ** 2.0 / df_rij.j ** 2.0) / n_s

    # Long run low grey level emphasis
    df_feat.loc[0, "Frlm_lrlge"] = np.sum(df_rij.rij * df_rij.j ** 2.0 / df_rij.i ** 2.0) / n_s

    # Long run high grey level emphasis
    df_feat.loc[0, "Frlm_lrhge"] = np.sum(df_rij.rij * df_rij.i ** 2.0 * df_rij.j ** 2.0) / n_s

    # Grey level variance
    mu = np.sum(df_rij.rij * df_rij.i) / n_s
    df_feat.loc[0, "Frlm_gl_var"] = np.sum((df_rij.i - mu) ** 2.0 * df_rij.rij) / n_s

    # Run length variance
    mu = np.sum(df_rij.rij * df_rij.j) / n_s
    df_feat.loc[0, "Frlm_rl_var"] = np.sum((df_rij.j - mu) ** 2.0 * df_rij.rij) / n_s

    # Run length entropy (original comment said "zone size" — GLSZM copy-paste)
    df_feat.loc[0, "Frlm_rl_entr"] = - np.sum(df_rij.rij * np.log2(df_rij.rij / n_s)) / n_s

    return df_feat
|
|
725
|
+
|
|
726
|
+
def calculate_feature(self,
                      name: str) -> pd.DataFrame:
    """Computes a single run length matrix feature for the current run length matrix.

    Args:
        name (str): Short feature name without the "Frlm_" prefix, one of:
            sre, lre, glnu, glnu_norm, rlnu, rlnu_norm, r_perc, lgre, hgre,
            srlge, srhge, lrlge, lrhge, gl_var, rl_var, rl_entr.

    Returns:
        pandas.DataFrame: Single-cell data frame (index "value") holding the
            requested feature. Empty when no valid matrix is available or
            the feature name is unknown.
    """
    df_feat = pd.DataFrame(np.full(shape=(0, 0), fill_value=np.nan))

    # Don't return data for empty slices or slices without a good matrix.
    if self.matrix is None or len(self.matrix) == 0:
        return df_feat

    # Create local copy of the run length matrix and set column names:
    # i = grey level, j = run length, rij = run count.
    df_rij = deepcopy(self.matrix)
    df_rij.columns = ["i", "j", "rij"]

    # Marginal sum over run lengths for each grey level.
    # NOTE: use the built-in "sum" aggregation; passing np.sum to .agg()
    # is deprecated in recent pandas releases.
    df_ri = df_rij.groupby(by="i")["rij"].sum().reset_index().rename(columns={"rij": "ri"})

    # Marginal sum over grey levels for each run length.
    df_rj = df_rij.groupby(by="j")["rij"].sum().reset_index().rename(columns={"rij": "rj"})

    # Constant definitions
    n_s = np.sum(df_rij.rij) * 1.0  # Number of runs
    n_v = self.n_v * 1.0  # Number of voxels

    # Calculation of the requested glrlm feature
    # Short runs emphasis
    if name == "sre":
        df_feat.loc["value", "sre"] = np.sum(df_rj.rj / df_rj.j ** 2.0) / n_s
    # Long runs emphasis
    elif name == "lre":
        df_feat.loc["value", "lre"] = np.sum(df_rj.rj * df_rj.j ** 2.0) / n_s
    # Grey level non-uniformity
    elif name == "glnu":
        df_feat.loc["value", "glnu"] = np.sum(df_ri.ri ** 2.0) / n_s
    # Grey level non-uniformity, normalised
    elif name == "glnu_norm":
        df_feat.loc["value", "glnu_norm"] = np.sum(df_ri.ri ** 2.0) / n_s ** 2.0
    # Run length non-uniformity
    elif name == "rlnu":
        df_feat.loc["value", "rlnu"] = np.sum(df_rj.rj ** 2.0) / n_s
    # Run length non-uniformity, normalised
    elif name == "rlnu_norm":
        df_feat.loc["value", "rlnu_norm"] = np.sum(df_rj.rj ** 2.0) / n_s ** 2.0
    # Run percentage
    elif name == "r_perc":
        df_feat.loc["value", "r_perc"] = n_s / n_v
    # Low grey level run emphasis
    elif name == "lgre":
        df_feat.loc["value", "lgre"] = np.sum(df_ri.ri / df_ri.i ** 2.0) / n_s
    # High grey level run emphasis
    elif name == "hgre":
        df_feat.loc["value", "hgre"] = np.sum(df_ri.ri * df_ri.i ** 2.0) / n_s
    # Short run low grey level emphasis
    elif name == "srlge":
        df_feat.loc["value", "srlge"] = np.sum(df_rij.rij / (df_rij.i * df_rij.j) ** 2.0) / n_s
    # Short run high grey level emphasis
    elif name == "srhge":
        df_feat.loc["value", "srhge"] = np.sum(df_rij.rij * df_rij.i ** 2.0 / df_rij.j ** 2.0) / n_s
    # Long run low grey level emphasis
    elif name == "lrlge":
        df_feat.loc["value", "lrlge"] = np.sum(df_rij.rij * df_rij.j ** 2.0 / df_rij.i ** 2.0) / n_s
    # Long run high grey level emphasis
    elif name == "lrhge":
        df_feat.loc["value", "lrhge"] = np.sum(df_rij.rij * df_rij.i ** 2.0 * df_rij.j ** 2.0) / n_s
    # Grey level variance
    elif name == "gl_var":
        mu = np.sum(df_rij.rij * df_rij.i) / n_s
        df_feat.loc["value", "gl_var"] = np.sum((df_rij.i - mu) ** 2.0 * df_rij.rij) / n_s
    # Run length variance
    elif name == "rl_var":
        mu = np.sum(df_rij.rij * df_rij.j) / n_s
        df_feat.loc["value", "rl_var"] = np.sum((df_rij.j - mu) ** 2.0 * df_rij.rij) / n_s
    # Run length entropy
    elif name == "rl_entr":
        df_feat.loc["value", "rl_entr"] = - np.sum(df_rij.rij * np.log2(df_rij.rij / n_s)) / n_s
    else:
        # Typo fixed in the listed option names: "glnu_normn" -> "glnu_norm".
        print("ERROR: Wrong arg. Use ones from list : (sre, lre, glnu, glnu_norm, rlnu \
        rlnu_norm, r_perc, lgre, hgre, srlge, srhge, lrlge, lrhge, gl_var, rl_var, rl_entr)")

    return df_feat
|
|
815
|
+
|
|
816
|
+
def _parse_feature_names(self) -> str:
|
|
817
|
+
""""Adds additional settings-related identifiers to each feature.
|
|
818
|
+
Not used currently, as the use of different settings for the
|
|
819
|
+
run length matrix is not supported.
|
|
820
|
+
"""
|
|
821
|
+
parse_str = ""
|
|
822
|
+
|
|
823
|
+
# Add spatial method
|
|
824
|
+
if self.spatial_method is not None:
|
|
825
|
+
parse_str += "_" + self.spatial_method
|
|
826
|
+
|
|
827
|
+
# Add merge method
|
|
828
|
+
if self.merge_method is not None:
|
|
829
|
+
if self.merge_method == "average":
|
|
830
|
+
parse_str += "_avg"
|
|
831
|
+
if self.merge_method == "slice_merge":
|
|
832
|
+
parse_str += "_s_mrg"
|
|
833
|
+
if self.merge_method == "dir_merge":
|
|
834
|
+
parse_str += "_d_mrg"
|
|
835
|
+
if self.merge_method == "vol_merge":
|
|
836
|
+
parse_str += "_v_mrg"
|
|
837
|
+
|
|
838
|
+
return parse_str
|
|
839
|
+
|
|
840
|
+
def sre(upd_list: np.ndarray) -> float:
    """Compute the Short runs emphasis feature from the run length matrices list.

    This feature refers to "Frlm_sre" (ID = 22OV) in
    the `IBSI1 reference manual <https://arxiv.org/pdf/1612.07003.pdf>`__.

    Args:
        upd_list (ndarray): Run length matrices computed and merged according to the given method.

    Returns:
        float: Value of the Short runs emphasis feature, or None when no
            matrices are available.
    """
    # Initialise the result so the function returns None instead of raising
    # UnboundLocalError when no matrices are available.
    sre = None

    # Skip if no matrices are available
    # (due to illegal combinations of merge and spatial methods).
    if upd_list is not None:
        # Compute the feature for every run length matrix.
        sre_run_list = [rlm.calculate_feature("sre") for rlm in upd_list]

        # Average the feature values over all matrices.
        df_sre = pd.concat(sre_run_list, axis=0).mean(axis=0, skipna=True).to_frame().transpose()

        # Extract the scalar feature value from the single-row table.
        sre = list(df_sre.to_dict(orient="records")[0].values())[0]

    return sre
|
|
868
|
+
|
|
869
|
+
def lre(upd_list: np.ndarray) -> float:
    """Compute the Long runs emphasis feature from the run length matrices list.

    This feature refers to "Frlm_lre" (ID = W4KF) in
    the `IBSI1 reference manual <https://arxiv.org/pdf/1612.07003.pdf>`__.

    Args:
        upd_list (ndarray): Run length matrices computed and merged according to the given method.

    Returns:
        float: Value of the Long runs emphasis feature, or None when no
            matrices are available.
    """
    # Initialise the result so the function returns None instead of raising
    # UnboundLocalError when no matrices are available.
    lre = None

    # Skip if no matrices are available
    # (due to illegal combinations of merge and spatial methods).
    if upd_list is not None:
        # Compute the feature for every run length matrix.
        lre_run_list = [rlm.calculate_feature("lre") for rlm in upd_list]

        # Average the feature values over all matrices.
        df_lre = pd.concat(lre_run_list, axis=0).mean(axis=0, skipna=True).to_frame().transpose()

        # Extract the scalar feature value from the single-row table.
        lre = list(df_lre.to_dict(orient="records")[0].values())[0]

    return lre
|
|
897
|
+
|
|
898
|
+
def glnu(upd_list: np.ndarray) -> float:
    """Compute the Grey level non-uniformity feature from the run length matrices list.

    This feature refers to "Frlm_glnu" (ID = R5YN) in
    the `IBSI1 reference manual <https://arxiv.org/pdf/1612.07003.pdf>`__.

    Args:
        upd_list (ndarray): Run length matrices computed and merged according to the given method.

    Returns:
        float: Value of the Grey level non-uniformity feature, or None when no
            matrices are available.
    """
    # Initialise the result so the function returns None instead of raising
    # UnboundLocalError when no matrices are available.
    glnu = None

    # Skip if no matrices are available
    # (due to illegal combinations of merge and spatial methods).
    if upd_list is not None:
        # Compute the feature for every run length matrix.
        glnu_run_list = [rlm.calculate_feature("glnu") for rlm in upd_list]

        # Average the feature values over all matrices.
        df_glnu = pd.concat(glnu_run_list, axis=0).mean(axis=0, skipna=True).to_frame().transpose()

        # Extract the scalar feature value from the single-row table.
        glnu = list(df_glnu.to_dict(orient="records")[0].values())[0]

    return glnu
|
|
926
|
+
|
|
927
|
+
def glnu_norm(upd_list: np.ndarray) -> float:
    """Compute the Grey level non-uniformity normalised feature from the run length matrices list.

    This feature refers to "Frlm_glnu_norm" (ID = OVBL) in
    the `IBSI1 reference manual <https://arxiv.org/pdf/1612.07003.pdf>`__.

    Args:
        upd_list (ndarray): Run length matrices computed and merged according to the given method.

    Returns:
        float: Value of the Grey level non-uniformity normalised feature, or
            None when no matrices are available.
    """
    # Initialise the result so the function returns None instead of raising
    # UnboundLocalError when no matrices are available.
    glnu_norm = None

    # Skip if no matrices are available
    # (due to illegal combinations of merge and spatial methods).
    if upd_list is not None:
        # Compute the feature for every run length matrix.
        glnu_norm_run_list = [rlm.calculate_feature("glnu_norm") for rlm in upd_list]

        # Average the feature values over all matrices.
        df_glnu_norm = pd.concat(glnu_norm_run_list, axis=0).mean(axis=0, skipna=True).to_frame().transpose()

        # Extract the scalar feature value from the single-row table.
        glnu_norm = list(df_glnu_norm.to_dict(orient="records")[0].values())[0]

    return glnu_norm
|
|
955
|
+
|
|
956
|
+
def rlnu(upd_list: np.ndarray) -> float:
    """Compute the Run length non-uniformity feature from the run length matrices list.

    This feature refers to "Frlm_rlnu" (ID = W92Y) in
    the `IBSI1 reference manual <https://arxiv.org/pdf/1612.07003.pdf>`__.

    Args:
        upd_list (ndarray): Run length matrices computed and merged according to the given method.

    Returns:
        float: Value of the Run length non-uniformity feature, or None when no
            matrices are available.
    """
    # Initialise the result so the function returns None instead of raising
    # UnboundLocalError when no matrices are available.
    rlnu = None

    # Skip if no matrices are available
    # (due to illegal combinations of merge and spatial methods).
    if upd_list is not None:
        # Compute the feature for every run length matrix.
        rlnu_run_list = [rlm.calculate_feature("rlnu") for rlm in upd_list]

        # Average the feature values over all matrices.
        df_rlnu = pd.concat(rlnu_run_list, axis=0).mean(axis=0, skipna=True).to_frame().transpose()

        # Extract the scalar feature value from the single-row table.
        rlnu = list(df_rlnu.to_dict(orient="records")[0].values())[0]

    return rlnu
|
|
984
|
+
|
|
985
|
+
def rlnu_norm(upd_list: np.ndarray) -> float:
    """Compute the Run length non-uniformity normalised feature from the run length matrices list.

    This feature refers to "Frlm_rlnu_norm" (ID = IC23) in
    the `IBSI1 reference manual <https://arxiv.org/pdf/1612.07003.pdf>`__.

    Args:
        upd_list (ndarray): Run length matrices computed and merged according to the given method.

    Returns:
        float: Value of the Run length non-uniformity normalised feature, or
            None when no matrices are available.
    """
    # Initialise the result so the function returns None instead of raising
    # UnboundLocalError when no matrices are available.
    rlnu_norm = None

    # Skip if no matrices are available
    # (due to illegal combinations of merge and spatial methods).
    if upd_list is not None:
        # Compute the feature for every run length matrix.
        rlnu_norm_run_list = [rlm.calculate_feature("rlnu_norm") for rlm in upd_list]

        # Average the feature values over all matrices.
        df_rlnu_norm = pd.concat(rlnu_norm_run_list, axis=0).mean(axis=0, skipna=True).to_frame().transpose()

        # Extract the scalar feature value from the single-row table.
        rlnu_norm = list(df_rlnu_norm.to_dict(orient="records")[0].values())[0]

    return rlnu_norm
|
|
1013
|
+
|
|
1014
|
+
def r_perc(upd_list: np.ndarray) -> float:
    """Compute the Run percentage feature from the run length matrices list.

    This feature refers to "Frlm_r_perc" (ID = 9ZK5) in
    the `IBSI1 reference manual <https://arxiv.org/pdf/1612.07003.pdf>`__.

    Args:
        upd_list (ndarray): Run length matrices computed and merged according to the given method.

    Returns:
        float: Value of the Run percentage feature, or None when no
            matrices are available.
    """
    # Initialise the result so the function returns None instead of raising
    # UnboundLocalError when no matrices are available.
    r_perc = None

    # Skip if no matrices are available
    # (due to illegal combinations of merge and spatial methods).
    if upd_list is not None:
        # Compute the feature for every run length matrix.
        r_perc_run_list = [rlm.calculate_feature("r_perc") for rlm in upd_list]

        # Average the feature values over all matrices.
        df_r_perc = pd.concat(r_perc_run_list, axis=0).mean(axis=0, skipna=True).to_frame().transpose()

        # Extract the scalar feature value from the single-row table.
        r_perc = list(df_r_perc.to_dict(orient="records")[0].values())[0]

    return r_perc
|
|
1042
|
+
|
|
1043
|
+
def lgre(upd_list: np.ndarray) -> float:
    """Compute the Low grey level run emphasis feature from the run length matrices list.

    This feature refers to "Frlm_lgre" (ID = V3SW) in
    the `IBSI1 reference manual <https://arxiv.org/pdf/1612.07003.pdf>`__.

    Args:
        upd_list (ndarray): Run length matrices computed and merged according to the given method.

    Returns:
        float: Value of the Low grey level run emphasis feature, or None when
            no matrices are available.
    """
    # Initialise the result so the function returns None instead of raising
    # UnboundLocalError when no matrices are available.
    lgre = None

    # Skip if no matrices are available
    # (due to illegal combinations of merge and spatial methods).
    if upd_list is not None:
        # Compute the feature for every run length matrix.
        lgre_run_list = [rlm.calculate_feature("lgre") for rlm in upd_list]

        # Average the feature values over all matrices.
        df_lgre = pd.concat(lgre_run_list, axis=0).mean(axis=0, skipna=True).to_frame().transpose()

        # Extract the scalar feature value from the single-row table.
        lgre = list(df_lgre.to_dict(orient="records")[0].values())[0]

    return lgre
|
|
1071
|
+
|
|
1072
|
+
def hgre(upd_list: np.ndarray) -> float:
    """Compute the High grey level run emphasis feature from the run length matrices list.

    This feature refers to "Frlm_hgre" (ID = G3QZ) in
    the `IBSI1 reference manual <https://arxiv.org/pdf/1612.07003.pdf>`__.

    Args:
        upd_list (ndarray): Run length matrices computed and merged according to the given method.

    Returns:
        float: Value of the High grey level run emphasis feature, or None when
            no matrices are available.
    """
    # Initialise the result so the function returns None instead of raising
    # UnboundLocalError when no matrices are available.
    hgre = None

    # Skip if no matrices are available
    # (due to illegal combinations of merge and spatial methods).
    if upd_list is not None:
        # Compute the feature for every run length matrix.
        hgre_run_list = [rlm.calculate_feature("hgre") for rlm in upd_list]

        # Average the feature values over all matrices.
        df_hgre = pd.concat(hgre_run_list, axis=0).mean(axis=0, skipna=True).to_frame().transpose()

        # Extract the scalar feature value from the single-row table.
        hgre = list(df_hgre.to_dict(orient="records")[0].values())[0]

    return hgre
|
|
1100
|
+
|
|
1101
|
+
def srlge(upd_list: np.ndarray) -> float:
    """Compute the Short run low grey level emphasis feature from the run length matrices list.

    This feature refers to "Frlm_srlge" (ID = HTZT) in
    the `IBSI1 reference manual <https://arxiv.org/pdf/1612.07003.pdf>`__.

    Args:
        upd_list (ndarray): Run length matrices computed and merged according to the given method.

    Returns:
        float: Value of the Short run low grey level emphasis feature, or None
            when no matrices are available.
    """
    # Initialise the result so the function returns None instead of raising
    # UnboundLocalError when no matrices are available.
    srlge = None

    # Skip if no matrices are available
    # (due to illegal combinations of merge and spatial methods).
    if upd_list is not None:
        # Compute the feature for every run length matrix.
        srlge_run_list = [rlm.calculate_feature("srlge") for rlm in upd_list]

        # Average the feature values over all matrices.
        df_srlge = pd.concat(srlge_run_list, axis=0).mean(axis=0, skipna=True).to_frame().transpose()

        # Extract the scalar feature value from the single-row table.
        srlge = list(df_srlge.to_dict(orient="records")[0].values())[0]

    return srlge
|
|
1129
|
+
|
|
1130
|
+
def srhge(upd_list: np.ndarray) -> float:
    """Compute the Short run high grey level emphasis feature from the run length matrices list.

    This feature refers to "Frlm_srhge" (ID = GD3A) in
    the `IBSI1 reference manual <https://arxiv.org/pdf/1612.07003.pdf>`__.

    Args:
        upd_list (ndarray): Run length matrices computed and merged according to the given method.

    Returns:
        float: Value of the Short run high grey level emphasis feature, or
            None when no matrices are available.
    """
    # Initialise the result so the function returns None instead of raising
    # UnboundLocalError when no matrices are available.
    srhge = None

    # Skip if no matrices are available
    # (due to illegal combinations of merge and spatial methods).
    if upd_list is not None:
        # Compute the feature for every run length matrix.
        srhge_run_list = [rlm.calculate_feature("srhge") for rlm in upd_list]

        # Average the feature values over all matrices.
        df_srhge = pd.concat(srhge_run_list, axis=0).mean(axis=0, skipna=True).to_frame().transpose()

        # Extract the scalar feature value from the single-row table.
        srhge = list(df_srhge.to_dict(orient="records")[0].values())[0]

    return srhge
|
|
1158
|
+
|
|
1159
|
+
def lrlge(upd_list: np.ndarray) -> float:
    """Compute the Long run low grey level emphasis feature from the run length matrices list.

    This feature refers to "Frlm_lrlge" (ID = IVPO) in
    the `IBSI1 reference manual <https://arxiv.org/pdf/1612.07003.pdf>`__.

    Args:
        upd_list (ndarray): Run length matrices computed and merged according to the given method.

    Returns:
        float: Value of the Long run low grey level emphasis feature, or None
            when no matrices are available.
    """
    # Initialise the result so the function returns None instead of raising
    # UnboundLocalError when no matrices are available.
    lrlge = None

    # Skip if no matrices are available
    # (due to illegal combinations of merge and spatial methods).
    if upd_list is not None:
        # Compute the feature for every run length matrix.
        lrlge_run_list = [rlm.calculate_feature("lrlge") for rlm in upd_list]

        # Average the feature values over all matrices.
        df_lrlge = pd.concat(lrlge_run_list, axis=0).mean(axis=0, skipna=True).to_frame().transpose()

        # Extract the scalar feature value from the single-row table.
        lrlge = list(df_lrlge.to_dict(orient="records")[0].values())[0]

    return lrlge
|
|
1187
|
+
|
|
1188
|
+
def lrhge(upd_list: np.ndarray) -> float:
    """Compute the Long run high grey level emphasis feature from the run length matrices list.

    This feature refers to "Frlm_lrhge" (ID = 3KUM) in
    the `IBSI1 reference manual <https://arxiv.org/pdf/1612.07003.pdf>`__.

    Args:
        upd_list (ndarray): Run length matrices computed and merged according to the given method.

    Returns:
        float: Value of the Long run high grey level emphasis feature, or None
            when no matrices are available.
    """
    # Initialise the result so the function returns None instead of raising
    # UnboundLocalError when no matrices are available.
    lrhge = None

    # Skip if no matrices are available
    # (due to illegal combinations of merge and spatial methods).
    if upd_list is not None:
        # Compute the feature for every run length matrix.
        lrhge_run_list = [rlm.calculate_feature("lrhge") for rlm in upd_list]

        # Average the feature values over all matrices.
        df_lrhge = pd.concat(lrhge_run_list, axis=0).mean(axis=0, skipna=True).to_frame().transpose()

        # Extract the scalar feature value from the single-row table.
        lrhge = list(df_lrhge.to_dict(orient="records")[0].values())[0]

    return lrhge
|
|
1216
|
+
|
|
1217
|
+
def gl_var(upd_list: np.ndarray) -> float:
    """Compute the Grey level variance feature from the run length matrices list.

    This feature refers to "Frlm_gl_var" (ID = 8CE5) in
    the `IBSI1 reference manual <https://arxiv.org/pdf/1612.07003.pdf>`__.

    Args:
        upd_list (ndarray): Run length matrices computed and merged according to the given method.

    Returns:
        float: Value of the Grey level variance feature, or None when no
            matrices are available.
    """
    # Initialise the result so the function returns None instead of raising
    # UnboundLocalError when no matrices are available.
    gl_var = None

    # Skip if no matrices are available
    # (due to illegal combinations of merge and spatial methods).
    if upd_list is not None:
        # Compute the feature for every run length matrix.
        gl_var_run_list = [rlm.calculate_feature("gl_var") for rlm in upd_list]

        # Average the feature values over all matrices.
        df_gl_var = pd.concat(gl_var_run_list, axis=0).mean(axis=0, skipna=True).to_frame().transpose()

        # Extract the scalar feature value from the single-row table.
        gl_var = list(df_gl_var.to_dict(orient="records")[0].values())[0]

    return gl_var
|
|
1245
|
+
|
|
1246
|
+
def rl_var(upd_list: np.ndarray) -> float:
    """Compute the Run length variance feature from the run length matrices list.

    This feature refers to "Frlm_rl_var" (ID = SXLW) in
    the `IBSI1 reference manual <https://arxiv.org/pdf/1612.07003.pdf>`__.

    Args:
        upd_list (ndarray): Run length matrices computed and merged according to the given method.

    Returns:
        float: Value of the Run length variance feature, or None when no
            matrices are available.
    """
    # Initialise the result so the function returns None instead of raising
    # UnboundLocalError when no matrices are available.
    rl_var = None

    # Skip if no matrices are available
    # (due to illegal combinations of merge and spatial methods).
    if upd_list is not None:
        # Compute the feature for every run length matrix.
        rl_var_run_list = [rlm.calculate_feature("rl_var") for rlm in upd_list]

        # Average the feature values over all matrices.
        df_rl_var = pd.concat(rl_var_run_list, axis=0).mean(axis=0, skipna=True).to_frame().transpose()

        # Extract the scalar feature value from the single-row table.
        rl_var = list(df_rl_var.to_dict(orient="records")[0].values())[0]

    return rl_var
|
|
1274
|
+
|
|
1275
|
+
def rl_entr(upd_list: np.ndarray) -> float:
    """Compute the Run length entropy feature from the run length matrices list.

    This feature refers to "Frlm_rl_entr" (ID = HJ9O) in
    the `IBSI1 reference manual <https://arxiv.org/pdf/1612.07003.pdf>`__.
    (The original docstring said "Zone size entropy", a GLSZM copy-paste.)

    Args:
        upd_list (ndarray): Run length matrices computed and merged according to the given method.

    Returns:
        float: Value of the Run length entropy feature, or None when no
            matrices are available.
    """
    # Initialise the result so the function returns None instead of raising
    # UnboundLocalError when no matrices are available.
    rl_entr = None

    # Skip if no matrices are available
    # (due to illegal combinations of merge and spatial methods).
    if upd_list is not None:
        # Compute the feature for every run length matrix.
        rl_entr_run_list = [rlm.calculate_feature("rl_entr") for rlm in upd_list]

        # Average the feature values over all matrices.
        df_rl_entr = pd.concat(rl_entr_run_list, axis=0).mean(axis=0, skipna=True).to_frame().transpose()

        # Extract the scalar feature value from the single-row table.
        rl_entr = list(df_rl_entr.to_dict(orient="records")[0].values())[0]

    return rl_entr
|
|
1303
|
+
|
|
1304
|
+
def merge_feature(feat_list: np.ndarray) -> float:
    """Merge a list of single-row feature tables into a single dictionary.

    Args:
        feat_list (ndarray): List of single-row feature data frames to merge.

    Returns:
        float: Dictionary mapping feature names to their values.
    """
    # Concatenate the tables side by side, then keep the one resulting record.
    merged = pd.concat(feat_list, axis=1)
    return merged.to_dict(orient="records")[0]
|