dbdicom 0.2.1__py3-none-any.whl → 0.2.3__py3-none-any.whl
This diff shows the changes between publicly released versions of the package as they appear in their respective public registries. It is provided for informational purposes only.
Potentially problematic release: this version of dbdicom might be problematic.
- dbdicom/__init__.py +4 -3
- dbdicom/create.py +34 -97
- dbdicom/dro.py +174 -0
- dbdicom/ds/dataset.py +29 -3
- dbdicom/ds/types/mr_image.py +18 -7
- dbdicom/extensions/__init__.py +10 -0
- dbdicom/{wrappers → extensions}/dipy.py +191 -205
- dbdicom/extensions/elastix.py +503 -0
- dbdicom/extensions/matplotlib.py +107 -0
- dbdicom/extensions/numpy.py +271 -0
- dbdicom/{wrappers → extensions}/scipy.py +130 -31
- dbdicom/{wrappers → extensions}/skimage.py +1 -1
- dbdicom/extensions/sklearn.py +243 -0
- dbdicom/extensions/vreg.py +1390 -0
- dbdicom/external/dcm4che/bin/emf2sf +57 -57
- dbdicom/manager.py +70 -36
- dbdicom/pipelines.py +66 -0
- dbdicom/record.py +266 -43
- dbdicom/types/instance.py +17 -3
- dbdicom/types/series.py +1900 -404
- dbdicom/utils/image.py +152 -21
- dbdicom/utils/vreg.py +327 -135
- dbdicom-0.2.3.dist-info/METADATA +88 -0
- {dbdicom-0.2.1.dist-info → dbdicom-0.2.3.dist-info}/RECORD +27 -41
- {dbdicom-0.2.1.dist-info → dbdicom-0.2.3.dist-info}/WHEEL +1 -1
- dbdicom/external/__pycache__/__init__.cpython-310.pyc +0 -0
- dbdicom/external/__pycache__/__init__.cpython-37.pyc +0 -0
- dbdicom/external/dcm4che/__pycache__/__init__.cpython-310.pyc +0 -0
- dbdicom/external/dcm4che/__pycache__/__init__.cpython-37.pyc +0 -0
- dbdicom/external/dcm4che/bin/__pycache__/__init__.cpython-310.pyc +0 -0
- dbdicom/external/dcm4che/bin/__pycache__/__init__.cpython-37.pyc +0 -0
- dbdicom/external/dcm4che/lib/linux-x86/libclib_jiio.so +0 -0
- dbdicom/external/dcm4che/lib/linux-x86-64/libclib_jiio.so +0 -0
- dbdicom/external/dcm4che/lib/linux-x86-64/libopencv_java.so +0 -0
- dbdicom/external/dcm4che/lib/solaris-sparc/libclib_jiio.so +0 -0
- dbdicom/external/dcm4che/lib/solaris-sparc/libclib_jiio_vis.so +0 -0
- dbdicom/external/dcm4che/lib/solaris-sparc/libclib_jiio_vis2.so +0 -0
- dbdicom/external/dcm4che/lib/solaris-sparcv9/libclib_jiio.so +0 -0
- dbdicom/external/dcm4che/lib/solaris-sparcv9/libclib_jiio_vis.so +0 -0
- dbdicom/external/dcm4che/lib/solaris-sparcv9/libclib_jiio_vis2.so +0 -0
- dbdicom/external/dcm4che/lib/solaris-x86/libclib_jiio.so +0 -0
- dbdicom/external/dcm4che/lib/solaris-x86-64/libclib_jiio.so +0 -0
- dbdicom/wrappers/__init__.py +0 -7
- dbdicom/wrappers/elastix.py +0 -855
- dbdicom/wrappers/numpy.py +0 -119
- dbdicom/wrappers/sklearn.py +0 -151
- dbdicom/wrappers/vreg.py +0 -273
- dbdicom-0.2.1.dist-info/METADATA +0 -276
- {dbdicom-0.2.1.dist-info → dbdicom-0.2.3.dist-info}/LICENSE +0 -0
- {dbdicom-0.2.1.dist-info → dbdicom-0.2.3.dist-info}/top_level.txt +0 -0
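
The file moves above show that the `wrappers` subpackage is renamed to `extensions` in this release, so user imports move accordingly. A minimal sketch of the rename's effect, with module names taken from the file list above (whether `dbdicom.extensions` re-exports anything else is not shown in this diff):

```python
# dbdicom 0.2.1 (old layout, removed in 0.2.3):
# from dbdicom.wrappers import scipy

# dbdicom 0.2.3 (new layout, per the file moves listed above):
from dbdicom.extensions import scipy
```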
dbdicom/extensions/numpy.py (new file, @@ -0,0 +1,271 @@):

```python
"""
``dbdicom`` extensions calling numpy functions. These do not require additional packages to be installed.
"""

import numpy as np
import dbdicom as dbd


def mean_intensity_projection(series:dbd.Series, dims=('SliceLocation','InstanceNumber'), axis=-1) -> dbd.Series:
    """Create a mean intensity projection along a specified dimension.

    Args:
        series (dbdicom.Series): Original series.
        dims (tuple, optional): Dimensions of the array. Defaults to ('SliceLocation','InstanceNumber').
        axis (int, optional): axis along which the mean is to be taken. Defaults to -1.

    Returns:
        dbdicom.Series: mean intensity projection.

    Example:

        Get the MIP function from the numpy extension to dbdicom:

        >>> from dbdicom.extensions.numpy import mean_intensity_projection

        Create a zero-filled array, describing 8 MRI images each measured at 3 flip angles and 2 repetition times:

        >>> coords = {
        ...     'SliceLocation': np.arange(8),
        ...     'FlipAngle': [2, 15, 30],
        ...     'RepetitionTime': [2.5, 5.0],
        ... }
        >>> series = db.zeros((128,128,8,3,2), coords)

        Create a mean intensity projection on the slice locations and check the dimensions:

        >>> mip = mean_intensity_projection(series)
        >>> array = mip.pixel_values(dims=('SliceLocation', 'InstanceNumber'))
        >>> print(array.shape)
        (128, 128, 8, 1)

        Create a mean intensity projection along the Slice Location axis:

        >>> mip = mean_intensity_projection(series, dims=tuple(coords), axis=0)
        >>> array = mip.pixel_values(dims=tuple(coords))
        >>> print(array.shape)
        (128, 128, 1, 3, 2)

        Create a mean intensity projection along the Flip Angle axis:

        >>> mip = mean_intensity_projection(series, dims=tuple(coords), axis=1)
        >>> array = mip.pixel_values(dims=tuple(coords))
        >>> print(array.shape)
        (128, 128, 8, 1, 2)

        Create a mean intensity projection along the Repetition Time axis:

        >>> mip = mean_intensity_projection(series, dims=tuple(coords), axis=2)
        >>> array = mip.pixel_values(dims=tuple(coords))
        >>> print(array.shape)
        (128, 128, 8, 3, 1)
    """
    array = series.pixel_values(dims=dims)
    array = np.mean(array, axis=axis)

    # Save as DICOM
    proj = series.new_sibling(SeriesDescription = series.SeriesDescription + '[mean axis ' + str(axis) + ']')
    frames = series.frames(dims)
    frames = np.take(frames, 0, axis=axis)
    frames = frames.ravel()
    array = array.reshape((array.shape[0], array.shape[1], -1))
    for z in range(frames.size):
        series.progress(z+1, frames.size, 'Saving results.. ')
        frames_z = frames[z].copy_to(proj)
        frames_z.set_pixel_values(array[:,:,z])

    return proj


def maximum_intensity_projection(series:dbd.Series, dims=('SliceLocation','InstanceNumber'), axis=-1) -> dbd.Series:
    """Create a maximum intensity projection along a specified dimension.

    Args:
        series (dbdicom.Series): Original series.
        dims (tuple, optional): Dimensions of the array. Defaults to ('SliceLocation','InstanceNumber').
        axis (int, optional): axis along which the maximum is to be taken. Defaults to -1.

    Returns:
        dbdicom.Series: maximum intensity projection.

    Example:

        Get the MIP function from the numpy extension to dbdicom:

        >>> from dbdicom.extensions.numpy import maximum_intensity_projection

        Create a zero-filled array, describing 8 MRI images each measured at 3 flip angles and 2 repetition times:

        >>> coords = {
        ...     'SliceLocation': np.arange(8),
        ...     'FlipAngle': [2, 15, 30],
        ...     'RepetitionTime': [2.5, 5.0],
        ... }
        >>> series = db.zeros((128,128,8,3,2), coords)

        Create a maximum intensity projection on the slice locations and check the dimensions:

        >>> mip = maximum_intensity_projection(series)
        >>> array = mip.pixel_values(dims=('SliceLocation', 'InstanceNumber'))
        >>> print(array.shape)
        (128, 128, 8, 1)

        Create a maximum intensity projection along the Slice Location axis:

        >>> mip = maximum_intensity_projection(series, dims=tuple(coords), axis=0)
        >>> array = mip.pixel_values(dims=tuple(coords))
        >>> print(array.shape)
        (128, 128, 1, 3, 2)

        Create a maximum intensity projection along the Flip Angle axis:

        >>> mip = maximum_intensity_projection(series, dims=tuple(coords), axis=1)
        >>> array = mip.pixel_values(dims=tuple(coords))
        >>> print(array.shape)
        (128, 128, 8, 1, 2)

        Create a maximum intensity projection along the Repetition Time axis:

        >>> mip = maximum_intensity_projection(series, dims=tuple(coords), axis=2)
        >>> array = mip.pixel_values(dims=tuple(coords))
        >>> print(array.shape)
        (128, 128, 8, 3, 1)
    """

    array = series.pixel_values(dims=dims)
    array = np.amax(array, axis=axis)

    # Save as DICOM
    proj = series.new_sibling(SeriesDescription = series.SeriesDescription + '[max axis ' + str(axis) + ']')
    frames = series.frames(dims)
    frames = np.take(frames, 0, axis=axis)
    frames = frames.ravel()
    array = array.reshape((array.shape[0], array.shape[1], -1))
    for z in range(frames.size):
        series.progress(z+1, frames.size, 'Saving results.. ')
        frames_z = frames[z].copy_to(proj)
        frames_z.set_pixel_values(array[:,:,z])

    return proj


def norm_projection(series:dbd.Series, dims=('SliceLocation','InstanceNumber'), axis=-1, ord=None) -> dbd.Series:
    """Projection along a specified dimension using the vector norm.

    This function uses numpy.linalg.norm to calculate the projection, see: https://numpy.org/doc/stable/reference/generated/numpy.linalg.norm.html

    Args:
        series (dbdicom.Series): Original series.
        dims (tuple, optional): Dimensions of the array. Defaults to ('SliceLocation','InstanceNumber').
        axis (int, optional): axis along which the norm is to be taken. Defaults to -1.
        ord (int, optional): order of the norm - see documentation of numpy.linalg.norm for details. Defaults to None.

    Returns:
        dbdicom.Series: norm projection.

    Example:

        Get the function from the numpy extension to dbdicom:

        >>> from dbdicom.extensions.numpy import norm_projection

        Create a zero-filled array, describing 8 MRI images each measured at 3 flip angles and 2 repetition times:

        >>> coords = {
        ...     'SliceLocation': np.arange(8),
        ...     'FlipAngle': [2, 15, 30],
        ...     'RepetitionTime': [2.5, 5.0],
        ... }
        >>> series = db.zeros((128,128,8,3,2), coords)

        Create a norm projection on the slice locations and check the dimensions:

        >>> mip = norm_projection(series)
        >>> array = mip.pixel_values(dims=('SliceLocation', 'InstanceNumber'))
        >>> print(array.shape)
        (128, 128, 8, 1)

        Create a norm projection along the Slice Location axis:

        >>> mip = norm_projection(series, dims=tuple(coords), axis=0)
        >>> array = mip.pixel_values(dims=tuple(coords))
        >>> print(array.shape)
        (128, 128, 1, 3, 2)

        Create a norm projection along the Flip Angle axis:

        >>> mip = norm_projection(series, dims=tuple(coords), axis=1)
        >>> array = mip.pixel_values(dims=tuple(coords))
        >>> print(array.shape)
        (128, 128, 8, 1, 2)

        Create a norm projection along the Repetition Time axis:

        >>> mip = norm_projection(series, dims=tuple(coords), axis=2)
        >>> array = mip.pixel_values(dims=tuple(coords))
        >>> print(array.shape)
        (128, 128, 8, 3, 1)
    """
    array = series.pixel_values(dims=dims)
    array = np.linalg.norm(array, ord=ord, axis=axis)

    # Save as DICOM
    proj = series.new_sibling(SeriesDescription = series.SeriesDescription + '[norm axis ' + str(axis) + ']')
    frames = series.frames(dims)
    frames = np.take(frames, 0, axis=axis)
    frames = frames.ravel()
    array = array.reshape((array.shape[0], array.shape[1], -1))
    for z in range(frames.size):
        series.progress(z+1, frames.size, 'Saving results.. ')
        frames_z = frames[z].copy_to(proj)
        frames_z.set_pixel_values(array[:,:,z])

    return proj



def threshold(input:dbd.Series, low_threshold=0, high_threshold=1, method='absolute')-> dbd.Series:
    """Create a mask series by thresholding.

    Args:
        input (dbd.Series): original data to be masked
        low_threshold (int, optional): Lower threshold for masking. Defaults to 0.
        high_threshold (int, optional): Upper threshold for masking. Defaults to 1.
        method (str, optional): Type of thresholding, either 'absolute' (thresholds are absolute signal values), 'quantiles' (thresholds are quantiles), or 'range' (thresholds are relative positions in the min-max signal range, between 0 and 1). Defaults to 'absolute'.

    Returns:
        dbd.Series: mask series with values = 1 inside and 0 outside.
    """
    suffix = ' [Threshold segmentation]'
    desc = input.instance().SeriesDescription
    filtered = input.copy(SeriesDescription = desc+suffix)
    #images = filtered.instances()
    images = filtered.images()
    for i, image in enumerate(images):
        input.status.progress(i+1, len(images), 'Filtering ' + desc)
        image.read()
        array = image.array()
        if method == 'quantiles':
            range = np.quantile(array, [low_threshold, high_threshold])
        elif method == 'range':
            min, max = np.amin(array), np.amax(array)
            range = [min+low_threshold*(max-min), min+high_threshold*(max-min)]
        else:
            range = [low_threshold, high_threshold]
        array = np.logical_and(array > range[0], array < range[1])
        image.set_array(array)
        array = array.astype(np.ubyte)
        _reset_window(image, array)
        image.clear()
    input.status.hide()
    return filtered



# Helper functions

def _reset_window(image, array):
    min = np.amin(array)
    max = np.amax(array)
    image.WindowCenter= (max+min)/2
    image.WindowWidth = 0.9*(max-min)
```
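
The `threshold` function above documents three thresholding modes ('absolute', 'quantiles' and 'range') but, unlike the projection functions, ships no doctest. The sketch below reproduces the same masking logic on a plain numpy array; the helper name `mask_range` and the test data are illustrative and not part of the package:

```python
import numpy as np

# Illustrative data standing in for the pixel array of one image in the series
array = np.random.default_rng(0).normal(100, 20, size=(128, 128))

def mask_range(array, low, high, method='absolute'):
    """Return a 0/1 mask of pixels with low < value < high (hypothetical helper)."""
    if method == 'quantiles':
        # low/high are quantiles of the pixel values
        lo, hi = np.quantile(array, [low, high])
    elif method == 'range':
        # low/high are relative positions in the min-max signal range
        amin, amax = np.amin(array), np.amax(array)
        lo, hi = amin + low*(amax - amin), amin + high*(amax - amin)
    else:
        # 'absolute': low/high are absolute signal values
        lo, hi = low, high
    return np.logical_and(array > lo, array < hi).astype(np.ubyte)

mask = mask_range(array, 0.25, 0.75, method='quantiles')
print(mask.mean())  # roughly 0.5: half of the pixels lie between the two quartiles
```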
dbdicom/{wrappers → extensions}/scipy.py:

```diff
@@ -7,6 +7,7 @@ from dbdicom.utils.image import multislice_affine_transform
 
 
 
+# OBSOLETE - replaced by vreg._equal_geometry
 def _equal_geometry(affine1, affine2):
     # Check if both are the same,
     # ignoring the order in the list
@@ -34,6 +35,7 @@ def _equal_geometry(affine1, affine2):
 
 
 # Better use set(tuple())
+# OBSOLETE - unused
 def _lists_have_equal_items(list1, list2):
     # Convert the lists to sets
     set1 = set([tuple(x) for x in list1])
@@ -101,7 +103,7 @@ def _mask_curve_3d_data(msk_arr, img_hdrs, ids, dim):
     data = []
     nt = msk_arr[0].shape[-1]
     for t in range(nt):
-        img_hdrs[0][0,0].
+        img_hdrs[0][0,0].progress(t+1, nt, 'Extracting mask time curves..')
         # Concatenate data at time t for each slice group
         arr = [_mask_data(arr_i[...,t], img_hdrs[i][...,t]) for i, arr_i in enumerate(msk_arr)]
         arr = [d for d in arr if d is not None]
@@ -120,7 +122,7 @@ def _mask_curve_3d_data(msk_arr, img_hdrs, ids, dim):
         data.append(ids + vals)
     return data
 
-
+# OBSOLETE to vreg
 def mask_statistics(masks, images):
     if not isinstance(masks, list):
         masks = [masks]
@@ -141,7 +143,7 @@ def mask_statistics(masks, images):
         df_all_masks = pd.concat([df_all_masks, df_mask], ignore_index=True)
     return df_all_masks
 
-
+# OBSOLETE to vreg
 def _mask_statistics(mask, image):
 
     # Get mask array
@@ -157,7 +159,7 @@ def _mask_statistics(mask, image):
         data.append(row)
     return pd.DataFrame(data, columns=columns)
 
-
+# OBSOLETE to vreg
 def _mask_data_slice_groups(msk_arr, img_hdrs):
     if isinstance(msk_arr, list):
         # Loop over slice groups
@@ -172,7 +174,7 @@ def _mask_data_slice_groups(msk_arr, img_hdrs):
         data = _mask_data(msk_arr, img_hdrs)
     return data
 
-
+# OBSOLETE to vreg
 def _mask_data(msk_arr, imgs):
     data = []
     for i, image in np.ndenumerate(imgs):
@@ -190,7 +192,7 @@ def _mask_data(msk_arr, imgs):
     else:
         return np.concatenate(data)
 
-
+# OBSOLETE to vreg
 def _summary_stats(data):
     if data is None:
         return {}
@@ -217,7 +219,7 @@ def _summary_stats(data):
         'Skewness': scipy.stats.skew(data),
     }
 
-
+# Obsolete - moved to vreg
 def array(series, on=None, **kwargs):
     """Return the array overlaid on another series"""
 
@@ -242,7 +244,7 @@ def overlay(features):
         mapped_features.append(mapped)
     return mapped_features
 
-
+# OBSOLETE - see vreg
 def map_to(source, target, **kwargs):
     """Map non-zero pixels onto another series"""
 
```
```diff
@@ -273,16 +275,67 @@ def map_to(source, target, **kwargs):
 def _map_series_to_slice_group(source, target, affine_source, affine_target, **kwargs):
 
     if isinstance(affine_source, list):
-
+        array_target, headers_target = target.array(['SliceLocation','AcquisitionTime'], pixels_first=True)
+        array = None
         for affine_slice_group in affine_source:
             slice_group_source = source.new_sibling()
             slice_group_source.adopt(affine_slice_group[1])
-
-            mapped_series.append(mapped)
+            array_sg, weight_sg = _map_slice_group_to_slice_group_array(slice_group_source, affine_slice_group[0], target, affine_target, array_target.shape[:3], **kwargs)
             slice_group_source.remove()
-
+            if array is None:
+                array = array_sg
+                weight = weight_sg
+            else:
+                array += weight_sg*array_sg
+                weight += weight_sg
+        nozero = np.where(weight > 0)
+        array[nozero] = array[nozero]/weight[nozero]
+
+        # Create new series
+        mapped_series = source.new_sibling(suffix='overlay')
+        ns, nt, nk = array.shape[2], array.shape[3], array.shape[4]
+        cnt=0
+        for t in range(nt):
+            for k in range(nk):
+                for s in range(ns):
+                    cnt+=1
+                    source.progress(cnt, ns*nt*nk, 'Saving results..')
+                    image = headers_target[s,0,0].copy_to(mapped_series)
+                    image.AcquisitionTime = t
+                    image.set_array(array[:,:,s,t,k])
+        return mapped_series
     else:
         return _map_slice_group_to_slice_group(source, affine_source[0], target, affine_target, **kwargs)
+
+
+def _map_slice_group_to_slice_group_array(source, affine_source, target, output_affine, target_shape, **kwargs):
+
+    # Get source arrays
+    array_source, headers_source = source.array(['SliceLocation','AcquisitionTime'], pixels_first=True)
+
+    # Get message status updates
+    source_desc = source.instance().SeriesDescription
+    target_desc = target.instance().SeriesDescription
+    message = 'Mapping ' + source_desc + ' onto ' + target_desc
+    source.message(message)
+
+    array_mapped = multislice_affine_transform(
+        array_source,
+        affine_source,
+        output_affine,
+        output_shape = target_shape,
+        slice_thickness = headers_source[0,0,0].SliceThickness,
+        **kwargs,
+    )
+    weights_mapped = multislice_affine_transform(
+        np.ones(array_source.shape),
+        affine_source,
+        output_affine,
+        output_shape = target_shape,
+        slice_thickness = headers_source[0,0,0].SliceThickness,
+        **kwargs,
+    )
+    return array_mapped, weights_mapped
 
 
 def _map_slice_group_to_slice_group(source, affine_source, target, output_affine, **kwargs):
@@ -306,19 +359,13 @@ def _map_slice_group_to_slice_group(source, affine_source, target, output_affine
         **kwargs,
     )
 
-    #
-    # Preserve source window settings and set the same in result
-    #
-
+    # Create new series
     # Retain source acquisition times
     # Assign acquisition time of slice=0 to all slices
-    nt = headers_source.shape[1]
-    acq_times = [headers_source[0,t,0].AcquisitionTime for t in range(nt)]
-
-    # Create new series
     mapped_series = source.new_sibling(suffix='overlay')
     nt, nk = array_source.shape[3], array_source.shape[4]
     ns = headers_target.shape[0]
+    acq_times = [headers_source[0,t,0].AcquisitionTime for t in range(nt)]
     cnt=0
     for t in range(nt):
         for k in range(nk):
```
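
The rewritten `_map_series_to_slice_group` above no longer collects one mapped series per slice group; it resamples every slice group onto the target geometry, accumulates a weight-multiplied sum, and normalises by the summed weights before writing a single overlay series. A standalone numpy sketch of that accumulation, with random arrays standing in for the `(array_sg, weight_sg)` pairs returned by `_map_slice_group_to_slice_group_array`:

```python
import numpy as np

# Illustrative resampled slice-group arrays and their weights on a common
# (x, y, z) target grid; in the package these come from
# _map_slice_group_to_slice_group_array.
rng = np.random.default_rng(1)
slice_groups = [(rng.random((64, 64, 8)), rng.random((64, 64, 8))) for _ in range(3)]

array, weight = None, None
for array_sg, weight_sg in slice_groups:
    if array is None:
        # The first slice group initialises the running arrays
        array, weight = array_sg.copy(), weight_sg.copy()
    else:
        # Later slice groups are added with their weights
        array += weight_sg*array_sg
        weight += weight_sg

# Normalise wherever at least one slice group contributed
nonzero = np.where(weight > 0)
array[nonzero] = array[nonzero]/weight[nonzero]
print(array.shape)  # (64, 64, 8)
```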
```diff
@@ -331,7 +378,7 @@ def _map_slice_group_to_slice_group(source, affine_source, target, output_affine
     return mapped_series
 
 
-
+# OBSOLETE - see vreg
 def mask_array(mask, on=None, dim='InstanceNumber'):
     """Map non-zero pixels onto another series"""
 
```
```diff
@@ -1009,6 +1056,29 @@ def uniform_filter(input, size=3, **kwargs):
         image.clear()
     input.status.hide()
     return filtered
+
+
+#https://docs.scipy.org/doc/scipy/reference/generated/scipy.ndimage.uniform_filter.html#scipy.ndimage.uniform_filter
+# This has a bug it seems
+def uniform_filter_3d(input, size=3, **kwargs):
+    """
+    wrapper for scipy.ndimage.uniform_filter.
+
+    Parameters
+    ----------
+    input: dbdicom series
+
+    Returns
+    -------
+    filtered : dbdicom series
+    """
+    array, headers = input.array(sortby='SliceLocation', pixels_first=True, first_volume=True)
+    input.message('Computing uniform filter..')
+    array = scipy.ndimage.uniform_filter(array, size=size, **kwargs)
+    suffix = ' [Uniform Filter x ' + str(size) + ']'
+    output = input.new_sibling(suffix=suffix)
+    output.set_array(array, headers, pixels_first=True)
+    return output
 
 
 # https://docs.scipy.org/doc/scipy/reference/generated/scipy.ndimage.gaussian_filter.html#scipy.ndimage.gaussian_filter
@@ -1043,6 +1113,28 @@ def gaussian_filter(input, sigma, **kwargs):
     return filtered
 
 
+# https://docs.scipy.org/doc/scipy/reference/generated/scipy.ndimage.gaussian_filter.html#scipy.ndimage.gaussian_filter
+def gaussian_filter_3d(input, sigma, **kwargs):
+    """
+    wrapper for scipy.ndimage.gaussian_filter.
+
+    Parameters
+    ----------
+    input: dbdicom series
+
+    Returns
+    -------
+    filtered : dbdicom series
+    """
+    suffix = ' [Gaussian Filter x ' + str(sigma) + ' ]'
+    array, headers = input.array(sortby='SliceLocation', pixels_first=True, first_volume=True)
+    input.message('Computing Gaussian filter..')
+    array = scipy.ndimage.gaussian_filter(array, sigma, **kwargs)
+    output = input.new_sibling(suffix=suffix)
+    output.set_array(array, headers, pixels_first=True)
+    return output
+
+
 # https://docs.scipy.org/doc/scipy/reference/generated/scipy.ndimage.fourier_shift.html#scipy.ndimage.fourier_shift
 def fourier_shift(input, shift, **kwargs):
     """
```
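
The new `uniform_filter_3d` and `gaussian_filter_3d` wrappers differ from the existing per-image filters in that they load the whole series as a single volume sorted by `SliceLocation` and filter across slices as well as within them. A standalone scipy sketch of that difference, on an illustrative volume (the data and sigma value below are made up):

```python
import numpy as np
from scipy import ndimage

# Illustrative (x, y, z) volume standing in for the array returned by
# input.array(sortby='SliceLocation', pixels_first=True, first_volume=True)
volume = np.random.default_rng(2).random((128, 128, 8))

# Slice-by-slice filtering: smoothing stays within each z-slice
filtered_2d = np.stack(
    [ndimage.gaussian_filter(volume[:, :, z], sigma=2) for z in range(volume.shape[2])],
    axis=-1,
)

# Volume filtering, as in gaussian_filter_3d: smoothing also mixes neighbouring slices
filtered_3d = ndimage.gaussian_filter(volume, sigma=2)

print(filtered_2d.shape, filtered_3d.shape)  # (128, 128, 8) (128, 128, 8)
```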
```diff
@@ -1080,13 +1172,13 @@ def fourier_shift(input, shift, **kwargs):
 
 
 
-def series_calculator(series, operation='1 - series'):
+def series_calculator(series, operation='1 - series', param=None):
 
     desc = series.instance().SeriesDescription
     result = series.copy(SeriesDescription = desc + ' [' + operation + ']')
     images = result.images()
     for i, img in enumerate(images):
-        series.
+        series.progress(i+1, len(images), 'Calculating..')
         img.read()
         array = img.array()
         if operation == '1 - series':
@@ -1101,6 +1193,10 @@ def series_calculator(series, operation='1 - series'):
             array = np.exp(array)
         elif operation == 'integer(series)':
             array = np.around(array)
+        elif operation == 'abs(series)':
+            array = np.abs(array)
+        elif operation == 'a * series':
+            array = array*param
         array[~np.isfinite(array)] = 0
         img.set_array(array)
         _reset_window(img, array)
```
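
`series_calculator` gains a `param` argument together with the `'abs(series)'` and `'a * series'` operations, so a constant scaling no longer needs a custom loop. A hedged usage sketch: the `db.zeros` call follows the docstring examples in `dbdicom/extensions/numpy.py` above, and only the operation strings and the `param` keyword come from this diff.

```python
import numpy as np
import dbdicom as db
from dbdicom.extensions.scipy import series_calculator

# Assumed test series, created as in the docstring examples above
series = db.zeros((128, 128, 8), {'SliceLocation': np.arange(8)})

inverted = series_calculator(series, '1 - series')            # existing operation
magnitude = series_calculator(series, 'abs(series)')          # new in 0.2.3
scaled = series_calculator(series, 'a * series', param=2.5)   # new: multiply by a constant
```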
```diff
@@ -1109,7 +1205,7 @@ def series_calculator(series, operation='1 - series'):
     return result
 
 
-def image_calculator(series1, series2, operation='series 1 - series 2', integer=False):
+def image_calculator(series1, series2, operation='series 1 - series 2', integer=False, series_desc=None):
 
     result = map_to(series2, series1)
     if result == series2: # same geometry
@@ -1124,22 +1220,22 @@ def image_calculator(series1, series2, operation='series 1 - series 2', integer=
         img2.read()
         array1 = img1.array()
         array2 = img2.array()
-        if operation
+        if operation in ['series 1 + series 2', '+', 'sum']:
             array = array1 + array2
             desc = ' [add]'
-        elif operation
+        elif operation in ['series 1 - series 2', '-', 'diff']:
             array = array1 - array2
             desc = ' [difference]'
-        elif operation
+        elif operation in ['series 1 / series 2', '/', 'div']:
             array = array1 / array2
             desc = ' [divide]'
-        elif operation
+        elif operation in ['series 1 * series 2', '*', 'mult']:
             array = array1 * array2
             desc = ' [multiply]'
-        elif operation
+        elif operation in ['(series 1 - series 2)/series 2', 'rdiff']:
             array = (array1 - array2)/array2
             desc = ' [relative difference]'
-        elif operation
+        elif operation in ['average(series 1, series 2)', 'avr', 'mean']:
             array = (array1 + array2)/2
             desc = ' [average]'
         array[~np.isfinite(array)] = 0
```
```diff
@@ -1148,7 +1244,10 @@ def image_calculator(series1, series2, operation='series 1 - series 2', integer=
         img2.set_array(array)
         _reset_window(img2, array.astype(np.ubyte))
         img2.clear()
-
+    if series_desc is None:
+        result.SeriesDescription = result.instance().SeriesDescription + desc
+    else:
+        result.SeriesDescription = series_desc
     return result
 
 
```
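
`image_calculator` now accepts short aliases for each operation ('+', 'sum', '-', 'diff', and so on) and an optional `series_desc` that overrides the automatically generated description suffix. A hedged usage sketch along the same lines as above; the series construction is assumed, while the aliases and the `series_desc` argument come from this diff.

```python
import numpy as np
import dbdicom as db
from dbdicom.extensions.scipy import image_calculator

# Assumed test series with identical geometry, created as in the docstring examples above
coords = {'SliceLocation': np.arange(8)}
series1 = db.zeros((128, 128, 8), coords)
series2 = db.zeros((128, 128, 8), coords)

# 'diff' is shorthand for 'series 1 - series 2'; series_desc replaces the
# automatic ' [difference]' suffix on the result.
difference = image_calculator(series1, series2, operation='diff', series_desc='S1 minus S2')
```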