dbdicom 0.2.0__py3-none-any.whl → 0.3.16__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- dbdicom/__init__.py +3 -25
- dbdicom/api.py +496 -0
- dbdicom/const.py +144 -0
- dbdicom/database.py +133 -0
- dbdicom/dataset.py +471 -0
- dbdicom/dbd.py +1290 -0
- dbdicom/external/__pycache__/__init__.cpython-311.pyc +0 -0
- dbdicom/external/dcm4che/__pycache__/__init__.cpython-311.pyc +0 -0
- dbdicom/external/dcm4che/bin/__pycache__/__init__.cpython-311.pyc +0 -0
- dbdicom/external/dcm4che/bin/emf2sf +57 -57
- dbdicom/register.py +402 -0
- dbdicom/{ds/types → sop_classes}/ct_image.py +2 -16
- dbdicom/{ds/types → sop_classes}/enhanced_mr_image.py +206 -160
- dbdicom/sop_classes/mr_image.py +338 -0
- dbdicom/sop_classes/parametric_map.py +381 -0
- dbdicom/sop_classes/secondary_capture.py +140 -0
- dbdicom/sop_classes/segmentation.py +311 -0
- dbdicom/{ds/types → sop_classes}/ultrasound_multiframe_image.py +1 -15
- dbdicom/{ds/types → sop_classes}/xray_angiographic_image.py +2 -17
- dbdicom/utils/arrays.py +142 -0
- dbdicom/utils/files.py +0 -20
- dbdicom/utils/image.py +43 -466
- dbdicom/utils/pydicom_dataset.py +386 -0
- dbdicom-0.3.16.dist-info/METADATA +26 -0
- dbdicom-0.3.16.dist-info/RECORD +54 -0
- {dbdicom-0.2.0.dist-info → dbdicom-0.3.16.dist-info}/WHEEL +1 -1
- dbdicom/create.py +0 -450
- dbdicom/ds/__init__.py +0 -10
- dbdicom/ds/create.py +0 -63
- dbdicom/ds/dataset.py +0 -841
- dbdicom/ds/dictionaries.py +0 -620
- dbdicom/ds/types/mr_image.py +0 -267
- dbdicom/ds/types/parametric_map.py +0 -226
- dbdicom/external/__pycache__/__init__.cpython-310.pyc +0 -0
- dbdicom/external/__pycache__/__init__.cpython-37.pyc +0 -0
- dbdicom/external/dcm4che/__pycache__/__init__.cpython-310.pyc +0 -0
- dbdicom/external/dcm4che/__pycache__/__init__.cpython-37.pyc +0 -0
- dbdicom/external/dcm4che/bin/__pycache__/__init__.cpython-310.pyc +0 -0
- dbdicom/external/dcm4che/bin/__pycache__/__init__.cpython-37.pyc +0 -0
- dbdicom/external/dcm4che/lib/linux-x86/libclib_jiio.so +0 -0
- dbdicom/external/dcm4che/lib/linux-x86-64/libclib_jiio.so +0 -0
- dbdicom/external/dcm4che/lib/linux-x86-64/libopencv_java.so +0 -0
- dbdicom/external/dcm4che/lib/solaris-sparc/libclib_jiio.so +0 -0
- dbdicom/external/dcm4che/lib/solaris-sparc/libclib_jiio_vis.so +0 -0
- dbdicom/external/dcm4che/lib/solaris-sparc/libclib_jiio_vis2.so +0 -0
- dbdicom/external/dcm4che/lib/solaris-sparcv9/libclib_jiio.so +0 -0
- dbdicom/external/dcm4che/lib/solaris-sparcv9/libclib_jiio_vis.so +0 -0
- dbdicom/external/dcm4che/lib/solaris-sparcv9/libclib_jiio_vis2.so +0 -0
- dbdicom/external/dcm4che/lib/solaris-x86/libclib_jiio.so +0 -0
- dbdicom/external/dcm4che/lib/solaris-x86-64/libclib_jiio.so +0 -0
- dbdicom/manager.py +0 -2077
- dbdicom/message.py +0 -119
- dbdicom/record.py +0 -1526
- dbdicom/types/database.py +0 -107
- dbdicom/types/instance.py +0 -184
- dbdicom/types/patient.py +0 -40
- dbdicom/types/series.py +0 -816
- dbdicom/types/study.py +0 -58
- dbdicom/utils/variables.py +0 -155
- dbdicom/utils/vreg.py +0 -2626
- dbdicom/wrappers/__init__.py +0 -7
- dbdicom/wrappers/dipy.py +0 -462
- dbdicom/wrappers/elastix.py +0 -855
- dbdicom/wrappers/numpy.py +0 -119
- dbdicom/wrappers/scipy.py +0 -1413
- dbdicom/wrappers/skimage.py +0 -1030
- dbdicom/wrappers/sklearn.py +0 -151
- dbdicom/wrappers/vreg.py +0 -273
- dbdicom-0.2.0.dist-info/METADATA +0 -276
- dbdicom-0.2.0.dist-info/RECORD +0 -81
- {dbdicom-0.2.0.dist-info → dbdicom-0.3.16.dist-info/licenses}/LICENSE +0 -0
- {dbdicom-0.2.0.dist-info → dbdicom-0.3.16.dist-info}/top_level.txt +0 -0
dbdicom/dbd.py
ADDED
@@ -0,0 +1,1290 @@
import os
import shutil
import json
from typing import Union
import zipfile
import re
from copy import deepcopy
from pathlib import Path

from tqdm import tqdm
import numpy as np
import vreg
from pydicom.dataset import Dataset
import pydicom

import dbdicom.utils.arrays
import dbdicom.dataset as dbdataset
import dbdicom.database as dbdatabase
import dbdicom.register as register
import dbdicom.const as const
from dbdicom.utils.pydicom_dataset import (
    get_values,
    set_values,
    set_value,
)


class DataBaseDicom():
    """Class to read and write a DICOM folder.

    Args:
        path (str): path to the DICOM folder.
    """

    def __init__(self, path):

        if not os.path.exists(path):
            os.makedirs(path)
        self.path = path

        file = self._register_file()
        if os.path.exists(file):
            try:
                with open(file, 'r') as f:
                    self.register = json.load(f)
                # Remove the json file after reading it. If the database
                # is not closed properly, this prevents changes from being
                # made that are not reflected in the json file on disk.
                # os.remove(file)
            except Exception as e:
                # raise ValueError(
                #     f'Cannot open {file}. Please close any programs that are '
                #     f'using it and try again. Alternatively you can delete the file '
                #     f'manually and try again.'
                # )
                # If the file can't be read, delete it and load again
                os.remove(file)
                self.read()
        else:
            self.read()
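
For orientation, a minimal sketch of the intended lifecycle (not part of the diff; the folder name is a placeholder). Opening a folder builds or loads the index.json register, and close() writes it back:

    from dbdicom.dbd import DataBaseDicom

    db = DataBaseDicom('dicom_data')   # creates the folder if needed and reads the register
    db.print()                         # show the patient/study/series tree
    db.close()                         # save the register back to index.json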

    def read(self):
        """Read the DICOM folder again
        """
        self.register = dbdatabase.read(self.path)
        # For now ensure all series have just a single CIOD
        # Leaving this out for now until the issue occurs again.
        # self._split_series()
        return self

    def delete(self, entity, not_exists_ok=False):
        """Delete a DICOM entity from the database

        Args:
            entity (list): entity to delete
            not_exists_ok (bool): By default, an exception is raised when attempting
                to delete an entity that does not exist. Set this to True to pass
                over this silently.
        """
        # delete datasets on disk
        try:
            removed = register.index(self.register, entity)
        except ValueError:
            if not_exists_ok:
                return self
            else:
                raise ValueError(
                    "The entity you are trying to delete does not exist. \n"
                    "You can set not_exists_ok=True in dbdicom.delete() to avoid this error."
                )
        for index in removed:
            file = os.path.join(self.path, str(Path(*index)))
            if os.path.exists(file):
                os.remove(file)
        # Drop the entity from the register
        register.remove(self.register, entity)
        # Cleanup empty folders
        remove_empty_folders(entity[0])
        return self
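
A sketch of delete() with the silent option, assuming the list-style entity addressing used throughout this module ([folder, patient, study, series]; the names are placeholders):

    db.delete(['dicom_data', 'patient_1', 'baseline', 'T1_map'], not_exists_ok=True)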

    def close(self):
        """Close the DICOM folder

        This also saves changes in the header file to disk.
        """
        file = self._register_file()
        with open(file, 'w') as f:
            json.dump(self.register, f, indent=4)
        return self


    def _register_file(self):
        return os.path.join(self.path, 'index.json')

    def summary(self):
        """Return a summary of the contents of the database.

        Returns:
            dict: Nested dictionary with summary information on the database.
        """
        return register.summary(self.register)

    def to_csv(self, csv_file):
        """Write a summary of the contents of the database to csv.

        Args:
            csv_file (str): path to the csv file
        """
        register.to_csv(self.register, csv_file)


    def print(self):
        """Print the contents of the DICOM folder
        """
        register.print_tree(self.register)
        return self

    def patients(self, name=None, contains=None, isin=None):
        """Return a list of patients in the DICOM folder.

        Args:
            name (str, optional): value of PatientName, to search for
                individuals with a given name. Defaults to None.
            contains (str, optional): substring of PatientName, to
                search for individuals based on part of their name.
                Defaults to None.
            isin (list, optional): List of PatientName values, to search
                for patients whose name is in the list. Defaults to None.

        Returns:
            list: list of patients fulfilling the criteria.
        """
        return register.patients(self.register, self.path, name, contains, isin)

    def studies(self, entity=None, desc=None, contains=None, isin=None):
        """Return a list of studies in the DICOM folder.

        Args:
            entity (str or list): path to a DICOM folder (to search in
                the whole folder), or a two-element list identifying a
                patient (to search studies of a given patient).
            desc (str, optional): value of StudyDescription, to search for
                studies with a given description. Defaults to None.
            contains (str, optional): substring of StudyDescription, to
                search for studies based on part of their description.
                Defaults to None.
            isin (list, optional): List of StudyDescription values, to search
                for studies whose description is in a list. Defaults to None.

        Returns:
            list: list of studies fulfilling the criteria.
        """
        if entity is None:
            entity = self.path
        if isinstance(entity, str):
            studies = []
            for patient in self.patients():
                studies += self.studies(patient, desc, contains, isin)
            return studies
        elif len(entity) == 1:
            studies = []
            for patient in self.patients():
                studies += self.studies(patient, desc, contains, isin)
            return studies
        else:
            return register.studies(self.register, entity, desc, contains, isin)

    def series(self, entity=None, desc=None, contains=None, isin=None):
        """Return a list of series in the DICOM folder.

        Args:
            entity (str or list): path to a DICOM folder (to search in
                the whole folder), or a list identifying a
                patient or a study (to search series of a given patient
                or study).
            desc (str, optional): value of SeriesDescription, to search for
                series with a given description. Defaults to None.
            contains (str, optional): substring of SeriesDescription, to
                search for series based on part of their description.
                Defaults to None.
            isin (list, optional): List of SeriesDescription values, to search
                for series whose description is in a list. Defaults to None.

        Returns:
            list: list of series fulfilling the criteria.
        """
        if entity is None:
            entity = self.path
        if isinstance(entity, str):
            series = []
            for study in self.studies(entity):
                series += self.series(study, desc, contains, isin)
            return series
        elif len(entity) == 1:
            series = []
            for study in self.studies(entity):
                series += self.series(study, desc, contains, isin)
            return series
        elif len(entity) == 2:
            series = []
            for study in self.studies(entity):
                series += self.series(study, desc, contains, isin)
            return series
        else: # entity is a study (all series in that study)
            return register.series(self.register, entity, desc, contains, isin)
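
Taken together, patients(), studies() and series() walk the hierarchy. A sketch of how the filters compose (all names are placeholders):

    for patient in db.patients(contains='volunteer'):
        for study in db.studies(patient, desc='baseline'):
            t1_series = db.series(study, contains='T1')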


    def volume(self, entity:Union[list, str], dims:list=None, verbose=1, **kwargs) -> vreg.Volume3D:
        """Read volume.

        Args:
            entity (list, str): DICOM series to read
            dims (list, optional): Non-spatial dimensions of the volume. Defaults to None.
            verbose (bool, optional): If set to 1, shows progress bar. Defaults to 1.
            kwargs (dict, optional): keywords to filter the series.

        Returns:
            vreg.Volume3D:
        """
        # if isinstance(entity, str): # path to folder
        #     return [self.volume(s, dims) for s in self.series(entity)]
        # if len(entity) < 4: # folder, patient or study
        #     return [self.volume(s, dims) for s in self.series(entity)]

        if dims is None:
            dims = []
        elif isinstance(dims, str):
            dims = [dims]
        else:
            dims = list(dims)
        dims = ['SliceLocation'] + dims  # replace by slice_location(ds)

        # Read dicom files
        values = [[] for _ in dims]
        volumes = []

        files = register.files(self.register, entity)
        for f in tqdm(files, desc='Reading volume..', disable=(verbose==0)):
            ds = pydicom.dcmread(f)

            # Check if the file can be used
            if _skip(ds, kwargs):
                continue

            values_f = get_values(ds, dims)
            for d in range(len(dims)):
                values[d].append(values_f[d])
            volumes.append(dbdataset.volume(ds))

        # Check slice cosines and reverse if needed
        volumes = check_slice_cosines(volumes, values[0])

        # Format coordinates as mesh
        # coords = []
        # for v in values:
        #     if np.isscalar(v[0]):
        #         v_arr = np.array(v)
        #     else:
        #         v_arr = np.empty(len(v), dtype=object)
        #         v_arr[:] = v
        #     coords.append(v_arr)
        # coords = [np.array(v) for v in values]
        # coords, inds = dbdicom.utils.arrays.meshvals(coords)
        coords, inds = dbdicom.utils.arrays.meshvals(values)

        # Check that all slices have the same coordinates
        if len(dims) > 1:
            # Loop over all coordinates after slice location
            for c in coords[1:]:
                # Loop over all slice locations
                for k in range(1, c.shape[0]):
                    # Coordinate c of slice k
                    if not np.array_equal(c[k,...], c[0,...]):
                        raise ValueError(
                            "Cannot build a single volume. Not all slices "
                            "have the same coordinates."
                        )

        # Build volumes
        vols = np.array(volumes)
        vols = vols[inds].reshape(coords[0].shape)

        # Infer spacing between slices from slice locations
        # Technically only necessary if SpacingBetweenSlices not set or incorrect
        vols = infer_slice_spacing(vols)

        # Join 2D volumes into 3D volumes
        vol = vreg.join(vols)
        # try:
        #     vol = vreg.join(vols)
        # except ValueError:
        #     # some vendors define the slice vector as -cross product
        #     # of row and column vector. Check if that solves the issue.
        #     for v in vols.reshape(-1):
        #         v.affine[:3,2] = -v.affine[:3,2]
        #     # Then try again
        #     vol = vreg.join(vols)

        # For multi-dimensional volumes, set dimensions and coordinates
        if vol.ndim > 3:
            # Coordinates of slice 0
            c0 = [c[0,...] for c in coords[1:]]
            vol.set_coords(c0)
            vol.set_dims(dims[1:])
        return vol
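
A sketch of reading a 4D volume, assuming a series acquired at multiple flip angles ('FlipAngle' is a standard DICOM keyword; the entity is a placeholder). Any extra keyword is compared per file in _skip(), so files that do not match are dropped:

    series = ['dicom_data', 'patient_1', 'baseline', 'VFA']
    vol = db.volume(series, dims=['FlipAngle'])  # 3 spatial axes + one per extra dim
    vol_filtered = db.volume(series, dims=['FlipAngle'], EchoTime=1.8)  # per-file filter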


    # Obsolete API - phase out
    def volumes_2d(self, *args, **kwargs):
        return self.slices(*args, **kwargs)


    def slices(self, entity:Union[list, str], dims:list=None, verbose=1) -> list:
        """Read 2D volumes from the series

        Args:
            entity (list, str): DICOM series to read
            dims (list, optional): Non-spatial dimensions of the volume. Defaults to None.
            verbose (bool, optional): If set to 1, shows progress bar. Defaults to 1.

        Returns:
            list of vreg.Volume3D
        """
        # if isinstance(entity, str): # path to folder
        #     return [self.volume(s, dims) for s in self.series(entity)]
        # if len(entity) < 4: # folder, patient or study
        #     return [self.volume(s, dims) for s in self.series(entity)]

        if dims is None:
            dims = []
        elif isinstance(dims, str):
            dims = [dims]
        else:
            dims = list(dims)
        dims = ['SliceLocation'] + dims

        # Read dicom files
        values = {}
        volumes = {}

        files = register.files(self.register, entity)
        for f in tqdm(files, desc='Reading volume..', disable=(verbose==0)):
            ds = pydicom.dcmread(f)
            values_f = get_values(ds, dims)
            vol = dbdataset.volume(ds, multislice=True)
            slice_loc = values_f[0]
            if slice_loc in volumes:
                volumes[slice_loc].append(vol)
                for d in range(len(dims)):
                    values[slice_loc][d].append(values_f[d])
            else:
                volumes[slice_loc] = [vol]
                values[slice_loc] = [[values_f[d]] for d in range(len(dims))]

        # Build a volume for each slice location
        volumes_2d = []
        for slice_loc in volumes.keys():
            vols_list = volumes[slice_loc]

            if len(dims) == 1:
                # Only SliceLocation was given: expect a single slice per location
                if len(vols_list) > 1:
                    raise ValueError(
                        "Cannot return a 2D volume - multiple slices at the same "
                        "location. \n Use InstanceNumber or another suitable DICOM "
                        "attribute as dimension to sort them.")
                volumes_2d.append(vols_list[0])
                continue

            # Sort by coordinate values
            vals_list = values[slice_loc]

            # Format coordinates as mesh
            # coords = [np.array(v) for v in vals_list]
            # coords, inds = dbdicom.utils.arrays.meshvals(coords)
            coords, inds = dbdicom.utils.arrays.meshvals(vals_list)

            # Check that all slices have the same coordinates
            if len(dims) > 1:
                # Loop over all coordinates after slice location
                for c in coords[1:]:
                    # Loop over all slice locations
                    for k in range(1, c.shape[0]):
                        # Coordinate c of slice k
                        if not np.array_equal(c[k,...], c[0,...]):
                            raise ValueError(
                                "Cannot build a single volume. Not all slices "
                                "have the same coordinates."
                            )

            # Build volumes, sort and reshape along the coordinates
            vols = np.array(vols_list)
            vols = vols[inds].reshape(coords[0].shape)

            # Join 2D volumes along the extra dimensions
            vol = vreg.join(vols[0,...].reshape((1,) + vols.shape[1:]))

            # For multi-dimensional volumes, set dimensions and coordinates
            if vol.ndim > 3:
                # Coordinates of slice 0
                c0 = [c[0,...] for c in coords[1:]]
                vol.set_coords(c0)
                vol.set_dims(dims[1:])

            volumes_2d.append(vol)

        # Sort volumes by affine slice location
        volumes_2d.sort(key=lambda v: affine_slice_loc(v.affine))

        return volumes_2d


    def pixel_data(self, series:list, dims:list=None, verbose=1) -> np.ndarray:
        """Read the pixel data from a DICOM series

        Args:
            series (list or str): DICOM series to read. This can also
                be a path to a folder containing DICOM files, or a
                patient or study to read all series in that patient or
                study. In those cases a list is returned.
            dims (list, optional): Dimensions of the array.

        Returns:
            numpy.ndarray or tuple: numpy array with pixel values, with
                at least 3 dimensions (x,y,z).
        """
        vols = self.volumes_2d(series, dims, verbose)
        for v in vols[1:]:
            if v.shape != vols[0].shape:
                raise ValueError(
                    "Cannot return a pixel array because slices have different shapes. "
                    "Instead try using volumes_2d to return a list of 2D volumes."
                )
        slices = [v.values for v in vols]
        pixel_array = np.concatenate(slices, axis=2)
        return pixel_array
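
A sketch of pulling a bare numpy array (entity and attribute choice are placeholders); the per-slice 2D volumes are concatenated along the slice axis:

    array = db.pixel_data(series, dims=['AcquisitionTime'])
    print(array.shape)  # (x, y, z, ...) with one trailing axis per extra dimension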



    def values(self, series:list, *attr, dims:list=None, verbose=1) -> Union[dict, tuple]:
        """Read the values of some attributes from a DICOM series

        Args:
            series (list): DICOM series to read.
            attr (tuple, optional): DICOM attributes to read.
            dims (list, optional): Non-spatial dimensions of the volume. Defaults to None.
            verbose (bool, optional): If set to 1, shows progress bar. Defaults to 1.

        Returns:
            tuple or numpy.ndarray: arrays with values for the attributes
                (a single array if only one attribute is given).
        """
        # if isinstance(series, str): # path to folder
        #     return [self.values(s, attr, dims) for s in self.series(series)]
        # if len(series) < 4: # folder, patient or study
        #     return [self.values(s, attr, dims) for s in self.series(series)]

        if dims is None:
            dims = ['InstanceNumber']
        elif np.isscalar(dims):
            dims = [dims]
        else:
            dims = list(dims)

        # Read dicom files
        coord_values = [[] for _ in dims]
        attr_values = [[] for _ in attr]

        files = register.files(self.register, series)
        for f in tqdm(files, desc='Reading values..', disable=(verbose==0)):
            ds = pydicom.dcmread(f)
            coord_values_f = get_values(ds, dims)
            for d in range(len(dims)):
                coord_values[d].append(coord_values_f[d])
            attr_values_f = get_values(ds, attr)
            for a in range(len(attr)):
                attr_values[a].append(attr_values_f[a])

        # Format coordinates as mesh
        # coords = [np.array(v) for v in coord_values]
        # coords, inds = dbdicom.utils.arrays.meshvals(coords)
        coords, inds = dbdicom.utils.arrays.meshvals(coord_values)

        # Sort values accordingly
        # values = [np.array(v) for v in attr_values]
        values = dbdicom.utils.arrays.to_array_list(attr_values)
        values = [v[inds].reshape(coords[0].shape) for v in values]

        # Return values
        if len(values) == 1:
            return values[0]
        else:
            return tuple(values)
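
A sketch of reading header values (attributes are standard DICOM keywords; the series entity is a placeholder). By default the values come back sorted by InstanceNumber:

    TE, TR = db.values(series, 'EchoTime', 'RepetitionTime')  # tuple for multiple attributes
    TE = db.values(series, 'EchoTime')                        # single array for one attribute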


    def write_volume(
        self, vol:Union[vreg.Volume3D, tuple], series:list,
        ref:list=None, append=False, verbose=1, **kwargs,
    ):
        """Write a vreg.Volume3D to a DICOM series

        Args:
            vol (vreg.Volume3D): Volume to write to the series.
            series (list): DICOM series to write to
            ref (list): Reference series
            append (bool): by default write_volume will only write to a new series,
                and raise an error when attempting to write to an existing series.
                To overrule this behaviour and add the volume to an existing series,
                set append to True. Default is False.
            verbose (bool): if set to 1, a progress bar is shown
            kwargs: Keyword-value pairs to be set on the fly
        """
        series_full_name = full_name(series)
        if series_full_name in self.series():
            if not append:
                raise ValueError(f"Series {series_full_name[-1]} already exists in study {series_full_name[-2]}.")

        if isinstance(vol, tuple):
            vol = vreg.volume(vol[0], vol[1])
        if ref is None:
            ds = dbdataset.new_dataset('MRImage')
            #ds = dbdataset.new_dataset('ParametricMap')
        else:
            if ref[0] == series[0]:
                ref_mgr = self
            else:
                ref_mgr = DataBaseDicom(ref[0])
            files = register.files(ref_mgr.register, ref)
            ref_mgr.close()
            ds = pydicom.dcmread(files[0])

        # Get the attributes of the destination series
        attr = self._series_attributes(series)
        n = self._max_instance_number(attr['SeriesInstanceUID'])

        if vol.ndim == 3:
            slices = vol.split()
            for i, sl in tqdm(enumerate(slices), desc='Writing volume..', disable=verbose==0):
                dbdataset.set_volume(ds, sl)
                if kwargs != {}:
                    set_values(ds, list(kwargs.keys()), list(kwargs.values()))
                self._write_dataset(ds, attr, n + 1 + i)
        else:
            i = 0
            vols = vol.separate().reshape(-1)
            for vt in tqdm(vols, desc='Writing volume..', disable=verbose==0):
                slices = vt.split()
                for sl in slices:
                    dbdataset.set_volume(ds, sl)
                    if kwargs != {}:
                        set_values(ds, list(kwargs.keys()), list(kwargs.values()))
                    self._write_dataset(ds, attr, n + 1 + i)
                    i += 1
        return self
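
A sketch of writing a derived map back to the database, reusing an existing series as header template via ref and setting an attribute on the fly through kwargs (voxel data, affine and entity names are placeholders; the (values, affine) tuple form is accepted as shown in the code above):

    import numpy as np

    values = np.zeros((128, 128, 20))  # placeholder voxel array
    affine = np.eye(4)                 # placeholder affine (1 mm isotropic)
    dest = ['dicom_data', 'patient_1', 'baseline', 'T1_map']
    db.write_volume((values, affine), dest, ref=series, ProtocolName='T1 mapping')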


    def edit(
        self, series:list, new_values:dict, dims:list=None, verbose=1,
    ):
        """Edit attribute values in a DICOM series

        Args:
            series (list): DICOM series to edit
            new_values (dict): dictionary with attribute: value pairs to write to the series
            dims (list, optional): Non-spatial dimensions of the volume. Defaults to None.
            verbose (bool, optional): If set to 1, shows progress bar. Defaults to 1.
        """

        if dims is None:
            dims = ['InstanceNumber']
        elif np.isscalar(dims):
            dims = [dims]
        else:
            dims = list(dims)

        # Check that all values have the correct nr of elements
        files = register.files(self.register, series)
        for a in new_values.values():
            if np.isscalar(a):
                pass
            elif np.array(a).size != len(files):
                raise ValueError(
                    f"Incorrect value lengths. All values need to have {len(files)} elements"
                )

        # Read dicom files to sort them
        coord_values = [[] for _ in dims]
        for f in tqdm(files, desc='Sorting series..', disable=(verbose==0)):
            ds = pydicom.dcmread(f)
            coord_values_f = get_values(ds, dims)
            for d in range(len(dims)):
                coord_values[d].append(coord_values_f[d])

        # Format coordinates as mesh
        # coords = [np.array(v) for v in coord_values]
        # coords, inds = dbdicom.utils.arrays.meshvals(coords)
        coords, inds = dbdicom.utils.arrays.meshvals(coord_values)

        # Sort files accordingly
        files = np.array(files)[inds]

        # Now edit and write the files
        attr = self._series_attributes(series)
        n = self._max_instance_number(attr['SeriesInstanceUID'])

        # Drop existing attributes if they are edited
        attr = {a: attr[a] for a in attr if a not in new_values}

        # List instances to be edited
        to_drop = register.index(self.register, series)

        # Write the instances
        tags = list(new_values.keys())
        for i, f in tqdm(enumerate(files), desc='Writing values..', disable=(verbose==0)):
            ds = pydicom.dcmread(f)
            values = []
            for a in new_values.values():
                if np.isscalar(a):
                    values.append(a)
                else:
                    values.append(np.array(a).reshape(-1)[i])
            set_values(ds, tags, values)
            self._write_dataset(ds, attr, n + 1 + i)

        # Delete the original files
        register.drop(self.register, to_drop)
        for idx in to_drop:
            os.remove(os.path.join(self.path, str(Path(*idx))))

        return self
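
A sketch of edit(): a scalar value is broadcast to every instance, while an array must carry one element per file (acq_times is a placeholder list with len(files) entries):

    db.edit(series, {'ImageComments': 'QC passed'})  # scalar: broadcast to all files
    db.edit(series, {'AcquisitionTime': acq_times})  # one value per instance, sorted by
                                                     # the default dims=['InstanceNumber']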


    def to_nifti(self, series:list, file:str, dims=None, verbose=1):
        """Save a DICOM series in nifti format.

        Args:
            series (list): DICOM series to read
            file (str): file path of the nifti file.
            dims (list, optional): Non-spatial dimensions of the volume.
                Defaults to None.
            verbose (bool, optional): If set to 1, shows progress bar. Defaults to 1.
        """
        vol = self.volume(series, dims, verbose)
        vreg.write_nifti(vol, file)
        return self

    def from_nifti(self, file:str, series:list, ref:list=None):
        """Create a DICOM series from a nifti file.

        Args:
            file (str): file path of the nifti file.
            series (list): DICOM series to create
            ref (list): DICOM series to use as template.
        """
        vol = vreg.read_nifti(file)
        self.write_volume(vol, series, ref)
        return self
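
Round-tripping through nifti, as a sketch (paths and entities are placeholders; from_nifti delegates to write_volume, so the same ref/template logic applies):

    db.to_nifti(series, 'T1_map.nii.gz')
    imported = ['dicom_data', 'patient_1', 'baseline', 'T1_map_import']
    db.from_nifti('T1_map.nii.gz', imported, ref=series)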


    def files(self, entity:list) -> list:
        """Read the files in a DICOM entity

        Args:
            entity (list or str): DICOM entity to read. This can
                be a path to a folder containing DICOM files, or a
                patient or study to read all series in that patient or
                study.

        Returns:
            list: list of valid dicom files.
        """
        if isinstance(entity, str): # path to folder
            files = []
            for s in self.series(entity):
                files += self.files(s)
            return files
        if len(entity) < 4: # folder, patient or study
            files = []
            for s in self.series(entity):
                files += self.files(s)
            return files

        return register.files(self.register, entity)


    def unique(self, pars:list, entity:list) -> dict:
        """Return a list of unique values for a DICOM entity

        Args:
            pars (list, str/tuple): attribute or attributes to return.
            entity (list): DICOM entity to search (Patient, Study or Series)

        Returns:
            dict: if pars is a list, this returns a dictionary with
                unique values for each attribute. If pars is a scalar
                this returns a list of values.
        """
        if not isinstance(pars, list):
            single = True
            pars = [pars]
        else:
            single = False

        v = self._values(pars, entity)

        # Return a list with unique values for each attribute
        values = []
        for a in range(v.shape[1]):
            va = v[:, a]
            # Remove None values
            va = va[[x is not None for x in va]]
            va = list(va)
            # Get unique values and sort
            va = [x for i, x in enumerate(va) if i == va.index(x)]
            try:
                va.sort()
            except Exception:
                pass
            values.append(va)

        if single:
            return values[0]
        else:
            return {p: values[i] for i, p in enumerate(pars)}
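
A sketch of unique(): a scalar argument returns a sorted list, a list argument returns a dict keyed by attribute:

    locs = db.unique('SliceLocation', series)            # single attribute: a list of values
    pars = db.unique(['EchoTime', 'FlipAngle'], series)  # list: dict {attribute: unique values}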


    def copy(self, from_entity, to_entity=None):
        """Copy a DICOM entity (patient, study or series)

        Args:
            from_entity (list): entity to copy
            to_entity (list, optional): entity after copying. If this is not
                provided, a copy will be made in the same study and returned

        Returns:
            entity: the copied entity. If to_entity is provided, this is
                returned.
        """
        if len(from_entity) == 4:
            if to_entity is None:
                to_entity = deepcopy(from_entity)
                if isinstance(to_entity[-1], tuple):
                    to_entity[-1] = (to_entity[-1][0] + '_copy', 0)
                else:
                    to_entity[-1] = (to_entity[-1] + '_copy', 0)
                while to_entity in self.series():
                    to_entity[-1] = (to_entity[-1][0], to_entity[-1][1] + 1)
            if len(to_entity) != 4:
                raise ValueError(
                    f"Cannot copy series {from_entity} to series {to_entity}. "
                    f"{to_entity} is not a series (needs 4 elements)."
                )
            self._copy_series(from_entity, to_entity)
            return to_entity

        if len(from_entity) == 3:
            if to_entity is None:
                to_entity = deepcopy(from_entity)
                if isinstance(to_entity[-1], tuple):
                    to_entity[-1] = (to_entity[-1][0] + '_copy', 0)
                else:
                    to_entity[-1] = (to_entity[-1] + '_copy', 0)
                while to_entity in self.studies():
                    to_entity[-1] = (to_entity[-1][0], to_entity[-1][1] + 1)
            if len(to_entity) != 3:
                raise ValueError(
                    f"Cannot copy study {from_entity} to study {to_entity}. "
                    f"{to_entity} is not a study (needs 3 elements)."
                )
            self._copy_study(from_entity, to_entity)
            return to_entity

        if len(from_entity) == 2:
            if to_entity is None:
                to_entity = deepcopy(from_entity)
                to_entity[-1] += '_copy'
                while to_entity in self.patients():
                    to_entity[-1] += '_copy'
            if len(to_entity) != 2:
                raise ValueError(
                    f"Cannot copy patient {from_entity} to patient {to_entity}. "
                    f"{to_entity} is not a patient (needs 2 elements)."
                )
            self._copy_patient(from_entity, to_entity)
            return to_entity

        raise ValueError(
            f"Cannot copy {from_entity} to {to_entity}. "
        )

    def move(self, from_entity, to_entity):
        """Move a DICOM entity

        Args:
            from_entity (list): entity to move
            to_entity (list): destination entity
        """
        self.copy(from_entity, to_entity)
        self.delete(from_entity)
        return self
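
Copy and move compose from the primitives above; a sketch with placeholder entities (omitting to_entity makes copy() append a '_copy' suffix and return the new entity):

    backup = db.copy(series)  # same study; description gets a '_copy' suffix
    db.move(series, ['dicom_data', 'patient_2', 'baseline', 'T1_map'])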

    def split_series(self, series:list, attr:Union[str, tuple], key=None) -> list:
        """
        Split a series into multiple series

        Args:
            series (list): series to split.
            attr (str or tuple): dicom attribute to split the series by.
            key (function): split by key(attr)
        Returns:
            list: list of two-element tuples, where the first element
                is the value and the second element is the series corresponding
                to that value.
        """

        # Find all values of the attr and list files per value
        all_files = register.files(self.register, series)
        files = []
        values = []
        for f in tqdm(all_files, desc=f'Reading {attr}'):
            ds = pydicom.dcmread(f)
            v = get_values(ds, attr)
            if key is not None:
                v = key(v)
            if v in values:
                index = values.index(v)
                files[index].append(f)
            else:
                values.append(v)
                files.append([f])

        # Copy the files for each value (sorted) to new series
        split_series = []
        for index, v in tqdm(enumerate(values), desc='Writing new series'):
            series_desc = series[-1] if isinstance(series[-1], str) else series[-1][0]
            series_desc = clean_folder_name(f'{series_desc}_{attr}_{v}')
            series_v = series[:3] + [(series_desc, 0)]
            self._files_to_series(files[index], series_v)
            split_series.append((v, series_v))
        return split_series
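
A sketch of split_series(), e.g. separating a mixed series by image orientation; the key function here rounds the (list-valued) attribute so that near-identical orientations group together:

    split = db.split_series(series, 'ImageOrientationPatient',
                            key=lambda v: tuple(np.around(v, 3)))
    for value, subseries in split:
        print(value, subseries[-1])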


    def _values(self, attributes:list, entity:list):
        # Create a np array v with values for each instance and attribute
        # if set(attributes) <= set(dbdatabase.COLUMNS):
        #     index = register.index(self.register, entity)
        #     v = self.register.loc[index, attributes].values
        # else:
        files = register.files(self.register, entity)
        v = np.empty((len(files), len(attributes)), dtype=object)
        for i, f in enumerate(files):
            ds = pydicom.dcmread(f)
            v[i,:] = get_values(ds, attributes)
        return v

    def _copy_patient(self, from_patient, to_patient):
        from_patient_studies = register.studies(self.register, from_patient)
        for from_study in tqdm(from_patient_studies, desc=f'Copying patient {from_patient[1:]}'):
            # Count the studies with the same description in the target patient
            study_desc = from_study[-1][0]
            if to_patient[0] == from_patient[0]:
                cnt = len(self.studies(to_patient, desc=study_desc))
            else:
                mgr = DataBaseDicom(to_patient[0])
                cnt = len(mgr.studies(to_patient, desc=study_desc))
                mgr.close()
            # Ensure the copied studies end up in a separate study with the same description
            to_study = to_patient + [(study_desc, cnt)]
            self._copy_study(from_study, to_study)

    def _copy_study(self, from_study, to_study):
        from_study_series = register.series(self.register, from_study)
        for from_series in tqdm(from_study_series, desc=f'Copying study {from_study[1:]}'):
            # Count the series with the same description in the target study
            series_desc = from_series[-1][0]
            if to_study[0] == from_study[0]:
                cnt = len(self.series(to_study, desc=series_desc))
            else:
                mgr = DataBaseDicom(to_study[0])
                cnt = len(mgr.series(to_study, desc=series_desc))
                mgr.close()
            # Ensure the copied series end up in a separate series with the same description
            to_series = to_study + [(series_desc, cnt)]
            self._copy_series(from_series, to_series)

    def _copy_series(self, from_series, to_series):
        # Get the files to be exported
        from_series_files = register.files(self.register, from_series)
        if to_series[0] == from_series[0]:
            # Copy in the same database
            self._files_to_series(from_series_files, to_series)
        else:
            # Copy to another database
            mgr = DataBaseDicom(to_series[0])
            mgr._files_to_series(from_series_files, to_series)
            mgr.close()


    def _files_to_series(self, files, to_series):

        # Get the attributes of the destination series
        attr = self._series_attributes(to_series)
        n = self._max_instance_number(attr['SeriesInstanceUID'])

        # Copy the files to the new series
        for i, f in tqdm(enumerate(files), total=len(files), desc=f'Copying series {to_series[1:]}'):
            # Read dataset and assign new properties
            ds = pydicom.dcmread(f)
            self._write_dataset(ds, attr, n + 1 + i)

    def _max_study_id(self, patient_id):
        for pt in self.register:
            if pt['PatientID'] == patient_id:
                # Find the largest integer StudyID
                n = []
                for st in pt['studies']:
                    try:
                        n.append(int(st['StudyID']))
                    except Exception:
                        pass
                if n == []:
                    return 0
                else:
                    return int(np.amax(n))
        return 0

    def _max_series_number(self, study_uid):
        for pt in self.register:
            for st in pt['studies']:
                if st['StudyInstanceUID'] == study_uid:
                    n = [sr['SeriesNumber'] for sr in st['series']]
                    return int(np.amax(n))
        return 0

    def _max_instance_number(self, series_uid):
        for pt in self.register:
            for st in pt['studies']:
                for sr in st['series']:
                    if sr['SeriesInstanceUID'] == series_uid:
                        n = list(sr['instances'].keys())
                        return int(np.amax([int(i) for i in n]))
        return 0

    # def _attributes(self, entity):
    #     if len(entity)==4:
    #         return self._series_attributes(entity)
    #     if len(entity)==3:
    #         return self._study_attributes(entity)
    #     if len(entity)==2:
    #         return self._patient_attributes(entity)


    def _patient_attributes(self, patient):
        try:
            # If the patient exists and has files, read from file
            files = register.files(self.register, patient)
            attr = const.PATIENT_MODULE
            ds = pydicom.dcmread(files[0])
            vals = get_values(ds, attr)
        except Exception:
            # If the patient does not exist, generate values
            if patient in self.patients():
                raise ValueError(
                    f"Cannot create patient with id {patient[1]}. "
                    f"The ID is already taken. Please provide a unique ID."
                )
            attr = ['PatientID', 'PatientName']
            vals = [patient[1], 'Anonymous']
        return {attr[i]: vals[i] for i in range(len(attr)) if vals[i] is not None}


    def _study_attributes(self, study):
        patient_attr = self._patient_attributes(study[:2])
        try:
            # If the study exists and has files, read from file
            files = register.files(self.register, study)
            attr = const.STUDY_MODULE
            ds = pydicom.dcmread(files[0])
            vals = get_values(ds, attr)
        except register.AmbiguousError as e:
            raise register.AmbiguousError(e)
        except Exception:
            # If the study does not exist or is empty, generate values
            if study[:-1] not in self.patients():
                study_id = 1
            else:
                study_id = 1 + self._max_study_id(study[1])
            attr = ['StudyInstanceUID', 'StudyDescription', 'StudyID']
            study_uid = pydicom.uid.generate_uid()
            study_desc = study[-1] if isinstance(study[-1], str) else study[-1][0]
            #study_date = datetime.today().strftime('%Y%m%d')
            vals = [study_uid, study_desc, str(study_id)]
        return patient_attr | {attr[i]: vals[i] for i in range(len(attr)) if vals[i] is not None}


    def _series_attributes(self, series):
        study_attr = self._study_attributes(series[:3])
        try:
            # If the series exists and has files, read from file
            files = register.files(self.register, series)
            attr = const.SERIES_MODULE
            ds = pydicom.dcmread(files[0])
            vals = get_values(ds, attr)
        except register.AmbiguousError as e:
            raise register.AmbiguousError(e)
        except Exception:
            # If the series does not exist or is empty, generate values
            try:
                study_uid = register.study_uid(self.register, series[:-1])
            except Exception:
                series_number = 1
            else:
                series_number = 1 + self._max_series_number(study_uid)
            attr = ['SeriesInstanceUID', 'SeriesDescription', 'SeriesNumber']
            series_uid = pydicom.uid.generate_uid()
            series_desc = series[-1] if isinstance(series[-1], str) else series[-1][0]
            vals = [series_uid, series_desc, int(series_number)]
        return study_attr | {attr[i]: vals[i] for i in range(len(attr)) if vals[i] is not None}


    def _write_dataset(self, ds:Dataset, attr:dict, instance_nr:int):
        # Set new attributes
        attr['SOPInstanceUID'] = pydicom.uid.generate_uid()
        attr['InstanceNumber'] = str(instance_nr)
        set_values(ds, list(attr.keys()), list(attr.values()))
        # Save results in a new file
        rel_dir = [
            f"Patient__{attr['PatientID']}",
            f"Study__{attr['StudyID']}__{attr['StudyDescription']}",
            f"Series__{attr['SeriesNumber']}__{attr['SeriesDescription']}",
        ]
        dir = os.path.join(self.path, str(Path(*rel_dir)))
        os.makedirs(dir, exist_ok=True)
        filename = pydicom.uid.generate_uid() + '.dcm'
        dbdataset.write(ds, os.path.join(dir, filename))
        # Add an entry in the register
        rel_path = rel_dir + [filename]
        register.add_instance(self.register, attr, rel_path)


    # TODO: deprecate
    def archive(self, archive_path):
        # TODO add flat=True option for zipping at patient level
        for pt in tqdm(self.register, desc='Archiving '):
            for st in pt['studies']:
                zip_dir = os.path.join(
                    archive_path,
                    f"Patient__{pt['PatientID']}",
                    f"Study__{st['StudyID']}__{st['StudyDescription']}",
                )
                os.makedirs(zip_dir, exist_ok=True)
                for sr in st['series']:
                    zip_file = os.path.join(
                        zip_dir,
                        f"Series__{sr['SeriesNumber']}__{sr['SeriesDescription']}.zip",
                    )
                    if os.path.exists(zip_file):
                        continue
                    try:
                        with zipfile.ZipFile(zip_file, 'w') as zipf:
                            for rel_path in sr['instances'].values():
                                file = os.path.join(self.path, str(Path(*rel_path)))
                                zipf.write(file, arcname=os.path.basename(file))
                    except Exception as e:
                        raise RuntimeError(
                            f"Error archiving series {sr['SeriesDescription']} "
                            f"in study {st['StudyDescription']} of patient {pt['PatientID']}."
                        )


def full_name(entity):

    if len(entity) == 3: # study
        if isinstance(entity[-1], tuple):
            return entity
        else:
            full_name_study = deepcopy(entity)
            full_name_study[-1] = (full_name_study[-1], 0)
            return full_name_study

    elif len(entity) == 4: # series
        full_name_study = full_name(entity[:3])
        series = full_name_study + [entity[-1]]
        if isinstance(series[-1], tuple):
            return series
        else:
            full_name_series = deepcopy(series)
            full_name_series[-1] = (full_name_series[-1], 0)
            return full_name_series
    else:
        return entity


def clean_folder_name(name, replacement="", max_length=255):
    # Strip leading/trailing whitespace
    name = name.strip()

    # Replace invalid characters (Windows, macOS, Linux-safe)
    illegal_chars = r'[<>:"/\\|?*\[\]\x00-\x1F\x7F]'
    name = re.sub(illegal_chars, replacement, name)

    # Replace reserved Windows names
    reserved = {
        "CON", "PRN", "AUX", "NUL",
        *(f"COM{i}" for i in range(1, 10)),
        *(f"LPT{i}" for i in range(1, 10))
    }
    name_upper = name.upper().split(".")[0]  # Just the base name
    if name_upper in reserved:
        name = f"{name}_folder"

    # Truncate to max length (common max: 255 bytes)
    return name[:max_length] or "folder"
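
For example, clean_folder_name('T1: map/ax') drops the ':' and '/' and returns 'T1 mapax', and a reserved Windows name like 'CON' comes back as 'CON_folder'.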


def remove_empty_folders(path):
    """
    Removes all empty subfolders from a given directory.

    This function walks through the directory tree from the bottom up.
    This is crucial because it allows child directories to be removed before
    their parents, potentially making the parent directory empty and
    eligible for removal in the same pass.

    Args:
        path (str): The absolute or relative path to the directory to scan.
    """
    # Walk the directory tree in a bottom-up manner (topdown=False)
    for dirpath, dirnames, filenames in os.walk(path, topdown=False):
        # A directory is considered empty if it has no subdirectories and no files
        if not dirnames and not filenames:
            try:
                shutil.rmtree(dirpath)
            except OSError as e:
                # This might happen due to permissions issues
                print(f"Error removing {dirpath}: {e}")


def check_slice_cosines(vols, locs):

    # If not all slice locations are defined there is nothing to check
    if None in locs:
        return vols

    # Count the number of volumes with correct slice locs
    cnt = _count_correct_slice_locations(vols, locs)

    # If they are all correct, we are done
    if cnt == len(vols):
        return vols

    # If not, flip the slice cosine and count again
    for v in vols:
        v.affine[:3, 2] *= -1
    cnt = _count_correct_slice_locations(vols, locs)

    # If they are all correct, we are done
    if cnt == len(vols):
        return vols

    # # Otherwise raise an error as slice locations must be corrupt
    # raise ValueError(
    #     "Corrupted DICOM files: not all SliceLocation values match up "
    #     "with the ImagePositionPatient."
    # )

    # Otherwise flip back and return as is
    for v in vols:
        v.affine[:3, 2] *= -1
    return vols


def _count_correct_slice_locations(vols, locs):

    slice_cosine_0 = vols[0].affine[:3, 2]
    cnt = 0
    for i, v in enumerate(vols):
        slice_cosine = v.affine[:3, 2]
        if not np.array_equal(slice_cosine_0, slice_cosine):
            raise ValueError(
                "Cannot read volume: not all slices have the same orientation. "
                "Split the series by ImageOrientationPatient and try again."
            )
        slice_pos = v.affine[:3, 3]
        slice_loc = np.dot(slice_pos, slice_cosine)
        diff = np.around(locs[i] - slice_loc, 2)  # precision 0.01 mm
        if diff == 0:
            cnt += 1
    return cnt


def infer_slice_spacing(vols):
    # In case spacing between slices is not (correctly) encoded in
    # DICOM it can be inferred from the slice locations.

    shape = vols.shape
    vols = vols.reshape((shape[0], -1))
    slice_spacing = np.zeros(vols.shape[-1])

    for d in range(vols.shape[-1]):

        # For single slice volumes there is nothing to do
        if vols[:, d].shape[0] == 1:
            continue

        # Get a normal slice vector from the first volume.
        mat = vols[0, d].affine[:3, :3]
        normal = mat[:, 2] / np.linalg.norm(mat[:, 2])

        # Get slice locations by projection on the normal.
        pos = [v.affine[:3, 3] for v in vols[:, d]]
        slice_loc = [np.dot(p, normal) for p in pos]

        # Sort slice locations and take consecutive differences.
        slice_loc = np.sort(slice_loc)
        distances = slice_loc[1:] - slice_loc[:-1]

        # Round to 10 micrometre and check if unique
        distances = np.around(distances, 2)
        slice_spacing_d = np.unique(distances)

        # Check if slice spacings are unique - otherwise this is not a volume
        if len(slice_spacing_d) > 1:
            raise ValueError(
                'Cannot build a volume - spacings between slices are not unique.'
            )
        else:
            slice_spacing_d = slice_spacing_d[0]

        # Set correct slice spacing in all volumes
        for v in vols[:, d]:
            v.affine[:3, 2] = normal * abs(slice_spacing_d)

        slice_spacing[d] = slice_spacing_d

    # Check slice_spacing is the same across dimensions
    # Not sure if this is possible as volumes are sorted by slice location
    slice_spacing = np.unique(slice_spacing)
    if len(slice_spacing) > 1:
        raise ValueError(
            'Cannot build a volume - spacings between slices are not unique.'
        )

    return vols.reshape(shape)


def affine_slice_loc(affine):
    slice_cosine = affine[:3, 2] / np.linalg.norm(affine[:3, 2])
    return np.dot(affine[:3, 3], slice_cosine)
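
As a quick check of the projection above (values chosen for illustration): for an axial affine with the slice direction along z and position (0, 0, 30), the normalised slice cosine is (0, 0, 1) and the slice location is 0*0 + 0*0 + 30*1 = 30 mm:

    import numpy as np

    affine = np.array([
        [1.0, 0.0, 0.0,  0.0],
        [0.0, 1.0, 0.0,  0.0],
        [0.0, 0.0, 2.5, 30.0],  # 2.5 mm slice spacing in the third column
        [0.0, 0.0, 0.0,  1.0],
    ])
    print(affine_slice_loc(affine))  # 30.0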


def _skip(ds, kwargs):
    if kwargs == {}:
        return False
    fltrs = get_values(ds, list(kwargs.keys()))
    for i, f in enumerate(kwargs.values()):
        if fltrs[i] != f:
            return True
    return False