dbdicom 0.3.9__py3-none-any.whl → 0.3.11__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release: this version of dbdicom might be problematic.
- dbdicom/api.py +60 -46
- dbdicom/dataset.py +2 -2
- dbdicom/dbd.py +272 -135
- dbdicom/register.py +21 -0
- dbdicom/sop_classes/enhanced_mr_image.py +1 -1
- dbdicom/utils/arrays.py +124 -36
- {dbdicom-0.3.9.dist-info → dbdicom-0.3.11.dist-info}/METADATA +1 -1
- {dbdicom-0.3.9.dist-info → dbdicom-0.3.11.dist-info}/RECORD +11 -11
- {dbdicom-0.3.9.dist-info → dbdicom-0.3.11.dist-info}/WHEEL +0 -0
- {dbdicom-0.3.9.dist-info → dbdicom-0.3.11.dist-info}/licenses/LICENSE +0 -0
- {dbdicom-0.3.9.dist-info → dbdicom-0.3.11.dist-info}/top_level.txt +0 -0
dbdicom/api.py
CHANGED

@@ -4,8 +4,7 @@ import zipfile
 from pathlib import Path
 from typing import Union
 from tqdm import tqdm
-
-
+import numpy as np
 import vreg
 
 from dbdicom.dbd import DataBaseDicom

@@ -164,26 +163,34 @@ def series(entity:str | list, desc:str=None, contains:str=None, isin:list=None)-
         "To retrieve a series, the entity must be a database, patient or study."
     )
 
-def copy(from_entity:list, to_entity
-    """Copy a DICOM
+def copy(from_entity:list, to_entity=None):
+    """Copy a DICOM entity (patient, study or series)
 
     Args:
         from_entity (list): entity to copy
-        to_entity (list): entity after copying.
+        to_entity (list, optional): entity after copying. If this is not
+            provided, a copy will be made in the same study and returned.
+
+    Returns:
+        entity: the copied entity. If th to_entity is provided, this is
+            returned.
     """
     dbd = open(from_entity[0])
-    dbd.copy(from_entity, to_entity)
+    from_entity_copy = dbd.copy(from_entity, to_entity)
     dbd.close()
+    return from_entity_copy
 
 
-def delete(entity:list):
+def delete(entity:list, not_exists_ok=False):
     """Delete a DICOM entity
 
     Args:
         entity (list): entity to delete
+        not_exists_ok (bool): By default, an exception is raised when attempting
+            to delete an entity that does not exist. Set this to True to pass over this silently.
     """
     dbd = open(entity[0])
-    dbd.delete(entity)
+    dbd.delete(entity, not_exists_ok)
     dbd.close()
 
 

@@ -216,25 +223,42 @@ def split_series(series:list, attr:Union[str, tuple], key=None)->list:
     return split_series
 
 
-def volume(
-    """Read volume
+def volume(series:list, dims:list=None, verbose=1) -> vreg.Volume3D:
+    """Read volume from a series.
 
     Args:
-
+        series (list, str): DICOM entity to read
         dims (list, optional): Non-spatial dimensions of the volume. Defaults to None.
         verbose (bool, optional): If set to 1, shows progress bar. Defaults to 1.
 
     Returns:
-        vreg.Volume3D
-            a volume, else a list of volumes.
+        vreg.Volume3D.
     """
-
-
-    dbd = open(entity[0])
-    vol = dbd.volume(entity, dims, verbose)
+    dbd = open(series[0])
+    vol = dbd.volume(series, dims, verbose)
     dbd.close()
     return vol
 
+
+def values(series:list, *attr, dims:list=None, verbose=1) -> Union[np.ndarray, list]:
+    """Read the values of some attributes from a DICOM series
+
+    Args:
+        series (list): DICOM series to read.
+        attr (tuple, optional): DICOM attributes to read.
+        dims (list, optional): Dimensions to sort the values.
+            If dims is not provided, values are sorted by
+            InstanceNumber.
+
+    Returns:
+        tuple: arrays with values for the attributes.
+    """
+    dbd = open(series[0])
+    values = dbd.values(series, *attr, dims=dims, verbose=verbose)
+    dbd.close()
+    return values
+
+
 def write_volume(vol:Union[vreg.Volume3D, tuple], series:list, ref:list=None):
     """Write a vreg.Volume3D to a DICOM series
 

@@ -247,6 +271,25 @@ def write_volume(vol:Union[vreg.Volume3D, tuple], series:list, ref:list=None):
     dbd.write_volume(vol, series, ref)
     dbd.close()
 
+
+def edit(series:list, new_values:dict, dims:list=None, verbose=1):
+    """Edit attribute values in a DICOM series
+
+    Warning: this function edits all values as requested. Please take care
+    when editing attributes that affect the DICOM file organisation, such as
+    UIDs, as this could corrupt the database.
+
+    Args:
+        series (list): DICOM series to edit
+        new_values (dict): dictionary with attribute: value pairs to write to the series
+        dims (list, optional): Non-spatial dimensions of the volume. Defaults to None.
+        verbose (bool, optional): If set to 1, shows progress bar. Defaults to 1.
+
+    """
+    dbd = open(series[0])
+    dbd.edit(series, new_values, dims=dims, verbose=verbose)
+    dbd.close()
+
 def to_nifti(series:list, file:str, dims:list=None, verbose=1):
     """Save a DICOM series in nifti format.
 

@@ -274,35 +317,6 @@ def from_nifti(file:str, series:list, ref:list=None):
     dbd.close()
 
 
-def values(series:list, attr=None, dims:list=None, coords=False) -> Union[dict, tuple]:
-    """Read the values of some or all attributes from a DICOM series
-
-    Args:
-        series (list or str): DICOM series to read. This can also
-            be a path to a folder containing DICOM files, or a
-            patient or study to read all series in that patient or
-            study. In those cases a list is returned.
-        attr (list, optional): list of DICOM attributes to read.
-        dims (list, optional): Dimensions to sort the attributes.
-            If dims is not provided, values are sorted by
-            InstanceNumber.
-        coords (bool): If set to True, the coordinates of the
-            attributes are returned alongside the values
-
-    Returns:
-        dict or tuple: values as a dictionary in the last
-            return value, where each value is a numpy array with
-            the required dimensions. If coords is set to True,
-            these are returned too.
-    """
-    if isinstance(series, str):
-        series = [series]
-    dbd = open(series[0])
-    array = dbd.values(series, attr, dims, coords)
-    dbd.close()
-    return array
-
-
 def files(entity:list) -> list:
     """Read the files in a DICOM entity
 
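For reference, the new top-level API in api.py can be exercised roughly as follows. This is a minimal sketch based only on the signatures and docstrings in the diff above; the database path and the patient, study and series names are hypothetical.

from dbdicom import api

# An entity is a list starting with the database folder, followed by patient,
# study and series identifiers (all names below are made up).
series = ['/data/dicom_db', 'patient_01', 'baseline', 'T1_map']

# Read attribute values, sorted along user-chosen dimensions (new *attr signature).
echo_times = api.values(series, 'EchoTime', dims=['SliceLocation', 'AcquisitionTime'])

# Edit attributes in place; scalars are broadcast, arrays must match the number of files.
api.edit(series, {'ImageComments': 'reviewed'})

# copy() without a target now creates a copy in the same study and returns it.
series_copy = api.copy(series)

# delete() no longer has to raise for entities that do not exist.
api.delete(series_copy, not_exists_ok=True)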
dbdicom/dataset.py
CHANGED

@@ -370,7 +370,7 @@ def set_volume(ds, volume:vreg.Volume3D):
     set_affine(ds, volume.affine)
     if volume.coords is not None:
         # All other dimensions should have size 1
-        coords =
+        coords = [c.reshape(-1) for c in volume.coords]
         for i, d in enumerate(volume.dims):
             if not is_valid_dicom_tag(d):
                 raise ValueError(

@@ -380,7 +380,7 @@ def set_volume(ds, volume:vreg.Volume3D):
                     "tags to change the dimensions."
                 )
             else:
-                set_values(ds, d, coords[i
+                set_values(ds, d, coords[i][0])
 
 
 
dbdicom/dbd.py
CHANGED

@@ -1,8 +1,10 @@
 import os
+import shutil
 import json
 from typing import Union
 import zipfile
 import re
+from copy import deepcopy
 
 from tqdm import tqdm
 import numpy as np

@@ -70,20 +72,33 @@ class DataBaseDicom():
 
 
 
-    def delete(self, entity):
+    def delete(self, entity, not_exists_ok=False):
         """Delete a DICOM entity from the database
 
         Args:
            entity (list): entity to delete
+           not_exists_ok (bool): By default, an exception is raised when attempting
+               to delete an entity that does not exist. Set this to True to pass over this silently.
        """
-
-
+        # delete datasets on disk
+        try:
+            removed = register.index(self.register, entity)
+        except ValueError:
+            if not_exists_ok:
+                return self
+            else:
+                raise ValueError(
+                    f"The entity you are trying to delete does not exist. \n"
+                    f"You can set not_exists_ok=True in dbdicom.delete() to avoid this error."
+                )
        for index in removed:
            file = os.path.join(self.path, index)
            if os.path.exists(file):
                os.remove(file)
-        #
-
+        # drop the entity from the register
+        register.remove(self.register, entity)
+        # cleanup empty folders
+        remove_empty_folders(entity[0])
        return self
 
 

@@ -206,22 +221,22 @@ class DataBaseDicom():
        return register.series(self.register, entity, desc, contains, isin)
 
 
-    def volume(self, entity:Union[list, str], dims:list=None, verbose=1) ->
-        """Read volume
+    def volume(self, entity:Union[list, str], dims:list=None, verbose=1) -> vreg.Volume3D:
+        """Read volume.
 
        Args:
-            entity (list, str): DICOM
+            entity (list, str): DICOM series to read
            dims (list, optional): Non-spatial dimensions of the volume. Defaults to None.
            verbose (bool, optional): If set to 1, shows progress bar. Defaults to 1.
 
        Returns:
-            vreg.Volume3D
-                a volume, else a list of volumes.
+            vreg.Volume3D:
        """
-        if isinstance(entity, str): # path to folder
-
-        if len(entity) < 4: # folder, patient or study
-
+        # if isinstance(entity, str): # path to folder
+        #     return [self.volume(s, dims) for s in self.series(entity)]
+        # if len(entity) < 4: # folder, patient or study
+        #     return [self.volume(s, dims) for s in self.series(entity)]
+
        if dims is None:
            dims = []
        elif isinstance(dims, str):

@@ -230,33 +245,39 @@ class DataBaseDicom():
            dims = list(dims)
        dims = ['SliceLocation'] + dims
 
-        files = register.files(self.register, entity)
-
        # Read dicom files
-        values = []
+        values = [[] for _ in dims]
        volumes = []
+
+        files = register.files(self.register, entity)
        for f in tqdm(files, desc='Reading volume..', disable=(verbose==0)):
            ds = pydicom.dcmread(f)
-
+            values_f = get_values(ds, dims)
+            for d in range(len(dims)):
+                values[d].append(values_f[d])
            volumes.append(dbdataset.volume(ds))
 
-        # Format as mesh
-
-        values = [np.array(v, dtype=object) for v in values] # object array to allow for mixed types
-        coords = np.stack(values, axis=-1)
+        # Format coordinates as mesh
+        coords = [np.array(v) for v in values]
        coords, inds = dbdicom.utils.arrays.meshvals(coords)
-        vols = np.array(volumes)
-        vols = vols[inds].reshape(coords.shape[1:])
 
        # Check that all slices have the same coordinates
- (8 removed lines not shown in this view)
+        if len(dims) > 1:
+            # Loop over all coordinates after slice location
+            for c in coords[1:]:
+                # Loop over all slice locations
+                for k in range(1, c.shape[0]):
+                    # Coordinate c of slice k
+                    if not np.array_equal(c[k,...], c[0,...]):
+                        raise ValueError(
+                            "Cannot build a single volume. Not all slices "
+                            "have the same coordinates."
+                        )
+
+        # Build volumes
+        vols = np.array(volumes)
+        vols = vols[inds].reshape(coords[0].shape)
+
        # Infer spacing between slices from slice locations
        # Technically only necessary if SpacingBetweenSlices not set or incorrect
        vols = infer_slice_spacing(vols)

@@ -272,11 +293,66 @@ class DataBaseDicom():
        # Then try again
        vol = vreg.join(vols)
        if vol.ndim > 3:
+            # Coordinates of slice 0
+            c0 = [c[0,...] for c in coords[1:]]
            vol.set_coords(c0)
            vol.set_dims(dims[1:])
        return vol
 
-
+
+    def values(self, series:list, *attr, dims:list=None, verbose=1) -> Union[dict, tuple]:
+        """Read the values of some attributes from a DICOM series
+
+        Args:
+            series (list): DICOM series to read.
+            attr (tuple, optional): DICOM attributes to read.
+            dims (list, optional): Non-spatial dimensions of the volume. Defaults to None.
+            verbose (bool, optional): If set to 1, shows progress bar. Defaults to 1.
+
+        Returns:
+            tuple: arrays with values for the attributes.
+        """
+        # if isinstance(series, str): # path to folder
+        #     return [self.values(s, attr, dims) for s in self.series(series)]
+        # if len(series) < 4: # folder, patient or study
+        #     return [self.values(s, attr, dims) for s in self.series(series)]
+
+        if dims is None:
+            dims = ['InstanceNumber']
+        elif np.isscalar(dims):
+            dims = [dims]
+        else:
+            dims = list(dims)
+
+        # Read dicom files
+        coord_values = [[] for _ in dims]
+        attr_values = [[] for _ in attr]
+
+        files = register.files(self.register, series)
+        for f in tqdm(files, desc='Reading values..', disable=(verbose==0)):
+            ds = pydicom.dcmread(f)
+            coord_values_f = get_values(ds, dims)
+            for d in range(len(dims)):
+                coord_values[d].append(coord_values_f[d])
+            attr_values_f = get_values(ds, attr)
+            for a in range(len(attr)):
+                attr_values[a].append(attr_values_f[a])
+
+        # Format coordinates as mesh
+        coords = [np.array(v) for v in coord_values]
+        coords, inds = dbdicom.utils.arrays.meshvals(coords)
+
+        # Sort values accordingly
+        values = [np.array(v) for v in attr_values]
+        values = [v[inds].reshape(coords[0].shape) for v in values]
+
+        # Return values
+        if len(values) == 1:
+            return values[0]
+        else:
+            return tuple(values)
+
+
    def write_volume(
        self, vol:Union[vreg.Volume3D, tuple], series:list,
        ref:list=None,

@@ -288,6 +364,10 @@ class DataBaseDicom():
            series (list): DICOM series to read
            ref (list): Reference series
        """
+        series_full_name = full_name(series)
+        if series_full_name in self.series():
+            raise ValueError(f"Series {series_full_name[-1]} already exists in study {series_full_name[-2]}.")
+
        if isinstance(vol, tuple):
            vol = vreg.volume(vol[0], vol[1])
        if ref is None:

@@ -318,11 +398,85 @@ class DataBaseDicom():
            slices = vt.split()
            for sl in slices:
                dbdataset.set_volume(ds, sl)
-                sl_coords = [
+                sl_coords = [c.ravel()[0] for c in sl.coords]
                set_value(ds, sl.dims, sl_coords)
                self._write_dataset(ds, attr, n + 1 + i)
                i+=1
        return self
+
+
+    def edit(
+        self, series:list, new_values:dict, dims:list=None, verbose=1,
+    ):
+        """Edit attribute values in a new DICOM series
+
+        Args:
+            series (list): DICOM series to edit
+            new_values (dict): dictionary with attribute: value pairs to write to the series
+            dims (list, optional): Non-spatial dimensions of the volume. Defaults to None.
+            verbose (bool, optional): If set to 1, shows progress bar. Defaults to 1.
+        """
+
+        if dims is None:
+            dims = ['InstanceNumber']
+        elif np.isscalar(dims):
+            dims = [dims]
+        else:
+            dims = list(dims)
+
+        # Check that all values have the correct nr of elements
+        files = register.files(self.register, series)
+        for a in new_values.values():
+            if np.isscalar(a):
+                pass
+            elif np.array(a).size != len(files):
+                raise ValueError(
+                    f"Incorrect value lengths. All values need to have {len(files)} elements"
+                )
+
+        # Read dicom files to sort them
+        coord_values = [[] for _ in dims]
+        for f in tqdm(files, desc='Sorting series..', disable=(verbose==0)):
+            ds = pydicom.dcmread(f)
+            coord_values_f = get_values(ds, dims)
+            for d in range(len(dims)):
+                coord_values[d].append(coord_values_f[d])
+
+        # Format coordinates as mesh
+        coords = [np.array(v) for v in coord_values]
+        coords, inds = dbdicom.utils.arrays.meshvals(coords)
+
+        # Sort files accordingly
+        files = np.array(files)[inds]
+
+        # Now edit and write the files
+        attr = self._series_attributes(series)
+        n = self._max_instance_number(attr['SeriesInstanceUID'])
+
+        # Drop existing attributes if they are edited
+        attr = {a:attr[a] for a in attr if a not in new_values}
+
+        # List instances to be edited
+        to_drop = register.index(self.register, series)
+
+        # Write the instances
+        tags = list(new_values.keys())
+        for i, f in tqdm(enumerate(files), desc='Writing values..', disable=(verbose==0)):
+            ds = pydicom.dcmread(f)
+            values = []
+            for a in new_values.values():
+                if np.isscalar(a):
+                    values.append(a)
+                else:
+                    values.append(np.array(a).reshape(-1)[i])
+            set_values(ds, tags, values)
+            self._write_dataset(ds, attr, n + 1 + i)
+
+        # Delete the originals files
+        register.drop(self.register, to_drop)
+        [os.remove(os.path.join(self.path, idx)) for idx in to_drop]
+
+        return self
 
 
    def to_nifti(self, series:list, file:str, dims=None, verbose=1):

@@ -456,103 +610,7 @@ class DataBaseDicom():
        return arrays, values_return
 
 
-    def values(self, series:list, attr=None, dims:list=None, coords=False) -> Union[dict, tuple]:
-        """Read the values of some or all attributes from a DICOM series
-
-        Args:
-            series (list or str): DICOM series to read. This can also
-                be a path to a folder containing DICOM files, or a
-                patient or study to read all series in that patient or
-                study. In those cases a list is returned.
-            attr (list, optional): list of DICOM attributes to read.
-            dims (list, optional): Dimensions to sort the attributes.
-                If dims is not provided, values are sorted by
-                InstanceNumber.
-            coords (bool): If set to True, the coordinates of the
-                attributes are returned alongside the values
-
-        Returns:
-            dict or tuple: values as a dictionary in the last
-                return value, where each value is a numpy array with
-                the required dimensions. If coords is set to True,
-                these are returned too.
-        """
-        if isinstance(series, str): # path to folder
-            return [self.values(s, attr, dims, coords) for s in self.series(series)]
-        if len(series) < 4: # folder, patient or study
-            return [self.values(s, attr, dims, coords) for s in self.series(series)]
-
-        if dims is None:
-            dims = ['InstanceNumber']
-        elif np.isscalar(dims):
-            dims = [dims]
-        else:
-            dims = list(dims)
-
-        files = register.files(self.register, series)
-
-        # Ensure return_vals is a list
-        if attr is None:
-            # If attributes are not provided, read all
-            # attributes from the first file
-            ds = pydicom.dcmread(files[0])
-            exclude = ['PixelData', 'FloatPixelData', 'DoubleFloatPixelData']
-            params = []
-            param_labels = []
-            for elem in ds:
-                if elem.keyword not in exclude:
-                    params.append(elem.tag)
-                    # For known tags use the keyword as label
-                    label = elem.tag if len(elem.keyword)==0 else elem.keyword
-                    param_labels.append(label)
-        elif np.isscalar(attr):
-            params = [attr]
-            param_labels = params[:]
-        else:
-            params = list(attr)
-            param_labels = params[:]
-
-        # Read dicom files
-        coords_array = []
-        values = np.empty(len(files), dtype=dict)
-        for i, f in tqdm(enumerate(files), desc='Reading values..'):
-            ds = pydicom.dcmread(f)
-            coords_array.append(get_values(ds, dims))
-            # save as dict so numpy does not stack as arrays
-            values[i] = {'values': get_values(ds, params)}
-
-        # Format as mesh
-        coords_array = np.stack([v for v in coords_array], axis=-1)
-        coords_array, inds = dbdicom.utils.arrays.meshvals(coords_array)
-
-        # Sort values accordingly
-        values = values[inds].reshape(-1)
 
-        # Return values as a dictionary
-        values_dict = {}
-        for p in range(len(params)):
-            # Get the type from the first value
-            vp0 = values[0]['values'][p]
-            # Build an array of the right type
-            vp = np.zeros(values.size, dtype=type(vp0))
-            # Populate the arrate with values for parameter p
-            for i, v in enumerate(values):
-                vp[i] = v['values'][p]
-            # Reshape values for parameter p
-            vp = vp.reshape(coords_array.shape[1:])
-            # Eneter in the dictionary
-            values_dict[param_labels[p]] = vp
-
-        # If only one, return as value
-        if len(params) == 1:
-            values_return = values_dict[params[0]]
-        else:
-            values_return = values_dict
-
-        if coords:
-            return values_return, coords_array
-        else:
-            return values_return
 
 
    def files(self, entity:list) -> list:

@@ -622,34 +680,66 @@ class DataBaseDicom():
        else:
            return {p: values[i] for i, p in enumerate(pars)}
 
-    def copy(self, from_entity, to_entity):
+    def copy(self, from_entity, to_entity=None):
        """Copy a DICOM entity (patient, study or series)
 
        Args:
            from_entity (list): entity to copy
-            to_entity (list): entity after copying.
+            to_entity (list, optional): entity after copying. If this is not
+                provided, a copy will be made in the same study and returned
+
+        Returns:
+            entity: the copied entity. If th to_entity is provided, this is
+                returned.
        """
        if len(from_entity) == 4:
+            if to_entity is None:
+                to_entity = deepcopy(from_entity)
+                if isinstance(to_entity[-1], tuple):
+                    to_entity[-1] = (to_entity[-1][0] + '_copy', 0)
+                else:
+                    to_entity[-1] = (to_entity[-1] + '_copy', 0)
+                while to_entity in self.series():
+                    to_entity[-1][1] += 1
            if len(to_entity) != 4:
                raise ValueError(
                    f"Cannot copy series {from_entity} to series {to_entity}. "
                    f"{to_entity} is not a series (needs 4 elements)."
                )
-
+            self._copy_series(from_entity, to_entity)
+            return to_entity
+
        if len(from_entity) == 3:
+            if to_entity is None:
+                to_entity = deepcopy(from_entity)
+                if isinstance(to_entity[-1], tuple):
+                    to_entity[-1] = (to_entity[-1][0] + '_copy', 0)
+                else:
+                    to_entity[-1] = (to_entity[-1] + '_copy', 0)
+                while to_entity in self.studies():
+                    to_entity[-1][1] += 1
            if len(to_entity) != 3:
                raise ValueError(
                    f"Cannot copy study {from_entity} to study {to_entity}. "
                    f"{to_entity} is not a study (needs 3 elements)."
                )
-
+            self._copy_study(from_entity, to_entity)
+            return to_entity
+
        if len(from_entity) == 2:
+            if to_entity is None:
+                to_entity = deepcopy(from_entity)
+                to_entity[-1] += '_copy'
+                while to_entity in self.patients():
+                    to_entity[-1] += '_copy'
            if len(to_entity) != 2:
                raise ValueError(
                    f"Cannot copy patient {from_entity} to patient {to_entity}. "
                    f"{to_entity} is not a patient (needs 2 elements)."
                )
-
+            self._copy_patient(from_entity, to_entity)
+            return to_entity
+
        raise ValueError(
            f"Cannot copy {from_entity} to {to_entity}. "
        )

@@ -930,6 +1020,28 @@ class DataBaseDicom():
 
 
 
+def full_name(entity):
+
+    if len(entity)==3: # study
+        if isinstance(entity[-1], tuple):
+            return entity
+        else:
+            full_name_study = deepcopy(entity)
+            full_name_study[-1] = (full_name_study[-1], 0)
+            return full_name_study
+
+    elif len(entity)==4: # series
+        full_name_study = full_name(entity[:3])
+        series = full_name_study + [entity[-1]]
+        if isinstance(series[-1], tuple):
+            return series
+        else:
+            full_name_series = deepcopy(series)
+            full_name_series[-1] = (full_name_series[-1], 0)
+            return full_name_series
+
+    else:
+        return entity
+
 
 def clean_folder_name(name, replacement="", max_length=255):
     # Strip leading/trailing whitespace

@@ -954,6 +1066,30 @@ def clean_folder_name(name, replacement="", max_length=255):
 
 
 
+def remove_empty_folders(path):
+    """
+    Removes all empty subfolders from a given directory.
+
+    This function walks through the directory tree from the bottom up.
+    This is crucial because it allows child directories to be removed before
+    their parents, potentially making the parent directory empty and
+    eligible for removal in the same pass.
+
+    Args:
+        path (str): The absolute or relative path to the directory to scan.
+    """
+    # Walk the directory tree in a bottom-up manner (topdown=False)
+    for dirpath, dirnames, filenames in os.walk(path, topdown=False):
+        # A directory is considered empty if it has no subdirectories and no files
+        if not dirnames and not filenames:
+            try:
+                shutil.rmtree(dirpath)
+            except OSError as e:
+                # This might happen due to permissions issues
+                print(f"Error removing {dirpath}: {e}")
+
+
+
 def infer_slice_spacing(vols):
     # In case spacing between slices is not (correctly) encoded in
     # DICOM it can be inferred from the slice locations.

@@ -1010,3 +1146,4 @@ def infer_slice_spacing(vols):
 
 
 
+
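The reworked DataBaseDicom.volume() collects the requested coordinates file by file, sorts them onto a mesh with meshvals(), and then requires that every coordinate other than SliceLocation repeats identically across slices before the slices are joined into a single vreg.Volume3D. A small standalone illustration of that consistency check, with invented coordinate values:

import numpy as np

# After mesh-sorting, each non-slice coordinate array has shape (n_slices, n_times, ...).
# Here: 3 slice locations, each supposedly acquired at the same 2 trigger times.
trigger_times = np.array([
    [0.0, 350.0],
    [0.0, 350.0],
    [0.0, 350.0],
])

# The check used in volume(): every slice must carry the same non-slice
# coordinates, otherwise a single volume cannot be built.
for k in range(1, trigger_times.shape[0]):
    if not np.array_equal(trigger_times[k, ...], trigger_times[0, ...]):
        raise ValueError(
            "Cannot build a single volume. Not all slices have the same coordinates."
        )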
dbdicom/register.py
CHANGED

@@ -96,6 +96,27 @@ def index(dbtree, entity):
                if sr['SeriesInstanceUID'] == series_uid:
                    return list(sr['instances'].values())
 
+def remove(dbtree, entity):
+    if len(entity)==2:
+        patient_id = entity[1]
+        for pt in sorted(dbtree, key=lambda pt: pt['PatientID']):
+            if pt['PatientID'] == patient_id:
+                dbtree.remove(pt)
+    elif len(entity)==3:
+        study_uid = uid(dbtree, entity)
+        for pt in sorted(dbtree, key=lambda pt: pt['PatientID']):
+            for st in sorted(pt['studies'], key=lambda st: st['StudyInstanceUID']):
+                if st['StudyInstanceUID'] == study_uid:
+                    pt['studies'].remove(st)
+    elif len(entity)==4:
+        series_uid = uid(dbtree, entity)
+        for pt in sorted(dbtree, key=lambda pt: pt['PatientID']):
+            for st in sorted(pt['studies'], key=lambda st: st['StudyInstanceUID']):
+                for sr in sorted(st['series'], key=lambda sr: sr['SeriesNumber']):
+                    if sr['SeriesInstanceUID'] == series_uid:
+                        st['series'].remove(sr)
+    return dbtree
+
 
 def drop(dbtree, relpaths):
     for pt in sorted(dbtree[:], key=lambda pt: pt['PatientID']):
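register.remove() walks the same nested tree that index() and drop() use: a list of patient records, each holding a list of studies, each holding a list of series with their instance files. A minimal, made-up sketch of that structure, using only the keys referenced in the code above (all identifiers and paths below are invented):

# Hypothetical register tree with one patient, one study and one series.
dbtree = [
    {
        'PatientID': 'patient_01',
        'studies': [
            {
                'StudyInstanceUID': '1.2.826.0.1.0000.1',
                'series': [
                    {
                        'SeriesInstanceUID': '1.2.826.0.1.0000.1.1',
                        'SeriesNumber': 1,
                        # maps instance identifiers to file paths relative to the database folder
                        'instances': {'1': 'patient_01/baseline/T1_map/000001.dcm'},
                    },
                ],
            },
        ],
    },
]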
dbdicom/sop_classes/enhanced_mr_image.py
CHANGED

@@ -110,7 +110,7 @@ def from_volume(vol:vreg.Volume3D):
 
    # Assign parameters using dims as DICOM keywords
    for ax_i, axis in enumerate(vol.dims):
-        val = vol.coords[
+        val = vol.coords[ax_i][indices]
 
        sequence, attr = axis.split("/")
        if not hasattr(frame_ds, sequence):
dbdicom/utils/arrays.py
CHANGED

@@ -1,40 +1,128 @@
 import numpy as np
 
+from typing import List, Tuple
 
- (27 removed lines not shown in this view)
+
+def meshvals(arrays) -> Tuple[List[np.ndarray], np.ndarray]:
+    """
+    Lexicographically sort flattened N coordinate arrays and reshape back to inferred grid shape,
+    preserving original type of each input array.
+
+    Parameters
+    ----------
+    *arrays : array-like
+        Flattened coordinate arrays of the same length. Can be numbers, strings, or list objects.
+
+    Returns
+    -------
+    sorted_arrays : list[np.ndarray]
+        Coordinate arrays reshaped to inferred N-D grid shape, dtype/type preserved.
+    indices : np.ndarray
+        Permutation indices applied to the flattened arrays.
+    shape : tuple[int, ...]
+        Inferred grid shape (number of unique values per axis).
+    """
+    # Remember original type/dtype for each array
+    orig_types = [a.dtype if isinstance(a[0], np.ndarray) else type(a[0]) for a in arrays]
+
+    # Convert non arrays to object arrays
+    arrs = []
+    for a in arrays:
+        arrs_a = np.empty(len(a), dtype=object)
+        arrs_a[:] = a
+        arrs.append(arrs_a)
+
+    # Stack arrays as columns (M x N)
+    coords = np.stack(arrs, axis=1)
+
+    # Lexicographic sort using structured array
+    indices = np.lexsort(coords.T[::-1])
+    sorted_coords = coords[indices]
+
+    # Check that all coordinates are unique
+    points = [tuple(col) for col in sorted_coords]
+    if not all_elements_unique(points):
        raise ValueError(
- (9 removed lines not shown in this view)
+            f"Improper coordinates. Coordinate values are not unique."
+        )
+
+    # Infer shape from unique values per axis
+    shape = tuple(len(np.unique(sorted_coords[:, i])) for i in range(sorted_coords.shape[1]))
+
+    # Check perfect grid
+    if np.prod(shape) != sorted_coords.shape[0]:
+        raise ValueError(
+            f"Coordinates do not form a perfect Cartesian grid: inferred shape {shape} "
+            f"does not match number of points {sorted_coords.shape[0]}"
+        )
+
+    # Split back into individual arrays and cast to original type
+    sorted_arrays = []
+    for i, orig_type in enumerate(orig_types):
+        arr = sorted_coords[:, i]
+        arr = arr.astype(orig_type).reshape(shape)
+        sorted_arrays.append(arr)
+
+    return sorted_arrays, indices
+
+
+def all_elements_unique(items):
+    """
+    The most general uniqueness check, but also the slowest (O(n^2)).
+
+    It works for ANY type that supports equality checking (==), including
+    lists, dicts, and custom objects, without requiring them to be hashable.
+    """
+    for i in range(len(items)):
+        for j in range(i + 1, len(items)):
+            if items[i] == items[j]:
+                return False
+    return True
+
+
+
+# def NEWmeshvals(coords):
+#     stack_coords = [np.array(c, dtype=object) for c in coords]
+#     stack_coords = np.stack(stack_coords)
+#     mesh_coords, sorted_indices = _meshvals(stack_coords)
+#     mesh_coords = [mesh_coords[d,...] for d in range(mesh_coords.shape[0])]
+#     return mesh_coords, sorted_indices
+
+
+# def _meshvals(coords):
+#     # Input array shape: (d, f) with d = nr of dims and f = nr of frames
+#     # Output array shape: (d, f1,..., fd)
+#     if coords.size == 0:
+#         return np.array([])
+#     # Sort by column
+#     sorted_indices = np.lexsort(coords[::-1])
+#     sorted_array = coords[:, sorted_indices]
+#     # Find shape
+#     shape = _mesh_shape(sorted_array)
+#     # Reshape
+#     mesh_array = sorted_array.reshape(shape)
+#     return mesh_array, sorted_indices
+
+
+# def _mesh_shape(sorted_array):
+
+#     nd = np.unique(sorted_array[0,:]).size
+#     shape = (sorted_array.shape[0], nd)
+
+#     for dim in range(1,shape[0]):
+#         shape_dim = (shape[0], np.prod(shape[1:]), -1)
+#         sorted_array = sorted_array.reshape(shape_dim)
+#         nd = [np.unique(sorted_array[dim,d,:]).size for d in range(shape_dim[1])]
+#         shape = shape + (max(nd),)
+
+#     if np.prod(shape) != sorted_array.size:
+#         raise ValueError(
+#             'Improper dimensions for the series. This usually means '
+#             'that there are multiple images at the same location, \n or that '
+#             'there are no images at one or more locations. \n\n'
+#             'Make sure to specify proper dimensions when reading a pixel array or volume. \n'
+#             'If the default dimensions of pixel_array (InstanceNumber) generate this error, '
+#             'the DICOM data may be corrupted.'
+#         )
+
+#     return shape
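The behaviour of the new meshvals() can be seen on a small, made-up example: two flat coordinate lists that should form a 3 x 2 grid are lexicographically sorted, checked for uniqueness and grid completeness, and returned reshaped, together with the permutation that other per-file values can be sorted with. A sketch assuming the package is installed; all values are invented.

import numpy as np
import dbdicom.utils.arrays as arrays

# Six files: slice locations and trigger times in arbitrary (file) order.
slice_loc = [20.0, 0.0, 10.0, 0.0, 20.0, 10.0]
trig_time = [ 5.0, 0.0,  5.0, 5.0,  0.0, 0.0]

mesh, inds = arrays.meshvals([slice_loc, trig_time])

# mesh[0] and mesh[1] are (3, 2) arrays: 3 unique slice locations x 2 unique times.
# mesh[0] -> [[ 0.  0.], [10. 10.], [20. 20.]]
# mesh[1] -> [[ 0.  5.], [ 0.  5.], [ 0.  5.]]

# inds puts any per-file quantity onto the same grid, as volume() and values() do:
per_file_value = np.array([21.0, 0.0, 11.0, 1.0, 20.0, 10.0])
gridded = per_file_value[inds].reshape(mesh[0].shape)
# gridded -> [[ 0.  1.], [10. 11.], [20. 21.]]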
{dbdicom-0.3.9.dist-info → dbdicom-0.3.11.dist-info}/METADATA
CHANGED

@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: dbdicom
-Version: 0.3.9
+Version: 0.3.11
 Summary: A pythonic interface for reading and writing DICOM databases
 Author-email: Steven Sourbron <s.sourbron@sheffield.ac.uk>, Ebony Gunwhy <e.gunwhy@sheffield.ac.uk>
 Project-URL: Homepage, https://openmiblab.github.io/dbdicom/
{dbdicom-0.3.9.dist-info → dbdicom-0.3.11.dist-info}/RECORD
CHANGED

@@ -1,10 +1,10 @@
 dbdicom/__init__.py,sha256=dW5aezonmMc_41Dp1PuYmXQlr307RkyJxsJuetkpWso,87
-dbdicom/api.py,sha256=
+dbdicom/api.py,sha256=lXF74j1PvQ00WdRu_CJvvtKNoBVGcwUH7YohM_m9Dk4,14949
 dbdicom/const.py,sha256=BqBiRRjeiSqDr1W6YvaayD8WKCjG4Cny2NT0GeLM6bI,4269
 dbdicom/database.py,sha256=mkYQAAf9fETzUhSQflQFp7RQUBdPlSlDty9nn6KY1jQ,4771
-dbdicom/dataset.py,sha256=
-dbdicom/dbd.py,sha256=
-dbdicom/register.py,sha256=
+dbdicom/dataset.py,sha256=pokXlcXLM33OdqMKGfVcWHlVE9ez4iBfpoefVWw1ob8,14421
+dbdicom/dbd.py,sha256=vitqM706NObGnd29vBpizO24OPOaAdAJ7ODYwXE4Mp8,44600
+dbdicom/register.py,sha256=5yXnTbRUu8rYJqeIbSv5SiRf2E4BZ0JZyvKm_xvaXZQ,14944
 dbdicom/external/__init__.py,sha256=XNQqfspyf6vFGedXlRKZsUB8k8E-0W19Uamwn8Aioxo,316
 dbdicom/external/__pycache__/__init__.cpython-311.pyc,sha256=pXAQ35ixd92fm6YcuHgzR1t6RcASQ-cHhU1wOA5b8sw,542
 dbdicom/external/dcm4che/README.md,sha256=0aAGRs36W3_0s5LzWHRGf_tqariS_JP4iJggaxnD4Xw,8987

@@ -35,20 +35,20 @@ dbdicom/external/dcm4che/lib/windows-x86/clib_jiio_util.dll,sha256=wi4yyrI1gTRo_
 dbdicom/external/dcm4che/lib/windows-x86/opencv_java.dll,sha256=QanyzLy0Cd79-aOVPwOcXwikUYeutne0Au-Um91_B4M,8505856
 dbdicom/external/dcm4che/lib/windows-x86-64/opencv_java.dll,sha256=TmjW2SbG4MR3GQ95T8xCVVDLgsdKukgaHBPUvWkfXp8,11039232
 dbdicom/sop_classes/ct_image.py,sha256=16PNv_0e1_7cfxE12JWlx5YQeaTAQVzwtXTjxs3aonk,2812
-dbdicom/sop_classes/enhanced_mr_image.py,sha256=
+dbdicom/sop_classes/enhanced_mr_image.py,sha256=6x4CEd982i64e90ZlFDKNSc83XHC2k2DVit1iyjXjCU,33368
 dbdicom/sop_classes/mr_image.py,sha256=1biIw7R26Fc38FAeSlWxd29VO17e8cEQdDIdLbeXTzw,10959
 dbdicom/sop_classes/parametric_map.py,sha256=2OKBuC2bo03OEpKqimQS-nVGFp1cKRPYwVgmDGVf1JU,12288
 dbdicom/sop_classes/secondary_capture.py,sha256=wgNRX8qyhV7HR7Jq2tQWPPuGpiRzYl6qPOgK6qFbPUc,4541
 dbdicom/sop_classes/segmentation.py,sha256=I8-PciIoIz27_-dZ4esBZSw0TBBbO8KbNYTiTmVe62g,11465
 dbdicom/sop_classes/ultrasound_multiframe_image.py,sha256=j3KN5R90j6WwPMy01hAN2_XSum5TvksF2MYoNGfX_yE,2797
 dbdicom/sop_classes/xray_angiographic_image.py,sha256=nWysCGaEWKVNItnOgyJfcGMpS3oEK1T0_uNR2D7p0Ls,3270
-dbdicom/utils/arrays.py,sha256=
+dbdicom/utils/arrays.py,sha256=_dJGFQPVRfchIRN6vra08RBYnEezobclHv5rEndQ3OA,4588
 dbdicom/utils/dcm4che.py,sha256=Vxq8NYWWK3BuqJkzhBQ89oMqzJlnxqTxgsgTo_Frznc,2317
 dbdicom/utils/files.py,sha256=qhWNJqeWnRjDNbERpC6Mz962_TW9mFdvd2lnBbK3xt4,2259
 dbdicom/utils/image.py,sha256=zRM1O0bxPp-qpf3Iv_GRS1omKaMN1SgSkAwufWLJ0Fk,3863
 dbdicom/utils/pydicom_dataset.py,sha256=XM3EERsCWPlEaUzVaFQSbPNiNbEGwxIbf-sUKKf_YxA,12755
-dbdicom-0.3.
-dbdicom-0.3.
-dbdicom-0.3.
-dbdicom-0.3.
-dbdicom-0.3.
+dbdicom-0.3.11.dist-info/licenses/LICENSE,sha256=HrhfyXIkWY2tGFK11kg7vPCqhgh5DcxleloqdhrpyMY,11558
+dbdicom-0.3.11.dist-info/METADATA,sha256=vsikqOJDm30MtPF3YpAalFQ4v6poQ_g5-C_aXkCgGrg,1031
+dbdicom-0.3.11.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
+dbdicom-0.3.11.dist-info/top_level.txt,sha256=nJWxXg4YjD6QblfmhrzTMXcr8FSKNc0Yk-CAIDUsYkQ,8
+dbdicom-0.3.11.dist-info/RECORD,,
{dbdicom-0.3.9.dist-info → dbdicom-0.3.11.dist-info}/WHEEL
File without changes

{dbdicom-0.3.9.dist-info → dbdicom-0.3.11.dist-info}/licenses/LICENSE
File without changes

{dbdicom-0.3.9.dist-info → dbdicom-0.3.11.dist-info}/top_level.txt
File without changes