dbdicom 0.3.11.tar.gz → 0.3.13.tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release: this version of dbdicom might be problematic.
Files changed (65)
  1. {dbdicom-0.3.11/src/dbdicom.egg-info → dbdicom-0.3.13}/PKG-INFO +1 -1
  2. {dbdicom-0.3.11 → dbdicom-0.3.13}/pyproject.toml +1 -1
  3. {dbdicom-0.3.11 → dbdicom-0.3.13}/src/dbdicom/api.py +38 -22
  4. {dbdicom-0.3.11 → dbdicom-0.3.13}/src/dbdicom/dataset.py +17 -8
  5. {dbdicom-0.3.11 → dbdicom-0.3.13}/src/dbdicom/dbd.py +137 -113
  6. {dbdicom-0.3.11 → dbdicom-0.3.13}/src/dbdicom/register.py +3 -0
  7. {dbdicom-0.3.11 → dbdicom-0.3.13/src/dbdicom.egg-info}/PKG-INFO +1 -1
  8. {dbdicom-0.3.11 → dbdicom-0.3.13}/src/dbdicom.egg-info/SOURCES.txt +1 -0
  9. {dbdicom-0.3.11 → dbdicom-0.3.13}/tests/test_api.py +80 -1
  10. dbdicom-0.3.13/tests/test_mt.py +39 -0
  11. {dbdicom-0.3.11 → dbdicom-0.3.13}/LICENSE +0 -0
  12. {dbdicom-0.3.11 → dbdicom-0.3.13}/MANIFEST.in +0 -0
  13. {dbdicom-0.3.11 → dbdicom-0.3.13}/README.rst +0 -0
  14. {dbdicom-0.3.11 → dbdicom-0.3.13}/setup.cfg +0 -0
  15. {dbdicom-0.3.11 → dbdicom-0.3.13}/src/dbdicom/__init__.py +0 -0
  16. {dbdicom-0.3.11 → dbdicom-0.3.13}/src/dbdicom/const.py +0 -0
  17. {dbdicom-0.3.11 → dbdicom-0.3.13}/src/dbdicom/database.py +0 -0
  18. {dbdicom-0.3.11 → dbdicom-0.3.13}/src/dbdicom/external/__init__.py +0 -0
  19. {dbdicom-0.3.11 → dbdicom-0.3.13}/src/dbdicom/external/__pycache__/__init__.cpython-311.pyc +0 -0
  20. {dbdicom-0.3.11 → dbdicom-0.3.13}/src/dbdicom/external/dcm4che/README.md +0 -0
  21. {dbdicom-0.3.11 → dbdicom-0.3.13}/src/dbdicom/external/dcm4che/__init__.py +0 -0
  22. {dbdicom-0.3.11 → dbdicom-0.3.13}/src/dbdicom/external/dcm4che/__pycache__/__init__.cpython-311.pyc +0 -0
  23. {dbdicom-0.3.11 → dbdicom-0.3.13}/src/dbdicom/external/dcm4che/bin/__init__.py +0 -0
  24. {dbdicom-0.3.11 → dbdicom-0.3.13}/src/dbdicom/external/dcm4che/bin/__pycache__/__init__.cpython-311.pyc +0 -0
  25. {dbdicom-0.3.11 → dbdicom-0.3.13}/src/dbdicom/external/dcm4che/bin/deidentify +0 -0
  26. {dbdicom-0.3.11 → dbdicom-0.3.13}/src/dbdicom/external/dcm4che/bin/deidentify.bat +0 -0
  27. {dbdicom-0.3.11 → dbdicom-0.3.13}/src/dbdicom/external/dcm4che/bin/emf2sf +0 -0
  28. {dbdicom-0.3.11 → dbdicom-0.3.13}/src/dbdicom/external/dcm4che/bin/emf2sf.bat +0 -0
  29. {dbdicom-0.3.11 → dbdicom-0.3.13}/src/dbdicom/external/dcm4che/etc/__init__.py +0 -0
  30. {dbdicom-0.3.11 → dbdicom-0.3.13}/src/dbdicom/external/dcm4che/etc/emf2sf/__init__.py +0 -0
  31. {dbdicom-0.3.11 → dbdicom-0.3.13}/src/dbdicom/external/dcm4che/etc/emf2sf/log4j.properties +0 -0
  32. {dbdicom-0.3.11 → dbdicom-0.3.13}/src/dbdicom/external/dcm4che/lib/__init__.py +0 -0
  33. {dbdicom-0.3.11 → dbdicom-0.3.13}/src/dbdicom/external/dcm4che/lib/commons-cli-1.4.jar +0 -0
  34. {dbdicom-0.3.11 → dbdicom-0.3.13}/src/dbdicom/external/dcm4che/lib/dcm4che-core-5.23.1.jar +0 -0
  35. {dbdicom-0.3.11 → dbdicom-0.3.13}/src/dbdicom/external/dcm4che/lib/dcm4che-emf-5.23.1.jar +0 -0
  36. {dbdicom-0.3.11 → dbdicom-0.3.13}/src/dbdicom/external/dcm4che/lib/dcm4che-tool-common-5.23.1.jar +0 -0
  37. {dbdicom-0.3.11 → dbdicom-0.3.13}/src/dbdicom/external/dcm4che/lib/dcm4che-tool-emf2sf-5.23.1.jar +0 -0
  38. {dbdicom-0.3.11 → dbdicom-0.3.13}/src/dbdicom/external/dcm4che/lib/log4j-1.2.17.jar +0 -0
  39. {dbdicom-0.3.11 → dbdicom-0.3.13}/src/dbdicom/external/dcm4che/lib/macosx-x86-64/libopencv_java.jnilib +0 -0
  40. {dbdicom-0.3.11 → dbdicom-0.3.13}/src/dbdicom/external/dcm4che/lib/slf4j-api-1.7.30.jar +0 -0
  41. {dbdicom-0.3.11 → dbdicom-0.3.13}/src/dbdicom/external/dcm4che/lib/slf4j-log4j12-1.7.30.jar +0 -0
  42. {dbdicom-0.3.11 → dbdicom-0.3.13}/src/dbdicom/external/dcm4che/lib/windows-x86/clib_jiio.dll +0 -0
  43. {dbdicom-0.3.11 → dbdicom-0.3.13}/src/dbdicom/external/dcm4che/lib/windows-x86/clib_jiio_sse2.dll +0 -0
  44. {dbdicom-0.3.11 → dbdicom-0.3.13}/src/dbdicom/external/dcm4che/lib/windows-x86/clib_jiio_util.dll +0 -0
  45. {dbdicom-0.3.11 → dbdicom-0.3.13}/src/dbdicom/external/dcm4che/lib/windows-x86/opencv_java.dll +0 -0
  46. {dbdicom-0.3.11 → dbdicom-0.3.13}/src/dbdicom/external/dcm4che/lib/windows-x86-64/opencv_java.dll +0 -0
  47. {dbdicom-0.3.11 → dbdicom-0.3.13}/src/dbdicom/sop_classes/ct_image.py +0 -0
  48. {dbdicom-0.3.11 → dbdicom-0.3.13}/src/dbdicom/sop_classes/enhanced_mr_image.py +0 -0
  49. {dbdicom-0.3.11 → dbdicom-0.3.13}/src/dbdicom/sop_classes/mr_image.py +0 -0
  50. {dbdicom-0.3.11 → dbdicom-0.3.13}/src/dbdicom/sop_classes/parametric_map.py +0 -0
  51. {dbdicom-0.3.11 → dbdicom-0.3.13}/src/dbdicom/sop_classes/secondary_capture.py +0 -0
  52. {dbdicom-0.3.11 → dbdicom-0.3.13}/src/dbdicom/sop_classes/segmentation.py +0 -0
  53. {dbdicom-0.3.11 → dbdicom-0.3.13}/src/dbdicom/sop_classes/ultrasound_multiframe_image.py +0 -0
  54. {dbdicom-0.3.11 → dbdicom-0.3.13}/src/dbdicom/sop_classes/xray_angiographic_image.py +0 -0
  55. {dbdicom-0.3.11 → dbdicom-0.3.13}/src/dbdicom/utils/arrays.py +0 -0
  56. {dbdicom-0.3.11 → dbdicom-0.3.13}/src/dbdicom/utils/dcm4che.py +0 -0
  57. {dbdicom-0.3.11 → dbdicom-0.3.13}/src/dbdicom/utils/files.py +0 -0
  58. {dbdicom-0.3.11 → dbdicom-0.3.13}/src/dbdicom/utils/image.py +0 -0
  59. {dbdicom-0.3.11 → dbdicom-0.3.13}/src/dbdicom/utils/pydicom_dataset.py +0 -0
  60. {dbdicom-0.3.11 → dbdicom-0.3.13}/src/dbdicom.egg-info/dependency_links.txt +0 -0
  61. {dbdicom-0.3.11 → dbdicom-0.3.13}/src/dbdicom.egg-info/requires.txt +0 -0
  62. {dbdicom-0.3.11 → dbdicom-0.3.13}/src/dbdicom.egg-info/top_level.txt +0 -0
  63. {dbdicom-0.3.11 → dbdicom-0.3.13}/tests/test_dcm4che.py +0 -0
  64. {dbdicom-0.3.11 → dbdicom-0.3.13}/tests/test_sop_classes.py +0 -0
  65. {dbdicom-0.3.11 → dbdicom-0.3.13}/tests/test_utils.py +0 -0
PKG-INFO
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: dbdicom
-Version: 0.3.11
+Version: 0.3.13
 Summary: A pythonic interface for reading and writing DICOM databases
 Author-email: Steven Sourbron <s.sourbron@sheffield.ac.uk>, Ebony Gunwhy <e.gunwhy@sheffield.ac.uk>
 Project-URL: Homepage, https://openmiblab.github.io/dbdicom/
pyproject.toml
@@ -7,7 +7,7 @@ requires = ['setuptools>=61.2']

 [project]
 name = "dbdicom"
-version = "0.3.11"
+version = "0.3.13"
 dependencies = [
     "tqdm",
     "importlib-resources",
src/dbdicom/api.py
@@ -240,6 +240,23 @@ def volume(series:list, dims:list=None, verbose=1) -> vreg.Volume3D:
     return vol


+def volumes_2d(series:list, dims:list=None, verbose=1) -> vreg.Volume3D:
+    """Read 2D volumes from the series
+
+    Args:
+        entity (list, str): DICOM series to read
+        dims (list, optional): Non-spatial dimensions of the volume. Defaults to None.
+        verbose (bool, optional): If set to 1, shows progress bar. Defaults to 1.
+
+    Returns:
+        list of vreg.Volume3D
+    """
+    dbd = open(series[0])
+    vol = dbd.volumes_2d(series, dims, verbose)
+    dbd.close()
+    return vol
+
+
 def values(series:list, *attr, dims:list=None, verbose=1) -> Union[np.ndarray, list]:
     """Read the values of some attributes from a DICOM series

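A minimal usage sketch of the new top-level volumes_2d(), following the pattern used in tests/test_api.py (the folder and names below are illustrative):

    import numpy as np
    import vreg
    import dbdicom as db

    # Write a small multi-slice series, then read it back slice by slice
    series = ['tmp_db', '007', 'demo_study', 'ax']
    db.write_volume(vreg.volume(np.zeros((128, 192, 5), dtype=np.float32)), series)
    vols = db.volumes_2d(series)   # one 2D vreg.Volume3D per slice location
    print(len(vols), vols[0].shape)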
 
@@ -259,16 +276,23 @@ def values(series:list, *attr, dims:list=None, verbose=1) -> Union[np.ndarray, list]:
     return values


-def write_volume(vol:Union[vreg.Volume3D, tuple], series:list, ref:list=None):
+
+def write_volume(vol:Union[vreg.Volume3D, tuple], series:list,
+                 ref:list=None, append=False, verbose=1):
     """Write a vreg.Volume3D to a DICOM series

     Args:
         vol (vreg.Volume3D or tuple): Volume to write to the series.
         series (list): DICOM series to read
         dims (list, optional): Non-spatial dimensions of the volume. Defaults to None.
+        append (bool): by default write_volume will only write to a new series,
+            and raise an error when attempting to write to an existing series.
+            To overrule this behaviour and add the volume to an existing series, set append to True.
+            Default is False.
+        verbose (bool): if set to 1, a progress bar is shown. verbose=0 does not show updates.
     """
     dbd = open(series[0])
-    dbd.write_volume(vol, series, ref)
+    dbd.write_volume(vol, series, ref, append, verbose)
     dbd.close()

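A sketch of the new append and verbose arguments, mirroring tests/test_api.py (paths illustrative):

    import numpy as np
    import vreg
    import dbdicom as db

    series = ['tmp_db', '007', 'demo_study', 'dixon']
    vol = vreg.volume(100 * np.random.rand(128, 192, 20).astype(np.float32))
    db.write_volume(vol, series)

    # A second write to the same series raises ValueError by default;
    # append=True adds the volume to the existing series, verbose=0 hides the progress bar.
    vol2 = vol.translate([0, 0, 20], coords='volume')
    db.write_volume(vol2, series, append=True, verbose=0)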
 
@@ -337,39 +361,27 @@ def files(entity:list) -> list:
     return files


-def pixel_data(series:list, dims:list=None, coords=False, attr:list=None) -> tuple:
+def pixel_data(series:list, dims:list=None, verbose=1) -> tuple:
     """Read the pixel data from a DICOM series

     Args:
-        series (list): DICOM series to read
+        series (list or str): DICOM series to read. This can also
+            be a path to a folder containing DICOM files, or a
+            patient or study to read all series in that patient or
+            study. In those cases a list is returned.
         dims (list, optional): Dimensions of the array.
-        coords (bool): If set to True, the coordinates of the
-            slices are returned alongside the pixel data.
-        attr (list, optional): list of DICOM attributes that are
-            read on the fly to avoid reading the data twice.

     Returns:
-        tuple: numpy array with pixel values and an array with
-            coordinates of the slices according to dims. If include
-            is provide these are returned as a dictionary in a third
-            return value.
+        numpy.ndarray or tuple: numpy array with pixel values, with
+            at least 3 dimensions (x,y,z).
     """
     if isinstance(series, str):
         series = [series]
     dbd = open(series[0])
-    array = dbd.pixel_data(series, dims, coords, attr)
+    array = dbd.pixel_data(series, dims, verbose)
     dbd.close()
     return array

-    # write_pixel_data()
-    # values()
-    # write_values()
-    # to_png(series, folder, dims)
-    # to_npy(series, folder, dims)
-    # split(series, attribute)
-    # extract(series, *kwargs) # subseries
-
-    # zeros(series, shape, dims)

 def unique(pars:list, entity:list) -> dict:
     """Return a list of unique values for a DICOM entity
@@ -420,6 +432,8 @@ def _copy_and_extract_zips(src_folder, dest_folder):
             if file.lower().endswith('.zip'):
                 try:
                     zip_dest_folder = dest_file_path[:-4]
+                    if os.path.exists(zip_dest_folder):
+                        continue
                     with zipfile.ZipFile(src_file_path, 'r') as zip_ref:
                         zip_ref.extractall(zip_dest_folder)
                         #tqdm.write(f"Extracted ZIP: {src_file_path}")
@@ -427,6 +441,8 @@ def _copy_and_extract_zips(src_folder, dest_folder):
                 except zipfile.BadZipFile:
                     tqdm.write(f"Bad ZIP file skipped: {src_file_path}")
             else:
+                if os.path.exists(dest_file_path):
+                    continue
                 shutil.copy2(src_file_path, dest_file_path)

             pbar.update(1)
src/dbdicom/dataset.py
@@ -85,8 +85,8 @@ def write(ds, file, status=None):
     dir = os.path.dirname(file)
     if not os.path.exists(dir):
         os.makedirs(dir)
-    #ds.save_as(file, write_like_original=False) # deprecated
-    ds.save_as(file, enforce_file_format=True)
+    # ds.save_as(file, write_like_original=False) # deprecated
+    pydicom.dcmwrite(file, ds, enforce_file_format=True)


 def codify(source_file, save_file, **kwargs):
@@ -232,11 +232,20 @@ def set_lut(ds, RGB):



-def affine(ds):
-    # Spacing Between Slices is not required so can be absent
-    slice_spacing = ds.get("SpacingBetweenSlices")
-    if slice_spacing is None:
+def affine(ds, multislice=False):
+
+    if multislice:
+        # For 2D scans the slice_spacing is the slice thickness
         slice_spacing = ds.get("SliceThickness")
+    else:
+        # For 3D scans the slice spacing is the SpacingBetweenSlices
+        # Spacing Between Slices is not required so can be absent
+        # This is less critical because when reading a 3D volume the
+        # definitive slice_spacing is inferred from the slice positions.
+        slice_spacing = ds.get("SpacingBetweenSlices")
+        if slice_spacing is None:
+            slice_spacing = ds.get("SliceThickness")
+
     return image.affine_matrix(
         get_values(ds, 'ImageOrientationPatient'),
         get_values(ds, 'ImagePositionPatient'),
@@ -339,8 +348,8 @@ def set_pixel_data(ds, array):
     # ds.PixelData = array.tobytes()


-def volume(ds):
-    return vreg.volume(pixel_data(ds), affine(ds))
+def volume(ds, multislice=False):
+    return vreg.volume(pixel_data(ds), affine(ds, multislice=multislice))


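A standalone sketch of the slice-spacing fallback introduced above, using only pydicom (the function name and path are illustrative):

    import pydicom

    def slice_spacing(path, multislice=False):
        # 2D multislice series: use SliceThickness directly.
        # 3D series: prefer SpacingBetweenSlices, fall back to SliceThickness.
        ds = pydicom.dcmread(path, stop_before_pixels=True)
        if multislice:
            return ds.get("SliceThickness")
        spacing = ds.get("SpacingBetweenSlices")
        return spacing if spacing is not None else ds.get("SliceThickness")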
 
src/dbdicom/dbd.py
@@ -70,8 +70,6 @@ class DataBaseDicom():
         # self._split_series()
         return self

-
-
     def delete(self, entity, not_exists_ok=False):
         """Delete a DICOM entity from the database

@@ -292,6 +290,8 @@ class DataBaseDicom():
                     v.affine[:3,2] = -v.affine[:3,2]
                 # Then try again
                 vol = vreg.join(vols)
+
+        # For multi-dimensional volumes, set dimensions and coordinates
         if vol.ndim > 3:
             # Coordinates of slice 0
             c0 = [c[0,...] for c in coords[1:]]
@@ -300,6 +300,128 @@ class DataBaseDicom():
         return vol


+    def volumes_2d(self, entity:Union[list, str], dims:list=None, verbose=1) -> list:
+        """Read 2D volumes from the series
+
+        Args:
+            entity (list, str): DICOM series to read
+            dims (list, optional): Non-spatial dimensions of the volume. Defaults to None.
+            verbose (bool, optional): If set to 1, shows progress bar. Defaults to 1.
+
+        Returns:
+            list of vreg.Volume3D
+        """
+        # if isinstance(entity, str): # path to folder
+        #     return [self.volume(s, dims) for s in self.series(entity)]
+        # if len(entity) < 4: # folder, patient or study
+        #     return [self.volume(s, dims) for s in self.series(entity)]
+
+        if dims is None:
+            dims = []
+        elif isinstance(dims, str):
+            dims = [dims]
+        else:
+            dims = list(dims)
+        dims = ['SliceLocation'] + dims
+
+        # Read dicom files
+        values = {}
+        volumes = {}
+
+        files = register.files(self.register, entity)
+        for f in tqdm(files, desc='Reading volume..', disable=(verbose==0)):
+            ds = pydicom.dcmread(f)
+            values_f = get_values(ds, dims)
+            vol = dbdataset.volume(ds, multislice=True)
+            slice_loc = values_f[0]
+            if slice_loc in volumes:
+                volumes[slice_loc].append(vol)
+                for d in range(len(dims)):
+                    values[slice_loc][d].append(values_f[d])
+            else:
+                volumes[slice_loc] = [vol]
+                values[slice_loc] = [[values_f[d]] for d in range(len(dims))]
+
+        # Build a volume for each slice location
+        volumes_2d = []
+        for slice_loc in volumes.keys():
+            vols_list = volumes[slice_loc]
+
+            if values == {}:
+                if len(vols_list) > 1:
+                    raise ValueError(
+                        "Cannot return a 2D volume - multiple slices at the same "
+                        "location. \n Use InstanceNumber or another suitable DICOM "
+                        "attribute as dimension to sort them.")
+                volumes_2d.append(vols_list[0])
+                continue
+
+            # Sort by coordinata values
+            vals_list = values[slice_loc]
+
+            # Format coordinates as mesh
+            coords = [np.array(v) for v in vals_list]
+            coords, inds = dbdicom.utils.arrays.meshvals(coords)
+
+            # Check that all slices have the same coordinates
+            if len(dims) > 1:
+                # Loop over all coordinates after slice location
+                for c in coords[1:]:
+                    # Loop over all slice locations
+                    for k in range(1, c.shape[0]):
+                        # Coordinate c of slice k
+                        if not np.array_equal(c[k,...], c[0,...]):
+                            raise ValueError(
+                                "Cannot build a single volume. Not all slices "
+                                "have the same coordinates."
+                            )
+
+            # Build volumes, sort and reshape along the coordinates
+            vols = np.array(vols_list)
+            vols = vols[inds].reshape(coords[0].shape)
+
+            # Join 2D volumes along the extra dimensions
+            vol = vreg.join(vols[0,...].reshape((1,) + vols.shape[1:]))
+
+            # For multi-dimensional volumes, set dimensions and coordinates
+            if vol.ndim > 3:
+                # Coordinates of slice 0
+                c0 = [c[0,...] for c in coords[1:]]
+                vol.set_coords(c0)
+                vol.set_dims(dims[1:])
+
+            volumes_2d.append(vol)
+
+        return volumes_2d
+
+
+    def pixel_data(self, series:list, dims:list=None, verbose=1) -> np.ndarray:
+        """Read the pixel data from a DICOM series
+
+        Args:
+            series (list or str): DICOM series to read. This can also
+                be a path to a folder containing DICOM files, or a
+                patient or study to read all series in that patient or
+                study. In those cases a list is returned.
+            dims (list, optional): Dimensions of the array.
+
+        Returns:
+            numpy.ndarray or tuple: numpy array with pixel values, with
+                at least 3 dimensions (x,y,z).
+        """
+        vols = self.volumes_2d(series, dims, verbose)
+        for v in vols[1:]:
+            if v.shape != vols[0].shape:
+                raise ValueError(
+                    "Cannot return a pixel array because slices have different shapes." \
+                    "Instead try using volumes_2d to return a list of 2D volumes."
+                )
+        slices = [v.values for v in vols]
+        pixel_array = np.concatenate(slices, axis=2)
+        return pixel_array
+
+
+
     def values(self, series:list, *attr, dims:list=None, verbose=1) -> Union[dict, tuple]:
         """Read the values of some attributes from a DICOM series

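A brief sketch of the class-level entry points added here, assuming the open() call used throughout api.py is also exposed at package level (folder illustrative):

    import dbdicom as db

    dbd = db.open('tmp_db')                     # DataBaseDicom instance
    series = dbd.series()[0]                    # first series in the database
    vols = dbd.volumes_2d(series, verbose=0)    # one vreg.Volume3D per slice location
    array = dbd.pixel_data(series, verbose=0)   # same data concatenated along z
    dbd.close()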
 
@@ -355,7 +477,7 @@ class DataBaseDicom():

     def write_volume(
             self, vol:Union[vreg.Volume3D, tuple], series:list,
-            ref:list=None,
+            ref:list=None, append=False, verbose=1,
         ):
         """Write a vreg.Volume3D to a DICOM series

@@ -363,10 +485,16 @@ class DataBaseDicom():
             vol (vreg.Volume3D): Volume to write to the series.
             series (list): DICOM series to read
             ref (list): Reference series
+            append (bool): by default write_volume will only write to a new series,
+                and raise an error when attempting to write to an existing series.
+                To overrule this behaviour and add the volume to an existing series, set append to True.
+                Default is False.
+            verbose (bool): if set to 1, a progress bar is shown
         """
         series_full_name = full_name(series)
         if series_full_name in self.series():
-            raise ValueError(f"Series {series_full_name[-1]} already exists in study {series_full_name[-2]}.")
+            if not append:
+                raise ValueError(f"Series {series_full_name[-1]} already exists in study {series_full_name[-2]}.")

         if isinstance(vol, tuple):
             vol = vreg.volume(vol[0], vol[1])
@@ -388,13 +516,13 @@ class DataBaseDicom():

         if vol.ndim==3:
             slices = vol.split()
-            for i, sl in tqdm(enumerate(slices), desc='Writing volume..'):
+            for i, sl in tqdm(enumerate(slices), desc='Writing volume..', disable=verbose==0):
                 dbdataset.set_volume(ds, sl)
                 self._write_dataset(ds, attr, n + 1 + i)
         else:
             i=0
             vols = vol.separate().reshape(-1)
-            for vt in tqdm(vols, desc='Writing volume..'):
+            for vt in tqdm(vols, desc='Writing volume..', disable=verbose==0):
                 slices = vt.split()
                 for sl in slices:
                     dbdataset.set_volume(ds, sl)
@@ -506,108 +634,7 @@ class DataBaseDicom():
             self.write_volume(vol, series, ref)
         return self

-    def pixel_data(self, series:list, dims:list=None, coords=False, attr=None) -> np.ndarray:
-        """Read the pixel data from a DICOM series
-
-        Args:
-            series (list or str): DICOM series to read. This can also
-                be a path to a folder containing DICOM files, or a
-                patient or study to read all series in that patient or
-                study. In those cases a list is returned.
-            dims (list, optional): Dimensions of the array.
-            coords (bool): If set to True, the coordinates of the
-                arrays are returned alongside the pixel data
-            attr (list, optional): list of DICOM attributes that are
-                read on the fly to avoid reading the data twice.
-
-        Returns:
-            numpy.ndarray or tuple: numpy array with pixel values, with
-                at least 3 dimensions (x,y,z). If
-                coords is set these are returned too as an array with
-                coordinates of the slices according to dims. If include
-                is provided the values are returned as a dictionary in the last
-                return value.
-        """
-        if isinstance(series, str): # path to folder
-            return [self.pixel_data(s, dims, coords, attr) for s in self.series(series)]
-        if len(series) < 4: # folder, patient or study
-            return [self.pixel_data(s, dims, coords, attr) for s in self.series(series)]

-        if dims is None:
-            dims = ['InstanceNumber']
-        elif np.isscalar(dims):
-            dims = [dims]
-        else:
-            dims = list(dims)
-
-        # Ensure return_vals is a list
-        if attr is None:
-            params = []
-        elif np.isscalar(attr):
-            params = [attr]
-        else:
-            params = list(attr)
-
-        files = register.files(self.register, series)
-
-        # Read dicom files
-        coords_array = []
-        arrays = np.empty(len(files), dtype=dict)
-        if attr is not None:
-            values = np.empty(len(files), dtype=dict)
-        for i, f in tqdm(enumerate(files), desc='Reading pixel data..'):
-            ds = pydicom.dcmread(f)
-            coords_array.append(get_values(ds, dims))
-            # save as dict so numpy does not stack as arrays
-            arrays[i] = {'pixel_data': dbdataset.pixel_data(ds)}
-            if attr is not None:
-                values[i] = {'values': get_values(ds, params)}
-
-        # Format as mesh
-        coords_array = np.stack([v for v in coords_array], axis=-1)
-        coords_array, inds = dbdicom.utils.arrays.meshvals(coords_array)
-
-        arrays = arrays[inds].reshape(coords_array.shape[1:])
-        arrays = np.stack([a['pixel_data'] for a in arrays.reshape(-1)], axis=-1)
-        arrays = arrays.reshape(arrays.shape[:2] + coords_array.shape[1:])
-
-        if attr is None:
-            if coords:
-                return arrays, coords_array
-            else:
-                return arrays
-
-        # Return values as a dictionary
-        values = values[inds].reshape(-1)
-        values_dict = {}
-        for p in range(len(params)):
-            # Get the type from the first value
-            vp0 = values[0]['values'][p]
-            # Build an array of the right type
-            vp = np.zeros(values.size, dtype=type(vp0))
-            # Populate the array with values for parameter p
-            for i, v in enumerate(values):
-                vp[i] = v['values'][p]
-            # Reshape values for parameter p
-            vp = vp.reshape(coords_array.shape[1:])
-            # Eneter in the dictionary
-            values_dict[params[p]] = vp
-
-        # If only one, return as value
-        if len(params) == 1:
-            values_return = values_dict[attr[0]]
-        else:
-            values_return = values_dict
-
-        # problem if the values are a list. Needs an array with a prespeficied dtype
-        # values = values[inds].reshape(coords_array.shape[1:])
-        # values = np.stack([a['values'] for a in values.reshape(-1)], axis=-1)
-        # values = values.reshape((len(params), ) + coords_array.shape[1:])
-
-        if coords:
-            return arrays, coords_array, values_return
-        else:
-            return arrays, values_return



@@ -700,7 +727,7 @@ class DataBaseDicom():
         else:
             to_entity[-1] = (to_entity[-1] + '_copy', 0)
             while to_entity in self.series():
-                to_entity[-1][1] += 1
+                to_entity[-1] = (to_entity[-1][0], to_entity[-1][1] + 1)
         if len(to_entity) != 4:
             raise ValueError(
                 f"Cannot copy series {from_entity} to series {to_entity}. "
@@ -1120,7 +1147,7 @@ def infer_slice_spacing(vols):
     distances = np.around(distances, 2)
     slice_spacing_d = np.unique(distances)

-    # Check if unique - otherwise this is not a volume
+    # Check if slice spacings are unique - otherwise this is not a volume
     if len(slice_spacing_d) > 1:
         raise ValueError(
             'Cannot build a volume - spacings between slices are not unique.'
@@ -1135,6 +1162,7 @@ def infer_slice_spacing(vols):
        slice_spacing[d] = slice_spacing_d

     # Check slice_spacing is the same across dimensions
+    # Not sure if this is possible as volumes are sorted by slice location
     slice_spacing = np.unique(slice_spacing)
     if len(slice_spacing) > 1:
         raise ValueError(
@@ -1143,7 +1171,3 @@ def infer_slice_spacing(vols):

     return vols.reshape(shape)

-
-
-
-
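A toy illustration of the uniqueness check in infer_slice_spacing (the numbers are made up):

    import numpy as np

    slice_positions = np.array([0.0, 2.5, 5.0, 7.5, 30.0])   # last slice belongs to a second volume
    distances = np.around(np.diff(slice_positions), 2)
    if len(np.unique(distances)) > 1:
        # Same condition as the error raised above: the series holds more than
        # one volume, so a single 3D volume cannot be built from it.
        print('Cannot build a volume - spacings between slices are not unique.')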
src/dbdicom/register.py
@@ -79,6 +79,7 @@ def index(dbtree, entity):
            for sr in sorted(st['series'], key=lambda sr: sr['SeriesNumber']):
                idx += list(sr['instances'].values())
            return idx
+        raise ValueError(f'Patient {patient_id} not found')
    elif len(entity)==3:
        study_uid = uid(dbtree, entity)
        idx = []
@@ -88,6 +89,7 @@ def index(dbtree, entity):
            for sr in sorted(st['series'], key=lambda sr: sr['SeriesNumber']):
                idx += list(sr['instances'].values())
            return idx
+        raise ValueError(f'Study {study_uid} not found')
    elif len(entity)==4:
        series_uid = uid(dbtree, entity)
        for pt in sorted(dbtree, key=lambda pt: pt['PatientID']):
@@ -95,6 +97,7 @@ def index(dbtree, entity):
            for sr in sorted(st['series'], key=lambda sr: sr['SeriesNumber']):
                if sr['SeriesInstanceUID'] == series_uid:
                    return list(sr['instances'].values())
+        raise ValueError(f'Series {series_uid} not found')

def remove(dbtree, entity):
    if len(entity)==2:
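The practical effect of these additions is that looking up a missing patient, study or series now raises instead of falling through silently. A hedged caller-side sketch, assuming the top-level readers route through register.index (path and patient ID illustrative):

    import dbdicom as db

    try:
        files = db.files(['tmp_db', 'no_such_patient'])
    except ValueError as err:
        print('Lookup failed:', err)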
src/dbdicom.egg-info/PKG-INFO
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: dbdicom
-Version: 0.3.11
+Version: 0.3.13
 Summary: A pythonic interface for reading and writing DICOM databases
 Author-email: Steven Sourbron <s.sourbron@sheffield.ac.uk>, Ebony Gunwhy <e.gunwhy@sheffield.ac.uk>
 Project-URL: Homepage, https://openmiblab.github.io/dbdicom/
src/dbdicom.egg-info/SOURCES.txt
@@ -58,5 +58,6 @@ src/dbdicom/utils/image.py
 src/dbdicom/utils/pydicom_dataset.py
 tests/test_api.py
 tests/test_dcm4che.py
+tests/test_mt.py
 tests/test_sop_classes.py
 tests/test_utils.py
tests/test_api.py
@@ -11,6 +11,8 @@ shutil.rmtree(tmp)
 os.makedirs(tmp, exist_ok=True)


+
+
 def test_write_volume():

     values = 100*np.random.rand(128, 192, 20).astype(np.float32)
@@ -24,11 +26,84 @@ def test_write_volume():
     series = [tmp, '007', 'dbdicom_test', 'dixon']
     db.write_volume(vol, series)

+    # Writing to an existing series returns an error by default
+    try:
+        db.write_volume(vol, series)
+    except:
+        assert True
+    else:
+        assert False
+
+    # Translate the volume in the z-direction over 10mm and append to the series
+    # This creates a series with two volumes separated by a gap of 5 mm
+    vol2 = vol.translate([0,0,20], coords='volume')
+    db.write_volume(vol2, series, append=True)
+
+    # Reading now throws an error as there are multiple volumes in the series
+    try:
+        db.volume(series, dims=['ImageType'])
+    except:
+        assert True
+    else:
+        assert False
+
+
+    shutil.rmtree(tmp)
+
+
+def test_volumes_2d():
+
+    # Write one volume
+    values = 100*np.random.rand(128, 192, 5).astype(np.float32)
+    vol = vreg.volume(values)
+    series = [tmp, '007', 'dbdicom_test', 'ax']
+    db.write_volume(vol, series)
+
+    # Shift it up to leave a gap and write to the same series
+    vol2 = vol.translate([0,0,10], coords='volume')
+    db.write_volume(vol2, series, append=True)
+
+    # Trying to read as a single volume throws an error because of the gap
+    try:
+        db.volume(series)
+    except:
+        assert True
+    else:
+        assert False
+
+    # But we can read them as 2D volumes, returning 10 2D volumes
+    vols = db.volumes_2d(series)
+    assert len(vols) == 10
+
+    # Now 4D
+    values = np.zeros((256, 256, 5, 2))
+    affine = np.eye(4)
+    vol = vreg.volume(values, affine, coords=(['INPHASE', 'OUTPHASE'], ), dims=['ImageType'])
+    series = [tmp, '007', 'dbdicom_test', 'dixon']
+    db.write_volume(vol, series)
+
+    vol2 = vol.translate([0,0,10], coords='volume')
+    db.write_volume(vol2, series, append=True)
+
+    vols = db.volumes_2d(series, dims=['ImageType'])
+    assert len(vols) == 10
+    assert vols[-1].shape == (256, 256, 1, 2)
+
     shutil.rmtree(tmp)


 def test_volume():

+    # One slice
+    values = 100*np.random.rand(128, 192, 1).astype(np.float32)
+    vol = vreg.volume(values)
+    series = [tmp, '007', 'test', 'slice']
+    db.write_volume(vol, series)
+    vol2 = db.volume(series)
+    assert np.linalg.norm(vol2.values-vol.values) < 0.0001*np.linalg.norm(vol.values)
+    assert np.linalg.norm(vol2.affine-vol.affine) == 0
+
+    # 3D volume
     values = 100*np.random.rand(128, 192, 20).astype(np.float32)
     vol = vreg.volume(values)
     series = [tmp, '007', 'test', 'ax']
@@ -37,6 +112,7 @@ def test_volume():
     assert np.linalg.norm(vol2.values-vol.values) < 0.0001*np.linalg.norm(vol.values)
     assert np.linalg.norm(vol2.affine-vol.affine) == 0

+    # 4D volume
     values = 100*np.random.rand(256, 256, 3, 2).astype(np.float32)
     vol = vreg.volume(values, dims=['ImageType'], coords=(['INPHASE', 'OUTPHASE'], ), orient='coronal')
     series = [tmp, '007', 'dbdicom_test', 'dixon']
@@ -120,6 +196,8 @@ def test_edit():
     assert np.array_equal(tr, new_tr)
     assert np.array_equal(pn, new_pn)

+    shutil.rmtree(tmp)
+

 def test_write_database():
     values = 100*np.random.rand(16, 16, 4).astype(np.float32)
@@ -215,9 +293,10 @@ def test_copy():

 if __name__ == '__main__':

+    test_write_volume()
+    test_volumes_2d()
     test_values()
     test_edit()
-    test_write_volume()
     test_volume()
     test_write_database()
     test_copy()
tests/test_mt.py (new file)
@@ -0,0 +1,39 @@
+import os
+import shutil
+import numpy as np
+import dbdicom as db
+import vreg
+
+
+tmp = os.path.join(os.getcwd(), 'tests', 'tmp')
+os.makedirs(tmp, exist_ok=True)
+shutil.rmtree(tmp)
+os.makedirs(tmp, exist_ok=True)
+
+
+def test_write():
+
+    # Currently works as expected
+    values = 100*np.random.rand(256, 256, 16, 2).astype(np.float32)
+    dims = ['ScanOptions']
+    coords = (['PFP', 'PFP'], )
+    vol = vreg.volume(values, dims=dims, coords=coords, orient='coronal')
+    series = [tmp, '007', 'dbdicom_test', 'mt1']
+    db.write_volume(vol, series)
+
+    # # Currently does not work
+    # values = 100*np.random.rand(256, 256, 16, 2).astype(np.float32)
+    # dims = ['ScanOptions']
+    # coords = (['PFP', ['PFP', 'MT']], )
+    # vol = vreg.volume(values, dims=dims, coords=coords, orient='coronal')
+    # series = [tmp, '007', 'dbdicom_test', 'mt2']
+    # db.write_volume(vol, series)
+
+    shutil.rmtree(tmp)
+
+
+if __name__ == '__main__':
+
+    test_write()
+
+    print('All mt tests have passed!!!')