dbdicom 0.3.10__tar.gz → 0.3.12__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release: this version of dbdicom might be problematic (see the registry page for details).
Files changed (64)
  1. {dbdicom-0.3.10/src/dbdicom.egg-info → dbdicom-0.3.12}/PKG-INFO +1 -1
  2. {dbdicom-0.3.10 → dbdicom-0.3.12}/pyproject.toml +1 -1
  3. {dbdicom-0.3.10 → dbdicom-0.3.12}/src/dbdicom/api.py +42 -24
  4. {dbdicom-0.3.10 → dbdicom-0.3.12}/src/dbdicom/dataset.py +17 -8
  5. {dbdicom-0.3.10 → dbdicom-0.3.12}/src/dbdicom/dbd.py +149 -112
  6. {dbdicom-0.3.10 → dbdicom-0.3.12/src/dbdicom.egg-info}/PKG-INFO +1 -1
  7. {dbdicom-0.3.10 → dbdicom-0.3.12}/tests/test_api.py +80 -1
  8. {dbdicom-0.3.10 → dbdicom-0.3.12}/LICENSE +0 -0
  9. {dbdicom-0.3.10 → dbdicom-0.3.12}/MANIFEST.in +0 -0
  10. {dbdicom-0.3.10 → dbdicom-0.3.12}/README.rst +0 -0
  11. {dbdicom-0.3.10 → dbdicom-0.3.12}/setup.cfg +0 -0
  12. {dbdicom-0.3.10 → dbdicom-0.3.12}/src/dbdicom/__init__.py +0 -0
  13. {dbdicom-0.3.10 → dbdicom-0.3.12}/src/dbdicom/const.py +0 -0
  14. {dbdicom-0.3.10 → dbdicom-0.3.12}/src/dbdicom/database.py +0 -0
  15. {dbdicom-0.3.10 → dbdicom-0.3.12}/src/dbdicom/external/__init__.py +0 -0
  16. {dbdicom-0.3.10 → dbdicom-0.3.12}/src/dbdicom/external/__pycache__/__init__.cpython-311.pyc +0 -0
  17. {dbdicom-0.3.10 → dbdicom-0.3.12}/src/dbdicom/external/dcm4che/README.md +0 -0
  18. {dbdicom-0.3.10 → dbdicom-0.3.12}/src/dbdicom/external/dcm4che/__init__.py +0 -0
  19. {dbdicom-0.3.10 → dbdicom-0.3.12}/src/dbdicom/external/dcm4che/__pycache__/__init__.cpython-311.pyc +0 -0
  20. {dbdicom-0.3.10 → dbdicom-0.3.12}/src/dbdicom/external/dcm4che/bin/__init__.py +0 -0
  21. {dbdicom-0.3.10 → dbdicom-0.3.12}/src/dbdicom/external/dcm4che/bin/__pycache__/__init__.cpython-311.pyc +0 -0
  22. {dbdicom-0.3.10 → dbdicom-0.3.12}/src/dbdicom/external/dcm4che/bin/deidentify +0 -0
  23. {dbdicom-0.3.10 → dbdicom-0.3.12}/src/dbdicom/external/dcm4che/bin/deidentify.bat +0 -0
  24. {dbdicom-0.3.10 → dbdicom-0.3.12}/src/dbdicom/external/dcm4che/bin/emf2sf +0 -0
  25. {dbdicom-0.3.10 → dbdicom-0.3.12}/src/dbdicom/external/dcm4che/bin/emf2sf.bat +0 -0
  26. {dbdicom-0.3.10 → dbdicom-0.3.12}/src/dbdicom/external/dcm4che/etc/__init__.py +0 -0
  27. {dbdicom-0.3.10 → dbdicom-0.3.12}/src/dbdicom/external/dcm4che/etc/emf2sf/__init__.py +0 -0
  28. {dbdicom-0.3.10 → dbdicom-0.3.12}/src/dbdicom/external/dcm4che/etc/emf2sf/log4j.properties +0 -0
  29. {dbdicom-0.3.10 → dbdicom-0.3.12}/src/dbdicom/external/dcm4che/lib/__init__.py +0 -0
  30. {dbdicom-0.3.10 → dbdicom-0.3.12}/src/dbdicom/external/dcm4che/lib/commons-cli-1.4.jar +0 -0
  31. {dbdicom-0.3.10 → dbdicom-0.3.12}/src/dbdicom/external/dcm4che/lib/dcm4che-core-5.23.1.jar +0 -0
  32. {dbdicom-0.3.10 → dbdicom-0.3.12}/src/dbdicom/external/dcm4che/lib/dcm4che-emf-5.23.1.jar +0 -0
  33. {dbdicom-0.3.10 → dbdicom-0.3.12}/src/dbdicom/external/dcm4che/lib/dcm4che-tool-common-5.23.1.jar +0 -0
  34. {dbdicom-0.3.10 → dbdicom-0.3.12}/src/dbdicom/external/dcm4che/lib/dcm4che-tool-emf2sf-5.23.1.jar +0 -0
  35. {dbdicom-0.3.10 → dbdicom-0.3.12}/src/dbdicom/external/dcm4che/lib/log4j-1.2.17.jar +0 -0
  36. {dbdicom-0.3.10 → dbdicom-0.3.12}/src/dbdicom/external/dcm4che/lib/macosx-x86-64/libopencv_java.jnilib +0 -0
  37. {dbdicom-0.3.10 → dbdicom-0.3.12}/src/dbdicom/external/dcm4che/lib/slf4j-api-1.7.30.jar +0 -0
  38. {dbdicom-0.3.10 → dbdicom-0.3.12}/src/dbdicom/external/dcm4che/lib/slf4j-log4j12-1.7.30.jar +0 -0
  39. {dbdicom-0.3.10 → dbdicom-0.3.12}/src/dbdicom/external/dcm4che/lib/windows-x86/clib_jiio.dll +0 -0
  40. {dbdicom-0.3.10 → dbdicom-0.3.12}/src/dbdicom/external/dcm4che/lib/windows-x86/clib_jiio_sse2.dll +0 -0
  41. {dbdicom-0.3.10 → dbdicom-0.3.12}/src/dbdicom/external/dcm4che/lib/windows-x86/clib_jiio_util.dll +0 -0
  42. {dbdicom-0.3.10 → dbdicom-0.3.12}/src/dbdicom/external/dcm4che/lib/windows-x86/opencv_java.dll +0 -0
  43. {dbdicom-0.3.10 → dbdicom-0.3.12}/src/dbdicom/external/dcm4che/lib/windows-x86-64/opencv_java.dll +0 -0
  44. {dbdicom-0.3.10 → dbdicom-0.3.12}/src/dbdicom/register.py +0 -0
  45. {dbdicom-0.3.10 → dbdicom-0.3.12}/src/dbdicom/sop_classes/ct_image.py +0 -0
  46. {dbdicom-0.3.10 → dbdicom-0.3.12}/src/dbdicom/sop_classes/enhanced_mr_image.py +0 -0
  47. {dbdicom-0.3.10 → dbdicom-0.3.12}/src/dbdicom/sop_classes/mr_image.py +0 -0
  48. {dbdicom-0.3.10 → dbdicom-0.3.12}/src/dbdicom/sop_classes/parametric_map.py +0 -0
  49. {dbdicom-0.3.10 → dbdicom-0.3.12}/src/dbdicom/sop_classes/secondary_capture.py +0 -0
  50. {dbdicom-0.3.10 → dbdicom-0.3.12}/src/dbdicom/sop_classes/segmentation.py +0 -0
  51. {dbdicom-0.3.10 → dbdicom-0.3.12}/src/dbdicom/sop_classes/ultrasound_multiframe_image.py +0 -0
  52. {dbdicom-0.3.10 → dbdicom-0.3.12}/src/dbdicom/sop_classes/xray_angiographic_image.py +0 -0
  53. {dbdicom-0.3.10 → dbdicom-0.3.12}/src/dbdicom/utils/arrays.py +0 -0
  54. {dbdicom-0.3.10 → dbdicom-0.3.12}/src/dbdicom/utils/dcm4che.py +0 -0
  55. {dbdicom-0.3.10 → dbdicom-0.3.12}/src/dbdicom/utils/files.py +0 -0
  56. {dbdicom-0.3.10 → dbdicom-0.3.12}/src/dbdicom/utils/image.py +0 -0
  57. {dbdicom-0.3.10 → dbdicom-0.3.12}/src/dbdicom/utils/pydicom_dataset.py +0 -0
  58. {dbdicom-0.3.10 → dbdicom-0.3.12}/src/dbdicom.egg-info/SOURCES.txt +0 -0
  59. {dbdicom-0.3.10 → dbdicom-0.3.12}/src/dbdicom.egg-info/dependency_links.txt +0 -0
  60. {dbdicom-0.3.10 → dbdicom-0.3.12}/src/dbdicom.egg-info/requires.txt +0 -0
  61. {dbdicom-0.3.10 → dbdicom-0.3.12}/src/dbdicom.egg-info/top_level.txt +0 -0
  62. {dbdicom-0.3.10 → dbdicom-0.3.12}/tests/test_dcm4che.py +0 -0
  63. {dbdicom-0.3.10 → dbdicom-0.3.12}/tests/test_sop_classes.py +0 -0
  64. {dbdicom-0.3.10 → dbdicom-0.3.12}/tests/test_utils.py +0 -0
PKG-INFO
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: dbdicom
-Version: 0.3.10
+Version: 0.3.12
 Summary: A pythonic interface for reading and writing DICOM databases
 Author-email: Steven Sourbron <s.sourbron@sheffield.ac.uk>, Ebony Gunwhy <e.gunwhy@sheffield.ac.uk>
 Project-URL: Homepage, https://openmiblab.github.io/dbdicom/
pyproject.toml
@@ -7,7 +7,7 @@ requires = ['setuptools>=61.2']
 
 [project]
 name = "dbdicom"
-version = "0.3.10"
+version = "0.3.12"
 dependencies = [
     "tqdm",
     "importlib-resources",
src/dbdicom/api.py
@@ -181,14 +181,16 @@ def copy(from_entity:list, to_entity=None):
     return from_entity_copy
 
 
-def delete(entity:list):
+def delete(entity:list, not_exists_ok=False):
     """Delete a DICOM entity
 
     Args:
        entity (list): entity to delete
+       not_exists_ok (bool): By default, an exception is raised when attempting
+           to delete an entity that does not exist. Set this to True to pass over this silently.
     """
     dbd = open(entity[0])
-    dbd.delete(entity)
+    dbd.delete(entity, not_exists_ok)
     dbd.close()
 
 
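
The new not_exists_ok flag makes cleanup code idempotent. A minimal usage sketch, assuming the module-level API is imported as dbdicom and using a hypothetical entity path:

    import dbdicom

    # Hypothetical entity: [database folder, patient, study, series]
    series = ['path/to/db', '007', 'dbdicom_test', 'tmp_series']

    # By default this still raises if the entity does not exist;
    # with not_exists_ok=True the call passes over it silently.
    dbdicom.delete(series, not_exists_ok=True)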
@@ -238,6 +240,23 @@ def volume(series:list, dims:list=None, verbose=1) -> vreg.Volume3D:
     return vol
 
 
+def volumes_2d(series:list, dims:list=None, verbose=1) -> vreg.Volume3D:
+    """Read 2D volumes from the series
+
+    Args:
+        entity (list, str): DICOM series to read
+        dims (list, optional): Non-spatial dimensions of the volume. Defaults to None.
+        verbose (bool, optional): If set to 1, shows progress bar. Defaults to 1.
+
+    Returns:
+        list of vreg.Volume3D
+    """
+    dbd = open(series[0])
+    vol = dbd.volumes_2d(series, dims, verbose)
+    dbd.close()
+    return vol
+
+
 def values(series:list, *attr, dims:list=None, verbose=1) -> Union[np.ndarray, list]:
     """Read the values of some attributes from a DICOM series
 
@@ -257,16 +276,23 @@ def values(series:list, *attr, dims:list=None, verbose=1) -> Union[np.ndarray, list]:
     return values
 
 
-def write_volume(vol:Union[vreg.Volume3D, tuple], series:list, ref:list=None):
+
+def write_volume(vol:Union[vreg.Volume3D, tuple], series:list,
+                 ref:list=None, append=False, verbose=1):
     """Write a vreg.Volume3D to a DICOM series
 
     Args:
        vol (vreg.Volume3D or tuple): Volume to write to the series.
        series (list): DICOM series to read
        dims (list, optional): Non-spatial dimensions of the volume. Defaults to None.
+       append (bool): by default write_volume will only write to a new series,
+           and raise an error when attempting to write to an existing series.
+           To overrule this behaviour and add the volume to an existing series, set append to True.
+           Default is False.
+       verbose (bool): if set to 1, a progress bar is shown. verbose=0 does not show updates.
     """
     dbd = open(series[0])
-    dbd.write_volume(vol, series, ref)
+    dbd.write_volume(vol, series, ref, append, verbose)
     dbd.close()
 
 
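
The new append flag lets a second volume be added to a series that already exists, instead of raising. A sketch under the same assumptions (module-level API, hypothetical paths); vreg.volume and translate are used as in the tests further down:

    import numpy as np
    import vreg
    import dbdicom

    series = ['path/to/db', '007', 'dbdicom_test', 'dixon']

    # First write creates the series
    vol = vreg.volume(100 * np.random.rand(128, 192, 20).astype(np.float32))
    dbdicom.write_volume(vol, series)

    # A second write to the same series raises by default;
    # append=True adds the volume instead, and verbose=0 hides the progress bar.
    vol2 = vol.translate([0, 0, 20], coords='volume')
    dbdicom.write_volume(vol2, series, append=True, verbose=0)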
@@ -335,39 +361,27 @@ def files(entity:list) -> list:
     return files
 
 
-def pixel_data(series:list, dims:list=None, coords=False, attr:list=None) -> tuple:
+def pixel_data(series:list, dims:list=None, verbose=1) -> tuple:
     """Read the pixel data from a DICOM series
 
     Args:
-        series (list): DICOM series to read
+        series (list or str): DICOM series to read. This can also
+            be a path to a folder containing DICOM files, or a
+            patient or study to read all series in that patient or
+            study. In those cases a list is returned.
         dims (list, optional): Dimensions of the array.
-        coords (bool): If set to True, the coordinates of the
-            slices are returned alongside the pixel data.
-        attr (list, optional): list of DICOM attributes that are
-            read on the fly to avoid reading the data twice.
 
     Returns:
-        tuple: numpy array with pixel values and an array with
-            coordinates of the slices according to dims. If include
-            is provide these are returned as a dictionary in a third
-            return value.
+        numpy.ndarray or tuple: numpy array with pixel values, with
+            at least 3 dimensions (x,y,z).
     """
     if isinstance(series, str):
         series = [series]
     dbd = open(series[0])
-    array = dbd.pixel_data(series, dims, coords, attr)
+    array = dbd.pixel_data(series, dims, verbose)
     dbd.close()
     return array
 
-    # write_pixel_data()
-    # values()
-    # write_values()
-    # to_png(series, folder, dims)
-    # to_npy(series, folder, dims)
-    # split(series, attribute)
-    # extract(series, *kwargs) # subseries
-
-    # zeros(series, shape, dims)
 
 def unique(pars:list, entity:list) -> dict:
     """Return a list of unique values for a DICOM entity
@@ -418,6 +432,8 @@ def _copy_and_extract_zips(src_folder, dest_folder):
             if file.lower().endswith('.zip'):
                 try:
                     zip_dest_folder = dest_file_path[:-4]
+                    if os.path.exists(zip_dest_folder):
+                        continue
                     with zipfile.ZipFile(src_file_path, 'r') as zip_ref:
                         zip_ref.extractall(zip_dest_folder)
                     #tqdm.write(f"Extracted ZIP: {src_file_path}")
@@ -425,6 +441,8 @@ def _copy_and_extract_zips(src_folder, dest_folder):
                 except zipfile.BadZipFile:
                     tqdm.write(f"Bad ZIP file skipped: {src_file_path}")
             else:
+                if os.path.exists(dest_file_path):
+                    continue
                 shutil.copy2(src_file_path, dest_file_path)
 
             pbar.update(1)
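
The two added guards make _copy_and_extract_zips resumable: files already copied and archives already extracted are skipped, so a partially completed import can simply be re-run. A hedged sketch of the same skip-if-present pattern as a standalone helper (names are hypothetical):

    import os
    import shutil
    import zipfile

    def copy_or_extract(src, dest):
        # Re-running leaves previously imported files and extracted archives alone.
        if src.lower().endswith('.zip'):
            target = dest[:-4]
            if not os.path.exists(target):
                with zipfile.ZipFile(src, 'r') as zf:
                    zf.extractall(target)
        elif not os.path.exists(dest):
            shutil.copy2(src, dest)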
src/dbdicom/dataset.py
@@ -85,8 +85,8 @@ def write(ds, file, status=None):
     dir = os.path.dirname(file)
     if not os.path.exists(dir):
         os.makedirs(dir)
-    #ds.save_as(file, write_like_original=False) # deprecated
-    ds.save_as(file, enforce_file_format=True)
+    ds.save_as(file, write_like_original=False) # deprecated
+    # ds.save_as(file, enforce_file_format=True)
 
 
 def codify(source_file, save_file, **kwargs):
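
The change above reverts write() to the save_as call that also works on pydicom 2.x; the enforce_file_format keyword was only introduced in pydicom 3, where write_like_original is deprecated. A version-guarded sketch of how the two calls could coexist (an assumption, not the package's actual code):

    import pydicom

    def save_dataset(ds, file):
        # pydicom 3.x introduced enforce_file_format and deprecated write_like_original;
        # pydicom 2.x only understands write_like_original.
        major = int(pydicom.__version__.split('.')[0])
        if major >= 3:
            ds.save_as(file, enforce_file_format=True)
        else:
            ds.save_as(file, write_like_original=False)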
@@ -232,11 +232,20 @@ def set_lut(ds, RGB):
 
 
 
-def affine(ds):
-    # Spacing Between Slices is not required so can be absent
-    slice_spacing = ds.get("SpacingBetweenSlices")
-    if slice_spacing is None:
+def affine(ds, multislice=False):
+
+    if multislice:
+        # For 2D scans the slice_spacing is the slice thickness
         slice_spacing = ds.get("SliceThickness")
+    else:
+        # For 3D scans the slice spacing is the SpacingBetweenSlices
+        # Spacing Between Slices is not required so can be absent
+        # This is less critical because when reading a 3D volume the
+        # definitive slice_spacing is inferred from the slice positions.
+        slice_spacing = ds.get("SpacingBetweenSlices")
+        if slice_spacing is None:
+            slice_spacing = ds.get("SliceThickness")
+
     return image.affine_matrix(
         get_values(ds, 'ImageOrientationPatient'),
         get_values(ds, 'ImagePositionPatient'),
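
For context, the affine assembled here follows the usual DICOM convention: the row and column direction cosines from ImageOrientationPatient scaled by PixelSpacing, the slice normal scaled by the chosen slice_spacing, and ImagePositionPatient as the translation. A minimal sketch of that construction, assuming (column, row, slice) index order; this is not the image.affine_matrix implementation:

    import numpy as np

    def dicom_affine(orientation, position, pixel_spacing, slice_spacing):
        # orientation: the 6 values of ImageOrientationPatient (row cosines, then column cosines)
        # position: ImagePositionPatient of the slice
        # pixel_spacing: [row spacing, column spacing] as in the PixelSpacing attribute
        row = np.array(orientation[:3], dtype=float)
        col = np.array(orientation[3:], dtype=float)
        normal = np.cross(row, col)
        affine = np.eye(4)
        affine[:3, 0] = row * pixel_spacing[1]
        affine[:3, 1] = col * pixel_spacing[0]
        affine[:3, 2] = normal * slice_spacing
        affine[:3, 3] = position
        return affine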
@@ -339,8 +348,8 @@ def set_pixel_data(ds, array):
     # ds.PixelData = array.tobytes()
 
 
-def volume(ds):
-    return vreg.volume(pixel_data(ds), affine(ds))
+def volume(ds, multislice=False):
+    return vreg.volume(pixel_data(ds), affine(ds, multislice=multislice))
 
 
 
src/dbdicom/dbd.py
@@ -72,14 +72,25 @@ class DataBaseDicom():
 
 
 
-    def delete(self, entity):
+    def delete(self, entity, not_exists_ok=False):
        """Delete a DICOM entity from the database
 
        Args:
            entity (list): entity to delete
+           not_exists_ok (bool): By default, an exception is raised when attempting
+               to delete an entity that does not exist. Set this to True to pass over this silently.
        """
        # delete datasets on disk
-       removed = register.index(self.register, entity)
+       try:
+           removed = register.index(self.register, entity)
+       except ValueError:
+           if not_exists_ok:
+               return self
+           else:
+               raise ValueError(
+                   f"The entity you are trying to delete does not exist. \n"
+                   f"You can set not_exists_ok=True in dbdicom.delete() to avoid this error."
+               )
        for index in removed:
            file = os.path.join(self.path, index)
            if os.path.exists(file):
@@ -281,6 +292,8 @@ class DataBaseDicom():
                v.affine[:3,2] = -v.affine[:3,2]
            # Then try again
            vol = vreg.join(vols)
+
+        # For multi-dimensional volumes, set dimensions and coordinates
        if vol.ndim > 3:
            # Coordinates of slice 0
            c0 = [c[0,...] for c in coords[1:]]
@@ -289,6 +302,128 @@ class DataBaseDicom():
        return vol
 
 
+    def volumes_2d(self, entity:Union[list, str], dims:list=None, verbose=1) -> list:
+        """Read 2D volumes from the series
+
+        Args:
+            entity (list, str): DICOM series to read
+            dims (list, optional): Non-spatial dimensions of the volume. Defaults to None.
+            verbose (bool, optional): If set to 1, shows progress bar. Defaults to 1.
+
+        Returns:
+            list of vreg.Volume3D
+        """
+        # if isinstance(entity, str): # path to folder
+        #     return [self.volume(s, dims) for s in self.series(entity)]
+        # if len(entity) < 4: # folder, patient or study
+        #     return [self.volume(s, dims) for s in self.series(entity)]
+
+        if dims is None:
+            dims = []
+        elif isinstance(dims, str):
+            dims = [dims]
+        else:
+            dims = list(dims)
+        dims = ['SliceLocation'] + dims
+
+        # Read dicom files
+        values = {}
+        volumes = {}
+
+        files = register.files(self.register, entity)
+        for f in tqdm(files, desc='Reading volume..', disable=(verbose==0)):
+            ds = pydicom.dcmread(f)
+            values_f = get_values(ds, dims)
+            vol = dbdataset.volume(ds, multislice=True)
+            slice_loc = values_f[0]
+            if slice_loc in volumes:
+                volumes[slice_loc].append(vol)
+                for d in range(len(dims)):
+                    values[slice_loc][d].append(values_f[d])
+            else:
+                volumes[slice_loc] = [vol]
+                values[slice_loc] = [[values_f[d]] for d in range(len(dims))]
+
+        # Build a volume for each slice location
+        volumes_2d = []
+        for slice_loc in volumes.keys():
+            vols_list = volumes[slice_loc]
+
+            if values == {}:
+                if len(vols_list) > 1:
+                    raise ValueError(
+                        "Cannot return a 2D volume - multiple slices at the same "
+                        "location. \n Use InstanceNumber or another suitable DICOM "
+                        "attribute as dimension to sort them.")
+                volumes_2d.append(vols_list[0])
+                continue
+
+            # Sort by coordinata values
+            vals_list = values[slice_loc]
+
+            # Format coordinates as mesh
+            coords = [np.array(v) for v in vals_list]
+            coords, inds = dbdicom.utils.arrays.meshvals(coords)
+
+            # Check that all slices have the same coordinates
+            if len(dims) > 1:
+                # Loop over all coordinates after slice location
+                for c in coords[1:]:
+                    # Loop over all slice locations
+                    for k in range(1, c.shape[0]):
+                        # Coordinate c of slice k
+                        if not np.array_equal(c[k,...], c[0,...]):
+                            raise ValueError(
+                                "Cannot build a single volume. Not all slices "
+                                "have the same coordinates."
+                            )
+
+            # Build volumes, sort and reshape along the coordinates
+            vols = np.array(vols_list)
+            vols = vols[inds].reshape(coords[0].shape)
+
+            # Join 2D volumes along the extra dimensions
+            vol = vreg.join(vols[0,...].reshape((1,) + vols.shape[1:]))
+
+            # For multi-dimensional volumes, set dimensions and coordinates
+            if vol.ndim > 3:
+                # Coordinates of slice 0
+                c0 = [c[0,...] for c in coords[1:]]
+                vol.set_coords(c0)
+                vol.set_dims(dims[1:])
+
+            volumes_2d.append(vol)
+
+        return volumes_2d
+
+
+    def pixel_data(self, series:list, dims:list=None, verbose=1) -> np.ndarray:
+        """Read the pixel data from a DICOM series
+
+        Args:
+            series (list or str): DICOM series to read. This can also
+                be a path to a folder containing DICOM files, or a
+                patient or study to read all series in that patient or
+                study. In those cases a list is returned.
+            dims (list, optional): Dimensions of the array.
+
+        Returns:
+            numpy.ndarray or tuple: numpy array with pixel values, with
+                at least 3 dimensions (x,y,z).
+        """
+        vols = self.volumes_2d(series, dims, verbose)
+        for v in vols[1:]:
+            if v.shape != vols[0].shape:
+                raise ValueError(
+                    "Cannot return a pixel array because slices have different shapes." \
+                    "Instead try using volumes_2d to return a list of 2D volumes."
+                )
+        slices = [v.values for v in vols]
+        pixel_array = np.concatenate(slices, axis=2)
+        return pixel_array
+
+
+
    def values(self, series:list, *attr, dims:list=None, verbose=1) -> Union[dict, tuple]:
        """Read the values of some attributes from a DICOM series
 
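
volumes_2d (and the pixel_data wrapper built on it) is the route to take when a series does not form a single regular 3D volume, for example when there are gaps between slices or several images per slice location. A usage sketch mirroring the tests further down, with hypothetical paths:

    import dbdicom

    series = ['path/to/db', '007', 'dbdicom_test', 'ax']

    # One vreg.Volume3D per slice location; pass non-spatial dimensions
    # (e.g. 'ImageType') to stack multiple images at the same location.
    vols = dbdicom.volumes_2d(series, dims=['ImageType'], verbose=0)
    for v in vols:
        print(v.shape, v.affine[:3, 3])   # slice shape and position

    # pixel_data concatenates the slices into one array, provided they all have the same shape
    array = dbdicom.pixel_data(series, dims=['ImageType'], verbose=0)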
@@ -344,7 +479,7 @@ class DataBaseDicom():
 
    def write_volume(
            self, vol:Union[vreg.Volume3D, tuple], series:list,
-           ref:list=None,
+           ref:list=None, append=False, verbose=1,
        ):
        """Write a vreg.Volume3D to a DICOM series
 
@@ -352,10 +487,16 @@ class DataBaseDicom():
            vol (vreg.Volume3D): Volume to write to the series.
            series (list): DICOM series to read
            ref (list): Reference series
+           append (bool): by default write_volume will only write to a new series,
+               and raise an error when attempting to write to an existing series.
+               To overrule this behaviour and add the volume to an existing series, set append to True.
+               Default is False.
+           verbose (bool): if set to 1, a progress bar is shown
        """
        series_full_name = full_name(series)
        if series_full_name in self.series():
-           raise ValueError(f"Series {series_full_name[-1]} already exists in study {series_full_name[-2]}.")
+           if not append:
+               raise ValueError(f"Series {series_full_name[-1]} already exists in study {series_full_name[-2]}.")
 
        if isinstance(vol, tuple):
            vol = vreg.volume(vol[0], vol[1])
@@ -377,13 +518,13 @@ class DataBaseDicom():
 
        if vol.ndim==3:
            slices = vol.split()
-           for i, sl in tqdm(enumerate(slices), desc='Writing volume..'):
+           for i, sl in tqdm(enumerate(slices), desc='Writing volume..', disable=verbose==0):
                dbdataset.set_volume(ds, sl)
                self._write_dataset(ds, attr, n + 1 + i)
        else:
            i=0
            vols = vol.separate().reshape(-1)
-           for vt in tqdm(vols, desc='Writing volume..'):
+           for vt in tqdm(vols, desc='Writing volume..', disable=verbose==0):
                slices = vt.split()
                for sl in slices:
                    dbdataset.set_volume(ds, sl)
@@ -495,108 +636,7 @@ class DataBaseDicom():
        self.write_volume(vol, series, ref)
        return self
 
-    def pixel_data(self, series:list, dims:list=None, coords=False, attr=None) -> np.ndarray:
-        """Read the pixel data from a DICOM series
 
-        Args:
-            series (list or str): DICOM series to read. This can also
-                be a path to a folder containing DICOM files, or a
-                patient or study to read all series in that patient or
-                study. In those cases a list is returned.
-            dims (list, optional): Dimensions of the array.
-            coords (bool): If set to True, the coordinates of the
-                arrays are returned alongside the pixel data
-            attr (list, optional): list of DICOM attributes that are
-                read on the fly to avoid reading the data twice.
-
-        Returns:
-            numpy.ndarray or tuple: numpy array with pixel values, with
-                at least 3 dimensions (x,y,z). If
-                coords is set these are returned too as an array with
-                coordinates of the slices according to dims. If include
-                is provided the values are returned as a dictionary in the last
-                return value.
-        """
-        if isinstance(series, str): # path to folder
-            return [self.pixel_data(s, dims, coords, attr) for s in self.series(series)]
-        if len(series) < 4: # folder, patient or study
-            return [self.pixel_data(s, dims, coords, attr) for s in self.series(series)]
-
-        if dims is None:
-            dims = ['InstanceNumber']
-        elif np.isscalar(dims):
-            dims = [dims]
-        else:
-            dims = list(dims)
-
-        # Ensure return_vals is a list
-        if attr is None:
-            params = []
-        elif np.isscalar(attr):
-            params = [attr]
-        else:
-            params = list(attr)
-
-        files = register.files(self.register, series)
-
-        # Read dicom files
-        coords_array = []
-        arrays = np.empty(len(files), dtype=dict)
-        if attr is not None:
-            values = np.empty(len(files), dtype=dict)
-        for i, f in tqdm(enumerate(files), desc='Reading pixel data..'):
-            ds = pydicom.dcmread(f)
-            coords_array.append(get_values(ds, dims))
-            # save as dict so numpy does not stack as arrays
-            arrays[i] = {'pixel_data': dbdataset.pixel_data(ds)}
-            if attr is not None:
-                values[i] = {'values': get_values(ds, params)}
-
-        # Format as mesh
-        coords_array = np.stack([v for v in coords_array], axis=-1)
-        coords_array, inds = dbdicom.utils.arrays.meshvals(coords_array)
-
-        arrays = arrays[inds].reshape(coords_array.shape[1:])
-        arrays = np.stack([a['pixel_data'] for a in arrays.reshape(-1)], axis=-1)
-        arrays = arrays.reshape(arrays.shape[:2] + coords_array.shape[1:])
-
-        if attr is None:
-            if coords:
-                return arrays, coords_array
-            else:
-                return arrays
-
-        # Return values as a dictionary
-        values = values[inds].reshape(-1)
-        values_dict = {}
-        for p in range(len(params)):
-            # Get the type from the first value
-            vp0 = values[0]['values'][p]
-            # Build an array of the right type
-            vp = np.zeros(values.size, dtype=type(vp0))
-            # Populate the array with values for parameter p
-            for i, v in enumerate(values):
-                vp[i] = v['values'][p]
-            # Reshape values for parameter p
-            vp = vp.reshape(coords_array.shape[1:])
-            # Eneter in the dictionary
-            values_dict[params[p]] = vp
-
-        # If only one, return as value
-        if len(params) == 1:
-            values_return = values_dict[attr[0]]
-        else:
-            values_return = values_dict
-
-        # problem if the values are a list. Needs an array with a prespeficied dtype
-        # values = values[inds].reshape(coords_array.shape[1:])
-        # values = np.stack([a['values'] for a in values.reshape(-1)], axis=-1)
-        # values = values.reshape((len(params), ) + coords_array.shape[1:])
-
-        if coords:
-            return arrays, coords_array, values_return
-        else:
-            return arrays, values_return
 
 
 
@@ -1109,7 +1149,7 @@ def infer_slice_spacing(vols):
    distances = np.around(distances, 2)
    slice_spacing_d = np.unique(distances)
 
-    # Check if unique - otherwise this is not a volume
+    # Check if slice spacings are unique - otherwise this is not a volume
    if len(slice_spacing_d) > 1:
        raise ValueError(
            'Cannot build a volume - spacings between slices are not unique.'
@@ -1124,6 +1164,7 @@ def infer_slice_spacing(vols):
        slice_spacing[d] = slice_spacing_d
 
    # Check slice_spacing is the same across dimensions
+    # Not sure if this is possible as volumes are sorted by slice location
    slice_spacing = np.unique(slice_spacing)
    if len(slice_spacing) > 1:
        raise ValueError(
@@ -1132,7 +1173,3 @@ def infer_slice_spacing(vols):
 
    return vols.reshape(shape)
 
-
-
-
-
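
The comments added above concern infer_slice_spacing, which derives the definitive spacing from the slice positions themselves rather than from the header attributes. A simplified sketch of that idea (assumed logic, not the package's exact code):

    import numpy as np

    def slice_spacing_from_positions(positions, normal):
        # Project each ImagePositionPatient onto the slice normal, sort,
        # and require that all gaps between consecutive slices are equal.
        locs = np.sort(np.array(positions) @ np.array(normal))
        gaps = np.around(np.diff(locs), 2)
        unique_gaps = np.unique(gaps)
        if len(unique_gaps) > 1:
            raise ValueError('Cannot build a volume - spacings between slices are not unique.')
        return float(unique_gaps[0])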
src/dbdicom.egg-info/PKG-INFO
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: dbdicom
-Version: 0.3.10
+Version: 0.3.12
 Summary: A pythonic interface for reading and writing DICOM databases
 Author-email: Steven Sourbron <s.sourbron@sheffield.ac.uk>, Ebony Gunwhy <e.gunwhy@sheffield.ac.uk>
 Project-URL: Homepage, https://openmiblab.github.io/dbdicom/
tests/test_api.py
@@ -11,6 +11,8 @@ shutil.rmtree(tmp)
 os.makedirs(tmp, exist_ok=True)
 
 
+
+
 def test_write_volume():
 
     values = 100*np.random.rand(128, 192, 20).astype(np.float32)
@@ -24,11 +26,84 @@ def test_write_volume():
     series = [tmp, '007', 'dbdicom_test', 'dixon']
     db.write_volume(vol, series)
 
+    # Writing to an existing series returns an error by default
+    try:
+        db.write_volume(vol, series)
+    except:
+        assert True
+    else:
+        assert False
+
+    # Translate the volume in the z-direction over 10mm and append to the series
+    # This creates a series with two volumes separated by a gap of 5 mm
+    vol2 = vol.translate([0,0,20], coords='volume')
+    db.write_volume(vol2, series, append=True)
+
+    # Reading now throws an error as there are multiple volumes in the series
+    try:
+        db.volume(series, dims=['ImageType'])
+    except:
+        assert True
+    else:
+        assert False
+
+
+    shutil.rmtree(tmp)
+
+
+def test_volumes_2d():
+
+    # Write one volume
+    values = 100*np.random.rand(128, 192, 5).astype(np.float32)
+    vol = vreg.volume(values)
+    series = [tmp, '007', 'dbdicom_test', 'ax']
+    db.write_volume(vol, series)
+
+    # Shift it up to leave a gap and write to the same series
+    vol2 = vol.translate([0,0,10], coords='volume')
+    db.write_volume(vol2, series, append=True)
+
+    # Trying to read as a single volume throws an error because of the gap
+    try:
+        db.volume(series)
+    except:
+        assert True
+    else:
+        assert False
+
+    # But we can read them as 2D volumes, returning 10 2D volumes
+    vols = db.volumes_2d(series)
+    assert len(vols) == 10
+
+    # Now 4D
+    values = np.zeros((256, 256, 5, 2))
+    affine = np.eye(4)
+    vol = vreg.volume(values, affine, coords=(['INPHASE', 'OUTPHASE'], ), dims=['ImageType'])
+    series = [tmp, '007', 'dbdicom_test', 'dixon']
+    db.write_volume(vol, series)
+
+    vol2 = vol.translate([0,0,10], coords='volume')
+    db.write_volume(vol2, series, append=True)
+
+    vols = db.volumes_2d(series, dims=['ImageType'])
+    assert len(vols) == 10
+    assert vols[-1].shape == (256, 256, 1, 2)
+
     shutil.rmtree(tmp)
 
 
 def test_volume():
 
+    # One slice
+    values = 100*np.random.rand(128, 192, 1).astype(np.float32)
+    vol = vreg.volume(values)
+    series = [tmp, '007', 'test', 'slice']
+    db.write_volume(vol, series)
+    vol2 = db.volume(series)
+    assert np.linalg.norm(vol2.values-vol.values) < 0.0001*np.linalg.norm(vol.values)
+    assert np.linalg.norm(vol2.affine-vol.affine) == 0
+
+    # 3D volume
     values = 100*np.random.rand(128, 192, 20).astype(np.float32)
     vol = vreg.volume(values)
     series = [tmp, '007', 'test', 'ax']
@@ -37,6 +112,7 @@ def test_volume():
     assert np.linalg.norm(vol2.values-vol.values) < 0.0001*np.linalg.norm(vol.values)
     assert np.linalg.norm(vol2.affine-vol.affine) == 0
 
+    # 4D volume
     values = 100*np.random.rand(256, 256, 3, 2).astype(np.float32)
     vol = vreg.volume(values, dims=['ImageType'], coords=(['INPHASE', 'OUTPHASE'], ), orient='coronal')
     series = [tmp, '007', 'dbdicom_test', 'dixon']
@@ -120,6 +196,8 @@ def test_edit():
     assert np.array_equal(tr, new_tr)
     assert np.array_equal(pn, new_pn)
 
+    shutil.rmtree(tmp)
+
 
 def test_write_database():
     values = 100*np.random.rand(16, 16, 4).astype(np.float32)
@@ -215,9 +293,10 @@ def test_copy():
 
 if __name__ == '__main__':
 
+    test_write_volume()
+    test_volumes_2d()
     test_values()
     test_edit()
-    test_write_volume()
     test_volume()
     test_write_database()
     test_copy()