dbdicom 0.3.1__tar.gz → 0.3.3__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release: this version of dbdicom might be problematic.

Files changed (65)
  1. {dbdicom-0.3.1/src/dbdicom.egg-info → dbdicom-0.3.3}/PKG-INFO +2 -4
  2. {dbdicom-0.3.1 → dbdicom-0.3.3}/pyproject.toml +4 -4
  3. {dbdicom-0.3.1 → dbdicom-0.3.3}/src/dbdicom/api.py +38 -23
  4. dbdicom-0.3.3/src/dbdicom/database.py +126 -0
  5. {dbdicom-0.3.1 → dbdicom-0.3.3}/src/dbdicom/dataset.py +35 -60
  6. {dbdicom-0.3.1 → dbdicom-0.3.3}/src/dbdicom/dbd.py +221 -201
  7. {dbdicom-0.3.1 → dbdicom-0.3.3}/src/dbdicom/external/__pycache__/__init__.cpython-311.pyc +0 -0
  8. dbdicom-0.3.3/src/dbdicom/external/dcm4che/__pycache__/__init__.cpython-311.pyc +0 -0
  9. dbdicom-0.3.3/src/dbdicom/external/dcm4che/bin/__pycache__/__init__.cpython-311.pyc +0 -0
  10. dbdicom-0.3.3/src/dbdicom/register.py +609 -0
  11. {dbdicom-0.3.1 → dbdicom-0.3.3}/src/dbdicom/sop_classes/mr_image.py +156 -143
  12. {dbdicom-0.3.1 → dbdicom-0.3.3}/src/dbdicom/sop_classes/parametric_map.py +93 -22
  13. {dbdicom-0.3.1 → dbdicom-0.3.3}/src/dbdicom/utils/image.py +10 -10
  14. {dbdicom-0.3.1 → dbdicom-0.3.3/src/dbdicom.egg-info}/PKG-INFO +2 -4
  15. {dbdicom-0.3.1 → dbdicom-0.3.3}/src/dbdicom.egg-info/SOURCES.txt +2 -0
  16. {dbdicom-0.3.1 → dbdicom-0.3.3}/src/dbdicom.egg-info/requires.txt +1 -3
  17. dbdicom-0.3.3/tests/test_api.py +25 -0
  18. {dbdicom-0.3.1 → dbdicom-0.3.3}/tests/test_dcm4che.py +2 -4
  19. dbdicom-0.3.1/src/dbdicom/external/dcm4che/__pycache__/__init__.cpython-311.pyc +0 -0
  20. dbdicom-0.3.1/src/dbdicom/external/dcm4che/bin/__pycache__/__init__.cpython-311.pyc +0 -0
  21. dbdicom-0.3.1/src/dbdicom/register.py +0 -527
  22. {dbdicom-0.3.1 → dbdicom-0.3.3}/LICENSE +0 -0
  23. {dbdicom-0.3.1 → dbdicom-0.3.3}/MANIFEST.in +0 -0
  24. {dbdicom-0.3.1 → dbdicom-0.3.3}/README.rst +0 -0
  25. {dbdicom-0.3.1 → dbdicom-0.3.3}/setup.cfg +0 -0
  26. {dbdicom-0.3.1 → dbdicom-0.3.3}/src/dbdicom/__init__.py +0 -0
  27. {dbdicom-0.3.1 → dbdicom-0.3.3}/src/dbdicom/const.py +0 -0
  28. {dbdicom-0.3.1 → dbdicom-0.3.3}/src/dbdicom/external/__init__.py +0 -0
  29. {dbdicom-0.3.1 → dbdicom-0.3.3}/src/dbdicom/external/dcm4che/README.md +0 -0
  30. {dbdicom-0.3.1 → dbdicom-0.3.3}/src/dbdicom/external/dcm4che/__init__.py +0 -0
  31. {dbdicom-0.3.1 → dbdicom-0.3.3}/src/dbdicom/external/dcm4che/bin/__init__.py +0 -0
  32. {dbdicom-0.3.1 → dbdicom-0.3.3}/src/dbdicom/external/dcm4che/bin/deidentify +0 -0
  33. {dbdicom-0.3.1 → dbdicom-0.3.3}/src/dbdicom/external/dcm4che/bin/deidentify.bat +0 -0
  34. {dbdicom-0.3.1 → dbdicom-0.3.3}/src/dbdicom/external/dcm4che/bin/emf2sf +0 -0
  35. {dbdicom-0.3.1 → dbdicom-0.3.3}/src/dbdicom/external/dcm4che/bin/emf2sf.bat +0 -0
  36. {dbdicom-0.3.1 → dbdicom-0.3.3}/src/dbdicom/external/dcm4che/etc/__init__.py +0 -0
  37. {dbdicom-0.3.1 → dbdicom-0.3.3}/src/dbdicom/external/dcm4che/etc/emf2sf/__init__.py +0 -0
  38. {dbdicom-0.3.1 → dbdicom-0.3.3}/src/dbdicom/external/dcm4che/etc/emf2sf/log4j.properties +0 -0
  39. {dbdicom-0.3.1 → dbdicom-0.3.3}/src/dbdicom/external/dcm4che/lib/__init__.py +0 -0
  40. {dbdicom-0.3.1 → dbdicom-0.3.3}/src/dbdicom/external/dcm4che/lib/commons-cli-1.4.jar +0 -0
  41. {dbdicom-0.3.1 → dbdicom-0.3.3}/src/dbdicom/external/dcm4che/lib/dcm4che-core-5.23.1.jar +0 -0
  42. {dbdicom-0.3.1 → dbdicom-0.3.3}/src/dbdicom/external/dcm4che/lib/dcm4che-emf-5.23.1.jar +0 -0
  43. {dbdicom-0.3.1 → dbdicom-0.3.3}/src/dbdicom/external/dcm4che/lib/dcm4che-tool-common-5.23.1.jar +0 -0
  44. {dbdicom-0.3.1 → dbdicom-0.3.3}/src/dbdicom/external/dcm4che/lib/dcm4che-tool-emf2sf-5.23.1.jar +0 -0
  45. {dbdicom-0.3.1 → dbdicom-0.3.3}/src/dbdicom/external/dcm4che/lib/log4j-1.2.17.jar +0 -0
  46. {dbdicom-0.3.1 → dbdicom-0.3.3}/src/dbdicom/external/dcm4che/lib/macosx-x86-64/libopencv_java.jnilib +0 -0
  47. {dbdicom-0.3.1 → dbdicom-0.3.3}/src/dbdicom/external/dcm4che/lib/slf4j-api-1.7.30.jar +0 -0
  48. {dbdicom-0.3.1 → dbdicom-0.3.3}/src/dbdicom/external/dcm4che/lib/slf4j-log4j12-1.7.30.jar +0 -0
  49. {dbdicom-0.3.1 → dbdicom-0.3.3}/src/dbdicom/external/dcm4che/lib/windows-x86/clib_jiio.dll +0 -0
  50. {dbdicom-0.3.1 → dbdicom-0.3.3}/src/dbdicom/external/dcm4che/lib/windows-x86/clib_jiio_sse2.dll +0 -0
  51. {dbdicom-0.3.1 → dbdicom-0.3.3}/src/dbdicom/external/dcm4che/lib/windows-x86/clib_jiio_util.dll +0 -0
  52. {dbdicom-0.3.1 → dbdicom-0.3.3}/src/dbdicom/external/dcm4che/lib/windows-x86/opencv_java.dll +0 -0
  53. {dbdicom-0.3.1 → dbdicom-0.3.3}/src/dbdicom/external/dcm4che/lib/windows-x86-64/opencv_java.dll +0 -0
  54. {dbdicom-0.3.1 → dbdicom-0.3.3}/src/dbdicom/sop_classes/ct_image.py +0 -0
  55. {dbdicom-0.3.1 → dbdicom-0.3.3}/src/dbdicom/sop_classes/enhanced_mr_image.py +0 -0
  56. {dbdicom-0.3.1 → dbdicom-0.3.3}/src/dbdicom/sop_classes/secondary_capture.py +0 -0
  57. {dbdicom-0.3.1 → dbdicom-0.3.3}/src/dbdicom/sop_classes/segmentation.py +0 -0
  58. {dbdicom-0.3.1 → dbdicom-0.3.3}/src/dbdicom/sop_classes/ultrasound_multiframe_image.py +0 -0
  59. {dbdicom-0.3.1 → dbdicom-0.3.3}/src/dbdicom/sop_classes/xray_angiographic_image.py +0 -0
  60. {dbdicom-0.3.1 → dbdicom-0.3.3}/src/dbdicom/utils/arrays.py +0 -0
  61. {dbdicom-0.3.1 → dbdicom-0.3.3}/src/dbdicom/utils/dcm4che.py +0 -0
  62. {dbdicom-0.3.1 → dbdicom-0.3.3}/src/dbdicom/utils/files.py +0 -0
  63. {dbdicom-0.3.1 → dbdicom-0.3.3}/src/dbdicom/utils/variables.py +0 -0
  64. {dbdicom-0.3.1 → dbdicom-0.3.3}/src/dbdicom.egg-info/dependency_links.txt +0 -0
  65. {dbdicom-0.3.1 → dbdicom-0.3.3}/src/dbdicom.egg-info/top_level.txt +0 -0
@@ -1,6 +1,6 @@
  Metadata-Version: 2.4
  Name: dbdicom
- Version: 0.3.1
+ Version: 0.3.3
  Summary: A pythonic interface for reading and writing DICOM databases
  Author-email: Steven Sourbron <s.sourbron@sheffield.ac.uk>, Ebony Gunwhy <e.gunwhy@sheffield.ac.uk>
  Project-URL: Homepage, https://openmiblab.github.io/dbdicom/
@@ -22,7 +22,5 @@ Requires-Dist: importlib-resources
  Requires-Dist: numpy
  Requires-Dist: pandas
  Requires-Dist: vreg
- Requires-Dist: pydicom
- Requires-Dist: python-gdcm
- Requires-Dist: pylibjpeg-libjpeg
+ Requires-Dist: pydicom[basic,pixeldata]
  Dynamic: license-file
@@ -7,16 +7,16 @@ requires = ['setuptools>=61.2']

  [project]
  name = "dbdicom"
- version = "0.3.1"
+ version = "0.3.3"
  dependencies = [
      "tqdm",
      "importlib-resources",
      "numpy",
      "pandas", # make obsolete
      'vreg',
-     "pydicom",
-     "python-gdcm",
-     "pylibjpeg-libjpeg",
+     "pydicom[basic,pixeldata]",
+     #"python-gdcm",
+     #"pylibjpeg-libjpeg",
  ]

  # optional information
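The three separate imaging dependencies are folded into pydicom's optional extras. A quick way to check which optional pixel-data handlers ended up in the environment (a sketch only; the handler set actually pulled in by the extras depends on the pydicom version and platform):

    # Sketch: list which optional pixel-data handler packages are importable
    # after installing dbdicom 0.3.3, which depends on "pydicom[basic,pixeldata]".
    import importlib.util

    for name in ("gdcm", "pylibjpeg", "openjpeg"):   # candidate handler modules
        found = importlib.util.find_spec(name) is not None
        print(f"{name}: {'available' if found else 'not installed'}")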
@@ -1,5 +1,5 @@
+ from typing import Union

- import numpy as np
  import vreg

  from dbdicom.dbd import DataBaseDicom
@@ -16,6 +16,15 @@ def open(path:str) -> DataBaseDicom:
      """
      return DataBaseDicom(path)

+ def to_json(path):
+     """Summarise the contents of the DICOM folder in a json file
+
+     Args:
+         path (str): path to the DICOM folder
+     """
+     dbd = open(path)
+     dbd.close()
+
  def print(path):
      """Print the contents of the DICOM folder

@@ -42,6 +51,21 @@ def summary(path) -> dict:
      return s


+ def tree(path) -> dict:
+     """Return the structure of the database as a dictionary tree.
+
+     Args:
+         path (str): path to the DICOM folder
+
+     Returns:
+         dict: Nested dictionary with summary information on the database.
+     """
+     dbd = open(path)
+     s = dbd.register
+     dbd.close()
+     return s
+
+
  def patients(path, name:str=None, contains:str=None, isin:list=None)->list:
      """Return a list of patients in the DICOM folder.

@@ -167,41 +191,36 @@ def move(from_entity:list, to_entity:list):
      dbd.close()


- def volume(series:list, dims:list=None, multislice=False) -> vreg.Volume3D:
+ def volume(series:list, dims:list=None) -> vreg.Volume3D:
      """Read a vreg.Volume3D from a DICOM series

      Args:
          series (list): DICOM series to read
          dims (list, optional): Non-spatial dimensions of the volume. Defaults to None.
-         multislice (bool, optional): Whether the data are to be read
-             as multislice or not. In multislice data the voxel size
-             is taken from the slice gap rather thsan the slice thickness. Defaults to False.

      Returns:
          vreg.Volume3D: vole read from the series.
      """
+     if isinstance(series, str):
+         series = [series]
      dbd = open(series[0])
-     vol = dbd.volume(series, dims, multislice)
+     vol = dbd.volume(series, dims)
      dbd.close()
      return vol

- def write_volume(vol:vreg.Volume3D, series:list, ref:list=None,
-         multislice=False):
+ def write_volume(vol:Union[vreg.Volume3D, tuple], series:list, ref:list=None):
      """Write a vreg.Volume3D to a DICOM series

      Args:
-         vol (vreg.Volume3D): Volume to write to the series.
+         vol (vreg.Volume3D or tuple): Volume to write to the series.
          series (list): DICOM series to read
          dims (list, optional): Non-spatial dimensions of the volume. Defaults to None.
-         multislice (bool, optional): Whether the data are to be read
-             as multislice or not. In multislice data the voxel size
-             is taken from the slice gap rather thsan the slice thickness. Defaults to False.
      """
      dbd = open(series[0])
-     dbd.write_volume(vol, series, ref, multislice)
+     dbd.write_volume(vol, series, ref)
      dbd.close()

- def to_nifti(series:list, file:str, dims:list=None, multislice=False):
+ def to_nifti(series:list, file:str, dims:list=None):
      """Save a DICOM series in nifti format.

      Args:
@@ -209,27 +228,21 @@ def to_nifti(series:list, file:str, dims:list=None, multislice=False):
          file (str): file path of the nifti file.
          dims (list, optional): Non-spatial dimensions of the volume.
              Defaults to None.
-         multislice (bool, optional): Whether the data are to be read
-             as multislice or not. In multislice data the voxel size
-             is taken from the slice gap rather thaan the slice thickness. Defaults to False.
      """
      dbd = open(series[0])
-     dbd.to_nifti(series, file, dims, multislice)
+     dbd.to_nifti(series, file, dims)
      dbd.close()

- def from_nifti(file:str, series:list, ref:list=None, multislice=False):
+ def from_nifti(file:str, series:list, ref:list=None):
      """Create a DICOM series from a nifti file.

      Args:
          file (str): file path of the nifti file.
          series (list): DICOM series to create
          ref (list): DICOM series to use as template.
-         multislice (bool, optional): Whether the data are to be written
-             as multislice or not. In multislice data the voxel size
-             is written in the slice gap rather thaan the slice thickness. Defaults to False.
      """
      dbd = open(series[0])
-     dbd.from_nifti(file, series, ref, multislice)
+     dbd.from_nifti(file, series, ref)
      dbd.close()

  def pixel_data(series:list, dims:list=None, include:list=None) -> tuple:
@@ -247,6 +260,8 @@ def pixel_data(series:list, dims:list=None, include:list=None) -> tuple:
          is provide these are returned as a dictionary in a third
          return value.
      """
+     if isinstance(series, str):
+         series = [series]
      dbd = open(series[0])
      array = dbd.pixel_data(series, dims, include)
      dbd.close()
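Taken together, the api.py changes add tree() and to_json(), let volume() and pixel_data() accept a plain string in place of a one-element list, and drop the multislice flag from volume(), write_volume(), to_nifti() and from_nifti(). A minimal usage sketch against the new signatures; the folder path and the [folder, patient, study, series] addresses below are hypothetical placeholders, not values from the package:

    from dbdicom import api

    folder = 'C:/data/dicom_folder'                       # hypothetical path

    # nested register: patients -> studies -> series -> instances
    register = api.tree(folder)

    # read a series as a vreg.Volume3D -- the multislice flag is gone
    series = [folder, 'Patient1', 'Study1', 'T1_map']     # hypothetical series address
    vol = api.volume(series)

    # write it back to a new series with the trimmed 3-argument signature
    api.write_volume(vol, [folder, 'Patient1', 'Study1', 'T1_map_copy'])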
@@ -0,0 +1,126 @@
+ import os
+ from tqdm import tqdm
+
+ import numpy as np
+ import pandas as pd
+ import pydicom
+
+ import dbdicom.utils.dcm4che as dcm4che
+ import dbdicom.utils.files as filetools
+ import dbdicom.dataset as dbdataset
+
+
+ COLUMNS = [
+     # Identifiers (unique)
+     'PatientID',
+     'StudyInstanceUID',
+     'SeriesInstanceUID',
+     'SOPInstanceUID',
+     # Human-readable identifiers (not unique)
+     'PatientName',
+     'StudyDescription',
+     'StudyDate',
+     'StudyID',
+     'SeriesDescription',
+     'SeriesNumber',
+     'InstanceNumber',
+ ]
+
+ def read(path):
+     files = filetools.all_files(path)
+     tags = COLUMNS + ['NumberOfFrames'] # + ['SOPClassUID']
+     array = []
+     dicom_files = []
+     for i, file in tqdm(enumerate(files), total=len(files), desc='Reading DICOM folder'):
+         try:
+             ds = pydicom.dcmread(file, force=True, specific_tags=tags+['Rows'])
+         except:
+             pass
+         else:
+             if isinstance(ds, pydicom.dataset.FileDataset):
+                 if 'TransferSyntaxUID' in ds.file_meta:
+                     if not 'Rows' in ds: # Image only
+                         continue
+                     row = dbdataset.get_values(ds, tags)
+                     array.append(row)
+                     index = os.path.relpath(file, path)
+                     dicom_files.append(index)
+     df = pd.DataFrame(array, index = dicom_files, columns = tags)
+     df = _multiframe_to_singleframe(path, df)
+     dbtree = _tree(df)
+     return dbtree
+
+
+ def _multiframe_to_singleframe(path, df):
+     """Converts all multiframe files in the folder into single-frame files.
+
+     Reads all the multi-frame files in the folder,
+     converts them to singleframe files, and delete the original multiframe file.
+     """
+     singleframe = df.NumberOfFrames.isnull()
+     multiframe = singleframe == False
+     nr_multiframe = multiframe.sum()
+     if nr_multiframe != 0:
+         for relpath in tqdm(df[multiframe].index.values, desc="Converting multiframe file " + relpath):
+             filepath = os.path.join(path, relpath)
+             singleframe_files = dcm4che.split_multiframe(filepath)
+             if singleframe_files != []:
+                 # add the single frame files to the dataframe
+                 dfnew = read(singleframe_files, df.columns, path)
+                 df = pd.concat([df, dfnew])
+                 # delete the original multiframe
+                 os.remove(filepath)
+             # drop the file also if the conversion has failed
+             df.drop(index=relpath, inplace=True)
+     df.drop('NumberOfFrames', axis=1, inplace=True)
+     return df
+
+
+ def _tree(df):
+     # A human-readable summary tree
+     # TODO: Add version number
+
+     df.sort_values(['PatientID','StudyInstanceUID','SeriesNumber'], inplace=True)
+     df = df.fillna('None')
+     summary = []
+
+     for uid_patient in df.PatientID.unique():
+         df_patient = df[df.PatientID == uid_patient]
+         patient_name = df_patient.PatientName.values[0]
+         patient = {
+             'PatientName': patient_name,
+             'PatientID': uid_patient,
+             'studies': [],
+         }
+         summary.append(patient)
+         for uid_study in df_patient.StudyInstanceUID.unique():
+             df_study = df_patient[df_patient.StudyInstanceUID == uid_study]
+             study_desc = df_study.StudyDescription.values[0]
+             study_id = df_study.StudyID.values[0]
+             study_date = df_study.StudyDate.values[0]
+             study = {
+                 'StudyDescription': study_desc,
+                 'StudyDate': study_date,
+                 'StudyID': study_id,
+                 'StudyInstanceUID': uid_study,
+                 'series': [],
+             }
+             patient['studies'].append(study)
+             for uid_sery in df_study.SeriesInstanceUID.unique():
+                 df_series = df_study[df_study.SeriesInstanceUID == uid_sery]
+                 series_desc = df_series.SeriesDescription.values[0]
+                 series_nr = int(df_series.SeriesNumber.values[0])
+                 series = {
+                     'SeriesNumber': series_nr,
+                     'SeriesDescription': series_desc,
+                     'SeriesInstanceUID': uid_sery,
+                     'instances': {},
+                 }
+                 study['series'].append(series)
+                 for uid_instance in df_series.SOPInstanceUID.unique():
+                     df_instance = df_series[df_series.SOPInstanceUID == uid_instance]
+                     instance_nr = int(df_instance.InstanceNumber.values[0])
+                     relpath = df_instance.index[0]
+                     series['instances'][instance_nr]=relpath
+
+     return summary
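For reference, the register built by _tree() is a list of patient dictionaries nesting studies, series and instance file paths. A hypothetical single-patient folder would come out roughly as follows (all identifiers, names and paths below are made up):

    register = [
        {
            'PatientName': 'Anonymous',
            'PatientID': 'PAT001',
            'studies': [
                {
                    'StudyDescription': 'MRI abdomen',
                    'StudyDate': '20240101',
                    'StudyID': '1',
                    'StudyInstanceUID': '1.2.826.0.1.1',
                    'series': [
                        {
                            'SeriesNumber': 1,
                            'SeriesDescription': 'T1_map',
                            'SeriesInstanceUID': '1.2.826.0.1.2',
                            'instances': {
                                1: 'PAT001/study_1/series_1/slice_1.dcm',
                                2: 'PAT001/study_1/series_1/slice_2.dcm',
                            },
                        },
                    ],
                },
            ],
        },
    ]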
@@ -7,13 +7,12 @@ import struct
  from tqdm import tqdm

  import numpy as np
- import pandas as pd
  import pydicom
  from pydicom.util.codify import code_file
  import pydicom.config
- from pydicom.dataset import Dataset
  import vreg

+
  import dbdicom.utils.image as image
  import dbdicom.utils.variables as variables
  from dbdicom.sop_classes import (
@@ -74,6 +73,8 @@ def new_dataset(sop_class):
          return xray_angiographic_image.default()
      if sop_class == 'UltrasoundMultiFrameImage':
          return ultrasound_multiframe_image.default()
+     if sop_class == 'ParametricMap':
+         return parametric_map.default()
      else:
          raise ValueError(
              f"DICOM class {sop_class} is not currently supported"
@@ -228,7 +229,8 @@ def write(ds, file, status=None):
      dir = os.path.dirname(file)
      if not os.path.exists(dir):
          os.makedirs(dir)
-     ds.save_as(file, write_like_original=False)
+     #ds.save_as(file, write_like_original=False) # deprecated
+     ds.save_as(file, enforce_file_format=True)


  def codify(source_file, save_file, **kwargs):
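The write() change tracks newer pydicom releases, where write_like_original is deprecated in favour of enforce_file_format. A standalone sketch of the new call; the dataset contents and output filename are illustrative, not taken from the package:

    import pydicom
    from pydicom.dataset import Dataset, FileMetaDataset
    from pydicom.uid import ExplicitVRLittleEndian, generate_uid

    ds = Dataset()
    ds.SOPClassUID = '1.2.840.10008.5.1.4.1.1.4'   # MR Image Storage
    ds.SOPInstanceUID = generate_uid()
    ds.PatientName = 'Anonymous'

    ds.file_meta = FileMetaDataset()
    ds.file_meta.TransferSyntaxUID = ExplicitVRLittleEndian

    # enforce_file_format=True completes and validates the file meta group,
    # replacing the deprecated write_like_original=False
    ds.save_as('example.dcm', enforce_file_format=True)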
@@ -238,7 +240,7 @@ def codify(source_file, save_file, **kwargs):
      file.close()


- def read_data(files, tags, path=None, images_only=False):
+ def read_data(files, tags, path=None, images_only=False): # obsolete??

      if np.isscalar(files):
          files = [files]
@@ -266,34 +268,6 @@



- def read_dataframe(files, tags, path=None, images_only=False):
-     if np.isscalar(files):
-         files = [files]
-     if np.isscalar(tags):
-         tags = [tags]
-     array = []
-     dicom_files = []
-     for i, file in tqdm(enumerate(files), desc='Reading DICOM folder'):
-         try:
-             ds = pydicom.dcmread(file, force=True, specific_tags=tags+['Rows'])
-         except:
-             pass
-         else:
-             if isinstance(ds, pydicom.dataset.FileDataset):
-                 if 'TransferSyntaxUID' in ds.file_meta:
-                     if images_only:
-                         if not 'Rows' in ds:
-                             continue
-                     row = get_values(ds, tags)
-                     array.append(row)
-                     if path is None:
-                         index = file
-                     else:
-                         index = os.path.relpath(file, path)
-                     dicom_files.append(index)
-     df = pd.DataFrame(array, index = dicom_files, columns = tags)
-     return df
-

  def _add_new(ds, tag, value, VR='OW'):
      if not isinstance(tag, pydicom.tag.BaseTag):
@@ -540,32 +514,24 @@ def set_lut(ds, RGB):



- def affine(ds, multislice=False):
-     if multislice:
-         return image.affine_matrix(
-             get_values(ds, 'ImageOrientationPatient'),
-             get_values(ds, 'ImagePositionPatient'),
-             get_values(ds, 'PixelSpacing'),
-             get_values(ds, 'SpacingBetweenSlices'),
-         )
-     else:
-         return image.affine_matrix(
-             get_values(ds, 'ImageOrientationPatient'),
-             get_values(ds, 'ImagePositionPatient'),
-             get_values(ds, 'PixelSpacing'),
-             get_values(ds, 'SliceThickness'),
-         )
+ def affine(ds):
+     # Spacing Between Slices is not required so can be absent
+     slice_spacing = ds.get("SpacingBetweenSlices")
+     if slice_spacing is None:
+         slice_spacing = ds.get("SliceThickness")
+     return image.affine_matrix(
+         get_values(ds, 'ImageOrientationPatient'),
+         get_values(ds, 'ImagePositionPatient'),
+         get_values(ds, 'PixelSpacing'),
+         slice_spacing,
+     )

-
- def set_affine(ds, affine, multislice=False):
+ def set_affine(ds, affine):
      if affine is None:
          raise ValueError('The affine cannot be set to an empty value')
      v = image.dismantle_affine_matrix(affine)
      set_values(ds, 'PixelSpacing', v['PixelSpacing'])
-     if multislice:
-         set_values(ds, 'SpacingBetweenSlices', v['SliceThickness'])
-     else:
-         set_values(ds, 'SliceThickness', v['SliceThickness'])
+     set_values(ds, 'SpacingBetweenSlices', v['SpacingBetweenSlices'])
      set_values(ds, 'ImageOrientationPatient', v['ImageOrientationPatient'])
      set_values(ds, 'ImagePositionPatient', v['ImagePositionPatient'])
      set_values(ds, 'SliceLocation', np.dot(v['ImagePositionPatient'], v['slice_cosine']))
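The rewritten affine() prefers Spacing Between Slices and only falls back to Slice Thickness when that optional attribute is absent. A self-contained sketch of the fallback logic (the attribute values are made up):

    import pydicom

    ds = pydicom.Dataset()
    ds.SliceThickness = 5.0
    # ds.SpacingBetweenSlices = 6.0   # optional; would take precedence if present

    slice_spacing = ds.get('SpacingBetweenSlices')
    if slice_spacing is None:
        slice_spacing = ds.get('SliceThickness')

    print(slice_spacing)   # 5.0 here; 6.0 if SpacingBetweenSlices were set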
@@ -586,7 +552,7 @@ def pixel_data(ds):
      try:
          array = ds.pixel_array
      except:
-         return None
+         raise ValueError("Dataset has no pixel data.")
      array = array.astype(np.float32)
      slope = float(getattr(ds, 'RescaleSlope', 1))
      intercept = float(getattr(ds, 'RescaleIntercept', 0))
@@ -595,7 +561,7 @@
      return np.transpose(array)


- def set_pixel_data(ds, array, value_range=None):
+ def set_pixel_data(ds, array):
      if array is None:
          raise ValueError('The pixel array cannot be set to an empty value.')

@@ -611,7 +577,7 @@ def set_pixel_data(ds, array, value_range=None):
      # if array.ndim >= 3: # remove spurious dimensions of 1
      #     array = np.squeeze(array)

-     array = image.clip(array.astype(np.float32), value_range=value_range)
+     array = image.clip(array.astype(np.float32))
      array, slope, intercept = image.scale_to_range(array, ds.BitsAllocated)
      array = np.transpose(array)

@@ -629,17 +595,26 @@
      ds.PixelData = array.tobytes()


- def volume(ds, multislice=False):
-     return vreg.volume(pixel_data(ds), affine(ds, multislice))
+ def volume(ds):
+     return vreg.volume(pixel_data(ds), affine(ds))

- def set_volume(ds, volume:vreg.Volume3D, multislice=False):
+ def set_volume(ds, volume:vreg.Volume3D):
      if volume is None:
          raise ValueError('The volume cannot be set to an empty value.')
+     try:
+         mod = SOPCLASSMODULE[ds.SOPClassUID]
+     except KeyError:
+         raise ValueError(
+             f"DICOM class {ds.SOPClassUID} is not currently supported."
+         )
+     if hasattr(mod, 'set_volume'):
+         return getattr(mod, 'set_volume')(ds, volume)
+
      image = np.squeeze(volume.values)
      if image.ndim != 2:
          raise ValueError("Can only write 2D images to a dataset.")
      set_pixel_data(ds, image)
-     set_affine(ds, volume.affine, multislice)
+     set_affine(ds, volume.affine)
      if volume.coords is not None:
          # All other dimensions should have size 1
          coords = volume.coords.reshape((volume.coords.shape[0], -1))
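set_volume() now delegates to the SOP-class module whenever that module defines its own set_volume (as parametric_map now does), and only falls back to the generic single-slice writer otherwise. A toy sketch of that lookup-and-delegate pattern; the mapping contents and handler class below are made up, only the control flow mirrors the diff:

    # Toy illustration of the dispatch used by set_volume.
    class _ParametricMapModule:
        @staticmethod
        def set_volume(ds, volume):
            print('SOP-class specific writer called')

    SOPCLASSMODULE = {'1.2.840.10008.5.1.4.1.1.30': _ParametricMapModule}  # Parametric Map Storage

    def dispatch_set_volume(ds, volume):
        try:
            mod = SOPCLASSMODULE[ds.SOPClassUID]
        except KeyError:
            raise ValueError(f'DICOM class {ds.SOPClassUID} is not currently supported.')
        if hasattr(mod, 'set_volume'):
            return mod.set_volume(ds, volume)
        # otherwise fall back to the generic 2D writer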