dbdicom 0.2.0__py3-none-any.whl → 0.3.16__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (72)
  1. dbdicom/__init__.py +3 -25
  2. dbdicom/api.py +496 -0
  3. dbdicom/const.py +144 -0
  4. dbdicom/database.py +133 -0
  5. dbdicom/dataset.py +471 -0
  6. dbdicom/dbd.py +1290 -0
  7. dbdicom/external/__pycache__/__init__.cpython-311.pyc +0 -0
  8. dbdicom/external/dcm4che/__pycache__/__init__.cpython-311.pyc +0 -0
  9. dbdicom/external/dcm4che/bin/__pycache__/__init__.cpython-311.pyc +0 -0
  10. dbdicom/external/dcm4che/bin/emf2sf +57 -57
  11. dbdicom/register.py +402 -0
  12. dbdicom/{ds/types → sop_classes}/ct_image.py +2 -16
  13. dbdicom/{ds/types → sop_classes}/enhanced_mr_image.py +206 -160
  14. dbdicom/sop_classes/mr_image.py +338 -0
  15. dbdicom/sop_classes/parametric_map.py +381 -0
  16. dbdicom/sop_classes/secondary_capture.py +140 -0
  17. dbdicom/sop_classes/segmentation.py +311 -0
  18. dbdicom/{ds/types → sop_classes}/ultrasound_multiframe_image.py +1 -15
  19. dbdicom/{ds/types → sop_classes}/xray_angiographic_image.py +2 -17
  20. dbdicom/utils/arrays.py +142 -0
  21. dbdicom/utils/files.py +0 -20
  22. dbdicom/utils/image.py +43 -466
  23. dbdicom/utils/pydicom_dataset.py +386 -0
  24. dbdicom-0.3.16.dist-info/METADATA +26 -0
  25. dbdicom-0.3.16.dist-info/RECORD +54 -0
  26. {dbdicom-0.2.0.dist-info → dbdicom-0.3.16.dist-info}/WHEEL +1 -1
  27. dbdicom/create.py +0 -450
  28. dbdicom/ds/__init__.py +0 -10
  29. dbdicom/ds/create.py +0 -63
  30. dbdicom/ds/dataset.py +0 -841
  31. dbdicom/ds/dictionaries.py +0 -620
  32. dbdicom/ds/types/mr_image.py +0 -267
  33. dbdicom/ds/types/parametric_map.py +0 -226
  34. dbdicom/external/__pycache__/__init__.cpython-310.pyc +0 -0
  35. dbdicom/external/__pycache__/__init__.cpython-37.pyc +0 -0
  36. dbdicom/external/dcm4che/__pycache__/__init__.cpython-310.pyc +0 -0
  37. dbdicom/external/dcm4che/__pycache__/__init__.cpython-37.pyc +0 -0
  38. dbdicom/external/dcm4che/bin/__pycache__/__init__.cpython-310.pyc +0 -0
  39. dbdicom/external/dcm4che/bin/__pycache__/__init__.cpython-37.pyc +0 -0
  40. dbdicom/external/dcm4che/lib/linux-x86/libclib_jiio.so +0 -0
  41. dbdicom/external/dcm4che/lib/linux-x86-64/libclib_jiio.so +0 -0
  42. dbdicom/external/dcm4che/lib/linux-x86-64/libopencv_java.so +0 -0
  43. dbdicom/external/dcm4che/lib/solaris-sparc/libclib_jiio.so +0 -0
  44. dbdicom/external/dcm4che/lib/solaris-sparc/libclib_jiio_vis.so +0 -0
  45. dbdicom/external/dcm4che/lib/solaris-sparc/libclib_jiio_vis2.so +0 -0
  46. dbdicom/external/dcm4che/lib/solaris-sparcv9/libclib_jiio.so +0 -0
  47. dbdicom/external/dcm4che/lib/solaris-sparcv9/libclib_jiio_vis.so +0 -0
  48. dbdicom/external/dcm4che/lib/solaris-sparcv9/libclib_jiio_vis2.so +0 -0
  49. dbdicom/external/dcm4che/lib/solaris-x86/libclib_jiio.so +0 -0
  50. dbdicom/external/dcm4che/lib/solaris-x86-64/libclib_jiio.so +0 -0
  51. dbdicom/manager.py +0 -2077
  52. dbdicom/message.py +0 -119
  53. dbdicom/record.py +0 -1526
  54. dbdicom/types/database.py +0 -107
  55. dbdicom/types/instance.py +0 -184
  56. dbdicom/types/patient.py +0 -40
  57. dbdicom/types/series.py +0 -816
  58. dbdicom/types/study.py +0 -58
  59. dbdicom/utils/variables.py +0 -155
  60. dbdicom/utils/vreg.py +0 -2626
  61. dbdicom/wrappers/__init__.py +0 -7
  62. dbdicom/wrappers/dipy.py +0 -462
  63. dbdicom/wrappers/elastix.py +0 -855
  64. dbdicom/wrappers/numpy.py +0 -119
  65. dbdicom/wrappers/scipy.py +0 -1413
  66. dbdicom/wrappers/skimage.py +0 -1030
  67. dbdicom/wrappers/sklearn.py +0 -151
  68. dbdicom/wrappers/vreg.py +0 -273
  69. dbdicom-0.2.0.dist-info/METADATA +0 -276
  70. dbdicom-0.2.0.dist-info/RECORD +0 -81
  71. {dbdicom-0.2.0.dist-info → dbdicom-0.3.16.dist-info/licenses}/LICENSE +0 -0
  72. {dbdicom-0.2.0.dist-info → dbdicom-0.3.16.dist-info}/top_level.txt +0 -0
dbdicom/database.py ADDED
@@ -0,0 +1,133 @@
1
+ import os
2
+ from pathlib import Path
3
+
4
+ from tqdm import tqdm
5
+ import numpy as np
6
+ import pandas as pd
7
+ import pydicom
8
+
9
+ import dbdicom.utils.dcm4che as dcm4che
10
+ import dbdicom.utils.files as filetools
11
+ from dbdicom.utils.pydicom_dataset import get_values
12
+
13
+
14
# Default columns of the database register. The first four are the DICOM
# hierarchy identifiers (globally unique UIDs); the remainder are
# human-readable descriptors used to build the summary tree.
COLUMNS = [
    # Identifiers (unique)
    'PatientID',
    'StudyInstanceUID',
    'SeriesInstanceUID',
    'SOPInstanceUID',
    # Human-readable identifiers (not unique)
    'PatientName',
    'StudyDescription',
    'StudyDate',
    'StudyID',
    'SeriesDescription',
    'SeriesNumber',
    'InstanceNumber',
]
29
+
30
def read(path):
    """Read a DICOM folder and return a human-readable summary tree.

    Scans every file under *path*, keeps only readable single-frame DICOM
    image datasets, and builds a nested patient/study/series/instance tree.

    Args:
        path: Root folder of the DICOM database.

    Returns:
        list: Nested summary tree as built by _tree().
    """
    files = filetools.all_files(path)
    tags = COLUMNS + ['NumberOfFrames']  # + ['SOPClassUID']
    array = []
    for file in tqdm(files, total=len(files), desc='Reading DICOM folder'):
        try:
            # 'Rows' is read alongside the register tags to detect image data
            ds = pydicom.dcmread(file, force=True, specific_tags=tags + ['Rows'])
        except Exception:
            # Not a readable DICOM file - skip it
            continue
        if not isinstance(ds, pydicom.dataset.FileDataset):
            continue
        if 'TransferSyntaxUID' not in ds.file_meta:
            continue
        if 'Rows' not in ds:  # Keep image datasets only
            continue
        row = get_values(ds, tags)
        # Store the relative path split into its components
        index = os.path.relpath(file, path)
        parts = list(Path(index).parts)
        array.append([parts] + row)
    df = pd.DataFrame(array, columns=['rel_path'] + tags)
    df = _multiframe_to_singleframe(path, df)  # needs updating and testing
    dbtree = _tree(df)
    return dbtree
54
+
55
+
56
+ def _multiframe_to_singleframe(path, df):
57
+ """Converts all multiframe files in the folder into single-frame files.
58
+
59
+ Reads all the multi-frame files in the folder,
60
+ converts them to singleframe files, and delete the original multiframe file.
61
+ """
62
+ singleframe = df.NumberOfFrames.isnull()
63
+ multiframe = singleframe == False
64
+ nr_multiframe = multiframe.sum()
65
+ if nr_multiframe != 0:
66
+ raise ValueError(
67
+ "dbdicom currently does not support multiframe data."
68
+ "Please remove them from the database and try again."
69
+ )
70
+ for relpath in tqdm(df[multiframe].index.values, desc="Converting multiframe file " + relpath):
71
+ filepath = [path] + [relpath]
72
+ filepath = Path(*filepath)
73
+ singleframe_files = dcm4che.split_multiframe(str(filepath))
74
+ if singleframe_files != []:
75
+ # add the single frame files to the dataframe
76
+ dfnew = read(singleframe_files, df.columns, path) # This needs fixing
77
+ df = pd.concat([df, dfnew])
78
+ # delete the original multiframe
79
+ os.remove(filepath)
80
+ # drop the file also if the conversion has failed
81
+ df.drop(index=relpath, inplace=True)
82
+ df.drop('NumberOfFrames', axis=1, inplace=True)
83
+ return df
84
+
85
+
86
+ def _tree(df):
87
+ # A human-readable summary tree
88
+ # TODO: Add version number
89
+
90
+ df.sort_values(['PatientID','StudyInstanceUID','SeriesNumber'], inplace=True)
91
+ df = df.fillna('None')
92
+ summary = []
93
+
94
+ for uid_patient in df.PatientID.unique():
95
+ df_patient = df[df.PatientID == uid_patient]
96
+ patient_name = df_patient.PatientName.values[0]
97
+ patient = {
98
+ 'PatientName': patient_name,
99
+ 'PatientID': uid_patient,
100
+ 'studies': [],
101
+ }
102
+ summary.append(patient)
103
+ for uid_study in df_patient.StudyInstanceUID.unique():
104
+ df_study = df_patient[df_patient.StudyInstanceUID == uid_study]
105
+ study_desc = df_study.StudyDescription.values[0]
106
+ study_id = df_study.StudyID.values[0]
107
+ study_date = df_study.StudyDate.values[0]
108
+ study = {
109
+ 'StudyDescription': study_desc,
110
+ 'StudyDate': study_date,
111
+ 'StudyID': study_id,
112
+ 'StudyInstanceUID': uid_study,
113
+ 'series': [],
114
+ }
115
+ patient['studies'].append(study)
116
+ for uid_sery in df_study.SeriesInstanceUID.unique():
117
+ df_series = df_study[df_study.SeriesInstanceUID == uid_sery]
118
+ series_desc = df_series.SeriesDescription.values[0]
119
+ series_nr = int(df_series.SeriesNumber.values[0])
120
+ series = {
121
+ 'SeriesNumber': series_nr,
122
+ 'SeriesDescription': series_desc,
123
+ 'SeriesInstanceUID': uid_sery,
124
+ 'instances': {},
125
+ }
126
+ study['series'].append(series)
127
+ for uid_instance in df_series.SOPInstanceUID.unique():
128
+ df_instance = df_series[df_series.SOPInstanceUID == uid_instance]
129
+ instance_nr = int(df_instance.InstanceNumber.values[0])
130
+ relpath = df_instance.rel_path.values[0]
131
+ series['instances'][instance_nr]=relpath
132
+
133
+ return summary
dbdicom/dataset.py ADDED
@@ -0,0 +1,471 @@
1
+ # Test data
2
+ # https://www.aliza-dicom-viewer.com/download/datasets
3
+
4
+ import os
5
+ import struct
6
+ from tqdm import tqdm
7
+
8
+ import numpy as np
9
+ import pydicom
10
+ from pydicom.util.codify import code_file
11
+ from pydicom.tag import Tag
12
+ import pydicom.config
13
+ import vreg
14
+
15
+ from dbdicom.utils.pydicom_dataset import get_values, set_values
16
+ import dbdicom.utils.image as image
17
+ from dbdicom.sop_classes import (
18
+ xray_angiographic_image,
19
+ ct_image,
20
+ mr_image,
21
+ enhanced_mr_image,
22
+ ultrasound_multiframe_image,
23
+ parametric_map,
24
+ segmentation,
25
+ )
26
+
27
+
28
+ # This ensures that dates and times are read as TM, DT and DA classes
29
+ pydicom.config.datetime_conversion = True
30
+
31
+
32
# Supported SOP Class UIDs mapped to human-readable class names.
SOPCLASS = {
    '1.2.840.10008.5.1.4.1.1.4': 'MRImage',
    '1.2.840.10008.5.1.4.1.1.4.1': 'EnhancedMRImage',
    '1.2.840.10008.5.1.4.1.1.2': 'CTImage',
    '1.2.840.10008.5.1.4.1.1.12.2': 'XrayAngiographicImage',
    '1.2.840.10008.5.1.4.1.1.3.1': 'UltrasoundMultiFrameImage',
    '1.2.840.10008.5.1.4.1.1.30': 'ParametricMap',
    '1.2.840.10008.5.1.4.1.1.66.4': 'Segmentation',
}
# Supported SOP Class UIDs mapped to the module implementing each class.
# Modules may override pixel_data/set_pixel_data/set_volume behavior.
SOPCLASSMODULE = {
    '1.2.840.10008.5.1.4.1.1.4': mr_image,
    '1.2.840.10008.5.1.4.1.1.4.1': enhanced_mr_image,
    '1.2.840.10008.5.1.4.1.1.2': ct_image,
    '1.2.840.10008.5.1.4.1.1.12.2': xray_angiographic_image,
    '1.2.840.10008.5.1.4.1.1.3.1': ultrasound_multiframe_image,
    '1.2.840.10008.5.1.4.1.1.30': parametric_map,
    '1.2.840.10008.5.1.4.1.1.66.4': segmentation,
}
50
+
51
+
52
+ # def read_dataset(file):
53
+
54
+ # try:
55
+ # ds = pydicom.dcmread(file)
56
+ # # ds = pydicom.dcmread(file, force=True) # more robust but hides corrupted data
57
+ # except Exception:
58
+ # raise FileNotFoundError('File not found')
59
+
60
+ # return ds
61
+
62
+
63
def new_dataset(sop_class):
    """Create a default dataset of the requested SOP class.

    Args:
        sop_class: Human-readable SOP class name, e.g. 'MRImage'
            (one of the values of the SOPCLASS dictionary).

    Returns:
        A new dataset with default values for that class.

    Raises:
        ValueError: If the SOP class is not supported.
    """
    if sop_class == 'MRImage':
        return mr_image.default()
    elif sop_class == 'EnhancedMRImage':
        return enhanced_mr_image.default()
    elif sop_class == 'CTImage':
        return ct_image.default()
    elif sop_class == 'XrayAngiographicImage':
        return xray_angiographic_image.default()
    elif sop_class == 'UltrasoundMultiFrameImage':
        return ultrasound_multiframe_image.default()
    elif sop_class == 'ParametricMap':
        return parametric_map.default()
    elif sop_class == 'Segmentation':
        # Fix: Segmentation is registered in SOPCLASSMODULE but was missing here
        return segmentation.default()
    else:
        raise ValueError(
            f"DICOM class {sop_class} is not currently supported"
        )
81
+
82
+
83
def write(ds, file, status=None):
    """Write a dataset to disk, creating parent directories as needed.

    Args:
        ds: pydicom dataset to save.
        file: Target file path.
        status: Unused; kept for backward compatibility with callers.
    """
    # Fix: os.path.dirname() returns '' for a bare filename, and
    # os.makedirs('') raises - only create a directory when there is one.
    # exist_ok avoids the check-then-create race of the previous version.
    directory = os.path.dirname(file)
    if directory:
        os.makedirs(directory, exist_ok=True)
    # ds.save_as(file, write_like_original=False)  # deprecated
    pydicom.dcmwrite(file, ds, enforce_file_format=True)
90
+
91
+
92
def codify(source_file, save_file, **kwargs):
    """Translate a DICOM file into equivalent pydicom Python code.

    Args:
        source_file: Path of the DICOM file to translate.
        save_file: Path of the Python source file to write.
        **kwargs: Passed through to pydicom.util.codify.code_file.
    """
    code = code_file(source_file, **kwargs)  # fix: was shadowing builtin 'str'
    # Context manager ensures the file is closed even if write() fails
    with open(save_file, "w") as f:
        f.write(code)
97
+
98
+
99
def read_data(files, tags, path=None, images_only=False):  # obsolete??
    """Read the given tags from a list of DICOM files.

    Args:
        files: A file path or list of file paths.
        tags: A tag keyword or list of tag keywords to read.
        path: If given, result keys are paths relative to this folder;
            otherwise the file paths are used as-is.
        images_only: If True, skip datasets without pixel data ('Rows' absent).

    Returns:
        dict: Maps each file (or relative path) to its list of tag values.
    """
    if np.isscalar(files):
        files = [files]
    if np.isscalar(tags):
        tags = [tags]
    values = {}  # fix: was shadowing builtin 'dict'
    for file in tqdm(files, desc='reading files..'):
        try:
            # 'Rows' is read alongside the requested tags to detect image data
            ds = pydicom.dcmread(file, force=True, specific_tags=tags + ['Rows'])
        except Exception:
            # Unreadable file - skip it
            continue
        if not isinstance(ds, pydicom.dataset.FileDataset):
            continue
        if 'TransferSyntaxUID' not in ds.file_meta:
            continue
        if images_only and 'Rows' not in ds:
            continue
        key = file if path is None else os.path.relpath(file, path)
        values[key] = get_values(ds, tags)
    return values
124
+
125
+
126
+
127
+ # def new_uid(n=None):
128
+
129
+ # if n is None:
130
+ # return pydicom.uid.generate_uid()
131
+ # else:
132
+ # return [pydicom.uid.generate_uid() for _ in range(n)]
133
+
134
+
135
+
136
def window(ds):
    """Centre and width of the pixel data after applying rescale slope and intercept.

    Uses the WindowCenter/WindowWidth attributes when present; any missing
    value is derived from the pixel data instead.

    Args:
        ds: DICOM dataset.

    Returns:
        tuple: (centre, width).
    """
    # Fix: centre/width were previously undefined (NameError) when the
    # corresponding attributes were absent from the dataset.
    centre, width = None, None
    if 'WindowCenter' in ds:
        centre = ds.WindowCenter
    if 'WindowWidth' in ds:
        width = ds.WindowWidth
    if centre is None or width is None:
        array = pixel_data(ds)
        # p = np.percentile(array, [25, 50, 75])
        amin = np.min(array)  # renamed: min/max shadowed builtins
        amax = np.max(array)
        if centre is None:
            centre = (amax + amin) / 2
            # centre = p[1]
        if width is None:
            width = 0.9 * (amax - amin)
            # width = p[2] - p[0]
    return centre, width
155
+
156
def set_window(ds, center, width):
    """Store the display window centre and width on the dataset."""
    ds.WindowCenter, ds.WindowWidth = center, width
159
+
160
# List of all supported (matplotlib) colormaps

COLORMAPS = ['cividis', 'magma', 'plasma', 'viridis',
    'Greys', 'Purples', 'Blues', 'Greens', 'Oranges', 'Reds',
    'YlOrBr', 'YlOrRd', 'OrRd', 'PuRd', 'RdPu', 'BuPu',
    'GnBu', 'PuBu', 'YlGnBu', 'PuBuGn', 'BuGn', 'YlGn',
    'binary', 'gist_yarg', 'gist_gray', 'bone', 'pink',
    'spring', 'summer', 'autumn', 'winter', 'cool', 'Wistia',
    'hot', 'afmhot', 'gist_heat', 'copper',
    'PiYG', 'PRGn', 'BrBG', 'PuOr', 'RdGy', 'RdBu',
    'RdYlBu', 'RdYlGn', 'Spectral', 'coolwarm', 'bwr', 'seismic',
    'twilight', 'twilight_shifted', 'hsv',
    'flag', 'prism', 'ocean', 'gist_earth', 'terrain', 'gist_stern',
    'gnuplot', 'gnuplot2', 'CMRmap', 'cubehelix', 'brg', 'turbo',
    'gist_rainbow', 'rainbow', 'jet', 'nipy_spectral', 'gist_ncar']

# TODO: Include support for DICOM native colormaps (see pydicom guide on working with pixel data)
177
+
178
+
179
def lut(ds):
    """Return the palette color lookup table as RGB floats in [0, 1].

    Args:
        ds: DICOM dataset.

    Returns:
        numpy array of shape (n, 3) with values in [0, 1], or None if the
        dataset does not use PALETTE COLOR photometric interpretation.

    Raises:
        ValueError: If BitsAllocated is neither 8 nor 16.
    """
    if 'PhotometricInterpretation' not in ds:
        return None
    if ds.PhotometricInterpretation != 'PALETTE COLOR':
        return None

    if ds.BitsAllocated == 8:
        dtype = np.ubyte
    elif ds.BitsAllocated == 16:
        dtype = np.uint16
    else:
        # Fix: dtype was previously undefined here, producing a confusing
        # NameError further down.
        raise ValueError(f"Unsupported BitsAllocated: {ds.BitsAllocated}")

    R = np.frombuffer(ds.RedPaletteColorLookupTableData, dtype=dtype)
    G = np.frombuffer(ds.GreenPaletteColorLookupTableData, dtype=dtype)
    B = np.frombuffer(ds.BluePaletteColorLookupTableData, dtype=dtype)

    R = R.astype(np.float32)
    G = G.astype(np.float32)
    B = B.astype(np.float32)

    # Third descriptor value is the number of bits per LUT entry;
    # divide by the maximum representable value to normalize to [0, 1].
    R *= 1.0 / (np.power(2, ds.RedPaletteColorLookupTableDescriptor[2]) - 1)
    G *= 1.0 / (np.power(2, ds.GreenPaletteColorLookupTableDescriptor[2]) - 1)
    B *= 1.0 / (np.power(2, ds.BluePaletteColorLookupTableDescriptor[2]) - 1)

    return np.transpose([R, G, B])
209
+
210
+
211
def set_lut(ds, RGB):
    """Set RGB as float with values in range [0,1].

    NOTE(review): mutates the caller's RGB array in place (the *= below),
    and the LUT descriptors are hard-coded to 255 entries — confirm this
    matches the actual length of RGB.
    """

    ds.PhotometricInterpretation = 'PALETTE COLOR'

    # Scale the [0,1] floats up to the full integer range of the allocated bits
    RGB *= (np.power(2, ds.BitsAllocated) - 1)

    if ds.BitsAllocated == 8:
        RGB = RGB.astype(np.ubyte)
    elif ds.BitsAllocated == 16:
        RGB = RGB.astype(np.uint16)

    # Define the properties of the LUT
    # (0028,1101/2/3): Red/Green/Blue Palette Color Lookup Table Descriptor
    ds.add_new('0x00281101', 'US', [255, 0, ds.BitsAllocated])
    ds.add_new('0x00281102', 'US', [255, 0, ds.BitsAllocated])
    ds.add_new('0x00281103', 'US', [255, 0, ds.BitsAllocated])

    # Scale the colorsList to the available range
    ds.RedPaletteColorLookupTableData = bytes(RGB[:,0])
    ds.GreenPaletteColorLookupTableData = bytes(RGB[:,1])
    ds.BluePaletteColorLookupTableData = bytes(RGB[:,2])
232
+
233
+
234
+
235
def affine(ds, multislice=False):
    """Return the affine matrix describing the dataset's image geometry.

    Args:
        ds: DICOM dataset.
        multislice: True for 2D scans, where the slice spacing is taken
            from the slice thickness.

    Returns:
        The affine as built by image.affine_matrix.
    """
    if multislice:
        # For 2D scans the slice_spacing is the slice thickness
        spacing = ds.get("SliceThickness")
    else:
        # For 3D scans the slice spacing is the SpacingBetweenSlices.
        # Spacing Between Slices is not required so can be absent -
        # fall back to SliceThickness. This is less critical because when
        # reading a 3D volume the definitive slice_spacing is inferred
        # from the slice positions.
        spacing = ds.get("SpacingBetweenSlices")
        if spacing is None:
            spacing = ds.get("SliceThickness")

    return image.affine_matrix(
        get_values(ds, 'ImageOrientationPatient'),
        get_values(ds, 'ImagePositionPatient'),
        get_values(ds, 'PixelSpacing'),
        spacing,
        # slice_location = get_values(ds, 'SliceLocation')
    )
256
+
257
def slice_location(ds):
    """Return the slice location, computing it from the image geometry if absent."""
    loc = get_values(ds, 'SliceLocation')
    if loc is not None:
        return loc
    orientation = get_values(ds, 'ImageOrientationPatient')
    position = get_values(ds, 'ImagePositionPatient')
    # Slice normal is the cross product of the row and column directions
    row_dir = np.array(orientation[:3])
    col_dir = np.array(orientation[3:])
    normal = np.cross(row_dir, col_dir)
    # Project the image position onto the slice normal
    return np.dot(position, normal)
267
+
268
+
269
def set_affine(ds, affine):
    """Write the geometry encoded in *affine* into the dataset.

    Raises:
        ValueError: If affine is None.
    """
    if affine is None:
        raise ValueError('The affine cannot be set to an empty value')
    geometry = image.dismantle_affine_matrix(affine)
    for tag in ('PixelSpacing', 'SpacingBetweenSlices',
                'ImageOrientationPatient', 'ImagePositionPatient'):
        set_values(ds, tag, geometry[tag])
    # Slice location is the projection of the position on the slice normal
    set_values(ds, 'SliceLocation',
               np.dot(geometry['ImagePositionPatient'], geometry['slice_cosine']))
278
+
279
+
280
def pixel_data(ds):
    """Return the pixel array as float32 with rescale slope/intercept applied.

    Delegates to the SOP class module when it defines its own pixel_data.

    Args:
        ds: DICOM dataset.

    Returns:
        numpy array, transposed relative to ds.pixel_array.

    Raises:
        ValueError: If the SOP class is unsupported or the dataset has no
            pixel data.
    """
    try:
        mod = SOPCLASSMODULE[ds.SOPClassUID]
    except KeyError:
        raise ValueError(
            f"DICOM class {ds.SOPClassUID} is not currently supported."
        )
    if hasattr(mod, 'pixel_data'):
        return getattr(mod, 'pixel_data')(ds)

    try:
        array = ds.pixel_array
    except Exception as e:
        # Fix: was a bare except, which also swallowed KeyboardInterrupt;
        # chain the cause so the underlying decode error is not lost.
        raise ValueError("Dataset has no pixel data.") from e
    array = array.astype(np.float32)
    # Default slope/intercept when the rescale tags are absent
    slope = float(getattr(ds, 'RescaleSlope', 1))
    intercept = float(getattr(ds, 'RescaleIntercept', 0))
    array *= slope
    array += intercept
    return np.transpose(array)
301
+
302
+
303
def set_pixel_data(ds, array):
    """Write a 2D array into the dataset's PixelData, setting rescale tags.

    (u)int16 arrays are stored as-is with unit rescale; any other dtype is
    clipped and scaled into the stored 16-bit range, with the inverse
    scaling recorded in RescaleSlope/RescaleIntercept so reading back
    recovers the original values.

    Delegates to the SOP class module when it defines its own set_pixel_data.

    Args:
        ds: DICOM dataset.
        array: 2D numpy array (x, y); stored transposed as (Rows, Columns).

    Raises:
        ValueError: If array is None or the SOP class is unsupported.
    """
    if array is None:
        raise ValueError('The pixel array cannot be set to an empty value.')

    try:
        mod = SOPCLASSMODULE[ds.SOPClassUID]
    except KeyError:
        raise ValueError(
            f"DICOM class {ds.SOPClassUID} is not currently supported."
        )
    if hasattr(mod, 'set_pixel_data'):
        return getattr(mod, 'set_pixel_data')(ds, array)

    ds.BitsAllocated = 16
    ds.BitsStored = 16
    ds.HighBit = 15

    if array.dtype == np.int16:
        array = image.clip(array)  # remove nan and infs
        ds.PixelRepresentation = 1  # signed
        ds.RescaleSlope = 1
        ds.RescaleIntercept = 0
    elif array.dtype == np.uint16:
        array = image.clip(array)  # remove nan and infs
        ds.PixelRepresentation = 0  # unsigned
        ds.RescaleSlope = 1
        ds.RescaleIntercept = 0
    else:
        array = image.clip(array)  # remove nan and infs
        array, slope, intercept = image.scale_to_range(array, ds.BitsStored)
        ds.PixelRepresentation = 0
        # Invert the applied scaling so that reading back with
        # value * RescaleSlope + RescaleIntercept restores the original.
        ds.RescaleSlope = 1 / slope
        ds.RescaleIntercept = - intercept / slope

    array = np.transpose(array)
    ds.Rows = array.shape[0]
    ds.Columns = array.shape[1]
    ds.PixelData = array.tobytes()
361
+
362
+
363
def volume(ds, multislice=False):
    """Return the dataset's image as a vreg volume (pixel values + affine)."""
    values = pixel_data(ds)
    geometry = affine(ds, multislice=multislice)
    return vreg.volume(values, geometry)
365
+
366
+
367
+
368
def is_valid_dicom_tag(value):
    """Return True if *value* maps to a known DICOM dictionary keyword."""
    try:
        keyword = pydicom.datadict.dictionary_keyword(Tag(value))
    except Exception:
        return False
    return keyword != ''
374
+
375
def set_volume(ds, volume:vreg.Volume3D):
    """Write a vreg 3D volume (pixel data, affine and coordinates) to the dataset.

    Delegates to the SOP class module when it defines its own set_volume.

    Args:
        ds: DICOM dataset.
        volume: vreg.Volume3D whose values squeeze down to a 2D image.

    Raises:
        ValueError: If volume is None, the SOP class is unsupported, the
            values are not 2D after squeezing, or a volume dimension is
            not a recognized DICOM data-element.
    """
    if volume is None:
        raise ValueError('The volume cannot be set to an empty value.')
    try:
        mod = SOPCLASSMODULE[ds.SOPClassUID]
    except KeyError:
        raise ValueError(
            f"DICOM class {ds.SOPClassUID} is not currently supported."
        )
    if hasattr(mod, 'set_volume'):
        return getattr(mod, 'set_volume')(ds, volume)

    # Fix: the local was named 'image', shadowing the imported image module
    values = np.squeeze(volume.values)
    if values.ndim != 2:
        raise ValueError("Can only write 2D images to a dataset.")
    set_pixel_data(ds, values)
    set_affine(ds, volume.affine)
    if volume.coords is not None:
        # All other dimensions should have size 1
        coords = [c.reshape(-1) for c in volume.coords]
        for i, d in enumerate(volume.dims):
            if not is_valid_dicom_tag(d):
                raise ValueError(
                    "Cannot write volume to DICOM. "
                    f"Volume dimension {d} is not a recognized DICOM data-element. "
                    f"Use Volume3D.set_dims() with proper DICOM "
                    "tags to change the dimensions."
                )
            else:
                set_values(ds, d, coords[i][0])
405
+
406
+
407
+
408
def image_type(ds):
    """Determine if an image is Magnitude, Phase, Real or Imaginary.

    Checks the GE private tag (0043,102f) first, then the standard
    ImageType and ComplexImageComponent attributes.

    Returns:
        One of 'MAGNITUDE', 'PHASE', 'REAL', 'IMAGINARY', the dataset's
        ComplexImageComponent value, or 'UNKNOWN'.
    """
    if (0x0043, 0x102f) in ds:
        private_ge = ds[0x0043, 0x102f]
        try:
            # The GE private tag may store the type as a packed short
            value = struct.unpack('h', private_ge.value)[0]
        except (struct.error, TypeError):
            # Fix: was a bare except; value is already decoded - use it as-is
            value = private_ge.value
        if value == 0:
            return 'MAGNITUDE'
        if value == 1:
            return 'PHASE'
        if value == 2:
            return 'REAL'
        if value == 3:
            return 'IMAGINARY'

    if 'ImageType' in ds:
        image_types = set(ds.ImageType)  # renamed: 'type' shadowed the builtin
        if image_types.intersection({'M', 'MAGNITUDE'}):
            return 'MAGNITUDE'
        if image_types.intersection({'P', 'PHASE'}):
            return 'PHASE'
        if image_types.intersection({'R', 'REAL'}):
            return 'REAL'
        if image_types.intersection({'I', 'IMAGINARY'}):
            return 'IMAGINARY'

    if 'ComplexImageComponent' in ds:
        return ds.ComplexImageComponent

    return 'UNKNOWN'
441
+
442
+
443
def set_image_type(ds, value):
    """Store *value* in the dataset's ImageType attribute."""
    ds.ImageType = value
445
+
446
+
447
def signal_type(ds):
    """Determine if an image is Water, Fat, In-Phase, Out-phase image or None.

    Returns 'WATER', 'FAT', 'IN_PHASE', 'OP_PHASE' or 'UNKNOWN'.
    NOTE(review): 'OP_PHASE' looks like it was meant to be 'OUT_PHASE';
    kept as-is because callers may depend on the current value.
    """
    if not hasattr(ds, 'ImageType'):
        return 'UNKNOWN'
    flags = set(ds.ImageType)
    if flags & {'W', 'WATER'}:
        return 'WATER'
    if flags & {'F', 'FAT'}:
        return 'FAT'
    if flags & {'IP', 'IN_PHASE'}:
        return 'IN_PHASE'
    if flags & {'OP', 'OUT_PHASE'}:
        return 'OP_PHASE'
    return 'UNKNOWN'
461
+
462
+
463
def set_signal_type(ds, value):
    """Store *value* in the dataset's ImageType attribute."""
    ds.ImageType = value
465
+
466
+
467
+
468
# Script entry point - currently a no-op; the commented call below is an
# example of translating a DICOM file to pydicom code with codify().
if __name__=='__main__':

    pass
    #codify('C:\\Users\\md1spsx\\Documents\\f32bit.dcm', 'C:\\Users\\md1spsx\\Documents\\f32bit.py')