dbdicom 0.2.0__py3-none-any.whl → 0.3.16__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (72) hide show
  1. dbdicom/__init__.py +3 -25
  2. dbdicom/api.py +496 -0
  3. dbdicom/const.py +144 -0
  4. dbdicom/database.py +133 -0
  5. dbdicom/dataset.py +471 -0
  6. dbdicom/dbd.py +1290 -0
  7. dbdicom/external/__pycache__/__init__.cpython-311.pyc +0 -0
  8. dbdicom/external/dcm4che/__pycache__/__init__.cpython-311.pyc +0 -0
  9. dbdicom/external/dcm4che/bin/__pycache__/__init__.cpython-311.pyc +0 -0
  10. dbdicom/external/dcm4che/bin/emf2sf +57 -57
  11. dbdicom/register.py +402 -0
  12. dbdicom/{ds/types → sop_classes}/ct_image.py +2 -16
  13. dbdicom/{ds/types → sop_classes}/enhanced_mr_image.py +206 -160
  14. dbdicom/sop_classes/mr_image.py +338 -0
  15. dbdicom/sop_classes/parametric_map.py +381 -0
  16. dbdicom/sop_classes/secondary_capture.py +140 -0
  17. dbdicom/sop_classes/segmentation.py +311 -0
  18. dbdicom/{ds/types → sop_classes}/ultrasound_multiframe_image.py +1 -15
  19. dbdicom/{ds/types → sop_classes}/xray_angiographic_image.py +2 -17
  20. dbdicom/utils/arrays.py +142 -0
  21. dbdicom/utils/files.py +0 -20
  22. dbdicom/utils/image.py +43 -466
  23. dbdicom/utils/pydicom_dataset.py +386 -0
  24. dbdicom-0.3.16.dist-info/METADATA +26 -0
  25. dbdicom-0.3.16.dist-info/RECORD +54 -0
  26. {dbdicom-0.2.0.dist-info → dbdicom-0.3.16.dist-info}/WHEEL +1 -1
  27. dbdicom/create.py +0 -450
  28. dbdicom/ds/__init__.py +0 -10
  29. dbdicom/ds/create.py +0 -63
  30. dbdicom/ds/dataset.py +0 -841
  31. dbdicom/ds/dictionaries.py +0 -620
  32. dbdicom/ds/types/mr_image.py +0 -267
  33. dbdicom/ds/types/parametric_map.py +0 -226
  34. dbdicom/external/__pycache__/__init__.cpython-310.pyc +0 -0
  35. dbdicom/external/__pycache__/__init__.cpython-37.pyc +0 -0
  36. dbdicom/external/dcm4che/__pycache__/__init__.cpython-310.pyc +0 -0
  37. dbdicom/external/dcm4che/__pycache__/__init__.cpython-37.pyc +0 -0
  38. dbdicom/external/dcm4che/bin/__pycache__/__init__.cpython-310.pyc +0 -0
  39. dbdicom/external/dcm4che/bin/__pycache__/__init__.cpython-37.pyc +0 -0
  40. dbdicom/external/dcm4che/lib/linux-x86/libclib_jiio.so +0 -0
  41. dbdicom/external/dcm4che/lib/linux-x86-64/libclib_jiio.so +0 -0
  42. dbdicom/external/dcm4che/lib/linux-x86-64/libopencv_java.so +0 -0
  43. dbdicom/external/dcm4che/lib/solaris-sparc/libclib_jiio.so +0 -0
  44. dbdicom/external/dcm4che/lib/solaris-sparc/libclib_jiio_vis.so +0 -0
  45. dbdicom/external/dcm4che/lib/solaris-sparc/libclib_jiio_vis2.so +0 -0
  46. dbdicom/external/dcm4che/lib/solaris-sparcv9/libclib_jiio.so +0 -0
  47. dbdicom/external/dcm4che/lib/solaris-sparcv9/libclib_jiio_vis.so +0 -0
  48. dbdicom/external/dcm4che/lib/solaris-sparcv9/libclib_jiio_vis2.so +0 -0
  49. dbdicom/external/dcm4che/lib/solaris-x86/libclib_jiio.so +0 -0
  50. dbdicom/external/dcm4che/lib/solaris-x86-64/libclib_jiio.so +0 -0
  51. dbdicom/manager.py +0 -2077
  52. dbdicom/message.py +0 -119
  53. dbdicom/record.py +0 -1526
  54. dbdicom/types/database.py +0 -107
  55. dbdicom/types/instance.py +0 -184
  56. dbdicom/types/patient.py +0 -40
  57. dbdicom/types/series.py +0 -816
  58. dbdicom/types/study.py +0 -58
  59. dbdicom/utils/variables.py +0 -155
  60. dbdicom/utils/vreg.py +0 -2626
  61. dbdicom/wrappers/__init__.py +0 -7
  62. dbdicom/wrappers/dipy.py +0 -462
  63. dbdicom/wrappers/elastix.py +0 -855
  64. dbdicom/wrappers/numpy.py +0 -119
  65. dbdicom/wrappers/scipy.py +0 -1413
  66. dbdicom/wrappers/skimage.py +0 -1030
  67. dbdicom/wrappers/sklearn.py +0 -151
  68. dbdicom/wrappers/vreg.py +0 -273
  69. dbdicom-0.2.0.dist-info/METADATA +0 -276
  70. dbdicom-0.2.0.dist-info/RECORD +0 -81
  71. {dbdicom-0.2.0.dist-info → dbdicom-0.3.16.dist-info/licenses}/LICENSE +0 -0
  72. {dbdicom-0.2.0.dist-info → dbdicom-0.3.16.dist-info}/top_level.txt +0 -0
dbdicom/ds/dataset.py DELETED
@@ -1,841 +0,0 @@
1
- """A collection of tools to extend the functionality of pydicom datasets."""
2
-
3
- import os
4
- from datetime import datetime
5
-
6
- import numpy as np
7
- import pandas as pd
8
- from matplotlib import cm
9
- import nibabel as nib
10
- import pydicom
11
- from pydicom.dataset import Dataset
12
- from pydicom.sequence import Sequence
13
- from pydicom.util.codify import code_file
14
- import pydicom.config
15
-
16
- import dbdicom.utils.image as image
17
- import dbdicom.utils.variables as variables
18
-
19
- # This ensures that dates and times are read as TM, DT and DA classes
20
- pydicom.config.datetime_conversion= True
21
-
22
-
23
class DbDataset(Dataset):
    """pydicom Dataset extended with dbdicom convenience methods.

    Every method forwards to the module-level function of the same name,
    passing the dataset itself as the first argument.
    """

    def __init__(self, dataset=None):
        super().__init__()
        if dataset is not None:
            # NOTE(review): this aliases (does not copy) the wrapped dataset's
            # attribute dictionary, so the two objects share state.
            self.__dict__ = dataset.__dict__

    def write(self, file, status=None):
        write(self, file, status=status)

    def get_values(self, tags):
        return get_values(self, tags)

    def set_values(self, tags, values):
        return set_values(self, tags, values)

    def get_lut(self):
        return get_lut(self)

    def set_lut(*args, **kwargs):
        # self arrives implicitly as the first positional argument
        set_lut(*args, **kwargs)

    def get_colormap(self):
        return get_colormap(self)

    def set_colormap(*args, **kwargs):
        set_colormap(*args, **kwargs)

    # Named get_/set_pixel_array rather than a `pixel_array` property so as
    # not to clash with pydicom's own pixel_array attribute.
    def get_pixel_array(self):
        return get_pixel_array(self)

    def set_pixel_array(self, array, value_range=None):
        set_pixel_array(self, array, value_range=value_range)

    def map_mask_to(self, ds_target):
        return map_mask_to(self, ds_target)

    ##
    ## CUSTOM ATTRIBUTES
    ## (dispatched by get_values/set_values for string tags that are not
    ## standard DICOM keywords)
    ##

    def get_attribute_affine_matrix(self):
        return get_affine_matrix(self)

    def set_attribute_affine_matrix(*args, **kwargs):
        set_affine_matrix(*args, **kwargs)

    def get_attribute_window(self):
        return get_window(self)

    def set_attribute_window(self):
        # NOTE(review): set_window requires (center, width); calling this as
        # written would raise TypeError — confirm the intended signature.
        set_window(self)

    def get_attribute_lut(self):
        return get_lut(self)

    def set_attribute_lut(*args, **kwargs):
        set_lut(*args, **kwargs)

    def get_attribute_colormap(self):
        return get_colormap(self)

    def set_attribute_colormap(*args, **kwargs):
        set_colormap(*args, **kwargs)
91
-
92
-
93
-
94
def get_window(ds):
    """Return (centre, width) of the display window after rescale is applied.

    Values missing from the dataset are derived from the pixel data:
    centre defaults to the mid-point of the value range, width to 90% of it.
    """
    # Fix: initialise both to None so the fallback below also works when the
    # attributes are absent (the original raised NameError in that case).
    centre, width = None, None
    if 'WindowCenter' in ds:
        centre = ds.WindowCenter
    if 'WindowWidth' in ds:
        width = ds.WindowWidth
    if centre is None or width is None:
        array = ds.get_pixel_array()
        minimum = np.min(array)
        maximum = np.max(array)
        if centre is None:
            centre = (maximum + minimum) / 2
        if width is None:
            width = 0.9 * (maximum - minimum)
    return centre, width
113
-
114
def set_window(ds, center, width):
    """Store the display window centre and width on the dataset."""
    ds.WindowCenter, ds.WindowWidth = center, width
117
-
118
-
119
def read(file, dialog=None, nifti=False):
    """Read a DICOM file (or DICOM header embedded in a NIfTI) as a DbDataset.

    Parameters
    ----------
    file : str
        Path to the file to read.
    dialog : optional
        If given, its ``information`` method is called with the error message.
    nifti : bool
        If True, load a NIfTI file whose first header extension holds the
        DICOM dataset, and copy the NIfTI voxel data into its pixel array.

    Raises
    ------
    FileNotFoundError
        On any read failure (interface preserved from the original).
    """
    try:
        if nifti:
            nim = nib.load(file)
            ds = nim.header.extensions[0].get_content()
            array = nim.get_fdata()
            set_pixel_array(ds, array)
        else:
            ds = pydicom.dcmread(file)
        return DbDataset(ds)
    except Exception:
        # Fix: was a bare `except:`, which also swallowed KeyboardInterrupt
        # and SystemExit.
        message = "Failed to read " + file
        if dialog is not None:
            dialog.information(message)
        raise FileNotFoundError(message)
134
-
135
-
136
def write(ds, file, status=None):
    """Save dataset `ds` to `file`, creating parent directories as needed.

    `status` is accepted for interface compatibility but is unused.
    """
    directory = os.path.dirname(file)
    # Fix: guard against an empty dirname (file in the current directory) —
    # os.makedirs('') raises FileNotFoundError.
    if directory and not os.path.exists(directory):
        os.makedirs(directory)
    ds.save_as(file, write_like_original=False)
155
-
156
-
157
def codify(source_file, save_file, **kwargs):
    """Generate pydicom python code for `source_file` and write it to `save_file`.

    Extra keyword arguments are forwarded to pydicom's ``code_file``.
    """
    # Fix: don't shadow the builtin `str`; use a context manager so the
    # output file is closed even if the write fails.
    code = code_file(source_file, **kwargs)
    with open(save_file, "w") as f:
        f.write(code)
163
-
164
-
165
def read_data(files, tags, status=None, path=None, message='Reading DICOM folder..', images_only=False):
    """Read a list of tags from a list of files.

    Arguments
    ---------
    files : str or list
        A filepath or a list of filepaths.
    tags : str or list
        A DICOM tag or a list of DICOM tags.
    status : StatusBar, optional
        Progress is reported via ``status.progress``.
    path : str, optional
        If given, dictionary keys are paths relative to this directory.
    images_only : bool
        If True, skip datasets without a 'Rows' attribute (non-images).

    Returns
    -------
    dict
        Maps each readable file (or relative path) to its list of tag values.
        Unreadable files are silently skipped.
    """
    if not isinstance(files, list):
        files = [files]
    if not isinstance(tags, list):
        tags = [tags]
    data = {}  # fix: the original named this `dict`, shadowing the builtin
    for i, file in enumerate(files):
        if status is not None:
            status.progress(i + 1, len(files))
        try:
            ds = pydicom.dcmread(file, force=True, specific_tags=tags + ['Rows'])
        except Exception:  # fix: was a bare except
            continue
        if not isinstance(ds, pydicom.dataset.FileDataset):
            continue
        if 'TransferSyntaxUID' not in ds.file_meta:
            continue
        if images_only and 'Rows' not in ds:
            continue
        index = file if path is None else os.path.relpath(file, path)
        data[index] = get_values(ds, tags)
    return data
209
-
210
-
211
-
212
def read_dataframe(files, tags, status=None, path=None, message='Reading DICOM folder..', images_only=False):
    """Read a list of tags from a list of files into a pandas DataFrame.

    Arguments
    ---------
    files : str or list
        A filepath or a list of filepaths.
    tags : str or list
        A DICOM tag or a list of DICOM tags.
    status : StatusBar, optional
        Progress is reported via ``status.progress``.
    path : str, optional
        If given, the index holds paths relative to this directory.
    images_only : bool
        If True, skip datasets without a 'Rows' attribute (non-images).

    Returns
    -------
    pandas.DataFrame
        One row per readable file, indexed by file path, one column per tag.
        Unreadable files are silently skipped.
    """
    if not isinstance(files, list):
        files = [files]
    if not isinstance(tags, list):
        tags = [tags]
    rows = []
    indices = []
    for i, file in enumerate(files):
        if status is not None:
            status.progress(i + 1, len(files))
        try:
            ds = pydicom.dcmread(file, force=True, specific_tags=tags + ['Rows'])
        except Exception:  # fix: was a bare except
            continue
        if not isinstance(ds, pydicom.dataset.FileDataset):
            continue
        if 'TransferSyntaxUID' not in ds.file_meta:
            continue
        if images_only and 'Rows' not in ds:
            continue
        rows.append(get_values(ds, tags))
        indices.append(file if path is None else os.path.relpath(file, path))
    return pd.DataFrame(rows, index=indices, columns=tags)
259
-
260
-
261
-
262
def set_values(ds, tags, values, VR=None):
    """Set DICOM tags on the pydicom dataset in memory.

    Private and standard tags can both be set. ``tags``, ``values`` and ``VR``
    must either be lists of equal lengths, or single values. VR is required
    for private tags. If private and standard tags are set in the same call,
    VR can be set to any value for the standard tags, e.g.::

        set_values(ds, ['Rows', (0x0019, 0x0100)], [128, 'Hello'], [None, 'LO'])

    Setting a value to None deletes the element (or forwards None to the
    custom ``set_attribute_*`` handler). Returns the dataset.
    """
    if not isinstance(tags, list):
        tags, values, VR = [tags], [values], [VR]
    elif VR is None:
        VR = [None] * len(tags)

    for tag, value, vr in zip(tags, values, VR):
        if isinstance(tag, str):
            if value is None:
                if hasattr(ds, tag):
                    # Setting a standard DICOM attribute to None deletes it
                    del ds[tag]
                elif hasattr(ds, 'set_attribute_' + tag):
                    # Setting a custom attribute to None
                    getattr(ds, 'set_attribute_' + tag)(value)
            elif hasattr(ds, tag):
                ds[tag].value = format_value(value, tag=tag)
            elif hasattr(ds, 'set_attribute_' + tag):
                getattr(ds, 'set_attribute_' + tag)(value)
            else:
                _add_new(ds, tag, value, VR=vr)
        else:  # tag given as a hexadecimal (group, element) tuple
            if value is None:
                if tag in ds:
                    del ds[tag]
            elif tag in ds:
                ds[tag].value = format_value(value, tag=tag)
            else:
                _add_new(ds, tag, value, VR=vr)
    return ds
310
-
311
-
312
def _add_new(ds, tag, value, VR='OW'):
    """Add a new data element to ds for a tag that is not yet present.

    Standard tags get their VR from the DICOM dictionary (with the ambiguous
    'US or SS' / 'OB or OW' cases resolved); private tags use the given VR
    and get a private creator block if one does not exist yet.
    """
    if not isinstance(tag, pydicom.tag.BaseTag):
        tag = pydicom.tag.Tag(tag)
    if tag.is_private:
        # Ensure the private creator element exists before adding the element.
        if (tag.group, 0x0010) not in ds:
            ds.private_block(tag.group, 'Wezel ' + str(tag.group), create=True)
        ds.add_new(tag, VR, format_value(value, VR))
        return
    # Standard tag: resolve the VR from the dictionary.
    value_repr = pydicom.datadict.dictionary_VR(tag)
    if value_repr == 'US or SS':
        value_repr = 'US' if value >= 0 else 'SS'
    elif value_repr == 'OB or OW':
        value_repr = 'OW'
    ds.add_new(tag, value_repr, format_value(value, value_repr))
329
-
330
-
331
def get_values(ds, tags):
    """Return the value(s) of the given tag(s) from a dataset.

    A single (non-list) tag returns a single value; a list of tags returns a
    list of values. Missing tags yield None unless they can be derived
    (see ``derive_data_element``).
    """
    # https://pydicom.github.io/pydicom/stable/guides/element_value_types.html
    if not isinstance(tags, list):
        return get_values(ds, [tags])[0]

    row = []
    for tag in tags:
        value = None
        if isinstance(tag, str):
            # A string tag that is not a dataset attribute may be a custom
            # attribute with a get_attribute_* accessor.
            if hasattr(ds, tag):
                # ELIMINATE THIS STEP - return pydicom datatypes
                value = to_set_type(ds[tag].value, pydicom.datadict.dictionary_VR(tag))
            elif hasattr(ds, 'get_attribute_' + tag):
                value = getattr(ds, 'get_attribute_' + tag)()
        elif tag in ds:
            # Tag given as a hexadecimal (group, element) tuple
            value = to_set_type(ds[tag].value, pydicom.datadict.dictionary_VR(tag))

        # If the tag is not present, check whether it can be derived.
        if value is None:
            value = derive_data_element(ds, tag)

        row.append(value)
    return row
363
-
364
-
365
def derive_data_element(ds, tag):
    """Derive the value of a tag that is absent but computable from others.

    Currently only SliceLocation (0020,1041), computed from the image
    orientation and position. Returns None for any other tag.
    To be extended ad hoc with other derivable tags.
    """
    if tag in ('SliceLocation', (0x0020, 0x1041)):
        if 'ImageOrientationPatient' in ds and 'ImagePositionPatient' in ds:
            return image.slice_location(
                ds['ImageOrientationPatient'].value,
                ds['ImagePositionPatient'].value,
            )
    return None
375
-
376
-
377
def format_value(value, VR=None, tag=None):
    """Coerce a value into a form that pydicom can store for the given VR.

    When VR is None it is looked up from the tag — note this is a dictionary
    lookup on every write, which is slow.
    """
    if VR is None:
        VR = pydicom.datadict.dictionary_VR(tag)
    if VR == 'LO' and len(value) > 64:
        # Long String is limited to 64 characters; keep the tail.
        return value[-64:]
    if VR == 'TM':
        return variables.seconds_to_str(value)
    return value
395
-
396
-
397
def to_set_type(value, VR):
    """Convert pydicom value types to the python types used to set parameters.

    Dispatches on the value's class name so that pydicom itself does not need
    to be imported here. Plain python values fall through unchanged.
    """
    if VR == 'TM' and isinstance(value, str):
        # pydicom sometimes returns plain string values for TM data types
        return variables.str_to_seconds(value)

    cls = value.__class__.__name__
    if cls == 'MultiValue':
        return [to_set_type(v, VR) for v in value]
    if cls == 'Sequence':
        return list(value)
    if cls in ('PersonName', 'UID'):
        return str(value)
    if cls == 'TM':
        return variables.time_to_seconds(value)  # could return datetime.time
    if cls == 'DT':
        return variables.datetime_to_str(value)  # could return datetime.datetime
    if cls == 'DA':
        return variables.date_to_str(value)  # could return datetime.date
    if cls == 'IS':
        return int(value)
    if cls == 'DSfloat':
        return float(value)
    if cls == 'DSdecimal':
        # NOTE(review): truncates the decimal to int — float looks more
        # appropriate for a decimal string; confirm intent.
        return int(value)
    return value
430
-
431
-
432
def new_uid(n=None):
    """Generate one fresh DICOM UID (n is None) or a list of n fresh UIDs."""
    if n is None:
        return pydicom.uid.generate_uid()
    return [pydicom.uid.generate_uid() for _ in range(n)]
438
-
439
-
440
def get_affine_matrix(ds):
    """Return the affine transformation matrix for a DICOM image.

    NOTE(review): uses SliceThickness as the slice spacing; the original's
    commented-out code considered SpacingBetweenSlices first — confirm which
    is intended for this data.
    """
    return image.affine_matrix(
        get_values(ds, 'ImageOrientationPatient'),
        get_values(ds, 'ImagePositionPatient'),
        get_values(ds, 'PixelSpacing'),
        get_values(ds, 'SliceThickness'),
    )
453
-
454
-
455
def set_affine_matrix(ds, affine):
    """Write the geometry tags encoded in a 4x4 affine onto the dataset."""
    geometry = image.dismantle_affine_matrix(affine)
    set_values(ds, 'PixelSpacing', geometry['PixelSpacing'])
    # Slice spacing is stored in SliceThickness (not SpacingBetweenSlices),
    # mirroring get_affine_matrix.
    set_values(ds, 'SliceThickness', geometry['SpacingBetweenSlices'])
    set_values(ds, 'ImageOrientationPatient', geometry['ImageOrientationPatient'])
    set_values(ds, 'ImagePositionPatient', geometry['ImagePositionPatient'])
462
-
463
-
464
def map_mask_to(ds_source, ds_target):
    """Project the non-zero pixels of a source image onto a target image grid.

    Returns a (Columns, Rows) float array in the target's (transposed) pixel
    layout, with 1.0 where a source mask pixel lands and 0.0 elsewhere.
    Mapping is nearest-neighbour; pixels falling off-plane (z != 0) or
    outside the target grid are dropped.
    """
    # Coordinates (x, y, 0) of every non-zero source pixel.
    nonzero = np.transpose(np.where(ds_source.get_pixel_array() != 0))
    coords = np.array([[r[0], r[1], 0] for r in nonzero])

    # Transformation from source pixel coordinates to target pixel coordinates.
    affine_source = ds_source.get_values('affine_matrix')
    affine_target = ds_target.get_values('affine_matrix')
    source_to_target = np.linalg.inv(affine_target).dot(affine_source)

    # Transform and round to the nearest target pixel.
    coords = nib.affines.apply_affine(source_to_target, coords)
    coords = np.round(coords).astype(int)

    def _inside(c):
        # In-plane and within the target grid bounds.
        return (c[2] == 0) & (0 <= c[0]) & (c[0] < ds_target.Columns) & (0 <= c[1]) & (c[1] < ds_target.Rows)

    x = tuple([c[0] for c in coords if _inside(c)])
    y = tuple([c[1] for c in coords if _inside(c)])

    # Write the mapped mask into a blank target-shaped array.
    mask = np.zeros((ds_target.Columns, ds_target.Rows))
    mask[(x, y)] = 1.0
    return mask
502
-
503
# All matplotlib colormaps supported by dbdicom, grouped by family.
# TODO: include support for DICOM-native colormaps
# (see the pydicom guide on working with pixel data).
COLORMAPS = [
    # Perceptually uniform
    'cividis', 'magma', 'plasma', 'viridis',
    # Sequential
    'Greys', 'Purples', 'Blues', 'Greens', 'Oranges', 'Reds',
    'YlOrBr', 'YlOrRd', 'OrRd', 'PuRd', 'RdPu', 'BuPu',
    'GnBu', 'PuBu', 'YlGnBu', 'PuBuGn', 'BuGn', 'YlGn',
    'binary', 'gist_yarg', 'gist_gray', 'bone', 'pink',
    'spring', 'summer', 'autumn', 'winter', 'cool', 'Wistia',
    'hot', 'afmhot', 'gist_heat', 'copper',
    # Diverging
    'PiYG', 'PRGn', 'BrBG', 'PuOr', 'RdGy', 'RdBu',
    'RdYlBu', 'RdYlGn', 'Spectral', 'coolwarm', 'bwr', 'seismic',
    # Cyclic
    'twilight', 'twilight_shifted', 'hsv',
    # Miscellaneous
    'flag', 'prism', 'ocean', 'gist_earth', 'terrain', 'gist_stern',
    'gnuplot', 'gnuplot2', 'CMRmap', 'cubehelix', 'brg', 'turbo',
    'gist_rainbow', 'rainbow', 'jet', 'nipy_spectral', 'gist_ncar',
]
520
-
521
def get_colormap(ds):
    """Return the name of the colormap stored on the dataset, if any.

    The colormap name is hijacked into the free-text field
    WindowCenterWidthExplanation; ContentDescription (0070,0081) would be a
    more appropriate home. Returns None when no supported colormap is stored.
    """
    if 'WindowCenterWidthExplanation' in ds:
        name = ds.WindowCenterWidthExplanation
        if name in COLORMAPS:
            return name
    return None
530
-
531
-
532
def set_colormap(ds, colormap=None):
    """Apply a matplotlib colormap to the image, or reset to greyscale.

    With colormap=None, restore MONOCHROME2 and remove any stored colormap
    name and palette LUT elements. Otherwise store the colormap name and
    build a 256-entry RGB palette from the matplotlib colormap.
    """
    if colormap is None:
        ds.PhotometricInterpretation = 'MONOCHROME2'
        for attr in (
            'WindowCenterWidthExplanation',
            'RGBLUTTransferFunction',
            'GreenPaletteColorLookupTableData',
            'RedPaletteColorLookupTableData',
            'BluePaletteColorLookupTableData',
            'RedPaletteColorLookupTableDescriptor',
            'GreenPaletteColorLookupTableDescriptor',
            'BluePaletteColorLookupTableDescriptor',
        ):
            if hasattr(ds, attr):
                delattr(ds, attr)
    else:
        ds.WindowCenterWidthExplanation = colormap
        # Sample the colormap into a (256, 4) float RGBA array in [0, 1]
        # and store the RGB part as the palette LUT.
        RGBA = cm.ScalarMappable(cmap=colormap).to_rgba(np.arange(256))
        set_lut(ds, RGBA[:, :3])
557
-
558
-
559
def set_lut(ds, RGB):
    """Store an RGB lookup table as a DICOM palette on the dataset.

    RGB : float array of shape (256, 3) with values in [0, 1].
    NOTE(review): the palette descriptors are hard-coded to
    (255, 0, BitsAllocated) — confirm for LUTs that are not 256 entries.
    """
    ds.PhotometricInterpretation = 'PALETTE COLOR'

    # Fix: scale a copy — the original used `RGB *= ...`, silently modifying
    # the caller's array in place.
    scaled = RGB * (np.power(2, ds.BitsAllocated) - 1)
    if ds.BitsAllocated == 8:
        scaled = scaled.astype(np.ubyte)
    elif ds.BitsAllocated == 16:
        scaled = scaled.astype(np.uint16)

    # Palette descriptors: (number of entries, first mapped value, bits).
    ds.add_new('0x00281101', 'US', [255, 0, ds.BitsAllocated])
    ds.add_new('0x00281102', 'US', [255, 0, ds.BitsAllocated])
    ds.add_new('0x00281103', 'US', [255, 0, ds.BitsAllocated])

    # Store each channel, scaled to the available integer range.
    ds.RedPaletteColorLookupTableData = bytes(scaled[:, 0])
    ds.GreenPaletteColorLookupTableData = bytes(scaled[:, 1])
    ds.BluePaletteColorLookupTableData = bytes(scaled[:, 2])
580
-
581
-
582
def get_lut(ds):
    """Return the palette LUT as an (n, 3) float array in [0, 1], or None.

    Returns None when the dataset has no PhotometricInterpretation or is not
    a PALETTE COLOR image. Raises ValueError for unsupported BitsAllocated.
    """
    if 'PhotometricInterpretation' not in ds:
        return None
    if ds.PhotometricInterpretation != 'PALETTE COLOR':
        return None

    if ds.BitsAllocated == 8:
        dtype = np.ubyte
    elif ds.BitsAllocated == 16:
        dtype = np.uint16
    else:
        # Fix: the original fell through with `dtype` undefined, producing an
        # obscure NameError for any other bit depth.
        raise ValueError(f"Unsupported BitsAllocated: {ds.BitsAllocated}")

    channels = []
    for color in ('Red', 'Green', 'Blue'):
        data = getattr(ds, color + 'PaletteColorLookupTableData')
        # Descriptor item 2 is the number of bits per LUT entry.
        bits = getattr(ds, color + 'PaletteColorLookupTableDescriptor')[2]
        channel = np.frombuffer(data, dtype=dtype).astype(np.float32)
        channel = channel * (1.0 / (np.power(2, bits) - 1))
        channels.append(channel)

    return np.transpose(channels)
612
-
613
-
614
def get_pixel_array(ds):
    """Return the rescaled pixel array of an image, transposed.

    Applies RescaleSlope/RescaleIntercept (defaulting to 1 and 0) and returns
    float32 data. Returns None if the pixel data cannot be read.
    """
    try:
        array = ds.pixel_array
    except Exception:
        # Fix: was a bare except; best-effort behaviour (return None) kept.
        return None
    array = array.astype(np.float32)
    slope = float(getattr(ds, 'RescaleSlope', 1))
    intercept = float(getattr(ds, 'RescaleIntercept', 0))
    array *= slope
    array += intercept
    return np.transpose(array)
628
-
629
-
630
def set_pixel_array(ds, array, value_range=None):
    """Encode a float array into the dataset's PixelData.

    The array is clipped to value_range, scaled to the unsigned integer range
    of BitsAllocated, and stored transposed. RescaleSlope/Intercept are set
    to the inverse scaling so that get_pixel_array recovers original values.
    """
    array = image.clip(array.astype(np.float32), value_range=value_range)
    array, slope, intercept = image.scale_to_range(array, ds.BitsAllocated)
    array = np.transpose(array)

    shape = np.shape(array)

    ds.PixelRepresentation = 0  # unsigned integers
    # set_values (rather than direct assignment) so VR resolution applies.
    ds.set_values('SmallestImagePixelValue', int(0))
    ds.set_values('LargestImagePixelValue', int(2**ds.BitsAllocated - 1))
    # Invert the storage scaling: value = stored * RescaleSlope + RescaleIntercept.
    ds.RescaleSlope = 1 / slope
    ds.RescaleIntercept = -intercept / slope
    ds.Rows = shape[0]
    ds.Columns = shape[1]
    ds.PixelData = array.tobytes()
655
-
656
-
657
def module_patient():
    """Return the DICOM keywords of the Patient module attributes."""
    return [
        'ReferencedPatientSequence',
        'PatientName',
        'PatientID',
        'IssuerOfPatientID',
        'TypeOfPatientID',
        'IssuerOfPatientIDQualifiersSequence',
        'SourcePatientGroupIdentificationSequence',
        'GroupOfPatientsIdentificationSequence',
        'PatientBirthDate',
        'PatientBirthTime',
        'PatientBirthDateInAlternativeCalendar',
        'PatientDeathDateInAlternativeCalendar',
        'PatientAlternativeCalendar',
        'PatientSex',
        'QualityControlSubject',
        'StrainDescription',
        'StrainNomenclature',
        'StrainStockSequence',
        'StrainAdditionalInformation',
        'StrainCodeSequence',
        'GeneticModificationsSequence',
        'OtherPatientNames',
        'OtherPatientIDsSequence',
        'ReferencedPatientPhotoSequence',
        'EthnicGroup',
        'PatientSpeciesDescription',
        'PatientSpeciesCodeSequence',
        'PatientBreedDescription',
        'PatientBreedCodeSequence',
        'BreedRegistrationSequence',
        'ResponsiblePerson',
        'ResponsiblePersonRole',
        'ResponsibleOrganization',
        'PatientComments',
        'PatientIdentityRemoved',
        'DeidentificationMethod',
        'DeidentificationMethodCodeSequence',
        'ClinicalTrialSponsorName',
        'ClinicalTrialProtocolID',
        'ClinicalTrialProtocolName',
        'ClinicalTrialSiteID',
        'ClinicalTrialSiteName',
        'ClinicalTrialSubjectID',
        'ClinicalTrialSubjectReadingID',
        'ClinicalTrialProtocolEthicsCommitteeName',
        'ClinicalTrialProtocolEthicsCommitteeApprovalNumber',
    ]
707
-
708
-
709
def module_study():
    """Return the DICOM keywords of the Study module attributes."""
    return [
        'StudyDate',
        'StudyTime',
        'AccessionNumber',
        'IssuerOfAccessionNumberSequence',
        'ReferringPhysicianName',
        'ReferringPhysicianIdentificationSequence',
        'ConsultingPhysicianName',
        'ConsultingPhysicianIdentificationSequence',
        'StudyDescription',
        'ProcedureCodeSequence',
        'PhysiciansOfRecord',
        'PhysiciansOfRecordIdentificationSequence',
        'NameOfPhysiciansReadingStudy',
        'PhysiciansReadingStudyIdentificationSequence',
        'ReferencedStudySequence',
        'StudyInstanceUID',
        'StudyID',
        'RequestingService',
        'RequestingServiceCodeSequence',
        'ReasonForPerformedProcedureCodeSequence',
        'AdmittingDiagnosesDescription',
        'AdmittingDiagnosesCodeSequence',
        'PatientAge',
        'PatientSize',
        'PatientSizeCodeSequence',
        'PatientBodyMassIndex',
        'MeasuredAPDimension',
        'MeasuredLateralDimension',
        'PatientWeight',
        'MedicalAlerts',
        'Allergies',
        'Occupation',
        'SmokingStatus',
        'AdditionalPatientHistory',
        'PregnancyStatus',
        'LastMenstrualDate',
        'PatientSexNeutered',
        'ReasonForVisit',
        'ReasonForVisitCodeSequence',
        'AdmissionID',
        'IssuerOfAdmissionIDSequence',
        'ServiceEpisodeID',
        'ServiceEpisodeDescription',
        'IssuerOfServiceEpisodeIDSequence',
        'PatientState',
        'ClinicalTrialTimePointID',
        'ClinicalTrialTimePointDescription',
        'LongitudinalTemporalOffsetFromEvent',
        'LongitudinalTemporalEventType',
        'ConsentForClinicalTrialUseSequence',
    ]
763
-
764
-
765
def module_series():
    """Return the DICOM keywords of the Series module attributes."""
    return [
        'SeriesDate',
        'SeriesTime',
        'Modality',
        'SeriesDescription',
        'SeriesDescriptionCodeSequence',
        'PerformingPhysicianName',
        'PerformingPhysicianIdentificationSequence',
        'OperatorsName',
        'OperatorIdentificationSequence',
        'ReferencedPerformedProcedureStepSequence',
        'RelatedSeriesSequence',
        'AnatomicalOrientationType',
        'BodyPartExamined',
        'ProtocolName',
        'PatientPosition',
        'ReferencedDefinedProtocolSequence',
        'ReferencedPerformedProtocolSequence',
        'SeriesInstanceUID',
        'SeriesNumber',
        'Laterality',
        'SmallestPixelValueInSeries',
        'LargestPixelValueInSeries',
        'PerformedProcedureStepStartDate',
        'PerformedProcedureStepStartTime',
        'PerformedProcedureStepEndDate',
        'PerformedProcedureStepEndTime',
        'PerformedProcedureStepID',
        'PerformedProcedureStepDescription',
        'PerformedProtocolCodeSequence',
        'RequestAttributesSequence',
        'CommentsOnThePerformedProcedureStep',
        'ClinicalTrialCoordinatingCenterName',
        'ClinicalTrialSeriesID',
        'ClinicalTrialSeriesDescription',
    ]
803
-
804
-
805
- # def _initialize(ds, UID=None, ref=None): # ds is pydicom dataset
806
-
807
- # # Date and Time of Creation
808
- # dt = datetime.now()
809
- # timeStr = dt.strftime('%H%M%S') # long format with micro seconds
810
-
811
- # ds.ContentDate = dt.strftime('%Y%m%d')
812
- # ds.ContentTime = timeStr
813
- # ds.AcquisitionDate = dt.strftime('%Y%m%d')
814
- # ds.AcquisitionTime = timeStr
815
- # ds.SeriesDate = dt.strftime('%Y%m%d')
816
- # ds.SeriesTime = timeStr
817
- # ds.InstanceCreationDate = dt.strftime('%Y%m%d')
818
- # ds.InstanceCreationTime = timeStr
819
-
820
- # if UID is not None:
821
-
822
- # # overwrite UIDs
823
- # ds.PatientID = UID[0]
824
- # ds.StudyInstanceUID = UID[1]
825
- # ds.SeriesInstanceUID = UID[2]
826
- # ds.SOPInstanceUID = UID[3]
827
-
828
- # if ref is not None:
829
-
830
- # # Series, Instance and Class for Reference
831
- # refd_instance = Dataset()
832
- # refd_instance.ReferencedSOPClassUID = ref.SOPClassUID
833
- # refd_instance.ReferencedSOPInstanceUID = ref.SOPInstanceUID
834
-
835
- # refd_series = Dataset()
836
- # refd_series.ReferencedInstanceSequence = Sequence([refd_instance])
837
- # refd_series.SeriesInstanceUID = ds.SeriesInstanceUID
838
-
839
- # ds.ReferencedSeriesSequence = Sequence([refd_series])
840
-
841
- # return ds