dbdicom 0.2.5__py3-none-any.whl → 0.3.0__py3-none-any.whl

This diff compares publicly available package versions as released to their respective public registries; it is provided for informational purposes only.

Potentially problematic release.
Files changed (52)
  1. dbdicom/__init__.py +1 -28
  2. dbdicom/api.py +267 -0
  3. dbdicom/const.py +144 -0
  4. dbdicom/dataset.py +752 -0
  5. dbdicom/dbd.py +719 -0
  6. dbdicom/external/__pycache__/__init__.cpython-311.pyc +0 -0
  7. dbdicom/external/dcm4che/__pycache__/__init__.cpython-311.pyc +0 -0
  8. dbdicom/external/dcm4che/bin/__pycache__/__init__.cpython-311.pyc +0 -0
  9. dbdicom/register.py +527 -0
  10. dbdicom/{ds/types → sop_classes}/ct_image.py +2 -16
  11. dbdicom/{ds/types → sop_classes}/enhanced_mr_image.py +153 -26
  12. dbdicom/{ds/types → sop_classes}/mr_image.py +185 -140
  13. dbdicom/sop_classes/parametric_map.py +307 -0
  14. dbdicom/sop_classes/secondary_capture.py +140 -0
  15. dbdicom/sop_classes/segmentation.py +311 -0
  16. dbdicom/{ds/types → sop_classes}/ultrasound_multiframe_image.py +1 -15
  17. dbdicom/{ds/types → sop_classes}/xray_angiographic_image.py +2 -17
  18. dbdicom/utils/arrays.py +36 -0
  19. dbdicom/utils/files.py +0 -20
  20. dbdicom/utils/image.py +10 -629
  21. dbdicom-0.3.0.dist-info/METADATA +28 -0
  22. dbdicom-0.3.0.dist-info/RECORD +53 -0
  23. {dbdicom-0.2.5.dist-info → dbdicom-0.3.0.dist-info}/WHEEL +1 -1
  24. dbdicom/create.py +0 -457
  25. dbdicom/dro.py +0 -174
  26. dbdicom/ds/__init__.py +0 -10
  27. dbdicom/ds/create.py +0 -63
  28. dbdicom/ds/dataset.py +0 -869
  29. dbdicom/ds/dictionaries.py +0 -620
  30. dbdicom/ds/types/parametric_map.py +0 -226
  31. dbdicom/extensions/__init__.py +0 -9
  32. dbdicom/extensions/dipy.py +0 -448
  33. dbdicom/extensions/elastix.py +0 -503
  34. dbdicom/extensions/matplotlib.py +0 -107
  35. dbdicom/extensions/numpy.py +0 -271
  36. dbdicom/extensions/scipy.py +0 -1512
  37. dbdicom/extensions/skimage.py +0 -1030
  38. dbdicom/extensions/sklearn.py +0 -243
  39. dbdicom/extensions/vreg.py +0 -1390
  40. dbdicom/manager.py +0 -2132
  41. dbdicom/message.py +0 -119
  42. dbdicom/pipelines.py +0 -66
  43. dbdicom/record.py +0 -1893
  44. dbdicom/types/database.py +0 -107
  45. dbdicom/types/instance.py +0 -231
  46. dbdicom/types/patient.py +0 -40
  47. dbdicom/types/series.py +0 -2874
  48. dbdicom/types/study.py +0 -58
  49. dbdicom-0.2.5.dist-info/METADATA +0 -71
  50. dbdicom-0.2.5.dist-info/RECORD +0 -66
  51. {dbdicom-0.2.5.dist-info → dbdicom-0.3.0.dist-info/licenses}/LICENSE +0 -0
  52. {dbdicom-0.2.5.dist-info → dbdicom-0.3.0.dist-info}/top_level.txt +0 -0
dbdicom/ds/dataset.py DELETED
@@ -1,869 +0,0 @@
1
- """A colections of tools to extend functionality of pydicom datasets."""
2
-
3
- import os
4
- from datetime import datetime
5
-
6
- import numpy as np
7
- import pandas as pd
8
- from matplotlib import cm
9
- import nibabel as nib
10
- import pydicom
11
- from pydicom.dataset import Dataset
12
- from pydicom.sequence import Sequence
13
- from pydicom.util.codify import code_file
14
- import pydicom.config
15
-
16
- import dbdicom.utils.image as image
17
- import dbdicom.utils.variables as variables
18
-
19
- # This ensures that dates and times are read as TM, DT and DA classes
20
- pydicom.config.datetime_conversion= True
21
-
22
-
23
- class DbDataset(Dataset):
24
-
25
- def __init__(self, dataset=None):
26
- super().__init__()
27
-
28
- if dataset is not None:
29
- self.__dict__ = dataset.__dict__
30
-
31
- def write(self, file, status=None):
32
- write(self, file, status=status)
33
-
34
- def get_values(self, tags):
35
- return get_values(self, tags)
36
-
37
- def set_values(self, tags, values):
38
- return set_values(self, tags, values)
39
-
40
- def get_lut(self):
41
- return get_lut(self)
42
-
43
- def set_lut(*args, **kwargs):
44
- set_lut(*args, **kwargs)
45
-
46
- def get_colormap(self):
47
- return get_colormap(self)
48
-
49
- def set_colormap(*args, **kwargs):
50
- set_colormap(*args, **kwargs)
51
-
52
- # Should be just pixel_array to fit in with logic
53
- # of custom attributes but conflicts with pydicom definition
54
- # go back to just array?
55
- def get_pixel_array(self):
56
- return get_pixel_array(self)
57
-
58
- def set_pixel_array(self, array, value_range=None):
59
- set_pixel_array(self, array, value_range=value_range)
60
-
61
- def map_mask_to(self, ds_target):
62
- return map_mask_to(self, ds_target)
63
-
64
- ##
65
- ## CUSTOM ATTRIBUTES
66
- ##
67
-
68
- def get_attribute_affine_matrix(self):
69
- return get_affine_matrix(self)
70
-
71
- def set_attribute_affine_matrix(*args, **kwargs):
72
- set_affine_matrix(*args, **kwargs)
73
-
74
- def get_attribute_window(self):
75
- return get_window(self)
76
-
77
- def set_attribute_window(self):
78
- set_window(self)
79
-
80
- def get_attribute_lut(self): # use _get_attribute to encode these
81
- return get_lut(self)
82
-
83
- def set_attribute_lut(*args, **kwargs): # use _set_attribute to encode these
84
- set_lut(*args, **kwargs)
85
-
86
- def get_attribute_colormap(self):
87
- return get_colormap(self)
88
-
89
- def set_attribute_colormap(*args, **kwargs):
90
- set_colormap(*args, **kwargs)
91
-
92
-
93
-
94
- def get_window(ds):
95
- """Centre and width of the pixel data after applying rescale slope and intercept"""
96
-
97
- if 'WindowCenter' in ds:
98
- centre = ds.WindowCenter
99
- if 'WindowWidth' in ds:
100
- width = ds.WindowWidth
101
- if centre is None or width is None:
102
- array = ds.get_pixel_array()
103
- #p = np.percentile(array, [25, 50, 75])
104
- min = np.min(array)
105
- max = np.max(array)
106
- if centre is None:
107
- centre = (max+min)/2
108
- #centre = p[1]
109
- if width is None:
110
- width = 0.9*(max-min)
111
- #width = p[2] - p[0]
112
- return centre, width
113
-
114
- def set_window(ds, center, width):
115
- ds.WindowCenter = center
116
- ds.WindowWidth = width
117
-
118
-
119
- def read(file, dialog=None, nifti=False):
120
- try:
121
- if nifti:
122
- nim = nib.load(file)
123
- ds = nim.header.extensions[0].get_content()
124
- array = nim.get_fdata()
125
- set_pixel_array(ds, array)
126
- else:
127
- ds = pydicom.dcmread(file)
128
- return DbDataset(ds)
129
- except:
130
- message = "Failed to read " + file
131
- if dialog is not None:
132
- dialog.information(message)
133
- raise FileNotFoundError(message)
134
-
135
-
136
- def write(ds, file, status=None):
137
- # check if directory exists and create it if not
138
- dir = os.path.dirname(file)
139
- if not os.path.exists(dir):
140
- os.makedirs(dir)
141
- ds.save_as(file, write_like_original=False)
142
- # try:
143
- # # check if directory exists and create it if not
144
- # dir = os.path.dirname(file)
145
- # if not os.path.exists(dir):
146
- # os.makedirs(dir)
147
- # ds.save_as(file, write_like_original=False)
148
- # except:
149
- # msg = 'Cannot write to file \n' + file
150
- # if status is not None:
151
- # status.message(msg)
152
- # else:
153
- # print(msg)
154
- # raise RuntimeError
155
-
156
-
157
- def codify(source_file, save_file, **kwargs):
158
-
159
- str = code_file(source_file, **kwargs)
160
- file = open(save_file, "w")
161
- file.write(str)
162
- file.close()
163
-
164
-
165
- def read_data(files, tags, status=None, path=None, message='Reading DICOM folder..', images_only=False):
166
- """Reads a list of tags in a list of files.
167
-
168
- Arguments
169
- ---------
170
- files : str or list
171
- A filepath or a list of filepaths
172
- tags : str or list
173
- A DICOM tag or a list of DICOM tags
174
- status : StatusBar
175
-
176
- Creates
177
- -------
178
- dataframe : pandas.DataFrame
179
- A Pandas dataframe with one row per file
180
- The index is the file path
181
- Each column corresponds to a Tag in the list of Tags
182
- The returned dataframe is sorted by the given tags.
183
- """
184
- if not isinstance(files, list):
185
- files = [files]
186
- if not isinstance(tags, list):
187
- tags = [tags]
188
- dict = {}
189
- for i, file in enumerate(files):
190
- if status is not None:
191
- status.progress(i+1, len(files))
192
- try:
193
- ds = pydicom.dcmread(file, force=True, specific_tags=tags+['Rows'])
194
- except:
195
- pass
196
- else:
197
- if isinstance(ds, pydicom.dataset.FileDataset):
198
- if 'TransferSyntaxUID' in ds.file_meta:
199
- if images_only:
200
- if not 'Rows' in ds:
201
- continue
202
- row = get_values(ds, tags)
203
- if path is None:
204
- index = file
205
- else:
206
- index = os.path.relpath(file, path)
207
- dict[index] = row
208
- return dict
209
-
210
-
211
-
212
- def read_dataframe(files, tags, status=None, path=None, message='Reading DICOM folder..', images_only=False):
213
- """Reads a list of tags in a list of files.
214
-
215
- Arguments
216
- ---------
217
- files : str or list
218
- A filepath or a list of filepaths
219
- tags : str or list
220
- A DICOM tag or a list of DICOM tags
221
- status : StatusBar
222
-
223
- Creates
224
- -------
225
- dataframe : pandas.DataFrame
226
- A Pandas dataframe with one row per file
227
- The index is the file path
228
- Each column corresponds to a Tag in the list of Tags
229
- The returned dataframe is sorted by the given tags.
230
- """
231
- if not isinstance(files, list):
232
- files = [files]
233
- if not isinstance(tags, list):
234
- tags = [tags]
235
- array = []
236
- dicom_files = []
237
- for i, file in enumerate(files):
238
- if status is not None:
239
- status.progress(i+1, len(files))
240
- try:
241
- ds = pydicom.dcmread(file, force=True, specific_tags=tags+['Rows'])
242
- except:
243
- pass
244
- else:
245
- if isinstance(ds, pydicom.dataset.FileDataset):
246
- if 'TransferSyntaxUID' in ds.file_meta:
247
- if images_only:
248
- if not 'Rows' in ds:
249
- continue
250
- row = get_values(ds, tags)
251
- array.append(row)
252
- if path is None:
253
- index = file
254
- else:
255
- index = os.path.relpath(file, path)
256
- dicom_files.append(index)
257
- df = pd.DataFrame(array, index = dicom_files, columns = tags)
258
- return df
259
-
260
-
261
-
262
- def set_values(ds, tags, values, VR=None):
263
- """
264
- Sets DICOM tags in the pydicom dataset in memory
265
-
266
- Private and standard tags can both be set.
267
- tags, values and VR must either be lists of equal lengths,
268
- or single values.
269
- VR is required for private tags.
270
- If private and standard tags are set in the same function call,
271
- VR can be set to any value for the standard tags: e.g.
272
- set_values(ds, ['Rows', (0x0019, 0x0100)], [128, 'Hello'], [None, 'LO'])
273
- """
274
-
275
- if not isinstance(tags, list):
276
- tags = [tags]
277
- values = [values]
278
- VR = [VR]
279
- elif VR is None:
280
- VR = [None] * len(tags)
281
- for i, tag in enumerate(tags):
282
-
283
- if values[i] is None:
284
- if isinstance(tag, str):
285
- if hasattr(ds, tag):
286
- # Setting standard DICOM attribute to None
287
- del ds[tag]
288
- else:
289
- # Setting custom attribute to None
290
- if hasattr(ds, 'set_attribute_' + tag):
291
- getattr(ds, 'set_attribute_' + tag)(values[i])
292
- else: # hexadecimal tuple
293
- if tag in ds:
294
- del ds[tag]
295
- else:
296
- if isinstance(tag, str):
297
- if hasattr(ds, tag):
298
- ds[tag].value = format_value(values[i], tag=tag)
299
- else:
300
- if hasattr(ds, 'set_attribute_' + tag):
301
- getattr(ds, 'set_attribute_' + tag)(values[i])
302
- continue
303
- _add_new(ds, tag, values[i], VR=VR[i])
304
- else: # hexadecimal tuple
305
- if tag in ds:
306
- ds[tag].value = format_value(values[i], tag=tag)
307
- else:
308
- _add_new(ds, tag, values[i], VR=VR[i])
309
-
310
- #_set_derived_data_element(ds, tag, values[i])
311
-
312
- return ds
313
-
314
-
315
- # def _set_derived_data_element(ds, tag, value):
316
- # """Set any tags that are need to change as well"""
317
-
318
- # if tag == 'SliceLocation' or tag == (0x0020, 0x1041):
319
- # if value is not None:
320
- # loc = ds['ImageOrientationPatient'].value
321
- # ds['ImagePositionPatient'].value = image.image_position_from_slice_location(value, loc)
322
-
323
-
324
- def _add_new(ds, tag, value, VR='OW'):
325
- if not isinstance(tag, pydicom.tag.BaseTag):
326
- tag = pydicom.tag.Tag(tag)
327
- if not tag.is_private: # Add a new data element
328
- value_repr = pydicom.datadict.dictionary_VR(tag)
329
- if value_repr == 'US or SS':
330
- if value >= 0:
331
- value_repr = 'US'
332
- else:
333
- value_repr = 'SS'
334
- elif value_repr == 'OB or OW':
335
- value_repr = 'OW'
336
- ds.add_new(tag, value_repr, format_value(value, value_repr))
337
- else:
338
- if (tag.group, 0x0010) not in ds:
339
- ds.private_block(tag.group, 'dbdicom ' + str(tag.group), create=True)
340
- ds.add_new(tag, VR, format_value(value, VR))
341
-
342
-
343
-
344
-
345
-
346
-
347
-
348
- def get_values(ds, tags):
349
- """Return a list of values for a dataset"""
350
-
351
- # https://pydicom.github.io/pydicom/stable/guides/element_value_types.html
352
- if not isinstance(tags, list):
353
- return get_values(ds, [tags])[0]
354
-
355
- row = []
356
- for tag in tags:
357
- value = None
358
-
359
- # If the tag is provided as string
360
- # check first if it is a custom attribute
361
- if isinstance(tag, str):
362
- if not hasattr(ds, tag):
363
- if hasattr(ds, 'get_attribute_' + tag):
364
- value = getattr(ds, 'get_attribute_' + tag)()
365
- else:
366
- pydcm_value = ds[tag].value
367
- try:
368
- VR = pydicom.datadict.dictionary_VR(tag)
369
- except:
370
- VR = None
371
- value = to_set_type(pydcm_value, VR) # ELIMINATE THIS STEP - return pydicom datatypes
372
-
373
- # If the tag is a tuple of hexadecimal values
374
- else:
375
- if tag in ds:
376
- try:
377
- VR = pydicom.datadict.dictionary_VR(tag)
378
- except:
379
- VR = None
380
- value = to_set_type(ds[tag].value, VR)
381
-
382
- # If a tag is not present in the dataset, check if it can be derived
383
- if value is None:
384
- value = derive_data_element(ds, tag)
385
-
386
- row.append(value)
387
- return row
388
-
389
-
390
- def derive_data_element(ds, tag):
391
- """Tags that are not required but can be derived from other required tags"""
392
-
393
- if tag == 'SliceLocation' or tag == (0x0020, 0x1041):
394
- if 'ImageOrientationPatient' in ds and 'ImagePositionPatient' in ds:
395
- return image.slice_location(
396
- ds['ImageOrientationPatient'].value,
397
- ds['ImagePositionPatient'].value,
398
- )
399
- # To be extended ad hoc with other tags that can be derived
400
-
401
-
402
-
403
- def format_value(value, VR=None, tag=None):
404
-
405
- # If the change below is made (TM, DA, DT) then this needs to
406
- # convert those to string before setting
407
-
408
- # Slow - dictionary lookup for every value write
409
-
410
- if VR is None:
411
- VR = pydicom.datadict.dictionary_VR(tag)
412
-
413
- if VR == 'LO':
414
- if len(value) > 64:
415
- return value[-64:]
416
- #return value[:64]
417
- if VR == 'TM':
418
- return variables.seconds_to_str(value)
419
-
420
- return value
421
-
422
-
423
- def to_set_type(value, VR):
424
- """
425
- Convert pydicom datatypes to the python datatypes used to set the parameter.
426
- """
427
- # Not a good idea to modify pydicom set/get values. confusing and requires extra VR lookups
428
-
429
- if VR == 'TM':
430
- # pydicom sometimes returns string values for TM data types
431
- if isinstance(value, str):
432
- return variables.str_to_seconds(value)
433
-
434
- if value.__class__.__name__ == 'MultiValue':
435
- return [to_set_type(v, VR) for v in value]
436
- if value.__class__.__name__ == 'PersonName':
437
- return str(value)
438
- if value.__class__.__name__ == 'Sequence':
439
- return [ds for ds in value]
440
- if value.__class__.__name__ == 'TM':
441
- return variables.time_to_seconds(value) # return datetime.time
442
- if value.__class__.__name__ == 'UID':
443
- return str(value)
444
- if value.__class__.__name__ == 'IS':
445
- return int(value)
446
- if value.__class__.__name__ == 'DT':
447
- return variables.datetime_to_str(value) # return datetime.datetime
448
- if value.__class__.__name__ == 'DA': # return datetime.date
449
- return variables.date_to_str(value)
450
- if value.__class__.__name__ == 'DSfloat':
451
- return float(value)
452
- if value.__class__.__name__ == 'DSdecimal':
453
- return int(value)
454
-
455
- return value
456
-
457
-
458
- def new_uid(n=None):
459
-
460
- if n is None:
461
- return pydicom.uid.generate_uid()
462
- else:
463
- return [pydicom.uid.generate_uid() for _ in range(n)]
464
-
465
-
466
- # Obsolete - replaced by instance.affine()
467
- def get_affine_matrix(ds):
468
- """Affine transformation matrix for a DICOM image"""
469
-
470
- # slice_spacing = get_values(ds, 'SpacingBetweenSlices')
471
- # if slice_spacing is None:
472
- # slice_spacing = get_values(ds, 'SliceThickness')
473
- slice_spacing = get_values(ds, 'SliceThickness')
474
- return image.affine_matrix(
475
- get_values(ds, 'ImageOrientationPatient'),
476
- get_values(ds, 'ImagePositionPatient'),
477
- get_values(ds, 'PixelSpacing'),
478
- slice_spacing)
479
-
480
-
481
- # Obsolete - replaced by instance.set_affine()
482
- def set_affine_matrix(ds, affine):
483
- v = image.dismantle_affine_matrix(affine)
484
- set_values(ds, 'PixelSpacing', v['PixelSpacing'])
485
- #set_values(ds, 'SpacingBetweenSlices', v['SpacingBetweenSlices'])
486
- set_values(ds, 'SliceThickness', v['SpacingBetweenSlices'])
487
- set_values(ds, 'ImageOrientationPatient', v['ImageOrientationPatient'])
488
- set_values(ds, 'ImagePositionPatient', v['ImagePositionPatient'])
489
- set_values(ds, 'SliceLocation', np.dot(v['ImagePositionPatient'], v['slice_cosine']))
490
-
491
-
492
- def map_mask_to(ds_source, ds_target):
493
- """Map non-zero image pixels onto a target image.
494
-
495
- Overwrite pixel values in the target"""
496
-
497
- # Create a coordinate array of non-zero pixels
498
- coords = np.transpose(np.where(ds_source.get_pixel_array() != 0))
499
- coords = [[coord[0], coord[1], 0] for coord in coords]
500
- coords = np.array(coords)
501
-
502
- # Determine coordinate transformation matrix
503
- affine_source = ds_source.get_values('affine_matrix')
504
- affine_target = ds_target.get_values('affine_matrix')
505
- source_to_target = np.linalg.inv(affine_target).dot(affine_source)
506
-
507
- # Apply coordinate transformation and interpolate (nearest neighbour)
508
- coords = nib.affines.apply_affine(source_to_target, coords)
509
- coords = np.round(coords).astype(int)
510
- # x = y = []
511
- # for r in coords:
512
- # if r[2] == 0:
513
- # if (0 <= r[0]) & (r[0] < ds_target.Columns):
514
- # if (0 <= r[1]) & (r[1] < ds_target.Rows):
515
- # x.append(r[0])
516
- # y.append(r[1])
517
- # x = tuple(x)
518
- # y = tuple(y)
519
- x = tuple([c[0] for c in coords if (c[2] == 0) & (0 <= c[0]) & (c[0] < ds_target.Columns) & (0 <= c[1]) & (c[1] < ds_target.Rows)])
520
- y = tuple([c[1] for c in coords if (c[2] == 0) & (0 <= c[0]) & (c[0] < ds_target.Columns) & (0 <= c[1]) & (c[1] < ds_target.Rows)])
521
- # x = tuple([c[0] for c in coords if c[2] == 0])
522
- # y = tuple([c[1] for c in coords if c[2] == 0])
523
-
524
- # Set values in the target image
525
- # array = np.zeros((record.Rows, record.Columns))
526
- array = np.zeros((ds_target.Columns, ds_target.Rows))
527
- array[(x, y)] = 1.0
528
-
529
- return array
530
-
531
- # List of all supported (matplotlib) colormaps
532
-
533
- COLORMAPS = ['cividis', 'magma', 'plasma', 'viridis',
534
- 'Greys', 'Purples', 'Blues', 'Greens', 'Oranges', 'Reds',
535
- 'YlOrBr', 'YlOrRd', 'OrRd', 'PuRd', 'RdPu', 'BuPu',
536
- 'GnBu', 'PuBu', 'YlGnBu', 'PuBuGn', 'BuGn', 'YlGn',
537
- 'binary', 'gist_yarg', 'gist_gray', 'bone', 'pink',
538
- 'spring', 'summer', 'autumn', 'winter', 'cool', 'Wistia',
539
- 'hot', 'afmhot', 'gist_heat', 'copper',
540
- 'PiYG', 'PRGn', 'BrBG', 'PuOr', 'RdGy', 'RdBu',
541
- 'RdYlBu', 'RdYlGn', 'Spectral', 'coolwarm', 'bwr', 'seismic',
542
- 'twilight', 'twilight_shifted', 'hsv',
543
- 'flag', 'prism', 'ocean', 'gist_earth', 'terrain', 'gist_stern',
544
- 'gnuplot', 'gnuplot2', 'CMRmap', 'cubehelix', 'brg', 'turbo',
545
- 'gist_rainbow', 'rainbow', 'jet', 'nipy_spectral', 'gist_ncar']
546
-
547
- # Include support for DICOM native colormaps (see the pydicom guide on working with pixel data)
548
-
549
- def get_colormap(ds):
550
- """Returns the colormap if there is any."""
551
-
552
- # Hijacking this free text field to store the colormap
553
- # This should use ContentDescription instead (0070, 0081)
554
- #
555
- if 'WindowCenterWidthExplanation' in ds:
556
- if ds.WindowCenterWidthExplanation in COLORMAPS:
557
- return ds.WindowCenterWidthExplanation
558
-
559
-
560
- def set_colormap(ds, colormap=None):
561
-
562
- if colormap is None:
563
- ds.PhotometricInterpretation = 'MONOCHROME2'
564
- if hasattr(ds, 'WindowCenterWidthExplanation'):
565
- del ds.WindowCenterWidthExplanation
566
- if hasattr(ds, 'RGBLUTTransferFunction'):
567
- del ds.RGBLUTTransferFunction
568
- if hasattr(ds, 'GreenPaletteColorLookupTableData'):
569
- del ds.GreenPaletteColorLookupTableData
570
- if hasattr(ds, 'RedPaletteColorLookupTableData'):
571
- del ds.RedPaletteColorLookupTableData
572
- if hasattr(ds, 'BluePaletteColorLookupTableData'):
573
- del ds.BluePaletteColorLookupTableData
574
- if hasattr(ds, 'RedPaletteColorLookupTableDescriptor'):
575
- del ds.RedPaletteColorLookupTableDescriptor
576
- if hasattr(ds, 'GreenPaletteColorLookupTableDescriptor'):
577
- del ds.GreenPaletteColorLookupTableDescriptor
578
- if hasattr(ds, 'BluePaletteColorLookupTableDescriptor'):
579
- del ds.BluePaletteColorLookupTableDescriptor
580
- else:
581
- ds.WindowCenterWidthExplanation = colormap
582
- # Get a LUT as float numpy array with values in the range [0,1]
583
- RGBA = cm.ScalarMappable(cmap=colormap).to_rgba(np.arange(256))
584
- set_lut(ds, RGBA[:,:3])
585
-
586
-
587
- def set_lut(ds, RGB):
588
- """Set RGB as float with values in range [0,1]"""
589
-
590
- ds.PhotometricInterpretation = 'PALETTE COLOR'
591
-
592
- RGB *= (np.power(2, ds.BitsAllocated) - 1)
593
-
594
- if ds.BitsAllocated == 8:
595
- RGB = RGB.astype(np.ubyte)
596
- elif ds.BitsAllocated == 16:
597
- RGB = RGB.astype(np.uint16)
598
-
599
- # Define the properties of the LUT
600
- ds.add_new('0x00281101', 'US', [255, 0, ds.BitsAllocated])
601
- ds.add_new('0x00281102', 'US', [255, 0, ds.BitsAllocated])
602
- ds.add_new('0x00281103', 'US', [255, 0, ds.BitsAllocated])
603
-
604
- # Scale the colorsList to the available range
605
- ds.RedPaletteColorLookupTableData = bytes(RGB[:,0])
606
- ds.GreenPaletteColorLookupTableData = bytes(RGB[:,1])
607
- ds.BluePaletteColorLookupTableData = bytes(RGB[:,2])
608
-
609
-
610
- def get_lut(ds):
611
- """Return RGB as float with values in [0,1]"""
612
-
613
- if 'PhotometricInterpretation' not in ds:
614
- return None
615
- if ds.PhotometricInterpretation != 'PALETTE COLOR':
616
- return None
617
-
618
- if ds.BitsAllocated == 8:
619
- dtype = np.ubyte
620
- elif ds.BitsAllocated == 16:
621
- dtype = np.uint16
622
-
623
- R = ds.RedPaletteColorLookupTableData
624
- G = ds.GreenPaletteColorLookupTableData
625
- B = ds.BluePaletteColorLookupTableData
626
-
627
- R = np.frombuffer(R, dtype=dtype)
628
- G = np.frombuffer(G, dtype=dtype)
629
- B = np.frombuffer(B, dtype=dtype)
630
-
631
- R = R.astype(np.float32)
632
- G = G.astype(np.float32)
633
- B = B.astype(np.float32)
634
-
635
- R *= 1.0/(np.power(2, ds.RedPaletteColorLookupTableDescriptor[2]) - 1)
636
- G *= 1.0/(np.power(2, ds.GreenPaletteColorLookupTableDescriptor[2]) - 1)
637
- B *= 1.0/(np.power(2, ds.BluePaletteColorLookupTableDescriptor[2]) - 1)
638
-
639
- return np.transpose([R, G, B])
640
-
641
-
642
- def get_pixel_array(ds):
643
- """Read the pixel array from an image"""
644
-
645
- try:
646
- array = ds.pixel_array
647
- except:
648
- return None
649
- array = array.astype(np.float32)
650
- slope = float(getattr(ds, 'RescaleSlope', 1))
651
- intercept = float(getattr(ds, 'RescaleIntercept', 0))
652
- array *= slope
653
- array += intercept
654
-
655
- return np.transpose(array)
656
-
657
-
658
- def set_pixel_array(ds, array, value_range=None):
659
-
660
- # if array.ndim >= 3: # remove spurious dimensions of 1
661
- # array = np.squeeze(array)
662
-
663
- array = image.clip(array.astype(np.float32), value_range=value_range)
664
- array, slope, intercept = image.scale_to_range(array, ds.BitsAllocated)
665
- array = np.transpose(array)
666
-
667
- #maximum = np.amax(array)
668
- #minimum = np.amin(array)
669
- shape = np.shape(array)
670
-
671
- ds.PixelRepresentation = 0
672
- #ds.SmallestImagePixelValue = int(0)
673
- #ds.LargestImagePixelValue = int(2**ds.BitsAllocated - 1)
674
- ds.set_values('SmallestImagePixelValue', int(0))
675
- ds.set_values('LargestImagePixelValue', int(2**ds.BitsAllocated - 1))
676
- ds.RescaleSlope = 1 / slope
677
- ds.RescaleIntercept = - intercept / slope
678
- # ds.WindowCenter = (maximum + minimum) / 2
679
- # ds.WindowWidth = maximum - minimum
680
- ds.Rows = shape[0]
681
- ds.Columns = shape[1]
682
- ds.PixelData = array.tobytes()
683
-
684
-
685
- def module_patient():
686
-
687
- return [
688
- 'ReferencedPatientSequence',
689
- 'PatientName',
690
- 'PatientID',
691
- 'IssuerOfPatientID',
692
- 'TypeOfPatientID',
693
- 'IssuerOfPatientIDQualifiersSequence',
694
- 'SourcePatientGroupIdentificationSequence',
695
- 'GroupOfPatientsIdentificationSequence',
696
- 'PatientBirthDate',
697
- 'PatientBirthTime',
698
- 'PatientBirthDateInAlternativeCalendar',
699
- 'PatientDeathDateInAlternativeCalendar',
700
- 'PatientAlternativeCalendar',
701
- 'PatientSex',
702
- 'QualityControlSubject',
703
- 'StrainDescription',
704
- 'StrainNomenclature',
705
- 'StrainStockSequence',
706
- 'StrainAdditionalInformation',
707
- 'StrainCodeSequence',
708
- 'GeneticModificationsSequence',
709
- 'OtherPatientNames',
710
- 'OtherPatientIDsSequence',
711
- 'ReferencedPatientPhotoSequence',
712
- 'EthnicGroup',
713
- 'PatientSpeciesDescription',
714
- 'PatientSpeciesCodeSequence',
715
- 'PatientBreedDescription',
716
- 'PatientBreedCodeSequence',
717
- 'BreedRegistrationSequence',
718
- 'ResponsiblePerson',
719
- 'ResponsiblePersonRole',
720
- 'ResponsibleOrganization',
721
- 'PatientComments',
722
- 'PatientIdentityRemoved',
723
- 'DeidentificationMethod',
724
- 'DeidentificationMethodCodeSequence',
725
- 'ClinicalTrialSponsorName',
726
- 'ClinicalTrialProtocolID',
727
- 'ClinicalTrialProtocolName',
728
- 'ClinicalTrialSiteID',
729
- 'ClinicalTrialSiteName',
730
- 'ClinicalTrialSubjectID',
731
- 'ClinicalTrialSubjectReadingID',
732
- 'ClinicalTrialProtocolEthicsCommitteeName',
733
- 'ClinicalTrialProtocolEthicsCommitteeApprovalNumber',
734
- ]
735
-
736
-
737
- def module_study():
738
-
739
- return [
740
- 'StudyDate',
741
- 'StudyTime',
742
- 'AccessionNumber',
743
- 'IssuerOfAccessionNumberSequence',
744
- 'ReferringPhysicianName',
745
- 'ReferringPhysicianIdentificationSequence',
746
- 'ConsultingPhysicianName',
747
- 'ConsultingPhysicianIdentificationSequence',
748
- 'StudyDescription',
749
- 'ProcedureCodeSequence',
750
- 'PhysiciansOfRecord',
751
- 'PhysiciansOfRecordIdentificationSequence',
752
- 'NameOfPhysiciansReadingStudy',
753
- 'PhysiciansReadingStudyIdentificationSequence',
754
- 'ReferencedStudySequence',
755
- 'StudyInstanceUID',
756
- 'StudyID',
757
- 'RequestingService',
758
- 'RequestingServiceCodeSequence',
759
- 'ReasonForPerformedProcedureCodeSequence',
760
- 'AdmittingDiagnosesDescription',
761
- 'AdmittingDiagnosesCodeSequence',
762
- 'PatientAge',
763
- 'PatientSize',
764
- 'PatientSizeCodeSequence',
765
- 'PatientBodyMassIndex',
766
- 'MeasuredAPDimension',
767
- 'MeasuredLateralDimension',
768
- 'PatientWeight',
769
- 'MedicalAlerts',
770
- 'Allergies',
771
- 'Occupation',
772
- 'SmokingStatus',
773
- 'AdditionalPatientHistory',
774
- 'PregnancyStatus',
775
- 'LastMenstrualDate',
776
- 'PatientSexNeutered',
777
- 'ReasonForVisit',
778
- 'ReasonForVisitCodeSequence',
779
- 'AdmissionID',
780
- 'IssuerOfAdmissionIDSequence',
781
- 'ServiceEpisodeID',
782
- 'ServiceEpisodeDescription',
783
- 'IssuerOfServiceEpisodeIDSequence',
784
- 'PatientState',
785
- 'ClinicalTrialTimePointID',
786
- 'ClinicalTrialTimePointDescription',
787
- 'LongitudinalTemporalOffsetFromEvent',
788
- 'LongitudinalTemporalEventType',
789
- 'ConsentForClinicalTrialUseSequence',
790
- ]
791
-
792
-
793
- def module_series():
794
-
795
- return [
796
- 'SeriesDate',
797
- 'SeriesTime',
798
- 'Modality',
799
- 'SeriesDescription',
800
- 'SeriesDescriptionCodeSequence',
801
- 'PerformingPhysicianName',
802
- 'PerformingPhysicianIdentificationSequence',
803
- 'OperatorsName',
804
- 'OperatorIdentificationSequence',
805
- 'ReferencedPerformedProcedureStepSequence',
806
- 'RelatedSeriesSequence',
807
- 'AnatomicalOrientationType',
808
- 'BodyPartExamined',
809
- 'ProtocolName',
810
- 'PatientPosition',
811
- 'ReferencedDefinedProtocolSequence',
812
- 'ReferencedPerformedProtocolSequence',
813
- 'SeriesInstanceUID',
814
- 'SeriesNumber',
815
- 'Laterality',
816
- 'SmallestPixelValueInSeries',
817
- 'LargestPixelValueInSeries',
818
- 'PerformedProcedureStepStartDate',
819
- 'PerformedProcedureStepStartTime',
820
- 'PerformedProcedureStepEndDate',
821
- 'PerformedProcedureStepEndTime',
822
- 'PerformedProcedureStepID',
823
- 'PerformedProcedureStepDescription',
824
- 'PerformedProtocolCodeSequence',
825
- 'RequestAttributesSequence',
826
- 'CommentsOnThePerformedProcedureStep',
827
- 'ClinicalTrialCoordinatingCenterName',
828
- 'ClinicalTrialSeriesID',
829
- 'ClinicalTrialSeriesDescription',
830
- ]
831
-
832
-
833
- # def _initialize(ds, UID=None, ref=None): # ds is pydicom dataset
834
-
835
- # # Date and Time of Creation
836
- # dt = datetime.now()
837
- # timeStr = dt.strftime('%H%M%S') # long format with micro seconds
838
-
839
- # ds.ContentDate = dt.strftime('%Y%m%d')
840
- # ds.ContentTime = timeStr
841
- # ds.AcquisitionDate = dt.strftime('%Y%m%d')
842
- # ds.AcquisitionTime = timeStr
843
- # ds.SeriesDate = dt.strftime('%Y%m%d')
844
- # ds.SeriesTime = timeStr
845
- # ds.InstanceCreationDate = dt.strftime('%Y%m%d')
846
- # ds.InstanceCreationTime = timeStr
847
-
848
- # if UID is not None:
849
-
850
- # # overwrite UIDs
851
- # ds.PatientID = UID[0]
852
- # ds.StudyInstanceUID = UID[1]
853
- # ds.SeriesInstanceUID = UID[2]
854
- # ds.SOPInstanceUID = UID[3]
855
-
856
- # if ref is not None:
857
-
858
- # # Series, Instance and Class for Reference
859
- # refd_instance = Dataset()
860
- # refd_instance.ReferencedSOPClassUID = ref.SOPClassUID
861
- # refd_instance.ReferencedSOPInstanceUID = ref.SOPInstanceUID
862
-
863
- # refd_series = Dataset()
864
- # refd_series.ReferencedInstanceSequence = Sequence([refd_instance])
865
- # refd_series.SeriesInstanceUID = ds.SeriesInstanceUID
866
-
867
- # ds.ReferencedSeriesSequence = Sequence([refd_series])
868
-
869
- # return ds
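The deleted set_values() and _add_new() helpers distinguish standard tags, whose VR is looked up in the DICOM data dictionary, from private tags, which need an explicit VR and a reserved private-creator block. A minimal sketch of the same pattern in plain pydicom follows; the private creator string and element offset are made-up examples, and this is not the dbdicom API.

# Sketch only: set a standard and a (hypothetical) private attribute with pydicom.
from pydicom.dataset import Dataset

ds = Dataset()

# Standard attribute: pydicom resolves the VR from the data dictionary.
ds.Rows = 128

# Private attribute: reserve a block for the private creator, then add the
# element with an explicit VR, since private tags have no dictionary entry.
block = ds.private_block(0x0019, "dbdicom 0x0019", create=True)  # creator string is illustrative
block.add_new(0x01, "LO", "Hello")

print(ds)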
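read_dataframe() collects a chosen set of header fields from a list of files into a pandas DataFrame indexed by file path, reading only the requested elements (plus Rows, used to skip non-image datasets). A stripped-down sketch with plain pydicom and pandas, omitting the progress reporting and custom-attribute handling of the original:

# Sketch only: gather a few header fields from DICOM files into a DataFrame.
import pandas as pd
import pydicom

def read_dataframe(files, tags):
    rows, index = [], []
    for path in files:
        try:
            # specific_tags restricts parsing to the requested elements;
            # Rows is added so non-image datasets can be filtered out.
            ds = pydicom.dcmread(path, force=True, specific_tags=tags + ["Rows"])
        except Exception:
            continue
        if "Rows" not in ds:
            continue
        rows.append([ds.get(tag) for tag in tags])
        index.append(path)
    return pd.DataFrame(rows, index=index, columns=tags)

# Example: df = read_dataframe(paths, ["PatientID", "SeriesInstanceUID", "InstanceNumber"])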
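derive_data_element() computes SliceLocation when it is missing: the slice normal is the cross product of the row and column direction cosines in ImageOrientationPatient, and the slice location is the projection of ImagePositionPatient onto that normal. A small numpy sketch of the geometry (assumed to match the image.slice_location helper, which is not shown in this diff):

# Sketch only: derive SliceLocation from the standard geometry attributes.
import numpy as np

def slice_location(image_orientation_patient, image_position_patient):
    row_cosine = np.array(image_orientation_patient[:3], dtype=float)
    col_cosine = np.array(image_orientation_patient[3:], dtype=float)
    slice_cosine = np.cross(row_cosine, col_cosine)  # slice normal
    return float(np.dot(image_position_patient, slice_cosine))

print(slice_location([1, 0, 0, 0, 1, 0], [0.0, 0.0, 42.0]))  # axial slice at 42.0 mm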
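get_pixel_array() and set_pixel_array() convert between floating-point arrays and stored integer pixel data: the writer scales the array into the available integer range and records the inverse transform, so that stored * RescaleSlope + RescaleIntercept recovers the original values on read. A self-contained numpy sketch of that round trip (illustrative only; dbdicom's image.scale_to_range helper is not shown in this diff):

# Sketch only: round trip between a float array and 16-bit stored pixel values.
import numpy as np

def encode(array, bits=16):
    """Scale a float array into [0, 2**bits - 1] and return the DICOM rescale."""
    lo, hi = float(array.min()), float(array.max())
    slope = (2**bits - 1) / (hi - lo) if hi != lo else 1.0
    intercept = -lo * slope
    stored = np.round(array * slope + intercept).astype(np.uint16)
    # DICOM stores the inverse transform: value = stored * RescaleSlope + RescaleIntercept
    return stored, 1.0 / slope, -intercept / slope

def decode(stored, rescale_slope, rescale_intercept):
    return stored.astype(np.float32) * rescale_slope + rescale_intercept

array = np.random.uniform(-50, 300, size=(4, 4)).astype(np.float32)
stored, slope, intercept = encode(array)
assert np.allclose(decode(stored, slope, intercept), array, atol=0.1)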
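set_colormap() and set_lut() store a matplotlib colormap as a DICOM PALETTE COLOR lookup table: the RGB values are scaled to the stored bit depth and written to the Red/Green/Blue palette data elements with matching descriptors. A hedged sketch using pydicom directly; the 256-entry, 8-bit layout is an example choice here, not the descriptor values used by the deleted code:

# Sketch only: build a 256-entry PALETTE COLOR LUT from a matplotlib colormap.
import numpy as np
from matplotlib import cm
from pydicom.dataset import Dataset

ds = Dataset()
ds.PhotometricInterpretation = "PALETTE COLOR"

# 256 RGBA rows in [0, 1]; keep RGB and scale to 8-bit entries.
rgba = cm.ScalarMappable(cmap="viridis").to_rgba(np.arange(256))
rgb = (rgba[:, :3] * 255).astype(np.uint8)

# Descriptor: [number of entries, first stored value mapped, bits per entry]
descriptor = [256, 0, 8]
ds.RedPaletteColorLookupTableDescriptor = descriptor
ds.GreenPaletteColorLookupTableDescriptor = descriptor
ds.BluePaletteColorLookupTableDescriptor = descriptor
ds.RedPaletteColorLookupTableData = rgb[:, 0].tobytes()
ds.GreenPaletteColorLookupTableData = rgb[:, 1].tobytes()
ds.BluePaletteColorLookupTableData = rgb[:, 2].tobytes()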