dbdicom 0.2.6__py3-none-any.whl → 0.3.1__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of dbdicom might be problematic.

Files changed (52)
  1. dbdicom/__init__.py +1 -28
  2. dbdicom/api.py +287 -0
  3. dbdicom/const.py +144 -0
  4. dbdicom/dataset.py +721 -0
  5. dbdicom/dbd.py +736 -0
  6. dbdicom/external/__pycache__/__init__.cpython-311.pyc +0 -0
  7. dbdicom/external/dcm4che/__pycache__/__init__.cpython-311.pyc +0 -0
  8. dbdicom/external/dcm4che/bin/__pycache__/__init__.cpython-311.pyc +0 -0
  9. dbdicom/register.py +527 -0
  10. dbdicom/{ds/types → sop_classes}/ct_image.py +2 -16
  11. dbdicom/{ds/types → sop_classes}/enhanced_mr_image.py +153 -26
  12. dbdicom/{ds/types → sop_classes}/mr_image.py +185 -140
  13. dbdicom/sop_classes/parametric_map.py +310 -0
  14. dbdicom/sop_classes/secondary_capture.py +140 -0
  15. dbdicom/sop_classes/segmentation.py +311 -0
  16. dbdicom/{ds/types → sop_classes}/ultrasound_multiframe_image.py +1 -15
  17. dbdicom/{ds/types → sop_classes}/xray_angiographic_image.py +2 -17
  18. dbdicom/utils/arrays.py +36 -0
  19. dbdicom/utils/files.py +0 -20
  20. dbdicom/utils/image.py +10 -629
  21. dbdicom-0.3.1.dist-info/METADATA +28 -0
  22. dbdicom-0.3.1.dist-info/RECORD +53 -0
  23. dbdicom/create.py +0 -457
  24. dbdicom/dro.py +0 -174
  25. dbdicom/ds/__init__.py +0 -10
  26. dbdicom/ds/create.py +0 -63
  27. dbdicom/ds/dataset.py +0 -869
  28. dbdicom/ds/dictionaries.py +0 -620
  29. dbdicom/ds/types/parametric_map.py +0 -226
  30. dbdicom/extensions/__init__.py +0 -9
  31. dbdicom/extensions/dipy.py +0 -448
  32. dbdicom/extensions/elastix.py +0 -503
  33. dbdicom/extensions/matplotlib.py +0 -107
  34. dbdicom/extensions/numpy.py +0 -271
  35. dbdicom/extensions/scipy.py +0 -1512
  36. dbdicom/extensions/skimage.py +0 -1030
  37. dbdicom/extensions/sklearn.py +0 -243
  38. dbdicom/extensions/vreg.py +0 -1390
  39. dbdicom/manager.py +0 -2132
  40. dbdicom/message.py +0 -119
  41. dbdicom/pipelines.py +0 -66
  42. dbdicom/record.py +0 -1893
  43. dbdicom/types/database.py +0 -107
  44. dbdicom/types/instance.py +0 -231
  45. dbdicom/types/patient.py +0 -40
  46. dbdicom/types/series.py +0 -2874
  47. dbdicom/types/study.py +0 -58
  48. dbdicom-0.2.6.dist-info/METADATA +0 -72
  49. dbdicom-0.2.6.dist-info/RECORD +0 -66
  50. {dbdicom-0.2.6.dist-info → dbdicom-0.3.1.dist-info}/WHEEL +0 -0
  51. {dbdicom-0.2.6.dist-info → dbdicom-0.3.1.dist-info}/licenses/LICENSE +0 -0
  52. {dbdicom-0.2.6.dist-info → dbdicom-0.3.1.dist-info}/top_level.txt +0 -0
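Note that this release removes entire modules that were importable in 0.2.6 (dbdicom/record.py, dbdicom/manager.py, dbdicom/types/series.py and the dbdicom/extensions package), so code written against the 0.2.6 API is unlikely to import under 0.3.1. A minimal sketch of a compatibility probe, assuming only that the deleted paths in the file list above were importable in 0.2.6:

    # Hypothetical probe: dbdicom.types.series ships with 0.2.6 but the file
    # dbdicom/types/series.py is deleted in 0.3.1 (see entry 46 above).
    try:
        from dbdicom.types.series import Series  # 0.2.6 layout
        print("0.2.6-style module layout available")
    except ImportError:
        print("0.3.x layout: dbdicom.types.series has been removed")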
dbdicom/types/series.py DELETED
@@ -1,2874 +0,0 @@
1
- # Import annotations to allow forward references in type hints
2
- from __future__ import annotations
3
-
4
- import os
5
- import math
6
- from numbers import Number
7
-
8
- import numpy as np
9
- import nibabel as nib
10
- import vreg
11
-
12
-
13
- from dbdicom.record import Record, read_dataframe_from_instance_array
14
- from dbdicom.ds import MRImage
15
- import dbdicom.utils.image as image_utils
16
- from dbdicom.manager import Manager
17
- # import dbdicom.extensions.scipy as scipy_utils
18
- from dbdicom.utils.files import export_path
19
-
20
-
21
- class Series(Record):
22
-
23
- name = 'SeriesInstanceUID'
24
-
25
- def _set_key(self):
26
- self._key = self.keys()[0]
27
-
28
- def remove(self):
29
- self.manager.delete_series([self.uid])
30
-
31
- def parent(self):
32
- #uid = self.manager.register.at[self.key(), 'StudyInstanceUID']
33
- uid = self.manager._at(self.key(), 'StudyInstanceUID')
34
- return self.record('Study', uid, key=self.key())
35
-
36
- def children(self, **kwargs):
37
- return self.instances(**kwargs)
38
-
39
- def new_child(self, dataset=None, **kwargs):
40
- attr = {**kwargs, **self.attributes}
41
- return self.new_instance(dataset=dataset, **attr)
42
-
43
- def new_sibling(self, suffix=None, **kwargs):
44
- if suffix is not None:
45
- desc = self.manager._at(self.key(), 'SeriesDescription')
46
- kwargs['SeriesDescription'] = desc + ' [' + suffix + ']'
47
- return self.parent().new_child(**kwargs)
48
-
49
- def new_instance(self, dataset=None, **kwargs):
50
- attr = {**kwargs, **self.attributes}
51
- uid, key = self.manager.new_instance(parent=self.uid, dataset=dataset, key=self.key(), **attr)
52
- return self.record('Instance', uid, key, **attr)
53
-
54
- # replace by clone(). Adopt implies move rather than copy
55
- def adopt(self, instances):
56
- if len(instances)==0:
57
- return []
58
- uids = [i.uid for i in instances]
59
- uids = self.manager.copy_to_series(uids, self.uid, **self.attributes)
60
- if isinstance(uids, list):
61
- return [self.record('Instance', uid) for uid in uids]
62
- else:
63
- return self.record('Instance', uids)
64
-
65
- def _copy_from(self, record, **kwargs):
66
- attr = {**kwargs, **self.attributes}
67
- uids = self.manager.copy_to_series(record.uid, self.uid, **attr)
68
- if isinstance(uids, list):
69
- return [self.record('Instance', uid, **attr) for uid in uids]
70
- else:
71
- return self.record('Instance', uids, **attr)
72
-
73
- def export_as_dicom(self, path):
74
- folder = self.label()
75
- path = export_path(path, folder)
76
- # Create a copy so that exported datasets have different UIDs.
77
- copy = self.copy()
78
- mgr = Manager(path, status=self.status)
79
- mgr.open(path)
80
- for i in copy.instances():
81
- ds = i.get_dataset()
82
- mgr.import_dataset(ds)
83
- copy.remove()
84
-
85
- def export_as_png(self, path, **kwargs):
86
- #Export all images as png files
87
- folder = self.label()
88
- path = export_path(path, folder)
89
- images = self.images()
90
- for i, img in enumerate(images):
91
- img.progress(i+1, len(images), 'Exporting png..')
92
- img.export_as_png(path, **kwargs)
93
-
94
- def export_as_csv(self, path):
95
- #Export all images as csv files
96
- folder = self.label()
97
- path = export_path(path, folder)
98
- images = self.images()
99
- for i, img in enumerate(images):
100
- img.progress(i+1, len(images), 'Exporting csv..')
101
- img.export_as_csv(path)
102
-
103
- def export_as_npy(self, path, dims=None):
104
- if dims is None:
105
- folder = self.label()
106
- path = export_path(path, folder)
107
- images = self.images()
108
- for i, img in enumerate(images):
109
- img.progress(i+1, len(images), 'Exporting npy..')
110
- img.export_as_npy(path)
111
- else:
112
- array = self.pixel_values(dims)
113
- filepath = self.label()
114
- filepath = os.path.join(path, filepath + '.npy')
115
- with open(filepath, 'wb') as f:
116
- np.save(f, array)
117
-
118
- def export_as_nifti(self, path, dims=None):
119
- if dims is None:
120
- folder = self.label()
121
- path = export_path(path, folder)
122
- affine = self.affine_matrix()
123
- if not isinstance(affine, list):
124
- affine = [affine]
125
- for a in affine:
126
- matrix = a[0]
127
- images = a[1]
128
- for i, img in enumerate(images):
129
- img.progress(i+1, len(images), 'Exporting nifti..')
130
- img.export_as_nifti(path, matrix)
131
- else:
132
- ds = self.instance().get_dataset()
133
- sgroups = self.slice_groups(dims=dims)
134
- for i, sg in enumerate(sgroups):
135
- self.progress(i+1, len(sgroups), 'Exporting nifti..')
136
- dicom_header = nib.nifti1.Nifti1DicomExtension(2, ds)
137
- nifti1_image = nib.Nifti1Image(sg['ndarray'], image_utils.affine_to_RAH(sg['affine']))
138
- nifti1_image.header.extensions.append(dicom_header)
139
- filepath = self.label()
140
- filepath = os.path.join(path, filepath + '[' + str(i) + '].nii')
141
- nib.save(nifti1_image, filepath)
142
-
143
- def import_dicom(self, files):
144
- uids = self.manager.import_datasets(files)
145
- self.manager.move_to(uids, self.uid)
146
-
147
-
148
-
149
- def coords(self, dims=('InstanceNumber', ), mesh=False, slice={}, coords={}, exclude=False, **filters)->dict:
150
- """return a dictionary of coordinates.
151
-
152
- Args:
153
- dims (tuple, optional): Attributes to be used as coordinates. If *dims* is not provided, it defaults to ('InstanceNumber',).
154
-
155
- Raises:
156
- ValueError: If the dimensions do not produce suitable coordinates.
157
-
158
- Returns:
159
- dict: dictionary of coordinates, one entry for each dimension. The values for each coordinate are returned as an ndarray with one dimension.
160
-
161
- See also:
162
- `set_coords`
163
-
164
- Example:
165
-
166
- Create an empty series with 3 slice dimensions:
167
-
168
- >>> coords = {
169
- ... 'SliceLocation': np.array([0,1,2,0,1,2]),
170
- ... 'FlipAngle': np.array([2,2,2,10,10,10]),
171
- ... 'RepetitionTime': np.array([1,5,15,1,5,15]),
172
- ... }
173
- >>> series = db.empty_series(coords)
174
-
175
- Retrieve the coordinates:
176
-
177
- >>> coords = series.coords(tuple(coords))
178
- >>> coords['FlipAngle']
179
- [2,10,2,10,2,10]
180
- >>> coords['RepetitionTime']
181
- [1,1,5,5,15,15]
182
-
183
- Check the result in default dimensions:
184
-
185
- >>> coords = series.coords()
186
- >>> coords['InstanceNumber']
187
- [1,2,3,4,5,6]
188
-
189
- In this case the slice location and flip angle alone are sufficient to identify the frames, so these are valid coordinates:
190
-
191
- >>> coords = series.coords(('SliceLocation', 'FlipAngle'))
192
- >>> coords['SliceLocation']
193
- [0,0,1,1,2,2]
194
-
195
- # However slice location and repetition time are not sufficient as coordinates because each combination appears twice, so this throws an error:
196
-
197
- >>> series.coords(('SliceLocation','RepetitionTime'))
198
- ValueError: These are not proper coordinates. Coordinate values must be unique.
199
- """
200
-
201
- if np.isscalar(dims):
202
- dims = (dims,)
203
-
204
- # Default empty coordinates
205
- vcoords = {}
206
- for i, tag in enumerate(dims):
207
- vcoords[tag] = np.array([])
208
-
209
- # Get all frames and return if empty
210
- frames = self.instances()
211
- if frames == []:
212
- return vcoords
213
-
214
- # Read values and sort
215
- fltr = {**slice, **filters}
216
- values = [f[list(dims)+list(fltr)+list(tuple(coords))] for f in frames]
217
- values.sort()
218
-
219
- # Check dimensions
220
- cvalues = [v[:len(dims)] for v in values]
221
- cvalues = np.array(cvalues).T
222
- _check_if_ivals(cvalues)
223
-
224
- # Filter values
225
- values = _filter_values(values, fltr, coords, exclude=exclude)
226
-
227
- # If requested, mesh values
228
- if mesh:
229
- values = _meshvals(values)
230
- mshape = values.shape[1:]
231
-
232
- # Build coordinates
233
- if values.size > 0:
234
- for i, tag in enumerate(dims):
235
- vcoords[tag] = values[i,...]
236
- if mesh: # Is this necessary? Is already in the right shape
237
- vcoords[tag] = vcoords[tag].reshape(mshape)
238
-
239
- return vcoords
240
-
241
-
242
- def values(self, *tags, dims=('InstanceNumber', ), return_coords=False, mesh=True, slice={}, coords={}, exclude=False, **filters)->np.ndarray:
243
- """Return the values of one or more attributes for each frame in the series.
244
-
245
- Args:
246
- tag (str or tuple): either a keyword string or a (group, element) tag of a DICOM data element.
247
- dims (tuple, optional): Dimensions of the resulting array. If *dims* is not provided, values are ordered by InstanceNumber. Defaults to None.
248
- inds (dict, optional): Dictionary with indices to retrieve a slice of the entire array. Defaults to None.
249
- select (dict, optional): A dictionary of values for DICOM attributes to filter the result. By default the data are not filtered.
250
- filters (dict, optional): keyword arguments to filter the data by value of DICOM attributes.
251
-
252
- Returns:
253
- A `numpy.ndarray` of values with dimensions as specified by *dims*. If the value is not defined in one or more of the frames, the corresponding entries are None.
254
-
255
- See also:
256
- `unique`
257
- `coords`
258
- `gridcoords`
259
-
260
- Note:
261
- In order to list the values in the case one or more are absent in the headers, use `Series.unique()` instead.
262
-
263
- Example:
264
-
265
- Create a zero-filled series with 3 slice dimensions:
266
-
267
- >>> coords = {
268
- ... 'SliceLocation': 10*np.arange(4),
269
- ... 'FlipAngle': np.array([2, 15, 30]),
270
- ... 'RepetitionTime': np.array([2.5, 5.0]), }
271
- >>> zeros = db.zeros((128,128,4,3,2), coords)
272
-
273
- # If values() is called without dimensions, a flat array is returned with one value per frame, ordered by instance number:
274
-
275
- >>> zeros.values('InstanceNumber')
276
- [1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16,17,18,19,20,21,22,23,24]
277
- >>> zeros.values('FlipAngle')
278
- [2,2,15,15,30,30,2,2,15,15,30,30,2,2,15,15,30,30,2,2,15,15,30,30]
279
-
280
- If dimensions are provided, an array of the appropriate shape is returned:
281
-
282
- >>> dims = tuple(coords)
283
- >>> tacq = zeros.values('AcquisitionTime', dims)
284
- >>> tacq.shape
285
- (4,3,2)
286
- >>> tacq[0,0,0]
287
- 28609.057496
288
-
289
- In this case all values are the same:
290
-
291
- >>> np.unique(tacq)
292
- [28609.057496]
293
-
294
- If a value is not defined in the header, None is returned:
295
- >>> zeros.values('Gobbledigook')[:2]
296
- [None None]
297
-
298
- Specify keywords to select a subset of values:
299
-
300
- >>> tacq = zeros.values('AcquisitionTime', dims, FlipAngle=15)
301
- >>> tacq.shape
302
- (4, 1, 2)
303
-
304
- If none exist, an empty array is returned:
305
-
306
- >>> tacq = zeros.values('AcquisitionTime', dims, FlipAngle=0)
307
- >>> tacq.size
308
- 0
309
-
310
- Multiple possible values can be selected with arrays:
311
-
312
- >>> tacq = zeros.values('AcquisitionTime', dims, FlipAngle=np.array([15,30]))
313
- >>> tacq.shape
314
- (4, 2, 2)
315
-
316
- Any number of keywords can be added as filters:
317
-
318
- >>> tacq = zeros.values('AcquisitionTime', dims, FlipAngle=np.array([15,30]), SliceLocation=np.array([10,20]))
319
- >>> tacq.shape
320
- (2, 2, 2)
321
-
322
- Filters can also be set using the *select* argument:
323
-
324
- >>> tacq = zeros.values('AcquisitionTime', dims, select={'FlipAngle': 15})
325
- >>> tacq.shape
326
- (4, 1, 2)
327
-
328
- This also allows (group, element) tags:
329
-
330
- >>> tacq = zeros.values('AcquisitionTime', dims, select={(0x0018, 0x1314): 15})
331
- >>> tacq.shape
332
- (4, 1, 2)
333
-
334
- Selections can also be made using indices rather than values:
335
-
336
- >>> tacq = zeros.values('FlipAngle', dims, inds={'FlipAngle': 1})
337
- >>> tacq.shape
338
- (4, 1, 2)
339
-
340
- >>> tacq = zeros.values('AcquisitionTime', dims, inds={'FlipAngle':np.arange(2)})
341
- >>> tacq.shape
342
- (4, 2, 2)
343
- """
344
-
345
- if np.isscalar(dims):
346
- dims = (dims,)
347
-
348
- # Default return values
349
- values = np.array([]).reshape((0,0))
350
- vcoords = {}
351
- for i, tag in enumerate(dims):
352
- vcoords[tag] = np.array([])
353
-
354
- # Get all frames and return if empty
355
- frames = self.instances()
356
- if frames == []:
357
- if return_coords:
358
- return values, vcoords
359
- return values
360
-
361
- # Read values and sort
362
- filters = {**slice, **filters}
363
- values = []
364
- for i, f in enumerate(frames):
365
- self.progress(i+1,len(frames), 'Reading values..')
366
- v = f[list(dims)+list(tags)+list(tuple(filters))+list(tuple(coords))]
367
- values.append(v)
368
- fsort = sorted(range(len(values)), key=lambda k: values[k][:len(dims)])
369
- values = [values[i] for i in fsort]
370
-
371
- # Check if dimensions are proper
372
- # Need object array here because the values can be different type including lists.
373
- cvalues = [v[:len(dims)] for v in values]
374
- cvalues = np.array(cvalues, dtype=object).T
375
- _check_if_ivals(cvalues)
376
-
377
- # Filter values
378
- values = _filter_values(values, filters, coords, exclude=exclude)
379
- if values.size == 0:
380
- if return_coords:
381
- if len(tags) == 1:
382
- return values, vcoords
383
- else:
384
- values = [np.array([]) for _ in range(len(tags))]
385
- return tuple(values) + (vcoords,)
386
- return values
387
- cvalues = values[:len(dims),:]
388
- values = values[len(dims):,:]
389
-
390
- # If requested, mesh values
391
- if mesh:
392
- cmesh = _meshvals(cvalues)
393
- values = _meshdata(values, cvalues, cmesh)
394
- cvalues = cmesh
395
-
396
- # Create return values
397
- if len(tags) == 1:
398
- values = values[0,...]
399
- else:
400
- values = [values[i,...] for i in range(values.shape[0])]
401
- values = tuple(values)
402
-
403
- if return_coords:
404
- for i, tag in enumerate(dims):
405
- vcoords[tag] = cvalues[i,...]
406
- if len(tags) == 1:
407
- return values, vcoords
408
- else:
409
- return values + (vcoords,)
410
- else:
411
- return values
412
-
413
-
414
-
415
-
416
-
417
- def frames(
418
- self, dims=('InstanceNumber', ), return_coords=False,
419
- return_vals=(), mesh=True, slice={}, coords={}, exclude=False,
420
- **filters):
421
- """Return the frames of given coordinates in the correct order"""
422
-
423
- if np.isscalar(dims):
424
- dims = (dims,)
425
-
426
- # Default return values
427
- values = np.array([]).reshape((0,0))
428
- vcoords = {}
429
- for i, tag in enumerate(dims):
430
- vcoords[tag] = np.array([])
431
- if mesh:
432
- fshape = tuple([0]*len(dims))
433
- else:
434
- fshape = (0,)
435
-
436
- # Get all frames and return if empty
437
- frames_sel = self.instances()
438
- if frames_sel == []:
439
-
440
- # Empty return values
441
- frames = np.array([]).reshape(fshape)
442
- rval = (frames,)
443
- if return_coords:
444
- rval += (vcoords, )
445
- if return_vals != ():
446
- rval += (values, )
447
- if len(rval)==1:
448
- return rval[0]
449
- else:
450
- return rval
451
-
452
- # Read values and sort
453
- filters = {**slice, **filters}
454
- values = [f[list(dims)+list(return_vals)+list(tuple(filters))+list(tuple(coords))] for f in frames_sel]
455
- fsort = sorted(range(len(values)), key=lambda k: values[k][:len(dims)])
456
- values = [values[i] for i in fsort]
457
-
458
- # Check dimensions
459
- cvalues = [v[:len(dims)] for v in values]
460
- cvalues = np.array(cvalues).T
461
- _check_if_ivals(cvalues)
462
-
463
- # Create array of frames.
464
- frames = np.empty(len(frames_sel), dtype=object)
465
- for i in range(len(fsort)):
466
- frames[i] = frames_sel[fsort[i]]
467
-
468
- # Filter values
469
- finds = _filter_values_ind(values, filters, coords, exclude=exclude)
470
- if finds.size==0:
471
- # Empty return values
472
- frames = np.array([]).reshape(fshape)
473
- rval = (frames,)
474
- if return_coords:
475
- rval += (vcoords, )
476
- if return_vals != ():
477
- rval += (np.array([]), )
478
- if len(rval)==1:
479
- return rval[0]
480
- else:
481
- return rval
482
- frames = frames[finds]
483
- values = _filter_values(values, filters, coords, exclude=exclude)
484
- cvalues = values[:len(dims),:]
485
- values = values[len(dims):,:]
486
-
487
- # If requested, mesh values
488
- if mesh:
489
- cmesh = _meshvals(cvalues)
490
- values = _meshdata(values, cvalues, cmesh)
491
- frames = _meshdata(frames.reshape((1,frames.size)), cvalues, cmesh)
492
- frames = frames[0,...]
493
- cvalues = cmesh
494
-
495
- # Create return values
496
- rval = (frames,)
497
- if return_coords:
498
- for i, tag in enumerate(dims):
499
- vcoords[tag] = cvalues[i,...]
500
- rval += (vcoords, )
501
- if return_vals != ():
502
- rval += (values, )
503
- if len(rval)==1:
504
- return rval[0]
505
- else:
506
- return rval
507
-
508
-
509
- def expand(self, coords={}, gridcoords={}): # gridcoords -> slice
510
-
511
- if coords != {}:
512
- pass
513
- elif gridcoords != {}:
514
- coords = _grid_to_coords(gridcoords)
515
- else:
516
- msg = 'Cannot expand without new coordinates'
517
- raise ValueError(msg)
518
-
519
- # If the series is not empty, first check that the new coordinates are valid.
520
- if not self.empty():
521
- current_coords = self.coords(tuple(coords))
522
- try:
523
- _concatenate_coords((current_coords, coords))
524
- except:
525
- msg = 'Cannot expand - the new coordinates overlap with existing coordinates.'
526
- raise ValueError(msg)
527
-
528
- # Expand the series to the new coordinates
529
- size = _coords_size(coords)
530
- for i in range(size):
531
- ds = self.init_dataset()
532
- for c in coords:
533
- ds.set_values(c, coords[c].ravel()[i])
534
- self.new_instance(ds)
535
-
536
-
537
- def set_coords(self, new_coords:dict, dims=(), slice={}, coords={}, **filters):
538
- """Set a dictionary of coordinates.
539
-
540
- Args:
541
- new_coords (dict): Dictionary of coordinates.
542
- dims (tuple, optional): Dimensions at which the new coordinates are to be set. If *dims* is not set, the dimensions are assumed to be the same as those of *new_coords*. Defaults to None.
543
-
544
- Raises:
545
- ValueError: if the coordinates provided are not properly formatted or have the wrong shape.
546
-
547
- See also:
548
- `coords`
549
- `set_gridcoords`
550
-
551
- Example:
552
-
553
- Create an empty series:
554
-
555
- >>> coords = {
556
- ... 'SliceLocation': np.array([0,1,2,0,1,2]),
557
- ... 'FlipAngle': np.array([2,2,2,10,10,10]),
558
- ... 'RepetitionTime': np.array([1,5,15,1,5,15]),
559
- ... }
560
- >>> series = db.empty_series(coords)
561
-
562
- Change the flip angle of 2 to 5:
563
-
564
- >>> coords = series.coords(tuple(coords))
565
- >>> fa = coords['FlipAngle']
566
- >>> fa[np.where(fa==2)] = 5
567
- >>> series.set_coords(coords)
568
-
569
- Check the new coordinates:
570
-
571
- >>> new_coords = series.coords(dims)
572
- >>> new_coords['FlipAngle']
573
- [5,10,5,10,5,10]
574
-
575
- Create a new set of coordinates along slice location and acquisition time:
576
-
577
- >>> new_coords = {
578
- ... 'SliceLocation': np.array([0,0,1,1,2,2]),
579
- ... 'AcquisitionTime': np.array([0,60,0,60,0,60]),
580
- ... }
581
- >>> series.set_coords(new_coords, ('SliceLocation', 'FlipAngle'))
582
-
583
- # Inspect the new coordinates - each slice now has two acquisition times corresponding to the flip angles:
584
-
585
- >>> coords['SliceLocation']
586
- [0,0,1,1,2,2]
587
- >>> coords['AcquisitionTime']
588
- [0,60,0,60,0,60]
589
- >>> coords['FlipAngle']
590
- [5,10,5,10,5,10]
591
-
592
- # Check that an error is raised if coordinate values have different sizes:
593
- >>> new_coords = {
594
- ... 'SliceLocation': np.zeros(24),
595
- ... 'AcquisitionTime': np.ones(25),
596
- ... }
597
- >>> series.set_coords(new_coords, dims)
598
- ValueError: Coordinate values must all have the same size
599
-
600
- # An error is also raised if they have all the same size but the values are not unique:
601
-
602
- >>> new_coords = {
603
- ... 'SliceLocation': np.zeros(24),
604
- ... 'AcquisitionTime': np.ones(24),
605
- ... }
606
- >>> series.set_coords(new_coords, dims)
607
- ValueError: These are not proper coordinates. Coordinate values must be unique.
608
-
609
- # .. or when the number does not match up with the size of the series:
610
-
611
- >>> new_coords = {
612
- ... 'SliceLocation': np.arange(25),
613
- ... 'AcquisitionTime': np.arange(25),
614
- ... }
615
- >>> series.set_coords(new_coords, dims)
616
- ValueError: Shape of coordinates does not match up with the size of the series.
617
-
618
- """
619
- if dims == ():
620
- dims = tuple(new_coords)
621
- elif np.isscalar(dims):
622
- dims = (dims,)
623
- new_coords = _check_if_coords(new_coords)
624
- frames = self.frames(dims, slice=slice, coords=coords, **filters)
625
- if frames.size == 0:
626
- # If the series is empty, assignment of coords is unambiguous
627
- self.expand(new_coords)
628
- else:
629
- size = _coords_size(new_coords)
630
- if size != frames.size:
631
- msg = 'Cannot set ' + str(size) + ' coordinates in ' + str(frames.size) + ' frames.'
632
- msg += '\nThe number of new coordinates must equal the number of frames.'
633
- raise ValueError(msg)
634
- # If setting a subset, check if the new set of coordinates is valid
635
- if len({**slice, **coords, **filters}) > 0:
636
- complement = self.coords(dims, slice=slice, coords=coords, exclude=True, **filters)
637
- if _coords_size(complement) > 0:
638
- try:
639
- _concatenate_coords((new_coords, complement))
640
- except:
641
- msg = 'Cannot set coordinates - this would produce invalid coordinates for the series'
642
- raise ValueError(msg)
643
- frames = frames.flatten()
644
- values = _coords_vals(new_coords)
645
- for f, frame in enumerate(frames):
646
- frame[list(new_coords)] = list(values[:,f])
647
-
648
-
649
- def set_values(self, values, tags, dims=('InstanceNumber', ), slice={}, coords={}, **filters):
650
- # Note tags, values is a more logical order considering we have self.values(tags)
651
- """Set the values of an attribute.
652
-
653
- Args:
654
- tag: either a keyword string or a (group, element) tag of a DICOM data element.
655
- value: a single value or a numpy array of values for the attribute.
656
- dims (tuple, optional): Dimensions of *value*. If *value* is a single value, *dims* is ignored. Otherwise, if *dims* is not provided, values are ordered by instance number. Defaults to None.
657
-
658
- Raises:
659
- ValueError: if the size of *value* does not match the size of the series.
660
-
661
- See also:
662
- `value`
663
-
664
- Example:
665
-
666
- Create a zero-filled series with 3 slice dimensions.
667
-
668
- >>> loc = np.arange(4)
669
- >>> fa = [2, 15, 30]
670
- >>> tr = [2.5, 5.0]
671
- >>> coords = {
672
- ... 'SliceLocation': np.arange(4),
673
- ... 'FlipAngle': [2, 15, 30],
674
- ... 'RepetitionTime': [2.5, 5.0] }
675
- >>> series = db.zeros((128,128,8,3,2), coords)
676
-
677
- Change the acquisition time of the series to midnight (0 sec):
678
-
679
- >>> series.value('AcquisitionTime')
680
- 28609.057496
681
- >>> series.set_value('AcquisitionTime', 0)
682
- >>> series.value('AcquisitionTime')
683
- 0
684
-
685
- Set the acquisition time to a different value for each flip angle:
686
-
687
- >>> tacq = np.repeat(60*np.arange(3), 8)
688
- >>> series.set_value('AcquisitionTime', tacq, dims=('FlipAngle','InstanceNumber'))
689
-
690
- Set the acquisition time to a different value for each flip angle and acquisition time:
691
-
692
- >>> tacq = np.repeat(60*np.arange(6), 4)
693
- >>> series.set_value('AcquisitionTime', tacq, dims=('FlipAngle','RepetitionTime','SliceLocation'))
694
-
695
- Note: the size of the value and of the series need to match up. If not, an error is raised:
696
-
697
- >>> series.set_value('AcquisitionTime', np.arange(25), dims=tuple(coords))
698
- ValueError: The size of the value array is different from the size of the series.
699
- The value array has shape (25,), but the series has shape (4, 3).
700
-
701
- """
702
-
703
- if np.isscalar(dims):
704
- dims = (dims,)
705
-
706
- if not isinstance(values, tuple):
707
- self.set_values((values,), (tags,), dims=dims, slice=slice, coords=coords, **filters)
708
- return
709
-
710
- # Get frames to set:
711
- frames = self.frames(dims, mesh=False, slice=slice, coords=coords, **filters)
712
- if frames.size == 0:
713
- msg = 'Cannot set values to an empty series. Use Series.expand() to create empty frames first.'
714
- raise ValueError(msg)
715
-
716
- # Check that values all have the proper format:
717
- values = list(values)
718
- for i, v in enumerate(values):
719
- #if not isinstance(v, np.ndarray):
720
- # values[i] = np.full(frames.shape, v)
721
- if isinstance(v, np.ndarray):
722
- if values[i].size != frames.size:
723
- msg = 'Cannot set values: number of values does not match number of frames.'
724
- raise ValueError(msg)
725
- values[i] = values[i].ravel()
726
-
727
- # Set values
728
- for f, frame in enumerate(frames):
729
- self.progress(f+1, frames.size, 'Writing values..')
730
- frame[list(tags)] = [v if np.isscalar(v) else v[f] for v in values]
731
- #frame[list(tags)] = [v[f] for v in values]
732
-
733
-
734
- def set_gridcoords(self, gridcoords:dict, dims=(), slice={}, coords={}, **filters):
735
- """ Set a dictionary of grid coordinates.
736
-
737
- Args:
738
- gridcoords (dict): dictionary of grid coordinates
739
- dims (tuple, optional): Dimensions at which the new coordinates are to be set. If *dims* is not set, the dimensions are assumed to be the same as those of *gridcoords*. Defaults to None.
740
-
741
- See also:
742
- `gridcoords`
743
- `set_coords`
744
-
745
- Examples:
746
-
747
- Create an empty series with 3 slice dimensions:
748
-
749
- >>> gridcoords = {
750
- ... 'SliceLocation': np.arange(4),
751
- ... 'FlipAngle': np.array([2, 15, 30]),
752
- ... 'RepetitionTime': np.array([2.5, 5.0]),
753
- ... }
754
- >>> series = db.empty_series()
755
- >>> series.set_gridcoords(gridcoords)
756
-
757
- Get the coordinates as a mesh
758
-
759
- >>> dims = tuple(gridcoords)
760
- >>> coords = series.meshcoords(dims)
761
- >>> coords['SliceLocation'].shape
762
- (4, 3, 2)
763
- >>> coords['FlipAngle'][1,1,1]
764
- 15
765
- """
766
- setcoords = _grid_to_coords(gridcoords)
767
- self.set_coords(setcoords, dims=dims, slice=slice, coords=coords, **filters)
768
-
769
-
770
- def gridcoords(self, dims=('InstanceNumber', ), slice={}, coords={}, exclude=False, **filters)->dict:
771
- """return a dictionary of grid coordinates.
772
-
773
- Args:
774
- dims (tuple): Attributes to be used as coordinates.
775
-
776
- Returns:
777
- dict: dictionary of coordinates, one entry for each dimension.
778
-
779
- See also:
780
- `coords`
781
- `set_gridcoords`
782
-
783
- Examples:
784
-
785
- Create an empty series with 3 slice dimensions:
786
-
787
- >>> gridcoords = {
788
- ... 'SliceLocation': np.arange(4),
789
- ... 'FlipAngle': np.array([2, 15, 30]),
790
- ... 'RepetitionTime': np.array([2.5, 5.0]),
791
- ... }
792
- >>> series = db.empty_series(gridcoords=gridcoords)
793
-
794
- Recover the grid coordinates:
795
-
796
- >>> coords_rec = series.gridcoords(tuple(gridcoords))
797
- >>> coords_rec['SliceLocation']
798
- [0. 1. 2. 3.]
799
- >>> coords_rec['FlipAngle']
800
- [ 2. 15. 30.]
801
- >>> coords_rec['RepetitionTime']
802
- [2.5 5. ]
803
-
804
- Note an error is raised if the coordinates are not grid coordinates:
805
-
806
- >>> coords = {
807
- ... 'SliceLocation': np.array([0,1,2,0,1,2]),
808
- ... 'FlipAngle': np.array([10,10,10,2,2,2]),
809
- ... 'RepetitionTime': np.array([1,5,15,1,5,15]),
810
- ... }
811
- >>> series = db.empty_series(coords)
812
-
813
- The coordinates form a proper mesh, so this works fine:
814
-
815
- >>> coords = series.meshcoords(tuple(coords))
816
-
817
- But this raises an error:
818
-
819
- >>> series.gridcoords(tuple(coords))
820
- ValueError: These are not grid coordinates.
821
- """
822
- meshcoords = self.coords(dims=dims, mesh=True, slice=slice, coords=coords, exclude=exclude, **filters)
823
- return _meshcoords_to_grid(meshcoords)
824
-
825
-
826
- def shape(self, dims=('InstanceNumber', ), mesh=True, slice={}, coords={}, exclude=False, **filters)->tuple:
827
- """Return the shape of the series along given dimensions.
828
-
829
- Args:
830
- dims (tuple, optional): Dimensions along which the shape is to be determined. If dims is not provided, the shape of the flattened series is returned. Defaults to None.
831
-
832
- Returns:
833
- tuple: one value for each element of dims.
834
-
835
- Raises:
836
- ValueError: if the shape in the specified dimensions is ambiguous (because the number of slices is not unique at each location)
837
- ValueError: if the shape in the specified dimensions is not well defined (because there is no slice at one or more locations).
838
-
839
- See also:
840
- `coords`
841
- `gridcoords`
842
- `spacing`
843
-
844
- Example:
845
-
846
- Create a zero-filled series with 3 dimensions.
847
-
848
- >>> coords = {
849
- >>> 'SliceLocation': np.arange(4),
850
- >>> 'FlipAngle': [2, 15, 30],
851
- >>> 'RepetitionTime': [2.5, 5.0] }
852
- >>> series = db.zeros((128,128,4,3,2), coords)
853
-
854
- Check the shape of a flattened series:
855
- >>> series.shape()
856
- (24,)
857
-
858
- Check the shape along all 3 dimensions:
859
-
860
- >>> dims = tuple(coords)
861
- >>> series.shape(dims)
862
- (4, 3, 2)
863
-
864
- Swap the first two dimensions:
865
-
866
- >>> series.shape((dims[1], dims[0], dims[2]))
867
- (3, 4, 2)
868
-
869
- Determine the shape along another DICOM attribute:
870
-
871
- >>> series.shape(('FlipAngle', 'InstanceNumber'))
872
- (3, 8)
873
-
874
- The shape of an empty series is zero along any dimension:
875
-
876
- >>> series.new_sibling().shape(dims)
877
- (0, 0, 0)
878
-
879
- If one or more of the dimensions is not defined in the header, this raises an error:
880
-
881
- >>> series.shape(('FlipAngle', 'Gobbledigook'))
882
- ValueError: series shape is not well defined in dimensions (FlipAngle, Gobbledigook, )
883
- --> Some of the dimensions are not defined in the header.
884
- --> Hint: use Series.value() to find the undefined values.
885
-
886
- An error is also raised if the values are defined, but are not unique. In this case, all acquisition times are the same so this raises an error:
887
-
888
- >>> series.shape(('FlipAngle', 'AcquisitionTime'))
889
- ValueError: series shape is ambiguous in dimensions (FlipAngle, AcquisitionTime, )
890
- --> Multiple slices exist at some or all locations.
891
- --> Hint: use Series.unique() to list the values at all locations.
892
-
893
- """
894
- frames = self.frames(dims=dims, mesh=mesh, slice=slice, coords=coords, exclude=exclude, **filters)
895
- return frames.shape
896
-
897
-
898
- def unique(self, *tags, sortby=(), slice={}, coords={}, exclude=False, return_locs=False, **filters) -> np.ndarray:
899
- """Return the unique values of an attribute, sorted by any number of variables.
900
-
901
- Args:
902
- tag: either a keyword string or a (group, element) tag of a DICOM data element.
903
- sortby (tuple, optional): Dimensions of the resulting array. If *sortby* is not provided, then an array of unique values is returned.
904
-
905
- Returns:
906
- np.ndarray: a sorted array of unique values of the attribute, with dimensions as specified by *dims*. If *dims* is provided, the result has the dimensions of *dims* and each element of the array is an array unique values.
907
-
908
- See also:
909
- `value`
910
- `unique_affines`
911
- `coords`
912
- `gridcoords`
913
-
914
- Example:
915
- Create a zero-filled series with 3 slice dimensions:
916
-
917
- >>> loc = np.arange(4)
918
- >>> fa = [2, 15, 30]
919
- >>> tr = [2.5, 5.0]
920
- >>> coords = {
921
- ... 'SliceLocation': np.arange(4),
922
- ... 'FlipAngle': [2, 15, 30],
923
- ... 'RepetitionTime': [2.5, 5.0] }
924
- >>> series = db.zeros((128,128,8,3,2), coords)
925
-
926
- Recover the unique values of any coordinate, such as the flip angle:
927
-
928
- >>> series.unique('FlipAngle')
929
- [ 2. 15. 30.]
930
-
931
- List the flip angles for each slice location separately:
932
-
933
- >>> fa = series.unique('FlipAngle', sortby=('SliceLocation', ))
934
- >>> fa[0]
935
- [ 2. 15. 30.]
936
- >>> fa[3]
937
- [ 2. 15. 30.]
938
-
939
- List the flip angles for each slice location and repetition time:
940
-
941
- >>> fa = series.unique('FlipAngle', sortby=('SliceLocation', 'RepetitionTime'))
942
- >>> fa.shape
943
- (4, 2)
944
- >>> fa[1,1]
945
- [ 2. 15. 30.]
946
-
947
- Getting the values for a non-existing attribute produces an empty array:
948
-
949
- >>> gbbl = series.unique('Gobbledigook')
950
- >>> gbbl.size
951
- 0
952
- >>> gbbl.shape
953
- (0,)
954
-
955
- Getting a non-existing attribute for each slice location produces an array of the expected shape, where each element is an empty array:
956
-
957
- >>> gbbl = series.unique('Gobbledigook', sortby=('SliceLocation',))
958
- >>> gbbl.shape
959
- (4,)
960
- >>> gbbl.size
961
- 4
962
- >>> gbbl[-1].size
963
- 0
964
- """
965
- # If no sorting is required, return an array of unique values
966
-
967
- vals = self.values(*(tags+sortby), slice=slice, coords=coords, exclude=exclude, **filters)
968
-
969
- if sortby == ():
970
- if len(tags) == 1:
971
- uv = vals[vals != np.array(None)]
972
- return np.unique(uv)
973
- uvals = []
974
- for v in vals:
975
- uv = v[v != np.array(None)]
976
- uvals.append(np.unique(uv))
977
- return tuple(uvals)
978
-
979
- # Create a flat location array
980
- loc = []
981
- for k in range(len(sortby)):
982
- v = vals[len(tags)+k]
983
- v = v[v != np.array(None)]
984
- loc.append(np.unique(v))
985
- loc = np.meshgrid(*tuple(loc), indexing='ij')
986
- shape = loc[0].shape
987
- loc = [l.ravel() for l in loc]
988
-
989
- # Build an array of unique values at each location and each tag
990
- uvals = np.empty((len(tags), loc[0].size), dtype=np.ndarray)
991
- for i in range(loc[0].size):
992
- k = 0
993
- ind = vals[len(tags)+k] == loc[k][i]
994
- for k in range(1, len(sortby)):
995
- ind = ind & (vals[len(tags)+k] == loc[k][i])
996
- for t in range(len(tags)):
997
- vti = vals[t][ind]
998
- vti = vti[vti != np.array(None)]
999
- uvals[t,i] = np.unique(vti)
1000
-
1001
- # Refactor to return values
1002
- if len(tags) == 1:
1003
- uvals = uvals[0,:].reshape(shape)
1004
- else:
1005
- uvals = [uvals[t,:].reshape(shape) for t in range(len(tags))]
1006
- uvals = tuple(uvals)
1007
- if return_locs:
1008
- loc = [l.reshape(shape) for l in loc]
1009
- loc = tuple(loc)
1010
- return uvals, loc
1011
- else:
1012
- return uvals
1013
-
1014
-
1015
- def pixel_values(self, dims=('InstanceNumber', ), return_coords=False, slice={}, coords={}, **filters) -> np.ndarray:
1016
- """Return a numpy.ndarray with pixel data.
1017
-
1018
- Args:
1019
- dims (tuple, optional): Dimensions of the result, as a tuple of valid DICOM tags of any length. If *dims* is not provided, pixel values are ordered by instance number. Defaults to None.
1020
- inds (dict, optional): Dictionary with indices to retrieve a slice of the entire array. Defaults to None.
1021
- select (dict, optional): A dictionary of values for DICOM attributes to filter the result. By default the data are not filtered.
1022
- filters (dict, optional): keyword arguments to filter the data by value of DICOM attributes.
1023
-
1024
- Returns:
1025
- np.ndarray: pixel data. The number of dimensions will be 2 plus the number of elements in *dim*. The first two indices will enumerate (column, row) indices in the slice, the other dimensions are as specified by the *dims* argument.
1026
-
1027
- The function returns an empty array when no data are found at the specified locations.
1028
-
1029
- Raises:
1030
- ValueError: Indices must be in the dimensions provided. If *ind* is set but keys are not part of *dims*.
1031
- ValueError: if the images are different shapes.
1032
-
1033
- See also:
1034
- `set_pixel_values`
1035
-
1036
- Example:
1037
- Create a zero-filled array with 3 slice dimensions:
1038
-
1039
- >>> coords = {
1040
- ... 'SliceLocation': 10*np.arange(4),
1041
- ... 'FlipAngle': np.array([2, 15, 30]),
1042
- ... 'RepetitionTime': np.array([2.5, 5.0]),
1043
- ... }
1044
- >>> zeros = db.zeros((128,64,4,3,2), coords)
1045
-
1046
- Retrieve the pixel array of the series:
1047
-
1048
- >>> dims = tuple(coords)
1049
- >>> array = zeros.pixel_values(dims)
1050
- >>> array.shape
1051
- (128, 64, 4, 3, 2)
1052
-
1053
- To retrieve an array containing only the data with flip angle 15:
1054
-
1055
- >>> array = zeros.pixel_values(dims, FlipAngle=15)
1056
- >>> array.shape
1057
- (128, 64, 4, 1, 2)
1058
-
1059
- If no data fit the requirement, an empty array is returned:
1060
-
1061
- >>> array = zeros.pixel_values(dims, FlipAngle=0)
1062
- >>> array.size
1063
- 0
1064
-
1065
- Multiple possible values can be specified as an array:
1066
-
1067
- >>> array = zeros.pixel_values(dims, FlipAngle=np.array([15,30]))
1068
- >>> array.shape
1069
- (128, 64, 4, 2, 2)
1070
-
1071
- And multiple filters can be specified by adding keyword arguments. The following returns an array of pixel values with flip angle of 15 or 30, and slice location of 10 or 20:
1072
-
1073
- >>> array = zeros.pixel_values(dims, FlipAngle=np.array([15,30]), SliceLocation=np.array([10,20]))
1074
- >>> array.shape
1075
- (128, 64, 2, 2, 2)
1076
-
1077
- The filters can be any DICOM attribute:
1078
-
1079
- >>> array = zeros.pixel_values(dims, AcquisitionTime=0)
1080
- >>> array.size
1081
- 0
1082
-
1083
- The filters can also be specified as a dictionary of values:
1084
-
1085
- >>> array = zeros.pixel_values(dims, select={'FlipAngle': 15})
1086
- >>> array.shape
1087
- (128, 64, 4, 1, 2)
1088
-
1089
- Since keywords need to be strings in python, this is the only way to specify filters with (group, element) tags:
1090
-
1091
- >>> array = zeros.pixel_values(dims, select={(0x0018, 0x1314): 15})
1092
- >>> array.shape
1093
- (128, 64, 4, 1, 2)
1094
-
1095
- Using the *inds* argument, the pixel array can be indexed to avoid reading a large array if only a subarray is required:
1096
-
1097
- >>> array = zeros.pixel_values(dims, inds={'FlipAngle': 1})
1098
- >>> array.shape
1099
- (128, 64, 4, 1, 2)
1100
-
1101
- Note that unlike filters defined by *value*, the indices must be provided in the dimensions of the array. If not, a `ValueError` is raised:
1102
-
1103
- >>> zeros.pixel_values(dims, inds={'AcquisitionTime':0})
1104
- ValueError: Indices must be in the dimensions provided.
1105
- """
1106
- if np.isscalar(dims):
1107
- dims = (dims,)
1108
- frames = self.frames(dims, return_coords=return_coords, slice=slice, coords=coords, **filters)
1109
- if return_coords:
1110
- frames, fcoords = frames
1111
- if frames.size == 0:
1112
- shape = (0,0) + frames.shape
1113
- values = np.array([]).reshape(shape)
1114
- if return_coords:
1115
- return values, fcoords
1116
- else:
1117
- return values
1118
-
1119
- # Read values
1120
- fshape = frames.shape
1121
- frames = frames.ravel()
1122
- values = []
1123
- for f, frame in enumerate(frames):
1124
- self.progress(f+1, len(frames), 'Reading pixel values..')
1125
- values.append(frame.get_pixel_array())
1126
-
1127
- # Check that all matrix sizes are the same
1128
- vshape = np.array([v.shape for v in values])
1129
- vshape = np.unique(vshape.T, axis=1)
1130
- if vshape.shape[1] > 1:
1131
- msg = 'Cannot extract an array of pixel values - not all frames have the same matrix size.'
1132
- raise ValueError(msg)
1133
-
1134
- # Create the array
1135
- values = np.stack(values, axis=-1)
1136
- values = values.reshape(values.shape[:2] + fshape)
1137
- if return_coords:
1138
- return values, fcoords
1139
- else:
1140
- return values
1141
-
1142
-
1143
- def set_pixel_values(self, values:np.ndarray, dims:tuple=None, slice={}, coords={}, **filters):
1144
- """Set a numpy.ndarray with pixel data.
1145
-
1146
- Args:
1147
- dims (tuple, optional): Dimensions of the pixel values, as a tuple of valid DICOM tags of any length. If *dims* is not provided, pixel values are ordered by instance number. Defaults to None.
1148
- inds (dict, optional): Dictionary with indices to set a slice of the entire array. Defaults to None.
1149
- select (dict, optional): A dictionary of values for DICOM attributes to set specific frames.
1150
- filters (dict, optional): keyword arguments to set specific frames.
1151
-
1152
- Raises:
1153
- ValueError: if the values are the incorrect shape for the dimensions.
1154
-
1155
- See also:
1156
- `pixel_values`
1157
-
1158
- Example:
1159
- Create a zero-filled array with 3 slice dimensions:
1160
-
1161
- >>> coords = {
1162
- ... 'SliceLocation': 10*np.arange(4),
1163
- ... 'FlipAngle': np.array([2, 15, 30]),
1164
- ... 'RepetitionTime': np.array([2.5, 5.0]),
1165
- ... }
1166
- >>> zeros = db.zeros((128,64,4,3,2), coords)
1167
- """
1168
- if dims is None:
1169
- if slice != {}:
1170
- dims = tuple(slice)
1171
- elif coords != {}:
1172
- dims = tuple(coords)
1173
- else:
1174
- dims = ('InstanceNumber', )
1175
- elif np.isscalar(dims):
1176
- dims = (dims,)
1177
- # Get frames to set:
1178
- frames = self.frames(dims, slice=slice, coords=coords, **filters)
1179
- if frames.size == 0:
1180
- if slice != {}:
1181
- self.expand(gridcoords=slice)
1182
- frames = self.frames(dims)
1183
- else:
1184
- msg = 'Cannot set values to an empty series. Use Series.expand() to create empty frames first, or set the slice keyword to define coordinates for the new frames.'
1185
- raise ValueError(msg)
1186
-
1187
- if np.prod(values.shape[2:]) != frames.size:
1188
- msg = 'The size of the pixel value array is different from the size of the series.'
1189
- msg += '\nThe pixel array has shape ' + str(values.shape[2:]) + ', '
1190
- msg += 'but the series has shape ' + str(frames.shape) + '.'
1191
- raise ValueError(msg)
1192
- frames = frames.ravel()
1193
- values = values.reshape(values.shape[:2] + (-1,))
1194
- for f, frame in enumerate(frames):
1195
- self.progress(f+1, frames.size, 'Writing pixel values..')
1196
- frame.set_pixel_array(values[:,:,f])
1197
-
1198
- def volume(self):
1199
- return self.volumes(stack=True)
1200
-
1201
- def volumes(self, dims='SliceLocation', mesh=True, stack=False):
1202
- """Return vreg volumes for each frame, or stacked"""
1203
-
1204
- frames = self.frames(dims, mesh=mesh)
1205
- vols = [f.volume() for f in frames.reshape(-1)]
1206
- vols = np.asarray(vols).reshape(frames.shape)
1207
- if not stack:
1208
- return vols
1209
- shape = vols.shape
1210
- vols = vols.reshape((shape[0],-1))
1211
- vols_stack = []
1212
- for k in range(vols.shape[1]):
1213
- vstack = vreg.concatenate(vols[:,k], prec=3)
1214
- vols_stack.append(vstack)
1215
- if len(shape) == 1:
1216
- return vols_stack[0]
1217
- else:
1218
- return np.asarray(vols_stack).reshape(shape[1:])
1219
-
1220
-
1221
- def set_volumes(self, volumes, dims='SliceLocation', mesh=True):
1222
-
1223
- # Convert affines to arrays if needed
1224
- if isinstance(volumes, list):
1225
- volumes = np.array(volumes)
1226
-
1227
- # Get frames
1228
- frames = self.frames(dims, mesh=mesh)
1229
-
1230
- # One affine for each frame
1231
- if volumes.shape == frames.shape:
1232
- volumes = volumes.reshape(-1)
1233
- for i, f in enumerate(frames.reshape(-1)):
1234
- self.progress(i, frames.size, 'Setting volumes.. ')
1235
- f.set_volume(volumes[i])
1236
-
1237
- # Different number of affines and frames
1238
- else:
1239
- # A volumetric series
1240
- if frames.ndim==1:
1241
- volumes = volumes.reshape(-1)
1242
- if volumes.size > 1:
1243
- raise ValueError(
1244
- "Cannot set volumes. A volume can only "
1245
- "have one element.")
1246
- volumes = volumes[0].split(frames.size)
1247
- for z, f in enumerate(frames):
1248
- self.progress(z+1, frames.size, 'Setting volumes.. ')
1249
- f.set_volume(volumes[z])
1250
-
1251
- # Multislice affine replicated across all times
1252
- elif volumes.size == frames.shape[0]:
1253
- frames = frames.reshape((frames.shape[0],-1))
1254
- volumes = volumes.reshape(-1)
1255
- nz, nt = frames.shape
1256
- cnt=0
1257
- for z in range(nz):
1258
- for t in range(nt):
1259
- cnt+=1
1260
- self.progress(cnt, nt*nz, 'Setting volumes.. ')
1261
- frames[z,t].set_volume(volumes[z])
1262
-
1263
- # One volume replicated across all times
1264
- elif volumes.size==1:
1265
- frames = frames.reshape((frames.shape[0],-1))
1266
- nz, nt = frames.shape
1267
- volumes = volumes[0].split(nz)
1268
- cnt=0
1269
- for z in range(nz):
1270
- for t in range(nt):
1271
- cnt+=1
1272
- self.progress(cnt, nt*nz, 'Setting volumes.. ')
1273
- frames[z,t].set_volume(volumes[z])
1274
-
1275
- # Volume for each time point
1276
- elif volumes.shape == frames.shape[1:]:
1277
- frames = frames.reshape((frames.shape[0],-1))
1278
- volumes = volumes.reshape(-1)
1279
- nz, nt = frames.shape
1280
- cnt=0
1281
- for t in range(nt):
1282
- volumes_t = volumes[t].split(nz)
1283
- for z, f in enumerate(frames[:,t]):
1284
- cnt+=1
1285
- self.progress(cnt, nt*nz, 'Setting volumes.. ')
1286
- f.set_volume(volumes_t[z])
1287
-
1288
- # Incompatible shapes
1289
- else:
1290
- raise ValueError(
1291
- "Cannot set volumes. The volume array has an incompatible "
1292
- "shape or size.")
1293
- return self
1294
-
1295
-
1296
- def affines(self, dims='SliceLocation', mesh=True, stack=False):
1297
- """Return affines for each frame"""
1298
-
1299
- frames = self.frames(dims, mesh=mesh)
1300
- affines = [f.affine() for f in frames.reshape(-1)]
1301
- affines = np.asarray(affines).reshape(frames.shape)
1302
- if not stack:
1303
- return affines
1304
- shape = affines.shape
1305
- affines = affines.reshape((shape[0],-1))
1306
- nt = affines.shape[1]
1307
- affines_stack = np.empty(nt, dtype=np.ndarray)
1308
- for t in range(nt):
1309
- affines_stack[t] = image_utils.stack_affines(affines[:,t])
1310
- if len(shape)==1:
1311
- return affines_stack[0]
1312
- else:
1313
- return affines_stack.reshape(shape[1:])
1314
-
1315
- def set_affines(self, affines, dims='SliceLocation', mesh=True):
1316
-
1317
- # Convert affines to arrays if needed
1318
- if isinstance(affines, np.ndarray):
1319
- aff = np.empty(1, dtype=np.ndarray)
1320
- aff[0] = affines
1321
- affines = aff
1322
- elif isinstance(affines, list):
1323
- aff = np.empty(len(affines), dtype=np.ndarray)
1324
- for i, a in enumerate(affines):
1325
- aff[i] = a
1326
- affines = aff
1327
-
1328
- # Get frames
1329
- frames = self.frames(dims, mesh=mesh)
1330
-
1331
- # One affine for each frame
1332
- if affines.shape == frames.shape:
1333
- affines = affines.reshape(-1)
1334
- for i, f in enumerate(frames.reshape(-1)):
1335
- self.progress(i, frames.size, 'Setting affines.. ')
1336
- f.set_affine(affines[i])
1337
-
1338
- # Different number of affines and frames
1339
- else:
1340
- # A volumetric series
1341
- if frames.ndim==1:
1342
- affines = affines.reshape(-1)
1343
- if affines.size > 1:
1344
- raise ValueError(
1345
- "Cannot set affines. A volumetric affine can only "
1346
- "have one element.")
1347
- affines = image_utils.unstack_affine(affines[0], frames.shape[0])
1348
- for z, f in enumerate(frames):
1349
- self.progress(z+1, frames.size, 'Setting affines.. ')
1350
- f.set_affine(affines[z])
1351
-
1352
- # Multislice affine replicated across all times
1353
- elif affines.size == frames.shape[0]:
1354
- frames = frames.reshape((frames.shape[0],-1))
1355
- affines = affines.reshape(-1)
1356
- nz, nt = frames.shape
1357
- cnt=0
1358
- for z in range(nz):
1359
- for t in range(nt):
1360
- cnt+=1
1361
- self.progress(cnt, nt*nz, 'Setting affines.. ')
1362
- frames[z,t].set_affine(affines[z])
1363
-
1364
- # One volume affine replicated across all times
1365
- elif affines.size==1:
1366
- frames = frames.reshape((frames.shape[0],-1))
1367
- nz, nt = frames.shape
1368
- affines = image_utils.unstack_affine(affines[0], nz)
1369
- cnt=0
1370
- for z in range(nz):
1371
- for t in range(nt):
1372
- cnt+=1
1373
- self.progress(cnt, nt*nz, 'Setting affines.. ')
1374
- frames[z,t].set_affine(affines[z])
1375
-
1376
- # Volume affine for each time point
1377
- elif affines.shape == frames.shape[1:]:
1378
- frames = frames.reshape((frames.shape[0],-1))
1379
- affines = affines.reshape(-1)
1380
- nz, nt = frames.shape
1381
- cnt=0
1382
- for t in range(nt):
1383
- affines_t = image_utils.unstack_affine(affines[t], nz)
1384
- for z, f in enumerate(frames[:,t]):
1385
- cnt+=1
1386
- self.progress(cnt, nt*nz, 'Setting affines.. ')
1387
- f.set_affine(affines_t[z])
1388
-
1389
- # Incompatible shapes
1390
- else:
1391
- raise ValueError(
1392
- "Cannot set affines. The affine array has an incompatible "
1393
- "shape or size.")
1394
- return self
1395
-
1396
-
1397
- # TODO: make obsolete (ignores dimensions or multi-volume series)
1398
- def affine(self, slice={}, coords={}, **filters) -> np.ndarray:
1399
- """Return the affine of the Series.
1400
-
1401
- Raises:
1402
- ValueError: if the DICOM file is corrupted
1403
- ValueError: if the affine is not unique.
1404
-
1405
- Returns:
1406
- np.ndarray: affine matrix as a 4x4 numpy array.
1407
-
1408
- See also:
1409
- `set_affine`
1410
- `unique_affines`
1411
-
1412
- Example:
1413
- Check that the default affine is the identity:
1414
-
1415
- >>> zeros = db.zeros((128,128,10))
1416
- >>> zeros.affine()
1417
- [[1., 0., 0., 0.],
1418
- [0., 1., 0., 0.],
1419
- [0., 0., 1., 0.],
1420
- [0., 0., 0., 1.]]
1421
- """
1422
-
1423
- # Read values
1424
- tags = ('ImageOrientationPatient', 'ImagePositionPatient', 'PixelSpacing', 'SliceThickness', )
1425
- orientation, pos, spacing, thick = self.values(*tags, slice=slice, coords=coords, **filters)
1426
-
1427
- # Single slice
1428
- if len(pos) == 1:
1429
- return image_utils.affine_matrix(orientation[0], pos[0], spacing[0], thick[0])
1430
-
1431
- # Multiple orientations - raise error
1432
- orientation = np.unique(orientation)
1433
- if len(orientation) > 1:
1434
- msg = 'The series has multiple affines. '
1435
- msg += '\nUse Series.unique_affines() to return an array of unique affines.'
1436
- raise ValueError(msg)
1437
- orientation = orientation[0]
1438
-
1439
- # Multiple pixel spacings - raise error
1440
- spacing = np.unique(spacing)
1441
- if len(spacing) > 1:
1442
- msg = 'The series has multiple pixel spacings. '
1443
- msg += '\nAffine array of the series is not well defined.'
1444
- raise ValueError(msg)
1445
- spacing = spacing[0]
1446
-
1447
- # All the same slice locations
1448
- upos = np.unique(pos)
1449
- if len(upos) == 1:
1450
- return image_utils.affine_matrix(orientation, pos[0], spacing, thick[0])
1451
-
1452
- # Different slice locations but not all different - raise error
1453
- if len(upos) != len(pos):
1454
- msg = 'Some frames have the same ImagePositionPatient. '
1455
- msg += '\nAffine matrix of the series is not well defined.'
1456
- raise ValueError(msg)
1457
-
1458
- return image_utils.affine_matrix_multislice(orientation, pos, spacing)
1459
-
1460
- # TODO: make obsolete - does not handle dimensions or multislice vs volume
1461
- def set_affine(self, affine:np.ndarray, dims=('InstanceNumber',), slice={}, coords={}, multislice=False, **filters):
1462
- """Set the affine matrix of a series.
1463
-
1464
- The affine is defined as a 4x4 numpy array with bottom row [0,0,0,1]. The final column represents the position of the top right hand corner of the first slice. The first three columns represent rotation and scaling with respect to the axes of the reference frame.
1465
-
1466
- Args:
1467
- affine (numpy.ndarray): 4x4 numpy array
1468
-
1469
- Raises:
1470
- ValueError: if the series is empty. The information of the affine matrix is stored in the header and can not be stored in an empty series.
1471
-
1472
- See also:
1473
- `affine`
1474
- `unique_affines`
1475
-
1476
- Example:
1477
- Create a series with unit affine array:
1478
-
1479
- >>> zeros = db.zeros((128,128,10))
1480
- >>> zeros.affine()
1481
- [[1., 0., 0., 0.],
1482
- [0., 1., 0., 0.],
1483
- [0., 0., 1., 0.],
1484
- [0., 0., 0., 1.]]
1485
-
1486
- Rotate the volume over 90 degrees in the xy-plane:
1487
-
1488
- >>> affine = np.array([
1489
- ... [0., -1., 0., 0.],
1490
- ... [1., 0., 0., 0.],
1491
- ... [0., 0., 1., 0.],
1492
- ... [0., 0., 0., 1.],
1493
- ... ])
1494
- >>> zeros.set_affine(affine)
1495
-
1496
- Apart from the rotation, also change the resolution to (3mm, 3mm, 1.5mm):
1497
-
1498
- >>> affine = np.array([
1499
- ... [0., -3., 0., 0.],
1500
- ... [3., 0., 0., 0.],
1501
- ... [0., 0., 1.5, 0.],
1502
- ... [0., 0., 0., 1.],
1503
- ... ])
1504
- >>> zeros.set_affine(affine)
1505
-
1506
- Now rotate, change resolution, and shift the top right hand corner of the lowest slice to position (-30mm, 20mm, 120mm):
1507
-
1508
- >>> affine = np.array([
1509
- ... [0., -3., 0., -30.],
1510
- ... [3., 0., 0., 20.],
1511
- ... [0., 0., 1.5, 120.],
1512
- ... [0., 0., 0., 1.],
1513
- ... ])
1514
- >>> zeros.set_affine(affine)
1515
-
1516
- Note: changing the affine will affect multiple DICOM tags, such as slice location and image positions:
1517
-
1518
- >>> zeros.SliceLocation
1519
- [120.0, 121.5, 123.0, 124.5, 126.0, 127.5, 129.0, 130.5, 132.0, 133.5]
1520
-
1521
- In this case, since the slices are stacked in parallel to the z-axis, the slice location starts at the lower z-coordinate of 120mm and then increments slice-by-slice with the slice thickness of 1.5mm.
1522
-
1523
- """
1524
-
1525
- frames = self.frames(dims=dims, slice=slice, coords=coords, **filters)
1526
- if frames.size == 0:
1527
- msg = 'Cannot set affine matrix in an empty series. Use Series.expand() to create empty frames first.'
1528
- raise ValueError(msg)
1529
-
1530
- # For each slice location, the slice position needs to be updated too
1531
- # Need the coordinates of the vector parallel to the z-axis of the volume.
1532
- a = image_utils.dismantle_affine_matrix(affine)
1533
- ez = a['SpacingBetweenSlices']*np.array(a['slice_cosine'])
1534
-
1535
- # if multislice:
1536
- # slice_thickness = self.unique('SliceThickness')[0]
1537
-
1538
- # Set the affine slice-by-slice
1539
- affine_z = affine.copy()
1540
- for z, frame in enumerate(frames):
1541
- self.progress(z+1, frames.size, 'Writing affine..')
1542
- affine_z[:3, 3] = affine[:3, 3] + z*ez
1543
- if multislice:
1544
- thickness = frame.SliceThickness
1545
- frame.affine_matrix = affine_z
1546
- if multislice:
1547
- frame.SliceThickness = thickness
1548
-
1549
- # if multislice:
1550
- # self.set_values(slice_thickness,'SliceThickness')
1551
-
1552
-
1553
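The loop above shifts the affine origin by one slice step per frame. A minimal numpy sketch of that update, outside dbdicom and with an invented affine (1 mm in-plane, 1.5 mm slice spacing, origin at (-30, 20, 120) mm):

import numpy as np

affine = np.array([
    [1.0, 0.0, 0.0, -30.0],
    [0.0, 1.0, 0.0,  20.0],
    [0.0, 0.0, 1.5, 120.0],
    [0.0, 0.0, 0.0,   1.0],
])

# Slice direction and spacing follow from the third column of the affine.
spacing_between_slices = np.linalg.norm(affine[:3, 2])
slice_cosine = affine[:3, 2] / spacing_between_slices
ez = spacing_between_slices * slice_cosine

# The position of slice z is the affine origin shifted by z slice steps.
for z in range(3):
    image_position = affine[:3, 3] + z * ez
    print(z, image_position)   # z-coordinate 120.0, then 121.5, then 123.0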
- # consider renaming copy() - but breaks backward compatibility - this is not a slice really
1554
- def extract(self, slice={}, coords={}, **filters) -> Series:
1555
- """Get a slice of the series by dimension values
1556
-
1557
- Args:
1558
-            slice (dict, optional): dictionary of tag:value pairs, where each value is either a single value or an array of possible values. Only frames matching these values are included.
1559
-            coords (dict, optional): coordinates for the slice, provided as a dictionary where the keys list the dimensions, and the values are scalars, 1D arrays or meshgrid arrays of coordinates.
1560
-
1561
- See also:
1562
- `islice`
1563
- `split_by`
1564
-
1565
- Example:
1566
- Create a zero-filled array, describing 8 MRI images each measured at 3 flip angles and 2 repetition times:
1567
-
1568
- >>> coords = {
1569
- ... 'SliceLocation': np.arange(8),
1570
- ... 'FlipAngle': [2, 15, 30],
1571
- ... 'RepetitionTime': [2.5, 5.0],
1572
- ... }
1573
- >>> series = db.zeros((128,128,8,3,2), coords)
1574
-
1575
- Slice the series at flip angle 15:
1576
-
1577
-            >>> fa15 = series.extract(FlipAngle=15)
1578
-
1579
- Retrieve the array and check the dimensions:
1580
-
1581
- >>> array = fa15.pixel_values(dims=tuple(coords))
1582
- >>> print(array.shape)
1583
- (128, 128, 8, 1, 2)
1584
-
1585
- Multiple possible values can be specified as a list or np.ndarray:
1586
-
1587
-            >>> fa15 = series.extract(SliceLocation=[0,5], FlipAngle=15)
1588
- >>> array = fa15.pixel_values(dims=tuple(coords))
1589
- >>> print(array.shape)
1590
- (128, 128, 2, 1, 2)
1591
-
1592
- Values can also be provided as a dictionary, which is useful for instance for private tags that do not have a keyword string. So the following are equivalent:
1593
-
1594
-            >>> fa15 = series.extract(SliceLocation=[0,5], FlipAngle=15)
1595
-            >>> fa15 = series.extract({'SliceLocation': [0,5], 'FlipAngle': 15})
1596
-            >>> fa15 = series.extract({(0x0020, 0x1041): [0,5], (0x0018, 0x1314): 15})
1597
- """
1598
-
1599
- frames = self.frames(slice=slice, coords=coords, **filters)
1600
- result = self.new_sibling()
1601
- # result.adopt(frames) # faster but no progress bar
1602
- for f, frame in enumerate(frames):
1603
- self.progress(f+1, len(frames), 'Creating slice..')
1604
- frame.copy_to(result)
1605
- return result
1606
-
1607
-
1608
- def split_by(self, tag: str | tuple) -> list:
1609
- """Split the series into multiple subseries based on keyword value.
1610
-
1611
- Args:
1612
-            tag (str | tuple): A valid DICOM keyword or hexadecimal (group, element) tag.
1613
-
1614
- Raises:
1615
- ValueError: if an invalid or missing keyword is provided.
1616
-            ValueError: if all images have the same value for the tag, so no subseries can be derived. An exception is raised rather than returning a copy of the series, to avoid unnecessary copies; if a copy is the intention, use series.copy() instead.
1617
-
1618
- Returns:
1619
- list: A list of ``Series`` instances, where each element has the same value of the given keyword.
1620
-
1621
- See Also:
1622
- `slice`
1623
- `islice`
1624
-
1625
- Example:
1626
-
1627
- Create a single-slice series with multiple flip angles and repetition times:
1628
-
1629
- >>> coords = {
1630
- ... 'FlipAngle': [2, 15, 30],
1631
- ... 'RepetitionTime': [2.5, 7.5],
1632
- ... }
1633
- >>> zeros = db.zeros((128, 128, 3, 2), coords)
1634
- >>> zeros.print()
1635
- ---------- SERIES --------------
1636
- Series 001 [New Series]
1637
- Nr of instances: 6
1638
- MRImage 000001
1639
- MRImage 000002
1640
- MRImage 000003
1641
- MRImage 000004
1642
- MRImage 000005
1643
- MRImage 000006
1644
- --------------------------------
1645
-
1646
- Splitting this series by FlipAngle now creates 3 new series in the same study, with 2 images each. By default the fixed value of the splitting attribute is written in the series description:
1647
-
1648
- >>> FA = zeros.split_by('FlipAngle')
1649
- >>> zeros.study().print()
1650
- ---------- STUDY ---------------
1651
- Study New Study [None]
1652
- Series 001 [New Series]
1653
- Nr of instances: 6
1654
- Series 002 [New Series[FlipAngle = 2.0]]
1655
- Nr of instances: 2
1656
- Series 003 [New Series[FlipAngle = 15.0]]
1657
- Nr of instances: 2
1658
- Series 004 [New Series[FlipAngle = 30.0]]
1659
- Nr of instances: 2
1660
- --------------------------------
1661
-
1662
- Check the flip angle of the split series:
1663
- >>> for series in FA:
1664
- ... print(series.FlipAngle)
1665
- 2.0
1666
- 15.0
1667
- 30.0
1668
- """
1669
-
1670
- vals = self.unique(tag)
1671
- if len(vals)==1:
1672
- msg = 'Cannot split by ' + str(tag) + '\n'
1673
- msg += 'All frames have the same value.'
1674
- raise ValueError(msg)
1675
-
1676
- desc = self.instance().SeriesDescription + '[' + str(tag) + ' = '
1677
- split_series = []
1678
- for v in vals:
1679
- new = self.extract(slice={tag: v})
1680
- new.SeriesDescription = desc + str(v) + ']'
1681
- split_series.append(new)
1682
- return split_series
1683
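In essence, split_by() calls extract() once per unique tag value and labels each result. A rough pure-Python sketch of that grouping step, using made-up frame dictionaries instead of DICOM instances:

# Group frames by the value of one tag; each group would become a subseries.
frames = [
    {'FlipAngle': 2.0,  'InstanceNumber': 1},
    {'FlipAngle': 15.0, 'InstanceNumber': 2},
    {'FlipAngle': 2.0,  'InstanceNumber': 3},
    {'FlipAngle': 15.0, 'InstanceNumber': 4},
]
tag = 'FlipAngle'
groups = {}
for frame in frames:
    groups.setdefault(frame[tag], []).append(frame)

if len(groups) == 1:
    raise ValueError(f'Cannot split by {tag}: all frames have the same value.')

for value, group in groups.items():
    print(f'New Series[{tag} = {value}]: {len(group)} frames')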
-
1684
-
1685
- def spacing(self, **kwargs)->tuple:
1686
- """3D pixel spacing in mm
1687
-
1688
- Returns:
1689
- tuple: (x-spacing, y-spacing, z-spacing)
1690
-
1691
- See also:
1692
- `shape`
1693
-
1694
- Examples:
1695
- Check the spacing of a digital reference object:
1696
-
1697
- >>> series = db.dro.T1_mapping_vFATR()
1698
- >>> series.spacing()
1699
- (15, 15, 20)
1700
- """
1701
- affine = self.affine(**kwargs)
1702
- column_spacing = np.linalg.norm(affine[:3, 0])
1703
- row_spacing = np.linalg.norm(affine[:3, 1])
1704
- slice_spacing = np.linalg.norm(affine[:3, 2])
1705
- return column_spacing, row_spacing, slice_spacing
1706
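spacing() reads the voxel sizes straight off the column norms of the affine; a small numpy check of that relation, using an arbitrary affine with 3 mm in-plane and 1.5 mm slice spacing:

import numpy as np

affine = np.array([
    [0.0, -3.0, 0.0, -30.0],
    [3.0,  0.0, 0.0,  20.0],
    [0.0,  0.0, 1.5, 120.0],
    [0.0,  0.0, 0.0,   1.0],
])

# Column, row and slice spacing are the lengths of the first three columns.
column_spacing = np.linalg.norm(affine[:3, 0])
row_spacing = np.linalg.norm(affine[:3, 1])
slice_spacing = np.linalg.norm(affine[:3, 2])
print(column_spacing, row_spacing, slice_spacing)   # 3.0 3.0 1.5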
-
1707
-
1708
-
1709
-
1710
- def unique_affines(self)->np.ndarray:
1711
- """Return the array of unique affine matrices.
1712
-
1713
- Raises:
1714
- ValueError: if the DICOM file is corrupted.
1715
-
1716
- Returns:
1717
- np.ndarray: array of 4x4 ndarrays with the unique affine matrices of the series.
1718
-
1719
- See also:
1720
- `set_affine`
1721
- `affine`
1722
-
1723
- Example:
1724
- Check that the default affine is the identity:
1725
-
1726
- >>> zeros = db.zeros((128,128,10))
1727
- >>> zeros.affine()
1728
- [array([
1729
- [1., 0., 0., 0.],
1730
- [0., 1., 0., 0.],
1731
- [0., 0., 1., 0.],
1732
- [0., 0., 0., 1.]], dtype=float32)]
1733
- """
1734
- image_orientation = self.ImageOrientationPatient
1735
- if image_orientation is None:
1736
- msg = 'ImageOrientationPatient not defined in the DICOM header \n'
1737
- msg += 'This is a required DICOM field \n'
1738
- msg += 'The data may be corrupted - please check'
1739
- raise ValueError(msg)
1740
- # Multiple slice groups in series - return list of affine matrices
1741
- if self.is_multislice():
1742
- affine_matrices = []
1743
- for dir in image_orientation:
1744
- slice_group = self.instances(ImageOrientationPatient=dir)
1745
- affine = _slice_group_affine_matrix(slice_group, dir)
1746
- affine_matrices.append(affine)
1747
- return np.unique(affine_matrices)
1748
- # Single slice group in series - return a list with a single affine matrix
1749
- else:
1750
- slice_group = self.instances()
1751
- affine = _slice_group_affine_matrix(slice_group, image_orientation)
1752
- return np.array([affine])
1753
-
1754
- def is_multislice(self)->bool:
1755
- """Check if the series is multislice
1756
-
1757
- Returns:
1758
- bool: True if the series is multislice.
1759
- """
1760
- return is_multislice(self)
1761
-
1762
-
1763
- def islice(self, indices={}, **inds) -> Series:
1764
-        """Get a slice of the series by dimension indices
1765
-
1766
- Args:
1767
- indices (dict, optional): Dictionary with tag:value pairs, where the values are either a single index or an array of indices.
1768
- inds (dict, optional): Provide indices for the slice, either as keyword=index pairs or as a dictionary. The indices must be provided either as a scalar, a list or a numpy array.
1769
-
1770
- Raises:
1771
- IndexError: when the indices in inds are out of range of the existing coordinates.
1772
-
1773
- See also:
1774
- `slice`
1775
- `split_by`
1776
-
1777
- Example:
1778
- Create a zero-filled array, describing 8 MRI images each measured at 3 flip angles and 2 repetition times:
1779
-
1780
- >>> coords = {
1781
- ... 'SliceLocation': np.arange(8),
1782
- ... 'FlipAngle': [2, 15, 30],
1783
- ... 'RepetitionTime': [2.5, 5.0],
1784
- ... }
1785
- >>> series = db.zeros((128,128,8,3,2), coords)
1786
-
1787
- Slice the series at flip angle 15 (i.e. index 1):
1788
-
1789
- >>> fa15 = series.islice(FlipAngle=1)
1790
-
1791
- Retrieve the array and check the dimensions:
1792
-
1793
- >>> array = fa15.pixel_values(dims=tuple(coords))
1794
- >>> print(array.shape)
1795
- (128, 128, 8, 1, 2)
1796
-
1797
- Multiple possible indices can be specified as a list or np.ndarray:
1798
-
1799
-            >>> fa15 = series.islice(SliceLocation=[0,5], FlipAngle=1)
1800
- >>> array = fa15.pixel_values(dims=tuple(coords))
1801
- >>> print(array.shape)
1802
- (128, 128, 2, 1, 2)
1803
-
1804
- Values can also be provided as a dictionary, which is useful for instance for private tags that do not have a keyword string. So the following are equivalent:
1805
-
1806
-            >>> fa15 = series.islice(SliceLocation=[0,5], FlipAngle=1)
1807
-            >>> fa15 = series.islice({'SliceLocation': [0,5], 'FlipAngle': 1})
1808
-            >>> fa15 = series.islice({(0x0020, 0x1041): [0,5], (0x0018, 0x1314): 1})
1809
-
1810
- """
1811
- inds = {**indices, **inds}
1812
-
1813
- # Check whether the arguments are valid, and initialize dims.
1814
- if inds == {}:
1815
- return self.new_sibling()
1816
- dims = list(inds.keys())
1817
- source = instance_array(self, sortby=dims)
1818
-
1819
- # Retrieve the instances of the slice.
1820
- for d, dim in enumerate(inds):
1821
- ind = inds[dim]
1822
- try:
1823
- source = source.take(ind, axis=d)
1824
- # Insert dimensions of 1 back in
1825
- if isinstance(ind, Number):
1826
- source = np.expand_dims(source, axis=d)
1827
- except IndexError as e:
1828
- msg = str(e) + '\n'
1829
- msg += 'The indices for ' + str(dim) + ' in the inds argument are out of bounds'
1830
- raise IndexError(msg)
1831
-
1832
- result = self.new_sibling()
1833
- source = source.ravel()
1834
- for i in range(source.size):
1835
- source[i].copy_to(result)
1836
- return result
1837
-
1838
-
1839
- #
1840
- # Following APIs are obsolete and will be removed in future versions
1841
- #
1842
-
1843
-
1844
- def _old_set_pixel_values(self, array:np.ndarray, coords:dict=None, inds:dict=None):
1845
- """Assign new pixel data with a new numpy.ndarray.
1846
-
1847
- Args:
1848
- array (np.ndarray): array with new pixel data.
1849
- coords (dict, optional): Provide coordinates for the array, using a dictionary where the keys list the dimensions, and the values are provided as 1D or meshgrid arrays of coordinates. If data already exist at the specified coordinates, these will be overwritten. If not, the new data will be added to the series.
1850
- inds (dict, optional): Provide a slice of existing data that will be overwritten with the new array. The format is the same as the dictionary of coordinates, except that the slice is identified by indices rather than values.
1851
-
1852
- Raises:
1853
-            ValueError: if neither coords nor inds is provided, if both are provided, or if the dimensions in coords or inds do not match the dimensions of the array.
1854
- IndexError: when attempting to set a slice in an empty array, or when the indices in inds are out of range of the existing coordinates.
1855
-
1856
- See also:
1857
- `pixel_values`
1858
-
1859
- Example:
1860
- Create a zero-filled array, describing 8 MRI images each measured at 3 flip angles and 2 repetition times:
1861
-
1862
- >>> coords = {
1863
- ... 'SliceLocation': np.arange(8),
1864
- ... 'FlipAngle': [2, 15, 30],
1865
- ... 'RepetitionTime': [2.5, 5.0],
1866
- ... }
1867
- >>> series = db.zeros((128,128,8,3,2), coords)
1868
-
1869
- Retrieve the array and check that it is populated with zeros:
1870
-
1871
- >>> array = series.pixel_values(dims=tuple(coords))
1872
- >>> print(np.mean(array))
1873
- 0.0
1874
-
1875
- Now overwrite the values with a new array of ones in a new shape:
1876
-
1877
- >>> new_shape = (128,128,8)
1878
- >>> new_coords = {
1879
- ... 'SliceLocation': np.arange(8),
1880
- ... }
1881
- >>> ones = np.ones(new_shape)
1882
- >>> series.set_pixel_values(ones, coords=new_coords)
1883
-
1884
- Retrieve the new array and check shape:
1885
-
1886
- >>> array = series.pixel_values(dims=tuple(new_coords))
1887
- >>> print(array.shape)
1888
- (128,128,8)
1889
-
1890
- Check that the value is overwritten:
1891
-
1892
- >>> print(np.mean(array))
1893
- 1.0
1894
- """
1895
-
1896
- # Check whether the arguments are valid, and initialize dims.
1897
- cnt = 0
1898
- if coords is not None:
1899
- cnt+=1
1900
- dims = tuple(coords)
1901
- if len(dims) != array.ndim-2:
1902
-                msg = 'One coordinate must be specified for each dimension in the array.'
1903
- raise ValueError(msg)
1904
- for d, dim in enumerate(coords):
1905
- if len(coords[dim]) != array.shape[d+2]:
1906
- msg = str(dim) + ' in the coords must have the same number of elements as the corresponding dimension in the array'
1907
- raise ValueError(msg)
1908
- if inds is not None:
1909
- cnt+=1
1910
- dims = tuple(inds)
1911
- if len(dims) != array.ndim-2:
1912
-                msg = 'One coordinate must be specified for each dimension in the array.'
1913
- raise ValueError(msg)
1914
- if cnt == 0:
1915
- msg = 'At least one of the optional arguments coords or inds must be provided'
1916
- raise ValueError(msg)
1917
- if cnt == 2:
1918
- msg = 'Only one of the optional arguments coords or inds must be provided'
1919
- raise ValueError(msg)
1920
-
1921
- source = instance_array(self, sortby=list(dims))
1922
-
1923
- if coords is not None:
1924
- # Retrieve the instances corresponding to the coordinates.
1925
- if source.size != 0:
1926
- for d, dim in enumerate(coords):
1927
- ind = []
1928
- for i in range(source.shape[d]):
1929
- si = source.take(i,axis=d).ravel()
1930
- if si[0][dim] in coords[dim]:
1931
- ind.append(i)
1932
- source = source.take(ind, axis=d)
1933
- # Insert dimensions of 1 back in
1934
- if len(ind)==1:
1935
- source = np.expand_dims(source, axis=d)
1936
- elif inds is not None:
1937
- # Retrieve the instances of the slice, as well as their coordinates.
1938
- coords = {}
1939
- for d, dim in enumerate(inds):
1940
- ind = inds[dim]
1941
- if isinstance(ind, np.ndarray):
1942
- ind = list(ind)
1943
- try:
1944
- source = source.take(ind, axis=d)
1945
- except IndexError as e:
1946
- msg = str(e) + '\n'
1947
- msg += 'The indices for ' + str(dim) + ' in the inds argument are out of bounds'
1948
- raise IndexError(msg)
1949
- coords[dim] = []
1950
- for i in range(source.shape[d]):
1951
- si = source.take(i,axis=d).ravel()
1952
- coords[dim].append(si[0][dim])
1953
-
1954
- nr_of_slices = int(np.prod(array.shape[2:]))
1955
- if source.size == 0:
1956
- # If there are not yet any instances at the correct coordinates, they will be created from scratch
1957
- source = [self.new_instance(MRImage()) for _ in range(nr_of_slices)]
1958
- set_pixel_values(self, array, source=source, coords=coords)
1959
- elif array.shape[2:] == source.shape:
1960
- # If the new array has the same shape, use the exact headers.
1961
- set_pixel_values(self, array, source=source.ravel().tolist(), coords=coords)
1962
- else:
1963
- # If the new array has a different shape, use the first header for all and delete all the others
1964
- # This happens when some of the new coordinates are present, but not all.
1965
- # TODO: This is overkill - only fill in the gaps with copies.
1966
- source = source.ravel().tolist()
1967
- for series in source[1:]:
1968
- series.remove()
1969
- source = [source[0]] + [source[0].copy_to(self) for _ in range(nr_of_slices-1)]
1970
- set_pixel_values(self, array, source=source, coords=coords)
1971
-
1972
- def subseries(self, **kwargs)->Series:
1973
- """Extract a subseries based on values of header elements.
1974
-
1975
- Args:
1976
- kwargs: Any number of valid DICOM (tag, value) keyword arguments.
1977
-
1978
- Returns:
1979
- Series: a new series as a sibling under the same parent.
1980
-
1981
- See Also:
1982
- :func:`~split_by`
1983
-
1984
- Example:
1985
-
1986
- Create a multi-slice series with multiple flip angles and repetition times:
1987
-
1988
- >>> coords = {
1989
- ... 'SliceLocation': np.arange(16),
1990
- ... 'FlipAngle': [2, 15, 30],
1991
- ... 'RepetitionTime': [2.5, 5.0, 7.5],
1992
- ... }
1993
- >>> zeros = db.zeros((128, 128, 16, 3, 2), coords)
1994
-
1995
- Create a new series containing only the data with flip angle 2 and repetition time 7.5:
1996
-
1997
- >>> volume = zeros.subseries(FlipAngle=2.0, RepetitionTime=7.5)
1998
-
1999
- Check that the volume series now has two dimensions of size 1:
2000
-
2001
- >>> array = volume.pixel_values(dims=tuple(coords))
2002
- >>> print(array.shape)
2003
- (128, 128, 16, 1, 1)
2004
-
2005
- and only one flip angle and repetition time:
2006
-
2007
- >>> print(volume.FlipAngle, volume.RepetitionTime)
2008
- 2.0 7.5
2009
-
2010
- and that the parent study now has two series:
2011
-
2012
- >>> volume.study().print()
2013
- ---------- STUDY ---------------
2014
- Study New Study [None]
2015
- Series 001 [New Series]
2016
- Nr of instances: 96
2017
- Series 002 [New Series]
2018
- Nr of instances: 16
2019
- --------------------------------
2020
- """
2021
- return subseries(self, move=False, **kwargs)
2022
-
2023
- def slice_groups(self, dims=('InstanceNumber',)) -> list:
2024
- """Return a list of slice groups in the series.
2025
-
2026
- In dbdicom, a *slice group* is defined as a series of slices that have the same orientation. It is common for a single series to have images with multiple orientations, such as in localizer series in MRI. For such a series, returning all data in a single array may not be meaningful.
2027
-
2028
- Formally, a *slice group* is a dictionary with two entries: 'ndarray' is the numpy.ndarray with the data along the dimensions provided by the dims argument, and 'affine' is the 4x4 affine matrix of the slice group. The function returns a list of such dictionaries, one for each slice group in the series.
2029
-
2030
- Args:
2031
- dims (tuple, optional): Dimensions for the returned arrays. Defaults to ('InstanceNumber',).
2032
-
2033
- Returns:
2034
- list: A list of slice groups (dictionaries), one for each slice group in the series.
2035
-
2036
- Examples:
2037
-
2038
- >>> series = db.ones((128,128,5,10))
2039
- >>> sgroups = series.slice_groups(dims=('SliceLocation', 'AcquisitionTime'))
2040
-
2041
- Since there is only one slice group in the series, ``sgroups`` is a list with one element:
2042
-
2043
- >>> print(len(sgroups))
2044
- 1
2045
-
2046
- The array of the slice group is the entire volume of the series:
2047
-
2048
- >>> print(sgroups[0]['ndarray'].shape)
2049
- (128, 128, 5, 10)
2050
-
2051
- And the affine of the series has not changed from the default (identity):
2052
-
2053
- >>> print(sgroups[0]['affine'])
2054
- [[1. 0. 0. 0.]
2055
- [0. 1. 0. 0.]
2056
- [0. 0. 1. 0.]
2057
- [0. 0. 0. 1.]]
2058
-
2059
- """
2060
-
2061
- slice_groups = []
2062
- image_orientation = self.ImageOrientationPatient
2063
-
2064
- # Multiple slice groups in series - return list of cuboids
2065
- if isinstance(image_orientation[0], list):
2066
- for dir in image_orientation:
2067
- slice_group = instance_array(self, ImageOrientationPatient=dir)
2068
- affine = _slice_group_affine_matrix(list(slice_group), dir)
2069
- array, _ = _get_pixel_array_from_instance_array(slice_group, sortby=list(dims), pixels_first=True)
2070
- slice_groups.append({'ndarray': array[...,0], 'affine': affine})
2071
-
2072
- # Single slice group in series - return a list with a single affine matrix
2073
- else:
2074
- slice_group = instance_array(self)
2075
- affine = _slice_group_affine_matrix(list(slice_group), image_orientation)
2076
- array, _ = _get_pixel_array_from_instance_array(slice_group, sortby=list(dims), pixels_first=True)
2077
- slice_groups.append({'ndarray': array[...,0], 'affine': affine})
2078
-
2079
- return slice_groups
2080
-
2081
- def affine_matrix(self):
2082
- return affine_matrix(self)
2083
-
2084
- def array(*args, **kwargs):
2085
- return get_pixel_array(*args, **kwargs)
2086
-
2087
- def set_array(*args, **kwargs):
2088
- set_pixel_array(*args, **kwargs)
2089
-
2090
- def get_pixel_array(*args, **kwargs):
2091
- return get_pixel_array(*args, **kwargs)
2092
-
2093
- def set_pixel_array(*args, **kwargs):
2094
- set_pixel_array(*args, **kwargs)
2095
-
2096
- def ndarray(self, *args, **kwargs):
2097
- return self.pixel_values(*args, **kwargs)
2098
-
2099
- def set_ndarray(self, *args, **kwargs):
2100
- self.set_pixel_values(*args, **kwargs)
2101
-
2102
-
2103
-
2104
- def _filter_values(vframes, slice, coords, exclude=False):
2105
- # vframes: list with one item per frame, each item being a list of values.
2106
- # filters: dictionary of tag: value pairs.
2107
- if slice=={} and coords=={}:
2108
- fvalues = vframes
2109
- else:
2110
- fvalues = []
2111
- nf = len(slice)
2112
- nl = _coords_size(coords)
2113
- nc = len(coords)
2114
- for vframe in vframes:
2115
- in_slice = True
2116
- for i, s in enumerate(slice):
2117
- if isinstance(slice[s], np.ndarray):
2118
- in_slice = vframe[i-nf-nc] in slice[s]
2119
- else:
2120
- in_slice = vframe[i-nf-nc] == slice[s]
2121
- if exclude:
2122
- in_slice = not in_slice
2123
- if not in_slice:
2124
- break
2125
- if nl==0:
2126
- in_coords = True
2127
- else:
2128
- in_coords = False
2129
- for l in range(nl):
2130
- at_l = True
2131
- for i, loc in enumerate(coords):
2132
- at_l = at_l and (vframe[i-nc] == coords[loc][l])
2133
- in_coords = in_coords or at_l
2134
- if at_l:
2135
- break
2136
- if exclude:
2137
- in_coords = not in_coords
2138
- if in_slice and in_coords:
2139
- fvalues.append(vframe[:-nf-nc])
2140
-
2141
- if len(fvalues) == 0:
2142
- return np.array([]).reshape((0,0))
2143
-
2144
- # Create array of return values. Values can be of different types including lists so this must be an object array.
2145
- nd, nf = len(fvalues[0]), len(fvalues)
2146
- rvalues = np.empty((nd,nf), dtype=object)
2147
- for d in range(nd):
2148
- for f in range(nf):
2149
- rvalues[d,f] = fvalues[f][d]
2150
-
2151
- return rvalues
2152
-
2153
-
2154
-
2155
- def _filter_values_ind(vframes, slice, coords, exclude=False):
2156
- if slice=={} and coords=={}:
2157
- return np.arange(len(vframes), dtype=int)
2158
- finds = []
2159
- nf = len(slice)
2160
- nl = _coords_size(coords)
2161
- nc = len(coords)
2162
- for iv, vframe in enumerate(vframes):
2163
- in_slice = True
2164
- for i, s in enumerate(slice):
2165
- if isinstance(slice[s], np.ndarray):
2166
- in_slice = vframe[i-nf-nc] in slice[s]
2167
- else:
2168
- in_slice = vframe[i-nf-nc] == slice[s]
2169
- if exclude:
2170
- in_slice = not in_slice
2171
- if not in_slice:
2172
- break
2173
- if nl==0:
2174
- in_coords = True
2175
- else:
2176
- in_coords = False
2177
- for l in range(nl):
2178
- at_l = True
2179
- for i, loc in enumerate(coords):
2180
- at_l = at_l and (vframe[i-nc] == coords[loc][l])
2181
- in_coords = in_coords or at_l
2182
- if at_l:
2183
- break
2184
- if exclude:
2185
- in_coords = not in_coords
2186
- if in_slice and in_coords:
2187
- finds.append(iv)
2188
- return np.array(finds, dtype=int)
2189
-
2190
-
2191
- def _coords_shape(coords):
2192
- if coords == {}:
2193
- return (0,)
2194
-
2195
- # Check that all values are arrays.
2196
- for c in coords:
2197
- if not isinstance(coords[c], np.ndarray):
2198
- msg = 'Coordinate values must be provided as numpy arrays.'
2199
-            msg += '\nBut the value of ' + str(c) + ' is a ' + str(type(coords[c]))
2200
- raise ValueError(msg)
2201
-
2202
- shapes = [coords[tag].shape for tag in coords]
2203
- shape = shapes[0]
2204
- for s in shapes[1:]:
2205
- if s != shape:
2206
- msg = 'Dimensions are ambiguous - not all coordinates have the same shape.'
2207
- raise ValueError(msg)
2208
- return shapes[0]
2209
-
2210
-
2211
- def _coords_size(coords):
2212
-
2213
- if coords == {}:
2214
- return 0
2215
-
2216
- for c in coords:
2217
- if not isinstance(coords[c], np.ndarray):
2218
- msg = 'Coordinate values must be provided as numpy arrays.'
2219
-            msg += '\nBut the value of ' + str(c) + ' is a ' + str(type(coords[c]))
2220
- raise ValueError(msg)
2221
-
2222
- # Coordinate values must a have the same size.
2223
- sizes = np.unique([coords[tag].size for tag in coords])
2224
- if len(sizes) > 1:
2225
- msg = 'These are not proper dimensions. Each coordinate must have the same number of values.'
2226
- raise ValueError(msg)
2227
- return sizes[0]
2228
-
2229
- def _coords_vals(coords):
2230
- values = [coords[tag].ravel() for tag in coords]
2231
- values = np.stack(values)
2232
- return values
2233
-
2234
- def _check_if_ivals(values):
2235
- if None in values:
2236
- msg = 'These are not proper dimensions. Coordinate values must be defined everywhere.'
2237
- raise ValueError(msg)
2238
-
2239
- # Check if the values are unique
2240
- for f in range(values.shape[1]-1):
2241
- for g in range(f+1, values.shape[1]):
2242
- equal = True
2243
- for d in range(values.shape[0]):
2244
- if values[d,f] != values[d,g]:
2245
- equal = False
2246
- break
2247
- if equal:
2248
- msg = 'These are not proper dimensions. Coordinate values must be unique.'
2249
- raise ValueError(msg)
2250
- # if values.shape[1] != np.unique(values, axis=1).shape[1]:
2251
- # msg = 'These are not proper dimensions. Coordinate values must be unique.'
2252
- # raise ValueError(msg)
2253
-
2254
- def _check_if_coords(coords):
2255
-
2256
- # Check that all values are arrays.
2257
- for c in coords:
2258
- if not isinstance(coords[c], np.ndarray):
2259
- msg = 'Coordinate values must be provided as numpy arrays.'
2260
- msg += '\nBut the value of ' + str(c) + ' is a ' + str(type(coords[c]))
2261
- raise ValueError(msg)
2262
-
2263
- # Check if coordinates are unique
2264
- values = _coords_vals(coords)
2265
- _check_if_ivals(values)
2266
- return coords
2267
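To see what these validators accept: each tag maps to a numpy array, all arrays have the same size, and the per-frame tuples of values are unique. A compact numpy version of the same check, assuming numeric values (the loop above is more general because it also handles object-valued tags):

import numpy as np

coords = {
    'SliceLocation': np.array([0, 0, 1, 1]),
    'FlipAngle':     np.array([2, 15, 2, 15]),
}

# All coordinate arrays must have the same number of elements ...
sizes = {tag: coords[tag].size for tag in coords}
assert len(set(sizes.values())) == 1

# ... and every column (one tuple of values per frame) must be unique.
values = np.stack([coords[tag].ravel() for tag in coords])
assert np.unique(values, axis=1).shape[1] == values.shape[1]
print('valid coordinates for', values.shape[1], 'frames')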
-
2268
- def _mesh_to_coords(coords):
2269
- for c in coords:
2270
- coords[c] = coords[c].ravel()
2271
- return _check_if_coords(coords)
2272
-
2273
-
2274
- def _grid_to_meshcoords(gridcoords):
2275
-
2276
- grid = []
2277
- for c in gridcoords:
2278
- if not isinstance(gridcoords[c], np.ndarray):
2279
- msg = 'Grid coordinates have to be numpy arrays.'
2280
- raise TypeError(msg)
2281
- if len(gridcoords[c].shape) != 1:
2282
- msg = 'Grid coordinates have to be one-dimensionial.'
2283
- raise ValueError(msg)
2284
- if len(np.unique(gridcoords[c])) != len(gridcoords[c]):
2285
- msg = 'Grid coordinates have to be unique.'
2286
- raise ValueError(msg)
2287
- grid.append(gridcoords[c])
2288
-
2289
- mesh = np.meshgrid(*tuple(grid), indexing='ij')
2290
- meshcoords = {}
2291
- for i, c in enumerate(gridcoords):
2292
- meshcoords[c] = mesh[i]
2293
- _check_if_coords(meshcoords)
2294
- return meshcoords
2295
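A minimal example of the grid-to-mesh expansion performed above, written directly with np.meshgrid; the tag names and values are illustrative:

import numpy as np

gridcoords = {
    'SliceLocation': np.array([0.0, 1.0, 2.0]),
    'FlipAngle':     np.array([2.0, 15.0]),
}

# Expand the 1D grid axes into full mesh arrays, one per tag.
mesh = np.meshgrid(*gridcoords.values(), indexing='ij')
meshcoords = {tag: m for tag, m in zip(gridcoords, mesh)}

print(meshcoords['SliceLocation'].shape)   # (3, 2)
print(meshcoords['FlipAngle'])             # three identical rows [ 2. 15.]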
-
2296
-
2297
- def _meshcoords_to_grid(coords):
2298
- dims = tuple(coords)
2299
- gridcoords = {}
2300
- for d, dim in enumerate(dims):
2301
- gridcoords[dim] = []
2302
- dvals = coords[dim]
2303
- for i in range(dvals.shape[d]):
2304
- dvals_i = dvals.take(i, axis=d)
2305
- dvals_i = np.unique(dvals_i)
2306
- if len(dvals_i) > 1:
2307
- msg = 'These are not proper grid coordinates.'
2308
- raise ValueError(msg)
2309
- gridcoords[dim].append(dvals_i[0])
2310
- gridcoords[dim] = np.array(gridcoords[dim])
2311
- return gridcoords
2312
-
2313
-
2314
- def _grid_to_coords(grid):
2315
- if grid == {}:
2316
- return {}
2317
- coords = _grid_to_meshcoords(grid)
2318
- for c in coords:
2319
- coords[c] = coords[c].flatten()
2320
- return coords
2321
-
2322
- def _as_meshcoords(coords):
2323
-
2324
- # First check that they are proper coordinates
2325
- values = _coords_vals(coords)
2326
- _check_if_ivals(values)
2327
- values = _meshvals(values)
2328
- meshcoords = {}
2329
- for i, c in enumerate(coords):
2330
- meshcoords[c] = values[i,...]
2331
- return meshcoords
2332
-
2333
- def _meshvals(values):
2334
- # Input array shape: (d, f) with d = nr of dims and f = nr of frames
2335
- # Output array shape: (d, f1,..., fd)
2336
- if values.size == 0:
2337
- return np.array([])
2338
- # List the unique values of the first coordinate
2339
- vals, cnts = np.unique(values[0,:], return_counts=True)
2340
- # Check that there is an equal number of each value
2341
- if len(np.unique(cnts)) > 1:
2342
- msg = 'These are not mesh coordinates.'
2343
- raise ValueError(msg)
2344
- # If there is only one dimension, we are done
2345
- if values.shape[0] == 1:
2346
- return values
2347
- mesh = []
2348
- for v in vals:
2349
- vind = np.where(values[0,:]==v)[0]
2350
- vmesh = _meshvals(values[1:,vind])
2351
- mesh.append(vmesh)
2352
- mesh = np.stack(mesh, axis=1)
2353
- a = [np.full(mesh.shape[2:], v) for v in vals]
2354
- a = np.stack(a)
2355
- a = np.expand_dims(a,0)
2356
- mesh = np.concatenate((a, mesh))
2357
- return mesh
2358
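The shape contract of _meshvals is easiest to see on a small case: per-frame values of shape (d, f) are rearranged into a mesh of shape (d, f1, ..., fd). The sketch below builds the expected mesh for a 2x2 example with np.meshgrid rather than the recursion above; for this input the two should agree:

import numpy as np

# Four frames described by two coordinates each: input shape (d, f) = (2, 4).
flat = np.array([
    [1, 1, 2, 2],     # first coordinate, e.g. a slice index
    [5, 7, 5, 7],     # second coordinate, e.g. a flip angle
])

# Expected mesh form: shape (d, f1, f2) = (2, 2, 2), one regular grid per coordinate.
vals0, vals1 = np.unique(flat[0]), np.unique(flat[1])
mesh = np.stack(np.meshgrid(vals0, vals1, indexing='ij'))
print(mesh.shape)     # (2, 2, 2)
print(mesh[0])        # [[1 1] [2 2]]
print(mesh[1])        # [[5 7] [5 7]]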
-
2359
- def _meshdata(vals, crds, cmesh):
2360
- mshape = (vals.shape[0],) + cmesh.shape[1:]
2361
- if mshape[0]==0:
2362
- return vals.reshape(mshape)
2363
- vmesh = np.zeros(mshape, dtype=object)
2364
- cmesh = cmesh.reshape((cmesh.shape[0],-1))
2365
- vmesh = vmesh.reshape((vmesh.shape[0],-1))
2366
- for i in range(vals.shape[1]):
2367
- # find location of coordinate i in cmesh
2368
- for j in range(cmesh.shape[1]):
2369
- if np.array_equal(cmesh[:,j], crds[:,i]):
2370
- break
2371
- # Write value i at the same location in vmesh
2372
- vmesh[:,j] = vals[:,i]
2373
- return vmesh.reshape(mshape)
2374
-
2375
- def _concatenate_coords(coords:tuple, mesh=False):
2376
- concat = {}
2377
- for c in coords[0]:
2378
- concat[c] = coords[0][c].flatten().copy()
2379
- for coord in coords[1:]:
2380
- for c in coord:
2381
- if c not in concat:
2382
- msg = 'Cannot concatenate - all coordinates must have the same variables.'
2383
- raise ValueError(msg)
2384
- concat[c] = np.concatenate((concat[c], coord[c].flatten()))
2385
- _check_if_coords(concat)
2386
- if mesh:
2387
- return _as_meshcoords(concat)
2388
- else:
2389
- return concat
2390
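Concatenation appends the flattened value arrays tag by tag, and both inputs must share the same tags. A small sketch with invented values:

import numpy as np

coords_a = {'SliceLocation': np.array([0, 1]), 'FlipAngle': np.array([2, 2])}
coords_b = {'SliceLocation': np.array([0, 1]), 'FlipAngle': np.array([15, 15])}

# Values are concatenated per tag, after flattening.
concat = {
    tag: np.concatenate((coords_a[tag].ravel(), coords_b[tag].ravel()))
    for tag in coords_a
}
print(concat['SliceLocation'])   # [0 1 0 1]
print(concat['FlipAngle'])       # [ 2  2 15 15]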
-
2391
-
2392
- ### OBSOLETE BELOW HERE
2393
-
2394
-
2395
- def set_pixel_values(series, array, source=None, coords=None, **kwargs):
2396
-
2397
- # If coordinates are given as 1D arrays, turn them into grids and flatten for iteration.
2398
- if coords is not None:
2399
- mesh_coords = {}
2400
- v = list(coords.values())
2401
- if v != []:
2402
- v0 = v[0]
2403
- if np.array(v0).ndim==1: # regular grid
2404
- pos = tuple([coords[c] for c in coords])
2405
- pos = np.meshgrid(*pos, indexing='ij')
2406
- for i, c in enumerate(coords):
2407
- mesh_coords[c] = pos[i].ravel()
2408
-
2409
- # Flatten array for iterating
2410
- nr_of_slices = int(np.prod(array.shape[2:]))
2411
- array = array.reshape((array.shape[0], array.shape[1], nr_of_slices)) # shape (x,y,i)
2412
- attr = {**series.attributes, **kwargs}
2413
- if 'SliceLocation' in coords:
2414
- affine = series.affine()
2415
- for i, image in enumerate(source):
2416
- series.progress(i+1, len(source), 'Saving array..')
2417
- image.read()
2418
-
2419
- # Update any other header data provided
2420
- for a, v in attr.items():
2421
- setattr(image, a, v)
2422
- # if isinstance(v, list):
2423
- # setattr(image, a, v[i])
2424
- # else:
2425
- # setattr(image, a, v)
2426
-
2427
- # # If needed, use Defaults for geometry markers
2428
- # if affine is not None:
2429
- # affine[2, 3] = i # not sufficiently general
2430
- # image.affine_matrix = affine
2431
-
2432
- # Set coordinates.
2433
- if mesh_coords is not None:
2434
- for c in mesh_coords:
2435
- image[c] = mesh_coords[c][i]
2436
- if c == 'SliceLocation':
2437
- image['ImagePositionPatient'] = image_utils.image_position_from_slice_location(mesh_coords[c][i], affine)
2438
-
2439
- image.set_pixel_array(array[:,:,i])
2440
- image.clear()
2441
-
2442
-
2443
- # def slice_groups(series): # not yet in use
2444
- # slice_groups = []
2445
- # for orientation in series.ImageOrientationPatient:
2446
- # sg = series.instances(ImageOrientationPatient=orientation)
2447
- # slice_groups.append(sg)
2448
- # return slice_groups
2449
-
2450
-
2451
- def subseries(record, move=False, **kwargs):
2452
- series = record.new_sibling()
2453
- instances = record.instances(**kwargs)
2454
- for i, instance in enumerate(instances):
2455
- record.progress(i+1, len(instances), 'Extracting subseries..')
2456
- if move:
2457
- instance.move_to(series)
2458
- else:
2459
- instance.copy_to(series)
2460
- # This should be faster:
2461
- # instances = record.instances(**kwargs)
2462
- # series.adopt(instances)
2463
- return series
2464
-
2465
-
2466
- def read_npy(record):
2467
- # Not in use - loading of temporary numpy files
2468
- file = record.manager.npy()
2469
- if not os.path.exists(file):
2470
- return
2471
- with open(file, 'rb') as f:
2472
- array = np.load(f)
2473
- return array
2474
-
2475
-
2476
-
2477
- def array(record, sortby=None, pixels_first=False, first_volume=False):
2478
- if isinstance(record, list): # array of instances
2479
- arr = np.empty(len(record), dtype=object)
2480
- for i, rec in enumerate(record):
2481
- arr[i] = rec
2482
- return _get_pixel_array_from_instance_array(arr, sortby=sortby, pixels_first=pixels_first, first_volume=first_volume)
2483
- elif isinstance(record, np.ndarray): # array of instances
2484
- return _get_pixel_array_from_instance_array(record, sortby=sortby, pixels_first=pixels_first, first_volume=first_volume)
2485
- else:
2486
- return get_pixel_array(record, sortby=sortby, pixels_first=pixels_first, first_volume=first_volume)
2487
-
2488
-
2489
- def get_pixel_array(record, sortby=None, first_volume=False, pixels_first=False):
2490
- source = instance_array(record, sortby)
2491
- array, headers = _get_pixel_array_from_sorted_instance_array(source, pixels_first=pixels_first)
2492
- if first_volume:
2493
- return array[...,0], headers[...,0]
2494
- else:
2495
- return array, headers
2496
-
2497
-
2498
- def _get_pixel_array_from_instance_array(instance_array, sortby=None, pixels_first=False, first_volume=False):
2499
- source = sort_instance_array(instance_array, sortby)
2500
- array, headers = _get_pixel_array_from_sorted_instance_array(source, pixels_first=pixels_first)
2501
- if first_volume:
2502
- return array[...,0], headers[...,0]
2503
- else:
2504
- return array, headers
2505
-
2506
-
2507
- def _get_pixel_array_from_sorted_instance_array(source, pixels_first=False):
2508
-
2509
- array = []
2510
- instances = source.ravel()
2511
- im = None
2512
- for i, im in enumerate(instances):
2513
- if im is None:
2514
- array.append(np.zeros((1,1)))
2515
- else:
2516
- im.progress(i+1, len(instances), 'Reading pixel data..')
2517
- array.append(im.get_pixel_array())
2518
- if im is not None:
2519
- im.status.hide()
2520
- array = _stack(array)
2521
- if array is None:
2522
- msg = 'Pixel array is empty. \n'
2523
- msg += 'Either because one or more of the keywords used for sorting does not exist; \n'
2524
- msg += 'or the series does not have any image data..'
2525
- raise ValueError(msg)
2526
- array = array.reshape(source.shape + array.shape[1:])
2527
- if pixels_first:
2528
- array = np.moveaxis(array, -1, 0)
2529
- array = np.moveaxis(array, -1, 0)
2530
- return array, source
2531
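The pixels_first flag only moves the two image axes from the back of the stacked array to the front. A numpy sketch of that reordering with arbitrary shapes:

import numpy as np

# Stacked array as returned internally: sort dimensions first, pixels last.
array = np.zeros((8, 3, 128, 96))          # (SliceLocation, FlipAngle, rows, cols)

# pixels_first=True: move rows and columns to the front, keeping their order.
pixels_first = np.moveaxis(np.moveaxis(array, -1, 0), -1, 0)
print(pixels_first.shape)                  # (128, 96, 8, 3)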
-
2532
-
2533
- def set_pixel_array(series, array, source=None, pixels_first=False, **kwargs):
2534
-
2535
- # Move pixels to the end (default)
2536
- if pixels_first:
2537
- array = np.moveaxis(array, 0, -1)
2538
- array = np.moveaxis(array, 0, -1)
2539
-
2540
- # if no header data are provided, use template headers.
2541
- nr_of_slices = int(np.prod(array.shape[:-2]))
2542
- if source is None:
2543
- source = [series.new_instance(MRImage()) for _ in range(nr_of_slices)]
2544
- if source.size == 0:
2545
- source = [series.new_instance(MRImage()) for _ in range(nr_of_slices)]
2546
-
2547
- # If the header data are not the same size, use only the first one.
2548
- else:
2549
- if isinstance(source, list):
2550
- pass
2551
- elif isinstance(source, np.ndarray):
2552
- source = source.ravel().tolist()
2553
- else: # assume scalar
2554
- source = [source] * nr_of_slices
2555
- if nr_of_slices != len(source):
2556
- source = [source[0]] * nr_of_slices
2557
-
2558
- # Copy all sources to the series, if they are not part of it
2559
- copy_source = []
2560
- instances = series.instances()
2561
- for i, s in enumerate(source):
2562
- if s in instances:
2563
- copy_source.append(s)
2564
- else:
2565
- series.progress(i+1, len(source), 'Copying series..')
2566
- copy_source.append(s.copy_to(series))
2567
-
2568
- # Faster but does not work if all sources are the same
2569
- # series.status.message('Saving array (1/2): Copying series..')
2570
- # instances = series.instances()
2571
- # to_copy = [i for i in range(len(source)) if source[i] not in instances]
2572
- # copied = series.adopt([source[i] for i in to_copy])
2573
- # for i, c in enumerate(copied):
2574
- # source[to_copy[i]] = c
2575
-
2576
- # Flatten array for iterating
2577
- array = array.reshape((nr_of_slices, array.shape[-2], array.shape[-1])) # shape (i,x,y)
2578
- for i, image in enumerate(copy_source):
2579
- series.progress(i+1, len(copy_source), 'Saving array..')
2580
- image.read()
2581
- for attr, vals in kwargs.items():
2582
- if isinstance(vals, list):
2583
- setattr(image, attr, vals[i])
2584
- else:
2585
- setattr(image, attr, vals)
2586
- image.set_pixel_array(array[i,...])
2587
- image.clear()
2588
-
2589
- # TODO: make this obsolete - only used in affine_matrix
2590
- def is_multislice(series):
2591
- orientation = series.ImageOrientationPatient
2592
- # Series is multislice if there are multiple unique orientations
2593
- if isinstance(orientation[0], list):
2594
- return True
2595
- #
2596
- # NOTE: 08/01/25: Added below conditions to correctly deal with situations
2597
- # where individual slices have been shifted but not rotated.
2598
- # From here: a series is multislice as soon as slices are not part of a
2599
- # uniformly spaced 3D volume.
2600
- #
2601
- pos = series.ImagePositionPatient
2602
- # If there is only one slice location, the series is not multislice
2603
- if not isinstance(pos[0], list):
2604
- return False
2605
- #
2606
- # If there are multiple positions, check that they are all on the slice
2607
- # vector. If at least one if them is not, the series is multislice.
2608
- #
2609
- # Get slice vector
2610
- row_vec = np.array(orientation[:3])
2611
- column_vec = np.array(orientation[3:])
2612
- slice_vec = np.cross(row_vec, column_vec)
2613
- for p in pos[1:]:
2614
- # Position relative to first slice position
2615
- prel = np.array(p)-np.array(pos[0])
2616
- # Parallel means cross product has length zero
2617
- norm = np.linalg.norm(np.cross(slice_vec, prel))
2618
- # Round to micrometers to avoid numerical error
2619
- if np.round(norm, 3) != 0:
2620
- return True
2621
- #
2622
- # If they are all on the slice vector, check that they have the same
2623
- # spacing. If more than one spacing is found, the series is multislice.
2624
- #
2625
- # Get slice locations
2626
- loc = [np.dot(p, slice_vec) for p in pos]
2627
- # Sort slice locations
2628
- loc = np.sort(loc)
2629
- # Get unique slice spacing (to micrometer precision)
2630
- spacing = np.unique(np.around(loc[1:]-loc[:-1], 3))
2631
- # If there is more than 1 slice spacing, the series is multislice
2632
- return spacing.size != 1
2633
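A compact numpy rendering of the geometric test above: compute the slice normal, check that no position lies off that axis, and check that the projected slice locations are uniformly spaced. The orientation and positions below are invented axial slices:

import numpy as np

orientation = [1.0, 0.0, 0.0, 0.0, 1.0, 0.0]            # axial ImageOrientationPatient
positions = [[0.0, 0.0, z] for z in (0.0, 1.5, 3.0)]     # one ImagePositionPatient per slice

row_vec, col_vec = np.array(orientation[:3]), np.array(orientation[3:])
slice_vec = np.cross(row_vec, col_vec)

# Off-axis component: a non-zero cross product means a slice was shifted sideways.
rel = np.array(positions) - np.array(positions[0])
off_axis = np.linalg.norm(np.cross(slice_vec, rel), axis=1)

# Spacing: unique gaps between sorted projections onto the slice normal.
loc = np.sort(np.array(positions) @ slice_vec)
gaps = np.unique(np.round(np.diff(loc), 3))

multislice = np.any(np.round(off_axis, 3) != 0) or gaps.size != 1
print(multislice)   # False: a uniformly spaced single slice group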
-
2634
- # TODO: make this obsolete -replace by affines
2635
- def affine_matrix(series):
2636
- """Returns the affine matrix of a series.
2637
-
2638
- If the series consists of multiple slice groups with different
2639
- image orientations, then a list of affine matrices is returned,
2640
- one for each slice orientation.
2641
- """
2642
- image_orientation = series.ImageOrientationPatient
2643
- if image_orientation is None:
2644
- msg = 'ImageOrientationPatient not defined in the DICOM header \n'
2645
-        msg += 'This is a required DICOM field \n'
2646
- msg += 'The data may be corrupted - please check'
2647
- raise ValueError(msg)
2648
-
2649
- # Multiple slice groups in series - return list of affine matrices
2650
- if is_multislice(series):
2651
- #
2652
- # NOTE: 08/01/2025: Changed definition of slice groups from "frames with
2653
- # the same orientation" to "frames with the same orientation and position"
2654
- #
2655
- # Get unique image positions
2656
- image_position = series.ImagePositionPatient
2657
-        # Make sure orientations and positions are lists
2658
- if not isinstance(image_orientation[0], list):
2659
- image_orientation = [image_orientation]
2660
- if not isinstance(image_position[0], list):
2661
- image_position = [image_position]
2662
- # Return one affine per slice group
2663
- affine_matrices = []
2664
- for dir in image_orientation:
2665
- for pos in image_position:
2666
- slice_group = series.instances(ImageOrientationPatient=dir, ImagePositionPatient=pos)
2667
- if len(slice_group) > 0:
2668
- affine = _slice_group_affine_matrix(slice_group, dir)
2669
- affine_matrices.append((affine, slice_group))
2670
- return affine_matrices
2671
-
2672
- # Single slice group in series - return a single affine matrix
2673
- else:
2674
- slice_group = series.instances()
2675
- affine = _slice_group_affine_matrix(slice_group, image_orientation)
2676
- return affine, slice_group
2677
-
2678
-
2679
- def _slice_group_affine_matrix(slice_group, image_orientation):
2680
- """Return the affine matrix of a slice group"""
2681
-
2682
- # single slice
2683
- if len(slice_group) == 1:
2684
- return slice_group[0].affine_matrix
2685
- # multi slice
2686
- else:
2687
- pos = [s.ImagePositionPatient for s in slice_group]
2688
- # Find unique elements
2689
- pos = [x for i, x in enumerate(pos) if i==pos.index(x)]
2690
-
2691
- # One slice location
2692
- if len(pos) == 1:
2693
- return slice_group[0].affine_matrix
2694
-
2695
- # Slices with different locations
2696
- else:
2697
- return image_utils.affine_matrix_multislice(
2698
- image_orientation, pos,
2699
- slice_group[0].PixelSpacing) # assume all the same pixel spacing
2700
-
2701
-
2702
- def sort_instance_array(instance_array, sortby=None):
2703
- if sortby is None:
2704
- return instance_array
2705
- else:
2706
- if not isinstance(sortby, list):
2707
- sortby = [sortby]
2708
- df = read_dataframe_from_instance_array(instance_array, sortby + ['SOPInstanceUID'])
2709
- df.sort_values(sortby, inplace=True)
2710
- return df_to_sorted_instance_array(instance_array[0], df, sortby)
2711
-
2712
-
2713
- def _instances(series, dims:tuple=None, inds:dict=None, select={}, **filters):
2714
-
2715
- # Use default dimensions if needed.
2716
- if dims is None:
2717
- dims = ('InstanceNumber',)
2718
-
2719
- # If indices are provided, check that they are compatible with dims.
2720
- if inds is not None:
2721
- for dim in inds:
2722
- if dim not in dims:
2723
- msg = 'Indices must be in the dimensions provided.'
2724
- raise ValueError(msg)
2725
-
2726
- # Get the frames and sort by dim
2727
- frames = instance_array(series, list(dims), report_none=True, select=select, **filters)
2728
- if frames.size == 0:
2729
- return frames.reshape(tuple([0]*len(dims)))
2730
- if frames.shape[-1] > 1:
2731
- d = ''.join(['('] + [str(v)+', ' for v in dims] + [')'])
2732
- msg = 'series shape is ambiguous in dimensions ' + d
2733
- msg += '\n--> Multiple frames exist at some or all locations.'
2734
- msg += '\n--> Hint: use Series.unique() to list the values at all locations.'
2735
- raise ValueError(msg)
2736
- if None in frames:
2737
- d = ''.join(['('] + [str(v)+', ' for v in dims] + [')'])
2738
- msg = 'series shape is not well defined in dimensions ' + d
2739
- msg += '\n--> There are no frames at some locations.'
2740
- msg += '\n--> Hint: use Series.value() to find the values at all locations.'
2741
- raise ValueError(msg)
2742
- frames = frames[...,0]
2743
-
2744
- # Extract indices and coordinates if provided
2745
- if inds is not None:
2746
- for dim in inds:
2747
- ind = inds[dim]
2748
- d = dims.index(dim)
2749
- frames = frames.take(ind, axis=d)
2750
- if not isinstance(ind, np.ndarray):
2751
- frames = np.expand_dims(frames, axis=d)
2752
- if frames.size == 0:
2753
- return frames.reshape(tuple([0]*len(dims)))
2754
- else:
2755
- return frames
2756
-
2757
-
2758
- def instance_array(record, sortby=None, report_none=False, select={}, **filters):
2759
- """Sort instances by a list of attributes.
2760
-
2761
- Args:
2762
- sortby:
2763
- List of DICOM keywords by which the series is sorted
2764
- Returns:
2765
- An ndarray holding the instances sorted by sortby.
2766
- """
2767
- if sortby is None:
2768
- instances = record.instances(**filters) # Note filter values here cant be arrays
2769
- array = np.empty(len(instances), dtype=object)
2770
- for i, instance in enumerate(instances):
2771
- array[i] = instance
2772
- return array
2773
- else:
2774
- if not isinstance(sortby, list):
2775
- sortby = [sortby]
2776
- df = record.read_dataframe(sortby + ['SOPInstanceUID'], select=select, **filters)
2777
- df = df[df.SOPInstanceUID.values != None]
2778
- if df.empty:
2779
- return np.array([])
2780
- if report_none:
2781
- if None in df.values:
2782
- d = ''.join(['('] + [str(v)+', ' for v in sortby] + [')'])
2783
- msg = 'series shape is not well defined in dimensions ' + d
2784
- msg += '\n--> Some of the dimensions are not defined in the header.'
2785
- msg += '\n--> Hint: use Series.value() to find the undefined values.'
2786
- raise ValueError(msg)
2787
- df.sort_values(sortby, inplace=True)
2788
- return df_to_sorted_instance_array(record, df, sortby)
2789
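The sorting itself is ordinary pandas: read the sort keys plus SOPInstanceUID into a DataFrame, drop rows without a UID, then sort hierarchically. A stand-alone sketch with a made-up DataFrame (column names follow the DICOM keywords used above):

import pandas as pd

df = pd.DataFrame({
    'SliceLocation':  [1.0, 0.0, 1.0, 0.0],
    'FlipAngle':      [15.0, 15.0, 2.0, 2.0],
    'SOPInstanceUID': ['1.2.3.4', '1.2.3.5', '1.2.3.6', None],
})

# Keep only rows that actually point at an instance, then sort hierarchically.
df = df[df.SOPInstanceUID.notna()]
df = df.sort_values(['SliceLocation', 'FlipAngle'])
print(df.SOPInstanceUID.tolist())   # ['1.2.3.5', '1.2.3.6', '1.2.3.4']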
-
2790
-
2791
- def df_to_sorted_instance_array(record, df, sortby):
2792
-
2793
- data = []
2794
- vals = df[sortby[0]].unique()
2795
- for i, c in enumerate(vals):
2796
- record.progress(i, len(vals), message='Sorting pixel data..')
2797
- # if a type is not supported by np.isnan()
2798
- # assume it is not a nan
2799
- if c is None: # this happens when undefined keyword is used
2800
- dfc = df[df[sortby[0]].isnull()]
2801
- else:
2802
- try:
2803
- nan = np.isnan(c)
2804
- except:
2805
- nan = False
2806
- if nan:
2807
- dfc = df[df[sortby[0]].isnull()]
2808
- else:
2809
- dfc = df[df[sortby[0]] == c]
2810
- if len(sortby) == 1:
2811
- datac = df_to_instance_array(record, dfc)
2812
- else:
2813
- datac = df_to_sorted_instance_array(record, dfc, sortby[1:])
2814
- data.append(datac)
2815
- return _stack(data, align_left=True)
2816
-
2817
-
2818
- def df_to_instance_array(record, df):
2819
- """Return datasets as numpy array of object type"""
2820
-
2821
- data = np.empty(df.shape[0], dtype=object)
2822
- for i, item in enumerate(df.SOPInstanceUID.items()):
2823
- data[i] = record.instance(key=item[0])
2824
- return data
2825
-
2826
-
2827
- def _stack(arrays, align_left=False):
2828
- """Stack a list of arrays of different shapes but same number of dimensions.
2829
-
2830
- This generalises numpy.stack to arrays of different sizes.
2831
- The stack has the size of the largest array.
2832
-    If an array is smaller it is padded with None and centred in the middle.
2833
-    Empty arrays are removed before stacking.
2834
- """
2835
-
2836
- # Get the dimensions of the stack
2837
- # For each dimension, look for the largest values across all arrays
2838
- #arrays = [a for a in arrays if a is not None]
2839
- arrays = [a for a in arrays if a.size != 0]
2840
- if arrays == []:
2841
- return np.array([])
2842
- ndim = len(arrays[0].shape)
2843
- dim = [0] * ndim
2844
- for array in arrays:
2845
- for i, d in enumerate(dim):
2846
- dim[i] = max((d, array.shape[i])) # changing the variable we are iterating over!!
2847
- # for i in range(ndim):
2848
- # dim[i] = max((dim[i], array.shape[i]))
2849
-
2850
- # Create the stack
2851
- # Add one dimension corresponding to the size of the stack
2852
- n = len(arrays)
2853
- #stack = np.full([n] + dim, 0, dtype=arrays[0].dtype)
2854
- stack = np.full([n] + dim, None, dtype=arrays[0].dtype)
2855
-
2856
- for k, array in enumerate(arrays):
2857
- index = [k]
2858
- for i, d in enumerate(dim):
2859
- if align_left:
2860
- i0 = 0
2861
- else: # align center and zero-pad missing values
2862
- i0 = math.floor((d-array.shape[i])/2)
2863
- i1 = i0 + array.shape[i]
2864
- index.append(slice(i0,i1))
2865
- stack[tuple(index)] = array
2866
-
2867
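The centring and padding are easiest to see on two small arrays of different widths; a sketch of the same logic with plain numpy object arrays:

import math
import numpy as np

arrays = [np.array([[1, 2, 3]], dtype=object),
          np.array([[4]], dtype=object)]

# Stack size: the largest extent along each dimension.
dim = [max(a.shape[i] for a in arrays) for i in range(arrays[0].ndim)]
stack = np.full([len(arrays)] + dim, None, dtype=object)

for k, a in enumerate(arrays):
    index = [k]
    for i, d in enumerate(dim):
        i0 = math.floor((d - a.shape[i]) / 2)     # centre the smaller array
        index.append(slice(i0, i0 + a.shape[i]))
    stack[tuple(index)] = a

print(stack[1])   # [[None 4 None]] - padded with None and centred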
- return stack
2868
-
2869
-
2870
-
2871
-
2872
-
2873
-
2874
-