dbdicom 0.3.4__py3-none-any.whl → 0.3.6__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

dbdicom/__init__.py CHANGED
@@ -1 +1,4 @@
- from dbdicom.api import *
+ from dbdicom.api import *
+
+ # Utilities
+ from dbdicom.utils.image import affine_matrix
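
With this re-export, affine_matrix becomes available directly from the top-level package. A minimal sketch of a call, assuming the signature shown in the dbdicom/utils/image.py hunk further down; the geometry values are illustrative only:

    import dbdicom

    # 4x4 affine from standard DICOM geometry attributes (illustrative values)
    affine = dbdicom.affine_matrix(
        image_orientation=[1, 0, 0, 0, 1, 0],   # ImageOrientationPatient
        image_position=[-120.0, -100.0, 60.0],  # ImagePositionPatient
        pixel_spacing=[1.5, 1.5],               # PixelSpacing
        slice_spacing=3.0,                      # SpacingBetweenSlices
    )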
dbdicom/api.py CHANGED
@@ -262,13 +262,65 @@ def from_nifti(file:str, series:list, ref:list=None):
  dbd.from_nifti(file, series, ref)
  dbd.close()

- def pixel_data(series:list, dims:list=None, include:list=None) -> tuple:
+
+ def values(series:list, attr=None, dims:list=None, coords=False) -> Union[dict, tuple]:
+ """Read the values of some or all attributes from a DICOM series
+
+ Args:
+ series (list or str): DICOM series to read. This can also
+ be a path to a folder containing DICOM files, or a
+ patient or study to read all series in that patient or
+ study. In those cases a list is returned.
+ attr (list, optional): list of DICOM attributes to read.
+ dims (list, optional): Dimensions to sort the attributes.
+ If dims is not provided, values are sorted by
+ InstanceNumber.
+ coords (bool): If set to True, the coordinates of the
+ attributes are returned alongside the values
+
+ Returns:
+ dict or tuple: values as a dictionary in the last
+ return value, where each value is a numpy array with
+ the required dimensions. If coords is set to True,
+ these are returned too.
+ """
+ if isinstance(series, str):
+ series = [series]
+ dbd = open(series[0])
+ array = dbd.values(series, attr, dims, coords)
+ dbd.close()
+ return array
+
+
+ def files(entity:list) -> list:
+ """Read the files in a DICOM entity
+
+ Args:
+ entity (list or str): DICOM entity to read. This can
+ be a path to a folder containing DICOM files, or a
+ patient or study to read all series in that patient or
+ study.
+
+ Returns:
+ list: list of valid dicom files.
+ """
+ if isinstance(entity, str):
+ entity = [entity]
+ dbd = open(entity[0])
+ files = dbd.files(entity)
+ dbd.close()
+ return files
+
+
+ def pixel_data(series:list, dims:list=None, coords=False, attr:list=None) -> tuple:
  """Read the pixel data from a DICOM series

  Args:
  series (list): DICOM series to read
  dims (list, optional): Dimensions of the array.
- include (list, optional): list of DICOM attributes that are
+ coords (bool): If set to True, the coordinates of the
+ slices are returned alongside the pixel data.
+ attr (list, optional): list of DICOM attributes that are
  read on the fly to avoid reading the data twice.

  Returns:
@@ -280,7 +332,7 @@ def pixel_data(series:list, dims:list=None, include:list=None) -> tuple:
  if isinstance(series, str):
  series = [series]
  dbd = open(series[0])
- array = dbd.pixel_data(series, dims, include)
+ array = dbd.pixel_data(series, dims, coords, attr)
  dbd.close()
  return array

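Taken together, the reworked top-level API reads roughly as below. This is a sketch based on the docstrings above; the folder path and attribute names are illustrative, and per the docstrings a folder path returns one result per series it contains:

    import dbdicom

    folder = 'C:/data/dicom'  # illustrative path to a DICOM folder

    # Pixel data per series, sorted by InstanceNumber when dims is not given
    arrays = dbdicom.pixel_data(folder)

    # Header values only, here a single attribute per series
    times = dbdicom.values(folder, attr='AcquisitionTime')

    # All valid DICOM files under the folder
    files = dbdicom.files(folder)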
dbdicom/dataset.py CHANGED
@@ -574,25 +574,51 @@ def set_pixel_data(ds, array):
  if hasattr(mod, 'set_pixel_data'):
  return getattr(mod, 'set_pixel_data')(ds, array)

- # if array.ndim >= 3: # remove spurious dimensions of 1
- # array = np.squeeze(array)
+ ds.BitsAllocated = 16
+ ds.BitsStored = 16
+ ds.HighBit = 15
+
+ if array.dtype==np.int16:
+ array = image.clip(array) # remove nan and infs
+ ds.PixelRepresentation = 1
+ ds.RescaleSlope = 1
+ ds.RescaleIntercept = 0
+ elif array.dtype==np.uint16:
+ array = image.clip(array) # remove nan and infs
+ ds.PixelRepresentation = 0
+ ds.RescaleSlope = 1
+ ds.RescaleIntercept = 0
+ else:
+ array = image.clip(array) # remove nan and infs
+ array, slope, intercept = image.scale_to_range(array, ds.BitsStored)
+ ds.PixelRepresentation = 0
+ ds.RescaleSlope = 1 / slope
+ ds.RescaleIntercept = - intercept / slope

- array = image.clip(array.astype(np.float32))
- array, slope, intercept = image.scale_to_range(array, ds.BitsAllocated)
  array = np.transpose(array)
-
- ds.PixelRepresentation = 0
- #ds.SmallestImagePixelValue = int(0)
- #ds.LargestImagePixelValue = int(2**ds.BitsAllocated - 1)
- #ds.set_values('SmallestImagePixelValue', int(0))
- #ds.set_values('LargestImagePixelValue', int(2**ds.BitsAllocated - 1))
- ds.RescaleSlope = 1 / slope
- ds.RescaleIntercept = - intercept / slope
- # ds.WindowCenter = (maximum + minimum) / 2
- # ds.WindowWidth = maximum - minimum
  ds.Rows = array.shape[0]
  ds.Columns = array.shape[1]
  ds.PixelData = array.tobytes()
+
+ # # if array.ndim >= 3: # remove spurious dimensions of 1
+ # # array = np.squeeze(array)
+
+ # array = image.clip(array.astype(np.float32))
+ # array, slope, intercept = image.scale_to_range(array, ds.BitsAllocated)
+ # array = np.transpose(array)
+
+ # ds.PixelRepresentation = 0
+ # #ds.SmallestImagePixelValue = int(0)
+ # #ds.LargestImagePixelValue = int(2**ds.BitsAllocated - 1)
+ # #ds.set_values('SmallestImagePixelValue', int(0))
+ # #ds.set_values('LargestImagePixelValue', int(2**ds.BitsAllocated - 1))
+ # ds.RescaleSlope = 1 / slope
+ # ds.RescaleIntercept = - intercept / slope
+ # # ds.WindowCenter = (maximum + minimum) / 2
+ # # ds.WindowWidth = maximum - minimum
+ # ds.Rows = array.shape[0]
+ # ds.Columns = array.shape[1]
+ # ds.PixelData = array.tobytes()


  def volume(ds):
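
The new branch for non-integer arrays inverts the scaling it applies: if floats are stored as stored = slope * value + intercept, a reader recovers value = (stored - intercept) / slope, hence RescaleSlope = 1 / slope and RescaleIntercept = -intercept / slope. A minimal round-trip sketch; scale_to_range below is a stand-in for the helper in dbdicom.utils.image, not its actual implementation:

    import numpy as np

    def scale_to_range(array, bits_stored):
        # Stand-in: map floats onto the unsigned range [0, 2**bits_stored - 1]
        target = 2**bits_stored - 1
        slope = target / (array.max() - array.min())
        intercept = -slope * array.min()
        stored = np.around(array * slope + intercept).astype(np.uint16)
        return stored, slope, intercept

    values = np.array([[0.0, 0.5], [1.0, 2.0]])
    stored, slope, intercept = scale_to_range(values, 16)

    # Readers reconstruct: value = RescaleSlope * stored + RescaleIntercept
    rescale_slope = 1 / slope
    rescale_intercept = -intercept / slope
    recovered = rescale_slope * stored.astype(float) + rescale_intercept
    assert np.allclose(recovered, values, atol=1e-3)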
dbdicom/dbd.py CHANGED
@@ -38,8 +38,13 @@ class DataBaseDicom():
  # is not properly closed this will prevent that changes
  # have been made which are not reflected in the json
  # file on disk
- os.remove(file)
- except:
+ # os.remove(file)
+ except Exception as e:
+ # raise ValueError(
+ # f'Cannot open {file}. Please close any programs that are '
+ # f'using it and try again. Alternatively you can delete the file '
+ # f'manually and try again.'
+ # )
  # If the file can't be read, delete it and load again
  os.remove(file)
  self.read()
@@ -239,9 +244,7 @@ class DataBaseDicom():
  if not np.array_equal(coords[1:,k+1,...], c0):
  raise ValueError(
  "Cannot build a single volume. Not all slices "
- "have the same coordinates. \nIf you set "
- "firstslice=True, the coordinates of the lowest "
- "slice will be assigned to the whole volume."
+ "have the same coordinates."
  )

  # Infer spacing between slices from slice locations
@@ -335,7 +338,7 @@ class DataBaseDicom():
  self.write_volume(vol, series, ref)
  return self

- def pixel_data(self, series:list, dims:list=None, coords=False, include=None) -> np.ndarray:
+ def pixel_data(self, series:list, dims:list=None, coords=False, attr=None) -> np.ndarray:
  """Read the pixel data from a DICOM series

  Args:
@@ -344,9 +347,9 @@ class DataBaseDicom():
  patient or study to read all series in that patient or
  study. In those cases a list is returned.
  dims (list, optional): Dimensions of the array.
- coords (bool): If set to Trye, the coordinates of the
+ coords (bool): If set to True, the coordinates of the
  arrays are returned alongside the pixel data
- include (list, optional): list of DICOM attributes that are
+ attr (list, optional): list of DICOM attributes that are
  read on the fly to avoid reading the data twice.

  Returns:
@@ -354,48 +357,42 @@ class DataBaseDicom():
  at least 3 dimensions (x,y,z). If
  coords is set these are returned too as an array with
  coordinates of the slices according to dims. If include
- is provide the values are returned as a dictionary in the last
+ is provided the values are returned as a dictionary in the last
  return value.
  """
  if isinstance(series, str): # path to folder
- return [self.pixel_data(s, dims, coords, include) for s in self.series(series)]
+ return [self.pixel_data(s, dims, coords, attr) for s in self.series(series)]
  if len(series) < 4: # folder, patient or study
- return [self.pixel_data(s, dims, coords, include) for s in self.series(series)]
- if coords:
- if dims is None:
- raise ValueError(
- "Coordinates can only be returned if dimensions are specified."
- )
+ return [self.pixel_data(s, dims, coords, attr) for s in self.series(series)]

  if dims is None:
- dims = []
+ dims = ['InstanceNumber']
  elif np.isscalar(dims):
  dims = [dims]
  else:
  dims = list(dims)
- dims = ['SliceLocation'] + dims

  # Ensure return_vals is a list
- if include is None:
+ if attr is None:
  params = []
- elif np.isscalar(include):
- params = [include]
+ elif np.isscalar(attr):
+ params = [attr]
  else:
- params = list(include)
+ params = list(attr)

  files = register.files(self.register, series)

  # Read dicom files
  coords_array = []
  arrays = np.empty(len(files), dtype=dict)
- if include is not None:
+ if attr is not None:
  values = np.empty(len(files), dtype=dict)
  for i, f in tqdm(enumerate(files), desc='Reading pixel data..'):
  ds = dbdataset.read_dataset(f)
  coords_array.append(dbdataset.get_values(ds, dims))
  # save as dict so numpy does not stack as arrays
  arrays[i] = {'pixel_data': dbdataset.pixel_data(ds)}
- if include is not None:
+ if attr is not None:
  values[i] = {'values': dbdataset.get_values(ds, params)}

  # Format as mesh
@@ -406,20 +403,169 @@ class DataBaseDicom():
  arrays = np.stack([a['pixel_data'] for a in arrays.reshape(-1)], axis=-1)
  arrays = arrays.reshape(arrays.shape[:2] + coords_array.shape[1:])

- if include is None:
+ if attr is None:
  if coords:
- return arrays, coords_array[1:,...]
+ return arrays, coords_array
  else:
  return arrays
+
+ # Return values as a dictionary
+ values = values[inds].reshape(-1)
+ values_dict = {}
+ for p in range(len(params)):
+ # Get the type from the first value
+ vp0 = values[0]['values'][p]
+ # Build an array of the right type
+ vp = np.zeros(values.size, dtype=type(vp0))
+ # Populate the array with values for parameter p
+ for i, v in enumerate(values):
+ vp[i] = v['values'][p]
+ # Reshape values for parameter p
+ vp = vp.reshape(coords_array.shape[1:])
+ # Eneter in the dictionary
+ values_dict[params[p]] = vp
+
+ # If only one, return as value
+ if len(params) == 1:
+ values_return = values_dict[attr[0]]
+ else:
+ values_return = values_dict

- values = values[inds].reshape(coords_array.shape[1:])
- values = np.stack([a['values'] for a in values.reshape(-1)], axis=-1)
- values = values.reshape((len(params), ) + coords_array.shape[1:])
+ # problem if the values are a list. Needs an array with a prespeficied dtype
+ # values = values[inds].reshape(coords_array.shape[1:])
+ # values = np.stack([a['values'] for a in values.reshape(-1)], axis=-1)
+ # values = values.reshape((len(params), ) + coords_array.shape[1:])

  if coords:
- return arrays, coords_array[1:,...], values
+ return arrays, coords_array, values_return
+ else:
+ return arrays, values_return
+
+
+ def values(self, series:list, attr=None, dims:list=None, coords=False) -> Union[dict, tuple]:
+ """Read the values of some or all attributes from a DICOM series
+
+ Args:
+ series (list or str): DICOM series to read. This can also
+ be a path to a folder containing DICOM files, or a
+ patient or study to read all series in that patient or
+ study. In those cases a list is returned.
+ attr (list, optional): list of DICOM attributes to read.
+ dims (list, optional): Dimensions to sort the attributes.
+ If dims is not provided, values are sorted by
+ InstanceNumber.
+ coords (bool): If set to True, the coordinates of the
+ attributes are returned alongside the values
+
+ Returns:
+ dict or tuple: values as a dictionary in the last
+ return value, where each value is a numpy array with
+ the required dimensions. If coords is set to True,
+ these are returned too.
+ """
+ if isinstance(series, str): # path to folder
+ return [self.values(s, attr, dims, coords) for s in self.series(series)]
+ if len(series) < 4: # folder, patient or study
+ return [self.values(s, attr, dims, coords) for s in self.series(series)]
+
+ if dims is None:
+ dims = ['InstanceNumber']
+ elif np.isscalar(dims):
+ dims = [dims]
  else:
- return arrays, values
+ dims = list(dims)
+
+ files = register.files(self.register, series)
+
+ # Ensure return_vals is a list
+ if attr is None:
+ # If attributes are not provided, read all
+ # attributes from the first file
+ ds = dbdataset.read_dataset(files[0])
+ exclude = ['PixelData', 'FloatPixelData', 'DoubleFloatPixelData']
+ params = []
+ param_labels = []
+ for elem in ds:
+ if elem.keyword not in exclude:
+ params.append(elem.tag)
+ # For known tags use the keyword as label
+ label = elem.tag if len(elem.keyword)==0 else elem.keyword
+ param_labels.append(label)
+ elif np.isscalar(attr):
+ params = [attr]
+ param_labels = params[:]
+ else:
+ params = list(attr)
+ param_labels = params[:]
+
+ # Read dicom files
+ coords_array = []
+ values = np.empty(len(files), dtype=dict)
+ for i, f in tqdm(enumerate(files), desc='Reading values..'):
+ ds = dbdataset.read_dataset(f)
+ coords_array.append(dbdataset.get_values(ds, dims))
+ # save as dict so numpy does not stack as arrays
+ values[i] = {'values': dbdataset.get_values(ds, params)}
+
+ # Format as mesh
+ coords_array = np.stack([v for v in coords_array], axis=-1)
+ coords_array, inds = dbdicom.utils.arrays.meshvals(coords_array)
+
+ # Sort values accordingly
+ values = values[inds].reshape(-1)
+
+ # Return values as a dictionary
+ values_dict = {}
+ for p in range(len(params)):
+ # Get the type from the first value
+ vp0 = values[0]['values'][p]
+ # Build an array of the right type
+ vp = np.zeros(values.size, dtype=type(vp0))
+ # Populate the arrate with values for parameter p
+ for i, v in enumerate(values):
+ vp[i] = v['values'][p]
+ # Reshape values for parameter p
+ vp = vp.reshape(coords_array.shape[1:])
+ # Eneter in the dictionary
+ values_dict[param_labels[p]] = vp
+
+ # If only one, return as value
+ if len(params) == 1:
+ values_return = values_dict[params[0]]
+ else:
+ values_return = values_dict
+
+ if coords:
+ return values_return, coords_array
+ else:
+ return values_return
+
+
+ def files(self, entity:list) -> list:
+ """Read the files in a DICOM entity
+
+ Args:
+ entity (list or str): DICOM entity to read. This can
+ be a path to a folder containing DICOM files, or a
+ patient or study to read all series in that patient or
+ study.
+
+ Returns:
+ list: list of valid dicom files.
+ """
+ if isinstance(entity, str): # path to folder
+ files = []
+ for s in self.series(entity):
+ files += self.files(s)
+ return files
+ if len(entity) < 4: # folder, patient or study
+ files = []
+ for s in self.series(entity):
+ files += self.files(s)
+ return files
+
+ return register.files(self.register, entity)
+


  def unique(self, pars:list, entity:list) -> dict:
@@ -723,9 +869,9 @@ class DataBaseDicom():
  dbdataset.set_values(ds, list(attr.keys()), list(attr.values()))
  # Save results in a new file
  rel_dir = os.path.join(
- f"patient_{attr['PatientID']}",
- f"study_[{attr['StudyID']}]_{attr['StudyDescription']}",
- f"series_[{attr['SeriesNumber']}]_{attr['SeriesDescription']}",
+ f"Patient__{attr['PatientID']}",
+ f"Study__{attr['StudyID']}__{attr['StudyDescription']}",
+ f"Series__{attr['SeriesNumber']}__{attr['SeriesDescription']}",
  )
  os.makedirs(os.path.join(self.path, rel_dir), exist_ok=True)
  rel_path = os.path.join(rel_dir, dbdataset.new_uid() + '.dcm')
@@ -761,8 +907,8 @@ def infer_slice_spacing(vols):
  slice_loc = np.sort(slice_loc)
  distances = slice_loc[1:] - slice_loc[:-1]

- # Round to micrometer and check if unique
- distances = np.around(distances, 3)
+ # Round to 10 micrometer and check if unique
+ distances = np.around(distances, 2)
  slice_spacing_d = np.unique(distances)

  # Check if unique - otherwise this is not a volume
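
The rounding change in infer_slice_spacing is easiest to see with numbers: spacing is inferred from consecutive differences of the sorted slice locations, and rounding those differences to 2 decimals (10 micrometer) instead of 3 makes the uniqueness check tolerant of micrometer-scale jitter in the slice positions. A small sketch of the idea, with illustrative locations:

    import numpy as np

    slice_loc = np.sort(np.array([0.0, 2.501, 4.999, 7.5]))  # jittered 2.5 mm grid
    distances = slice_loc[1:] - slice_loc[:-1]

    # Rounding to micrometer (3 decimals) leaves several distinct spacings ...
    print(np.unique(np.around(distances, 3)))   # [2.498 2.501]
    # ... rounding to 10 micrometer (2 decimals) leaves a single spacing
    print(np.unique(np.around(distances, 2)))   # [2.5]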
@@ -1,6 +1,7 @@
  import os

  import numpy as np
+ import vreg
  import pydicom
  from pydicom.dataset import FileMetaDataset, Dataset, FileDataset
  from pydicom.sequence import Sequence
@@ -53,18 +54,18 @@ def set_pixel_data(ds, array):
  ds.BitsStored = 16
  ds.HighBit = 15

- # clipping may slow down a lot
- #array = image.clip(array.astype(np.float32))
- array = image.clip(array) # remove nan and infs
  if array.dtype==np.int16:
+ array = image.clip(array) # remove nan and infs
  ds.PixelRepresentation = 1
  ds.RescaleSlope = 1
  ds.RescaleIntercept = 0
  elif array.dtype==np.uint16:
+ array = image.clip(array) # remove nan and infs
  ds.PixelRepresentation = 0
  ds.RescaleSlope = 1
  ds.RescaleIntercept = 0
  else:
+ array = image.clip(array) # remove nan and infs
  array, slope, intercept = image.scale_to_range(array, ds.BitsStored)
  ds.PixelRepresentation = 0
  ds.RescaleSlope = 1 / slope
@@ -76,6 +77,7 @@ def set_pixel_data(ds, array):
  ds.PixelData = array.tobytes()


+
  def default(): # from the RIDER dataset

dbdicom/utils/arrays.py CHANGED
@@ -29,8 +29,12 @@ def _mesh_shape(sorted_array):

  if np.prod(shape) != sorted_array.size:
  raise ValueError(
- 'These are not mesh coordinates.'
- 'Make sure to specify dimensions for a multidimensional series.'
+ 'Improper dimensions for the series. This usually means '
+ 'that there are multiple images at the same location, \n or that '
+ 'there are no images at one or more locations. \n\n'
+ 'Make sure to specify proper dimensions when reading a pixel array or volume. \n'
+ 'If the default dimensions of pixel_array (InstanceNumber) generate this error, '
+ 'the DICOM data may be corrupted.'
  )

  return shape
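
For context on when this error fires: a mesh requires that the product of the number of unique values along each sorting dimension equals the number of images, so a missing or duplicated location breaks the check. A small illustration of the condition; the helper below is a stand-in, not dbdicom.utils.arrays itself:

    import numpy as np

    def mesh_shape_or_error(coords):
        # coords: one row per sorting dimension, one column per image
        shape = tuple(len(np.unique(row)) for row in coords)
        if np.prod(shape) != coords.shape[1]:
            raise ValueError('Improper dimensions for the series.')
        return shape

    # 2 slice locations x 3 acquisition times = 6 images: a valid mesh
    good = np.array([[0, 0, 0, 5, 5, 5],
                     [1, 2, 3, 1, 2, 3]])
    print(mesh_shape_or_error(good))   # (2, 3)

    # One image missing from the grid: 2 x 3 != 5, so this raises
    bad = good[:, :5]
    # mesh_shape_or_error(bad)         # ValueError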
dbdicom/utils/image.py CHANGED
@@ -7,7 +7,7 @@ def affine_matrix( # single slice function
  image_orientation, # ImageOrientationPatient
  image_position, # ImagePositionPatient
  pixel_spacing, # PixelSpacing
- slice_thickness): # SliceThickness
+ slice_spacing): # SpacingBetweenSlices

  row_spacing = pixel_spacing[0]
  column_spacing = pixel_spacing[1]
@@ -16,15 +16,10 @@ def affine_matrix( # single slice function
  column_cosine = np.array(image_orientation[3:])
  slice_cosine = np.cross(row_cosine, column_cosine)

- # This should not be addressed here
- # # The coronal orientation has a left-handed reference frame
- # if np.array_equal(np.around(image_orientation, 3), [1,0,0,0,0,-1]):
- # slice_cosine = -slice_cosine
-
  affine = np.identity(4, dtype=np.float32)
  affine[:3, 0] = row_cosine * column_spacing
  affine[:3, 1] = column_cosine * row_spacing
- affine[:3, 2] = slice_cosine * slice_thickness
+ affine[:3, 2] = slice_cosine * slice_spacing
  affine[:3, 3] = image_position

  return affine
@@ -91,8 +86,8 @@ def scale_to_range(array, bits_allocated, signed=False):
  else:
  slope = range / (maximum - minimum)
  intercept = -slope * minimum + minval
- array *= slope
- array += intercept
+ array = array * slope
+ array = array + intercept

  if bits_allocated == 8:
  if signed:
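
The switch from in-place operators to plain assignment in scale_to_range matters when the input is an integer array: with recent NumPy versions, an in-place "array *= slope" cannot cast the float result back into the integer buffer and raises, while "array = array * slope" simply rebinds the name to a new float array. A minimal illustration with assumed values:

    import numpy as np

    array = np.array([0, 100, 200], dtype=np.int16)
    slope = 0.5

    array = array * slope   # fine: creates a new float64 array
    # array *= slope        # UFuncTypeError: cannot cast the float64 result
    #                       # into the int16 buffer ('same_kind' casting)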
@@ -1,6 +1,6 @@
  Metadata-Version: 2.4
  Name: dbdicom
- Version: 0.3.4
+ Version: 0.3.6
  Summary: A pythonic interface for reading and writing DICOM databases
  Author-email: Steven Sourbron <s.sourbron@sheffield.ac.uk>, Ebony Gunwhy <e.gunwhy@sheffield.ac.uk>
  Project-URL: Homepage, https://openmiblab.github.io/dbdicom/
@@ -1,9 +1,9 @@
- dbdicom/__init__.py,sha256=DyogeTraV6o-FgWdBCbtVEaMmdkMQHkYkraDIE0t8OA,25
- dbdicom/api.py,sha256=gBBOgzg54zWA_Rf4392OyNBQUEHt3FYYTLq_QA75bUc,9596
+ dbdicom/__init__.py,sha256=dW5aezonmMc_41Dp1PuYmXQlr307RkyJxsJuetkpWso,87
+ dbdicom/api.py,sha256=GiXke8CmCjfzGnRnHqKtuq68rA7yFB4BY74VsWSb0E4,11469
  dbdicom/const.py,sha256=BqBiRRjeiSqDr1W6YvaayD8WKCjG4Cny2NT0GeLM6bI,4269
  dbdicom/database.py,sha256=_LUbH7gc9l7j_63AC71DjwxgTUwbEjHSy5kuvRw75Hw,4764
- dbdicom/dataset.py,sha256=hLAyFlN7zQ-dOzI9V67aHfTq3VtpvCI7_83tnBqXObE,21880
- dbdicom/dbd.py,sha256=6ewuLQwiYGI5EtzWQskxk4CKoprc2gU-m-iSIMPNTv8,31090
+ dbdicom/dataset.py,sha256=kbswoVE3-Wtu0uwcSW319KzpdWx5fZphfKxizxJePzc,22773
+ dbdicom/dbd.py,sha256=_Fj9xFpx6Rsr5UH5YR6PJaJPSOl4cMfNmk9V2b1dEyo,36894
  dbdicom/register.py,sha256=_NyNbOEAN_hkwjxupNpr9F5DWUwARCsci8knK41-EsA,13931
  dbdicom/external/__init__.py,sha256=XNQqfspyf6vFGedXlRKZsUB8k8E-0W19Uamwn8Aioxo,316
  dbdicom/external/__pycache__/__init__.cpython-311.pyc,sha256=pXAQ35ixd92fm6YcuHgzR1t6RcASQ-cHhU1wOA5b8sw,542
@@ -36,19 +36,19 @@ dbdicom/external/dcm4che/lib/windows-x86/opencv_java.dll,sha256=QanyzLy0Cd79-aOV
  dbdicom/external/dcm4che/lib/windows-x86-64/opencv_java.dll,sha256=TmjW2SbG4MR3GQ95T8xCVVDLgsdKukgaHBPUvWkfXp8,11039232
  dbdicom/sop_classes/ct_image.py,sha256=16PNv_0e1_7cfxE12JWlx5YQeaTAQVzwtXTjxs3aonk,2812
  dbdicom/sop_classes/enhanced_mr_image.py,sha256=13j4EGXniBpJxpzzL3Xa4y3g5OKhMd5Ct7cjPGOYQY4,35496
- dbdicom/sop_classes/mr_image.py,sha256=kNcrWXZ3VC3hhfqjMRjrlZOVqZH3Q5KfWXYLfLD-bEY,10913
+ dbdicom/sop_classes/mr_image.py,sha256=1biIw7R26Fc38FAeSlWxd29VO17e8cEQdDIdLbeXTzw,10959
  dbdicom/sop_classes/parametric_map.py,sha256=2OKBuC2bo03OEpKqimQS-nVGFp1cKRPYwVgmDGVf1JU,12288
  dbdicom/sop_classes/secondary_capture.py,sha256=wgNRX8qyhV7HR7Jq2tQWPPuGpiRzYl6qPOgK6qFbPUc,4541
  dbdicom/sop_classes/segmentation.py,sha256=I8-PciIoIz27_-dZ4esBZSw0TBBbO8KbNYTiTmVe62g,11465
  dbdicom/sop_classes/ultrasound_multiframe_image.py,sha256=j3KN5R90j6WwPMy01hAN2_XSum5TvksF2MYoNGfX_yE,2797
  dbdicom/sop_classes/xray_angiographic_image.py,sha256=nWysCGaEWKVNItnOgyJfcGMpS3oEK1T0_uNR2D7p0Ls,3270
- dbdicom/utils/arrays.py,sha256=wiqCczLXlNl0qIePVOwCYvbAJhPveNorplkhtGleS48,1121
+ dbdicom/utils/arrays.py,sha256=cZo6hKk-pg_e2WCs9vxW9dxX04gmH3EwSZKFX1n8pq4,1451
  dbdicom/utils/dcm4che.py,sha256=Vxq8NYWWK3BuqJkzhBQ89oMqzJlnxqTxgsgTo_Frznc,2317
  dbdicom/utils/files.py,sha256=qhWNJqeWnRjDNbERpC6Mz962_TW9mFdvd2lnBbK3xt4,2259
- dbdicom/utils/image.py,sha256=D46CD_ezpp2uq8VMqug5Z09fAyoJ9U6VwuxIFNJK8zg,4048
+ dbdicom/utils/image.py,sha256=qsU_wOdleZCqU5g7LSp8OXKmoG109NauYx7wad_ZQ7Q,3839
  dbdicom/utils/variables.py,sha256=vUh5cDnmCft5hoXDYXUvfkg5Cy5WlgMAogU38Y_BKRo,5753
- dbdicom-0.3.4.dist-info/licenses/LICENSE,sha256=HrhfyXIkWY2tGFK11kg7vPCqhgh5DcxleloqdhrpyMY,11558
- dbdicom-0.3.4.dist-info/METADATA,sha256=DPuzyZj81SfdTQGR_qogbbI1BxeG6StqSLM_pgnck44,1030
- dbdicom-0.3.4.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
- dbdicom-0.3.4.dist-info/top_level.txt,sha256=nJWxXg4YjD6QblfmhrzTMXcr8FSKNc0Yk-CAIDUsYkQ,8
- dbdicom-0.3.4.dist-info/RECORD,,
+ dbdicom-0.3.6.dist-info/licenses/LICENSE,sha256=HrhfyXIkWY2tGFK11kg7vPCqhgh5DcxleloqdhrpyMY,11558
+ dbdicom-0.3.6.dist-info/METADATA,sha256=cl4tN9uUuh7Cr_5V0zlBCwyTfWdJH69C4kAettD0CFg,1030
+ dbdicom-0.3.6.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
+ dbdicom-0.3.6.dist-info/top_level.txt,sha256=nJWxXg4YjD6QblfmhrzTMXcr8FSKNc0Yk-CAIDUsYkQ,8
+ dbdicom-0.3.6.dist-info/RECORD,,