dbdicom 0.3.3__py3-none-any.whl → 0.3.4__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.

This version of dbdicom might be problematic.

dbdicom/api.py CHANGED
@@ -88,14 +88,14 @@ def patients(path, name:str=None, contains:str=None, isin:list=None)->list:
  return p


- def studies(entity:str | list, name:str=None, contains:str=None, isin:list=None)->list:
+ def studies(entity:str | list, desc:str=None, contains:str=None, isin:list=None)->list:
  """Return a list of studies in the DICOM folder.

  Args:
  entity (str or list): path to a DICOM folder (to search in
  the whole folder), or a two-element list identifying a
  patient (to search studies of a given patient).
- name (str, optional): value of StudyDescription, to search for
+ desc (str, optional): value of StudyDescription, to search for
  studies with a given description. Defaults to None.
  contains (str, optional): substring of StudyDescription, to
  search for studies based on part of their description.
@@ -108,12 +108,12 @@ def studies(entity:str | list, name:str=None, contains:str=None, isin:list=None)
  """
  if isinstance(entity, str): # path = folder
  dbd = open(entity)
- s = dbd.studies(entity, name, contains, isin)
+ s = dbd.studies(entity, desc, contains, isin)
  dbd.close()
  return s
  elif len(entity)==2: # path = patient
  dbd = open(entity[0])
- s = dbd.studies(entity, name, contains, isin)
+ s = dbd.studies(entity, desc, contains, isin)
  dbd.close()
  return s
  else:
@@ -122,7 +122,7 @@ def studies(entity:str | list, name:str=None, contains:str=None, isin:list=None)
  "with a folder and a patient name."
  )

- def series(entity:str | list, name:str=None, contains:str=None, isin:list=None)->list:
+ def series(entity:str | list, desc:str=None, contains:str=None, isin:list=None)->list:
  """Return a list of series in the DICOM folder.

  Args:
@@ -130,7 +130,7 @@ def series(entity:str | list, name:str=None, contains:str=None, isin:list=None)-
  the whole folder), or a list identifying a
  patient or a study (to search series of a given patient
  or study).
- name (str, optional): value of SeriesDescription, to search for
+ desc (str, optional): value of SeriesDescription, to search for
  series with a given description. Defaults to None.
  contains (str, optional): substring of SeriesDescription, to
  search for series based on part of their description.
@@ -143,12 +143,12 @@ def series(entity:str | list, name:str=None, contains:str=None, isin:list=None)-
  """
  if isinstance(entity, str): # path = folder
  dbd = open(entity)
- s = dbd.series(entity, name, contains, isin)
+ s = dbd.series(entity, desc, contains, isin)
  dbd.close()
  return s
  elif len(entity) in [2,3]:
  dbd = open(entity[0])
- s = dbd.series(entity, name, contains, isin)
+ s = dbd.series(entity, desc, contains, isin)
  dbd.close()
  return s
  else:
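For reference, the renamed keyword is used like this at the package level. This is a minimal sketch; the folder path and the description strings are illustrative only, while patients, studies and series are the api.py functions shown in this diff:

    import dbdicom

    folder = 'C:/data/dicom'   # hypothetical path to a DICOM folder

    # Studies whose StudyDescription is exactly 'Baseline' (was name=, now desc=)
    baseline = dbdicom.studies(folder, desc='Baseline')

    # Series whose SeriesDescription contains the substring 'T1'
    t1_series = dbdicom.series(folder, contains='T1')

    # The same search restricted to one patient (a two-element entity [folder, patient])
    for patient in dbdicom.patients(folder):
        localizers = dbdicom.series(patient, desc='localizer')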
@@ -190,21 +190,38 @@ def move(from_entity:list, to_entity:list):
  dbd.delete(from_entity)
  dbd.close()

+ def split_series(series:list, attr:Union[str, tuple])->dict:
+ """
+ Split a series into multiple series
+
+ Args:
+ series (list): series to split.
+ attr (str or tuple): dicom attribute to split the series by.
+ Returns:
+ dict: dictionary with keys the unique values found (ascending)
+ and as values the series corresponding to that value.
+ """
+ dbd = open(series[0])
+ split_series = dbd.split_series(series, attr)
+ dbd.close()
+ return split_series

- def volume(series:list, dims:list=None) -> vreg.Volume3D:
- """Read a vreg.Volume3D from a DICOM series
+
+ def volume(entity:Union[list, str], dims:list=None) -> Union[vreg.Volume3D, list]:
+ """Read volume or volumes.

  Args:
- series (list): DICOM series to read
+ entity (list, str): DICOM entity to read
  dims (list, optional): Non-spatial dimensions of the volume. Defaults to None.

  Returns:
- vreg.Volume3D: vole read from the series.
+ vreg.Volume3D | list: If the entity is a series this returns
+ a volume, else a list of volumes.
  """
- if isinstance(series, str):
- series = [series]
- dbd = open(series[0])
- vol = dbd.volume(series, dims)
+ if isinstance(entity, str):
+ entity = [entity]
+ dbd = open(entity[0])
+ vol = dbd.volume(entity, dims)
  dbd.close()
  return vol

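A rough sketch of how the new split_series and the generalised volume are called from the top-level API; the folder path, the attribute used for splitting and the extra dimension are illustrative, not prescribed by the package:

    import dbdicom

    folder = 'C:/data/dicom'             # hypothetical folder
    series = dbdicom.series(folder)[0]   # pick a series to work with

    # Split on a DICOM attribute; returns {value: new_series_entity}, keys ascending
    split = dbdicom.split_series(series, 'EchoTime')

    # A series returns a single vreg.Volume3D; SliceLocation is always prepended to dims
    vol = dbdicom.volume(series, dims=['FlipAngle'])

    # Any higher-level entity (study, patient or folder) returns a list of volumes
    all_vols = dbdicom.volume(folder)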
@@ -281,11 +298,12 @@ def unique(pars:list, entity:list) -> dict:
  """Return a list of unique values for a DICOM entity

  Args:
- pars (list): attributes to return.
+ pars (list, str/tuple): attribute or attributes to return.
  entity (list): DICOM entity to search (Patient, Study or Series)

  Returns:
- dict: dictionary with unique values for each attribute.
+ dict: if a pars is a list, this returns a dictionary with
+ unique values for each attribute. If pars is a scalar this returnes a list of values
  """
  dbd = open(entity[0])
  u = dbd.unique(pars, entity)
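The changed return type is easiest to see in a small sketch (entity and attribute names illustrative):

    import dbdicom

    series = dbdicom.series('C:/data/dicom')[0]   # hypothetical folder

    # A scalar attribute now returns a plain list of unique values
    echo_times = dbdicom.unique('EchoTime', series)

    # A list of attributes still returns a dictionary keyed by attribute
    pars = dbdicom.unique(['EchoTime', 'FlipAngle'], series)
    print(pars['EchoTime'], pars['FlipAngle'])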
dbdicom/dbd.py CHANGED
@@ -5,12 +5,10 @@ from typing import Union

  from tqdm import tqdm
  import numpy as np
- import pandas as pd
  import vreg
  from pydicom.dataset import Dataset

  import dbdicom.utils.arrays
-
  import dbdicom.dataset as dbdataset
  import dbdicom.database as dbdatabase
  import dbdicom.register as register
@@ -36,8 +34,13 @@ class DataBaseDicom():
  try:
  with open(file, 'r') as f:
  self.register = json.load(f)
+ # remove the json file after reading it. If the database
+ # is not properly closed this will prevent that changes
+ # have been made which are not reflected in the json
+ # file on disk
+ os.remove(file)
  except:
- # If the file is corrupted, delete it and load again
+ # If the file can't be read, delete it and load again
  os.remove(file)
  self.read()
  else:
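The effect of removing the register file on open is that an up-to-date register only exists on disk between close() and the next open; if a script stops before close(), the next open falls back to re-reading the DICOM files. A minimal sketch of the intended open/close pattern (path illustrative, and assuming the package-level open() helper used throughout api.py is exported):

    import dbdicom

    dbd = dbdicom.open('C:/data/dicom')   # reads and then removes the cached register file
    try:
        patients = dbd.patients()
    finally:
        dbd.close()                       # writes a fresh register file back to disk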
@@ -49,7 +52,7 @@ class DataBaseDicom():
  """
  self.register = dbdatabase.read(self.path)
  # For now ensure all series have just a single CIOD
- # Leaving this out for now until the issue occurs again
+ # Leaving this out for now until the issue occurs again.
  # self._split_series()
  return self

@@ -63,12 +66,12 @@ class DataBaseDicom():
  """
  removed = register.index(self.register, entity)
  # delete datasets marked for removal
- for index in removed.tolist():
+ for index in removed:
  file = os.path.join(self.path, index)
  if os.path.exists(file):
  os.remove(file)
  # and drop then from the register
- self.register = register.drop(removed)
+ self.register = register.drop(self.register, removed)
  return self


@@ -77,7 +80,6 @@ class DataBaseDicom():

  This also saves changes in the header file to disk.
  """
- # Save df as pkl
  file = self._register_file()
  with open(file, 'w') as f:
  json.dump(self.register, f, indent=4)
@@ -119,14 +121,14 @@ class DataBaseDicom():
  """
  return register.patients(self.register, self.path, name, contains, isin)

- def studies(self, entity=None, name=None, contains=None, isin=None):
+ def studies(self, entity=None, desc=None, contains=None, isin=None):
  """Return a list of studies in the DICOM folder.

  Args:
  entity (str or list): path to a DICOM folder (to search in
  the whole folder), or a two-element list identifying a
  patient (to search studies of a given patient).
- name (str, optional): value of StudyDescription, to search for
+ desc (str, optional): value of StudyDescription, to search for
  studies with a given description. Defaults to None.
  contains (str, optional): substring of StudyDescription, to
  search for studies based on part of their description.
@@ -142,17 +144,17 @@ class DataBaseDicom():
  if isinstance(entity, str):
  studies = []
  for patient in self.patients():
- studies += self.studies(patient, name, contains, isin)
+ studies += self.studies(patient, desc, contains, isin)
  return studies
  elif len(entity)==1:
  studies = []
  for patient in self.patients():
- studies += self.studies(patient, name, contains, isin)
+ studies += self.studies(patient, desc, contains, isin)
  return studies
  else:
- return register.studies(self.register, entity, name, contains, isin)
+ return register.studies(self.register, entity, desc, contains, isin)

- def series(self, entity=None, name=None, contains=None, isin=None):
+ def series(self, entity=None, desc=None, contains=None, isin=None):
  """Return a list of series in the DICOM folder.

  Args:
@@ -160,7 +162,7 @@ class DataBaseDicom():
  the whole folder), or a list identifying a
  patient or a study (to search series of a given patient
  or study).
- name (str, optional): value of SeriesDescription, to search for
+ desc (str, optional): value of SeriesDescription, to search for
  series with a given description. Defaults to None.
  contains (str, optional): substring of SeriesDescription, to
  search for series based on part of their description.
@@ -176,36 +178,37 @@ class DataBaseDicom():
  if isinstance(entity, str):
  series = []
  for study in self.studies(entity):
- series += self.series(study, name, contains, isin)
+ series += self.series(study, desc, contains, isin)
  return series
  elif len(entity)==1:
  series = []
  for study in self.studies(entity):
- series += self.series(study, name, contains, isin)
+ series += self.series(study, desc, contains, isin)
  return series
  elif len(entity)==2:
  series = []
  for study in self.studies(entity):
- series += self.series(study, name, contains, isin)
+ series += self.series(study, desc, contains, isin)
  return series
  else: # path = None (all series) or path = patient (all series in patient)
- return register.series(self.register, entity, name, contains, isin)
+ return register.series(self.register, entity, desc, contains, isin)


- def volume(self, series:list, dims:list=None) -> vreg.Volume3D:
- """Read a vreg.Volume3D from a DICOM series
+ def volume(self, entity:Union[list, str], dims:list=None) -> Union[vreg.Volume3D, list]:
+ """Read volume or volumes.

  Args:
- series (list): DICOM series to read
+ entity (list, str): DICOM entity to read
  dims (list, optional): Non-spatial dimensions of the volume. Defaults to None.

  Returns:
- vreg.Volume3D: vole read from the series.
+ vreg.Volume3D | list: If the entity is a series this returns
+ a volume, else a list of volumes.
  """
- if isinstance(series, str): # path to folder
- return [self.volume(s, dims) for s in self.series(series)]
- if len(series) < 4: # folder, patient or study
- return [self.volume(s, dims) for s in self.series(series)]
+ if isinstance(entity, str): # path to folder
+ return [self.volume(s, dims) for s in self.series(entity)]
+ if len(entity) < 4: # folder, patient or study
+ return [self.volume(s, dims) for s in self.series(entity)]
  if dims is None:
  dims = []
  elif isinstance(dims, str):
@@ -214,13 +217,13 @@ class DataBaseDicom():
  dims = list(dims)
  dims = ['SliceLocation'] + dims

- files = register.files(self.register, series)
+ files = register.files(self.register, entity)

  # Read dicom files
  values = []
  volumes = []
  for f in tqdm(files, desc='Reading volume..'):
- ds = dbdataset.read_dataset(f)
+ ds = dbdataset.read_dataset(f)
  values.append(dbdataset.get_values(ds, dims))
  volumes.append(dbdataset.volume(ds))

@@ -283,10 +286,11 @@ class DataBaseDicom():
  else:
  ref_mgr = DataBaseDicom(ref[0])
  files = register.files(ref_mgr.register, ref)
+ ref_mgr.close()
  ds = dbdataset.read_dataset(files[0])

  # Get the attributes of the destination series
- attr = self._attributes(series)
+ attr = self._series_attributes(series)
  n = self._max_instance_number(attr['SeriesInstanceUID'])

  if vol.ndim==3:
@@ -422,12 +426,20 @@ class DataBaseDicom():
  """Return a list of unique values for a DICOM entity

  Args:
- pars (list): attributes to return.
+ pars (list, str/tuple): attribute or attributes to return.
  entity (list): DICOM entity to search (Patient, Study or Series)

  Returns:
- dict: dictionary with unique values for each attribute.
+ dict: if a pars is a list, this returns a dictionary with
+ unique values for each attribute. If pars is a scalar
+ this returnes a list of values.
  """
+ if not isinstance(pars, list):
+ single=True
+ pars = [pars]
+ else:
+ single=False
+
  v = self._values(pars, entity)

  # Return a list with unique values for each attribute
@@ -439,17 +451,16 @@ class DataBaseDicom():
  va = list(va)
  # Get unique values and sort
  va = [x for i, x in enumerate(va) if i==va.index(x)]
- if len(va) == 0:
- va = None
- elif len(va) == 1:
- va = va[0]
- else:
- try:
- va.sort()
- except:
- pass
+ try:
+ va.sort()
+ except:
+ pass
  values.append(va)
- return {p: values[i] for i, p in enumerate(pars)}
+
+ if single:
+ return values[0]
+ else:
+ return {p: values[i] for i, p in enumerate(pars)}

  def copy(self, from_entity, to_entity):
  """Copy a DICOM entity (patient, study or series)
@@ -492,6 +503,40 @@ class DataBaseDicom():
  self.copy(from_entity, to_entity)
  self.delete(from_entity)
  return self
+
+ def split_series(self, series:list, attr:Union[str, tuple]) -> dict:
+ """
+ Split a series into multiple series
+
+ Args:
+ series (list): series to split.
+ attr (str or tuple): dicom attribute to split the series by.
+ Returns:
+ dict: dictionary with keys the unique values found (ascending)
+ and as values the series corresponding to that value.
+ """
+
+ # Find all values of the attr and list files per value
+ all_files = register.files(self.register, series)
+ files = {}
+ for f in tqdm(all_files, desc=f'Reading {attr}'):
+ ds = dbdataset.read_dataset(f)
+ v = dbdataset.get_values(ds, attr)
+ if v in files:
+ files[v].append(f)
+ else:
+ files[v] = [f]
+
+ # Copy the files for each value (sorted) to new series
+ values = sorted(list(files.keys()))
+ split_series = {}
+ for v in tqdm(values, desc='Writing new series'):
+ series_desc = series[-1] if isinstance(series, str) else series[-1][0]
+ series_v = series[:3] + [f'{series_desc}_{attr}_{v}']
+ self._files_to_series(files[v], series_v)
+ split_series[v] = series_v
+ return split_series
+

  def _values(self, attributes:list, entity:list):
  # Create a np array v with values for each instance and attribute
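On an already open DataBaseDicom the same split can be done without the module-level wrapper. A small sketch (folder and attribute illustrative, again assuming the package-level open() helper) showing that the keys of the returned dict are the sorted attribute values and that each new series is described as '<description>_<attr>_<value>':

    import dbdicom

    dbd = dbdicom.open('C:/data/dicom')         # hypothetical folder
    patient = dbd.patients()[0]
    series = dbd.series(patient)[0]

    split = dbd.split_series(series, 'EchoTime')
    for echo_time, new_series in split.items():
        print(echo_time, '->', new_series[-1])  # e.g. 'T2map_EchoTime_30.0'

    dbd.close()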
@@ -509,27 +554,36 @@ class DataBaseDicom():
  def _copy_patient(self, from_patient, to_patient):
  from_patient_studies = register.studies(self.register, from_patient)
  for from_study in tqdm(from_patient_studies, desc=f'Copying patient {from_patient[1:]}'):
+ # Count the studies with the same description in the target patient
+ study_desc = from_study[-1][0]
  if to_patient[0]==from_patient[0]:
- to_study = register.append(self.register, to_patient, from_study[-1])
+ cnt = len(self.studies(to_patient, desc=study_desc))
  else:
- mgr = DataBaseDicom(to_study[0])
- to_study = register.append(mgr.register, to_patient, from_study[-1])
+ mgr = DataBaseDicom(to_patient[0])
+ cnt = len(mgr.studies(to_patient, desc=study_desc))
+ mgr.close()
+ # Ensure the copied studies end up in a separate study with the same description
+ to_study = to_patient + [(study_desc, cnt)]
  self._copy_study(from_study, to_study)

  def _copy_study(self, from_study, to_study):
  from_study_series = register.series(self.register, from_study)
  for from_series in tqdm(from_study_series, desc=f'Copying study {from_study[1:]}'):
+ # Count the series with the same description in the target study
+ series_desc = from_series[-1][0]
  if to_study[0]==from_study[0]:
- to_series = register.append(self.register, to_study, from_series[-1])
+ cnt = len(self.series(to_study, desc=series_desc))
  else:
  mgr = DataBaseDicom(to_study[0])
- to_series = register.append(mgr.register, to_study, from_series[-1])
+ cnt = len(mgr.series(to_study, desc=series_desc))
+ mgr.close()
+ # Ensure the copied series end up in a separate series with the same description
+ to_series = to_study + [(series_desc, cnt)]
  self._copy_series(from_series, to_series)

  def _copy_series(self, from_series, to_series):
  # Get the files to be exported
  from_series_files = register.files(self.register, from_series)
-
  if to_series[0] == from_series[0]:
  # Copy in the same database
  self._files_to_series(from_series_files, to_series)
@@ -543,7 +597,7 @@ class DataBaseDicom():
  def _files_to_series(self, files, to_series):

  # Get the attributes of the destination series
- attr = self._attributes(to_series)
+ attr = self._series_attributes(to_series)
  n = self._max_instance_number(attr['SeriesInstanceUID'])

  # Copy the files to the new series
@@ -555,8 +609,17 @@ class DataBaseDicom():
  def _max_study_id(self, patient_id):
  for pt in self.register:
  if pt['PatientID'] == patient_id:
- n = [int(st['StudyID']) for st in pt['studies']]
- return int(np.amax(n))
+ # Find the largest integer StudyID
+ n = []
+ for st in pt['studies']:
+ try:
+ n.append(int(st['StudyID']))
+ except:
+ pass
+ if n == []:
+ return 0
+ else:
+ return int(np.amax(n))
  return 0

  def _max_series_number(self, study_uid):
@@ -576,13 +639,13 @@ class DataBaseDicom():
  return int(np.amax([int(i) for i in n]))
  return 0

- def _attributes(self, entity):
- if len(entity)==4:
- return self._series_attributes(entity)
- if len(entity)==3:
- return self._study_attributes(entity)
- if len(entity)==2:
- return self._patient_attributes(entity)
+ # def _attributes(self, entity):
+ # if len(entity)==4:
+ # return self._series_attributes(entity)
+ # if len(entity)==3:
+ # return self._study_attributes(entity)
+ # if len(entity)==2:
+ # return self._patient_attributes(entity)


  def _patient_attributes(self, patient):
@@ -594,11 +657,13 @@ class DataBaseDicom():
  vals = dbdataset.get_values(ds, attr)
  except:
  # If the patient does not exist, generate values
+ if patient in self.patients():
+ raise ValueError(
+ f"Cannot create patient with id {patient[1]}."
+ f"The ID is already taken. Please provide a unique ID."
+ )
  attr = ['PatientID', 'PatientName']
- #patient_id = dbdataset.new_uid()
- patient_id = patient[-1] if isinstance(patient[-1], str) else f"{patient[-1][0]}_{patient[-1][1]}"
- patient_name = patient[-1] if isinstance(patient[-1], str) else patient[-1][0]
- vals = [patient_id, patient_name]
+ vals = [patient[1], 'Anonymous']
  return {attr[i]:vals[i] for i in range(len(attr)) if vals[i] is not None}


@@ -610,19 +675,19 @@ class DataBaseDicom():
  attr = const.STUDY_MODULE
  ds = dbdataset.read_dataset(files[0])
  vals = dbdataset.get_values(ds, attr)
+ except register.AmbiguousError as e:
+ raise register.AmbiguousError(e)
  except:
  # If the study does not exist or is empty, generate values
- try:
- patient_id = register.uid(self.register, study[:-1])
- except:
+ if study[:-1] not in self.patients():
  study_id = 1
  else:
- study_id = 1 + self._max_study_id(patient_id)
- attr = ['StudyInstanceUID', 'StudyDescription', 'StudyDate', 'StudyID']
+ study_id = 1 + self._max_study_id(study[-1])
+ attr = ['StudyInstanceUID', 'StudyDescription', 'StudyID']
  study_uid = dbdataset.new_uid()
  study_desc = study[-1] if isinstance(study[-1], str) else study[-1][0]
- study_date = datetime.today().strftime('%Y%m%d')
- vals = [study_uid, study_desc, study_date, str(study_id)]
+ #study_date = datetime.today().strftime('%Y%m%d')
+ vals = [study_uid, study_desc, str(study_id)]
  return patient_attr | {attr[i]:vals[i] for i in range(len(attr)) if vals[i] is not None}


@@ -634,10 +699,12 @@ class DataBaseDicom():
  attr = const.SERIES_MODULE
  ds = dbdataset.read_dataset(files[0])
  vals = dbdataset.get_values(ds, attr)
+ except register.AmbiguousError as e:
+ raise register.AmbiguousError(e)
  except:
  # If the series does not exist or is empty, generate values
  try:
- study_uid = register.uid(self.register, series[:-1])
+ study_uid = register.study_uid(self.register, series[:-1])
  except:
  series_number = 1
  else:
@@ -668,36 +735,6 @@ class DataBaseDicom():



- # def _split_series(self):
- # """
- # Split series with multiple SOP Classes.
-
- # If a series contain instances from different SOP Classes,
- # these are separated out into multiple series with identical SOP Classes.
- # """
- # # For each series, check if there are multiple
- # # SOP Classes in the series and split them if yes.
- # for series in self.series():
- # series_index = register.index(self.register, series)
- # df_series = self.register.loc[series_index]
- # sop_classes = df_series.SOPClassUID.unique()
- # if len(sop_classes) > 1:
- # # For each sop_class, create a new series and move all
- # # instances of that sop_class to the new series
- # desc = series[-1] if isinstance(series, str) else series[0]
- # for i, sop_class in tqdm(enumerate(sop_classes[1:]), desc='Splitting series with multiple SOP Classes.'):
- # df_sop_class = df_series[df_series.SOPClassUID == sop_class]
- # relpaths = df_sop_class.index.tolist()
- # sop_class_files = [os.path.join(self.path, p) for p in relpaths]
- # sop_class_series = series[:-1] + [desc + f' [{i+1}]']
- # self._files_to_series(sop_class_files, sop_class_series)
- # # Delete original files permanently
- # self.register.drop(relpaths)
- # for f in sop_class_files:
- # os.remove(f)
- # self.register.drop('SOPClassUID', axis=1, inplace=True)
-
-
  def infer_slice_spacing(vols):
  # In case spacing between slices is not (correctly) encoded in
  # DICOM it can be inferred from the slice locations.
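The body of infer_slice_spacing is not part of this diff; as a rough illustration of the idea stated in the comment (not the function's actual implementation), the spacing can be estimated from the gaps between sorted slice locations:

    import numpy as np

    def estimate_slice_spacing(slice_locations):
        # Median gap between sorted slice locations; the median is robust
        # against a single missing or duplicated slice.
        loc = np.sort(np.asarray(slice_locations, dtype=float))
        gaps = np.diff(loc)
        return float(np.median(gaps)) if gaps.size else 0.0

    print(estimate_slice_spacing([0.0, 2.5, 5.0, 7.5, 10.0]))   # 2.5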