dbdicom 0.2.0__py3-none-any.whl → 0.2.3__py3-none-any.whl

This diff compares publicly available versions of the package as released to their respective public registries. It is provided for informational purposes only.

Potentially problematic release.

Files changed (52)
  1. dbdicom/__init__.py +5 -3
  2. dbdicom/create.py +77 -70
  3. dbdicom/dro.py +174 -0
  4. dbdicom/ds/dataset.py +30 -3
  5. dbdicom/ds/types/mr_image.py +18 -7
  6. dbdicom/extensions/__init__.py +10 -0
  7. dbdicom/{wrappers → extensions}/dipy.py +191 -205
  8. dbdicom/extensions/elastix.py +503 -0
  9. dbdicom/extensions/matplotlib.py +107 -0
  10. dbdicom/extensions/numpy.py +271 -0
  11. dbdicom/{wrappers → extensions}/scipy.py +131 -32
  12. dbdicom/{wrappers → extensions}/skimage.py +1 -1
  13. dbdicom/extensions/sklearn.py +243 -0
  14. dbdicom/extensions/vreg.py +1390 -0
  15. dbdicom/external/dcm4che/bin/emf2sf +57 -57
  16. dbdicom/manager.py +91 -36
  17. dbdicom/pipelines.py +66 -0
  18. dbdicom/record.py +447 -80
  19. dbdicom/types/instance.py +46 -20
  20. dbdicom/types/series.py +2182 -399
  21. dbdicom/utils/image.py +152 -21
  22. dbdicom/utils/variables.py +8 -2
  23. dbdicom/utils/vreg.py +327 -135
  24. dbdicom-0.2.3.dist-info/METADATA +88 -0
  25. dbdicom-0.2.3.dist-info/RECORD +67 -0
  26. {dbdicom-0.2.0.dist-info → dbdicom-0.2.3.dist-info}/WHEEL +1 -1
  27. dbdicom/external/__pycache__/__init__.cpython-310.pyc +0 -0
  28. dbdicom/external/__pycache__/__init__.cpython-37.pyc +0 -0
  29. dbdicom/external/dcm4che/__pycache__/__init__.cpython-310.pyc +0 -0
  30. dbdicom/external/dcm4che/__pycache__/__init__.cpython-37.pyc +0 -0
  31. dbdicom/external/dcm4che/bin/__pycache__/__init__.cpython-310.pyc +0 -0
  32. dbdicom/external/dcm4che/bin/__pycache__/__init__.cpython-37.pyc +0 -0
  33. dbdicom/external/dcm4che/lib/linux-x86/libclib_jiio.so +0 -0
  34. dbdicom/external/dcm4che/lib/linux-x86-64/libclib_jiio.so +0 -0
  35. dbdicom/external/dcm4che/lib/linux-x86-64/libopencv_java.so +0 -0
  36. dbdicom/external/dcm4che/lib/solaris-sparc/libclib_jiio.so +0 -0
  37. dbdicom/external/dcm4che/lib/solaris-sparc/libclib_jiio_vis.so +0 -0
  38. dbdicom/external/dcm4che/lib/solaris-sparc/libclib_jiio_vis2.so +0 -0
  39. dbdicom/external/dcm4che/lib/solaris-sparcv9/libclib_jiio.so +0 -0
  40. dbdicom/external/dcm4che/lib/solaris-sparcv9/libclib_jiio_vis.so +0 -0
  41. dbdicom/external/dcm4che/lib/solaris-sparcv9/libclib_jiio_vis2.so +0 -0
  42. dbdicom/external/dcm4che/lib/solaris-x86/libclib_jiio.so +0 -0
  43. dbdicom/external/dcm4che/lib/solaris-x86-64/libclib_jiio.so +0 -0
  44. dbdicom/wrappers/__init__.py +0 -7
  45. dbdicom/wrappers/elastix.py +0 -855
  46. dbdicom/wrappers/numpy.py +0 -119
  47. dbdicom/wrappers/sklearn.py +0 -151
  48. dbdicom/wrappers/vreg.py +0 -273
  49. dbdicom-0.2.0.dist-info/METADATA +0 -276
  50. dbdicom-0.2.0.dist-info/RECORD +0 -81
  51. {dbdicom-0.2.0.dist-info → dbdicom-0.2.3.dist-info}/LICENSE +0 -0
  52. {dbdicom-0.2.0.dist-info → dbdicom-0.2.3.dist-info}/top_level.txt +0 -0
dbdicom/external/dcm4che/bin/emf2sf CHANGED
@@ -1,57 +1,57 @@
-#!/bin/sh
-# -------------------------------------------------------------------------
-# emf2sf Launcher
-# -------------------------------------------------------------------------
-
-MAIN_CLASS=org.dcm4che3.tool.emf2sf.Emf2sf
-MAIN_JAR=dcm4che-tool-emf2sf-5.23.1.jar
-
-DIRNAME="`dirname "$0"`"
-
-# OS specific support (must be 'true' or 'false').
-cygwin=false;
-case "`uname`" in
-    CYGWIN*)
-        cygwin=true
-        ;;
-esac
-
-# For Cygwin, ensure paths are in UNIX format before anything is touched
-if $cygwin ; then
-    [ -n "$DCM4CHE_HOME" ] &&
-        DCM4CHE_HOME=`cygpath --unix "$DCM4CHE_HOME"`
-    [ -n "$JAVA_HOME" ] &&
-        JAVA_HOME=`cygpath --unix "$JAVA_HOME"`
-fi
-
-# Setup DCM4CHE_HOME
-if [ "x$DCM4CHE_HOME" = "x" ]; then
-    DCM4CHE_HOME=`cd "$DIRNAME"/..; pwd`
-fi
-
-# Setup the JVM
-if [ "x$JAVA_HOME" != "x" ]; then
-    JAVA=$JAVA_HOME/bin/java
-else
-    JAVA="java"
-fi
-
-# Setup the classpath
-CP="$DCM4CHE_HOME/etc/emf2sf/"
-CP="$CP:$DCM4CHE_HOME/lib/$MAIN_JAR"
-CP="$CP:$DCM4CHE_HOME/lib/dcm4che-core-5.23.1.jar"
-CP="$CP:$DCM4CHE_HOME/lib/dcm4che-emf-5.23.1.jar"
-CP="$CP:$DCM4CHE_HOME/lib/dcm4che-tool-common-5.23.1.jar"
-CP="$CP:$DCM4CHE_HOME/lib/slf4j-api-1.7.30.jar"
-CP="$CP:$DCM4CHE_HOME/lib/slf4j-log4j12-1.7.30.jar"
-CP="$CP:$DCM4CHE_HOME/lib/log4j-1.2.17.jar"
-CP="$CP:$DCM4CHE_HOME/lib/commons-cli-1.4.jar"
-
-# For Cygwin, switch paths to Windows format before running java
-if $cygwin; then
-    JAVA=`cygpath --path --windows "$JAVA"`
-    CP=`cygpath --path --windows "$CP"`
-fi
-
-# Execute the JVM
-exec "$JAVA" $JAVA_OPTS -cp "$CP" $MAIN_CLASS "$@"
+#!/bin/sh
+# -------------------------------------------------------------------------
+# emf2sf Launcher
+# -------------------------------------------------------------------------
+
+MAIN_CLASS=org.dcm4che3.tool.emf2sf.Emf2sf
+MAIN_JAR=dcm4che-tool-emf2sf-5.23.1.jar
+
+DIRNAME="`dirname "$0"`"
+
+# OS specific support (must be 'true' or 'false').
+cygwin=false;
+case "`uname`" in
+    CYGWIN*)
+        cygwin=true
+        ;;
+esac
+
+# For Cygwin, ensure paths are in UNIX format before anything is touched
+if $cygwin ; then
+    [ -n "$DCM4CHE_HOME" ] &&
+        DCM4CHE_HOME=`cygpath --unix "$DCM4CHE_HOME"`
+    [ -n "$JAVA_HOME" ] &&
+        JAVA_HOME=`cygpath --unix "$JAVA_HOME"`
+fi
+
+# Setup DCM4CHE_HOME
+if [ "x$DCM4CHE_HOME" = "x" ]; then
+    DCM4CHE_HOME=`cd "$DIRNAME"/..; pwd`
+fi
+
+# Setup the JVM
+if [ "x$JAVA_HOME" != "x" ]; then
+    JAVA=$JAVA_HOME/bin/java
+else
+    JAVA="java"
+fi
+
+# Setup the classpath
+CP="$DCM4CHE_HOME/etc/emf2sf/"
+CP="$CP:$DCM4CHE_HOME/lib/$MAIN_JAR"
+CP="$CP:$DCM4CHE_HOME/lib/dcm4che-core-5.23.1.jar"
+CP="$CP:$DCM4CHE_HOME/lib/dcm4che-emf-5.23.1.jar"
+CP="$CP:$DCM4CHE_HOME/lib/dcm4che-tool-common-5.23.1.jar"
+CP="$CP:$DCM4CHE_HOME/lib/slf4j-api-1.7.30.jar"
+CP="$CP:$DCM4CHE_HOME/lib/slf4j-log4j12-1.7.30.jar"
+CP="$CP:$DCM4CHE_HOME/lib/log4j-1.2.17.jar"
+CP="$CP:$DCM4CHE_HOME/lib/commons-cli-1.4.jar"
+
+# For Cygwin, switch paths to Windows format before running java
+if $cygwin; then
+    JAVA=`cygpath --path --windows "$JAVA"`
+    CP=`cygpath --path --windows "$CP"`
+fi
+
+# Execute the JVM
+exec "$JAVA" $JAVA_OPTS -cp "$CP" $MAIN_CLASS "$@"
dbdicom/manager.py CHANGED
@@ -423,17 +423,33 @@ class Manager():
         #return [id for id in uids if function(self.get_values(attr, uid=id), vals)]
 
 
-    def filter_instances(self, df, **kwargs):
+    def filter_instances(self, df, select={}, **filters):
         df.dropna(inplace=True)
-        if not kwargs:
+        filters = {**select, **filters}
+        if filters == {}:
             return df
-        vals = list(kwargs.values())
-        attr = list(kwargs.keys())
-        keys = [key for key in df.index if self.get_values(attr, [key]) == vals]
+        vals = list(filters.values())
+        attr = list(filters.keys())
+        # keys = [key for key in df.index if self.get_values(attr, [key]) == vals]
+        keys = []
+        for key in df.index:
+            v = self.get_values(attr, [key])
+            append = True
+            for i, vi in enumerate(v):
+                if isinstance(vals[i], np.ndarray):
+                    if vi not in vals[i]:
+                        append = False
+                        break
+                else:
+                    if vi != vals[i]:
+                        append = False
+                        break
+            if append:
+                keys.append(key)
         return df[keys]
 
 
-    def instances(self, uid=None, keys=None, sort=True, sortby=None, images=False, **kwargs):
+    def instances(self, uid=None, keys=None, sort=True, sortby=None, images=False, select={}, **filters):
         if keys is None:
             keys = self.keys(uid)
         if sort:
@@ -444,7 +460,7 @@ class Manager():
             df = df.SOPInstanceUID
         else:
             df = self.register.loc[keys,'SOPInstanceUID']
-        df = self.filter_instances(df, **kwargs)
+        df = self.filter_instances(df, select=select, **filters)
         if images == True:
             keys = [key for key in df.index if self.get_values('Rows', [key]) is not None]
             df = df[keys]
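
The rewritten filter accepts a select dict alongside keyword filters, and treats a np.ndarray value as a membership test instead of an equality test. A minimal sketch of the resulting call patterns, assuming an open Manager instance mgr and a series UID series_uid (both hypothetical here):

    import numpy as np

    # Equality filter: keep instances whose InstanceNumber equals 5
    inst = mgr.instances(uid=series_uid, InstanceNumber=5)

    # Membership filter: a np.ndarray value keeps instances whose
    # attribute value is contained in the array
    inst = mgr.instances(uid=series_uid, InstanceNumber=np.arange(1, 10))

    # The select dict passes the same filters by name, which helps when
    # the attribute names are constructed at runtime
    inst = mgr.instances(uid=series_uid, select={'InstanceNumber': 5})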
@@ -978,6 +994,16 @@ class Manager():
         empty = df[df.values == None].index
         if len(empty) == 1:
             self.delete_row(empty[0])
+        # Return new parent key
+        if missing == 'SOPInstanceUID':
+            return self.keys(series=parent_uid)[0]
+        if missing == 'SeriesInstanceUID':
+            return self.keys(study=parent_uid)[0]
+        if missing == 'StudyInstanceUID':
+            return self.keys(patient=parent_uid)[0]
+        if missing == 'PatientID':
+            return self.register.index[0]
+        return parent_key
 
 
     def update_row_data(self, key, data):
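
Returning a key here matters to callers: deleting the placeholder row can invalidate the parent key they were holding, so the call sites updated throughout the rest of this diff now reassign the result, e.g. `new_parent_key = self.drop_placeholder_row(new_parent_key, missing='SOPInstanceUID')`, instead of discarding it.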
@@ -1069,6 +1095,7 @@ class Manager():
         key = self.new_row(data) # Study with existing series
         return data[2], key
 
+
 
     def new_instance(self, parent=None, dataset=None, key=None, **kwargs):
 
@@ -1132,12 +1159,12 @@ class Manager():
 
         data = self.register.loc[key, self.columns]
         data[4] = ds.SOPClassUID
+        data[11:] = ds.get_values(self.columns[11:])
         key = self.update_row_data(key, data)
-        ds.set_values(self.columns, data)
+        ds.set_values(self.columns[:11], data[:11])
         self.dataset[key] = ds
         return key
 
-
     def set_dataset(self, uid, dataset, keys=None):
 
         if keys is None:
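
The new lines split the register columns into two directions of data flow: values for `columns[11:]` are read from the dataset into the register row, while only `columns[:11]` are stamped back into the dataset. Judging by the indices used elsewhere in this diff, those first eleven columns appear to hold the hierarchy and identity attributes (`data[3]` is SOPInstanceUID, `data[4]` SOPClassUID, `data[9]` SeriesNumber, `data[10]` InstanceNumber); the exact column list is not shown in the diff, so this reading is an assumption. The same pattern recurs twice in `set_dataset` below.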
@@ -1145,10 +1172,9 @@ class Manager():
         else:
             parent_keys = keys
 
-        # LOOKUP!!!
-        # ELIMINATE
-        if self.type(uid, parent_keys[0]) == 'Instance':
-            self.set_instance_dataset(uid, dataset, parent_keys[0])
+        parent_key = parent_keys[0]
+        if self.type(uid, parent_key) == 'Instance':
+            self.set_instance_dataset(uid, dataset, parent_key)
             return
 
         if not isinstance(dataset, list):
@@ -1177,10 +1203,11 @@ class Manager():
                     data[10] = 1
                 else:
                     data[10] = 1 + max(nrs)
+                data[11:] = ds.get_values(self.columns[11:]) # added 27/07/23
 
                 # Add to database in memory as a new row
                 key = self.new_row(data)
-                ds.set_values(self.columns, data)
+                ds.set_values(self.columns[:11], data[:11]) # modified 27/07/23
                 self.dataset[key] = ds
 
             else: # If the instance is already in the object
@@ -1188,11 +1215,13 @@ class Manager():
                 key = parent_keys[ind]
                 data = self.value(key, self.columns)
                 data[4] = ds.SOPClassUID
+                data[11:] = ds.get_values(self.columns[11:]) # added 27/07/23
                 key = self.update_row_data(key, data)
+                ds.set_values(self.columns[:11], data[:11]) # added 27/07/23
                 self.dataset[key] = ds
 
         # If the series is empty and new instances have been added then delete the row
-        self.drop_placeholder_row(parent_keys[0], missing='SOPInstanceUID')
+        parent_key = self.drop_placeholder_row(parent_key, missing='SOPInstanceUID')
 
 
 
@@ -1245,7 +1274,8 @@ class Manager():
     def copy_instance_to_series(self, instance_key, target_keys, tmp, **kwargs):
         """Copy instances to another series"""
 
-        attributes, values = self.series_header(target_keys[0])
+        new_parent_key = target_keys[0]
+        attributes, values = self.series_header(new_parent_key)
         self.append_kwargs(kwargs, attributes, values)
 
         n = self.register.loc[target_keys,'InstanceNumber'].values
@@ -1258,7 +1288,7 @@ class Manager():
 
         if ds is None:
             row = self.value(instance_key, self.columns).tolist()
-            row = self.copy_series_data(target_keys[0], row)
+            row = self.copy_series_data(new_parent_key, row)
             row[3] = new_instance
             row[10] = 1 + max_number
             for val in kwargs:
@@ -1275,7 +1305,7 @@ class Manager():
             ds.write(self.filepath(new_key), self.status)
             row = ds.get_values(self.columns)
 
-        self.drop_placeholder_row(target_keys[0], missing='SOPInstanceUID')
+        new_parent_key = self.drop_placeholder_row(new_parent_key, missing='SOPInstanceUID')
         self.new_row(row, new_key)
 
         return new_instance
@@ -1291,8 +1321,8 @@ class Manager():
         """Copy instances to another series"""
 
         target_keys = self.keys(series=target)
-
-        attributes, values = self.series_header(target_keys[0])
+        new_parent_key = target_keys[0]
+        attributes, values = self.series_header(new_parent_key)
         self.append_kwargs(kwargs, attributes, values)
 
         max_number = self.new_instance_number(target)
@@ -1309,7 +1339,7 @@ class Manager():
             ds = self.get_dataset(instance_uid, [key])
             if ds is None:
                 row = self.value(key, self.columns).tolist()
-                row = self.copy_series_data(target_keys[0], row)
+                row = self.copy_series_data(new_parent_key, row)
                 row[3] = new_instances[i]
                 row[10] = i + max_number
                 for val in kwargs:
@@ -1330,7 +1360,7 @@ class Manager():
             self.new_row(row, new_key)
 
         # If the series is empty and new instances have been added, then delete the row
-        self.drop_placeholder_row(target_keys[0], missing='SOPInstanceUID')
+        new_parent_key = self.drop_placeholder_row(new_parent_key, missing='SOPInstanceUID')
 
         if len(keys) > 1:
             self.status.hide()
@@ -1367,7 +1397,10 @@ class Manager():
             series_keys = self.keys(series=series)
             for k, key in enumerate(series_keys):
 
-                msg = 'Copying series ' + self.value(key, 'SeriesDescription')
+                desc = self.value(key, 'SeriesDescription')
+                if desc is None:
+                    desc = 'Unknown'
+                msg = 'Copying series ' + desc
                 msg += ' (' + str(s+1) + '/' + str(len(all_series)) + ')'
                 self.status.progress(k+1, len(series_keys), msg)
 
@@ -1410,7 +1443,7 @@ class Manager():
 
         # If the study is empty and new series have been added
        # then delete the row
-        self.drop_placeholder_row(target_key, missing='SeriesInstanceUID')
+        target_key = self.drop_placeholder_row(target_key, missing='SeriesInstanceUID')
         self.status.hide()
 
         if len(new_series) == 1:
@@ -1476,7 +1509,7 @@ class Manager():
             self.new_row(row, new_key)
 
         # If the patient is empty and new studies have been added, then delete the row
-        self.drop_placeholder_row(target_key, missing='StudyInstanceUID')
+        target_key = self.drop_placeholder_row(target_key, missing='StudyInstanceUID')
 
         if len(new_studies) == 1:
             return new_studies[0]
@@ -1577,8 +1610,8 @@ class Manager():
         if target_keys == []:
             msg = 'Moving data to a series that does not exist in the database'
             raise ValueError(msg)
-
-        attributes, values = self.series_header(target_keys[0])
+        new_parent_key = target_keys[0]
+        attributes, values = self.series_header(new_parent_key)
         self.append_kwargs(kwargs, attributes, values)
 
         n = self.value(target_keys, 'InstanceNumber')
@@ -1596,7 +1629,7 @@ class Manager():
 
             if ds is None:
                 row = self.value(key, self.columns).tolist()
-                row = self.copy_series_data(target_keys[0], row)
+                row = self.copy_series_data(new_parent_key, row)
                 row[10] = i + 1 + max_number
                 for val in kwargs:
                     if val in self._descriptives:
@@ -1606,7 +1639,7 @@ class Manager():
                 self.set_dataset_values(ds, key, attributes+['InstanceNumber'], values+[i+1+max_number])
 
         # If the series is empty and new instances have been added, then delete the row
-        self.drop_placeholder_row(target_keys[0], 'SOPInstanceUID')
+        new_parent_key = self.drop_placeholder_row(new_parent_key, 'SOPInstanceUID')
 
         if len(keys) == 1:
             return self.value(keys, 'SOPInstanceUID')
@@ -1639,8 +1672,8 @@ class Manager():
         """Copy series to another study"""
 
         target_keys = self.keys(study=target)
-
-        attributes, values = self.study_header(target_keys[0])
+        new_parent_key = target_keys[0]
+        attributes, values = self.study_header(new_parent_key)
         self.append_kwargs(kwargs, attributes, values)
 
         n = self.value(target_keys, 'SeriesNumber')
@@ -1664,7 +1697,7 @@ class Manager():
                 # If the instance is empty, just replace study data in the register.
                 if ds is None:
                     row = self.value(key, self.columns).tolist()
-                    row = self.copy_study_data(target_keys[0], row)
+                    row = self.copy_study_data(new_parent_key, row)
                     row[9] = new_number
                     for val in kwargs:
                         if val in self._descriptives:
@@ -1675,7 +1708,7 @@ class Manager():
                 else:
                     self.set_dataset_values(ds, key, attributes+['SeriesNumber'], values+[new_number])
 
-        self.drop_placeholder_row(target_keys[0], 'SeriesInstanceUID')
+        new_parent_key = self.drop_placeholder_row(new_parent_key, 'SeriesInstanceUID')
 
         if len(all_series) == 1:
             return all_series[0]
@@ -1705,7 +1738,8 @@ class Manager():
         """Copy series to another study"""
 
         target_keys = self.keys(patient=target)
-        attributes, values = self.patient_header(target_keys[0])
+        new_parent_key = target_keys[0]
+        attributes, values = self.patient_header(new_parent_key)
         self.append_kwargs(kwargs, attributes, values)
         all_studies = self.studies(uid)
 
@@ -1726,7 +1760,7 @@ class Manager():
                 # If the instance is empty, just update the register.
                 if ds is None:
                     row = self.value(key, self.columns).tolist()
-                    row = self.copy_patient_data(target_keys[0], row)
+                    row = self.copy_patient_data(new_parent_key, row)
                     for val in kwargs:
                         if val in self._descriptives:
                             row[self._descriptives[val]] = kwargs[val]
@@ -1736,7 +1770,7 @@ class Manager():
                 else:
                     self.set_dataset_values(ds, key, attributes, values)
 
-        self.drop_placeholder_row(target_key, 'StudyInstanceUID')
+        new_parent_key = self.drop_placeholder_row(new_parent_key, 'StudyInstanceUID')
 
         if len(all_studies) == 1:
             return all_studies[0]
@@ -1961,6 +1995,27 @@ class Manager():
         return values
 
 
+    def import_dataset(self, ds):
+
+        # Do not import SOPInstances that are already in the database
+        uid = ds.SOPInstanceUID
+        keys = self.keys(instance=uid)
+        if keys != []:
+            msg = 'Cannot import a dataset that is already in the database.'
+            raise ValueError(msg)
+
+        # Add a row to the register
+        row = ds.get_values(self.columns)
+        new_key = self.new_key()
+        self.new_row(row, new_key)
+
+        # If the database exists on disk, write file
+        if self.path is not None:
+            path = self.filepath(new_key)
+            ds.write(path)
+
+
+    # Misleading name because files are not datasets - e.g. does not work for datasets in memory.
     def import_datasets(self, files):
 
         # Read manager data
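
A brief sketch of how the new import_dataset might be called; the manager mgr and the dataset ds are hypothetical stand-ins, and only the import_dataset call itself is defined in this diff:

    # mgr: an open dbdicom Manager; ds: a dataset read or built elsewhere,
    # whose SOPInstanceUID is not yet in the register
    try:
        mgr.import_dataset(ds)  # adds a register row; writes a file if the database is on disk
    except ValueError:
        print('SOPInstance already in the database - not imported')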
dbdicom/pipelines.py ADDED
@@ -0,0 +1,66 @@
+"""
+Some utilities for writing automated pipelines.
+"""
+
+
+
+def input_series(database, series_desc, study_desc=None, handle_duplicate=False):
+    """Select a list of series for processing, and a study for saving the results"""
+
+    # Make sure the input is a list for convenience
+    lst = True
+    if not isinstance(series_desc, list):
+        lst = False
+        series_desc = [series_desc]
+
+    # Find series and check if valid
+    input_series = []
+    for desc in series_desc:
+        database.message('Finding input series ' + desc)
+        series = database.series(SeriesDescription=desc)
+        if series == []:
+            return None, None
+        elif len(series) > 1:
+            msg = 'Multiple series found with the description: ' + desc + '\n'
+            #msg += 'Please rename the others so there is only one.'
+            msg += 'Last one was selected'
+            database.dialog.information(msg)
+            if handle_duplicate:
+                series = series[-1]
+            else:
+                return None,None
+        else:
+            series = series[0]
+        input_series.append(series)
+
+    if study_desc is None:
+        # If the input was a list, return a list - else return a scalar.
+        if lst:
+            return input_series
+        else:
+            return input_series[0]
+
+    # Find study and check if valid.
+    database.message('Finding export study ' + study_desc)
+    studies = database.studies(StudyDescription=study_desc)
+    if studies == []:
+        study = input_series[0].new_pibling(StudyDescription=study_desc)
+    elif len(studies) > 1:
+        msg = 'Multiple studies found with the same description: ' + study_desc + '\n'
+        #msg += 'Please rename the others so there is only one, or choose another study for the output.'
+        msg += 'Last one was selected'
+        database.dialog.information(msg)
+        #return None, None
+        if handle_duplicate:
+            study = studies[-1]
+        else:
+            return None,None
+
+    else:
+        study = studies[0]
+
+    # If the input was a list, return a list - else return a scalar.
+    if lst:
+        return input_series, study
+    else:
+        return input_series[0], study
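
A short usage sketch for input_series; the folder path and the series and study descriptions below are hypothetical, and the database constructor is assumed to be dbdicom's database() function:

    import dbdicom as db
    from dbdicom.pipelines import input_series

    database = db.database('path/to/dicom/folder')
    series, study = input_series(
        database,
        ['T1_map', 'T2_map'],           # series required as input
        study_desc='Analysis results',  # study that receives the output
    )
    if series is None:
        raise RuntimeError('Input series not found in the database')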