biomedisa 2024.5.18.tar.gz → 2024.5.19.tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (48)
  1. {biomedisa-2024.5.18 → biomedisa-2024.5.19}/PKG-INFO +6 -7
  2. {biomedisa-2024.5.18 → biomedisa-2024.5.19}/README.md +5 -6
  3. {biomedisa-2024.5.18 → biomedisa-2024.5.19}/pyproject.toml +1 -1
  4. {biomedisa-2024.5.18 → biomedisa-2024.5.19}/src/biomedisa/biomedisa_features/biomedisa_helper.py +100 -110
  5. {biomedisa-2024.5.18 → biomedisa-2024.5.19}/src/biomedisa/biomedisa_features/create_slices.py +49 -56
  6. {biomedisa-2024.5.18 → biomedisa-2024.5.19}/src/biomedisa/biomedisa_features/crop_helper.py +107 -112
  7. {biomedisa-2024.5.18 → biomedisa-2024.5.19}/src/biomedisa/biomedisa_features/keras_helper.py +137 -163
  8. {biomedisa-2024.5.18 → biomedisa-2024.5.19}/src/biomedisa/deeplearning.py +78 -79
  9. {biomedisa-2024.5.18 → biomedisa-2024.5.19}/src/biomedisa.egg-info/PKG-INFO +6 -7
  10. {biomedisa-2024.5.18 → biomedisa-2024.5.19}/LICENSE +0 -0
  11. {biomedisa-2024.5.18 → biomedisa-2024.5.19}/setup.cfg +0 -0
  12. {biomedisa-2024.5.18 → biomedisa-2024.5.19}/src/biomedisa/__init__.py +0 -0
  13. {biomedisa-2024.5.18 → biomedisa-2024.5.19}/src/biomedisa/__main__.py +0 -0
  14. {biomedisa-2024.5.18 → biomedisa-2024.5.19}/src/biomedisa/biomedisa_features/DataGenerator.py +0 -0
  15. {biomedisa-2024.5.18 → biomedisa-2024.5.19}/src/biomedisa/biomedisa_features/DataGeneratorCrop.py +0 -0
  16. {biomedisa-2024.5.18 → biomedisa-2024.5.19}/src/biomedisa/biomedisa_features/PredictDataGenerator.py +0 -0
  17. {biomedisa-2024.5.18 → biomedisa-2024.5.19}/src/biomedisa/biomedisa_features/PredictDataGeneratorCrop.py +0 -0
  18. {biomedisa-2024.5.18 → biomedisa-2024.5.19}/src/biomedisa/biomedisa_features/__init__.py +0 -0
  19. {biomedisa-2024.5.18 → biomedisa-2024.5.19}/src/biomedisa/biomedisa_features/active_contour.py +0 -0
  20. {biomedisa-2024.5.18 → biomedisa-2024.5.19}/src/biomedisa/biomedisa_features/amira_to_np/__init__.py +0 -0
  21. {biomedisa-2024.5.18 → biomedisa-2024.5.19}/src/biomedisa/biomedisa_features/amira_to_np/amira_data_stream.py +0 -0
  22. {biomedisa-2024.5.18 → biomedisa-2024.5.19}/src/biomedisa/biomedisa_features/amira_to_np/amira_grammar.py +0 -0
  23. {biomedisa-2024.5.18 → biomedisa-2024.5.19}/src/biomedisa/biomedisa_features/amira_to_np/amira_header.py +0 -0
  24. {biomedisa-2024.5.18 → biomedisa-2024.5.19}/src/biomedisa/biomedisa_features/amira_to_np/amira_helper.py +0 -0
  25. {biomedisa-2024.5.18 → biomedisa-2024.5.19}/src/biomedisa/biomedisa_features/assd.py +0 -0
  26. {biomedisa-2024.5.18 → biomedisa-2024.5.19}/src/biomedisa/biomedisa_features/curvop_numba.py +0 -0
  27. {biomedisa-2024.5.18 → biomedisa-2024.5.19}/src/biomedisa/biomedisa_features/django_env.py +0 -0
  28. {biomedisa-2024.5.18 → biomedisa-2024.5.19}/src/biomedisa/biomedisa_features/nc_reader.py +0 -0
  29. {biomedisa-2024.5.18 → biomedisa-2024.5.19}/src/biomedisa/biomedisa_features/pid.py +0 -0
  30. {biomedisa-2024.5.18 → biomedisa-2024.5.19}/src/biomedisa/biomedisa_features/process_image.py +0 -0
  31. {biomedisa-2024.5.18 → biomedisa-2024.5.19}/src/biomedisa/biomedisa_features/pycuda_test.py +0 -0
  32. {biomedisa-2024.5.18 → biomedisa-2024.5.19}/src/biomedisa/biomedisa_features/random_walk/__init__.py +0 -0
  33. {biomedisa-2024.5.18 → biomedisa-2024.5.19}/src/biomedisa/biomedisa_features/random_walk/gpu_kernels.py +0 -0
  34. {biomedisa-2024.5.18 → biomedisa-2024.5.19}/src/biomedisa/biomedisa_features/random_walk/pycuda_large.py +0 -0
  35. {biomedisa-2024.5.18 → biomedisa-2024.5.19}/src/biomedisa/biomedisa_features/random_walk/pycuda_large_allx.py +0 -0
  36. {biomedisa-2024.5.18 → biomedisa-2024.5.19}/src/biomedisa/biomedisa_features/random_walk/pycuda_small.py +0 -0
  37. {biomedisa-2024.5.18 → biomedisa-2024.5.19}/src/biomedisa/biomedisa_features/random_walk/pycuda_small_allx.py +0 -0
  38. {biomedisa-2024.5.18 → biomedisa-2024.5.19}/src/biomedisa/biomedisa_features/random_walk/pyopencl_large.py +0 -0
  39. {biomedisa-2024.5.18 → biomedisa-2024.5.19}/src/biomedisa/biomedisa_features/random_walk/pyopencl_small.py +0 -0
  40. {biomedisa-2024.5.18 → biomedisa-2024.5.19}/src/biomedisa/biomedisa_features/random_walk/rw_large.py +0 -0
  41. {biomedisa-2024.5.18 → biomedisa-2024.5.19}/src/biomedisa/biomedisa_features/random_walk/rw_small.py +0 -0
  42. {biomedisa-2024.5.18 → biomedisa-2024.5.19}/src/biomedisa/biomedisa_features/remove_outlier.py +0 -0
  43. {biomedisa-2024.5.18 → biomedisa-2024.5.19}/src/biomedisa/biomedisa_features/split_volume.py +0 -0
  44. {biomedisa-2024.5.18 → biomedisa-2024.5.19}/src/biomedisa/interpolation.py +0 -0
  45. {biomedisa-2024.5.18 → biomedisa-2024.5.19}/src/biomedisa/mesh.py +0 -0
  46. {biomedisa-2024.5.18 → biomedisa-2024.5.19}/src/biomedisa.egg-info/SOURCES.txt +0 -0
  47. {biomedisa-2024.5.18 → biomedisa-2024.5.19}/src/biomedisa.egg-info/dependency_links.txt +0 -0
  48. {biomedisa-2024.5.18 → biomedisa-2024.5.19}/src/biomedisa.egg-info/top_level.txt +0 -0
@@ -1,6 +1,6 @@
  Metadata-Version: 2.1
  Name: biomedisa
- Version: 2024.5.18
+ Version: 2024.5.19
  Summary: Segmentation of 3D volumetric image data
  Author: Philipp Lösel
  Author-email: philipp.loesel@anu.edu.au
@@ -140,8 +140,7 @@ save_data('final.Head5.am', results['regular'], results['header'])

  #### Command-line based (prediction)
  ```
- # start prediction with a batch size of 6
- python -m biomedisa.deeplearning C:\Users\%USERNAME%\Downloads\testing_axial_crop_pat13.nii.gz C:\Users\%USERNAME%\Downloads\heart.h5 -p -bs 6
+ python -m biomedisa.deeplearning C:\Users\%USERNAME%\Downloads\testing_axial_crop_pat13.nii.gz C:\Users\%USERNAME%\Downloads\heart.h5 -p
  ```

  # Biomedisa Features
@@ -224,22 +223,22 @@ assd = ASSD(ground_truth, result)
  ```

  # Update Biomedisa
- If you installed Biomedisa via Pip.
+ If you installed Biomedisa via Pip
  ```
  pip install --upgrade biomedisa
  ```
- If you used `git clone`, change to the Biomedisa directory and make a pull request.
+ If you used `git clone`, change to the Biomedisa directory and make a pull request
  ```
  cd git/biomedisa
  git pull
  ```

- If you installed the browser based version of Biomedisa (including MySQL database), you also need to update the database.
+ If you installed the browser based version of Biomedisa (including MySQL database), you also need to update the database
  ```
  python manage.py migrate
  ```

- If you installed an [Apache Server](https://github.com/biomedisa/biomedisa/blob/master/README/APACHE_SERVER.md), you need to restart the server.
+ If you installed an [Apache Server](https://github.com/biomedisa/biomedisa/blob/master/README/APACHE_SERVER.md), you need to restart the server
  ```
  sudo service apache2 restart
  ```
@@ -124,8 +124,7 @@ save_data('final.Head5.am', results['regular'], results['header'])

  #### Command-line based (prediction)
  ```
- # start prediction with a batch size of 6
- python -m biomedisa.deeplearning C:\Users\%USERNAME%\Downloads\testing_axial_crop_pat13.nii.gz C:\Users\%USERNAME%\Downloads\heart.h5 -p -bs 6
+ python -m biomedisa.deeplearning C:\Users\%USERNAME%\Downloads\testing_axial_crop_pat13.nii.gz C:\Users\%USERNAME%\Downloads\heart.h5 -p
  ```

  # Biomedisa Features
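Both the PKG-INFO long description and README.md drop the explicit `-bs 6` batch-size flag from the documented prediction command, leaving only the `-p` prediction switch. As a minimal illustration of calling that documented entry point from Python instead of a Windows shell, one could wrap it in `subprocess` (the file paths below are hypothetical placeholders, and whether a batch-size flag is still accepted is not shown in this diff):

```
import subprocess
import sys

# Hypothetical paths: substitute your own image volume and trained network.
image = "testing_axial_crop_pat13.nii.gz"
model = "heart.h5"

# Run the prediction command documented above; -p switches to prediction mode.
subprocess.run(
    [sys.executable, "-m", "biomedisa.deeplearning", image, model, "-p"],
    check=True,
)
```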
@@ -208,22 +207,22 @@ assd = ASSD(ground_truth, result)
  ```

  # Update Biomedisa
- If you installed Biomedisa via Pip.
+ If you installed Biomedisa via Pip
  ```
  pip install --upgrade biomedisa
  ```
- If you used `git clone`, change to the Biomedisa directory and make a pull request.
+ If you used `git clone`, change to the Biomedisa directory and make a pull request
  ```
  cd git/biomedisa
  git pull
  ```

- If you installed the browser based version of Biomedisa (including MySQL database), you also need to update the database.
+ If you installed the browser based version of Biomedisa (including MySQL database), you also need to update the database
  ```
  python manage.py migrate
  ```

- If you installed an [Apache Server](https://github.com/biomedisa/biomedisa/blob/master/README/APACHE_SERVER.md), you need to restart the server.
+ If you installed an [Apache Server](https://github.com/biomedisa/biomedisa/blob/master/README/APACHE_SERVER.md), you need to restart the server
  ```
  sudo service apache2 restart
  ```
@@ -4,7 +4,7 @@ build-backend = "setuptools.build_meta"

  [project]
  name = "biomedisa"
- version = "2024.5.18"
+ version = "2024.5.19"
  authors = [
  { name="Philipp Lösel"}, {email="philipp.loesel@anu.edu.au" },
  ]
@@ -41,10 +41,10 @@ import cv2
  import time
  import zipfile
  import numba
- import shutil
  import subprocess
  import re
  import math
+ import tempfile

  def silent_remove(filename):
  try:
@@ -280,92 +280,85 @@ def load_data(path_to_data, process='None', return_extension=False):
  data, header = None, None

  elif extension == '.zip' or os.path.isdir(path_to_data):
- # extract files
- if extension=='.zip':
- path_to_dir = BASE_DIR + '/tmp/' + id_generator(40)
- try:
- zip_ref = zipfile.ZipFile(path_to_data, 'r')
- zip_ref.extractall(path=path_to_dir)
- zip_ref.close()
- except Exception as e:
- print(e)
- print('Using unzip package...')
- try:
- success = subprocess.Popen(['unzip',path_to_data,'-d',path_to_dir]).wait()
- if success != 0:
- if os.path.isdir(path_to_dir):
- shutil.rmtree(path_to_dir)
- data, header = None, None
- except Exception as e:
- print(e)
- if os.path.isdir(path_to_dir):
- shutil.rmtree(path_to_dir)
- data, header = None, None
- path_to_data = path_to_dir
-
- # load files
- if os.path.isdir(path_to_data):
- files = []
- for data_type in ['.[pP][nN][gG]','.[tT][iI][fF]','.[tT][iI][fF][fF]','.[dD][cC][mM]','.[dD][iI][cC][oO][mM]','.[bB][mM][pP]','.[jJ][pP][gG]','.[jJ][pP][eE][gG]','.nc','.nc.bz2']:
- files += [file for file in glob.glob(path_to_data+'/**/*'+data_type, recursive=True) if not os.path.basename(file).startswith('.')]
- nc_extension = False
- for file in files:
- if os.path.splitext(file)[1] == '.nc' or os.path.splitext(os.path.splitext(file)[0])[1] == '.nc':
- nc_extension = True
- if nc_extension:
+ with tempfile.TemporaryDirectory() as temp_dir:
+
+ # extract files
+ if extension=='.zip':
  try:
- data, header = nc_to_np(path_to_data)
+ zip_ref = zipfile.ZipFile(path_to_data, 'r')
+ zip_ref.extractall(path=temp_dir)
+ zip_ref.close()
  except Exception as e:
  print(e)
- data, header = None, None
- else:
- try:
- # remove unreadable files or directories
- for name in files:
- if os.path.isfile(name):
- try:
- img, _ = load(name)
- except:
+ print('Using unzip package...')
+ try:
+ success = subprocess.Popen(['unzip',path_to_data,'-d',temp_dir]).wait()
+ if success != 0:
+ data, header = None, None
+ except Exception as e:
+ print(e)
+ data, header = None, None
+ path_to_data = temp_dir
+
+ # load files
+ if os.path.isdir(path_to_data):
+ files = []
+ for data_type in ['.[pP][nN][gG]','.[tT][iI][fF]','.[tT][iI][fF][fF]','.[dD][cC][mM]','.[dD][iI][cC][oO][mM]','.[bB][mM][pP]','.[jJ][pP][gG]','.[jJ][pP][eE][gG]','.nc','.nc.bz2']:
+ files += [file for file in glob.glob(path_to_data+'/**/*'+data_type, recursive=True) if not os.path.basename(file).startswith('.')]
+ nc_extension = False
+ for file in files:
+ if os.path.splitext(file)[1] == '.nc' or os.path.splitext(os.path.splitext(file)[0])[1] == '.nc':
+ nc_extension = True
+ if nc_extension:
+ try:
+ data, header = nc_to_np(path_to_data)
+ except Exception as e:
+ print(e)
+ data, header = None, None
+ else:
+ try:
+ # remove unreadable files or directories
+ for name in files:
+ if os.path.isfile(name):
+ try:
+ img, _ = load(name)
+ except:
+ files.remove(name)
+ else:
  files.remove(name)
+ files.sort()
+
+ # get data size
+ img, _ = load(files[0])
+ if len(img.shape)==3:
+ ysh, xsh, csh = img.shape[0], img.shape[1], img.shape[2]
+ channel = 'last'
+ if ysh < csh:
+ csh, ysh, xsh = img.shape[0], img.shape[1], img.shape[2]
+ channel = 'first'
  else:
- files.remove(name)
- files.sort()
-
- # get data size
- img, _ = load(files[0])
- if len(img.shape)==3:
- ysh, xsh, csh = img.shape[0], img.shape[1], img.shape[2]
- channel = 'last'
- if ysh < csh:
- csh, ysh, xsh = img.shape[0], img.shape[1], img.shape[2]
- channel = 'first'
- else:
- ysh, xsh = img.shape[0], img.shape[1]
- csh, channel = 0, None
-
- # load data slice by slice
- data = np.empty((len(files), ysh, xsh), dtype=img.dtype)
- header, image_data_shape = [], []
- for k, file_name in enumerate(files):
- img, img_header = load(file_name)
- if csh==3:
- img = rgb2gray(img, channel)
- elif csh==1 and channel=='last':
- img = img[:,:,0]
- elif csh==1 and channel=='first':
- img = img[0,:,:]
- data[k] = img
- header.append(img_header)
- header = [header, files, data.dtype]
- data = np.swapaxes(data, 1, 2)
- data = np.copy(data, order='C')
- except Exception as e:
- print(e)
- data, header = None, None
-
- # remove extracted files
- if extension=='.zip' and os.path.isdir(path_to_data):
- shutil.rmtree(path_to_data)
+ ysh, xsh = img.shape[0], img.shape[1]
+ csh, channel = 0, None
+
+ # load data slice by slice
+ data = np.empty((len(files), ysh, xsh), dtype=img.dtype)
+ header, image_data_shape = [], []
+ for k, file_name in enumerate(files):
+ img, img_header = load(file_name)
+ if csh==3:
+ img = rgb2gray(img, channel)
+ elif csh==1 and channel=='last':
+ img = img[:,:,0]
+ elif csh==1 and channel=='first':
+ img = img[0,:,:]
+ data[k] = img
+ header.append(img_header)
+ header = [header, files, data.dtype]
+ data = np.swapaxes(data, 1, 2)
+ data = np.copy(data, order='C')
+ except Exception as e:
+ print(e)
+ data, header = None, None

  elif extension == '.mrc':
  try:
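The hunk above replaces the hand-rolled scratch directory (`BASE_DIR + '/tmp/' + id_generator(40)` plus the trailing `shutil.rmtree` calls) with a `tempfile.TemporaryDirectory` context manager, which is why `shutil` disappears from the imports of biomedisa_helper.py and `tempfile` is added. A minimal sketch of the same extract-into-scratch-space pattern, using a hypothetical helper rather than the package's `load_data()`:

```
import glob
import os
import subprocess
import tempfile
import zipfile

def list_zip_contents(path_to_zip):
    """Hypothetical helper illustrating the pattern adopted in load_data()."""
    with tempfile.TemporaryDirectory() as temp_dir:
        # extract files into the scratch directory
        try:
            with zipfile.ZipFile(path_to_zip, 'r') as zip_ref:
                zip_ref.extractall(path=temp_dir)
        except Exception as e:
            print(e)
            print('Using unzip package...')
            subprocess.Popen(['unzip', path_to_zip, '-d', temp_dir]).wait()
        # work with the extracted files while temp_dir still exists
        files = sorted(glob.glob(temp_dir + '/**/*', recursive=True))
        return [os.path.basename(f) for f in files if os.path.isfile(f)]
    # temp_dir and everything inside it is deleted automatically on exit,
    # even if an exception is raised, so no explicit shutil.rmtree is needed.
```

The helper returns basenames only, because the extracted paths themselves stop existing once the `with` block exits.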
@@ -495,34 +488,31 @@ def save_data(path_to_final, final, header=None, final_image_type=None, compress
  simg.CopyInformation(header)
  sitk.WriteImage(simg, path_to_final, useCompression=compress)
  elif final_image_type in ['.zip', 'directory', '']:
- # make results directory
- if final_image_type == '.zip':
- results_dir = BASE_DIR + '/tmp/' + id_generator(40)
- os.makedirs(results_dir)
- os.chmod(results_dir, 0o770)
- else:
- results_dir = path_to_final
- if not os.path.isdir(results_dir):
- os.makedirs(results_dir)
- os.chmod(results_dir, 0o770)
- # save data as NC blocks
- if os.path.splitext(header[1][0])[1] == '.nc':
- np_to_nc(results_dir, final, header)
- file_names = header[1]
- # save data as PNG, TIF, DICOM slices
- else:
- header, file_names, final_dtype = header[0], header[1], header[2]
- final = final.astype(final_dtype)
- final = np.swapaxes(final, 2, 1)
- for k, file in enumerate(file_names):
- save(final[k], results_dir + '/' + os.path.basename(file), header[k])
- # zip data
- if final_image_type == '.zip':
- with zipfile.ZipFile(path_to_final, 'w') as zip:
- for file in file_names:
- zip.write(results_dir + '/' + os.path.basename(file), os.path.basename(file))
- if os.path.isdir(results_dir):
- shutil.rmtree(results_dir)
+ with tempfile.TemporaryDirectory() as temp_dir:
+ # make results directory
+ if final_image_type == '.zip':
+ results_dir = temp_dir
+ else:
+ results_dir = path_to_final
+ if not os.path.isdir(results_dir):
+ os.makedirs(results_dir)
+ os.chmod(results_dir, 0o770)
+ # save data as NC blocks
+ if os.path.splitext(header[1][0])[1] == '.nc':
+ np_to_nc(results_dir, final, header)
+ file_names = header[1]
+ # save data as PNG, TIF, DICOM slices
+ else:
+ header, file_names, final_dtype = header[0], header[1], header[2]
+ final = final.astype(final_dtype)
+ final = np.swapaxes(final, 2, 1)
+ for k, file in enumerate(file_names):
+ save(final[k], results_dir + '/' + os.path.basename(file), header[k])
+ # zip data
+ if final_image_type == '.zip':
+ with zipfile.ZipFile(path_to_final, 'w') as zip:
+ for file in file_names:
+ zip.write(results_dir + '/' + os.path.basename(file), os.path.basename(file))
  else:
  imageSize = int(final.nbytes * 10e-7)
  bigtiff = True if imageSize > 2000 else False
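Likewise, the `.zip` branch of `save_data()` now writes its intermediate slice files into a `tempfile.TemporaryDirectory` and zips them from there, instead of creating and later deleting a directory under `BASE_DIR/tmp`. A minimal sketch of that write-then-zip pattern, with a hypothetical function that stores slices as `.npy` files rather than the package's PNG/TIF/DICOM writers:

```
import os
import tempfile
import zipfile

import numpy as np

def save_slices_as_zip(volume, path_to_zip):
    """Hypothetical sketch: write each z-slice to a scratch dir, then zip them."""
    with tempfile.TemporaryDirectory() as results_dir:
        file_names = []
        for k, slice_2d in enumerate(volume):
            name = f'slice_{k:04d}.npy'  # stand-in for the real slice formats
            np.save(os.path.join(results_dir, name), slice_2d)
            file_names.append(name)
        # zip the slices while the scratch directory still exists
        with zipfile.ZipFile(path_to_zip, 'w') as zf:
            for name in file_names:
                zf.write(os.path.join(results_dir, name), name)
    # the intermediate .npy files are removed together with results_dir

# usage, e.g.:
# save_slices_as_zip(np.zeros((4, 32, 32), dtype=np.uint8), 'result.zip')
```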
@@ -38,6 +38,7 @@ import numba
  import shutil
  import cv2
  import tarfile
+ import tempfile

  def unique(arr):
  arr = arr.astype(np.uint8)
@@ -110,37 +111,33 @@ def create_slices(path_to_data, path_to_label, on_site=False):
  # load data and reduce data size
  path_to_dir, extension = os.path.splitext(path_to_data)
  if extension == '.gz':
- path_to_dir, extension = os.path.splitext(path_to_dir)
+ extension = os.path.splitext(path_to_dir)[1]
  if extension == '.tar' or os.path.isdir(path_to_data):
- # extract files
- if extension == '.tar':
- path_to_dir = BASE_DIR + '/tmp/' + id_generator(40)
- tar = tarfile.open(path_to_data)
- tar.extractall(path=path_to_dir)
- tar.close()
- else:
- path_to_dir = path_to_data
- # load files
- img_names = []
- for data_type in ['.tif','.tiff','.am','.hdr','.mhd','.mha','.nrrd','.nii','.nii.gz','.zip','.mrc']:
- img_names.extend(glob(path_to_dir+'/**/*'+data_type, recursive=True))
- img_names = sorted(img_names)
- raw = None
- for name in img_names:
- arr, _ = load_data(name, 'create_slices')
- if arr is not None and raw is None:
- zsh, ysh, xsh = arr.shape
- scale = float(256) / float(max(zsh, ysh, xsh))
- z_scale = int(zsh * scale)
- y_scale = int(ysh * scale)
- x_scale = int(xsh * scale)
- raw = img_resize(arr, z_scale, y_scale, x_scale)
- elif arr is not None:
- arr = img_resize(arr, z_scale, y_scale, x_scale)
- raw = np.append(raw, arr, axis=0)
- # remove extracted files
- if extension == '.tar' and os.path.isdir(path_to_dir):
- shutil.rmtree(path_to_dir)
+ with tempfile.TemporaryDirectory() as temp_dir:
+ # extract files
+ if extension == '.tar':
+ tar = tarfile.open(path_to_data)
+ tar.extractall(path=temp_dir)
+ tar.close()
+ path_to_data = temp_dir
+ # load files
+ img_names = []
+ for data_type in ['.tif','.tiff','.am','.hdr','.mhd','.mha','.nrrd','.nii','.nii.gz','.zip','.mrc']:
+ img_names.extend(glob(path_to_data+'/**/*'+data_type, recursive=True))
+ img_names = sorted(img_names)
+ raw = None
+ for name in img_names:
+ arr, _ = load_data(name, 'create_slices')
+ if arr is not None and raw is None:
+ zsh, ysh, xsh = arr.shape
+ scale = float(256) / float(max(zsh, ysh, xsh))
+ z_scale = int(zsh * scale)
+ y_scale = int(ysh * scale)
+ x_scale = int(xsh * scale)
+ raw = img_resize(arr, z_scale, y_scale, x_scale)
+ elif arr is not None:
+ arr = img_resize(arr, z_scale, y_scale, x_scale)
+ raw = np.append(raw, arr, axis=0)
  else:
  raw, _ = load_data(path_to_data, 'create_slices')
  zsh, ysh, xsh = raw.shape
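Besides the temporary-directory change, the `.gz` handling in create_slices.py is simplified: only the inner extension is re-derived (`extension = os.path.splitext(path_to_dir)[1]`), since the stripped path is no longer needed as an extraction target. A small sketch of that double-extension logic (the helper name is hypothetical):

```
import os

def inner_extension(path):
    """Hypothetical helper mirroring the .gz handling above."""
    stem, extension = os.path.splitext(path)
    if extension == '.gz':
        # 'volume.tar.gz' -> '.tar', 'volume.nii.gz' -> '.nii'
        extension = os.path.splitext(stem)[1]
    return extension

# inner_extension('training_images.tar.gz')           -> '.tar'
# inner_extension('testing_axial_crop_pat13.nii.gz')  -> '.nii'
```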
@@ -174,33 +171,29 @@ def create_slices(path_to_data, path_to_label, on_site=False):
  # load data
  path_to_dir, extension = os.path.splitext(path_to_label)
  if extension == '.gz':
- path_to_dir, extension = os.path.splitext(path_to_dir)
+ extension = os.path.splitext(path_to_dir)[1]
  if extension == '.tar' or os.path.isdir(path_to_label):
- # extract files
- if extension == '.tar':
- path_to_dir = BASE_DIR + '/tmp/' + id_generator(40)
- tar = tarfile.open(path_to_label)
- tar.extractall(path=path_to_dir)
- tar.close()
- else:
- path_to_dir = path_to_label
- # load files
- img_names = []
- for data_type in ['.tif','.tiff','.am','.hdr','.mhd','.mha','.nrrd','.nii','.nii.gz','.zip','.mrc']:
- img_names.extend(glob(path_to_dir+'/**/*'+data_type, recursive=True))
- img_names = sorted(img_names)
- # load and scale label data to corresponding img data
- mask = np.zeros((0, y_scale, x_scale), dtype=np.uint8)
- for name in img_names:
- arr, _ = load_data(name, 'create_slices')
- if arr is not None:
- arr = color_to_gray(arr)
- arr = arr.astype(np.uint8)
- arr = img_resize(arr, z_scale, y_scale, x_scale, labels=True)
- mask = np.append(mask, arr, axis=0)
- # remove extracted files
- if extension == '.tar' and os.path.isdir(path_to_dir):
- shutil.rmtree(path_to_dir)
+ with tempfile.TemporaryDirectory() as temp_dir:
+ # extract files
+ if extension == '.tar':
+ tar = tarfile.open(path_to_label)
+ tar.extractall(path=temp_dir)
+ tar.close()
+ path_to_label = temp_dir
+ # load files
+ img_names = []
+ for data_type in ['.tif','.tiff','.am','.hdr','.mhd','.mha','.nrrd','.nii','.nii.gz','.zip','.mrc']:
+ img_names.extend(glob(path_to_label+'/**/*'+data_type, recursive=True))
+ img_names = sorted(img_names)
+ # load and scale label data to corresponding img data
+ mask = np.zeros((0, y_scale, x_scale), dtype=np.uint8)
+ for name in img_names:
+ arr, _ = load_data(name, 'create_slices')
+ if arr is not None:
+ arr = color_to_gray(arr)
+ arr = arr.astype(np.uint8)
+ arr = img_resize(arr, z_scale, y_scale, x_scale, labels=True)
+ mask = np.append(mask, arr, axis=0)
  else:
  mask, _ = load_data(path_to_label, 'create_slices')
  mask = color_to_gray(mask)
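The label branch follows the same pattern with `tarfile` instead of `zipfile`: the archive is unpacked into a temporary directory, the image files inside it are globbed and processed, and the directory vanishes when the block ends. A compact sketch under those assumptions (hypothetical helper, reduced suffix list):

```
import glob
import os
import tarfile
import tempfile

def list_tar_images(path_to_tar, suffixes=('.tif', '.nii.gz', '.am')):
    """Hypothetical sketch of the create_slices() tar handling."""
    with tempfile.TemporaryDirectory() as temp_dir:
        with tarfile.open(path_to_tar) as tar:
            tar.extractall(path=temp_dir)
        img_names = []
        for data_type in suffixes:
            img_names.extend(glob.glob(temp_dir + '/**/*' + data_type, recursive=True))
        # return basenames only: the extracted paths disappear with temp_dir
        return sorted(os.path.basename(name) for name in img_names)
```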