biomedisa-2024.5.14-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (44)
  1. biomedisa/__init__.py +53 -0
  2. biomedisa/__main__.py +18 -0
  3. biomedisa/biomedisa_features/DataGenerator.py +299 -0
  4. biomedisa/biomedisa_features/DataGeneratorCrop.py +121 -0
  5. biomedisa/biomedisa_features/PredictDataGenerator.py +87 -0
  6. biomedisa/biomedisa_features/PredictDataGeneratorCrop.py +74 -0
  7. biomedisa/biomedisa_features/__init__.py +0 -0
  8. biomedisa/biomedisa_features/active_contour.py +434 -0
  9. biomedisa/biomedisa_features/amira_to_np/__init__.py +0 -0
  10. biomedisa/biomedisa_features/amira_to_np/amira_data_stream.py +980 -0
  11. biomedisa/biomedisa_features/amira_to_np/amira_grammar.py +369 -0
  12. biomedisa/biomedisa_features/amira_to_np/amira_header.py +290 -0
  13. biomedisa/biomedisa_features/amira_to_np/amira_helper.py +72 -0
  14. biomedisa/biomedisa_features/assd.py +167 -0
  15. biomedisa/biomedisa_features/biomedisa_helper.py +801 -0
  16. biomedisa/biomedisa_features/create_slices.py +286 -0
  17. biomedisa/biomedisa_features/crop_helper.py +586 -0
  18. biomedisa/biomedisa_features/curvop_numba.py +149 -0
  19. biomedisa/biomedisa_features/django_env.py +172 -0
  20. biomedisa/biomedisa_features/keras_helper.py +1219 -0
  21. biomedisa/biomedisa_features/nc_reader.py +179 -0
  22. biomedisa/biomedisa_features/pid.py +52 -0
  23. biomedisa/biomedisa_features/process_image.py +253 -0
  24. biomedisa/biomedisa_features/pycuda_test.py +84 -0
  25. biomedisa/biomedisa_features/random_walk/__init__.py +0 -0
  26. biomedisa/biomedisa_features/random_walk/gpu_kernels.py +183 -0
  27. biomedisa/biomedisa_features/random_walk/pycuda_large.py +826 -0
  28. biomedisa/biomedisa_features/random_walk/pycuda_large_allx.py +806 -0
  29. biomedisa/biomedisa_features/random_walk/pycuda_small.py +414 -0
  30. biomedisa/biomedisa_features/random_walk/pycuda_small_allx.py +493 -0
  31. biomedisa/biomedisa_features/random_walk/pyopencl_large.py +760 -0
  32. biomedisa/biomedisa_features/random_walk/pyopencl_small.py +441 -0
  33. biomedisa/biomedisa_features/random_walk/rw_large.py +390 -0
  34. biomedisa/biomedisa_features/random_walk/rw_small.py +310 -0
  35. biomedisa/biomedisa_features/remove_outlier.py +399 -0
  36. biomedisa/biomedisa_features/split_volume.py +274 -0
  37. biomedisa/deeplearning.py +519 -0
  38. biomedisa/interpolation.py +371 -0
  39. biomedisa/mesh.py +406 -0
  40. biomedisa-2024.5.14.dist-info/LICENSE +191 -0
  41. biomedisa-2024.5.14.dist-info/METADATA +306 -0
  42. biomedisa-2024.5.14.dist-info/RECORD +44 -0
  43. biomedisa-2024.5.14.dist-info/WHEEL +5 -0
  44. biomedisa-2024.5.14.dist-info/top_level.txt +1 -0
biomedisa/__init__.py ADDED
@@ -0,0 +1,53 @@
+ import os
+ import sys
+ import subprocess
+
+ # from source base directory
+ BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
+
+ # pip base directory
+ if not os.path.exists(os.path.join(BASE_DIR,'biomedisa_features')):
+     BASE_DIR = os.path.dirname(os.path.abspath(__file__))
+     # add BASE_DIR to PYTHONPATH for absolute imports
+     sys.path.append(BASE_DIR)
+
+     # metadata
+     import importlib_metadata
+     metadata = importlib_metadata.metadata("biomedisa")
+
+     __all__ = (
+         "__title__",
+         "__summary__",
+         "__url__",
+         "__version__",
+         "__author__",
+         "__email__",
+         "__license__",
+         "__copyright__",
+     )
+
+     __copyright__ = "Copyright (c) 2019-2024 Philipp Lösel"
+     __title__ = metadata["name"]
+     __summary__ = metadata["summary"]
+     __url__ = "https://biomedisa.info"
+     __version__ = metadata["version"]
+     __author__ = "Philipp Lösel"
+     __email__ = metadata["author-email"]
+     __license__ = "European Union Public Licence 1.2 (EUPL 1.2)"
+
+ # biomedisa version when installed from source
+ else:
+     try:
+         if os.path.exists(os.path.join(BASE_DIR,'.git')):
+             __version__ = subprocess.check_output(['git', 'describe', '--tags', '--always'], cwd=BASE_DIR).decode('utf-8').strip()
+             f = open(os.path.join(BASE_DIR,'log/biomedisa_version'), 'w')
+             f.write(__version__)
+             f.close()
+         else:
+             raise Exception()
+     except:
+         if os.path.isfile(os.path.join(BASE_DIR,'log/biomedisa_version')):
+             __version__ = open(os.path.join(BASE_DIR,'log/biomedisa_version'), 'r').readline().rstrip('\n')
+         else:
+             __version__ = None
+
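A minimal sketch of how the resolved version can be read after a pip installation of this wheel; importlib_metadata is the same backport the module itself imports, and the printed value is only illustrative.

    import biomedisa
    import importlib_metadata

    # For a pip install, __version__ mirrors the wheel metadata read in __init__.py.
    print(biomedisa.__version__)                    # e.g. 2024.5.14 (illustrative)
    print(importlib_metadata.version("biomedisa"))  # same source of truth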
biomedisa/__main__.py ADDED
@@ -0,0 +1,18 @@
+ # biomedisa/__main__.py
+
+ import sys
+
+ def main():
+     if len(sys.argv) < 2 or sys.argv[1] in ['-h','--help']:
+         print("Usage: python3 -m biomedisa.<module_name> <args>")
+         print("Modules available: interpolation, deeplearning, mesh")
+         print("[-h, --help] for more information of each module")
+         print("[-V, --version] for Biomedisa version installed")
+
+     if sys.argv[1] in ['-v','-V','--version']:
+         import biomedisa
+         print(biomedisa.__version__)
+
+ if __name__ == "__main__":
+     sys.exit(main())
+
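A hedged usage sketch of the entry point above: `-m biomedisa` only prints the usage text or the installed version, while the actual processing lives in the submodules named in the help output. Module-specific arguments are not shown here.

    import subprocess, sys

    # Print the installed Biomedisa version via the package entry point.
    subprocess.run([sys.executable, "-m", "biomedisa", "--version"])

    # Each processing module ships its own CLI, e.g. the interpolation module.
    subprocess.run([sys.executable, "-m", "biomedisa.interpolation", "--help"])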
biomedisa/biomedisa_features/DataGenerator.py ADDED
@@ -0,0 +1,299 @@
+ ##########################################################################
+ ## ##
+ ## Copyright (c) 2024 Philipp Lösel. All rights reserved. ##
+ ## ##
+ ## This file is part of the open source project biomedisa. ##
+ ## ##
+ ## Licensed under the European Union Public Licence (EUPL) ##
+ ## v1.2, or - as soon as they will be approved by the ##
+ ## European Commission - subsequent versions of the EUPL; ##
+ ## ##
+ ## You may redistribute it and/or modify it under the terms ##
+ ## of the EUPL v1.2. You may not use this work except in ##
+ ## compliance with this Licence. ##
+ ## ##
+ ## You can obtain a copy of the Licence at: ##
+ ## ##
+ ## https://joinup.ec.europa.eu/page/eupl-text-11-12 ##
+ ## ##
+ ## Unless required by applicable law or agreed to in ##
+ ## writing, software distributed under the Licence is ##
+ ## distributed on an "AS IS" basis, WITHOUT WARRANTIES ##
+ ## OR CONDITIONS OF ANY KIND, either express or implied. ##
+ ## ##
+ ## See the Licence for the specific language governing ##
+ ## permissions and limitations under the Licence. ##
+ ## ##
+ ##########################################################################
+
+ import numpy as np
+ import tensorflow as tf
+ import numba
+ import random
+ import scipy
+
+ @numba.jit(nopython=True)#parallel=True
+ def rotate_img_patch(src,trg,k,l,m,cos_a,sin_a,z_patch,y_patch,x_patch,imageHeight,imageWidth):
+     for y in range(l,l+y_patch):
+         yA = y - imageHeight/2
+         for x in range(m,m+x_patch):
+             xA = x - imageWidth/2
+             xR = xA * cos_a - yA * sin_a
+             yR = xA * sin_a + yA * cos_a
+             src_x = xR + imageWidth/2
+             src_y = yR + imageHeight/2
+             # bilinear interpolation
+             src_x0 = float(int(src_x))
+             src_x1 = src_x0 + 1
+             src_y0 = float(int(src_y))
+             src_y1 = src_y0 + 1
+             sx = src_x - src_x0
+             sy = src_y - src_y0
+             idx_src_x0 = int(min(max(0,src_x0),imageWidth-1))
+             idx_src_x1 = int(min(max(0,src_x1),imageWidth-1))
+             idx_src_y0 = int(min(max(0,src_y0),imageHeight-1))
+             idx_src_y1 = int(min(max(0,src_y1),imageHeight-1))
+             for z in range(k,k+z_patch):
+                 val = (1-sy) * (1-sx) * float(src[z,idx_src_y0,idx_src_x0])
+                 val += (sy) * (1-sx) * float(src[z,idx_src_y1,idx_src_x0])
+                 val += (1-sy) * (sx) * float(src[z,idx_src_y0,idx_src_x1])
+                 val += (sy) * (sx) * float(src[z,idx_src_y1,idx_src_x1])
+                 trg[z-k,y-l,x-m] = val
+     return trg
+
+ @numba.jit(nopython=True)#parallel=True
+ def rotate_label_patch(src,trg,k,l,m,cos_a,sin_a,z_patch,y_patch,x_patch,imageHeight,imageWidth):
+     for y in range(l,l+y_patch):
+         yA = y - imageHeight/2
+         for x in range(m,m+x_patch):
+             xA = x - imageWidth/2
+             xR = xA * cos_a - yA * sin_a
+             yR = xA * sin_a + yA * cos_a
+             src_x = xR + imageWidth/2
+             src_y = yR + imageHeight/2
+             # nearest neighbour
+             src_x = round(src_x)
+             src_y = round(src_y)
+             idx_src_x = int(min(max(0,src_x),imageWidth-1))
+             idx_src_y = int(min(max(0,src_y),imageHeight-1))
+             for z in range(k,k+z_patch):
+                 trg[z-k,y-l,x-m] = src[z,idx_src_y,idx_src_x]
+     return trg
+
+ def random_rotation_3d(image, max_angle=180):
+     """ Randomly rotate an image by a random angle (-max_angle, max_angle).
+
+     Arguments:
+     max_angle: `float`. The maximum rotation angle.
+
+     Returns:
+     batch of rotated 3D images
+     """
+
+     # rotate along x-axis
+     angle = random.uniform(-max_angle, max_angle)
+     image2 = scipy.ndimage.rotate(image, angle, mode='nearest', axes=(0, 1), reshape=False)
+
+     # rotate along y-axis
+     angle = random.uniform(-max_angle, max_angle)
+     image3 = scipy.ndimage.rotate(image2, angle, mode='nearest', axes=(0, 2), reshape=False)
+
+     # rotate along z-axis
+     angle = random.uniform(-max_angle, max_angle)
+     image_rot = scipy.ndimage.rotate(image3, angle, mode='nearest', axes=(1, 2), reshape=False)
+
+     return image_rot
+
+ class DataGenerator(tf.keras.utils.Sequence):
+     'Generates data for Keras'
+     def __init__(self, img, label, list_IDs_fg, list_IDs_bg, shuffle, train, classification, batch_size=32, dim=(32,32,32),
+                  dim_img=(32,32,32), n_classes=10, n_channels=1, augment=(False,False,False,False,0), patch_normalization=False):
+         'Initialization'
+         self.dim = dim
+         self.dim_img = dim_img
+         self.list_IDs_fg = list_IDs_fg
+         self.list_IDs_bg = list_IDs_bg
+         self.batch_size = batch_size
+         self.label = label
+         self.img = img
+         self.n_channels = n_channels
+         self.n_classes = n_classes
+         self.shuffle = shuffle
+         self.augment = augment
+         self.train = train
+         self.classification = classification
+         self.on_epoch_end()
+         self.patch_normalization = patch_normalization
+
+     def __len__(self):
+         'Denotes the number of batches per epoch'
+         if len(self.list_IDs_bg) > 0:
+             len_IDs = 2 * max(len(self.list_IDs_fg), len(self.list_IDs_bg))
+         else:
+             len_IDs = len(self.list_IDs_fg)
+         n_batches = int(np.floor(len_IDs / self.batch_size))
+         return n_batches
+
+     def __getitem__(self, index):
+         'Generate one batch of data'
+
+         if len(self.list_IDs_bg) > 0:
+
+             # len IDs
+             len_IDs = max(len(self.list_IDs_fg), len(self.list_IDs_bg))
+
+             # upsample lists of indexes to the same size
+             repetitions = int(np.floor(len_IDs / len(self.list_IDs_fg))) + 1
+             upsampled_indexes_fg = np.tile(self.indexes_fg, repetitions)
+             upsampled_indexes_fg = upsampled_indexes_fg[:len_IDs]
+
+             repetitions = int(np.floor(len_IDs / len(self.list_IDs_bg))) + 1
+             upsampled_indexes_bg = np.tile(self.indexes_bg, repetitions)
+             upsampled_indexes_bg = upsampled_indexes_bg[:len_IDs]
+
+             # Generate indexes of the batch
+             tmp_batch_size = int(self.batch_size / 2)
+             indexes_fg = upsampled_indexes_fg[index*tmp_batch_size:(index+1)*tmp_batch_size]
+             indexes_bg = upsampled_indexes_bg[index*tmp_batch_size:(index+1)*tmp_batch_size]
+
+             # Find list of IDs
+             list_IDs_temp = [self.list_IDs_fg[k] for k in indexes_fg] + [self.list_IDs_bg[k] for k in indexes_bg]
+
+         else:
+
+             # Generate indexes of the batch
+             indexes_fg = self.indexes_fg[index*self.batch_size:(index+1)*self.batch_size]
+
+             # Find list of IDs
+             list_IDs_temp = [self.list_IDs_fg[k] for k in indexes_fg]
+
+         # Generate data
+         X, y = self.__data_generation(list_IDs_temp)
+
+         return X, y
+
+     def on_epoch_end(self):
+         'Updates indexes after each epoch'
+         self.indexes_fg = np.arange(len(self.list_IDs_fg))
+         self.indexes_bg = np.arange(len(self.list_IDs_bg))
+         if self.shuffle == True:
+             np.random.shuffle(self.indexes_fg)
+             np.random.shuffle(self.indexes_bg)
+
+     def __data_generation(self, list_IDs_temp):
+         'Generates data containing batch_size samples' # X : (n_samples, *dim, n_channels)
+
+         # Initialization
+         X = np.empty((self.batch_size, *self.dim, self.n_channels), dtype=np.float32)
+         if self.classification:
+             y = np.empty((self.batch_size, 1), dtype=np.int32)
+         else:
+             y = np.empty((self.batch_size, *self.dim, 1), dtype=np.int32)
+
+         # get augmentation parameter
+         flip_x, flip_y, flip_z, swapaxes, rotate = self.augment
+         n_aug = np.sum([flip_z, flip_y, flip_x])
+         flips = np.where([flip_z, flip_y, flip_x])[0]
+
+         # create random angles
+         if rotate:
+             angle = np.random.uniform(-1,1,self.batch_size) * 3.1416/180*rotate
+             cos_angle = np.cos(angle)
+             sin_angle = np.sin(angle)
+
+         # Generate data
+         for i, ID in enumerate(list_IDs_temp):
+
+             # get patch indices
+             k = ID // (self.dim_img[1]*self.dim_img[2])
+             rest = ID % (self.dim_img[1]*self.dim_img[2])
+             l = rest // self.dim_img[2]
+             m = rest % self.dim_img[2]
+
+             # get patch
+             if self.classification:
+                 tmp_X = self.img[k:k+self.dim[0],l:l+self.dim[1],m:m+self.dim[2]]
+                 tmp_y = self.label[k,l,m]
+
+                 # augmentation
+                 if self.train:
+
+                     # rotate in 3D
+                     if rotate:
+                         tmp_X = random_rotation_3d(tmp_X, max_angle=rotate)
+
+                     # flip patch along axes
+                     v = np.random.randint(n_aug+1)
+                     if np.any([flip_x, flip_y, flip_z]) and v>0:
+                         flip = flips[v-1]
+                         tmp_X = np.flip(tmp_X, flip)
+
+                     # swap axes
+                     if swapaxes:
+                         v = np.random.randint(4)
+                         if v==1:
+                             tmp_X = np.swapaxes(tmp_X,0,1)
+                         elif v==2:
+                             tmp_X = np.swapaxes(tmp_X,0,2)
+                         elif v==3:
+                             tmp_X = np.swapaxes(tmp_X,1,2)
+
+                 # assign to batch
+                 X[i,:,:,:,0] = tmp_X
+                 y[i,0] = tmp_y
+
+             else:
+                 # get patch
+                 tmp_X = self.img[k:k+self.dim[0],l:l+self.dim[1],m:m+self.dim[2]]
+                 tmp_y = self.label[k:k+self.dim[0],l:l+self.dim[1],m:m+self.dim[2]]
+
+                 # augmentation
+                 if self.train:
+
+                     # rotate in xy plane
+                     if rotate:
+                         tmp_X = np.empty((*self.dim, self.n_channels), dtype=np.float32)
+                         tmp_y = np.empty(self.dim, dtype=np.int32)
+                         cos_a = cos_angle[i]
+                         sin_a = sin_angle[i]
+                         for c in range(self.n_channels):
+                             tmp_X[:,:,:,c] = rotate_img_patch(self.img[:,:,:,c],tmp_X[:,:,:,c],k,l,m,cos_a,sin_a,
+                                 self.dim[0],self.dim[1],self.dim[2],
+                                 self.dim_img[1],self.dim_img[2])
+                         tmp_y = rotate_label_patch(self.label,tmp_y,k,l,m,cos_a,sin_a,
+                             self.dim[0],self.dim[1],self.dim[2],
+                             self.dim_img[1],self.dim_img[2])
+
+                     # flip patch along axes
+                     v = np.random.randint(n_aug+1)
+                     if np.any([flip_x, flip_y, flip_z]) and v>0:
+                         flip = flips[v-1]
+                         tmp_X = np.flip(tmp_X, flip)
+                         tmp_y = np.flip(tmp_y, flip)
+
+                     # swap axes
+                     if swapaxes:
+                         v = np.random.randint(4)
+                         if v==1:
+                             tmp_X = np.swapaxes(tmp_X,0,1)
+                             tmp_y = np.swapaxes(tmp_y,0,1)
+                         elif v==2:
+                             tmp_X = np.swapaxes(tmp_X,0,2)
+                             tmp_y = np.swapaxes(tmp_y,0,2)
+                         elif v==3:
+                             tmp_X = np.swapaxes(tmp_X,1,2)
+                             tmp_y = np.swapaxes(tmp_y,1,2)
+
+                 # patch normalization
+                 if self.patch_normalization:
+                     tmp_X = np.copy(tmp_X, order='C')
+                     for c in range(self.n_channels):
+                         tmp_X[:,:,:,c] -= np.mean(tmp_X[:,:,:,c])
+                         tmp_X[:,:,:,c] /= max(np.std(tmp_X[:,:,:,c]), 1e-6)
+
+                 # assign to batch
+                 X[i] = tmp_X
+                 y[i,:,:,:,0] = tmp_y
+
+         return X, tf.keras.utils.to_categorical(y, num_classes=self.n_classes)
+
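An illustration of the flat patch indexing used in __data_generation above: each ID encodes the (z, y, x) corner of a patch inside a volume of shape dim_img. The concrete numbers below are arbitrary.

    dim_img = (100, 200, 300)            # (z, y, x) shape of the full volume
    ID = 123456                          # flat patch identifier

    k = ID // (dim_img[1] * dim_img[2])  # z corner of the patch
    rest = ID % (dim_img[1] * dim_img[2])
    l = rest // dim_img[2]               # y corner
    m = rest % dim_img[2]                # x corner

    print(k, l, m)                       # 2 11 156 for this example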
biomedisa/biomedisa_features/DataGeneratorCrop.py ADDED
@@ -0,0 +1,121 @@
+ ##########################################################################
+ ## ##
+ ## Copyright (c) 2024 Philipp Lösel. All rights reserved. ##
+ ## ##
+ ## This file is part of the open source project biomedisa. ##
+ ## ##
+ ## Licensed under the European Union Public Licence (EUPL) ##
+ ## v1.2, or - as soon as they will be approved by the ##
+ ## European Commission - subsequent versions of the EUPL; ##
+ ## ##
+ ## You may redistribute it and/or modify it under the terms ##
+ ## of the EUPL v1.2. You may not use this work except in ##
+ ## compliance with this Licence. ##
+ ## ##
+ ## You can obtain a copy of the Licence at: ##
+ ## ##
+ ## https://joinup.ec.europa.eu/page/eupl-text-11-12 ##
+ ## ##
+ ## Unless required by applicable law or agreed to in ##
+ ## writing, software distributed under the Licence is ##
+ ## distributed on an "AS IS" basis, WITHOUT WARRANTIES ##
+ ## OR CONDITIONS OF ANY KIND, either express or implied. ##
+ ## ##
+ ## See the Licence for the specific language governing ##
+ ## permissions and limitations under the Licence. ##
+ ## ##
+ ##########################################################################
+
+ import numpy as np
+ import tensorflow as tf
+ from scipy.ndimage import gaussian_filter, map_coordinates
+
+ def elastic_transform(image, alpha=100, sigma=20):
+     zsh, ysh, xsh = image.shape
+     dx = gaussian_filter((np.random.rand(ysh, xsh) * 2 - 1) * alpha, sigma)
+     dy = gaussian_filter((np.random.rand(ysh, xsh) * 2 - 1) * alpha, sigma)
+     y, x = np.meshgrid(np.arange(ysh), np.arange(xsh), indexing='ij')
+     indices = np.reshape(y+dy, (-1, 1)), np.reshape(x+dx, (-1, 1))
+     for k in range(zsh):
+         image[k] = map_coordinates(image[k], indices, order=1, mode='reflect').reshape(ysh, xsh)
+     return image
+
+ class DataGeneratorCrop(tf.keras.utils.Sequence):
+     'Generates data for Keras'
+     def __init__(self, img, label, list_IDs_fg, list_IDs_bg, batch_size=32, dim=(32,32,32),
+                  n_channels=3, n_classes=2, shuffle=True):
+         'Initialization'
+         self.dim = dim
+         self.list_IDs_fg = list_IDs_fg
+         self.list_IDs_bg = list_IDs_bg
+         self.batch_size = batch_size
+         self.label = label
+         self.img = img
+         self.n_channels = n_channels
+         self.n_classes = n_classes
+         self.shuffle = shuffle
+         self.on_epoch_end()
+
+     def __len__(self):
+         'Denotes the number of batches per epoch'
+         if len(self.list_IDs_bg) > 0:
+             len_IDs = 2 * min(len(self.list_IDs_fg), len(self.list_IDs_bg))
+         else:
+             len_IDs = len(self.list_IDs_fg)
+         return int(np.floor(len_IDs / self.batch_size))
+
+     def __getitem__(self, index):
+         'Generate one batch of data'
+
+         if len(self.list_IDs_bg) > 0:
+
+             # len IDs
+             len_IDs = min(len(self.list_IDs_fg), len(self.list_IDs_bg))
+
+             # sample lists of indexes to the same size
+             tmp_indexes_fg = self.indexes_fg[:len_IDs]
+             tmp_indexes_bg = self.indexes_bg[:len_IDs]
+
+             # Generate indexes of the batch
+             tmp_batch_size = int(self.batch_size / 2)
+             indexes_fg = tmp_indexes_fg[index*tmp_batch_size:(index+1)*tmp_batch_size]
+             indexes_bg = tmp_indexes_bg[index*tmp_batch_size:(index+1)*tmp_batch_size]
+
+             # Find list of IDs
+             list_IDs_temp = [self.list_IDs_fg[k] for k in indexes_fg] + [self.list_IDs_bg[k] for k in indexes_bg]
+
+         else:
+
+             # Generate indexes of the batch
+             indexes_fg = self.indexes_fg[index*self.batch_size:(index+1)*self.batch_size]
+
+             # Find list of IDs
+             list_IDs_temp = [self.list_IDs_fg[k] for k in indexes_fg]
+
+         # Generate data
+         X, y = self.__data_generation(list_IDs_temp)
+
+         return X, y
+
+     def on_epoch_end(self):
+         'Updates indexes after each epoch'
+         self.indexes_fg = np.arange(len(self.list_IDs_fg))
+         self.indexes_bg = np.arange(len(self.list_IDs_bg))
+         if self.shuffle == True:
+             np.random.shuffle(self.indexes_fg)
+             np.random.shuffle(self.indexes_bg)
+
+     def __data_generation(self, list_IDs_temp):
+         'Generates data containing batch_size samples' # X : (n_samples, *dim, n_channels)
+
+         # Initialization
+         X = np.empty((self.batch_size, *self.dim, self.n_channels), dtype=np.uint8)
+         y = np.empty((self.batch_size,), dtype=np.int32)
+
+         # Generate data
+         for i, ID in enumerate(list_IDs_temp):
+             X[i,...] = self.img[ID,...]
+             y[i] = self.label[ID]
+
+         return X, y
+
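A small sketch of the elastic_transform helper defined above: one random in-plane displacement field, smoothed by a Gaussian, is applied to every z-slice. The dotted import path is an assumption based on the wheel layout shown in this diff; the array shape is arbitrary.

    import numpy as np
    from biomedisa.biomedisa_features.DataGeneratorCrop import elastic_transform

    volume = (np.random.rand(8, 64, 64) * 255).astype(np.float32)
    warped = elastic_transform(volume.copy(), alpha=100, sigma=20)  # warps in place, so pass a copy
    print(warped.shape)  # (8, 64, 64): voxels are displaced, the shape is unchanged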
biomedisa/biomedisa_features/PredictDataGenerator.py ADDED
@@ -0,0 +1,87 @@
+ ##########################################################################
+ ## ##
+ ## Copyright (c) 2024 Philipp Lösel. All rights reserved. ##
+ ## ##
+ ## This file is part of the open source project biomedisa. ##
+ ## ##
+ ## Licensed under the European Union Public Licence (EUPL) ##
+ ## v1.2, or - as soon as they will be approved by the ##
+ ## European Commission - subsequent versions of the EUPL; ##
+ ## ##
+ ## You may redistribute it and/or modify it under the terms ##
+ ## of the EUPL v1.2. You may not use this work except in ##
+ ## compliance with this Licence. ##
+ ## ##
+ ## You can obtain a copy of the Licence at: ##
+ ## ##
+ ## https://joinup.ec.europa.eu/page/eupl-text-11-12 ##
+ ## ##
+ ## Unless required by applicable law or agreed to in ##
+ ## writing, software distributed under the Licence is ##
+ ## distributed on an "AS IS" basis, WITHOUT WARRANTIES ##
+ ## OR CONDITIONS OF ANY KIND, either express or implied. ##
+ ## ##
+ ## See the Licence for the specific language governing ##
+ ## permissions and limitations under the Licence. ##
+ ## ##
+ ##########################################################################
+
+ import numpy as np
+ import tensorflow as tf
+
+ class PredictDataGenerator(tf.keras.utils.Sequence):
+     def __init__(self, img, list_IDs, batch_size=32, dim=(32,32,32),
+                  dim_img=(32,32,32), n_channels=1, patch_normalization=False):
+         'Initialization'
+         self.dim = dim
+         self.dim_img = dim_img
+         self.list_IDs = list_IDs
+         self.batch_size = batch_size
+         self.img = img
+         self.n_channels = n_channels
+         self.indexes = np.arange(len(self.list_IDs))
+         self.patch_normalization = patch_normalization
+
+     def __len__(self):
+         'Denotes the number of batches per epoch'
+         return int(np.floor(len(self.list_IDs) / self.batch_size))
+
+     def __getitem__(self, index):
+         'Generate one batch of data'
+         # Generate indexes of the batch
+         indexes = self.indexes[index*self.batch_size:(index+1)*self.batch_size]
+
+         # Find list of IDs
+         list_IDs_temp = [self.list_IDs[k] for k in indexes]
+
+         # Generate data
+         X = self.__data_generation(list_IDs_temp)
+
+         return X
+
+     def __data_generation(self, list_IDs_temp):
+         'Generates data containing batch_size samples' # X : (n_samples, *dim, n_channels)
+
+         # Initialization
+         X = np.empty((self.batch_size, *self.dim, self.n_channels), dtype=np.float32)
+
+         # Generate data
+         for i, ID in enumerate(list_IDs_temp):
+
+             # get patch indices
+             k = ID // (self.dim_img[1]*self.dim_img[2])
+             rest = ID % (self.dim_img[1]*self.dim_img[2])
+             l = rest // self.dim_img[2]
+             m = rest % self.dim_img[2]
+
+             # get patch
+             tmp_X = self.img[k:k+self.dim[0],l:l+self.dim[1],m:m+self.dim[2]]
+             if self.patch_normalization:
+                 tmp_X = np.copy(tmp_X, order='C')
+                 for c in range(self.n_channels):
+                     tmp_X[:,:,:,c] -= np.mean(tmp_X[:,:,:,c])
+                     tmp_X[:,:,:,c] /= max(np.std(tmp_X[:,:,:,c]), 1e-6)
+             X[i] = tmp_X
+
+         return X
+
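The optional patch_normalization branch above standardises every patch per channel before prediction. A standalone sketch of that step, with an arbitrary patch shape, looks like this:

    import numpy as np

    patch = np.random.rand(64, 64, 64, 1).astype(np.float32)  # (z, y, x, channels)
    for c in range(patch.shape[-1]):
        patch[..., c] -= np.mean(patch[..., c])                # zero mean
        patch[..., c] /= max(np.std(patch[..., c]), 1e-6)      # unit (clamped) standard deviation
    print(round(float(patch.mean()), 3), round(float(patch.std()), 3))  # ~0.0 and ~1.0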
biomedisa/biomedisa_features/PredictDataGeneratorCrop.py ADDED
@@ -0,0 +1,74 @@
+ ##########################################################################
+ ## ##
+ ## Copyright (c) 2022 Philipp Lösel. All rights reserved. ##
+ ## ##
+ ## This file is part of the open source project biomedisa. ##
+ ## ##
+ ## Licensed under the European Union Public Licence (EUPL) ##
+ ## v1.2, or - as soon as they will be approved by the ##
+ ## European Commission - subsequent versions of the EUPL; ##
+ ## ##
+ ## You may redistribute it and/or modify it under the terms ##
+ ## of the EUPL v1.2. You may not use this work except in ##
+ ## compliance with this Licence. ##
+ ## ##
+ ## You can obtain a copy of the Licence at: ##
+ ## ##
+ ## https://joinup.ec.europa.eu/page/eupl-text-11-12 ##
+ ## ##
+ ## Unless required by applicable law or agreed to in ##
+ ## writing, software distributed under the Licence is ##
+ ## distributed on an "AS IS" basis, WITHOUT WARRANTIES ##
+ ## OR CONDITIONS OF ANY KIND, either express or implied. ##
+ ## ##
+ ## See the Licence for the specific language governing ##
+ ## permissions and limitations under the Licence. ##
+ ## ##
+ ##########################################################################
+
+ import numpy as np
+ import tensorflow as tf
+
+ class PredictDataGeneratorCrop(tf.keras.utils.Sequence):
+     def __init__(self, img, list_IDs, batch_size=32, dim=(32,32,32),
+                  dim_img=(32,32,32), n_channels=3):
+         'Initialization'
+         self.dim = dim
+         self.dim_img = dim_img
+         self.list_IDs = list_IDs
+         self.batch_size = batch_size
+         self.img = img
+         self.n_channels = n_channels
+         self.indexes = np.arange(len(self.list_IDs))
+
+     def __len__(self):
+         'Denotes the number of batches per epoch'
+         return int(np.floor(len(self.list_IDs) / self.batch_size))
+
+     def __getitem__(self, index):
+         'Generate one batch of data'
+         # Generate indexes of the batch
+         indexes = self.indexes[index*self.batch_size:(index+1)*self.batch_size]
+
+         # Find list of IDs
+         list_IDs_temp = [self.list_IDs[k] for k in indexes]
+
+         # Generate data
+         X = self.__data_generation(list_IDs_temp)
+
+         return X
+
+     def __data_generation(self, list_IDs_temp):
+         'Generates data containing batch_size samples' # X : (n_samples, *dim, n_channels)
+
+         # Initialization
+         X = np.empty((self.batch_size, *self.dim, self.n_channels), dtype=np.uint8)
+
+         # Generate data
+         for i, ID in enumerate(list_IDs_temp):
+
+             # get layer
+             X[i,...] = self.img[ID,...]
+
+         return X
+
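A hedged instantiation sketch for the generator above: it only slices pre-extracted uint8 layers into batches for prediction and returns no labels. The array shape, batch size, and 2D dim used here are assumptions for illustration, and the dotted import path follows the wheel layout shown in this diff.

    import numpy as np
    from biomedisa.biomedisa_features.PredictDataGeneratorCrop import PredictDataGeneratorCrop

    layers = np.zeros((100, 256, 256, 3), dtype=np.uint8)          # stacked RGB slices
    gen = PredictDataGeneratorCrop(layers, list_IDs=list(range(100)),
                                   batch_size=4, dim=(256, 256), n_channels=3)
    print(len(gen), gen[0].shape)                                   # 25 batches of shape (4, 256, 256, 3)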
biomedisa/biomedisa_features/__init__.py ADDED
File without changes