small-fish-gui 1.3.4.tar.gz → 1.4.0.tar.gz
This diff shows the content of publicly available package versions as released to one of the supported registries. It is provided for informational purposes only and reflects the changes between package versions as they appear in their public registries.
- {small_fish_gui-1.3.4 → small_fish_gui-1.4.0}/PKG-INFO +1 -1
- {small_fish_gui-1.3.4 → small_fish_gui-1.4.0}/pyproject.toml +1 -1
- {small_fish_gui-1.3.4 → small_fish_gui-1.4.0}/src/small_fish_gui/__init__.py +1 -1
- small_fish_gui-1.4.0/src/small_fish_gui/batch/__init__.py +5 -0
- small_fish_gui-1.4.0/src/small_fish_gui/batch/input.py +62 -0
- small_fish_gui-1.4.0/src/small_fish_gui/batch/integrity.py +158 -0
- small_fish_gui-1.4.0/src/small_fish_gui/batch/output.py +0 -0
- small_fish_gui-1.4.0/src/small_fish_gui/batch/pipeline.py +218 -0
- small_fish_gui-1.4.0/src/small_fish_gui/batch/prompt.py +426 -0
- small_fish_gui-1.4.0/src/small_fish_gui/batch/test.py +10 -0
- small_fish_gui-1.4.0/src/small_fish_gui/batch/update.py +132 -0
- small_fish_gui-1.4.0/src/small_fish_gui/batch/utils.py +66 -0
- small_fish_gui-1.4.0/src/small_fish_gui/batch/values.py +3 -0
- small_fish_gui-1.4.0/src/small_fish_gui/batch/values.txt +65 -0
- {small_fish_gui-1.3.4 → small_fish_gui-1.4.0}/src/small_fish_gui/gui/__init__.py +1 -2
- small_fish_gui-1.4.0/src/small_fish_gui/gui/animation.py +39 -0
- small_fish_gui-1.4.0/src/small_fish_gui/gui/layout.py +310 -0
- {small_fish_gui-1.3.4 → small_fish_gui-1.4.0}/src/small_fish_gui/gui/prompts.py +24 -26
- {small_fish_gui-1.3.4 → small_fish_gui-1.4.0}/src/small_fish_gui/interface/output.py +8 -4
- small_fish_gui-1.4.0/src/small_fish_gui/pipeline/__init__.py +21 -0
- {small_fish_gui-1.3.4 → small_fish_gui-1.4.0}/src/small_fish_gui/pipeline/_preprocess.py +72 -19
- {small_fish_gui-1.3.4 → small_fish_gui-1.4.0}/src/small_fish_gui/pipeline/_segmentation.py +37 -1
- {small_fish_gui-1.3.4 → small_fish_gui-1.4.0}/src/small_fish_gui/pipeline/detection.py +72 -8
- {small_fish_gui-1.3.4 → small_fish_gui-1.4.0}/src/small_fish_gui/pipeline/main.py +7 -3
- {small_fish_gui-1.3.4 → small_fish_gui-1.4.0}/src/small_fish_gui/utils.py +6 -1
- small_fish_gui-1.3.4/src/small_fish_gui/gui/animation.py +0 -30
- small_fish_gui-1.3.4/src/small_fish_gui/gui/layout.py +0 -184
- small_fish_gui-1.3.4/src/small_fish_gui/gui/test.py +0 -4
- {small_fish_gui-1.3.4 → small_fish_gui-1.4.0}/LICENSE +0 -0
- {small_fish_gui-1.3.4 → small_fish_gui-1.4.0}/README.md +0 -0
- {small_fish_gui-1.3.4 → small_fish_gui-1.4.0}/src/small_fish_gui/.github/workflows/python-publish.yml +0 -0
- {small_fish_gui-1.3.4 → small_fish_gui-1.4.0}/src/small_fish_gui/LICENSE +0 -0
- {small_fish_gui-1.3.4 → small_fish_gui-1.4.0}/src/small_fish_gui/README.md +0 -0
- {small_fish_gui-1.3.4 → small_fish_gui-1.4.0}/src/small_fish_gui/Segmentation example.jpg +0 -0
- {small_fish_gui-1.3.4 → small_fish_gui-1.4.0}/src/small_fish_gui/__main__.py +0 -0
- {small_fish_gui-1.3.4 → small_fish_gui-1.4.0}/src/small_fish_gui/gui/general_help_screenshot.png +0 -0
- {small_fish_gui-1.3.4 → small_fish_gui-1.4.0}/src/small_fish_gui/gui/help_module.py +0 -0
- {small_fish_gui-1.3.4 → small_fish_gui-1.4.0}/src/small_fish_gui/gui/mapping_help_screenshot.png +0 -0
- {small_fish_gui-1.3.4 → small_fish_gui-1.4.0}/src/small_fish_gui/gui/segmentation_help_screenshot.png +0 -0
- {small_fish_gui-1.3.4 → small_fish_gui-1.4.0}/src/small_fish_gui/interface/__init__.py +0 -0
- {small_fish_gui-1.3.4 → small_fish_gui-1.4.0}/src/small_fish_gui/interface/image.py +0 -0
- {small_fish_gui-1.3.4 → small_fish_gui-1.4.0}/src/small_fish_gui/interface/parameters.py +0 -0
- {small_fish_gui-1.3.4 → small_fish_gui-1.4.0}/src/small_fish_gui/interface/testing.py +0 -0
- {small_fish_gui-1.3.4 → small_fish_gui-1.4.0}/src/small_fish_gui/napari_detection_example.png +0 -0
- {small_fish_gui-1.3.4 → small_fish_gui-1.4.0}/src/small_fish_gui/pipeline/_colocalisation.py +0 -0
- {small_fish_gui-1.3.4 → small_fish_gui-1.4.0}/src/small_fish_gui/pipeline/_custom_errors.py +0 -0
- {small_fish_gui-1.3.4 → small_fish_gui-1.4.0}/src/small_fish_gui/pipeline/_napari_wrapper.py +0 -0
- {small_fish_gui-1.3.4 → small_fish_gui-1.4.0}/src/small_fish_gui/pipeline/_signaltonoise.py +0 -0
- {small_fish_gui-1.3.4 → small_fish_gui-1.4.0}/src/small_fish_gui/pipeline/actions.py +0 -0
- {small_fish_gui-1.3.4 → small_fish_gui-1.4.0}/src/small_fish_gui/pipeline/spots.py +0 -0
- {small_fish_gui-1.3.4 → small_fish_gui-1.4.0}/src/small_fish_gui/pipeline/test.py +0 -0
- {small_fish_gui-1.3.4 → small_fish_gui-1.4.0}/src/small_fish_gui/requirements.txt +0 -0

{small_fish_gui-1.3.4 → small_fish_gui-1.4.0}/PKG-INFO

@@ -1,6 +1,6 @@
 Metadata-Version: 2.3
 Name: small_fish_gui
-Version: 1.3.4
+Version: 1.4.0
 Summary: Small Fish is a python application for the analysis of smFish images. It provides a ready to use graphical interface to combine famous python packages for cell analysis without any need for coding.
 Project-URL: Homepage, https://github.com/2Echoes/small_fish
 Project-URL: Issues, https://github.com/2Echoes/small_fish/issues

small_fish_gui-1.4.0/src/small_fish_gui/batch/input.py

@@ -0,0 +1,62 @@
+"""
+Submodule handling handling files and filenames in batch mode.
+"""
+
+import os
+import bigfish.stack as stack
+import czifile as czi
+import numpy as np
+from .integrity import check_file
+
+def open_image(filename:str) :
+
+    if filename.endswith('.czi') :
+        image = czi.imread(filename)
+    else :
+        image = stack.read_image(filename)
+
+    image = np.squeeze(image)
+
+    return image
+
+def get_images(filename:str) :
+    """returns filename if is image else return None"""
+
+    supported_types = ('.tiff', '.tif', '.png', '.czi')
+    if filename.endswith(supported_types) :
+        return [filename]
+    else :
+        return None
+
+def get_files(path) :
+
+    filelist = os.listdir(path)
+    filelist = list(map(get_images,filelist))
+
+    while None in filelist : filelist.remove(None)
+
+    return filelist
+
+def extract_files(filenames: list) :
+    return sum(filenames,[])
+
+def load(
+        batch_folder:str,
+) :
+    if not os.path.isdir(batch_folder) :
+        print("Can't open {0}".format(batch_folder))
+        files_values = [[]]
+        last_shape = None
+        dim_number = 0
+    else :
+        files_values = get_files(batch_folder)
+        if len(files_values) == 0 :
+            last_shape = None
+            dim_number = 0
+        else :
+            first_filename = files_values[0][0]
+            last_shape = check_file(batch_folder + '/' + first_filename)
+            dim_number = len(last_shape)
+
+
+    return files_values, last_shape, dim_number

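For readers skimming the diff: the new input helpers follow a wrap-then-flatten pattern. `get_images()` returns `[filename]` for supported extensions (and `None` otherwise), `get_files()` applies it to a directory listing and drops the `None` entries, and `extract_files()` flattens the nested lists with `sum(filenames, [])`. The sketch below reproduces that pattern with the standard library only; it is an illustrative stand-in, not part of the package, and the current-directory path is just a placeholder.

```python
import os

# Extensions accepted by get_images() in the module above.
SUPPORTED_TYPES = ('.tiff', '.tif', '.png', '.czi')

def list_supported_images(path: str) -> list:
    """Keep only supported image filenames, mimicking get_files() + extract_files()."""
    # get_files() wraps each accepted name in a single-element list and drops the rest...
    wrapped = [[name] for name in os.listdir(path) if name.endswith(SUPPORTED_TYPES)]
    # ...and extract_files() flattens the nested lists with sum(filenames, []).
    return sum(wrapped, [])

if __name__ == "__main__":
    print(list_supported_images("."))   # e.g. ['image_1.tif', 'image_2.czi']
```
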
small_fish_gui-1.4.0/src/small_fish_gui/batch/integrity.py

@@ -0,0 +1,158 @@
+"""
+Submodule handling all parameters check, asserting functions and pipeline will be able to run.
+"""
+
+import os
+import czifile as czi
+import bigfish.stack as stack
+import numpy as np
+import PySimpleGUI as sg
+
+from ..pipeline._preprocess import check_integrity, convert_parameters_types, ParameterInputError, _check_segmentation_parameters
+from ..pipeline._segmentation import _cast_segmentation_parameters
+
+def check_file(filename:str) :
+
+    if filename.endswith('.czi') :
+        image = czi.imread(filename)
+    else :
+        image = stack.read_image(filename)
+
+    image = np.squeeze(image)
+
+    return image.shape
+
+def sanity_check(
+        filename_list: list,
+        batch_folder : str,
+        window : sg.Window,
+        progress_bar: sg.ProgressBar,
+) :
+
+    filenumber = len(filename_list)
+    if filenumber == 0 :
+        print("No file to check")
+        progress_bar.update(current_count= 0, bar_color=('gray','gray'))
+        return None
+    else :
+        print("{0} files to check".format(filenumber))
+        progress_bar.update(current_count=0, max= filenumber)
+        ref_shape = check_file(batch_folder + '/' + filename_list[0])
+
+    print("Starting sanity check. This could take some time...")
+    for i, file in enumerate(filename_list) :
+        progress_bar.update(current_count= i+1, bar_color=('green','gray'))
+        shape = check_file(batch_folder + '/' + file)
+
+        if len(shape) != len(ref_shape) : #then dimension missmatch
+            print("Different number of dimensions found : {0}, {1}".format(len(ref_shape), len(shape)))
+            progress_bar.update(current_count=filenumber, bar_color=('red','black'))
+            window= window.refresh()
+            break
+
+        window= window.refresh()
+
+    print("Sanity check completed.")
+    return None if len(shape) != len(ref_shape) else shape
+
+def check_channel_map_integrity(
+        maping:dict,
+        shape: tuple,
+        expected_dim : int
+) :
+
+    #Check integrity
+    channels_values = np.array(list(maping.values()), dtype= int)
+    total_channels = len(maping)
+    unique_channel = len(np.unique(channels_values))
+    res= True
+
+    if expected_dim != total_channels :
+        sg.popup("Image has {0} dimensions but {1} were mapped.".format(expected_dim, total_channels))
+        res = False
+    if total_channels != unique_channel :
+        sg.popup("{0} channel(s) are not uniquely mapped.".format(total_channels - unique_channel))
+        res = False
+    if not all(channels_values < len(shape)):
+        sg.popup("Channels values out of range for image dimensions.\nPlease select dimensions from {0}".format(list(range(len(shape)))))
+        res = False
+
+    return res
+
+def check_segmentation_parameters(
+        values,
+        shape,
+        is_multichannel,
+) :
+    values = _cast_segmentation_parameters(values=values)
+    try :
+        _check_segmentation_parameters(
+            user_parameters=values,
+            shape=shape,
+            is_multichannel=is_multichannel
+        )
+    except ParameterInputError as e:
+        segmentation_is_ok = False
+        sg.popup_error(e)
+    else :
+        segmentation_is_ok = True
+
+    return segmentation_is_ok, values
+
+def check_detection_parameters(
+        values,
+        do_dense_region_deconvolution,
+        do_clustering,
+        is_multichannel,
+        is_3D,
+        map,
+        shape
+) :
+
+    values['dim'] = 3 if is_3D else 2
+    values = convert_parameters_types(values)
+    try :
+        check_integrity(
+            values=values,
+            do_dense_region_deconvolution=do_dense_region_deconvolution,
+            do_clustering=do_clustering,
+            multichannel=is_multichannel,
+            segmentation_done=None,
+            map=map,
+            shape=shape
+        )
+    except ParameterInputError as e:
+        detection_is_ok = False
+        sg.popup_error(e)
+    else :
+        detection_is_ok = True
+
+    return detection_is_ok, values
+
+def check_output_parameters(values) :
+    is_output_ok = True
+
+    #Output folder
+    output_folder = values.get('output_folder')
+    if not os.path.isdir(output_folder) :
+        sg.popup("Incorrect output folder selected")
+        is_output_ok=False
+
+    #Batch name
+    original_name = values['batch_name']
+    loop=1
+    values['batch_name'] = values['batch_name'].replace(' ','_')
+    while os.path.isdir(output_folder + '/' + values['batch_name']) :
+        values['batch_name'] = original_name + '_{0}'.format(loop)
+        loop+=1
+    if len(values['batch_name']) == 0 : is_output_ok = False
+
+    #extension
+    if values['csv'] or values['xlsx'] or values['feather'] :
+        pass
+    else :
+        sg.popup("Select at least one data format for output.")
+        is_output_ok=False
+
+
+    return is_output_ok, values

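`check_channel_map_integrity()` performs three checks on the user's dimension map: the number of mapped entries must equal the expected dimensionality, every entry must be unique, and every mapped index must fit within the image shape; failures are reported through `sg.popup`. Below is a minimal, GUI-free sketch of the same checks (the mapping, shape, and function name are made-up examples, not from the package) that returns a boolean instead of opening dialogs.

```python
import numpy as np

def map_is_consistent(mapping: dict, shape: tuple, expected_dim: int) -> bool:
    """Return True when the channel map passes the three integrity checks."""
    values = np.array(list(mapping.values()), dtype=int)
    ok = True
    if expected_dim != len(mapping):            # every expected dimension must be mapped
        ok = False
    if len(np.unique(values)) != len(mapping):  # no axis may be mapped twice
        ok = False
    if not np.all(values < len(shape)):         # mapped indices must exist in the image shape
        ok = False
    return ok

# A 4-D (c, z, y, x) stack with each axis mapped once passes; a duplicate mapping fails.
print(map_is_consistent({'c': 0, 'z': 1, 'y': 2, 'x': 3}, shape=(3, 20, 512, 512), expected_dim=4))  # True
print(map_is_consistent({'c': 0, 'z': 0, 'y': 2, 'x': 3}, shape=(3, 20, 512, 512), expected_dim=4))  # False
```
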
small_fish_gui-1.4.0/src/small_fish_gui/batch/output.py

File without changes (new empty file).

small_fish_gui-1.4.0/src/small_fish_gui/batch/pipeline.py

@@ -0,0 +1,218 @@
+"""
+Submodule keeping necessary calls from main pipeline for batch processing.
+"""
+
+import os
+import pandas as pd
+import PySimpleGUI as sg
+
+from .input import open_image
+from ..interface import write_results
+from ..pipeline import reorder_shape, reorder_image_stack, prepare_image_detection
+from ..pipeline import cell_segmentation, launch_detection, launch_features_computation
+from ..pipeline import launch_spots_extraction
+from ..pipeline import get_nucleus_signal
+from ..pipeline import _cast_segmentation_parameters, convert_parameters_types
+from ..pipeline import plot_segmentation, output_spot_tiffvisual
+from ..utils import get_datetime
+
+def window_print(window: sg.Window, *args) :
+    print(*args)
+    window.refresh()
+
+def batch_pipeline(
+        batch_window : sg.Window,
+        batch_progress_bar : sg.ProgressBar,
+        progress_count : sg.Text,
+        parameters : dict,
+        filenames_list : list,
+        do_segmentation : bool,
+        map : dict,
+        results_df : pd.DataFrame,
+        cell_results_df : pd.DataFrame,
+        is_3D,
+        last_acquisition_id=0,
+) :
+
+    #Extracting parameters
+    input_path = parameters['Batch_folder']
+    output_path = parameters['output_folder']
+    batch_name = parameters['batch_name']
+    time = '_' + get_datetime()
+
+    #Preparing folder
+    window_print(batch_window,"Creating folders for output...")
+    main_dir = output_path + "/" + batch_name + time + "/"
+    os.makedirs(main_dir + "results/", exist_ok=True)
+    if parameters['save segmentation'] : os.makedirs(main_dir + "segmentation/", exist_ok=True)
+    if parameters['save detection'] : os.makedirs(main_dir + "detection/", exist_ok=True)
+    if parameters['extract spots'] : os.makedirs(main_dir + "results/spots_extraction", exist_ok=True)
+
+    #Setting spot detection dimension
+    parameters['dim'] = 3 if is_3D else 2
+
+    #Pipeline loop
+    window_print(batch_window,"Launching batch analysis...")
+    batch_progress_bar.update(max=len(filenames_list))
+    filenames_list.sort()
+    for acquisition_id, file in enumerate(filenames_list) :
+
+        #GUI
+        window_print(batch_window,"\nNext file : {0}".format(file))
+        batch_progress_bar.update(current_count= acquisition_id, max= len(filenames_list))
+        progress_count.update(value=str(acquisition_id))
+        batch_window = batch_window.refresh()
+
+        #0. Open image
+        image = open_image(input_path + '/' + file)
+        parameters['image'] = image
+        parameters['filename'] = file
+        for key_to_clean in [0,2] :
+            if key_to_clean in parameters : del parameters[key_to_clean]
+
+        #1. Re-order shape
+        shape = image.shape
+        parameters['shape'] = shape
+        parameters['reordered_shape'] = reorder_shape(shape, map=map)
+
+        #2. Segmentation (opt)
+        if do_segmentation :
+            window_print(batch_window,"Segmenting cells...")
+            im_seg = reorder_image_stack(map, parameters)
+            parameters = _cast_segmentation_parameters(parameters)
+            cytoplasm_label, nucleus_label = cell_segmentation(
+                im_seg,
+                cyto_model_name= parameters['cyto_model_name'],
+                cyto_diameter= parameters['cytoplasm diameter'],
+                nucleus_model_name= parameters['nucleus_model_name'],
+                nucleus_diameter= parameters['nucleus diameter'],
+                channels=[parameters['cytoplasm channel'], parameters['nucleus channel']],
+                do_only_nuc=parameters['Segment only nuclei']
+            )
+
+            if cytoplasm_label.max() == 0 : #No cell segmented
+                window_print(batch_window,"No cell was segmented, computing next image.")
+                continue
+            else :
+                window_print(batch_window, "{0} cells segmented.".format(cytoplasm_label.max()))
+
+            if parameters['save segmentation'] :
+                plot_segmentation(
+                    cyto_image=im_seg[parameters['cytoplasm channel']],
+                    cyto_label= cytoplasm_label,
+                    nuc_image= im_seg[parameters['nucleus channel']],
+                    nuc_label=nucleus_label,
+                    path= main_dir + "segmentation/" + file,
+                    do_only_nuc= parameters['Segment only nuclei'],
+                )
+
+        else :
+            cytoplasm_label, nucleus_label = None,None
+
+        #3. Detection, deconvolution, clusterisation
+        window_print(batch_window,"Detecting spots...")
+        parameters = convert_parameters_types(parameters)
+        image, other_image = prepare_image_detection(map, parameters)
+        nucleus_signal = get_nucleus_signal(image, other_image, parameters)
+        try : # Catch error raised if user enter a spot size too small compare to voxel size
+            parameters, frame_result, spots, clusters = launch_detection(
+                image,
+                other_image,
+                parameters,
+                cell_label=cytoplasm_label,
+                nucleus_label=nucleus_label,
+                hide_loading=True,
+            )
+
+        except ValueError as error :
+            if "The array should have an upper bound of 1" in str(error) :
+                window_print(batch_window,"Spot size too small for current voxel size.")
+                continue
+            else :
+                raise(error)
+
+        if parameters['save detection'] :
+            if parameters['Cluster computation'] : spots_list = [spots, clusters[:,:parameters['dim']]]
+            else : spots_list = [spots]
+            output_spot_tiffvisual(
+                image,
+                spots_list= spots_list,
+                dot_size=2,
+                path_output= main_dir + "detection/" + file + "_spot_detection.tiff"
+            )
+
+        #4. Spots extraction
+        window_print(batch_window,"Extracting spots : ")
+        if parameters['extract spots'] :
+
+            #Setting parameter for call to lauch spot extraction
+            #Only spots have one file per image to avoir memory overload
+            parameters['do_spots_excel'] = parameters['xlsx']
+            parameters['do_spots_csv'] = parameters['csv']
+            parameters['do_spots_feather'] = parameters['feather']
+            parameters['spots_filename'] = "spots_extractions_{0}".format(file)
+            parameters['spots_extraction_folder'] = main_dir + "results/spots_extraction/"
+
+            launch_spots_extraction(
+                acquisition_id=acquisition_id + last_acquisition_id,
+                user_parameters=parameters,
+                image=image,
+                spots=spots,
+                nucleus_label= nucleus_label,
+                cell_label= cytoplasm_label,
+            )
+
+        #5. Features computation
+        window_print(batch_window,"computing features...")
+        new_results_df, new_cell_results_df = launch_features_computation(
+            acquisition_id=acquisition_id + last_acquisition_id,
+            image=image,
+            nucleus_signal = nucleus_signal,
+            spots=spots,
+            clusters=clusters,
+            nucleus_label = nucleus_label,
+            cell_label= cytoplasm_label,
+            user_parameters=parameters,
+            frame_results=frame_result,
+        )
+
+        results_df = pd.concat([
+            results_df.reset_index(drop=True), new_results_df.reset_index(drop=True)
+        ], axis=0)
+
+        cell_results_df = pd.concat([
+            cell_results_df.reset_index(drop=True), new_cell_results_df.reset_index(drop=True)
+        ], axis=0)
+
+
+        #6. Saving results
+        window_print(batch_window,"saving image_results...")
+        #1 file per batch + 1 file per batch if segmentation
+        acquisition_success = write_results(
+            results_df,
+            path= main_dir + "results/",
+            filename=batch_name,
+            do_excel= parameters["xlsx"],
+            do_feather= parameters["feather"],
+            do_csv= parameters["csv"],
+            overwrite=True,
+        )
+
+        if do_segmentation :
+            cell_success = write_results(
+                cell_results_df,
+                path= main_dir + "results/",
+                filename=batch_name + '_cell_result',
+                do_excel= parameters["xlsx"],
+                do_feather= parameters["feather"],
+                do_csv= parameters["csv"],
+                overwrite=True,
+            )
+
+        window_print(batch_window,"Sucessfully saved.")
+
+
+    batch_progress_bar.update(current_count= acquisition_id+1, max= len(filenames_list))
+    progress_count.update(value=str(acquisition_id+1))
+    batch_window = batch_window.refresh()
+    return results_df, cell_results_df, acquisition_id