deepliif-1.1.10-py3-none-any.whl → deepliif-1.1.12-py3-none-any.whl
This diff shows the content of publicly available package versions as released to a supported registry. It is provided for informational purposes only and reflects the changes between the two versions as they appear in the public registry.
- cli.py +354 -67
- deepliif/data/__init__.py +7 -7
- deepliif/data/aligned_dataset.py +2 -3
- deepliif/data/unaligned_dataset.py +38 -19
- deepliif/models/CycleGAN_model.py +282 -0
- deepliif/models/DeepLIIFExt_model.py +47 -25
- deepliif/models/DeepLIIF_model.py +69 -19
- deepliif/models/SDG_model.py +57 -26
- deepliif/models/__init__ - run_dask_multi dev.py +943 -0
- deepliif/models/__init__ - timings.py +764 -0
- deepliif/models/__init__.py +354 -232
- deepliif/models/att_unet.py +199 -0
- deepliif/models/base_model.py +32 -8
- deepliif/models/networks.py +108 -34
- deepliif/options/__init__.py +49 -5
- deepliif/postprocessing.py +1034 -227
- deepliif/postprocessing__OLD__DELETE.py +440 -0
- deepliif/util/__init__.py +290 -64
- deepliif/util/visualizer.py +106 -19
- {deepliif-1.1.10.dist-info → deepliif-1.1.12.dist-info}/METADATA +81 -20
- deepliif-1.1.12.dist-info/RECORD +40 -0
- deepliif-1.1.10.dist-info/RECORD +0 -35
- {deepliif-1.1.10.dist-info → deepliif-1.1.12.dist-info}/LICENSE.md +0 -0
- {deepliif-1.1.10.dist-info → deepliif-1.1.12.dist-info}/WHEEL +0 -0
- {deepliif-1.1.10.dist-info → deepliif-1.1.12.dist-info}/entry_points.txt +0 -0
- {deepliif-1.1.10.dist-info → deepliif-1.1.12.dist-info}/top_level.txt +0 -0
deepliif/options/__init__.py
CHANGED
```diff
@@ -3,6 +3,7 @@
 from pathlib import Path
 import os
 from ..util.util import mkdirs
+import re

 def read_model_params(file_addr):
     with open(file_addr) as f:
```
```diff
@@ -11,8 +12,27 @@ def read_model_params(file_addr):
     for line in lines:
         if ':' in line:
             key = line.split(':')[0].strip()
-            val = line.split(':')[1]
-
+            val = ':'.join(line.split(':')[1:])
+
+            # drop default value
+            str_default = [x for x in re.findall(r"\[.+?\]", val) if x.startswith('[default')]
+            if len(str_default) > 1:
+                raise Exception('train_opt.txt should not contain multiple possible default keys in one line:',str_default)
+            elif len(str_default) == 1:
+                str_default = str_default[0]
+                val = val.replace(str_default,'')
+            val = val.strip()
+
+            # val = line.split(':')[1].split('[')[0].strip()
+            try:
+                param_dict[key] = eval(val)
+                #print(f'value of {key} is converted to {type(param_dict[key]).__name__}')
+            except:
+                param_dict[key] = val
+
+            # if isinstance(param_dict[key],list):
+            #     param_dict[key] = param_dict[key][0]
+
     return param_dict

 class Options:
```
```diff
@@ -33,25 +53,36 @@ class Options:
             except:
                 setattr(self,k,v)

+        self.optimizer = 'adam' if not hasattr(self,'optimizer') else self.optimizer
+
+
         if mode == 'train':
             self.is_train = True
-            self
-
+            if hasattr(self,'net_g') and not hasattr(self,'netG'):
+                self.netG = self.net_g #'resnet_9blocks'
+            if hasattr(self,'net_d') and not hasattr(self,'netD'):
+                self.netD = self.net_d #'n_layers'
             self.n_layers_D = 4
             self.lambda_L1 = 100
             self.lambda_feat = 100
+
         else:
             self.phase = 'test'
             self.is_train = False
             self.input_nc = 3
             self.output_nc = 3
             self.ngf = 64
-            self.norm = 'batch'
+            self.norm = 'batch' if not hasattr(self,'norm') else self.norm
             self.use_dropout = True
             #self.padding_type = 'zero' # some models use reflect etc. which adds additional randomness
             #self.padding = 'zero'
             self.use_dropout = False #if self.no_dropout == 'True' else True

+        if self.model in ['CycleGAN']:
+            # this is only used for inference (whether to load generators Bs instead of As)
+            # and can be configured in the inference function
+            self.BtoA = False if not hasattr(self,'BtoA') else self.BtoA
+
         # reset checkpoints_dir and name based on the model directory
         # when base model is initialized: self.save_dir = os.path.join(opt.checkpoints_dir, opt.name)
         model_dir = Path(path_file).parent
```
```diff
@@ -96,6 +127,19 @@ class Options:
             self.scale_size = 1024
         else:
             raise Exception(f'scale_size cannot be automatically determined for {opt.model}')
+
+        # weights of the modalities used to generate segmentation mask
+        if not hasattr(self,'seg_weights'):
+            if self.model == 'DeepLIIF':
+                # self.seg_weights = [0.2, 0.2, 0.2, 0.2, 0.2]
+                self.seg_weights = [0.25, 0.15, 0.25, 0.1, 0.25]
+                # self.seg_weights = [0.25, 0.25, 0.25, 0.0, 0.25]
+            else:
+                self.seg_weights = [1 / self.modalities_no] * self.modalities_no
+
+        # weights of the modalities used to calculate the final loss
+        self.loss_G_weights = [1 / self.modalities_no] * self.modalities_no if not hasattr(self,'loss_G_weights') else self.loss_G_weights
+        self.loss_D_weights = [1 / self.modalities_no] * self.modalities_no if not hasattr(self,'loss_D_weights') else self.loss_D_weights
```
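Both weight vectors are per-modality and should sum to 1; the uniform fallback guarantees that by construction, and the hand-tuned DeepLIIF values do as well. A quick sanity check; the `modalities_no` value is illustrative:

```python
# Hand-tuned DeepLIIF segmentation weights from the diff above:
seg_weights = [0.25, 0.15, 0.25, 0.1, 0.25]
assert abs(sum(seg_weights) - 1.0) < 1e-9  # 5 modalities, sums to 1

# Uniform fallback used for the other models, with an illustrative
# modalities_no of 4:
modalities_no = 4
assert [1 / modalities_no] * modalities_no == [0.25, 0.25, 0.25, 0.25]
```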