SURE-tools 2.1.31__tar.gz → 2.1.32__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of SURE-tools might be problematic. Click here for more details.
- {sure_tools-2.1.31 → sure_tools-2.1.32}/PKG-INFO +1 -1
- {sure_tools-2.1.31 → sure_tools-2.1.32}/SURE/PerturbFlow.py +0 -1
- {sure_tools-2.1.31 → sure_tools-2.1.32}/SURE/SURE.py +22 -9
- {sure_tools-2.1.31 → sure_tools-2.1.32}/SURE_tools.egg-info/PKG-INFO +1 -1
- {sure_tools-2.1.31 → sure_tools-2.1.32}/setup.py +1 -1
- {sure_tools-2.1.31 → sure_tools-2.1.32}/LICENSE +0 -0
- {sure_tools-2.1.31 → sure_tools-2.1.32}/README.md +0 -0
- {sure_tools-2.1.31 → sure_tools-2.1.32}/SURE/__init__.py +0 -0
- {sure_tools-2.1.31 → sure_tools-2.1.32}/SURE/assembly/__init__.py +0 -0
- {sure_tools-2.1.31 → sure_tools-2.1.32}/SURE/assembly/assembly.py +0 -0
- {sure_tools-2.1.31 → sure_tools-2.1.32}/SURE/assembly/atlas.py +0 -0
- {sure_tools-2.1.31 → sure_tools-2.1.32}/SURE/atac/__init__.py +0 -0
- {sure_tools-2.1.31 → sure_tools-2.1.32}/SURE/atac/utils.py +0 -0
- {sure_tools-2.1.31 → sure_tools-2.1.32}/SURE/codebook/__init__.py +0 -0
- {sure_tools-2.1.31 → sure_tools-2.1.32}/SURE/codebook/codebook.py +0 -0
- {sure_tools-2.1.31 → sure_tools-2.1.32}/SURE/flow/__init__.py +0 -0
- {sure_tools-2.1.31 → sure_tools-2.1.32}/SURE/flow/flow_stats.py +0 -0
- {sure_tools-2.1.31 → sure_tools-2.1.32}/SURE/flow/plot_quiver.py +0 -0
- {sure_tools-2.1.31 → sure_tools-2.1.32}/SURE/perturb/__init__.py +0 -0
- {sure_tools-2.1.31 → sure_tools-2.1.32}/SURE/perturb/perturb.py +0 -0
- {sure_tools-2.1.31 → sure_tools-2.1.32}/SURE/utils/__init__.py +0 -0
- {sure_tools-2.1.31 → sure_tools-2.1.32}/SURE/utils/custom_mlp.py +0 -0
- {sure_tools-2.1.31 → sure_tools-2.1.32}/SURE/utils/queue.py +0 -0
- {sure_tools-2.1.31 → sure_tools-2.1.32}/SURE/utils/utils.py +0 -0
- {sure_tools-2.1.31 → sure_tools-2.1.32}/SURE_tools.egg-info/SOURCES.txt +0 -0
- {sure_tools-2.1.31 → sure_tools-2.1.32}/SURE_tools.egg-info/dependency_links.txt +0 -0
- {sure_tools-2.1.31 → sure_tools-2.1.32}/SURE_tools.egg-info/entry_points.txt +0 -0
- {sure_tools-2.1.31 → sure_tools-2.1.32}/SURE_tools.egg-info/requires.txt +0 -0
- {sure_tools-2.1.31 → sure_tools-2.1.32}/SURE_tools.egg-info/top_level.txt +0 -0
- {sure_tools-2.1.31 → sure_tools-2.1.32}/setup.cfg +0 -0
|
@@ -111,6 +111,7 @@ class SURE(nn.Module):
                  config_enum: str = 'parallel',
                  use_cuda: bool = False,
                  seed: int = 42,
+                 zero_bias: bool = True,
                  dtype = torch.float32, # type: ignore
                  ):
         super().__init__()
@@ -134,6 +135,7 @@ class SURE(nn.Module):
         self.post_layer_fct = post_layer_fct
         self.post_act_fct = post_act_fct
         self.hidden_layer_activation = hidden_layer_activation
+        self.use_bias = not zero_bias

         self.codebook_weights = None

@@ -232,15 +234,26 @@ class SURE(nn.Module):
         )

         if self.cell_factor_size>0:
-            self.…  [old lines 235–243 removed; their content is truncated in this extraction — only "self." survives]
+            if self.use_bias:
+                self.cell_factor_effect = MLP(
+                    [self.latent_dim + self.cell_factor_size] + self.decoder_hidden_layers + [self.latent_dim],
+                    activation=activate_fct,
+                    output_activation=None,
+                    post_layer_fct=post_layer_fct,
+                    post_act_fct=post_act_fct,
+                    allow_broadcast=self.allow_broadcast,
+                    use_cuda=self.use_cuda,
+                )
+            else:
+                self.cell_factor_effect = ZeroBiasMLP(
+                    [self.latent_dim + self.cell_factor_size] + self.decoder_hidden_layers + [self.latent_dim],
+                    activation=activate_fct,
+                    output_activation=None,
+                    post_layer_fct=post_layer_fct,
+                    post_act_fct=post_act_fct,
+                    allow_broadcast=self.allow_broadcast,
+                    use_cuda=self.use_cuda,
+                )

         self.decoder_concentrate = MLP(
             [self.latent_dim] + self.decoder_hidden_layers + [self.input_size],
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|