broccoli-ml 0.8.0__tar.gz → 0.9.0__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- {broccoli_ml-0.8.0 → broccoli_ml-0.9.0}/PKG-INFO +1 -1
- {broccoli_ml-0.8.0 → broccoli_ml-0.9.0}/broccoli/transformer.py +2 -1
- {broccoli_ml-0.8.0 → broccoli_ml-0.9.0}/broccoli/vit.py +3 -2
- {broccoli_ml-0.8.0 → broccoli_ml-0.9.0}/pyproject.toml +1 -1
- {broccoli_ml-0.8.0 → broccoli_ml-0.9.0}/LICENSE +0 -0
- {broccoli_ml-0.8.0 → broccoli_ml-0.9.0}/README.md +0 -0
- {broccoli_ml-0.8.0 → broccoli_ml-0.9.0}/broccoli/__init__.py +0 -0
- {broccoli_ml-0.8.0 → broccoli_ml-0.9.0}/broccoli/activation.py +0 -0
- {broccoli_ml-0.8.0 → broccoli_ml-0.9.0}/broccoli/assets/2025_resnet_imagenet_1k_pretrained_state_dict.pkl +0 -0
- {broccoli_ml-0.8.0 → broccoli_ml-0.9.0}/broccoli/assets/cifar100_eigenvectors_size_2.pt +0 -0
- {broccoli_ml-0.8.0 → broccoli_ml-0.9.0}/broccoli/assets/cifar100_eigenvectors_size_3.pt +0 -0
- {broccoli_ml-0.8.0 → broccoli_ml-0.9.0}/broccoli/cnn.py +0 -0
- {broccoli_ml-0.8.0 → broccoli_ml-0.9.0}/broccoli/eigenpatches.py +0 -0
- {broccoli_ml-0.8.0 → broccoli_ml-0.9.0}/broccoli/linear.py +0 -0
- {broccoli_ml-0.8.0 → broccoli_ml-0.9.0}/broccoli/rope.py +0 -0
- {broccoli_ml-0.8.0 → broccoli_ml-0.9.0}/broccoli/tensor.py +0 -0
- {broccoli_ml-0.8.0 → broccoli_ml-0.9.0}/broccoli/utils.py +0 -0
broccoli/transformer.py

@@ -233,6 +233,7 @@ class FeedforwardLayer(nn.Module):
         output_features,
         activation=nn.ReLU,
         activation_kwargs=None,
+        layer_norm=True,
         dropout=0.0,
         linear_module=nn.Linear,
     ):

@@ -247,7 +248,7 @@ class FeedforwardLayer(nn.Module):

         self.process = nn.Sequential(
             *[
-                nn.LayerNorm(input_features),
+                nn.LayerNorm(input_features) if layer_norm else nn.Identity(),
                 linear_module(
                     input_features,
                     (
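The net effect in transformer.py is a new `layer_norm` flag on the feedforward block, defaulting to the old behaviour and swapping the leading `nn.LayerNorm` for a no-op `nn.Identity` when disabled. A minimal, self-contained sketch of that pattern follows; the class name `FeedforwardSketch` and everything outside the visible hunks (activation handling, the tail of the block, the demo at the bottom) are illustrative assumptions, not broccoli-ml's actual source.

```python
# Illustrative sketch only: reproduces the layer_norm toggle visible in the
# hunks above. The overall block structure is an assumption.
import torch
from torch import nn


class FeedforwardSketch(nn.Module):
    def __init__(
        self,
        input_features,
        output_features,
        activation=nn.ReLU,
        activation_kwargs=None,
        layer_norm=True,  # new in 0.9.0: set False to skip the block's own LayerNorm
        dropout=0.0,
        linear_module=nn.Linear,
    ):
        super().__init__()
        activation_kwargs = activation_kwargs or {}
        self.process = nn.Sequential(
            # nn.Identity is a no-op, so disabling normalisation leaves the
            # rest of the Sequential (and its submodule indices) untouched.
            nn.LayerNorm(input_features) if layer_norm else nn.Identity(),
            linear_module(input_features, output_features),
            activation(**activation_kwargs),
            nn.Dropout(dropout),
        )

    def forward(self, x):
        return self.process(x)


if __name__ == "__main__":
    x = torch.randn(2, 8, 32)
    print(FeedforwardSketch(32, 32, layer_norm=False)(x).shape)  # torch.Size([2, 8, 32])
```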
broccoli/vit.py

@@ -261,6 +261,7 @@ class ViTEncoder(nn.Module):
             transformer_embedding_size,
             activation=transformer_activation,
             activation_kwargs=transformer_activation_kwargs,
+            layer_norm=False,  # rely on initial batch_norm
             dropout=transformer_mlp_dropout,
             linear_module=linear_module,
         )
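The `layer_norm=False` argument is commented "rely on initial batch_norm": the encoder assumes its token embeddings are already normalised before the feedforward blocks see them. A hedged, self-contained illustration of that composition is below; the batch-norm placement is an assumption, as the actual ViTEncoder wiring is not shown in this diff.

```python
# Assumed composition (not shown in this diff): a single BatchNorm over the
# embedding dimension normalises the token sequence up front, so the per-block
# feedforward LayerNorm can be disabled without leaving activations unnormalised.
import torch
from torch import nn

embedding_size = 32
tokens = torch.randn(4, 16, embedding_size)  # (batch, sequence, features)

# BatchNorm1d expects (batch, features, sequence), hence the transposes.
initial_norm = nn.BatchNorm1d(embedding_size)
normalised = initial_norm(tokens.transpose(1, 2)).transpose(1, 2)

# Feedforward block with its own normalisation switched off, mirroring
# layer_norm=False in the hunk above.
ffn = nn.Sequential(
    nn.Identity(),  # stands in for the disabled LayerNorm
    nn.Linear(embedding_size, embedding_size),
    nn.ReLU(),
    nn.Dropout(0.0),
)
print(ffn(normalised).shape)  # torch.Size([4, 16, 32])
```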
@@ -312,7 +313,7 @@ class CCT(nn.Module):
         input_size=(32, 32),
         cnn=True,
         cnn_in_channels=3,
-
+        cnn_out_channels=16,
         cnn_kernel_size=3,
         cnn_kernel_stride=1,
         cnn_padding="same",

@@ -365,7 +366,7 @@ class CCT(nn.Module):
         input_size=input_size,
         cnn=cnn,
         cnn_in_channels=cnn_in_channels,
-
+        cnn_out_channels=cnn_out_channels,
         cnn_kernel_size=cnn_kernel_size,
         cnn_kernel_stride=cnn_kernel_stride,
         cnn_padding=cnn_padding,
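Finally, a usage sketch for the new `cnn_out_channels` keyword on `CCT`. Only arguments visible in this diff are shown; the import path is inferred from the package layout, the default of 16 comes from the first CCT hunk, and any further required constructor arguments are omitted as unknown.

```python
# Usage sketch (assumptions: import path, and that the remaining CCT arguments
# have workable defaults). cnn_out_channels is an explicit keyword in 0.9.0 and
# is forwarded internally, as the second CCT hunk shows.
from broccoli.vit import CCT

model = CCT(
    input_size=(32, 32),
    cnn=True,
    cnn_in_channels=3,    # e.g. RGB input
    cnn_out_channels=16,  # new in 0.9.0, default 16
    cnn_kernel_size=3,
    cnn_kernel_stride=1,
    cnn_padding="same",
)
```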