broccoli-ml 0.38.0__tar.gz → 0.39.0__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- {broccoli_ml-0.38.0 → broccoli_ml-0.39.0}/PKG-INFO +1 -1
- {broccoli_ml-0.38.0 → broccoli_ml-0.39.0}/broccoli/linear.py +6 -3
- {broccoli_ml-0.38.0 → broccoli_ml-0.39.0}/pyproject.toml +1 -1
- {broccoli_ml-0.38.0 → broccoli_ml-0.39.0}/LICENSE +0 -0
- {broccoli_ml-0.38.0 → broccoli_ml-0.39.0}/README.md +0 -0
- {broccoli_ml-0.38.0 → broccoli_ml-0.39.0}/broccoli/__init__.py +0 -0
- {broccoli_ml-0.38.0 → broccoli_ml-0.39.0}/broccoli/activation.py +0 -0
- {broccoli_ml-0.38.0 → broccoli_ml-0.39.0}/broccoli/assets/2025_resnet_imagenet_1k_pretrained_state_dict.pkl +0 -0
- {broccoli_ml-0.38.0 → broccoli_ml-0.39.0}/broccoli/assets/cifar100_eigenvectors_size_2.pt +0 -0
- {broccoli_ml-0.38.0 → broccoli_ml-0.39.0}/broccoli/assets/cifar100_eigenvectors_size_3.pt +0 -0
- {broccoli_ml-0.38.0 → broccoli_ml-0.39.0}/broccoli/cnn.py +0 -0
- {broccoli_ml-0.38.0 → broccoli_ml-0.39.0}/broccoli/eigenpatches.py +0 -0
- {broccoli_ml-0.38.0 → broccoli_ml-0.39.0}/broccoli/rope.py +0 -0
- {broccoli_ml-0.38.0 → broccoli_ml-0.39.0}/broccoli/tensor.py +0 -0
- {broccoli_ml-0.38.0 → broccoli_ml-0.39.0}/broccoli/transformer.py +0 -0
- {broccoli_ml-0.38.0 → broccoli_ml-0.39.0}/broccoli/utils.py +0 -0
- {broccoli_ml-0.38.0 → broccoli_ml-0.39.0}/broccoli/vit.py +0 -0
@@ -34,7 +34,8 @@ class SpectralNormLinear(nn.Module):

     def reset_parameters(self) -> None:
         weights = torch.empty(self.out_features, self.in_features)
-        (removed line: content not captured in this diff view)
+        stdv = 1.0 / math.sqrt(self.in_features)
+        nn.init.uniform_(weights, a=-stdv, b=stdv)
         if self.use_bias:
             fan_in, _ = nn.init._calculate_fan_in_and_fan_out(weights)
             bound = 1 / math.sqrt(fan_in) if fan_in > 0 else 0
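In all three hunks the change is the same: the weight tensor is now filled uniformly on [-1/sqrt(in_features), +1/sqrt(in_features)]. A minimal standalone sketch of that pattern, with illustrative sizes that are not taken from the package:

    import math
    import torch
    import torch.nn as nn

    in_features, out_features = 64, 128         # illustrative sizes, not from broccoli
    weights = torch.empty(out_features, in_features)

    stdv = 1.0 / math.sqrt(in_features)         # bound shrinks as fan-in grows
    nn.init.uniform_(weights, a=-stdv, b=stdv)  # in-place uniform fill on [-stdv, stdv]

    assert weights.abs().max() <= stdv          # every entry lies within the bound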
@@ -77,7 +78,8 @@ class AnchoredLinear(nn.Module):

     def reset_parameters(self) -> None:
         weights = torch.empty(self.out_features, self.in_features)
-        (removed line: content not captured in this diff view)
+        stdv = 1.0 / math.sqrt(self.in_features)
+        nn.init.uniform_(weights, a=-stdv, b=stdv)
         if self.use_bias:
             fan_in, _ = nn.init._calculate_fan_in_and_fan_out(weights)
             bound = 1 / math.sqrt(fan_in) if fan_in > 0 else 0
@@ -120,7 +122,8 @@ class WeightNormedLinear(nn.Module):

     def reset_parameters(self) -> None:
         weights = torch.empty(self.out_features, self.in_features)
-        (removed line: content not captured in this diff view)
+        stdv = 1.0 / math.sqrt(self.in_features)
+        nn.init.uniform_(weights, a=-stdv, b=stdv)
         if self.use_bias:
             fan_in, _ = nn.init._calculate_fan_in_and_fan_out(weights)
             bound = 1 / math.sqrt(fan_in) if fan_in > 0 else 0
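Read together with the surrounding context lines, the patched reset_parameters in each of the three classes now follows the shape sketched below. The class here is a hypothetical stand-in: only the lines that appear in the hunks are certain, while the constructor, the bias fill, and what happens to the filled weights tensor afterwards are assumptions added only to make the sketch self-contained and runnable:

    import math
    import torch
    import torch.nn as nn

    class LinearInitSketch(nn.Module):
        """Hypothetical stand-in for the three broccoli linear variants."""

        def __init__(self, in_features: int, out_features: int, use_bias: bool = True):
            super().__init__()
            self.in_features = in_features
            self.out_features = out_features
            self.use_bias = use_bias
            # The bias parameter is assumed here; the diff only shows the use_bias branch.
            self.bias = nn.Parameter(torch.empty(out_features)) if use_bias else None
            self.reset_parameters()

        def reset_parameters(self) -> None:
            weights = torch.empty(self.out_features, self.in_features)
            stdv = 1.0 / math.sqrt(self.in_features)
            nn.init.uniform_(weights, a=-stdv, b=stdv)
            if self.use_bias:
                fan_in, _ = nn.init._calculate_fan_in_and_fan_out(weights)
                bound = 1 / math.sqrt(fan_in) if fan_in > 0 else 0
                # Not shown in the diff: filling the bias within [-bound, bound] is a
                # common pattern and an assumption here, not broccoli's confirmed code.
                nn.init.uniform_(self.bias, a=-bound, b=bound)
            # How broccoli stores `weights` afterwards (parameter, parametrisation,
            # anchor, etc.) lies outside the visible hunks and is omitted here.

Since fan_in for a weight of shape (out_features, in_features) is simply in_features, the bias bound computed from _calculate_fan_in_and_fan_out works out to the same value as stdv.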