broccoli-ml 0.24.0__tar.gz → 0.24.1__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -1,6 +1,6 @@
  Metadata-Version: 2.3
  Name: broccoli-ml
- Version: 0.24.0
+ Version: 0.24.1
  Summary: Some useful Pytorch models, circa 2025
  License: MIT
  Author: Nicholas Bailey
@@ -8,16 +8,18 @@ class ReLU(nn.Module):
      A ReLU activation function with optional clamp and leakiness.
      """
  
-     def __init__(self, clamp=True, leaky=True, leaky_slope=0.01, clamp_max=6.0) -> None:
+     def __init__(
+         self, clamp=True, leaky=True, negative_slope=0.01, clamp_max=6.0
+     ) -> None:
          super().__init__()
          self.clamp = clamp
          self.leaky = leaky
-         self.leaky_slope = leaky_slope
+         self.negative_slope = negative_slope
          self.clamp_max = clamp_max
  
      def forward(self, x):
          if self.leaky:
-             relu = F.leaky_relu(x, leaky_slope=self.leaky_slope)
+             relu = F.leaky_relu(x, negative_slope=self.negative_slope)
          else:
              relu = F.relu(x)
          if self.clamp:
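
For context: torch.nn.functional.leaky_relu takes its slope as negative_slope, so the old leaky_slope= keyword would fail with a TypeError the first time a leaky ReLU was evaluated; the rename brings the module's argument in line with PyTorch's signature. A minimal sketch of the renamed call (the input tensor is illustrative):

    import torch
    import torch.nn.functional as F

    x = torch.randn(4)
    # 0.24.0 passed leaky_slope=..., which F.leaky_relu does not accept.
    # 0.24.1 uses the keyword PyTorch actually defines:
    y = F.leaky_relu(x, negative_slope=0.01)
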
@@ -69,17 +71,17 @@ class SquaredReLU(nn.Module):
      """
  
      def __init__(
-         self, clamp=True, leaky=True, leaky_slope: float = 0.01, clamp_max=6
+         self, clamp=True, leaky=True, negative_slope: float = 0.01, clamp_max=6
      ) -> None:
          super().__init__()
          self.clamp = clamp
          self.leaky = leaky
-         self.leaky_slope = leaky_slope
+         self.negative_slope = negative_slope
          self.clamp_max = clamp_max
  
      def forward(self, x):
          if self.leaky:
-             relu = F.leaky_relu(x, leaky_slope=self.leaky_slope)
+             relu = F.leaky_relu(x, negative_slope=self.negative_slope)
          else:
              relu = F.relu(x)
          relu_squared = relu**2
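
SquaredReLU gets the same rename; it squares the (optionally leaky) ReLU output before clamping. A rough functional equivalent of the updated forward pass, assuming the clamp branch that follows this hunk caps the squared value at clamp_max:

    import torch
    import torch.nn.functional as F

    def squared_relu(x, negative_slope=0.01, clamp_max=6.0):
        # Leaky ReLU with the renamed keyword, then square the result.
        relu = F.leaky_relu(x, negative_slope=negative_slope)
        relu_squared = relu ** 2
        # Assumption: the clamp (not shown in the hunk) applies to the squared value.
        return torch.clamp(relu_squared, max=clamp_max)

    print(squared_relu(torch.tensor([-2.0, 0.5, 3.0])))  # values ≈ [0.0004, 0.25, 6.0]
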
@@ -102,12 +104,12 @@ class XGLU(nn.Module):
          return self.activation(gate) * value
  
  
- def SquaredReGLU(clamp=True, leaky=True, leaky_slope=0.01, clamp_max=6.0) -> XGLU:
+ def SquaredReGLU(clamp=True, leaky=True, negative_slope=0.01, clamp_max=6.0) -> XGLU:
      """
      Factory function that creates a GLU with a SquaredReLU activation.
      """
      activation_module = SquaredReLU(
-         clamp=clamp, leaky=leaky, leaky_slope=leaky_slope, clamp_max=clamp_max
+         clamp=clamp, leaky=leaky, negative_slope=negative_slope, clamp_max=clamp_max
      )
      return XGLU(activation_module)
  
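
A hypothetical call to the updated factory; the import path and the assumption that XGLU splits its last dimension into equal gate and value halves are not part of this diff:

    import torch
    from broccoli_ml import SquaredReGLU  # hypothetical import path

    glu = SquaredReGLU(clamp=True, leaky=True, negative_slope=0.01, clamp_max=6.0)
    x = torch.randn(2, 8)   # assumed: last dimension is split into gate and value
    out = glu(x)            # shape (2, 4) if the split is an even halving
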
@@ -1,6 +1,6 @@
  [project]
  name = "broccoli-ml"
- version = "0.24.0"
+ version = "0.24.1"
  description = "Some useful Pytorch models, circa 2025"
  authors = [
      {name = "Nicholas Bailey"}
File without changes
File without changes
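
After upgrading (for example with pip install --upgrade broccoli-ml==0.24.1), a quick way to confirm the patched release is the one installed:

    import importlib.metadata
    print(importlib.metadata.version("broccoli-ml"))  # expect 0.24.1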