broccoli-ml 10.1.1__tar.gz → 10.2.0__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -1,6 +1,6 @@
 Metadata-Version: 2.3
 Name: broccoli-ml
-Version: 10.1.1
+Version: 10.2.0
 Summary: Some useful Pytorch models, circa 2025
 License: MIT
 Author: Nicholas Bailey
@@ -22,12 +22,21 @@ except ImportError:
 
 
 class LayerScale(nn.Module):
-    def __init__(self, dim, init_values=1e-4):
+    def __init__(self, dim, decay=False, init_values=1e-4):
         super().__init__()
-        self.nondecay_scale = nn.Parameter(init_values * torch.ones(dim))
+        self.decay = decay
+        if decay:
+            self.scale = nn.Parameter(init_values * torch.ones(dim))
+            self.nondecay_scale = None
+        else:
+            self.nondecay_scale = nn.Parameter(init_values * torch.ones(dim))
+            self.scale = None
 
     def forward(self, x):
-        return x * self.nondecay_scale
+        if self.decay:
+            return x * self.scale
+        else:
+            return x * self.nondecay_scale
 
 
 def drop_path(
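
Note on the hunk above: the constructor now registers its gain under either `scale` or `nondecay_scale`, which suggests the training side sorts parameters into weight-decay groups by name. A minimal sketch of that convention, assuming a name-based grouping helper (`param_groups` and `Toy` below are hypothetical illustrations, not broccoli-ml API):

import torch
import torch.nn as nn

def param_groups(model: nn.Module, weight_decay: float = 0.05):
    # Assumption: parameters whose names contain "nondecay" are exempt
    # from weight decay, mirroring the naming in LayerScale above.
    decay, no_decay = [], []
    for name, param in model.named_parameters():
        if not param.requires_grad:
            continue
        (no_decay if "nondecay" in name else decay).append(param)
    return [
        {"params": decay, "weight_decay": weight_decay},
        {"params": no_decay, "weight_decay": 0.0},
    ]

class Toy(nn.Module):
    # Stand-in module using the same "nondecay_" naming as LayerScale.
    def __init__(self, dim=16):
        super().__init__()
        self.proj = nn.Linear(dim, dim)
        self.nondecay_scale = nn.Parameter(1e-4 * torch.ones(dim))
    def forward(self, x):
        return self.proj(x) * self.nondecay_scale

optimizer = torch.optim.AdamW(param_groups(Toy()), lr=3e-4)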
@@ -711,9 +720,10 @@ class TransformerEncoder(nn.Module):
         self.return_utility_tokens = return_utility_tokens
 
         if layerscale:
-            self.layerscale = LayerScale(d_model)
+            rope_and_ape = absolute_position_embedding and relative_position_embedding
+            self.position_layerscale = LayerScale(d_model, decay=rope_and_ape)
         else:
-            self.layerscale = None
+            self.position_layerscale = None
 
         # Initialise utility tokens with normal init, like usual Pytorch embeddings
         if self._utility_tokens:
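
The net effect of `decay=rope_and_ape` in this hunk: when the encoder combines absolute and relative position embeddings, the position gain is registered as `scale` (and so picks up weight decay under the naming convention sketched above); otherwise it stays exempt as `nondecay_scale`. A quick check of the resulting parameter names, assuming the LayerScale class from the first hunk is in scope (the width 512 is illustrative):

print([n for n, _ in LayerScale(512, decay=True).named_parameters()])
# -> ['scale']
print([n for n, _ in LayerScale(512, decay=False).named_parameters()])
# -> ['nondecay_scale']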
@@ -807,8 +817,8 @@ class TransformerEncoder(nn.Module):
                     0
                 )  # to shape (1, seq_len) to broadcast over batch
             )
-            if self.layerscale is not None:
-                position_embedding = self.layerscale(position_embedding)
+            if self.position_layerscale is not None:
+                position_embedding = self.position_layerscale(position_embedding)
             x += position_embedding
 
         return x
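
For context on the forward-pass change: when layerscale is enabled, the absolute position embedding is gated element-wise by the learned per-channel gain before being added to the token stream, so with init_values=1e-4 the position signal starts almost switched off and the model learns how much to mix in. A rough sketch, assuming LayerScale from above and illustrative shapes:

import torch

d_model, seq_len = 8, 10
gate = LayerScale(d_model)              # per-channel gains, init 1e-4
pos = torch.randn(1, seq_len, d_model)  # (1, seq_len, d_model) broadcasts over batch
x = torch.randn(4, seq_len, d_model)    # token embeddings
x = x + gate(pos)                       # gated position signal added to the stream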
@@ -1,6 +1,6 @@
 [project]
 name = "broccoli-ml"
-version = "10.1.1"
+version = "10.2.0"
 description = "Some useful Pytorch models, circa 2025"
 authors = [
     {name = "Nicholas Bailey"}
File without changes
File without changes