broccoli-ml 12.0.0__tar.gz → 12.1.0__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -1,6 +1,6 @@
 Metadata-Version: 2.3
 Name: broccoli-ml
-Version: 12.0.0
+Version: 12.1.0
 Summary: Some useful Pytorch models, circa 2025
 License: MIT
 Author: Nicholas Bailey
@@ -592,33 +592,52 @@ class TransformerBlock(nn.Module):
         return self.attn._kv_distance
 
     def forward(self, x):
-
-        if self.pre_norm:
-            process_x = self.pre_attention_norm(x)
-        else:
-            process_x = x
-
-        processed = self.drop_path(self.attn(process_x, process_x, process_x))
-
-        if self.normformer:
-            processed = self.normformer_norm(processed)
-
-        x = x + processed
-
-        if self.post_norm:
-            x = self.post_attention_norm(x)
-
         if self.pre_norm:
-            process_x = self.pre_mlp_norm(x)
-        else:
-            process_x = x
-
-        x = x + self.drop_path(self.ff(process_x))
-
-        if self.post_norm:
-            x = self.post_mlp_norm(x)
-
-        return x
+            x = self.layer_norm_1(x)
+            x = x + self.drop_path(self.layerscale1(self.attn(x, x, x)))
+            x = self.layer_norm_2(x)
+            x = x + self.drop_path(self.layerscale2(self.ff(x)))
+            if self.post_norm: # i.e. in addition! Pre and post.
+                x = self.layer_norm_3(x)
+        elif self.post_norm: # i.e. only, not prenorm, just post
+            x = x + self.drop_path(self.layerscale1(self.attn(x, x, x)))
+            x = self.layer_norm_1(x)
+            x = x + self.drop_path(self.layerscale2(self.ff(x)))
+            x = self.layer_norm_2(x)
+        else: # Not pre or post norm. Stand well back.
+            x = x + self.drop_path(self.layerscale1(self.attn(x, x, x)))
+            x = x + self.drop_path(self.layerscale2(self.ff(x)))
+
+        # if self.pre_norm:
+        #     process_x = self.pre_attention_norm(x)
+        # else:
+        #     process_x = x
+
+        # processed = self.drop_path(self.attn(process_x, process_x, process_x))
+
+        # if self.normformer:
+        #     processed = self.normformer_norm(processed)
+
+        # if self.residual_path:
+        #     x = x + processed
+
+        # if self.post_norm:
+        #     x = self.post_attention_norm(x)
+
+        # if self.pre_norm:
+        #     process_x = self.pre_mlp_norm(x)
+        # else:
+        #     process_x = x
+
+        # processed = self.drop_path(self.ff(process_x))
+
+        # if self.residual_path:
+        #     x = x + processed
+
+        # if self.post_norm:
+        #     x = self.post_mlp_norm(x)
+
+        # return x
 
     def attention_logits(self, x):
         """
@@ -1,6 +1,6 @@
 [project]
 name = "broccoli-ml"
-version = "12.0.0"
+version = "12.1.0"
 description = "Some useful Pytorch models, circa 2025"
 authors = [
     {name = "Nicholas Bailey"}