hyper-connections 0.1.12__tar.gz → 0.1.15__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.4
2
2
  Name: hyper-connections
3
- Version: 0.1.12
3
+ Version: 0.1.15
4
4
  Summary: Hyper-Connections
5
5
  Project-URL: Homepage, https://pypi.org/project/hyper-connections/
6
6
  Project-URL: Repository, https://github.com/lucidrains/hyper-connections
@@ -0,0 +1,40 @@
1
+ import torch
2
+ from torch import nn
3
+ from torch.nn import Module
4
+
5
+ from einops import rearrange, pack, unpack
6
+
7
class GRUGatedResidual(Module):
    """Gate the residual stream with a GRU cell.

    The new activations ``x`` serve as the GRU input and ``residual``
    as its hidden state, so the cell learns how much of each stream to
    carry forward.
    """

    def __init__(
        self,
        dim
    ):
        super().__init__()
        self.gru = nn.GRUCell(dim, dim)

    def forward(self, x, residual):
        # GRUCell expects 2D (batch, dim): collapse every leading
        # dimension into one, run the cell, then restore the shape —
        # equivalent to einops pack/unpack with '* d'
        lead_shape = x.shape[:-1]

        flat_x = x.reshape(-1, x.shape[-1])
        flat_residual = residual.reshape(-1, residual.shape[-1])

        gated = self.gru(flat_x, flat_residual)

        return gated.reshape(*lead_shape, -1)
23
+
24
class GatedResidual(Module):
    """Blend ``x`` with ``residual`` through a learned sigmoid gate.

    A linear layer reads the concatenation of both streams along the
    feature axis and predicts the mixing coefficient:
    ``out = x.lerp(residual, sigmoid(mix))`` — a gate of 0 keeps ``x``,
    a gate of 1 returns ``residual``.

    Args:
        dim: feature dimension of both inputs.
        fine_gate: if True, predict one gate per feature dimension;
            otherwise a single scalar gate per position.
    """

    def __init__(
        self,
        dim,
        fine_gate = False
    ):
        super().__init__()

        self.to_learned_mix = nn.Linear(dim * 2, dim if fine_gate else 1)

    def forward(self, x, residual):
        # concatenate along the feature axis; unlike pack(..., 'b n *'),
        # which required exactly two leading axes, this accepts any
        # number of leading (batch-like) dimensions, matching
        # GRUGatedResidual's behavior
        x_and_residual = torch.cat((x, residual), dim = -1)

        mix = self.to_learned_mix(x_and_residual)

        # lerp: x + sigmoid(mix) * (residual - x)
        out = x.lerp(residual, mix.sigmoid())
        return out
@@ -1,6 +1,6 @@
1
1
  [project]
2
2
  name = "hyper-connections"
3
- version = "0.1.12"
3
+ version = "0.1.15"
4
4
  description = "Hyper-Connections"
5
5
  authors = [
6
6
  { name = "Phil Wang", email = "lucidrains@gmail.com" }
@@ -1,22 +0,0 @@
1
- import torch
2
- from torch import nn
3
- from torch.nn import Module
4
-
5
- from einops import rearrange
6
-
7
- class GRUGatedResidual(Module):
8
- def __init__(
9
- self,
10
- dim
11
- ):
12
- super().__init__()
13
- self.gru = nn.GRUCell(dim, dim)
14
-
15
- def forward(self, x, residual):
16
-
17
- gated_output = self.gru(
18
- rearrange(x, 'b n d -> (b n) d'),
19
- rearrange(residual, 'b n d -> (b n) d')
20
- )
21
-
22
- return gated_output.reshape_as(x)