titans-pytorch 0.3.5.tar.gz → 0.3.6.tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: titans-pytorch
-Version: 0.3.5
+Version: 0.3.6
 Summary: Titans
 Project-URL: Homepage, https://pypi.org/project/titans-pytorch/
 Project-URL: Repository, https://github.com/lucidrains/titans-pytorch
@@ -1,6 +1,6 @@
 [project]
 name = "titans-pytorch"
-version = "0.3.5"
+version = "0.3.6"
 description = "Titans"
 authors = [
     { name = "Phil Wang", email = "lucidrains@gmail.com" }
@@ -103,8 +103,6 @@ class GatedResidualMemoryMLP(Module):
 
         self.final_proj = Parameter(torch.randn(dim, dim))
 
-        self.ln = LayerNorm(dim)
-
         for param in self.parameters():
             nn.init.xavier_uniform_(param)
 
@@ -145,8 +143,6 @@ class FactorizedMemoryMLP(Module):
             ]) for _ in range(depth)
         ])
 
-        self.ln = LayerNorm(dim)
-
         for weight1, weight2 in self.weights:
             nn.init.xavier_uniform_(weight1)
             nn.init.xavier_uniform_(weight2)
@@ -187,8 +183,6 @@ class MemoryAttention(Module):
             nn.Parameter(torch.randn(dim_ff_hidden, dim)), # ff w2
         ])
 
-        self.ln = LayerNorm(dim)
-
         for weight in self.weights:
             nn.init.xavier_uniform_(weight)
 
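All three source hunks make the same change: GatedResidualMemoryMLP, FactorizedMemoryMLP, and MemoryAttention each constructed a `self.ln = LayerNorm(dim)` in `__init__` that no other part of the class referenced, and 0.3.6 deletes the dead attribute. Below is a minimal sketch of that pattern; `MemoryMLPSketch` is a hypothetical stand-in for illustration, not the actual titans-pytorch code.

import torch
from torch import nn
from torch.nn import Module, Parameter

class MemoryMLPSketch(Module):
    # hypothetical stand-in for the memory modules touched above
    def __init__(self, dim):
        super().__init__()
        self.final_proj = Parameter(torch.randn(dim, dim))

        # 0.3.5 also declared `self.ln = LayerNorm(dim)` at this point;
        # since forward() below never reads it, deleting it (as 0.3.6 does)
        # leaves the module's behavior unchanged

        for param in self.parameters():
            nn.init.xavier_uniform_(param)

    def forward(self, x):
        # no reference to `self.ln` here, which is what makes the removal safe
        return x @ self.final_proj

# usage: MemoryMLPSketch(64)(torch.randn(2, 64)) -> tensor of shape (2, 64)

Since none of the hunks touch a forward path, the release amounts to dead-attribute cleanup in the memory modules plus the matching version bump in the package metadata and pyproject.toml.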