rxnn-0.1.58-py3-none-any.whl → rxnn-0.1.60-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
rxnn/experimental/models.py

@@ -11,7 +11,7 @@ from ..utils import get_model_size
 from .attention import init_experimental_attention


-class MoeAttentionTransformerConfig(TypedDict):
+class ExperimentalAttentionTransformerConfig(TypedDict):
     num_layers: int
     vocab_size: int
     embed_dim: int
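
The config is a plain TypedDict, so callers build it as an ordinary dict. A minimal sketch using only the three fields visible in this hunk (the real class in rxnn/experimental/models.py declares more attention-related fields further down):

from typing import TypedDict

# Stand-in mirroring the renamed config; the actual class has additional
# fields not shown in this hunk.
class ExperimentalAttentionTransformerConfig(TypedDict):
    num_layers: int
    vocab_size: int
    embed_dim: int

config: ExperimentalAttentionTransformerConfig = {
    'num_layers': 6,
    'vocab_size': 32000,
    'embed_dim': 256,
}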
@@ -34,8 +34,12 @@ class MoeAttentionTransformerConfig(TypedDict):
     att_num_query_groups: int


-class MoeAttentionTransformer(nn.Module, PyTorchModelHubMixin, pipeline_tag="text-generation", license="apache-2.0"):
-    """Research decoder model for experiments with Mixture-of-Experts Attention"""
+class ExperimentalAttentionTransformer(nn.Module, PyTorchModelHubMixin, pipeline_tag="text-generation", license="apache-2.0"):
+    """
+    Research model for experiments with new attention layers.
+
+    Currently, accepts SparseQueryAttention, GroupedMoeAttention, DeepMoeAttention and standard variants (MHA/GQA/MQA) for reference models
+    """

     def __init__(
             self,
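
Since the new docstring enumerates the accepted attention variants, a hypothetical instantiation sketch may help. The keyword names below are assumptions taken from the config fields and the att_num_query_groups default visible in this diff; the full signature (heads, dims, sequence length, etc.) is not shown here:

from rxnn.experimental.models import ExperimentalAttentionTransformer

# Hypothetical usage: argument names are assumptions based on the config
# fields and defaults visible in this diff, not a confirmed signature.
model = ExperimentalAttentionTransformer(
    num_layers=6,
    vocab_size=32000,
    embed_dim=256,
    ff_activation='silu',     # must pass the assert in the next hunk
    att_num_query_groups=4,   # shown above with default None
)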
@@ -61,7 +65,7 @@ class MoeAttentionTransformer(nn.Module, PyTorchModelHubMixin, pipeline_tag="tex
             att_num_query_groups: int = None,
             **kwargs
     ):
-        super(MoeAttentionTransformer, self).__init__(**kwargs)
+        super(ExperimentalAttentionTransformer, self).__init__(**kwargs)
         assert ff_activation in ['relu', 'gelu',
                                  'swish', 'silu', 'linear',
                                  'sigmoid'], 'Feed-forward activation could be "relu", "gelu", "swish", "silu", "linear", "sigmoid".'
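
A side note on the super(...) edit: with Python 3's zero-argument form, the class rename would not have required touching this line. A sketch of the equivalent call:

import torch.nn as nn

class ExampleModel(nn.Module):
    def __init__(self, **kwargs):
        # Zero-argument super() resolves the class automatically, so renaming
        # the class would leave this line unchanged.
        super().__init__(**kwargs)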
@@ -83,7 +87,7 @@ class MoeAttentionTransformer(nn.Module, PyTorchModelHubMixin, pipeline_tag="tex
             num_query_experts=att_num_query_experts,
             num_query_groups=att_num_query_groups)

-        use_moe_att = att_type in ['gma', 'dma', 'gma_s', 'dma_s']
+        use_moe_att = att_type in ['gma', 'dma']

         self.model = ClassicTransformerDecoder(
             embed_dim,
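
The behavioral change here: only 'gma' and 'dma' now enable the MoE-attention path, while the '_s'-suffixed types no longer qualify (whether they remain valid att_type values elsewhere is not visible in this diff). A quick illustration of the changed membership test:

# Illustrative only: evaluates the new condition for the four strings
# that appeared in the old list.
for att_type in ['gma', 'dma', 'gma_s', 'dma_s']:
    use_moe_att = att_type in ['gma', 'dma']
    print(f'{att_type}: use_moe_att={use_moe_att}')
# gma: True, dma: True; gma_s and dma_s now evaluate to False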
rxnn/transformers/attention.py

@@ -137,6 +137,9 @@ class MultiHeadAttention(nn.Module):
         b, t, d = query.size()
         q, k, v = self._forward_qkv(query, key, value, b, t, d)
         if not self.rel_embed:
+            print('q', q.size())
+            print('k', k.size())
+            print('v', v.size())
             q, k = self._apply_rope(q, k)
             attn_output = self._calculate_attention(q, k, v, b, t, d, mask=mask)
         else:
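
These print calls look like debugging output left in the release; they run on every forward pass of the non-relative-embedding branch. For reference, a sketch of the shapes they would typically report, assuming a conventional [batch, num_heads, seq_len, head_dim] split (the exact layout produced by _forward_qkv is not shown in this diff):

import torch

# Assumed layout: [batch, num_heads, seq_len, head_dim]; values illustrative.
b, t, d, num_heads = 2, 16, 64, 4
q = torch.randn(b, num_heads, t, d // num_heads)
k = torch.randn(b, num_heads, t, d // num_heads)
v = torch.randn(b, num_heads, t, d // num_heads)
print('q', q.size())  # q torch.Size([2, 4, 16, 16])
print('k', k.size())
print('v', v.size())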
rxnn/transformers/layers.py

@@ -86,6 +86,7 @@ class ReactiveTransformerLayer(nn.Module):
         residual = x
         if not self.use_post_norm:
             x = self.norm1(x)
+        print('self x', x.size())
         x = self.attention(x, x, x, mask=mask)
         x = residual + x
         if self.use_post_norm:
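
The context lines around the added print show the layer's pre-norm vs. post-norm branching. A condensed sketch of the two residual orderings (names are illustrative, not the RxNN API):

import torch
import torch.nn as nn

def residual_block(x: torch.Tensor, norm: nn.Module, sublayer, use_post_norm: bool) -> torch.Tensor:
    residual = x
    if not use_post_norm:
        x = norm(x)        # pre-norm: normalize the input to the sublayer
    x = sublayer(x)
    x = residual + x
    if use_post_norm:
        x = norm(x)        # post-norm: normalize after the residual add
    return x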
@@ -94,6 +95,8 @@ class ReactiveTransformerLayer(nn.Module):
         residual = x
         if not self.use_post_norm:
             x = self.norm2(x)
+        print('x', x.size())
+        print('STM', stm.size())
         x = self.memory_cross_attention(x, stm, stm)
         x = residual + x
         if self.use_post_norm:
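
Here the queries come from the token states x while keys and values come from the short-term memory stm. A self-contained cross-attention sketch using standard PyTorch modules (not the RxNN classes; the [batch, slots, embed_dim] STM layout is an assumption):

import torch
import torch.nn as nn

batch, seq_len, stm_slots, embed_dim = 2, 16, 8, 64
x = torch.randn(batch, seq_len, embed_dim)      # token states (queries)
stm = torch.randn(batch, stm_slots, embed_dim)  # short-term memory (keys/values)

cross_attn = nn.MultiheadAttention(embed_dim, num_heads=4, batch_first=True)
out, _ = cross_attn(x, stm, stm)  # mirrors memory_cross_attention(x, stm, stm)
print(out.shape)  # torch.Size([2, 16, 64])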
rxnn-0.1.58.dist-info/METADATA → rxnn-0.1.60.dist-info/METADATA

@@ -1,6 +1,6 @@
 Metadata-Version: 2.3
 Name: rxnn
-Version: 0.1.58
+Version: 0.1.60
 Summary: RxNN: Reactive Neural Networks Platform
 License: Apache-2.0
 Keywords: deep-learning,ai,machine-learning
rxnn-0.1.58.dist-info/RECORD → rxnn-0.1.60.dist-info/RECORD

@@ -1,7 +1,7 @@
 rxnn/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 rxnn/experimental/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 rxnn/experimental/attention.py,sha256=bpZQiRXdQ8gJPwYRp3LBr2oELmrysB6-SWiD2F7UQrk,23127
-rxnn/experimental/models.py,sha256=_i9kvQsAYPyMQo2VfMUTmtBs-mE2w75j1X-OHx03IJk,4743
+rxnn/experimental/models.py,sha256=foBo0n0ufvBnfIdJomiEg3CuSOiWSt-q5ako7vzYxx4,4888
 rxnn/experimental/moe.py,sha256=jHZ1QhpWiVQOswVpFmuH7b2IUOPf0Uuf-I2Ddwsd7Us,6140
 rxnn/memory/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 rxnn/memory/norm.py,sha256=Ofl8Q5NYEF9GQeO0bhM43tkTW91J0y6TSvTAOYMgloM,6278
@@ -16,16 +16,16 @@ rxnn/training/dataset.py,sha256=JQuWSUdT5AnsrG6M_EsewoU6uroVHhg4K715nbtDx8A,9643
 rxnn/training/scheduler.py,sha256=ow6oALzWjWQmHSpcJEjv6tg4g4CDMvr73TypxfcefMc,712
 rxnn/training/tokenizer.py,sha256=umaLByMBx_NMrQElA45HLm9gkuzyKWDTFaKVd-CjXl0,8344
 rxnn/transformers/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-rxnn/transformers/attention.py,sha256=dC0UmC-_kjX8US6Sf0Fi5zw5kJ-P6orH3JDHeBB5gI8,15695
+rxnn/transformers/attention.py,sha256=2pJoAqRB6AKHtA2lj158NzbHzXr0dCaKj1RKjMfHsOI,15794
 rxnn/transformers/ff.py,sha256=jJnuBDsnnX5uYC_WZH8cXAYrMnz0P-iX7MwcPivjRtI,2533
-rxnn/transformers/layers.py,sha256=OX8CsFY9A7uqH1SLwyexR_5BNlwheYrJHCGXjF8Q7HU,7186
+rxnn/transformers/layers.py,sha256=tSOwr_IwJcAjPLmyRJI-93wRjgHH6OPJj4QrdkEFYdc,7282
 rxnn/transformers/mask.py,sha256=J0cfLVLt3SzS2ra3KcY4khrkhI975Dw4CjpUi3Sn25s,419
 rxnn/transformers/models.py,sha256=QFzBrOR7tDp9d_T0HoIukBMfEbLxsCictV5p3e2ilxg,7552
 rxnn/transformers/moe.py,sha256=j6jEx6Ip0zttlUZKKn82azxo95lkLZs-H2GLSMD88hY,5859
 rxnn/transformers/positional.py,sha256=2l38RS0Dini3f6Z3LUHr3XwWzg1UK7fO2C6wazWDAYU,4292
 rxnn/transformers/sampler.py,sha256=poWBpxg1iuK5gEJtxHkk5VVfS9V48hs2Olqdhy_Gw8c,6548
 rxnn/utils.py,sha256=d5U8i5ukovgDyqiycc2AoxObTz_eF_bgo2MKvdtJ98s,467
-rxnn-0.1.58.dist-info/LICENSE,sha256=C8coDFIUYuOcke4JLPwTqahQUCyXyGq6WOaigOkx8tY,11275
-rxnn-0.1.58.dist-info/METADATA,sha256=6aamtiDsToIFsNhpO73cacZMFmCPLMCMNluCTWcwWrE,16627
-rxnn-0.1.58.dist-info/WHEEL,sha256=fGIA9gx4Qxk2KDKeNJCbOEwSrmLtjWCwzBz351GyrPQ,88
-rxnn-0.1.58.dist-info/RECORD,,
+rxnn-0.1.60.dist-info/LICENSE,sha256=C8coDFIUYuOcke4JLPwTqahQUCyXyGq6WOaigOkx8tY,11275
+rxnn-0.1.60.dist-info/METADATA,sha256=eTT7LMBzM_AiWqreCwois6pTV50-zb8tyMlgrz4Mq-g,16627
+rxnn-0.1.60.dist-info/WHEEL,sha256=fGIA9gx4Qxk2KDKeNJCbOEwSrmLtjWCwzBz351GyrPQ,88
+rxnn-0.1.60.dist-info/RECORD,,
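
For anyone verifying these RECORD entries: wheel RECORD digests are the URL-safe base64 encoding of the raw sha256 digest with trailing '=' padding stripped (PEP 427). A sketch to recompute one locally from an unpacked wheel:

import base64
import hashlib

def record_digest(path: str) -> str:
    # Wheel RECORD format: urlsafe base64 of the raw sha256 digest, no padding.
    with open(path, 'rb') as f:
        digest = hashlib.sha256(f.read()).digest()
    return 'sha256=' + base64.urlsafe_b64encode(digest).rstrip(b'=').decode('ascii')

print(record_digest('rxnn/experimental/models.py'))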