x-transformers 2.7.2__py3-none-any.whl → 2.7.3__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- x_transformers/x_transformers.py +23 -0
- {x_transformers-2.7.2.dist-info → x_transformers-2.7.3.dist-info}/METADATA +13 -1
- {x_transformers-2.7.2.dist-info → x_transformers-2.7.3.dist-info}/RECORD +5 -5
- {x_transformers-2.7.2.dist-info → x_transformers-2.7.3.dist-info}/WHEEL +0 -0
- {x_transformers-2.7.2.dist-info → x_transformers-2.7.3.dist-info}/licenses/LICENSE +0 -0
x_transformers/x_transformers.py
CHANGED
@@ -1619,6 +1619,29 @@ class Attention(Module):
|
|
1619
1619
|
if zero_init_output:
|
1620
1620
|
init_zero_(self.to_out)
|
1621
1621
|
|
1622
|
+
@torch.no_grad()
|
1623
|
+
def qk_clip_(
|
1624
|
+
self,
|
1625
|
+
pre_softmax_attn: Tensor | Intermediates,
|
1626
|
+
tau = 100 # this hyperparameter controls how large the attention logits can be
|
1627
|
+
):
|
1628
|
+
""" proposed by the Moonshot AI team as a solution for Muon training instability """
|
1629
|
+
|
1630
|
+
if not is_tensor(pre_softmax_attn):
|
1631
|
+
pre_softmax_attn = pre_softmax_attn.pre_softmax_attn
|
1632
|
+
|
1633
|
+
attn_logit_maxes = reduce(pre_softmax_attn, 'b h i j -> h', 'max')
|
1634
|
+
|
1635
|
+
qk_weight_scale = (tau / attn_logit_maxes).clamp(max = 1.).sqrt()
|
1636
|
+
|
1637
|
+
q_weight = self.to_q.weight
|
1638
|
+
k_weight = self.to_k.weight
|
1639
|
+
|
1640
|
+
q_dim, k_dim, heads = q_weight.shape[0], k_weight.shape[0], qk_weight_scale.numel()
|
1641
|
+
|
1642
|
+
q_weight.mul_(repeat(qk_weight_scale, 'h -> (h expand)', expand = q_dim // heads))
|
1643
|
+
k_weight.mul_(repeat(qk_weight_scale, 'h -> (h expand)', expand = k_dim // heads))
|
1644
|
+
|
1622
1645
|
def forward(
|
1623
1646
|
self,
|
1624
1647
|
x,
|
@@ -1,6 +1,6 @@
|
|
1
1
|
Metadata-Version: 2.4
|
2
2
|
Name: x-transformers
|
3
|
-
Version: 2.7.2
|
3
|
+
Version: 2.7.3
|
4
4
|
Summary: X-Transformers
|
5
5
|
Project-URL: Homepage, https://pypi.org/project/x-transformers/
|
6
6
|
Project-URL: Repository, https://github.com/lucidrains/x-transformers
|
@@ -2528,4 +2528,16 @@ ids_out, num_out, is_number_mask = model.generate(start_ids, start_nums, 17)
|
|
2528
2528
|
}
|
2529
2529
|
```
|
2530
2530
|
|
2531
|
+
```bibtex
|
2532
|
+
@misc{kimiteam2025kimik2openagentic,
|
2533
|
+
title = {Kimi K2: Open Agentic Intelligence},
|
2534
|
+
author = {Kimi Team and Yifan Bai and Yiping Bao and Guanduo Chen and Jiahao Chen and Ningxin Chen and Ruijue Chen and Yanru Chen and Yuankun Chen and Yutian Chen and Zhuofu Chen and Jialei Cui and Hao Ding and Mengnan Dong and Angang Du and Chenzhuang Du and Dikang Du and Yulun Du and Yu Fan and Yichen Feng and Kelin Fu and Bofei Gao and Hongcheng Gao and Peizhong Gao and Tong Gao and Xinran Gu and Longyu Guan and Haiqing Guo and Jianhang Guo and Hao Hu and Xiaoru Hao and Tianhong He and Weiran He and Wenyang He and Chao Hong and Yangyang Hu and Zhenxing Hu and Weixiao Huang and Zhiqi Huang and Zihao Huang and Tao Jiang and Zhejun Jiang and Xinyi Jin and Yongsheng Kang and Guokun Lai and Cheng Li and Fang Li and Haoyang Li and Ming Li and Wentao Li and Yanhao Li and Yiwei Li and Zhaowei Li and Zheming Li and Hongzhan Lin and Xiaohan Lin and Zongyu Lin and Chengyin Liu and Chenyu Liu and Hongzhang Liu and Jingyuan Liu and Junqi Liu and Liang Liu and Shaowei Liu and T. Y. Liu and Tianwei Liu and Weizhou Liu and Yangyang Liu and Yibo Liu and Yiping Liu and Yue Liu and Zhengying Liu and Enzhe Lu and Lijun Lu and Shengling Ma and Xinyu Ma and Yingwei Ma and Shaoguang Mao and Jie Mei and Xin Men and Yibo Miao and Siyuan Pan and Yebo Peng and Ruoyu Qin and Bowen Qu and Zeyu Shang and Lidong Shi and Shengyuan Shi and Feifan Song and Jianlin Su and Zhengyuan Su and Xinjie Sun and Flood Sung and Heyi Tang and Jiawen Tao and Qifeng Teng and Chensi Wang and Dinglu Wang and Feng Wang and Haiming Wang and Jianzhou Wang and Jiaxing Wang and Jinhong Wang and Shengjie Wang and Shuyi Wang and Yao Wang and Yejie Wang and Yiqin Wang and Yuxin Wang and Yuzhi Wang and Zhaoji Wang and Zhengtao Wang and Zhexu Wang and Chu Wei and Qianqian Wei and Wenhao Wu and Xingzhe Wu and Yuxin Wu and Chenjun Xiao and Xiaotong Xie and Weimin Xiong and Boyu Xu and Jing Xu and Jinjing Xu and L. H. 
Xu and Lin Xu and Suting Xu and Weixin Xu and Xinran Xu and Yangchuan Xu and Ziyao Xu and Junjie Yan and Yuzi Yan and Xiaofei Yang and Ying Yang and Zhen Yang and Zhilin Yang and Zonghan Yang and Haotian Yao and Xingcheng Yao and Wenjie Ye and Zhuorui Ye and Bohong Yin and Longhui Yu and Enming Yuan and Hongbang Yuan and Mengjie Yuan and Haobing Zhan and Dehao Zhang and Hao Zhang and Wanlu Zhang and Xiaobin Zhang and Yangkun Zhang and Yizhi Zhang and Yongting Zhang and Yu Zhang and Yutao Zhang and Yutong Zhang and Zheng Zhang and Haotian Zhao and Yikai Zhao and Huabin Zheng and Shaojie Zheng and Jianren Zhou and Xinyu Zhou and Zaida Zhou and Zhen Zhu and Weiyu Zhuang and Xinxing Zu},
|
2535
|
+
year = {2025},
|
2536
|
+
eprint = {2507.20534},
|
2537
|
+
archivePrefix = {arXiv},
|
2538
|
+
primaryClass = {cs.LG},
|
2539
|
+
url = {https://arxiv.org/abs/2507.20534},
|
2540
|
+
}
|
2541
|
+
```
|
2542
|
+
|
2531
2543
|
*solve intelligence... then use that to solve everything else.* - Demis Hassabis
|
@@ -9,10 +9,10 @@ x_transformers/multi_input.py,sha256=tCh-fTJDj2ib4SMGtsa-AM8MxKzJAQSwqAXOu3HU2mg
|
|
9
9
|
x_transformers/neo_mlp.py,sha256=XCNnnop9WLarcxap1kGuYc1x8GHvwkZiDRnXOxSl3Po,3452
|
10
10
|
x_transformers/nonautoregressive_wrapper.py,sha256=hMQqNimGtchNIe13cR5LZule1V7I1qM5LmY8VQfVdnA,11698
|
11
11
|
x_transformers/up_wrapper.py,sha256=YC2LN14_7Xx9Wtiek2rtEJ_qHqdfSmKlh3d7Cgxwd80,7073
|
12
|
-
x_transformers/x_transformers.py,sha256=
|
12
|
+
x_transformers/x_transformers.py,sha256=5lWGEmRqj2_XwxQW0CiXOikPdRUQpyICSecokJB19Mk,123971
|
13
13
|
x_transformers/xl_autoregressive_wrapper.py,sha256=CvZMJ6A6PA-Y_bQAhnORwjJBSl6Vjq2IdW5KTdk8NI8,4195
|
14
14
|
x_transformers/xval.py,sha256=AwwYUm8yDAtKQyKJDIhYMsiLTJ_skh3scUFMjp5sda8,8597
|
15
|
-
x_transformers-2.7.
|
16
|
-
x_transformers-2.7.2.dist-info/WHEEL,sha256=qtCwoSJWgHk21S1Kb4ihdzI2rlJ1ZKaIurTj_ngOhyQ,87
|
17
|
-
x_transformers-2.7.2.dist-info/licenses/LICENSE,sha256=As9u198X-U-vph5noInuUfqsAG2zX_oXPHDmdjwlPPY,1066
|
18
|
-
x_transformers-2.7.2.dist-info/RECORD,,
|
15
|
+
x_transformers-2.7.3.dist-info/METADATA,sha256=XzaIZ5nLUiI_sGTz_uekMDZkOEqRsPxggpnm9SNcsfw,93739
|
16
|
+
x_transformers-2.7.3.dist-info/WHEEL,sha256=qtCwoSJWgHk21S1Kb4ihdzI2rlJ1ZKaIurTj_ngOhyQ,87
|
17
|
+
x_transformers-2.7.3.dist-info/licenses/LICENSE,sha256=As9u198X-U-vph5noInuUfqsAG2zX_oXPHDmdjwlPPY,1066
|
18
|
+
x_transformers-2.7.3.dist-info/RECORD,,
|
File without changes
|
File without changes
|