liger-kernel-nightly 0.6.2.dev20251011154427__py3-none-any.whl → 0.6.4.dev20251202054858__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release: this version of liger-kernel-nightly might be problematic.
- liger_kernel/chunked_loss/cosine_similarity_loss.py +13 -4
- liger_kernel/chunked_loss/fused_linear_distillation.py +13 -2
- liger_kernel/chunked_loss/fused_linear_ppo.py +21 -5
- liger_kernel/chunked_loss/grpo_loss.py +8 -5
- liger_kernel/chunked_loss/jsd_loss.py +18 -5
- liger_kernel/ops/cross_entropy.py +65 -11
- liger_kernel/ops/dyt.py +5 -2
- liger_kernel/ops/fused_add_rms_norm.py +5 -1
- liger_kernel/ops/fused_linear_cross_entropy.py +43 -13
- liger_kernel/ops/geglu.py +2 -1
- liger_kernel/ops/group_norm.py +2 -1
- liger_kernel/ops/grpo_loss.py +3 -1
- liger_kernel/ops/layer_norm.py +86 -66
- liger_kernel/ops/poly_norm.py +390 -0
- liger_kernel/ops/rms_norm.py +7 -2
- liger_kernel/ops/tiled_mlp.py +136 -0
- liger_kernel/ops/utils.py +2 -0
- liger_kernel/transformers/__init__.py +27 -0
- liger_kernel/transformers/cross_entropy.py +8 -3
- liger_kernel/transformers/functional.py +29 -6
- liger_kernel/transformers/fused_linear_cross_entropy.py +8 -3
- liger_kernel/transformers/grpo_loss.py +56 -1
- liger_kernel/transformers/model/falcon_h1.py +19 -5
- liger_kernel/transformers/model/gemma.py +17 -6
- liger_kernel/transformers/model/gemma2.py +14 -5
- liger_kernel/transformers/model/gemma3.py +25 -12
- liger_kernel/transformers/model/glm4.py +16 -4
- liger_kernel/transformers/model/glm4v.py +16 -4
- liger_kernel/transformers/model/glm4v_moe.py +23 -4
- liger_kernel/transformers/model/hunyuan_v1.py +134 -0
- liger_kernel/transformers/model/internvl.py +12 -5
- liger_kernel/transformers/model/llama.py +14 -5
- liger_kernel/transformers/model/llama4.py +16 -4
- liger_kernel/transformers/model/llava.py +12 -4
- liger_kernel/transformers/model/loss_utils.py +31 -3
- liger_kernel/transformers/model/mistral.py +15 -6
- liger_kernel/transformers/model/mixtral.py +16 -7
- liger_kernel/transformers/model/mllama.py +12 -4
- liger_kernel/transformers/model/olmo2.py +16 -4
- liger_kernel/transformers/model/olmo3.py +142 -0
- liger_kernel/transformers/model/output_classes.py +147 -0
- liger_kernel/transformers/model/paligemma.py +22 -5
- liger_kernel/transformers/model/phi3.py +14 -7
- liger_kernel/transformers/model/qwen2.py +16 -3
- liger_kernel/transformers/model/qwen2_5_vl.py +14 -6
- liger_kernel/transformers/model/qwen2_vl.py +16 -4
- liger_kernel/transformers/model/qwen3.py +20 -5
- liger_kernel/transformers/model/qwen3_moe.py +19 -5
- liger_kernel/transformers/model/qwen3_next.py +146 -0
- liger_kernel/transformers/model/qwen3_vl.py +150 -0
- liger_kernel/transformers/model/qwen3_vl_moe.py +126 -0
- liger_kernel/transformers/model/smollm3.py +15 -6
- liger_kernel/transformers/model/smolvlm.py +158 -0
- liger_kernel/transformers/monkey_patch.py +594 -19
- liger_kernel/transformers/poly_norm.py +42 -0
- liger_kernel/transformers/rms_norm.py +7 -0
- liger_kernel/transformers/rope.py +43 -0
- liger_kernel/transformers/swiglu.py +17 -0
- liger_kernel/transformers/tiled_mlp.py +133 -0
- liger_kernel/utils.py +25 -0
- {liger_kernel_nightly-0.6.2.dev20251011154427.dist-info → liger_kernel_nightly-0.6.4.dev20251202054858.dist-info}/METADATA +4 -1
- liger_kernel_nightly-0.6.4.dev20251202054858.dist-info/RECORD +118 -0
- liger_kernel_nightly-0.6.2.dev20251011154427.dist-info/RECORD +0 -107
- {liger_kernel_nightly-0.6.2.dev20251011154427.dist-info → liger_kernel_nightly-0.6.4.dev20251202054858.dist-info}/LICENSE +0 -0
- {liger_kernel_nightly-0.6.2.dev20251011154427.dist-info → liger_kernel_nightly-0.6.4.dev20251202054858.dist-info}/NOTICE +0 -0
- {liger_kernel_nightly-0.6.2.dev20251011154427.dist-info → liger_kernel_nightly-0.6.4.dev20251202054858.dist-info}/WHEEL +0 -0
- {liger_kernel_nightly-0.6.2.dev20251011154427.dist-info → liger_kernel_nightly-0.6.4.dev20251202054858.dist-info}/top_level.txt +0 -0
liger_kernel/transformers/poly_norm.py
ADDED
@@ -0,0 +1,42 @@
+import torch
+import torch.nn as nn
+
+from liger_kernel.ops.poly_norm import LigerPolyNormFunction
+
+
+class LigerPolyNorm(nn.Module):
+    """
+    PolyNorm layer wrapper for Liger kernel.
+
+    PolyNorm formula:
+        y = w₀·norm(x³) + w₁·norm(x²) + w₂·norm(x) + b
+        where norm(u) = u / sqrt(mean(u²) + ε)
+
+    Reference:
+        https://github.com/BryceZhuo/PolyCom/
+
+    Args:
+        eps: epsilon for numerical stability (default: 1e-6)
+        in_place: whether to in-place modify grad_output in backward to save memory (default: False).
+            Set to True to save memory if grad_output is not needed elsewhere.
+    """
+
+    def __init__(self, eps=1e-6, in_place=True):
+        super().__init__()
+        # Align with PolyCom reference: initialize weights to (1/3, 1/3, 1/3) and bias to 1.0
+        self.weight = nn.Parameter(torch.full((3,), 1.0 / 3.0))
+        self.bias = nn.Parameter(torch.tensor(1.0))
+        self.variance_epsilon = eps
+        self.in_place = in_place
+
+    def forward(self, hidden_states):
+        return LigerPolyNormFunction.apply(
+            hidden_states,
+            self.weight,
+            self.bias,
+            self.variance_epsilon,
+            self.in_place,
+        )
+
+    def extra_repr(self):
+        return f"weight_shape={tuple(self.weight.shape)}, eps={self.variance_epsilon}, in_place={self.in_place}"
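For reference, a minimal usage sketch of the new wrapper (device and shapes are illustrative, not taken from the package):

    import torch
    from liger_kernel.transformers.poly_norm import LigerPolyNorm

    # Hypothetical (batch, seq_len, hidden) input; the kernel normalizes each row.
    poly_norm = LigerPolyNorm(eps=1e-6).to("cuda")
    x = torch.randn(2, 128, 4096, device="cuda", requires_grad=True)
    y = poly_norm(x)    # w0*norm(x^3) + w1*norm(x^2) + w2*norm(x) + b
    y.sum().backward()  # gradients flow to x, poly_norm.weight, poly_norm.bias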
liger_kernel/transformers/rms_norm.py
CHANGED
@@ -77,3 +77,10 @@ class LigerRMSNormForGlm4(LigerRMSNorm):
         self, hidden_size, eps=1e-6, offset=0.0, casting_mode="llama", init_fn="ones", in_place=False, row_mode=None
     ):
         super().__init__(hidden_size, eps, offset, casting_mode, init_fn, in_place, row_mode)
+
+
+class LigerRMSNormForQwen3Next(LigerRMSNorm):
+    def __init__(
+        self, hidden_size, eps=1e-6, offset=1.0, casting_mode="gemma", init_fn="zeros", in_place=False, row_mode=None
+    ):
+        super().__init__(hidden_size, eps, offset, casting_mode, init_fn, in_place, row_mode)
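The new subclass only pins defaults; a hedged instantiation sketch (hidden_size is illustrative):

    from liger_kernel.transformers.rms_norm import LigerRMSNormForQwen3Next

    # Per the defaults above: zero-initialized weight with a +1.0 offset
    # (output scales by weight + 1) and Gemma-style fp32 casting.
    norm = LigerRMSNormForQwen3Next(hidden_size=2048, eps=1e-6)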
liger_kernel/transformers/rope.py
CHANGED
@@ -1,3 +1,8 @@
+from typing import Optional
+from typing import Tuple
+
+import torch
+
 from liger_kernel.ops.rope import LigerRopeFunction
 
 
@@ -18,3 +23,41 @@ def liger_rotary_pos_emb(q, k, cos, sin, position_ids=None, unsqueeze_dim=1):
     """
 
     return LigerRopeFunction.apply(q, k, cos, sin, position_ids, unsqueeze_dim)
+
+
+def liger_rotary_pos_emb_with_cast(
+    q: torch.Tensor,
+    k: torch.Tensor,
+    cos: torch.Tensor,
+    sin: torch.Tensor,
+    position_ids: Optional[torch.Tensor] = None,
+    unsqueeze_dim: int = 1,
+) -> Tuple[torch.Tensor, torch.Tensor]:
+    orig_q_dtype, orig_k_dtype = q.dtype, k.dtype
+
+    q32 = q.to(torch.float32)
+    k32 = k.to(torch.float32)
+    cos32 = cos.to(torch.float32)
+    sin32 = sin.to(torch.float32)
+
+    q_out, k_out = liger_rotary_pos_emb(q32, k32, cos32, sin32, position_ids=position_ids, unsqueeze_dim=unsqueeze_dim)
+    return q_out.to(orig_q_dtype), k_out.to(orig_k_dtype)
+
+
+def liger_rotary_pos_emb_with_cast_and_leading_batch(
+    q: torch.Tensor,
+    k: torch.Tensor,
+    cos: torch.Tensor,
+    sin: torch.Tensor,
+    position_ids: Optional[torch.Tensor] = None,
+    unsqueeze_dim: int = 1,
+) -> Tuple[torch.Tensor, torch.Tensor]:
+    orig_q_dtype, orig_k_dtype = q.dtype, k.dtype
+
+    q32 = q.to(torch.float32).unsqueeze(0)
+    k32 = k.to(torch.float32).unsqueeze(0)
+    cos32 = cos.to(torch.float32).unsqueeze(0)
+    sin32 = sin.to(torch.float32).unsqueeze(0)
+
+    q_out, k_out = liger_rotary_pos_emb(q32, k32, cos32, sin32, position_ids=position_ids, unsqueeze_dim=unsqueeze_dim)
+    return q_out.to(orig_q_dtype).squeeze(0), k_out.to(orig_k_dtype).squeeze(0)
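A usage sketch for the casting helper (the (batch, heads, seq, head_dim) layout and sizes are illustrative):

    import torch
    from liger_kernel.transformers.rope import liger_rotary_pos_emb_with_cast

    q = torch.randn(1, 32, 128, 64, dtype=torch.bfloat16, device="cuda")
    k = torch.randn(1, 8, 128, 64, dtype=torch.bfloat16, device="cuda")
    cos = torch.randn(1, 128, 64, dtype=torch.bfloat16, device="cuda")
    sin = torch.randn(1, 128, 64, dtype=torch.bfloat16, device="cuda")

    # Upcasts everything to fp32 for the kernel, then restores the input dtypes.
    q_rot, k_rot = liger_rotary_pos_emb_with_cast(q, k, cos, sin)
    assert q_rot.dtype == torch.bfloat16 and k_rot.dtype == torch.bfloat16

The _with_cast_and_leading_batch variant additionally unsqueezes a leading batch dimension before the kernel call and squeezes it back afterwards, for callers passing unbatched tensors.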
liger_kernel/transformers/swiglu.py
CHANGED
@@ -77,3 +77,20 @@ class LigerQwen3MoeSwiGLUMLP(nn.Module):
 
     def forward(self, x):
         return self.down_proj(LigerSiLUMulFunction.apply(self.gate_proj(x), self.up_proj(x)))
+
+
+class LigerHunyuanV1SwiGLUMLP(nn.Module):
+    def __init__(self, config, layer_idx=None, is_shared_mlp=False):
+        super().__init__()
+        self.config = config
+        self.hidden_size = config.hidden_size
+        self.intermediate_size = config.intermediate_size
+        self.gate_proj = nn.Linear(self.hidden_size, self.intermediate_size, bias=False)
+        self.up_proj = nn.Linear(self.hidden_size, self.intermediate_size, bias=False)
+        self.down_proj = nn.Linear(self.intermediate_size, self.hidden_size, bias=False)
+        self.layer_idx = layer_idx
+        if config.hidden_act not in ["silu", "swish"]:
+            raise ValueError(f"Activation function {config.hidden_act} not supported.")
+
+    def forward(self, x):
+        return self.down_proj(LigerSiLUMulFunction.apply(self.gate_proj(x), self.up_proj(x)))
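A hedged construction sketch; the SimpleNamespace stands in for the real HunyuanV1 HF config, and the sizes are illustrative:

    from types import SimpleNamespace
    from liger_kernel.transformers.swiglu import LigerHunyuanV1SwiGLUMLP

    # hidden_act must be "silu" or "swish", otherwise __init__ raises.
    cfg = SimpleNamespace(hidden_size=1024, intermediate_size=2816, hidden_act="silu")
    mlp = LigerHunyuanV1SwiGLUMLP(cfg, layer_idx=0)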
liger_kernel/transformers/tiled_mlp.py
ADDED
@@ -0,0 +1,133 @@
+from typing import Optional
+
+import torch.nn as nn
+
+from liger_kernel.ops.geglu import LigerGELUMulFunction
+from liger_kernel.ops.swiglu import LigerSiLUMulFunction
+from liger_kernel.ops.tiled_mlp import apply_tiled_mlp
+
+
+class LigerTiledGEGLUMLP(nn.Module):
+    """
+    Memory-efficient GEGLU MLP using tiled computation.
+
+    This module combines GEGLU activation with tiled processing to handle
+    very long sequences efficiently. The forward pass is recomputed during
+    backward to save memory.
+
+    Args:
+        config: Model configuration with hidden_size and intermediate_size attributes
+        num_shards: Number of shards to split the sequence. If None, automatically
+            calculated as ceil(seqlen / hidden_size)
+    """
+
+    def __init__(self, config, num_shards: Optional[int] = None):
+        super().__init__()
+        self.config = config
+        self.hidden_size = config.hidden_size
+        self.intermediate_size = config.intermediate_size
+        self.num_shards = num_shards
+
+        self.gate_proj = nn.Linear(self.hidden_size, self.intermediate_size, bias=False)
+        self.up_proj = nn.Linear(self.hidden_size, self.intermediate_size, bias=False)
+        self.down_proj = nn.Linear(self.intermediate_size, self.hidden_size, bias=False)
+
+        # Validate activation function
+        if hasattr(config, "hidden_act") and config.hidden_act not in [
+            "gelu",
+            "gelu_new",
+            "gelu_pytorch_tanh",
+        ]:
+            raise ValueError(f"LigerTiledGEGLUMLP requires GELU activation, got {config.hidden_act}")
+
+    def _mlp_forward(self, module, x):
+        """Internal MLP forward function for tiled computation."""
+        gate = module.gate_proj(x)
+        up = module.up_proj(x)
+        return module.down_proj(LigerGELUMulFunction.apply(gate, up))
+
+    def forward(self, x):
+        """
+        Forward pass with tiled computation.
+
+        Args:
+            x: Input tensor of shape [batch_size, seq_len, hidden_size]
+                or [seq_len, hidden_size]
+
+        Returns:
+            Output tensor of the same shape as input
+        """
+        compute_params = [
+            self.gate_proj.weight,
+            self.up_proj.weight,
+            self.down_proj.weight,
+        ]
+
+        return apply_tiled_mlp(
+            fn=self._mlp_forward,
+            mlp_module=self,
+            x=x,
+            num_shards=self.num_shards,
+            compute_params=compute_params,
+        )
+
+
+class LigerTiledSwiGLUMLP(nn.Module):
+    """
+    Memory-efficient SwiGLU MLP using tiled computation.
+
+    This module combines SwiGLU activation with tiled processing to handle
+    very long sequences efficiently. The forward pass is recomputed during
+    backward to save memory.
+
+    Args:
+        config: Model configuration with hidden_size and intermediate_size attributes
+        num_shards: Number of shards to split the sequence. If None, automatically
+            calculated as ceil(seqlen / hidden_size)
+    """
+
+    def __init__(self, config, num_shards: Optional[int] = None):
+        super().__init__()
+        self.config = config
+        self.hidden_size = config.hidden_size
+        self.intermediate_size = config.intermediate_size
+        self.num_shards = num_shards
+
+        self.gate_proj = nn.Linear(self.hidden_size, self.intermediate_size, bias=False)
+        self.up_proj = nn.Linear(self.hidden_size, self.intermediate_size, bias=False)
+        self.down_proj = nn.Linear(self.intermediate_size, self.hidden_size, bias=False)
+
+        # Validate activation function
+        if hasattr(config, "hidden_act") and config.hidden_act not in ["silu", "swish"]:
+            raise ValueError(f"LigerTiledSwiGLUMLP requires SiLU/Swish activation, got {config.hidden_act}")
+
+    def _mlp_forward(self, module, x):
+        """Internal MLP forward function for tiled computation."""
+        gate = module.gate_proj(x)
+        up = module.up_proj(x)
+        return module.down_proj(LigerSiLUMulFunction.apply(gate, up))
+
+    def forward(self, x):
+        """
+        Forward pass with tiled computation.
+
+        Args:
+            x: Input tensor of shape [batch_size, seq_len, hidden_size]
+                or [seq_len, hidden_size]
+
+        Returns:
+            Output tensor of the same shape as input
+        """
+        compute_params = [
+            self.gate_proj.weight,
+            self.up_proj.weight,
+            self.down_proj.weight,
+        ]
+
+        return apply_tiled_mlp(
+            fn=self._mlp_forward,
+            mlp_module=self,
+            x=x,
+            num_shards=self.num_shards,
+            compute_params=compute_params,
+        )
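A usage sketch for the tiled SwiGLU variant (config and sizes are illustrative; the GEGLU class is used the same way):

    import torch
    from types import SimpleNamespace
    from liger_kernel.transformers.tiled_mlp import LigerTiledSwiGLUMLP

    cfg = SimpleNamespace(hidden_size=512, intermediate_size=1408, hidden_act="silu")
    # With num_shards=None the module derives ceil(seq_len / hidden_size) shards.
    mlp = LigerTiledSwiGLUMLP(cfg, num_shards=4).to("cuda")
    x = torch.randn(1, 8192, 512, device="cuda", requires_grad=True)
    out = mlp(x)  # same shape as x; shards are recomputed during backward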
liger_kernel/utils.py
CHANGED
@@ -18,12 +18,37 @@ def infer_device():
     """
     if torch.cuda.is_available():  # Works for both Nvidia and AMD
         return "cuda"
+    # Use Ascend NPU if available (torch.npu)
+    elif is_npu_available():
+        return "npu"
+    # XPU (Intel) if available
     elif torch.xpu.is_available():
         return "xpu"
     else:
         return "cpu"
 
 
+def is_npu_available() -> bool:
+    """Detect Ascend NPU availability."""
+    try:
+        from transformers.utils import is_torch_npu_available
+
+        return is_torch_npu_available()
+    except Exception:
+        return False
+
+
+def get_npu_multi_processor_count() -> int:
+    """Return a heuristic multi-processor count for NPU."""
+    if is_npu_available():
+        NPU_MULTI_PROCESSOR_COUNT = 48
+        dev_props = torch.npu.get_device_properties()
+        # The vector_core_num attribute is supported in the torch.npu v7.2.0 release version.
+        return dev_props.vector_core_num if hasattr(dev_props, "vector_core_num") else NPU_MULTI_PROCESSOR_COUNT
+    # Reasonable default to avoid division by zero
+    return 1
+
+
 def transformers_version_dispatch(
     required_version: str,
     before_fn,
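The device-detection order after this change, as a sketch:

    from liger_kernel.utils import get_npu_multi_processor_count, infer_device

    device = infer_device()  # "cuda", then "npu", then "xpu", then "cpu"
    if device == "npu":
        # vector_core_num when torch.npu exposes it, else the 48 fallback;
        # returns 1 when no NPU is present.
        num_cores = get_npu_multi_processor_count()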
{liger_kernel_nightly-0.6.2.dev20251011154427.dist-info → liger_kernel_nightly-0.6.4.dev20251202054858.dist-info}/METADATA
CHANGED
@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: liger_kernel_nightly
-Version: 0.6.2.dev20251011154427
+Version: 0.6.4.dev20251202054858
 Summary: Efficient Triton kernels for LLM Training
 License: BSD 2-CLAUSE LICENSE
 Copyright 2024 LinkedIn Corporation
@@ -310,8 +310,11 @@ loss.backward()
 | Phi3 & Phi3.5 | `liger_kernel.transformers.apply_liger_kernel_to_phi3` | RoPE, RMSNorm, SwiGLU, CrossEntropyLoss, FusedLinearCrossEntropy |
 | Granite 3.0 & 3.1 | `liger_kernel.transformers.apply_liger_kernel_to_granite` | RoPE, RMSNorm, SwiGLU, CrossEntropyLoss |
 | OLMo2 | `liger_kernel.transformers.apply_liger_kernel_to_olmo2` | RoPE, RMSNorm, SwiGLU, CrossEntropyLoss, FusedLinearCrossEntropy |
+| Olmo3 | `liger_kernel.transformers.apply_liger_kernel_to_olmo3` | RoPE, RMSNorm, SwiGLU, CrossEntropyLoss, FusedLinearCrossEntropy |
 | GLM-4 | `liger_kernel.transformers.apply_liger_kernel_to_glm4` | RoPE, RMSNorm, SwiGLU, CrossEntropyLoss, FusedLinearCrossEntropy |
 | InternVL3 | `liger_kernel.transformers.apply_liger_kernel_to_internvl` | RoPE, RMSNorm, SwiGLU, CrossEntropyLoss, FusedLinearCrossEntropy |
+| HunyuanV1 | `liger_kernel.transformers.apply_liger_kernel_to_hunyuan_v1_dense` | RoPE, RMSNorm, SwiGLU, CrossEntropyLoss, FusedLinearCrossEntropy |
+| HunyuanV1 MoE | `liger_kernel.transformers.apply_liger_kernel_to_hunyuan_v1_moe` | RoPE, RMSNorm, SwiGLU, CrossEntropyLoss, FusedLinearCrossEntropy |
 
 
 ## Low-level APIs
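For the new table rows, a hedged patching sketch, assuming the new entry points accept the same keyword switches as the existing apply_liger_kernel_to_* functions (the checkpoint id is a placeholder):

    from transformers import AutoModelForCausalLM
    from liger_kernel.transformers import apply_liger_kernel_to_olmo3

    # Patch the HF Olmo3 modeling code in place before loading the model.
    apply_liger_kernel_to_olmo3(
        rope=True,
        rms_norm=True,
        swiglu=True,
        fused_linear_cross_entropy=True,
    )
    model = AutoModelForCausalLM.from_pretrained("<olmo3-checkpoint>")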
liger_kernel_nightly-0.6.4.dev20251202054858.dist-info/RECORD
ADDED
@@ -0,0 +1,118 @@
+liger_kernel/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+liger_kernel/env_report.py,sha256=uhdEC8OydxoZlb7B6YYcAaBF3crGFdIck-4cxaW4NJY,1728
+liger_kernel/utils.py,sha256=TW-OSkuSLrMAPPMZtOsRKBqZ7MCiSrkATB1z_p81Ets,2879
+liger_kernel/chunked_loss/README.md,sha256=0FmkFC3hKBqyoDT5uTlIYmrvRkF-EOCR1y-EBU1LpWU,2248
+liger_kernel/chunked_loss/__init__.py,sha256=J5_jNnzZ4gZmA38W5f_4oab7xMoNk1Xy-yh3X_Xlf-s,714
+liger_kernel/chunked_loss/cosine_similarity_loss.py,sha256=x2nprTHPraU8Ya2NMZtaDk9r-s-1NKJwCTrzQIdmg-8,4680
+liger_kernel/chunked_loss/cpo_loss.py,sha256=Gzz1eU4kgcbdubFVRy55e8A1Cr-r45UgNicXwZIjmBU,5454
+liger_kernel/chunked_loss/dpo_loss.py,sha256=I83khNs3QQjuhr8U3NIOAACkbse6DNiBV-TulPZ0lXw,9006
+liger_kernel/chunked_loss/functional.py,sha256=-XPDbLml9dHmvoSU2VNTUrBDFehuzvuAGPikVetBMtI,1132
+liger_kernel/chunked_loss/fused_linear_distillation.py,sha256=yRtolfFGfKB-SxGQQyF68GYXd11Zlvh1InLdGeWNFIE,12652
+liger_kernel/chunked_loss/fused_linear_ppo.py,sha256=baU19PwqO1FTVxwlB-eyJv6gOLtL7baXGzSncYQ8Ktc,14296
+liger_kernel/chunked_loss/fused_linear_preference.py,sha256=FIH85uUXAOgYx5Ax8MjFhJHVu-2pKtY7wSegd0zSyyY,18336
+liger_kernel/chunked_loss/fused_linear_unpaired_preference.py,sha256=RiuK3UtRwH9T6jZ36sA8Urj-TVuOLOO2syLg_JOQapY,13437
+liger_kernel/chunked_loss/grpo_loss.py,sha256=bmuZaNgqNbJ5pJGFDXWE-B4BGYF7xWVSN15UyCfuq_s,13079
+liger_kernel/chunked_loss/jsd_loss.py,sha256=G0RghPYYelyZ6DOEiwS8we9TT5MY2iHpiFqzZ2Xy87g,8038
+liger_kernel/chunked_loss/kto_loss.py,sha256=llVCe6DkcpCo57seGWoMikaQVFApx764jsmSbQyqwQY,7529
+liger_kernel/chunked_loss/orpo_loss.py,sha256=nu9UYG16dcMw93lvHi4_hYs3Q0FK1KnlmMRj7OpYU8s,4872
+liger_kernel/chunked_loss/simpo_loss.py,sha256=fy2w8KbhMrBv7b1jdIeH3bBFxY52bPQPZb3KwBvmurM,5385
+liger_kernel/ops/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+liger_kernel/ops/cross_entropy.py,sha256=J0OaI9b3l1H5FPeLft84XGz4g-WhMbrKXGo3wqlWwd0,22692
+liger_kernel/ops/dyt.py,sha256=4XmkCCZaPPM8Tl4QHo6vSF2m68jrwsnjucrbyOJvZpM,5628
+liger_kernel/ops/fused_add_rms_norm.py,sha256=lvwrLsKvoAQqS9KatgBkAyy0Xdecado-g0rvXYXaBak,14237
+liger_kernel/ops/fused_linear_cross_entropy.py,sha256=YepeWqX37gKc1-FUrzkDTzXYdOvmBmfv4KgL__KN_UI,16158
+liger_kernel/ops/fused_linear_jsd.py,sha256=CSoprxb-YcJy-YUKiTcYkxN8sb9h2kdk_iHuncvSV5c,9683
+liger_kernel/ops/fused_neighborhood_attention.py,sha256=vPi5xbnh6wxyZehaqo6Tuilqo2fN5SGDiONjnNmIKqs,35556
+liger_kernel/ops/geglu.py,sha256=z-t9OMk3SDL5sJenJjCzhGEeGusL22j3nDjTDEUDAz8,4219
+liger_kernel/ops/group_norm.py,sha256=zoy-TcNkYtKGmGhTFJmnyiG_4Es4ZphpqP8jtUSI6-I,10912
+liger_kernel/ops/grpo_loss.py,sha256=2SyOujtF9I3xiNo4wFf4s6MeiDotE_qeYfRWgj_bOBE,9573
+liger_kernel/ops/jsd.py,sha256=onHp5T3MbvJaVz5Vup7Ww6EQp_HTaZeayTjJk6FgQMY,7042
+liger_kernel/ops/kl_div.py,sha256=ZjGdDLKWksHT9dZ0xF_TDgAkj5cuMTwwT5tr9E-_24o,8734
+liger_kernel/ops/layer_norm.py,sha256=-4UEyko9eKgBi5LNmfdEU2hTpJOWVnEy5iYjJkMvHmk,10598
+liger_kernel/ops/llama4_rope.py,sha256=-aqdZzllklTN8b9--e-TsWY_ntGCN8-tyseT4x0bd8s,8223
+liger_kernel/ops/multi_token_attention.py,sha256=Oz_RXDp-OSS_R_HuGmaETHdAJ7Toda_70OfE7TXMUlY,7645
+liger_kernel/ops/poly_norm.py,sha256=5IdJEZnbbhblkL_X8UhSD4A2CooQbOAZJw8nAekWNs4,11372
+liger_kernel/ops/qwen2vl_mrope.py,sha256=3GExhYpLgB4VUtyZyjRk8XjEur3W4EWF6HQ67ML5vBU,8481
+liger_kernel/ops/rms_norm.py,sha256=2V8qheEvidBm0VxwfOoAnV837F6blmtTgP91VYdGs7c,19211
+liger_kernel/ops/rope.py,sha256=v-7JHRrv-5ImoROkpKfl30WwWI4qTa2tAl7zQeB4ml4,8956
+liger_kernel/ops/softmax.py,sha256=tgORx6MK1IDDtZKqGarj0IPIVjqAIEUXXYPiinhRdtI,5864
+liger_kernel/ops/sparsemax.py,sha256=AeWe1xgkHJFEKWTj2vu_0hj7LztGvjqXAps-QTpCY0U,5087
+liger_kernel/ops/swiglu.py,sha256=D7nd4u_LInwsIRNCDdY77lqnTz8-W5dJrpEAt8zEO_A,3033
+liger_kernel/ops/tiled_mlp.py,sha256=eyMFsFFgHch8a_6R6IYRG24_jqKg5GF_BQUoQuAG8SY,4529
+liger_kernel/ops/tvd.py,sha256=FHJtLQI95ijqgg9UtaHpMAjSCiPxB6CduPwPMcGxelc,6405
+liger_kernel/ops/utils.py,sha256=kYp84AOA7D9PYrvBUSrNsfQIt8elr_uA9OxCkbfiUFA,3980
+liger_kernel/ops/experimental/embedding.py,sha256=tolj3tItkzpSb30zWqDN2_yX4ectflaQ8HMyKyFIQc8,4172
+liger_kernel/ops/experimental/mm_int8int2.py,sha256=TrS9lpwekrik_w5qE7AhMJD1bcq-OidjtbsW80oZ6IM,13314
+liger_kernel/transformers/__init__.py,sha256=CgwhrY5cdx6OcRgR2ZZJbOIkLswQWPTr-BAaoxDNNOY,10687
+liger_kernel/transformers/auto_model.py,sha256=0qCTRZt280Bj_LcFdzo9hlaR-BWNazawXOGgoCZjgEg,1545
+liger_kernel/transformers/cross_entropy.py,sha256=DMtHkKrVJDSsels7KgGQJqrXkEAd6Zopcdr-5oRmQgE,2010
+liger_kernel/transformers/dyt.py,sha256=i-4GPaMrl-jab9TVI5qN0-H9qycn_mCbV82ozU4nbmU,723
+liger_kernel/transformers/fsdp.py,sha256=CUiyjTmjkjY7pLXQv8ly9rnzgXw6529csd9pvtJNMYc,3096
+liger_kernel/transformers/functional.py,sha256=OqEmsDkaV3YiXaw1zqjDvHcC9_tU5TBrmhCNPOdgHQY,8590
+liger_kernel/transformers/fused_add_rms_norm.py,sha256=7_Bzg-x6lLe6W1qG2DtjDALhEpNZlC6N5GppEs9cTYY,1199
+liger_kernel/transformers/fused_linear_cross_entropy.py,sha256=Hhp9XGgMKZhvlkjHY5Jkl_T7fSyJoCL9m5c3z_9mflQ,2347
+liger_kernel/transformers/fused_linear_jsd.py,sha256=bZ4otCvWBuOnA5XdQL-FzZVItJlDt-ht9e_pG7PG93E,3999
+liger_kernel/transformers/fused_neighborhood_attention.py,sha256=TxYDUAt9B6WSP14aJP66C_2Mbds2sSIPGnamhUSTrC8,7957
+liger_kernel/transformers/geglu.py,sha256=mrgqzIUVd6lN7fkDKLkw5YaESDxDtFgbot430WwPVOQ,1107
+liger_kernel/transformers/group_norm.py,sha256=6qMAWOprr4SzP0YhNVNGQIBpM5aUHplUD2VuGJrMBz0,2173
+liger_kernel/transformers/grpo_loss.py,sha256=QS6Ycct1E2yMfqoHPBa2sUAu5cmweNPK_-Q_KJE8hb4,6098
+liger_kernel/transformers/jsd.py,sha256=DGqRnxIZxsvxo0_tbbxX3b-sDbDjC_yKufyRIHCcScY,2979
+liger_kernel/transformers/kl_div.py,sha256=WLffFbh1EExD2Eb1F7lN11fo9JJC-0751WJjZAF1Fj8,409
+liger_kernel/transformers/layer_norm.py,sha256=c9pk3PEasOKYR0rhe5e5nNrnYKVCEW4VC8S6LpCq9EQ,906
+liger_kernel/transformers/llama4_rope.py,sha256=kS6PSHEwf3dS7hD7C7p8S0geugx2EMCiP0h0F7LsUoY,3639
+liger_kernel/transformers/monkey_patch.py,sha256=4LV6LSz_AAop6HWk1spZm1QigPN9nUDPJu9tK21-jIo,132446
+liger_kernel/transformers/multi_token_attention.py,sha256=K3NIY9_5TPgZ4_Rahn0xnkMXxD_fmlJHK4CWGYvGQp0,1752
+liger_kernel/transformers/poly_norm.py,sha256=g5tC75i3qy1_N26ZUP-jfpct7ivQAEdJfIfx8IXzeyE,1377
+liger_kernel/transformers/qwen2vl_mrope.py,sha256=5EwSqrMdsL9MYspeBMXBsNJKvH0MOmRrtJXAJlnnlOI,1047
+liger_kernel/transformers/rms_norm.py,sha256=HwddVqrqS58jE-M2_4NkFGARtCDBhGnkKyjBN9b3FYI,3004
+liger_kernel/transformers/rope.py,sha256=VMlDZI6zss9mLaLcN5XCE_ktmYRwAi_Eh4TIgO6NrIQ,2361
+liger_kernel/transformers/softmax.py,sha256=yadlAgE4V2JByMwrDDa2s5SUBp8Jgd57xwnVvAWoBaI,264
+liger_kernel/transformers/sparsemax.py,sha256=0lQA0UEOs4mu8CMruZ3VLhImxQVXJWhPsAKUsYA7vj8,403
+liger_kernel/transformers/swiglu.py,sha256=dRR69wDWSWfdjtnsTECyxQqWVo5QkdXdXm9SpSQ4Jvw,4291
+liger_kernel/transformers/tiled_mlp.py,sha256=J51-kpzwikDMMhT5bX-RZCKMaXBK6zZc1bhgRYTK5F0,4651
+liger_kernel/transformers/trainer_integration.py,sha256=W3ON51O5GkyzNJsItz0y5rKx-uy2f2cFfveZpqbUdhw,123
+liger_kernel/transformers/tvd.py,sha256=XrRfyJIqN6HFxXk8MYyFVZM1OLz3mtSbRZvWfZ_JerQ,450
+liger_kernel/transformers/experimental/__init__.py,sha256=oQqk-f32JYgWEP9DJCj6ty6bbJSGrdXsFDQFwGeX6vI,127
+liger_kernel/transformers/experimental/embedding.py,sha256=2P0QYdlFyFrG5OqTzTa1wcRgDSyjBMv5i1a7BrDPDQw,881
+liger_kernel/transformers/model/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+liger_kernel/transformers/model/falcon_h1.py,sha256=heUZ4wUt2ATmtBtmv8Rcro3pQl6fV9T0pburjTTW7os,5004
+liger_kernel/transformers/model/gemma.py,sha256=pAri4PYpknsFfkvyo8Ez2NNlqrUDW-KkExUXTGZAcH4,10621
+liger_kernel/transformers/model/gemma2.py,sha256=qa9Ok42vFojVGNmASTH3Ek566Vu507kjd--ZpZDKX9M,12024
+liger_kernel/transformers/model/gemma3.py,sha256=mEV3Kuy-dqfTk_b899Vb-InuD4_DvwH0nm5xgbG-0MM,14911
+liger_kernel/transformers/model/glm4.py,sha256=bSp22iPIjsli4-c_usUOsyh1Bs2gIK8X6ynS0azseUs,5900
+liger_kernel/transformers/model/glm4v.py,sha256=dd-BQpccDCp1SbIxcJ5rG8xcwYQK3KOv1Tgm9TGnZc4,6594
+liger_kernel/transformers/model/glm4v_moe.py,sha256=zKhMdOOrRhlrvCSFaeVYfddL1ubpY8edEO91TN81n98,7135
+liger_kernel/transformers/model/hunyuan_v1.py,sha256=MJvP9xkUFePIV0HLETJM4YPbVCEPkAE1ZI5Jxyiebh0,5731
+liger_kernel/transformers/model/internvl.py,sha256=OOutracs9qrPHSU7FVYar08yinvGrHQVPvo39JEws6w,6473
+liger_kernel/transformers/model/llama.py,sha256=kqZeONzwTBzudoChlKMzq1w23BtYGbxWZC1l1V__JTw,13410
+liger_kernel/transformers/model/llama4.py,sha256=PfkynGVI0xxMs3EtyYpCgaALI6stu25OIrTIymE-pvg,4853
+liger_kernel/transformers/model/llava.py,sha256=yoADM_BuIEummtTDiwWqjfUjXUMZD78VJzS0TRj5GJ4,15687
+liger_kernel/transformers/model/loss_utils.py,sha256=mAV6NsE1xR2smQMlr_n9afh4ek3BhIfieZdTn1Z-9Fw,2836
+liger_kernel/transformers/model/mistral.py,sha256=OcwOzVDMwwDbVccVPv-AaocznzWwzLT3aRaKK5SMaAg,6030
+liger_kernel/transformers/model/mixtral.py,sha256=YcBDoTEJDgLFJ_RTo180DYGxR8D5Ad9-idumif7kCPE,12130
+liger_kernel/transformers/model/mllama.py,sha256=vAHwCm63sn4kpAY0rDGf_N0HR7KRTBVpBYDVTPOaZTg,12079
+liger_kernel/transformers/model/olmo2.py,sha256=-h2bUOeuPfY1MdShdRvq5_wFDHKP4PEimgIl0fL-BT4,5902
+liger_kernel/transformers/model/olmo3.py,sha256=k2zYOlS8U_b5MwjdToB3tDRQ0bH_mWapVQqJcH8-qAo,6007
+liger_kernel/transformers/model/output_classes.py,sha256=0BGXVR4dYQpSHLkSqpRoXuHMryrceGSlTYRu6pvd8ZY,4542
+liger_kernel/transformers/model/paligemma.py,sha256=r0smHLADkEwfLS6d6ArWoSWEeLt2d_8pmgOO5F04b1o,20793
+liger_kernel/transformers/model/phi3.py,sha256=PT7Kw6yySg-7TsssWfi82eVMN3SWujCqzCqHigAdfeQ,4574
+liger_kernel/transformers/model/qwen2.py,sha256=ojqdJpD3A9A5uCS0N_rSq8gyNYWSsHfuvx3Z3ObC7ss,10686
+liger_kernel/transformers/model/qwen2_5_vl.py,sha256=FbIZDcg9cOr4PtBLNN8yVubN-gu2clndjSIzfi8NMos,6894
+liger_kernel/transformers/model/qwen2_vl.py,sha256=967Ex4Scm0ehhiVxOtjwfj396nD9xkAwFwHcoURH6-o,6578
+liger_kernel/transformers/model/qwen3.py,sha256=1fvioVmq5CRZSIuTd7uuLet-fti9ee3r8eLibvfNTcQ,5769
+liger_kernel/transformers/model/qwen3_moe.py,sha256=yljJO4kyeM5U2Q4pXH3Mmq71ZFEC_Z73qgBx1-an-o8,6457
+liger_kernel/transformers/model/qwen3_next.py,sha256=TayfD91GVLA1-fJwtVl6vMZgkUTYLQYURMRGBdCtnFc,6331
+liger_kernel/transformers/model/qwen3_vl.py,sha256=sUIdJ-32IlFm_4pHv6PpLgVafqBS0QeJm_91tY67NdY,6646
+liger_kernel/transformers/model/qwen3_vl_moe.py,sha256=CJEFcwBqItSEw9NA0mhEozlDTgIuJQ6VTjgkh5iLZ78,4856
+liger_kernel/transformers/model/smollm3.py,sha256=1ewDY-99UAFJEfoeqfZxDcxjkqKYUSr5b7X-E_2BLLs,8126
+liger_kernel/transformers/model/smolvlm.py,sha256=yFpPKawLVo3zXzLjM7Y_T8FyRrPxVyp-YPFMM8m3k0c,6734
+liger_kernel/transformers/trainer/__init__.py,sha256=p7yQfklV8-467qSz_ZMimkbDF7HHWHwku25A-GYL0WU,193
+liger_kernel/transformers/trainer/orpo_trainer.py,sha256=tX0h63aOFe3rNqTmk6JpMf75UPo981yzEa6TghnjS0Q,5370
+liger_kernel/triton/__init__.py,sha256=qCiCamzCRv6lpV8IqpAc9YMdNKC7GKurClWceQPnlis,92
+liger_kernel/triton/monkey_patch.py,sha256=Rd0hUHAzDkFfHvnX7-PBaNK5EKnZhtfM_h-fgQH9HPY,1568
+liger_kernel_nightly-0.6.4.dev20251202054858.dist-info/LICENSE,sha256=OhzLDHJ0to4a8sodVLELZiCFylZ1NAAYLs-HrjPy0ag,1312
+liger_kernel_nightly-0.6.4.dev20251202054858.dist-info/METADATA,sha256=-19QGcupE4UryR0Ai5X2O-1WMIQ_fFzCjNiUblQI27o,25238
+liger_kernel_nightly-0.6.4.dev20251202054858.dist-info/NOTICE,sha256=njwnoPZLh9AN8SJQzxvCGLHi-8X__AvWRze6joNXIY8,2066
+liger_kernel_nightly-0.6.4.dev20251202054858.dist-info/WHEEL,sha256=iAkIy5fosb7FzIOwONchHf19Qu7_1wCWyFNR5gu9nU0,91
+liger_kernel_nightly-0.6.4.dev20251202054858.dist-info/top_level.txt,sha256=2eghu4hA3LnkM7ElW92tQ8zegWKgSbeo-k-aGe1YnvY,13
+liger_kernel_nightly-0.6.4.dev20251202054858.dist-info/RECORD,,
liger_kernel_nightly-0.6.2.dev20251011154427.dist-info/RECORD
REMOVED
@@ -1,107 +0,0 @@
-liger_kernel/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-liger_kernel/env_report.py,sha256=uhdEC8OydxoZlb7B6YYcAaBF3crGFdIck-4cxaW4NJY,1728
-liger_kernel/utils.py,sha256=BQleeZWHSZPNuPcYcoZTOp1kcNEZONZilPP5-AmjgWI,2024
-liger_kernel/chunked_loss/README.md,sha256=0FmkFC3hKBqyoDT5uTlIYmrvRkF-EOCR1y-EBU1LpWU,2248
-liger_kernel/chunked_loss/__init__.py,sha256=J5_jNnzZ4gZmA38W5f_4oab7xMoNk1Xy-yh3X_Xlf-s,714
-liger_kernel/chunked_loss/cosine_similarity_loss.py,sha256=pZ07OQ6RI-c8uk96tDRlUXdt31-da7yWhfwircZlKRw,4198
-liger_kernel/chunked_loss/cpo_loss.py,sha256=Gzz1eU4kgcbdubFVRy55e8A1Cr-r45UgNicXwZIjmBU,5454
-liger_kernel/chunked_loss/dpo_loss.py,sha256=I83khNs3QQjuhr8U3NIOAACkbse6DNiBV-TulPZ0lXw,9006
-liger_kernel/chunked_loss/functional.py,sha256=-XPDbLml9dHmvoSU2VNTUrBDFehuzvuAGPikVetBMtI,1132
-liger_kernel/chunked_loss/fused_linear_distillation.py,sha256=ooR-qnZCyWJN935oHCSWLaKKKyaYERyhNczRGi1VOiw,11935
-liger_kernel/chunked_loss/fused_linear_ppo.py,sha256=ZjpNP5VC-tXXIKb4AckkQ3iWWQeej-JoG4StJq3N0wg,13650
-liger_kernel/chunked_loss/fused_linear_preference.py,sha256=FIH85uUXAOgYx5Ax8MjFhJHVu-2pKtY7wSegd0zSyyY,18336
-liger_kernel/chunked_loss/fused_linear_unpaired_preference.py,sha256=RiuK3UtRwH9T6jZ36sA8Urj-TVuOLOO2syLg_JOQapY,13437
-liger_kernel/chunked_loss/grpo_loss.py,sha256=SkZuKoW8K94UbWR-OtfopsQkuQ8tFOr_90AGR6_Mhes,12844
-liger_kernel/chunked_loss/jsd_loss.py,sha256=gRhnmB8xwuz7FcMJi5v5eyBsq01owaCbcyyrF4rYtY0,7133
-liger_kernel/chunked_loss/kto_loss.py,sha256=llVCe6DkcpCo57seGWoMikaQVFApx764jsmSbQyqwQY,7529
-liger_kernel/chunked_loss/orpo_loss.py,sha256=nu9UYG16dcMw93lvHi4_hYs3Q0FK1KnlmMRj7OpYU8s,4872
-liger_kernel/chunked_loss/simpo_loss.py,sha256=fy2w8KbhMrBv7b1jdIeH3bBFxY52bPQPZb3KwBvmurM,5385
-liger_kernel/ops/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-liger_kernel/ops/cross_entropy.py,sha256=OVkani9JEmCJ8IHN3UgJKzGW7zxJWDwy1EaWVcbShgQ,19517
-liger_kernel/ops/dyt.py,sha256=gCLz4S8aul8SY9nvIGaoK67aGb7U9MJRQdo3ONqmQYs,5417
-liger_kernel/ops/fused_add_rms_norm.py,sha256=UBqmlqFCmhSAIpkNKd8rrfXatX7Z4J9bp2dX9A0lrJQ,14017
-liger_kernel/ops/fused_linear_cross_entropy.py,sha256=PqIPHU8EjkHRJF6cNZViDucFVOgqo7eanJxB53Npke8,14388
-liger_kernel/ops/fused_linear_jsd.py,sha256=CSoprxb-YcJy-YUKiTcYkxN8sb9h2kdk_iHuncvSV5c,9683
-liger_kernel/ops/fused_neighborhood_attention.py,sha256=vPi5xbnh6wxyZehaqo6Tuilqo2fN5SGDiONjnNmIKqs,35556
-liger_kernel/ops/geglu.py,sha256=r0WSq9E93zzynL44Wh8femzOWK07_SseBM_pJUyxT3s,4144
-liger_kernel/ops/group_norm.py,sha256=qD4D4lSjSgVtO52EBNLC2iTseALRgPgqXE50U2woggk,10837
-liger_kernel/ops/grpo_loss.py,sha256=anRnv7k1-AV3pCC6_TqP0GMg78YYUfRAJrbpx6PVhl0,9448
-liger_kernel/ops/jsd.py,sha256=onHp5T3MbvJaVz5Vup7Ww6EQp_HTaZeayTjJk6FgQMY,7042
-liger_kernel/ops/kl_div.py,sha256=ZjGdDLKWksHT9dZ0xF_TDgAkj5cuMTwwT5tr9E-_24o,8734
-liger_kernel/ops/layer_norm.py,sha256=WmiORsIyufOhazmYZTPjeSc5Z-xTAYwXAKqUcCv_dlY,9807
-liger_kernel/ops/llama4_rope.py,sha256=-aqdZzllklTN8b9--e-TsWY_ntGCN8-tyseT4x0bd8s,8223
-liger_kernel/ops/multi_token_attention.py,sha256=Oz_RXDp-OSS_R_HuGmaETHdAJ7Toda_70OfE7TXMUlY,7645
-liger_kernel/ops/qwen2vl_mrope.py,sha256=3GExhYpLgB4VUtyZyjRk8XjEur3W4EWF6HQ67ML5vBU,8481
-liger_kernel/ops/rms_norm.py,sha256=DtvsWN5YktFAoc0JYSAwVeoZfryBFJlX-ipU7ooP01A,18891
-liger_kernel/ops/rope.py,sha256=v-7JHRrv-5ImoROkpKfl30WwWI4qTa2tAl7zQeB4ml4,8956
-liger_kernel/ops/softmax.py,sha256=tgORx6MK1IDDtZKqGarj0IPIVjqAIEUXXYPiinhRdtI,5864
-liger_kernel/ops/sparsemax.py,sha256=AeWe1xgkHJFEKWTj2vu_0hj7LztGvjqXAps-QTpCY0U,5087
-liger_kernel/ops/swiglu.py,sha256=D7nd4u_LInwsIRNCDdY77lqnTz8-W5dJrpEAt8zEO_A,3033
-liger_kernel/ops/tvd.py,sha256=FHJtLQI95ijqgg9UtaHpMAjSCiPxB6CduPwPMcGxelc,6405
-liger_kernel/ops/utils.py,sha256=uoFKQqo-34N2TWQNvXMFywqGiOMMXNEVBxVojzlUAa0,3836
-liger_kernel/ops/experimental/embedding.py,sha256=tolj3tItkzpSb30zWqDN2_yX4ectflaQ8HMyKyFIQc8,4172
-liger_kernel/ops/experimental/mm_int8int2.py,sha256=TrS9lpwekrik_w5qE7AhMJD1bcq-OidjtbsW80oZ6IM,13314
-liger_kernel/transformers/__init__.py,sha256=h-VQCbsM-T8l8jApA6mTsJdTnd3VeL14pUAdheruaiU,9010
-liger_kernel/transformers/auto_model.py,sha256=0qCTRZt280Bj_LcFdzo9hlaR-BWNazawXOGgoCZjgEg,1545
-liger_kernel/transformers/cross_entropy.py,sha256=z3KTWQnFxr_IZaVjtYt0ZNEWQdDdYThN35xWkHlDGH0,1683
-liger_kernel/transformers/dyt.py,sha256=i-4GPaMrl-jab9TVI5qN0-H9qycn_mCbV82ozU4nbmU,723
-liger_kernel/transformers/fsdp.py,sha256=CUiyjTmjkjY7pLXQv8ly9rnzgXw6529csd9pvtJNMYc,3096
-liger_kernel/transformers/functional.py,sha256=-vpz95wbv5wLpInjSG06KNHETsEgKnRIiV-lMYHVs68,7841
-liger_kernel/transformers/fused_add_rms_norm.py,sha256=7_Bzg-x6lLe6W1qG2DtjDALhEpNZlC6N5GppEs9cTYY,1199
-liger_kernel/transformers/fused_linear_cross_entropy.py,sha256=toa54dpmJduoZLhU3lJA-HPZ03MYcMKekDWPcdYjvYA,2020
-liger_kernel/transformers/fused_linear_jsd.py,sha256=bZ4otCvWBuOnA5XdQL-FzZVItJlDt-ht9e_pG7PG93E,3999
-liger_kernel/transformers/fused_neighborhood_attention.py,sha256=TxYDUAt9B6WSP14aJP66C_2Mbds2sSIPGnamhUSTrC8,7957
-liger_kernel/transformers/geglu.py,sha256=mrgqzIUVd6lN7fkDKLkw5YaESDxDtFgbot430WwPVOQ,1107
-liger_kernel/transformers/group_norm.py,sha256=6qMAWOprr4SzP0YhNVNGQIBpM5aUHplUD2VuGJrMBz0,2173
-liger_kernel/transformers/grpo_loss.py,sha256=uAkUNKSnUGEOqa82L9w2e6AI1kcmG8K45-QxyaT8zhM,3897
-liger_kernel/transformers/jsd.py,sha256=DGqRnxIZxsvxo0_tbbxX3b-sDbDjC_yKufyRIHCcScY,2979
-liger_kernel/transformers/kl_div.py,sha256=WLffFbh1EExD2Eb1F7lN11fo9JJC-0751WJjZAF1Fj8,409
-liger_kernel/transformers/layer_norm.py,sha256=c9pk3PEasOKYR0rhe5e5nNrnYKVCEW4VC8S6LpCq9EQ,906
-liger_kernel/transformers/llama4_rope.py,sha256=kS6PSHEwf3dS7hD7C7p8S0geugx2EMCiP0h0F7LsUoY,3639
-liger_kernel/transformers/monkey_patch.py,sha256=L5mq5mL0GC62bxthN7p4Db5l7NogFE-1JsbZsr4GGik,105877
-liger_kernel/transformers/multi_token_attention.py,sha256=K3NIY9_5TPgZ4_Rahn0xnkMXxD_fmlJHK4CWGYvGQp0,1752
-liger_kernel/transformers/qwen2vl_mrope.py,sha256=5EwSqrMdsL9MYspeBMXBsNJKvH0MOmRrtJXAJlnnlOI,1047
-liger_kernel/transformers/rms_norm.py,sha256=vkekcvTeWY8vL4H6hg3t0XeY0Ew_3OFMPHuzqlxPPVw,2719
-liger_kernel/transformers/rope.py,sha256=ZTrTORSAyfcFIKjk6XEeYmk4ROH7xXED9L4g2NFntlE,999
-liger_kernel/transformers/softmax.py,sha256=yadlAgE4V2JByMwrDDa2s5SUBp8Jgd57xwnVvAWoBaI,264
-liger_kernel/transformers/sparsemax.py,sha256=0lQA0UEOs4mu8CMruZ3VLhImxQVXJWhPsAKUsYA7vj8,403
-liger_kernel/transformers/swiglu.py,sha256=LZ8YeLIdv2k46JleZMjzubGk98smt6t780kSgcVLsQk,3454
-liger_kernel/transformers/trainer_integration.py,sha256=W3ON51O5GkyzNJsItz0y5rKx-uy2f2cFfveZpqbUdhw,123
-liger_kernel/transformers/tvd.py,sha256=XrRfyJIqN6HFxXk8MYyFVZM1OLz3mtSbRZvWfZ_JerQ,450
-liger_kernel/transformers/experimental/__init__.py,sha256=oQqk-f32JYgWEP9DJCj6ty6bbJSGrdXsFDQFwGeX6vI,127
-liger_kernel/transformers/experimental/embedding.py,sha256=2P0QYdlFyFrG5OqTzTa1wcRgDSyjBMv5i1a7BrDPDQw,881
-liger_kernel/transformers/model/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-liger_kernel/transformers/model/falcon_h1.py,sha256=DTzfT-5OzQ6I-pU80Vn5e5ibd1EOEbJV5cMTJFhfwFg,4302
-liger_kernel/transformers/model/gemma.py,sha256=WryzpVmCm2H_XgLKNu3jJ6gVawjQDjapTetg4WHlbR4,10078
-liger_kernel/transformers/model/gemma2.py,sha256=eOQEfJBKezJNNrirhkPSagGxr9qj_y4lENOZgjUZKpE,11471
-liger_kernel/transformers/model/gemma3.py,sha256=-tvZw88S-STqmvdim-xrZZRJ17KLWoge_73ilIvhpIU,14157
-liger_kernel/transformers/model/glm4.py,sha256=2TBM5-4URpj6uX96G1AZ_DrjAmQtgLwXGzBvaXtfwdk,5328
-liger_kernel/transformers/model/glm4v.py,sha256=nlgEMOBjFEOu7a-cwwp9mWhTFqIs3QrOvcxW-uaPq-s,6022
-liger_kernel/transformers/model/glm4v_moe.py,sha256=q3-R_FoQPayS85AriJWWebblXB6Ix9fvxhSrI3mHiz4,6237
-liger_kernel/transformers/model/internvl.py,sha256=Uv8KGXOz9NhiKVZDeRNzAJH5kRuMZikUbswWM9u5KM0,6069
-liger_kernel/transformers/model/llama.py,sha256=L_VuaxxFJpzEmpLnaqwBbI5-Q14Qgfj-ufhLydCWgdk,12903
-liger_kernel/transformers/model/llama4.py,sha256=epEO_VD1gJCDovabSIQLxxncoh-TQTBfj-UgIlR5c7U,4281
-liger_kernel/transformers/model/llava.py,sha256=t6kMiyBkteVam-ltiod2f1mevj8l8ZHxYDvfu9C_lEk,15196
-liger_kernel/transformers/model/loss_utils.py,sha256=02RVkPI7Qs4ZP4yU_udCAvD_2hgIaHmxremRKe3N7EE,1885
-liger_kernel/transformers/model/mistral.py,sha256=XmM4N21RIOkJ9PJ4PZ3DcRUhGUczn_lbx0plf1zeHb0,5571
-liger_kernel/transformers/model/mixtral.py,sha256=SLdLO81AZL7zror0LXLkn2PHqKzjwMMs4kALNqoaT00,11571
-liger_kernel/transformers/model/mllama.py,sha256=5q8q2BxQR_8hNZ83XrJIbndw-l6T7ZyFLM7OCv_uPK0,11593
-liger_kernel/transformers/model/olmo2.py,sha256=9O1Cze2B6ON-i1jgjQwjpS_WsDEK0PzL003s-MkevWA,5330
-liger_kernel/transformers/model/paligemma.py,sha256=mnTnSmEDla_bbVmPFmqhNVT__Cuf-TM-KLGFUa1sU-4,19967
-liger_kernel/transformers/model/phi3.py,sha256=L4gG8htOABmaxzcmHph0bBFCACRvL9r6wuDVFXi2o7Q,4117
-liger_kernel/transformers/model/qwen2.py,sha256=lgn0X6EzAZUhOv17ZDD9choIDdaPVIAsIrrdvwzWXqs,10033
-liger_kernel/transformers/model/qwen2_5_vl.py,sha256=Ea3zvL1FJfjlaerpeXCq-1zmorrajwNsR-XsgWr4fFQ,6465
-liger_kernel/transformers/model/qwen2_vl.py,sha256=ZeasFPGs-bxm2Y_E15mo0YNx5wwtKYDV-bjVKjkLPBk,6018
-liger_kernel/transformers/model/qwen3.py,sha256=Q2aOg5erPrgVgRcqJm8sefLSDtvU1AD5B7aJnP7mRMM,4956
-liger_kernel/transformers/model/qwen3_moe.py,sha256=1CwTMCNFDYsjGoa_aHFBagtC5HuJTV-s0__5UvcjD3A,5686
-liger_kernel/transformers/model/smollm3.py,sha256=0KWVkDtXbjsBKhJnaquV6vUUYyLtfmNwYH0sxJt-qTk,7667
-liger_kernel/transformers/trainer/__init__.py,sha256=p7yQfklV8-467qSz_ZMimkbDF7HHWHwku25A-GYL0WU,193
-liger_kernel/transformers/trainer/orpo_trainer.py,sha256=tX0h63aOFe3rNqTmk6JpMf75UPo981yzEa6TghnjS0Q,5370
-liger_kernel/triton/__init__.py,sha256=qCiCamzCRv6lpV8IqpAc9YMdNKC7GKurClWceQPnlis,92
-liger_kernel/triton/monkey_patch.py,sha256=Rd0hUHAzDkFfHvnX7-PBaNK5EKnZhtfM_h-fgQH9HPY,1568
-liger_kernel_nightly-0.6.2.dev20251011154427.dist-info/LICENSE,sha256=OhzLDHJ0to4a8sodVLELZiCFylZ1NAAYLs-HrjPy0ag,1312
-liger_kernel_nightly-0.6.2.dev20251011154427.dist-info/METADATA,sha256=3CtD4mdR4zhG-Dj4OQESjqTdQrC1_w-gVsOuzIosGW8,24777
-liger_kernel_nightly-0.6.2.dev20251011154427.dist-info/NOTICE,sha256=njwnoPZLh9AN8SJQzxvCGLHi-8X__AvWRze6joNXIY8,2066
-liger_kernel_nightly-0.6.2.dev20251011154427.dist-info/WHEEL,sha256=iAkIy5fosb7FzIOwONchHf19Qu7_1wCWyFNR5gu9nU0,91
-liger_kernel_nightly-0.6.2.dev20251011154427.dist-info/top_level.txt,sha256=2eghu4hA3LnkM7ElW92tQ8zegWKgSbeo-k-aGe1YnvY,13
-liger_kernel_nightly-0.6.2.dev20251011154427.dist-info/RECORD,,
Remaining dist-info files (LICENSE, NOTICE, WHEEL, top_level.txt) are unchanged.