liger-kernel-nightly 0.4.0.dev20241109021846__tar.gz → 0.4.0.dev20241112233617__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release: this version of liger-kernel-nightly might be problematic.
- {liger_kernel_nightly-0.4.0.dev20241109021846/src/liger_kernel_nightly.egg-info → liger_kernel_nightly-0.4.0.dev20241112233617}/PKG-INFO +1 -1
- {liger_kernel_nightly-0.4.0.dev20241109021846 → liger_kernel_nightly-0.4.0.dev20241112233617}/pyproject.toml +1 -1
- {liger_kernel_nightly-0.4.0.dev20241109021846 → liger_kernel_nightly-0.4.0.dev20241112233617}/src/liger_kernel/ops/rms_norm.py +27 -6
- {liger_kernel_nightly-0.4.0.dev20241109021846 → liger_kernel_nightly-0.4.0.dev20241112233617}/src/liger_kernel/transformers/functional.py +32 -1
- {liger_kernel_nightly-0.4.0.dev20241109021846 → liger_kernel_nightly-0.4.0.dev20241112233617}/src/liger_kernel/transformers/monkey_patch.py +56 -8
- {liger_kernel_nightly-0.4.0.dev20241109021846 → liger_kernel_nightly-0.4.0.dev20241112233617}/src/liger_kernel/transformers/rms_norm.py +11 -3
- {liger_kernel_nightly-0.4.0.dev20241109021846 → liger_kernel_nightly-0.4.0.dev20241112233617/src/liger_kernel_nightly.egg-info}/PKG-INFO +1 -1
- {liger_kernel_nightly-0.4.0.dev20241109021846 → liger_kernel_nightly-0.4.0.dev20241112233617}/LICENSE +0 -0
- {liger_kernel_nightly-0.4.0.dev20241109021846 → liger_kernel_nightly-0.4.0.dev20241112233617}/NOTICE +0 -0
- {liger_kernel_nightly-0.4.0.dev20241109021846 → liger_kernel_nightly-0.4.0.dev20241112233617}/README.md +0 -0
- {liger_kernel_nightly-0.4.0.dev20241109021846 → liger_kernel_nightly-0.4.0.dev20241112233617}/setup.cfg +0 -0
- {liger_kernel_nightly-0.4.0.dev20241109021846 → liger_kernel_nightly-0.4.0.dev20241112233617}/src/liger_kernel/env_report.py +0 -0
- {liger_kernel_nightly-0.4.0.dev20241109021846 → liger_kernel_nightly-0.4.0.dev20241112233617}/src/liger_kernel/ops/__init__.py +0 -0
- {liger_kernel_nightly-0.4.0.dev20241109021846 → liger_kernel_nightly-0.4.0.dev20241112233617}/src/liger_kernel/ops/cross_entropy.py +0 -0
- {liger_kernel_nightly-0.4.0.dev20241109021846 → liger_kernel_nightly-0.4.0.dev20241112233617}/src/liger_kernel/ops/experimental/embedding.py +0 -0
- {liger_kernel_nightly-0.4.0.dev20241109021846 → liger_kernel_nightly-0.4.0.dev20241112233617}/src/liger_kernel/ops/experimental/mm_int8int2.py +0 -0
- {liger_kernel_nightly-0.4.0.dev20241109021846 → liger_kernel_nightly-0.4.0.dev20241112233617}/src/liger_kernel/ops/fused_linear_cross_entropy.py +0 -0
- {liger_kernel_nightly-0.4.0.dev20241109021846 → liger_kernel_nightly-0.4.0.dev20241112233617}/src/liger_kernel/ops/fused_linear_jsd.py +0 -0
- {liger_kernel_nightly-0.4.0.dev20241109021846 → liger_kernel_nightly-0.4.0.dev20241112233617}/src/liger_kernel/ops/geglu.py +0 -0
- {liger_kernel_nightly-0.4.0.dev20241109021846 → liger_kernel_nightly-0.4.0.dev20241112233617}/src/liger_kernel/ops/group_norm.py +0 -0
- {liger_kernel_nightly-0.4.0.dev20241109021846 → liger_kernel_nightly-0.4.0.dev20241112233617}/src/liger_kernel/ops/jsd.py +0 -0
- {liger_kernel_nightly-0.4.0.dev20241109021846 → liger_kernel_nightly-0.4.0.dev20241112233617}/src/liger_kernel/ops/kl_div.py +0 -0
- {liger_kernel_nightly-0.4.0.dev20241109021846 → liger_kernel_nightly-0.4.0.dev20241112233617}/src/liger_kernel/ops/layer_norm.py +0 -0
- {liger_kernel_nightly-0.4.0.dev20241109021846 → liger_kernel_nightly-0.4.0.dev20241112233617}/src/liger_kernel/ops/rope.py +0 -0
- {liger_kernel_nightly-0.4.0.dev20241109021846 → liger_kernel_nightly-0.4.0.dev20241112233617}/src/liger_kernel/ops/swiglu.py +0 -0
- {liger_kernel_nightly-0.4.0.dev20241109021846 → liger_kernel_nightly-0.4.0.dev20241112233617}/src/liger_kernel/ops/utils.py +0 -0
- {liger_kernel_nightly-0.4.0.dev20241109021846 → liger_kernel_nightly-0.4.0.dev20241112233617}/src/liger_kernel/transformers/__init__.py +0 -0
- {liger_kernel_nightly-0.4.0.dev20241109021846 → liger_kernel_nightly-0.4.0.dev20241112233617}/src/liger_kernel/transformers/auto_model.py +0 -0
- {liger_kernel_nightly-0.4.0.dev20241109021846 → liger_kernel_nightly-0.4.0.dev20241112233617}/src/liger_kernel/transformers/cross_entropy.py +0 -0
- {liger_kernel_nightly-0.4.0.dev20241109021846 → liger_kernel_nightly-0.4.0.dev20241112233617}/src/liger_kernel/transformers/experimental/embedding.py +0 -0
- {liger_kernel_nightly-0.4.0.dev20241109021846 → liger_kernel_nightly-0.4.0.dev20241112233617}/src/liger_kernel/transformers/fused_linear_cross_entropy.py +0 -0
- {liger_kernel_nightly-0.4.0.dev20241109021846 → liger_kernel_nightly-0.4.0.dev20241112233617}/src/liger_kernel/transformers/fused_linear_jsd.py +0 -0
- {liger_kernel_nightly-0.4.0.dev20241109021846 → liger_kernel_nightly-0.4.0.dev20241112233617}/src/liger_kernel/transformers/geglu.py +0 -0
- {liger_kernel_nightly-0.4.0.dev20241109021846 → liger_kernel_nightly-0.4.0.dev20241112233617}/src/liger_kernel/transformers/group_norm.py +0 -0
- {liger_kernel_nightly-0.4.0.dev20241109021846 → liger_kernel_nightly-0.4.0.dev20241112233617}/src/liger_kernel/transformers/jsd.py +0 -0
- {liger_kernel_nightly-0.4.0.dev20241109021846 → liger_kernel_nightly-0.4.0.dev20241112233617}/src/liger_kernel/transformers/kl_div.py +0 -0
- {liger_kernel_nightly-0.4.0.dev20241109021846 → liger_kernel_nightly-0.4.0.dev20241112233617}/src/liger_kernel/transformers/layer_norm.py +0 -0
- {liger_kernel_nightly-0.4.0.dev20241109021846 → liger_kernel_nightly-0.4.0.dev20241112233617}/src/liger_kernel/transformers/model/__init__.py +0 -0
- {liger_kernel_nightly-0.4.0.dev20241109021846 → liger_kernel_nightly-0.4.0.dev20241112233617}/src/liger_kernel/transformers/model/gemma.py +0 -0
- {liger_kernel_nightly-0.4.0.dev20241109021846 → liger_kernel_nightly-0.4.0.dev20241112233617}/src/liger_kernel/transformers/model/gemma2.py +0 -0
- {liger_kernel_nightly-0.4.0.dev20241109021846 → liger_kernel_nightly-0.4.0.dev20241112233617}/src/liger_kernel/transformers/model/llama.py +0 -0
- {liger_kernel_nightly-0.4.0.dev20241109021846 → liger_kernel_nightly-0.4.0.dev20241112233617}/src/liger_kernel/transformers/model/mistral.py +0 -0
- {liger_kernel_nightly-0.4.0.dev20241109021846 → liger_kernel_nightly-0.4.0.dev20241112233617}/src/liger_kernel/transformers/model/mixtral.py +0 -0
- {liger_kernel_nightly-0.4.0.dev20241109021846 → liger_kernel_nightly-0.4.0.dev20241112233617}/src/liger_kernel/transformers/model/mllama.py +0 -0
- {liger_kernel_nightly-0.4.0.dev20241109021846 → liger_kernel_nightly-0.4.0.dev20241112233617}/src/liger_kernel/transformers/model/phi3.py +0 -0
- {liger_kernel_nightly-0.4.0.dev20241109021846 → liger_kernel_nightly-0.4.0.dev20241112233617}/src/liger_kernel/transformers/model/qwen2.py +0 -0
- {liger_kernel_nightly-0.4.0.dev20241109021846 → liger_kernel_nightly-0.4.0.dev20241112233617}/src/liger_kernel/transformers/model/qwen2_vl.py +0 -0
- {liger_kernel_nightly-0.4.0.dev20241109021846 → liger_kernel_nightly-0.4.0.dev20241112233617}/src/liger_kernel/transformers/rope.py +0 -0
- {liger_kernel_nightly-0.4.0.dev20241109021846 → liger_kernel_nightly-0.4.0.dev20241112233617}/src/liger_kernel/transformers/swiglu.py +0 -0
- {liger_kernel_nightly-0.4.0.dev20241109021846 → liger_kernel_nightly-0.4.0.dev20241112233617}/src/liger_kernel/transformers/trainer_integration.py +0 -0
- {liger_kernel_nightly-0.4.0.dev20241109021846 → liger_kernel_nightly-0.4.0.dev20241112233617}/src/liger_kernel/triton/__init__.py +0 -0
- {liger_kernel_nightly-0.4.0.dev20241109021846 → liger_kernel_nightly-0.4.0.dev20241112233617}/src/liger_kernel/triton/monkey_patch.py +0 -0
- {liger_kernel_nightly-0.4.0.dev20241109021846 → liger_kernel_nightly-0.4.0.dev20241112233617}/src/liger_kernel_nightly.egg-info/SOURCES.txt +0 -0
- {liger_kernel_nightly-0.4.0.dev20241109021846 → liger_kernel_nightly-0.4.0.dev20241112233617}/src/liger_kernel_nightly.egg-info/dependency_links.txt +0 -0
- {liger_kernel_nightly-0.4.0.dev20241109021846 → liger_kernel_nightly-0.4.0.dev20241112233617}/src/liger_kernel_nightly.egg-info/requires.txt +0 -0
- {liger_kernel_nightly-0.4.0.dev20241109021846 → liger_kernel_nightly-0.4.0.dev20241112233617}/src/liger_kernel_nightly.egg-info/top_level.txt +0 -0
{liger_kernel_nightly-0.4.0.dev20241109021846 → liger_kernel_nightly-0.4.0.dev20241112233617}/pyproject.toml CHANGED
@@ -4,7 +4,7 @@ build-backend = "setuptools.build_meta"
 
 [project]
 name = "liger_kernel_nightly"
-version = "0.4.0.dev20241109021846"
+version = "0.4.0.dev20241112233617"
 description = "Efficient Triton kernels for LLM Training"
 urls = { "Homepage" = "https://github.com/linkedin/Liger-Kernel" }
 readme = { file = "README.md", content-type = "text/markdown" }
{liger_kernel_nightly-0.4.0.dev20241109021846 → liger_kernel_nightly-0.4.0.dev20241112233617}/src/liger_kernel/ops/rms_norm.py CHANGED
@@ -116,6 +116,8 @@ def _rms_norm_forward_kernel(
 def _rms_norm_backward_kernel(
     dY_ptr,
     dY_row_stride,
+    dX_ptr,
+    dX_row_stride,
     X_ptr,
     X_row_stride,
     X_dtype: tl.constexpr,
@@ -146,6 +148,8 @@ def _rms_norm_backward_kernel(
     dW_row = tl.zeros((BLOCK_SIZE,), dtype=tl.float32)
 
     dY_ptr += row_start * dY_row_stride
+    dX_ptr += row_start * dX_row_stride
+
     X_ptr += row_start * X_row_stride
     RSTD_ptr += row_start
 
@@ -184,9 +188,10 @@ def _rms_norm_backward_kernel(
         # here X_row is already in fp32 (see previous if block)
         dW_row += dY_row * (X_row * rstd_row)
 
-        tl.store(dY_ptr + col_offsets, dX_row.to(X_dtype), mask=mask)
+        tl.store(dX_ptr + col_offsets, dX_row.to(X_dtype), mask=mask)
 
         dY_ptr += dY_row_stride
+        dX_ptr += dX_row_stride
         X_ptr += X_row_stride
         RSTD_ptr += RSTD_row_stride
 
@@ -251,7 +256,9 @@ def rms_norm_forward(X, W, eps, offset, casting_mode):
     return Y.view(*shape), X, RSTD, BLOCK_SIZE, num_warps, casting_mode
 
 
-def rms_norm_backward(dY, X, W, RSTD, offset, casting_mode, BLOCK_SIZE, num_warps):
+def rms_norm_backward(
+    dY, X, W, RSTD, offset, casting_mode, BLOCK_SIZE, num_warps, in_place
+):
     shape = dY.shape
     dim = shape[-1]
     dY = dY.view(-1, dim)
@@ -265,10 +272,17 @@ def rms_norm_backward(dY, X, W, RSTD, offset, casting_mode, BLOCK_SIZE, num_warps):
         raise RuntimeError("This layer norm doesn't support feature dim >= 64KB.")
     rows_per_program = math.ceil(n_rows / sm_count)
     grid = (sm_count,)
-
+
+    if in_place is True:
+        dX = dY
+    else:
+        dX = torch.zeros_like(dY)
+
     _rms_norm_backward_kernel[grid](
         dY,
         dY.stride(0),
+        dX,
+        dX.stride(0),
         X,
         X.stride(0),
         torch_to_triton_dtype[X.dtype],
@@ -286,8 +300,9 @@ def rms_norm_backward(dY, X, W, RSTD, offset, casting_mode, BLOCK_SIZE, num_warps):
         BLOCK_SIZE=BLOCK_SIZE,
         num_warps=num_warps,
     )
-    dX = dY.view(*shape)
+    dX = dX.view(*shape)
     dW = _dW.sum(dim=0).to(W.dtype)
+
     return dX, dW
 
 
@@ -307,11 +322,15 @@ class LigerRMSNormFunction(torch.autograd.Function):
     - 'llama': matches the Llama implementation, where only the inverse RMS is computed on fp32.
     - 'gemma': matches the Gemma implementation, where everything is cast to fp32, then computed, then cast back to the original dtype.
     - 'none': no casting is done. The computation is done in the original dtype. This saves memory and is slightly faster, but has more error w.r.t. the original implementation.
+
+    `in_place` option means whether to in_place modify dY to store dX. This is default to `True` to save memory. However, under certain cases, it can produce incorrect inputs.
+        For example, gemma2 uses two rmsnorm sequentially with residual in between. The residual part needs dY so it cannot be modified in-place.
+        Therefore, for the patching of RMSNorm in gemma2, we set `in_place` to `False`
     """
 
     @staticmethod
     @ensure_contiguous
-    def forward(ctx, X, W, eps, offset=0.0, casting_mode="llama"):
+    def forward(ctx, X, W, eps, offset=0.0, casting_mode="llama", in_place=True):
         """
         X: (B, T, H) or (BxT, H)
         W: (H,)
@@ -321,6 +340,7 @@ class LigerRMSNormFunction(torch.autograd.Function):
         )
         ctx.offset = offset
         ctx.casting_mode = casting_mode
+        ctx.in_place = in_place
         ctx.BLOCK_SIZE = BLOCK_SIZE
         ctx.num_warps = num_warps
         ctx.save_for_backward(X, W, RSTD)
@@ -342,5 +362,6 @@ class LigerRMSNormFunction(torch.autograd.Function):
             ctx.casting_mode,
             ctx.BLOCK_SIZE,
             ctx.num_warps,
+            ctx.in_place,
         )
-        return dX, dW, None, None, None
+        return dX, dW, None, None, None, None
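
A rough usage sketch (not part of the diff) of the new in_place flag on LigerRMSNormFunction: per the forward signature above, apply() takes the flag as the last positional argument. This assumes a CUDA device with Triton available; shapes and dtype are made up for illustration.

import torch
from liger_kernel.ops.rms_norm import LigerRMSNormFunction

# Illustrative shapes only; the kernel runs on a CUDA device via Triton.
B, T, H = 2, 8, 64
X = torch.randn(B, T, H, device="cuda", dtype=torch.bfloat16, requires_grad=True)
W = torch.ones(H, device="cuda", dtype=torch.bfloat16, requires_grad=True)

# in_place=True (default) reuses the incoming gradient buffer as dX to save memory;
# in_place=False allocates dX with torch.zeros_like(dY), as the backward above shows.
Y = LigerRMSNormFunction.apply(X, W, 1e-6, 0.0, "llama", False)
Y.sum().backward()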
{liger_kernel_nightly-0.4.0.dev20241109021846 → liger_kernel_nightly-0.4.0.dev20241112233617}/src/liger_kernel/transformers/functional.py CHANGED
@@ -1,3 +1,5 @@
+from typing import Optional
+
 from liger_kernel.ops.cross_entropy import LigerCrossEntropyFunction
 from liger_kernel.ops.fused_linear_cross_entropy import (
     LigerFusedLinearCrossEntropyFunction,
@@ -13,7 +15,6 @@ from liger_kernel.ops.rope import LigerRopeFunction
 from liger_kernel.ops.swiglu import LigerSiLUMulFunction
 
 liger_swiglu = LigerSiLUMulFunction.apply
-liger_cross_entropy = LigerCrossEntropyFunction.apply
 liger_fused_linear_cross_entropy = LigerFusedLinearCrossEntropyFunction.apply
 liger_geglu = LigerGELUMulFunction.apply
 liger_rms_norm = LigerRMSNormFunction.apply
@@ -23,3 +24,33 @@ liger_kl_div = LigerKLDivLossFunction.apply
 liger_jsd = LigerJSDFunction.apply
 liger_fused_linear_jsd = LigerFusedLinearJSDFunction.apply
 liger_group_norm = LigerGroupNormFunction.apply
+
+
+# conform to the function signature in https://pytorch.org/docs/stable/generated/torch.nn.functional.cross_entropy.html
+# `weight` and `size_average` are placeholders and not implemented yet
+def liger_cross_entropy(
+    input,
+    target,
+    weight=None,
+    size_average=None,
+    ignore_index: int = -100,
+    reduce=None,
+    reduction: str = "mean",
+    label_smoothing: float = 0.0,
+    lse_square_scale: float = 0.0,
+    softcap: Optional[float] = None,
+    return_z_loss: bool = False,
+):
+    loss, z_loss = LigerCrossEntropyFunction.apply(
+        input,
+        target,
+        ignore_index,
+        lse_square_scale,
+        label_smoothing,
+        reduction,
+        softcap,
+        return_z_loss,
+    )
+    if not return_z_loss:
+        return loss
+    return loss, z_loss
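
A minimal sketch (not part of the diff) of calling the new torch-compatible wrapper, assuming a CUDA device; shapes and values are arbitrary. `weight` and `size_average` are accepted but, per the comment above, not implemented yet.

import torch
from liger_kernel.transformers.functional import liger_cross_entropy

logits = torch.randn(4, 128, device="cuda", requires_grad=True)
targets = torch.randint(0, 128, (4,), device="cuda")

# Mirrors torch.nn.functional.cross_entropy's keyword interface.
loss = liger_cross_entropy(logits, targets, ignore_index=-100, label_smoothing=0.1)
loss.backward()

# With return_z_loss=True the wrapper returns (loss, z_loss) instead of a single tensor.
loss, z_loss = liger_cross_entropy(logits, targets, return_z_loss=True)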
{liger_kernel_nightly-0.4.0.dev20241109021846 → liger_kernel_nightly-0.4.0.dev20241112233617}/src/liger_kernel/transformers/monkey_patch.py CHANGED
@@ -8,6 +8,7 @@ from packaging import version
 from transformers import PreTrainedModel
 
 from liger_kernel.transformers.cross_entropy import LigerCrossEntropyLoss
+from liger_kernel.transformers.functional import liger_cross_entropy
 from liger_kernel.transformers.geglu import LigerGEGLUMLP
 from liger_kernel.transformers.layer_norm import LigerLayerNorm
 from liger_kernel.transformers.model.gemma import lce_forward as gemma_lce_forward
@@ -111,8 +112,16 @@ def apply_liger_kernel_to_llama(
         modeling_llama.LlamaRMSNorm = LigerRMSNorm
     if swiglu:
         modeling_llama.LlamaMLP = LigerSwiGLUMLP
+
     if cross_entropy:
-        modeling_llama.CrossEntropyLoss = LigerCrossEntropyLoss
+        if transformer_version >= version.parse(SUPPORTED_TRANSFORMER_VERSION):
+            from transformers.loss.loss_utils import nn
+
+            nn.functional.cross_entropy = liger_cross_entropy
+        else:
+            logger.warning(TRANSFORMER_DEPRECATION_WARNING)
+            modeling_llama.CrossEntropyLoss = LigerCrossEntropyLoss
+
     if fused_linear_cross_entropy:
         if transformer_version >= version.parse(SUPPORTED_TRANSFORMER_VERSION):
             modeling_llama.LlamaForCausalLM.forward = llama_lce_forward
@@ -192,7 +201,13 @@ def apply_liger_kernel_to_mllama(
     if swiglu:
         modeling_mllama.MllamaTextMLP = LigerSwiGLUMLP
     if cross_entropy:
-        modeling_mllama.CrossEntropyLoss = LigerCrossEntropyLoss
+        if transformer_version >= version.parse(SUPPORTED_TRANSFORMER_VERSION):
+            from transformers.loss.loss_utils import nn
+
+            nn.functional.cross_entropy = liger_cross_entropy
+        else:
+            logger.warning(TRANSFORMER_DEPRECATION_WARNING)
+            modeling_mllama.CrossEntropyLoss = LigerCrossEntropyLoss
     if fused_linear_cross_entropy:
         if transformer_version >= version.parse(SUPPORTED_TRANSFORMER_VERSION):
             modeling_mllama.MllamaForCausalLM.forward = mllama_lce_forward
@@ -342,7 +357,14 @@ def apply_liger_kernel_to_mixtral(
     if rms_norm:
         modeling_mixtral.MixtralRMSNorm = LigerRMSNorm
     if cross_entropy:
-        modeling_mixtral.CrossEntropyLoss = LigerCrossEntropyLoss
+        if transformer_version >= version.parse(SUPPORTED_TRANSFORMER_VERSION):
+            from transformers.loss.loss_utils import nn
+
+            nn.functional.cross_entropy = liger_cross_entropy
+        else:
+            logger.warning(TRANSFORMER_DEPRECATION_WARNING)
+            modeling_mixtral.CrossEntropyLoss = LigerCrossEntropyLoss
+
     if fused_linear_cross_entropy:
         if transformer_version >= version.parse(SUPPORTED_TRANSFORMER_VERSION):
             modeling_mixtral.MixtralForCausalLM.forward = mixtral_lce_forward
@@ -417,7 +439,13 @@ def apply_liger_kernel_to_gemma(
     if rms_norm:
         modeling_gemma.GemmaRMSNorm = LigerRMSNormForGemma
     if cross_entropy:
-        modeling_gemma.CrossEntropyLoss = LigerCrossEntropyLoss
+        if transformer_version >= version.parse(SUPPORTED_TRANSFORMER_VERSION):
+            from transformers.loss.loss_utils import nn
+
+            nn.functional.cross_entropy = liger_cross_entropy
+        else:
+            logger.warning(TRANSFORMER_DEPRECATION_WARNING)
+            modeling_gemma.CrossEntropyLoss = LigerCrossEntropyLoss
     if geglu:
         modeling_gemma.GemmaMLP = LigerGEGLUMLP
     if fused_linear_cross_entropy:
@@ -474,11 +502,12 @@ def apply_liger_kernel_to_gemma2(
     assert not (
         cross_entropy and fused_linear_cross_entropy
     ), "cross_entropy and fused_linear_cross_entropy cannot both be True."
+
     from transformers.models.gemma2 import modeling_gemma2
     from transformers.models.gemma2.modeling_gemma2 import Gemma2Model
 
     LigerRMSNormForGemma2 = partial(
-        LigerRMSNorm, offset=1.0, casting_mode="gemma", init_fn="zeros"
+        LigerRMSNorm, offset=1.0, casting_mode="gemma", init_fn="zeros", in_place=False
     )
     _patch_rms_norm_module_for_gemma2 = partial(
         _patch_rms_norm_module, offset=1.0, casting_mode="gemma"
@@ -490,7 +519,13 @@ def apply_liger_kernel_to_gemma2(
         # https://github.com/huggingface/transformers/blob/v4.44.2/src/transformers/models/gemma/modeling_gemma.py#L109
         modeling_gemma2.Gemma2RMSNorm = LigerRMSNormForGemma2
     if cross_entropy:
-        modeling_gemma2.CrossEntropyLoss = LigerCrossEntropyLoss
+        if transformer_version >= version.parse(SUPPORTED_TRANSFORMER_VERSION):
+            from transformers.loss.loss_utils import nn
+
+            nn.functional.cross_entropy = liger_cross_entropy
+        else:
+            logger.warning(TRANSFORMER_DEPRECATION_WARNING)
+            modeling_gemma2.CrossEntropyLoss = LigerCrossEntropyLoss
     if fused_linear_cross_entropy:
         if transformer_version >= version.parse(SUPPORTED_TRANSFORMER_VERSION):
             modeling_gemma2.Gemma2ForCausalLM.forward = gemma2_lce_forward
@@ -562,8 +597,15 @@ def apply_liger_kernel_to_qwen2(
         modeling_qwen2.apply_rotary_pos_emb = liger_rotary_pos_emb
     if rms_norm:
         modeling_qwen2.Qwen2RMSNorm = LigerRMSNorm
+
     if cross_entropy:
-        modeling_qwen2.CrossEntropyLoss = LigerCrossEntropyLoss
+        if transformer_version >= version.parse(SUPPORTED_TRANSFORMER_VERSION):
+            from transformers.loss.loss_utils import nn
+
+            nn.functional.cross_entropy = liger_cross_entropy
+        else:
+            logger.warning(TRANSFORMER_DEPRECATION_WARNING)
+            modeling_qwen2.CrossEntropyLoss = LigerCrossEntropyLoss
 
     # import pdb; pdb.set_trace()
     if fused_linear_cross_entropy:
@@ -710,7 +752,13 @@ def apply_liger_kernel_to_phi3(
     if swiglu:
         modeling_phi3.Phi3MLP = LigerPhi3SwiGLUMLP
     if cross_entropy:
-        modeling_phi3.CrossEntropyLoss = LigerCrossEntropyLoss
+        if transformer_version >= version.parse(SUPPORTED_TRANSFORMER_VERSION):
+            from transformers.loss.loss_utils import nn
+
+            nn.functional.cross_entropy = liger_cross_entropy
+        else:
+            logger.warning(TRANSFORMER_DEPRECATION_WARNING)
+            modeling_phi3.CrossEntropyLoss = LigerCrossEntropyLoss
     if fused_linear_cross_entropy:
         if transformer_version >= version.parse(SUPPORTED_TRANSFORMER_VERSION):
             modeling_phi3.Phi3ForCausalLM.forward = phi3_lce_forward
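
For context, a hedged sketch (not part of the diff) of how these patches are typically invoked: on transformers releases at or above SUPPORTED_TRANSFORMER_VERSION, the cross_entropy flag now swaps nn.functional.cross_entropy inside transformers.loss.loss_utils for liger_cross_entropy instead of replacing the module-level CrossEntropyLoss. The checkpoint name below is only an example.

from transformers import AutoModelForCausalLM

from liger_kernel.transformers import apply_liger_kernel_to_llama

# Patch before the model is instantiated so the Liger kernels are picked up.
apply_liger_kernel_to_llama(cross_entropy=True, fused_linear_cross_entropy=False)

model = AutoModelForCausalLM.from_pretrained("meta-llama/Meta-Llama-3-8B-Instruct")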
{liger_kernel_nightly-0.4.0.dev20241109021846 → liger_kernel_nightly-0.4.0.dev20241112233617}/src/liger_kernel/transformers/rms_norm.py CHANGED
@@ -6,7 +6,13 @@ from liger_kernel.ops.rms_norm import LigerRMSNormFunction
 
 class LigerRMSNorm(nn.Module):
     def __init__(
-        self, hidden_size, eps=1e-6, offset=0.0, casting_mode="llama", init_fn="ones"
+        self,
+        hidden_size,
+        eps=1e-6,
+        offset=0.0,
+        casting_mode="llama",
+        init_fn="ones",
+        in_place=True,
     ):
         super().__init__()
         assert init_fn in [
@@ -16,10 +22,11 @@ class LigerRMSNorm(nn.Module):
         self.weight = nn.Parameter(
             torch.ones(hidden_size) if init_fn == "ones" else torch.zeros(hidden_size)
         )
-        self.variance_epsilon, self.offset, self.casting_mode = (
+        self.variance_epsilon, self.offset, self.casting_mode, self.in_place = (
             eps,
             offset,
             casting_mode,
+            in_place,
         )
 
     def forward(self, hidden_states):
@@ -29,7 +36,8 @@ class LigerRMSNorm(nn.Module):
             self.variance_epsilon,
             self.offset,
             self.casting_mode,
+            self.in_place,
         )
 
     def extra_repr(self):
-        return f"{tuple(self.weight.shape)}, eps={self.variance_epsilon}, offset={self.offset}"
+        return f"{tuple(self.weight.shape)}, eps={self.variance_epsilon}, offset={self.offset}, in_place={self.in_place}"
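
And a short sketch (not part of the diff) of the module-level API, assuming a CUDA device with Triton available: the constructor now accepts in_place, and gemma2-style stacks of two RMSNorms around a residual should pass in_place=False so the upstream gradient is not overwritten during backward.

import torch
from liger_kernel.transformers.rms_norm import LigerRMSNorm

# Hidden size and batch shapes are illustrative only.
norm = LigerRMSNorm(hidden_size=64, eps=1e-6, in_place=False).to("cuda")
x = torch.randn(2, 8, 64, device="cuda", requires_grad=True)
y = norm(x)
print(norm)  # extra_repr now also reports in_place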