ai-edge-torch-nightly 0.5.0.dev20250514__py3-none-any.whl → 0.5.0.dev20250516__py3-none-any.whl
This diff compares the contents of two publicly available versions of the package as released to a supported registry. It is provided for informational purposes only and reflects the packages exactly as they appear in their public registry.
- ai_edge_torch/__init__.py +1 -0
- ai_edge_torch/_convert/conversion.py +23 -0
- ai_edge_torch/_convert/converter.py +57 -3
- ai_edge_torch/_convert/test/test_convert.py +25 -0
- ai_edge_torch/generative/examples/amd_llama_135m/amd_llama_135m.py +9 -2
- ai_edge_torch/generative/examples/amd_llama_135m/convert_to_tflite.py +7 -1
- ai_edge_torch/generative/examples/deepseek/convert_to_tflite.py +7 -2
- ai_edge_torch/generative/examples/deepseek/deepseek.py +8 -1
- ai_edge_torch/generative/examples/gemma/convert_gemma1_to_tflite.py +7 -1
- ai_edge_torch/generative/examples/gemma/convert_gemma2_to_tflite.py +7 -1
- ai_edge_torch/generative/examples/gemma/gemma1.py +9 -1
- ai_edge_torch/generative/examples/gemma/gemma2.py +7 -2
- ai_edge_torch/generative/examples/gemma3/convert_gemma3_to_tflite.py +6 -1
- ai_edge_torch/generative/examples/hammer/convert_to_tflite.py +7 -1
- ai_edge_torch/generative/examples/hammer/hammer.py +14 -2
- ai_edge_torch/generative/examples/llama/convert_to_tflite.py +7 -1
- ai_edge_torch/generative/examples/llama/llama.py +25 -6
- ai_edge_torch/generative/examples/moonshine/convert_moonshine_to_tflite.py +0 -1
- ai_edge_torch/generative/examples/openelm/convert_to_tflite.py +7 -1
- ai_edge_torch/generative/examples/openelm/openelm.py +8 -1
- ai_edge_torch/generative/examples/paligemma/convert_to_tflite.py +6 -1
- ai_edge_torch/generative/examples/paligemma/decoder.py +1 -0
- ai_edge_torch/generative/examples/paligemma/decoder2.py +1 -0
- ai_edge_torch/generative/examples/paligemma/image_encoder.py +2 -1
- ai_edge_torch/generative/examples/paligemma/paligemma.py +12 -5
- ai_edge_torch/generative/examples/paligemma/verify.py +27 -5
- ai_edge_torch/generative/examples/phi/convert_phi3_to_tflite.py +7 -1
- ai_edge_torch/generative/examples/phi/convert_phi4_to_tflite.py +7 -1
- ai_edge_torch/generative/examples/phi/convert_to_tflite.py +7 -1
- ai_edge_torch/generative/examples/phi/phi2.py +8 -1
- ai_edge_torch/generative/examples/phi/phi3.py +7 -2
- ai_edge_torch/generative/examples/phi/phi4.py +7 -2
- ai_edge_torch/generative/examples/qwen/convert_to_tflite.py +7 -1
- ai_edge_torch/generative/examples/qwen/qwen.py +20 -3
- ai_edge_torch/generative/examples/qwen_vl/convert_to_tflite.py +6 -1
- ai_edge_torch/generative/examples/qwen_vl/decoder.py +1 -2
- ai_edge_torch/generative/examples/qwen_vl/image_encoder.py +12 -4
- ai_edge_torch/generative/examples/qwen_vl/qwen_vl.py +12 -4
- ai_edge_torch/generative/examples/qwen_vl/verify.py +26 -5
- ai_edge_torch/generative/examples/smollm/convert_to_tflite.py +7 -2
- ai_edge_torch/generative/examples/smollm/convert_v2_to_tflite.py +7 -1
- ai_edge_torch/generative/examples/smollm/smollm.py +14 -2
- ai_edge_torch/generative/examples/smollm/verify.py +2 -2
- ai_edge_torch/generative/examples/stable_diffusion/clip.py +2 -1
- ai_edge_torch/generative/examples/tiny_llama/convert_to_tflite.py +7 -1
- ai_edge_torch/generative/examples/tiny_llama/tiny_llama.py +8 -1
- ai_edge_torch/generative/layers/normalization.py +26 -7
- ai_edge_torch/generative/layers/normalization_test.py +73 -0
- ai_edge_torch/generative/utilities/converter.py +16 -4
- ai_edge_torch/generative/utilities/loader.py +45 -0
- ai_edge_torch/version.py +1 -1
- {ai_edge_torch_nightly-0.5.0.dev20250514.dist-info → ai_edge_torch_nightly-0.5.0.dev20250516.dist-info}/METADATA +1 -1
- {ai_edge_torch_nightly-0.5.0.dev20250514.dist-info → ai_edge_torch_nightly-0.5.0.dev20250516.dist-info}/RECORD +56 -55
- {ai_edge_torch_nightly-0.5.0.dev20250514.dist-info → ai_edge_torch_nightly-0.5.0.dev20250516.dist-info}/LICENSE +0 -0
- {ai_edge_torch_nightly-0.5.0.dev20250514.dist-info → ai_edge_torch_nightly-0.5.0.dev20250516.dist-info}/WHEEL +0 -0
- {ai_edge_torch_nightly-0.5.0.dev20250514.dist-info → ai_edge_torch_nightly-0.5.0.dev20250516.dist-info}/top_level.txt +0 -0
ai_edge_torch/generative/examples/tiny_llama/tiny_llama.py
CHANGED

@@ -15,8 +15,10 @@
 
 """Example of building a TinyLlama model."""
 
+from typing import Callable, Dict
 import ai_edge_torch.generative.layers.model_config as cfg
 from ai_edge_torch.generative.utilities import model_builder
+import torch
 from torch import nn
 
 TENSOR_NAMES = model_builder.TENSOR_NAMES_WITH_SEPARATE_LM_HEAD
@@ -81,10 +83,15 @@ def get_fake_model_config(**kwargs) -> cfg.ModelConfig:
   return config
 
 
-def build_model(
+def build_model(
+    checkpoint_path: str,
+    custom_loader: Callable[[str], Dict[str, torch.Tensor]] = None,
+    **kwargs
+) -> nn.Module:
   return model_builder.build_decoder_only_model(
       checkpoint_path=checkpoint_path,
       config=get_model_config(**kwargs),
       tensor_names=TENSOR_NAMES,
       model_class=TinyLlama,
+      custom_loader=custom_loader,
   )
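The same `custom_loader` hook appears across the example builders and conversion scripts listed above (the recurring +7/+8 deltas). As a rough usage sketch, not taken from the diff — the loader body and checkpoint path below are hypothetical, and `build_decoder_only_model` is assumed to fall back to the existing file-based loading when `custom_loader` is `None`:

```python
from typing import Dict

import torch

from ai_edge_torch.generative.examples.tiny_llama import tiny_llama


def my_loader(path: str) -> Dict[str, torch.Tensor]:
  # Hypothetical loader: could fetch bytes from a remote store before handing
  # back a state dict keyed by tensor name.
  return torch.load(path, weights_only=True)


model = tiny_llama.build_model(
    checkpoint_path="/tmp/tiny_llama.pt",  # hypothetical local path
    custom_loader=my_loader,
)
```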
ai_edge_torch/generative/layers/normalization.py
CHANGED

@@ -28,6 +28,8 @@ class RMSNorm(torch.nn.Module):
       dim: int,
       eps: float = 1e-6,
       zero_centered_gamma=False,
+      with_scale: bool = False,
+      scale_shift: float = 1.0,
       enable_hlfb: bool = False,
   ):
     """Initialize the RMSNorm layer.
@@ -37,13 +39,22 @@ class RMSNorm(torch.nn.Module):
       eps (float): A small float value to ensure numerical stability (default:
         1e-6).
       zero_centered_gamma (bool): Whether or not gamma has an offset.
+      with_scale (bool): Whether or not to use a scale parameter.
+      scale_shift (float): The shift to apply to the scale parameter.
       enable_hlfb (bool): use HLFB in the op.
     """
     super().__init__()
+    self.dim = dim
     self.enable_hlfb = enable_hlfb
     self.eps = eps
-    self.weight = torch.nn.Parameter(torch.ones(dim))
+    self.weight = torch.nn.Parameter(torch.ones(dim), requires_grad=False)
     self.zero_centered_gamma = zero_centered_gamma
+    self.with_scale = with_scale
+    if with_scale:
+      self.scale = torch.nn.Parameter(
+          torch.zeros((dim,), dtype=torch.float32), requires_grad=False
+      )
+    self.scale_shift = scale_shift
 
   def _norm(self, x):
     """Apply RMSNorm normalization.
@@ -70,14 +81,20 @@ class RMSNorm(torch.nn.Module):
     else:
       w = self.weight
 
+    final_scale = (
+        self.scale + self.scale_shift
+        if self.with_scale
+        else torch.ones((self.dim,), dtype=torch.float32)
+    )
     if self.enable_hlfb:
       return rms_norm_with_hlfb(
           x,
           w,
           self.eps,
+          final_scale,
       )
     else:
-      output = self._norm(x.float()).type_as(x)
+      output = self._norm(x.float()).type_as(x) * final_scale
       return output * w
 
 
@@ -104,8 +121,8 @@ class GroupNorm(torch.nn.Module):
     self.enable_hlfb = enable_hlfb
     self.group_num = group_num
     self.eps = eps
-    self.weight = torch.nn.Parameter(torch.empty(dim))
-    self.bias = torch.nn.Parameter(torch.empty(dim))
+    self.weight = torch.nn.Parameter(torch.empty(dim), requires_grad=False)
+    self.bias = torch.nn.Parameter(torch.empty(dim), requires_grad=False)
 
   def forward(self, x):
     """Running the forward pass of GroupNorm layer.
@@ -140,8 +157,8 @@ class LayerNorm(torch.nn.Module):
    self.enable_hlfb = enable_hlfb
     self.normalized_shape = (dim,)
     self.eps = eps
-    self.weight = torch.nn.Parameter(torch.empty(dim))
-    self.bias = torch.nn.Parameter(torch.empty(dim))
+    self.weight = torch.nn.Parameter(torch.empty(dim), requires_grad=False)
+    self.bias = torch.nn.Parameter(torch.empty(dim), requires_grad=False)
 
   def forward(self, x):
     """Running the forward pass of LayerNorm layer.
@@ -165,6 +182,7 @@ def rms_norm_with_hlfb(
     x: torch.Tensor,
     w: torch.Tensor,
     eps: float,
+    final_scale: torch.Tensor,
 ):
   """RMS Normalization with high-level function boundary enabled.
 
@@ -172,6 +190,7 @@ def rms_norm_with_hlfb(
     x (torch.Tensor): Input tensor for RMS Normalization, with BCHW shape.
     w (torch.Tensor): The learned parameter tensor for normalization.
     eps (float): A small float value to ensure numerical stability.
+    final_scale (torch.Tensor): The final scale to apply to the normalization.
 
   Returns:
     The output tensor of RMS Normalization.
@@ -185,7 +204,7 @@ def rms_norm_with_hlfb(
   def _norm(x):
     return x * torch.rsqrt(x.pow(2).mean(-1, keepdim=True) + eps)
 
-  output = _norm(x.float()).type_as(x)
+  output = _norm(x.float()).type_as(x) * final_scale
   out = output * w
 
   out = builder.mark_outputs(out)
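In plain terms, the new `with_scale`/`scale_shift` option multiplies the normalized activations by an extra per-channel factor `scale + scale_shift` before the usual gamma (`weight`) multiplication; when `with_scale` is false the factor is all ones and behavior is unchanged. A standalone sketch of the same math (simplified; not the library code):

```python
import torch


def rms_norm_with_scale(x, weight, scale, scale_shift=1.0, eps=1e-6):
  # Classic RMSNorm over the last dimension.
  normed = x * torch.rsqrt(x.pow(2).mean(-1, keepdim=True) + eps)
  # The addition from this diff: a per-channel scale offset by scale_shift,
  # applied before the gamma/weight multiplication.
  final_scale = scale + scale_shift
  return normed * final_scale * weight


dim = 8
x = torch.randn(2, dim)
out = rms_norm_with_scale(x, torch.ones(dim), torch.zeros(dim), scale_shift=2.0)
print(out.shape)  # torch.Size([2, 8])
```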
ai_edge_torch/generative/layers/normalization_test.py
ADDED

@@ -0,0 +1,73 @@
+# Copyright 2025 The AI Edge Torch Authors.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# ==============================================================================
+"""Tests for normalization layers."""
+
+from ai_edge_torch.generative.layers import normalization
+import torch
+from absl.testing import absltest as googletest
+from absl.testing import parameterized
+
+
+class NormalizationTest(parameterized.TestCase):
+
+  @parameterized.named_parameters(
+      dict(
+          testcase_name="rms_norm_test_1",
+          model_dim=10,
+          with_scale=False,
+          scale_shift=1.0,
+          enable_hlfb=False,
+          expected_values=torch.ones((10,), dtype=torch.float32),
+      ),
+      dict(
+          testcase_name="rms_norm_test_2",
+          model_dim=10,
+          with_scale=True,
+          scale_shift=2.0,
+          enable_hlfb=False,
+          expected_values=torch.ones((10,), dtype=torch.float32) * 2.0,
+      ),
+      dict(
+          testcase_name="rms_norm_test_3",
+          model_dim=10,
+          with_scale=True,
+          scale_shift=2.0,
+          enable_hlfb=True,
+          expected_values=torch.ones((10,), dtype=torch.float32) * 2.0,
+      ),
+  )
+  def test_rms_norm(
+      self,
+      model_dim: int,
+      with_scale: bool,
+      scale_shift: float,
+      enable_hlfb: bool,
+      expected_values: torch.Tensor,
+  ):
+    rms_norm = normalization.RMSNorm(
+        dim=model_dim,
+        with_scale=with_scale,
+        scale_shift=scale_shift,
+        enable_hlfb=enable_hlfb,
+    )
+
+    x = torch.ones((model_dim,), dtype=torch.float32)
+    out = rms_norm(x)
+    self.assertEqual(out.shape, (model_dim,))
+    self.assertTrue(torch.allclose(out, expected_values))
+
+
+if __name__ == "__main__":
+  googletest.main()
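The expected values follow directly from the default initializations in the layer: with `x` all ones the RMS is roughly 1, `weight` initializes to ones and `scale` to zeros, so the output is approximately `scale_shift` everywhere (1.0 without scaling, 2.0 with it). A quick sanity check of that arithmetic (standalone, mirroring the defaults rather than calling the layer):

```python
import torch

dim, scale_shift, eps = 10, 2.0, 1e-6
x = torch.ones(dim)
normed = x * torch.rsqrt(x.pow(2).mean(-1, keepdim=True) + eps)  # ~= ones
out = normed * (torch.zeros(dim) + scale_shift) * torch.ones(dim)
print(torch.allclose(out, torch.full((dim,), scale_shift), atol=1e-5))  # True
```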
ai_edge_torch/generative/utilities/converter.py
CHANGED

@@ -119,6 +119,12 @@ def define_conversion_flags(
       default_transpose_kv_cache,
       'If true, the model will be converted with transposed KV cache.',
   )
+  flags.DEFINE_bool(
+      'custom_checkpoint_loader',
+      False,
+      'If true, the conversion script will use a custom checkpoint loader which'
+      ' will read a checkpoint from a remote source.',
+  )
   return flags
 
 
@@ -397,13 +403,19 @@ def _export_helper(
   )
 
   if prefill_pixel_values is not None:
-
-
-
+    sample_pixel_kwargs = {
+        'tokens': prefill_tokens_list_with_pixel[i],
+        'input_pos': prefill_input_pos_list_with_pixel[i],
+        'kv_cache': prefill_kv,
+        'pixel_values': prefill_pixel_values,
+    }
+    # mask should be built internally when pixel values are passed.
+    if lora is not None:
+      sample_pixel_kwargs['lora'] = lora
     converter.add_signature(
         prefill_signature_name + '_pixel',
         mod,
-        sample_kwargs=
+        sample_kwargs=sample_pixel_kwargs,
     )
 
   sample_kwargs = {
ai_edge_torch/generative/utilities/loader.py
CHANGED

@@ -19,10 +19,55 @@ import os
 from typing import Callable, Dict, List, Tuple
 
 from ai_edge_torch.generative.layers import model_config
+import safetensors
 from safetensors import safe_open
 import torch
 
 
+def get_custom_loader(
+    checkpoint_path: str,
+) -> Callable[[str], Dict[str, torch.Tensor]]:
+  """Returns a custom loader for the given checkpoint path.
+
+  Those customer loaders can either support state dictionary or safetensors, and
+  the actual data might be fetched from a remote source.
+
+  Args:
+    checkpoint_path (string): The path to the checkpoint.
+
+  Returns:
+    Callable[[str], Dict[str, torch.Tensor]]: The custom loader.
+
+  Raises:
+    ValueError: If the checkpoint format is not supported.
+  """
+
+  if os.path.splitext(checkpoint_path)[1] in [".bin", ".pt", ".ckpt"]:
+    return lambda path: torch.load(path, weights_only=True)
+  if checkpoint_path.endswith(".safetensors"):
+    return safetensors.torch.load_file
+  raise ValueError(f"Unsupported checkpoint format: {checkpoint_path}")
+
+
+def maybe_get_custom_loader(
+    checkpoint_path: str,
+    use_custom_loader: bool = False,
+) -> Callable[[str], Dict[str, torch.Tensor]] | None:
+  """Returns a custom loader for the given checkpoint path.
+
+  If use_custom_loader is True, the function will return a custom loader.
+  Otherwise, it will return None.
+
+  Args:
+    checkpoint_path (string): The path to the checkpoint.
+    use_custom_loader (bool): Whether to use a custom loader.
+
+  Returns:
+    Callable[[str], Dict[str, torch.Tensor]] | None: The custom loader.
+  """
+  return get_custom_loader(checkpoint_path) if use_custom_loader else None
+
+
 def load_safetensors(full_path: str):
   """Loads safetensors into a single state dictionary.
 
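Together with the `custom_checkpoint_loader` flag added to `define_conversion_flags` above, these helpers let a conversion script opt into extension-based loader selection. A small usage sketch (the paths are hypothetical; the helpers only inspect the path string, so nothing is read from disk here):

```python
from ai_edge_torch.generative.utilities import loader

# use_custom_loader=False: returns None, so the default loading path is used.
assert loader.maybe_get_custom_loader("/tmp/model.safetensors", False) is None

# Dispatch is by file extension.
st_loader = loader.maybe_get_custom_loader("/tmp/model.safetensors", True)
# -> safetensors.torch.load_file

pt_loader = loader.maybe_get_custom_loader("/tmp/model.bin", True)
# -> a lambda around torch.load(path, weights_only=True)

# Any other extension raises ValueError("Unsupported checkpoint format: ...")
# once the returned loader is requested with use_custom_loader=True.
```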
ai_edge_torch/version.py
CHANGED

{ai_edge_torch_nightly-0.5.0.dev20250514.dist-info → ai_edge_torch_nightly-0.5.0.dev20250516.dist-info}/METADATA
CHANGED

@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: ai-edge-torch-nightly
-Version: 0.5.0.dev20250514
+Version: 0.5.0.dev20250516
 Summary: Supporting PyTorch models with the Google AI Edge TFLite runtime.
 Home-page: https://github.com/google-ai-edge/ai-edge-torch
 Keywords: On-Device ML,AI,Google,TFLite,PyTorch,LLMs,GenAI
{ai_edge_torch_nightly-0.5.0.dev20250514.dist-info → ai_edge_torch_nightly-0.5.0.dev20250516.dist-info}/RECORD
CHANGED

@@ -1,12 +1,12 @@
-ai_edge_torch/__init__.py,sha256=
+ai_edge_torch/__init__.py,sha256=lemyLCNoGYRnJsmDuGZu7qOqLbLqG6CGDFtu3ue1syU,1290
 ai_edge_torch/_config.py,sha256=AiqhbcheF7j_ozIGDLC89k1we95aVgFDa-tR6h7UI0s,2529
 ai_edge_torch/conftest.py,sha256=r0GTrhMRhlmOGrrkvumHN8hkmyug6WvF60vWq8wRIBI,758
 ai_edge_torch/model.py,sha256=wxjSFq_rBSxSqbUE8E8EJTCkgvgaRLjq_ZuAM-IZpCU,5606
-ai_edge_torch/version.py,sha256=
+ai_edge_torch/version.py,sha256=qsmmOMVNJ3QYndWFHn1wZqGlFpjk3G1-KHlQvjpBSFg,706
 ai_edge_torch/_convert/__init__.py,sha256=hHLluseD2R0Hh4W6XZRIXY_dRQeYudjsrKGf6LZz65g,671
-ai_edge_torch/_convert/conversion.py,sha256=
+ai_edge_torch/_convert/conversion.py,sha256=jidl5IOb3MhUPqhMLBNFRSzkqQyi3Y0R0ua-vOSahm0,6082
 ai_edge_torch/_convert/conversion_utils.py,sha256=Sr8qXVcTwc-ZnZmK7yxVrIOOp1S_vNrwzC0zUvLTI2o,2160
-ai_edge_torch/_convert/converter.py,sha256=
+ai_edge_torch/_convert/converter.py,sha256=6MLKELzAwFoiXv-b7KRYi7gc7Z57XOeowcz9ArIl9TM,12100
 ai_edge_torch/_convert/signature.py,sha256=-YKJdLk-eNEHfhdPCtcQVtZf915SoVePEFxKXPPf16c,2572
 ai_edge_torch/_convert/to_channel_last_io.py,sha256=_31phf7TYgZY2ftpNbrdlB1RhDium1lz_BXEQ6IsMFc,2893
 ai_edge_torch/_convert/fx_passes/__init__.py,sha256=jbRCZmSduG_1qmngaEEtbofAyL1PKZ8P1uxzzsXQhsw,1253
@@ -26,7 +26,7 @@ ai_edge_torch/_convert/fx_passes/optimize_layout_transposes_pass/layout_partitio
 ai_edge_torch/_convert/fx_passes/optimize_layout_transposes_pass/layout_partitioners/greedy.py,sha256=L_x8BrF7UDah-SYl-pG11I6CIckdU9kBTUHcmwW4cts,2420
 ai_edge_torch/_convert/fx_passes/optimize_layout_transposes_pass/layout_partitioners/min_cut.py,sha256=mzfL9cf0qBnpmxM_OlMQFvQsEZV2B_Mia9yEJV4J7rI,7135
 ai_edge_torch/_convert/test/__init__.py,sha256=hHLluseD2R0Hh4W6XZRIXY_dRQeYudjsrKGf6LZz65g,671
-ai_edge_torch/_convert/test/test_convert.py,sha256=
+ai_edge_torch/_convert/test/test_convert.py,sha256=yQC0WZk_gzReguTOfgWWodK71jnfMjYoRF29_Kafnuw,18692
 ai_edge_torch/_convert/test/test_convert_composites.py,sha256=BCIODgxMI_3MxMLfNWYMGjcz-al-J3z5eDHCiZJXNwY,7992
 ai_edge_torch/_convert/test/test_convert_multisig.py,sha256=6_C2R9--KyNR7_oezZIAfyTSR97tOeEWy4XGcbSxBDE,5778
 ai_edge_torch/_convert/test/test_to_channel_last_io.py,sha256=1o-gUiwzIuO67FNAJ8DeyKv8fVUeZVNNNwofNVDjYeU,3024
@@ -52,83 +52,83 @@ ai_edge_torch/generative/custom_ops/bmm_4d.py,sha256=JmVbZCujG_wuBchma8QF3DSBfVc
 ai_edge_torch/generative/custom_ops/dynamic_update_slice.py,sha256=ZGAq2CfWZsfef5mHulsWmyUx0dDWJX6J6xPjhBrjQdM,2097
 ai_edge_torch/generative/examples/__init__.py,sha256=hHLluseD2R0Hh4W6XZRIXY_dRQeYudjsrKGf6LZz65g,671
 ai_edge_torch/generative/examples/amd_llama_135m/__init__.py,sha256=hHLluseD2R0Hh4W6XZRIXY_dRQeYudjsrKGf6LZz65g,671
-ai_edge_torch/generative/examples/amd_llama_135m/amd_llama_135m.py,sha256=
-ai_edge_torch/generative/examples/amd_llama_135m/convert_to_tflite.py,sha256=
+ai_edge_torch/generative/examples/amd_llama_135m/amd_llama_135m.py,sha256=NyBlyUUk-3ksS5M2jFPeor6_1vSa8W_CofO8-lQ_4gE,2962
+ai_edge_torch/generative/examples/amd_llama_135m/convert_to_tflite.py,sha256=s2f5TJos6rSgogqeFk0qsOpI30qsR04umk9hAAZ5918,1782
 ai_edge_torch/generative/examples/amd_llama_135m/verify.py,sha256=o13NkFlBgawBsjdJup05VMUjAPvDRAmig6VyEkX8q6U,2426
 ai_edge_torch/generative/examples/deepseek/__init__.py,sha256=JaAnrFoXTl3RJX97XspklkTyqOHVyAgRJsZtzNDd10c,671
-ai_edge_torch/generative/examples/deepseek/convert_to_tflite.py,sha256=
-ai_edge_torch/generative/examples/deepseek/deepseek.py,sha256=
+ai_edge_torch/generative/examples/deepseek/convert_to_tflite.py,sha256=xTPfT3Mt_4bMfGkrqDKatLecZOuaE0WhxXs3uAsO_uU,1749
+ai_edge_torch/generative/examples/deepseek/deepseek.py,sha256=afKPeEjRUkLf5uhImvxtOdHrK2edfJ_R4lx92etEQpQ,3069
 ai_edge_torch/generative/examples/deepseek/verify.py,sha256=iYldze-pvZGvPkkqr6zA7EmitPnH9sXkzjNVx353IcE,2403
 ai_edge_torch/generative/examples/gemma/__init__.py,sha256=hHLluseD2R0Hh4W6XZRIXY_dRQeYudjsrKGf6LZz65g,671
-ai_edge_torch/generative/examples/gemma/convert_gemma1_to_tflite.py,sha256=
-ai_edge_torch/generative/examples/gemma/convert_gemma2_to_tflite.py,sha256=
-ai_edge_torch/generative/examples/gemma/gemma1.py,sha256=
-ai_edge_torch/generative/examples/gemma/gemma2.py,sha256=
+ai_edge_torch/generative/examples/gemma/convert_gemma1_to_tflite.py,sha256=t2qZTjyM2imPenb14fzbQ-CHj5Cejw4M5xfEZpgX6Uc,1748
+ai_edge_torch/generative/examples/gemma/convert_gemma2_to_tflite.py,sha256=Yj-b4S9BNxArnGjruRIymCiWrlf7ZvwiG6keTVGldk4,1816
+ai_edge_torch/generative/examples/gemma/gemma1.py,sha256=HqpNgJYL3X91Bpl9dAQsWEmaXJjDXGuGBVeyqK5hGTk,3682
+ai_edge_torch/generative/examples/gemma/gemma2.py,sha256=zynxoe_9ESvTIsznpp44HUS3gVDaEltkapmjzoNOaqA,11691
 ai_edge_torch/generative/examples/gemma/verify_gemma1.py,sha256=ip-Gmk4CI5f0GWSdAIdrectxQWJ0t328KCsA4nfHuGg,1736
 ai_edge_torch/generative/examples/gemma/verify_gemma2.py,sha256=jhiyinOqPt5ZZjEadDRZt_wY5fiLSCpMo54PcxFaL_Q,1789
 ai_edge_torch/generative/examples/gemma/verify_util.py,sha256=n7f2nF6Lin_tDvPs0JVldsuaBzo7pAwi5YAHAhlIxQg,6139
 ai_edge_torch/generative/examples/gemma3/__init__.py,sha256=JaAnrFoXTl3RJX97XspklkTyqOHVyAgRJsZtzNDd10c,671
-ai_edge_torch/generative/examples/gemma3/convert_gemma3_to_tflite.py,sha256=
+ai_edge_torch/generative/examples/gemma3/convert_gemma3_to_tflite.py,sha256=wOrOV_jxCnjrhjC8X0-uIi0D-4aQjOfXw6XaxTSrM9k,2048
 ai_edge_torch/generative/examples/gemma3/decoder.py,sha256=shdgLzKDUi0vyNOAsrIVAEFb3Adltsri6Rx1-wxzVf4,15089
 ai_edge_torch/generative/examples/gemma3/gemma3.py,sha256=ZorRtnbElWsctcA0nEbfwjx0C578voF7fjFEvWSR5Ck,6582
 ai_edge_torch/generative/examples/gemma3/image_encoder.py,sha256=uRoLoBWzFtQz5wFZfPCxbkvZsgPAqSkUUsV3977GbYc,5184
 ai_edge_torch/generative/examples/gemma3/verify_gemma3.py,sha256=v8oNXFICmVOtQxfO7IhZ8GnbvotEkDi9lzYHjoQyOso,2464
 ai_edge_torch/generative/examples/gemma3/verify_util.py,sha256=1vfAtayH_I_qTpqhzu6n9xnCuvhgTzhS8IzZviW2dJQ,9418
 ai_edge_torch/generative/examples/hammer/__init__.py,sha256=hHLluseD2R0Hh4W6XZRIXY_dRQeYudjsrKGf6LZz65g,671
-ai_edge_torch/generative/examples/hammer/convert_to_tflite.py,sha256=
-ai_edge_torch/generative/examples/hammer/hammer.py,sha256=
+ai_edge_torch/generative/examples/hammer/convert_to_tflite.py,sha256=XLmPuJCBJjKzMTG-mRmBX92juep2zl5yYeMrEhdqQQk,1975
+ai_edge_torch/generative/examples/hammer/hammer.py,sha256=s8arcxjETiyuERrFOvyQe_o8Lvr82gxmOIJO1hr2Dcs,3704
 ai_edge_torch/generative/examples/hammer/verify.py,sha256=MkzAGkbPy4LKRhyCDm1cw-9jUt4VUxLPdwK_25fCGSE,2705
 ai_edge_torch/generative/examples/llama/__init__.py,sha256=hHLluseD2R0Hh4W6XZRIXY_dRQeYudjsrKGf6LZz65g,671
-ai_edge_torch/generative/examples/llama/convert_to_tflite.py,sha256=
-ai_edge_torch/generative/examples/llama/llama.py,sha256=
+ai_edge_torch/generative/examples/llama/convert_to_tflite.py,sha256=4qnMyvJHqhqf9k01wEsO23BKo6tSy2KD7sHdTGimKGg,1957
+ai_edge_torch/generative/examples/llama/llama.py,sha256=TJXU9yZwxPCnuT2uwlcXVLrs5pg1P-Csv4xY5WTcf8U,7005
 ai_edge_torch/generative/examples/llama/verify.py,sha256=X7oKQi85M789ugBrOlMvzk8eSRR3Kf1Mprfl-U-WIpo,2842
 ai_edge_torch/generative/examples/moonshine/__init__.py,sha256=hHLluseD2R0Hh4W6XZRIXY_dRQeYudjsrKGf6LZz65g,671
-ai_edge_torch/generative/examples/moonshine/convert_moonshine_to_tflite.py,sha256=
+ai_edge_torch/generative/examples/moonshine/convert_moonshine_to_tflite.py,sha256=_GkaSkregS3NWN38UGXxj4pED5gtQGaaPZx5_CZ0TVM,1657
 ai_edge_torch/generative/examples/moonshine/moonshine.py,sha256=nZ2b8u4TmsB5sgdClgAuH8E78bcTv9RCnF9666HqP2M,3394
 ai_edge_torch/generative/examples/openelm/__init__.py,sha256=hHLluseD2R0Hh4W6XZRIXY_dRQeYudjsrKGf6LZz65g,671
-ai_edge_torch/generative/examples/openelm/convert_to_tflite.py,sha256=
-ai_edge_torch/generative/examples/openelm/openelm.py,sha256=
+ai_edge_torch/generative/examples/openelm/convert_to_tflite.py,sha256=S7OP8PJcOQbm8AHvi_Tc3qnQuVOtjMFNlwaZQ_oirUM,1747
+ai_edge_torch/generative/examples/openelm/openelm.py,sha256=2jkIbj_G0IuFi5nXz_yAIY4qRxgWGD5rKQDTSweRV9M,4734
 ai_edge_torch/generative/examples/openelm/verify.py,sha256=4W26ZtPF5Cb9mpHYuRM4b2QB_4W76zf4WV36KzexVjs,2446
 ai_edge_torch/generative/examples/paligemma/__init__.py,sha256=hHLluseD2R0Hh4W6XZRIXY_dRQeYudjsrKGf6LZz65g,671
-ai_edge_torch/generative/examples/paligemma/convert_to_tflite.py,sha256=
-ai_edge_torch/generative/examples/paligemma/decoder.py,sha256
-ai_edge_torch/generative/examples/paligemma/decoder2.py,sha256=
-ai_edge_torch/generative/examples/paligemma/image_encoder.py,sha256=
-ai_edge_torch/generative/examples/paligemma/paligemma.py,sha256=
-ai_edge_torch/generative/examples/paligemma/verify.py,sha256=
+ai_edge_torch/generative/examples/paligemma/convert_to_tflite.py,sha256=Fl4k-lcpiUaJS0A1E7HVVUW7iTcZAU4FbA4KcSkO5SQ,2212
+ai_edge_torch/generative/examples/paligemma/decoder.py,sha256=ruUTonTErvuinWsJ3pnSbvKhCnDUlupT1MW4TUwcrMY,5551
+ai_edge_torch/generative/examples/paligemma/decoder2.py,sha256=C377j2ULpPvmY5SsNLUC8jskTNNHVDH8uYOLH5W7fOU,6100
+ai_edge_torch/generative/examples/paligemma/image_encoder.py,sha256=IbneN2J-ASdUg7OHVRkrUBiZ0UXyCVRJXhnDAxjozl8,5644
+ai_edge_torch/generative/examples/paligemma/paligemma.py,sha256=nxvcurGkFJcCjjgVkK59SJgp8mZ71D56bEnrjvGgPs4,6264
+ai_edge_torch/generative/examples/paligemma/verify.py,sha256=myHdeIAtVTOqb915h661CnvjvFkwmihy3Vp4UrKHb5I,6195
 ai_edge_torch/generative/examples/paligemma/verify_decoder.py,sha256=al5wMPWri4IRVWrLmCplPi6uoCzwh0vBHMGnCt-XUqo,2690
 ai_edge_torch/generative/examples/paligemma/verify_decoder2.py,sha256=tm-UfLr0YeBRVcQsWLBOMWI9JUzHmtPEbYK2vpITpqY,2534
 ai_edge_torch/generative/examples/paligemma/verify_image_encoder.py,sha256=vNm-wTT8BD6zbX6GocfP1QrVoHl0zSvuVxoXN36eeiU,3540
 ai_edge_torch/generative/examples/phi/__init__.py,sha256=hHLluseD2R0Hh4W6XZRIXY_dRQeYudjsrKGf6LZz65g,671
-ai_edge_torch/generative/examples/phi/convert_phi3_to_tflite.py,sha256=
-ai_edge_torch/generative/examples/phi/convert_phi4_to_tflite.py,sha256=
-ai_edge_torch/generative/examples/phi/convert_to_tflite.py,sha256=
-ai_edge_torch/generative/examples/phi/phi2.py,sha256=
-ai_edge_torch/generative/examples/phi/phi3.py,sha256=
-ai_edge_torch/generative/examples/phi/phi4.py,sha256=
+ai_edge_torch/generative/examples/phi/convert_phi3_to_tflite.py,sha256=kYgZAIHXolUhOyDAYDuEK7RZ5ExL1YzpqtlcZjo622c,1736
+ai_edge_torch/generative/examples/phi/convert_phi4_to_tflite.py,sha256=3y3vYlJjLjRmz4Vsq-B8YKyp0LnC2fj1LAACW3pQivI,1734
+ai_edge_torch/generative/examples/phi/convert_to_tflite.py,sha256=tY5uwRu-4Jxro7Z9jsDqZR9SUDWB8PR6JKfswvsUSxM,1735
+ai_edge_torch/generative/examples/phi/phi2.py,sha256=nu18YKF95yg9Mo7TjpkgjA_br5fSYqaHmw0o86b5hDQ,3654
+ai_edge_torch/generative/examples/phi/phi3.py,sha256=c2h17Gmo9zLSEEdA7BzG_Jd8p4-3JmO6ZSEWLWXDGFU,7107
+ai_edge_torch/generative/examples/phi/phi4.py,sha256=TgoRbaW27X2tYAUi_z2GCb3j6uze5POhKGchRf-5eZw,5889
 ai_edge_torch/generative/examples/phi/verify.py,sha256=YPFCdbnfmvq38fbpBNr0kHPfSZo4p3_6WkLJAW3pLPo,2177
 ai_edge_torch/generative/examples/phi/verify_phi3.py,sha256=kVYaBVvddfQng0IyZGxyTJEzhiPO0G4VFJm2WOc2Q94,2360
 ai_edge_torch/generative/examples/phi/verify_phi4.py,sha256=BoCa5kUBRHtMQ-5ql6yD4pG4xHJMyUiQlpMOWVx-JgY,2356
 ai_edge_torch/generative/examples/qwen/__init__.py,sha256=hHLluseD2R0Hh4W6XZRIXY_dRQeYudjsrKGf6LZz65g,671
-ai_edge_torch/generative/examples/qwen/convert_to_tflite.py,sha256=
-ai_edge_torch/generative/examples/qwen/qwen.py,sha256=
+ai_edge_torch/generative/examples/qwen/convert_to_tflite.py,sha256=TnzyARHQgmWeOdYsV9WpRj5vhKGBH0kAbp3tMj8ZCYw,1998
+ai_edge_torch/generative/examples/qwen/qwen.py,sha256=XOLq1yTbW6nyAVrYYG3qu_8Cl0A74M2hkpjOT_UhyVs,4609
 ai_edge_torch/generative/examples/qwen/verify.py,sha256=9_AyEJTeUfvhhID64Rto2bflFPyXMFokdQLsseLUMiI,2775
 ai_edge_torch/generative/examples/qwen_vl/__init__.py,sha256=JaAnrFoXTl3RJX97XspklkTyqOHVyAgRJsZtzNDd10c,671
-ai_edge_torch/generative/examples/qwen_vl/convert_to_tflite.py,sha256=
-ai_edge_torch/generative/examples/qwen_vl/decoder.py,sha256=
-ai_edge_torch/generative/examples/qwen_vl/image_encoder.py,sha256=
-ai_edge_torch/generative/examples/qwen_vl/qwen_vl.py,sha256=
-ai_edge_torch/generative/examples/qwen_vl/verify.py,sha256=
+ai_edge_torch/generative/examples/qwen_vl/convert_to_tflite.py,sha256=BM-ed7KrmPwzI3MvDs2R7P-kJgE1SK_cNVqIfXhtJjs,2411
+ai_edge_torch/generative/examples/qwen_vl/decoder.py,sha256=yt3pO0x9t39dS2RWCM-0NRLl2ImcyWRIfL3E06bDg8k,4485
+ai_edge_torch/generative/examples/qwen_vl/image_encoder.py,sha256=vMZ6v6iVrps_NSFwycgG4OPG_RVQAxa80lKrbneMkaM,15023
+ai_edge_torch/generative/examples/qwen_vl/qwen_vl.py,sha256=1Ac28olo0OJExZRyxqm7vxcf7GtXdkUwEbHvhiCHi0o,7908
+ai_edge_torch/generative/examples/qwen_vl/verify.py,sha256=4WKgAFQNQzwmeJhC8ayI5vjGj9ko6VcU2HA3VAkhHug,5812
 ai_edge_torch/generative/examples/qwen_vl/verify_decoder.py,sha256=xPWoOBLh2eK12KEhELLYymfL7xvc0chmYC98c6x37oo,2602
 ai_edge_torch/generative/examples/qwen_vl/verify_image_encoder.py,sha256=PZ392nDoJG2OmHZ_7Jet3Zu1JkN6QErxKcDc7a-PPds,3126
 ai_edge_torch/generative/examples/smollm/__init__.py,sha256=hHLluseD2R0Hh4W6XZRIXY_dRQeYudjsrKGf6LZz65g,671
-ai_edge_torch/generative/examples/smollm/convert_to_tflite.py,sha256=
-ai_edge_torch/generative/examples/smollm/convert_v2_to_tflite.py,sha256=
-ai_edge_torch/generative/examples/smollm/smollm.py,sha256=
-ai_edge_torch/generative/examples/smollm/verify.py,sha256=
+ai_edge_torch/generative/examples/smollm/convert_to_tflite.py,sha256=QVRX_ovqBQi8fKAG6PezaO1qoRvMGpVxNH-_sds0pf8,1997
+ai_edge_torch/generative/examples/smollm/convert_v2_to_tflite.py,sha256=rOVYSaS68_otJcGewQSconBCPD4GhDEIIyquD4dSUWc,1979
+ai_edge_torch/generative/examples/smollm/smollm.py,sha256=OXSN0Vu1MXnWb_H-aW9acgjpeLIhPIXGq2fx7RaojcM,4080
+ai_edge_torch/generative/examples/smollm/verify.py,sha256=sH3rn1TbaCusPiUD5XlECiHY0rvoHIXALbk7ECOiinI,2720
 ai_edge_torch/generative/examples/stable_diffusion/__init__.py,sha256=hHLluseD2R0Hh4W6XZRIXY_dRQeYudjsrKGf6LZz65g,671
 ai_edge_torch/generative/examples/stable_diffusion/attention.py,sha256=kDWG6MlIGa89zC5KSRcJlw2c4ITuw8KcchtfmF55f4g,3545
-ai_edge_torch/generative/examples/stable_diffusion/clip.py,sha256=
+ai_edge_torch/generative/examples/stable_diffusion/clip.py,sha256=fPSg217F9xBvqMZwujCAQvYq5MRZzXTYOxjiPLqD7ZU,6102
 ai_edge_torch/generative/examples/stable_diffusion/convert_to_tflite.py,sha256=_yk6wVoZm1_FRMFJF5URaPZNNdmMR89fwmKz81BEyao,5601
 ai_edge_torch/generative/examples/stable_diffusion/decoder.py,sha256=afyHXc86h-ij5zTULmZnM1h313N9VWCyIVriH6pqeSo,16368
 ai_edge_torch/generative/examples/stable_diffusion/diffusion.py,sha256=ylqXOZhYc6XFCaNBKQw0jAnYrCtRFFQKzQzEsFIntvo,34890
@@ -150,8 +150,8 @@ ai_edge_torch/generative/examples/test_models/convert_toy_model.py,sha256=6-WaNH
 ai_edge_torch/generative/examples/test_models/toy_model.py,sha256=Crpj-vOwSViHpblXOrRJmsIn4DrHyuB3XZ8kHifb7LA,5203
 ai_edge_torch/generative/examples/test_models/toy_model_with_kv_cache.py,sha256=-z5tkQzGHbo37eAl9sDAJuT1Egxm8xI9CZmYLcmqIfU,4761
 ai_edge_torch/generative/examples/tiny_llama/__init__.py,sha256=hHLluseD2R0Hh4W6XZRIXY_dRQeYudjsrKGf6LZz65g,671
-ai_edge_torch/generative/examples/tiny_llama/convert_to_tflite.py,sha256=
-ai_edge_torch/generative/examples/tiny_llama/tiny_llama.py,sha256=
+ai_edge_torch/generative/examples/tiny_llama/convert_to_tflite.py,sha256=urWkWjOaGzV2gwMXoGEs1mfHNEXfEKgwuXmQ0lrWcbM,1761
+ai_edge_torch/generative/examples/tiny_llama/tiny_llama.py,sha256=HRyq5nzoljWEWGYw0kCHAZH-GNiNHxh7E2qNoupjA-4,2988
 ai_edge_torch/generative/examples/tiny_llama/verify.py,sha256=LRu6PSw7Lqu6HGbv1tO2i0nUCqe-VkRgboA10VZ7KNg,2431
 ai_edge_torch/generative/fx_passes/__init__.py,sha256=PFSMsA1vfBfrV9ssBCkYJNl8Hx_bLdWjN01iyjPM5jE,1094
 ai_edge_torch/generative/fx_passes/remove_sdpa_zero_mask_pass.py,sha256=myGjal5A8yIBoqgArd2k40rZmCgD1Ya369KR7182bhI,2129
@@ -166,7 +166,8 @@ ai_edge_torch/generative/layers/feed_forward_test.py,sha256=8ZGy79BBpsyS6yKKDEKr
 ai_edge_torch/generative/layers/kv_cache.py,sha256=b-7shzDaKexmvQF7P3SiAmIz4ZofjYWv3m5u71GojsA,10460
 ai_edge_torch/generative/layers/lora.py,sha256=hsvWLLOnW7HQ0AysOZu30x_cetMquDd1tjfyLz8HCSU,17892
 ai_edge_torch/generative/layers/model_config.py,sha256=X_gjN5524DCDBNXsX5GrOBlkKM4UHzj_RfdCD0-VOxQ,8572
-ai_edge_torch/generative/layers/normalization.py,sha256=
+ai_edge_torch/generative/layers/normalization.py,sha256=ijwCpi22NLX-Sygwy5sK9l9WjGvbPIhZvVwoBAonWAo,7014
+ai_edge_torch/generative/layers/normalization_test.py,sha256=zwurZly-TgFxdgVVdpzu9vCpcLbd5RYt_gKg9Lfg1jI,2248
 ai_edge_torch/generative/layers/rotary_position_embedding.py,sha256=975zR202MdIrILJ7blceAcxrNqX1ZCN0ECKG1gz-bV8,2655
 ai_edge_torch/generative/layers/scaled_dot_product_attention.py,sha256=2_AgwENsaOgaxgiSqgoj0V0JzQ09dFtP_nBhX-lJK2g,5648
 ai_edge_torch/generative/layers/scaled_dot_product_attention_test.py,sha256=c6JBMQsq9XeMmR1XvGEIidNsoh-YIvichXo2LwVHgr4,3301
@@ -192,9 +193,9 @@ ai_edge_torch/generative/test/test_model_conversion_large.py,sha256=vQWmpzMkJ2hP
 ai_edge_torch/generative/test/test_quantize.py,sha256=kKJ01wscTC2t_Ylr7huO5gNKES01gm3dT1gx52z15PA,7356
 ai_edge_torch/generative/test/utils.py,sha256=tF6aCfAGJnc9dmzCnZCEOuKNVimfWOqscv9og0DDLHU,2656
 ai_edge_torch/generative/utilities/__init__.py,sha256=-_jxnnFnCgnTU4oTm4MnRsvL5lqhomBNdFBbqfmfHPo,720
-ai_edge_torch/generative/utilities/converter.py,sha256=
+ai_edge_torch/generative/utilities/converter.py,sha256=mM8Vgd6zWkOrGt4-waa8cNjJwfhhTp-VNJ306NhXrV8,15425
 ai_edge_torch/generative/utilities/export_config.py,sha256=5IvR3grlMd4mWO5c_Y4x9Fk1b1xa57MzlYNE8XUaN28,2049
-ai_edge_torch/generative/utilities/loader.py,sha256=
+ai_edge_torch/generative/utilities/loader.py,sha256=y1uSkUBiR0b9U4aoCQQk9qk7ctya_vEeY28Wc0A5e2s,15504
 ai_edge_torch/generative/utilities/model_builder.py,sha256=tBfOcsI_NcneggHqkCSydYN3ZgmkzPc6nW0AJrA81wI,6461
 ai_edge_torch/generative/utilities/moonshine_loader.py,sha256=_RpFabSqtGH5PHiP3_1f6QfO14qMADUxr_HGRlVDFB0,4891
 ai_edge_torch/generative/utilities/stable_diffusion_loader.py,sha256=dqPD9qRXEWtU3ombslOC-BE2l_dMwHoCNu7NsIJhsso,36158
@@ -251,8 +252,8 @@ ai_edge_torch/testing/__init__.py,sha256=_yGgvnBZWb7T3IN3mc4x1sS4vM96HZwM8pwIcPG
 ai_edge_torch/testing/export.py,sha256=k5mGDGzwc23Z4zaIVDs8CNh-oOt64gsf9MS9NjhbPy4,3293
 ai_edge_torch/testing/model_coverage/__init__.py,sha256=5P8J6Zk5YYtDvTBucFvB9NGSRI7Gw_24WnrbhXgycEE,765
 ai_edge_torch/testing/model_coverage/model_coverage.py,sha256=UPB448aMDUyC0HNYVqio2rcJPnDN0tBQMP08J6vPYew,4718
-ai_edge_torch_nightly-0.5.0.
-ai_edge_torch_nightly-0.5.0.
-ai_edge_torch_nightly-0.5.0.
-ai_edge_torch_nightly-0.5.0.
-ai_edge_torch_nightly-0.5.0.
+ai_edge_torch_nightly-0.5.0.dev20250516.dist-info/LICENSE,sha256=z8d0m5b2O9McPEK1xHG_dWgUBT6EfBDz6wA0F7xSPTA,11358
+ai_edge_torch_nightly-0.5.0.dev20250516.dist-info/METADATA,sha256=669y6k49WKfsyVCxQ-N-xiyLc5U2lR90qfnNDoPpedA,2074
+ai_edge_torch_nightly-0.5.0.dev20250516.dist-info/WHEEL,sha256=tZoeGjtWxWRfdplE7E3d45VPlLNQnvbKiYnx7gwAy8A,92
+ai_edge_torch_nightly-0.5.0.dev20250516.dist-info/top_level.txt,sha256=5KXRaF2hwkApYxf7Y8y_tVb9aulGTlbOoNdbx1aKRkE,14
+ai_edge_torch_nightly-0.5.0.dev20250516.dist-info/RECORD,,
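For reference, each RECORD row follows the wheel spec format `path,sha256=<digest>,<size>`, where the digest is the urlsafe-base64 SHA-256 of the file with trailing padding stripped. A quick sketch of how such a row could be recomputed locally (the path is a hypothetical local checkout):

```python
import base64
import hashlib


def record_entry(path: str) -> str:
  """Builds a wheel RECORD row: path, urlsafe-b64 sha256 (no padding), byte size."""
  with open(path, "rb") as f:
    data = f.read()
  digest = base64.urlsafe_b64encode(hashlib.sha256(data).digest()).rstrip(b"=")
  return f"{path},sha256={digest.decode()},{len(data)}"


print(record_entry("ai_edge_torch/version.py"))  # hypothetical local path
```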