ai-edge-torch-nightly 0.5.0.dev20250517__py3-none-any.whl → 0.5.0.dev20250518__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (29)
  1. ai_edge_torch/generative/examples/amd_llama_135m/verify.py +4 -32
  2. ai_edge_torch/generative/examples/amd_llama_135m/verify_util.py +76 -0
  3. ai_edge_torch/generative/examples/deepseek/verify.py +4 -30
  4. ai_edge_torch/generative/examples/deepseek/verify_util.py +76 -0
  5. ai_edge_torch/generative/examples/gemma/verify_util.py +51 -6
  6. ai_edge_torch/generative/examples/gemma3/verify_util.py +13 -0
  7. ai_edge_torch/generative/examples/hammer/verify.py +5 -35
  8. ai_edge_torch/generative/examples/hammer/verify_util.py +82 -0
  9. ai_edge_torch/generative/examples/llama/verify.py +5 -38
  10. ai_edge_torch/generative/examples/llama/verify_util.py +81 -0
  11. ai_edge_torch/generative/examples/openelm/verify.py +4 -31
  12. ai_edge_torch/generative/examples/openelm/verify_util.py +76 -0
  13. ai_edge_torch/generative/examples/phi/verify.py +6 -24
  14. ai_edge_torch/generative/examples/phi/verify_phi3.py +5 -28
  15. ai_edge_torch/generative/examples/phi/verify_phi4.py +5 -28
  16. ai_edge_torch/generative/examples/phi/verify_util.py +84 -0
  17. ai_edge_torch/generative/examples/qwen/verify.py +5 -35
  18. ai_edge_torch/generative/examples/qwen/verify_util.py +83 -0
  19. ai_edge_torch/generative/examples/smollm/verify.py +5 -36
  20. ai_edge_torch/generative/examples/smollm/verify_util.py +81 -0
  21. ai_edge_torch/generative/examples/tiny_llama/verify.py +4 -31
  22. ai_edge_torch/generative/examples/tiny_llama/verify_util.py +76 -0
  23. ai_edge_torch/generative/utilities/loader.py +11 -1
  24. ai_edge_torch/version.py +1 -1
  25. {ai_edge_torch_nightly-0.5.0.dev20250517.dist-info → ai_edge_torch_nightly-0.5.0.dev20250518.dist-info}/METADATA +1 -1
  26. {ai_edge_torch_nightly-0.5.0.dev20250517.dist-info → ai_edge_torch_nightly-0.5.0.dev20250518.dist-info}/RECORD +29 -20
  27. {ai_edge_torch_nightly-0.5.0.dev20250517.dist-info → ai_edge_torch_nightly-0.5.0.dev20250518.dist-info}/LICENSE +0 -0
  28. {ai_edge_torch_nightly-0.5.0.dev20250517.dist-info → ai_edge_torch_nightly-0.5.0.dev20250518.dist-info}/WHEEL +0 -0
  29. {ai_edge_torch_nightly-0.5.0.dev20250517.dist-info → ai_edge_torch_nightly-0.5.0.dev20250518.dist-info}/top_level.txt +0 -0
ai_edge_torch/generative/examples/tiny_llama/verify.py CHANGED
@@ -15,15 +15,10 @@
 
 """Verifies the reauthored TinyLlama-1.1B model."""
 
-import logging
-import pathlib
 
 from absl import app
 from absl import flags
-from ai_edge_torch.generative.examples.tiny_llama import tiny_llama
-from ai_edge_torch.generative.utilities import transformers_verifier
-from ai_edge_torch.generative.utilities import verifier
-import transformers
+from ai_edge_torch.generative.examples.tiny_llama import verify_util
 
 
 _PROMPTS = flags.DEFINE_multi_string(
@@ -39,32 +34,10 @@ _MAX_NEW_TOKENS = flags.DEFINE_integer(
 
 
 def main(_):
-  checkpoint = "TinyLlama/TinyLlama-1.1B-Chat-v1.0"
-  logging.info("Loading the original model from: %s", checkpoint)
-  original_model = transformers.AutoModelForCausalLM.from_pretrained(
-      checkpoint, trust_remote_code=True
-  )
-
-  # Locate the cached dir.
-  cached_config_file = transformers.utils.cached_file(
-      checkpoint, transformers.utils.CONFIG_NAME
-  )
-  reauthored_checkpoint = pathlib.Path(cached_config_file).parent
-  logging.info("Building the reauthored model from: %s", reauthored_checkpoint)
-  reauthored_model = tiny_llama.build_model(str(reauthored_checkpoint))
-
-  logging.info("Loading the tokenizer from: %s", checkpoint)
-  tokenizer = transformers.AutoTokenizer.from_pretrained(checkpoint)
-
-  verifier.verify_reauthored_model(
-      original_model=transformers_verifier.TransformersModelWrapper(
-          original_model
-      ),
-      reauthored_model=verifier.ReauthoredModelWrapper(reauthored_model),
-      tokenizer=verifier.TokenizerWrapper(tokenizer),
-      generate_prompts=_PROMPTS.value,
+  verify_util.verify_tiny_llama(
+      checkpoint_dir="TinyLlama/TinyLlama-1.1B-Chat-v1.0",
       max_new_tokens=_MAX_NEW_TOKENS.value,
-      atol=1e-04,
+      prompts=_PROMPTS.value,
   )
 
 
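After this change the per-model verify scripts are thin wrappers, and the same check can be invoked directly from Python. A minimal sketch mirroring the updated main(), using the defaults and default prompt defined in verify_util:

from ai_edge_torch.generative.examples.tiny_llama import verify_util

# Equivalent of running the updated verify.py with its default flag values:
# the original model comes from the Hugging Face checkpoint and the
# reauthored model is built from the locally cached copy of it.
verify_util.verify_tiny_llama(
    checkpoint_dir="TinyLlama/TinyLlama-1.1B-Chat-v1.0",
    max_new_tokens=30,
    prompts=["Show me the program to add 2 and 3."],
)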
ai_edge_torch/generative/examples/tiny_llama/verify_util.py ADDED
@@ -0,0 +1,76 @@
+# Copyright 2025 The AI Edge Torch Authors.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# ==============================================================================
+"""Utils for verifying the TinyLlama model."""
+import logging
+import os
+import pathlib
+
+from ai_edge_torch.generative.examples.tiny_llama import tiny_llama
+from ai_edge_torch.generative.utilities import loader
+from ai_edge_torch.generative.utilities import transformers_verifier
+from ai_edge_torch.generative.utilities import verifier
+import transformers
+
+
+DEFAULT_PROMPTS = ["Show me the program to add 2 and 3."]
+
+
+def verify_tiny_llama(
+    checkpoint_dir: str,
+    weight_filename: str = "model.safetensors",
+    max_new_tokens: int = 30,
+    initialize_from_local: bool = True,
+    prompts: list[str] | None = None,
+) -> bool:
+  """Verifies the reauthored TinyLlama model with a custom loader."""
+  logging.info("Loading the original model from: %s", checkpoint_dir)
+  original_model = transformers.AutoModelForCausalLM.from_pretrained(
+      checkpoint_dir
+  )
+
+  logging.info("Building the reauthored model from: %s", checkpoint_dir)
+  custom_loader = (
+      None
+      if initialize_from_local
+      else loader.get_custom_loader("", "safetensors")
+  )
+
+  if initialize_from_local:
+    # Locate the cached dir.
+    cached_config_file = transformers.utils.cached_file(
+        checkpoint_dir, transformers.utils.CONFIG_NAME
+    )
+    reauthored_checkpoint = pathlib.Path(cached_config_file).parent
+  else:
+    reauthored_checkpoint = os.path.join(checkpoint_dir, weight_filename)
+
+  logging.info("Building the reauthored model from: %s", reauthored_checkpoint)
+  reauthored_model = tiny_llama.build_model(
+      checkpoint_path=reauthored_checkpoint,
+      custom_loader=custom_loader,
+  )
+
+  logging.info("Loading the tokenizer from: %s", checkpoint_dir)
+  tokenizer = transformers.AutoTokenizer.from_pretrained(checkpoint_dir)
+  return verifier.verify_reauthored_model(
+      original_model=transformers_verifier.TransformersModelWrapper(
+          original_model
+      ),
+      reauthored_model=verifier.ReauthoredModelWrapper(reauthored_model),
+      tokenizer=verifier.TokenizerWrapper(tokenizer),
+      generate_prompts=DEFAULT_PROMPTS if prompts is None else prompts,
+      max_new_tokens=max_new_tokens,
+      atol=1e-04,
+  )
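The new weight_filename and initialize_from_local arguments make it possible to verify against a checkpoint that is already on disk instead of the Hugging Face cache. A minimal sketch, assuming a hypothetical local directory that already contains the config, tokenizer files, and safetensors weights:

from ai_edge_torch.generative.examples.tiny_llama import verify_util

# Hypothetical local directory holding config.json, tokenizer files, and
# model.safetensors.
checkpoint_dir = "/tmp/tinyllama-1.1b-chat"

# With initialize_from_local=False, the reauthored weights are read from
# checkpoint_dir/model.safetensors via loader.get_custom_loader("",
# "safetensors"); the original model and tokenizer are still loaded with
# transformers from the same directory.
ok = verify_util.verify_tiny_llama(
    checkpoint_dir=checkpoint_dir,
    weight_filename="model.safetensors",
    max_new_tokens=30,
    initialize_from_local=False,
)
print("verification passed:", ok)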
ai_edge_torch/generative/utilities/loader.py CHANGED
@@ -16,7 +16,7 @@
 from dataclasses import dataclass
 import glob
 import os
-from typing import Callable, Dict, List, Tuple
+from typing import Callable, Dict, List, Optional, Tuple
 
 from ai_edge_torch.generative.layers import model_config
 import safetensors
@@ -26,6 +26,7 @@ import torch
 
 def get_custom_loader(
     checkpoint_path: str,
+    checkpoint_format: Optional[str] = None,
 ) -> Callable[[str], Dict[str, torch.Tensor]]:
   """Returns a custom loader for the given checkpoint path.
 
@@ -34,6 +35,8 @@ def get_custom_loader(
 
   Args:
     checkpoint_path (string): The path to the checkpoint.
+    checkpoint_format (Optional[str]): The format of the checkpoint. Can be set
+      to "safetensors" or "pt". Default is None.
 
   Returns:
     Callable[[str], Dict[str, torch.Tensor]]: The custom loader.
@@ -42,6 +45,13 @@ def get_custom_loader(
     ValueError: If the checkpoint format is not supported.
   """
 
+  if checkpoint_format:
+    if checkpoint_format == "safetensors":
+      return safetensors.torch.load_file
+    if checkpoint_format == "pt":
+      return lambda path: torch.load(path, weights_only=True)
+    raise ValueError(f"Unsupported checkpoint format: {checkpoint_format}")
+
   if os.path.splitext(checkpoint_path)[1] in [".bin", ".pt", ".ckpt"]:
     return lambda path: torch.load(path, weights_only=True)
   if checkpoint_path.endswith(".safetensors"):
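The new checkpoint_format argument lets callers pick the loader explicitly instead of relying on the file extension of checkpoint_path, which is how the new verify_util modules obtain their safetensors loader. A minimal sketch, with a hypothetical weight file path:

from ai_edge_torch.generative.utilities import loader

# Explicit format: with "safetensors" this returns safetensors.torch.load_file
# directly, so the checkpoint_path argument can be left empty here.
custom_loader = loader.get_custom_loader("", checkpoint_format="safetensors")

# Hypothetical path; the returned callable maps a file path to a
# Dict[str, torch.Tensor] of weights.
state_dict = custom_loader("/tmp/tinyllama-1.1b-chat/model.safetensors")
print(f"loaded {len(state_dict)} tensors")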
ai_edge_torch/version.py CHANGED
@@ -13,4 +13,4 @@
 # limitations under the License.
 # ==============================================================================
 
-__version__ = "0.5.0.dev20250517"
+__version__ = "0.5.0.dev20250518"
{ai_edge_torch_nightly-0.5.0.dev20250517.dist-info → ai_edge_torch_nightly-0.5.0.dev20250518.dist-info}/METADATA CHANGED
@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: ai-edge-torch-nightly
-Version: 0.5.0.dev20250517
+Version: 0.5.0.dev20250518
 Summary: Supporting PyTorch models with the Google AI Edge TFLite runtime.
 Home-page: https://github.com/google-ai-edge/ai-edge-torch
 Keywords: On-Device ML,AI,Google,TFLite,PyTorch,LLMs,GenAI
{ai_edge_torch_nightly-0.5.0.dev20250517.dist-info → ai_edge_torch_nightly-0.5.0.dev20250518.dist-info}/RECORD CHANGED
@@ -2,7 +2,7 @@ ai_edge_torch/__init__.py,sha256=lemyLCNoGYRnJsmDuGZu7qOqLbLqG6CGDFtu3ue1syU,129
 ai_edge_torch/_config.py,sha256=AiqhbcheF7j_ozIGDLC89k1we95aVgFDa-tR6h7UI0s,2529
 ai_edge_torch/conftest.py,sha256=r0GTrhMRhlmOGrrkvumHN8hkmyug6WvF60vWq8wRIBI,758
 ai_edge_torch/model.py,sha256=wxjSFq_rBSxSqbUE8E8EJTCkgvgaRLjq_ZuAM-IZpCU,5606
-ai_edge_torch/version.py,sha256=1nWIqrcLl_lGq0WnfPfnBgtlAECzCufGTdwLlpIpp_Y,706
+ai_edge_torch/version.py,sha256=ROs2nnrPNKrl8jrGTynAgRfV8IOrNNSZIEuR176ILB8,706
 ai_edge_torch/_convert/__init__.py,sha256=hHLluseD2R0Hh4W6XZRIXY_dRQeYudjsrKGf6LZz65g,671
 ai_edge_torch/_convert/conversion.py,sha256=iQk3R-pLq4c1nfLqPB4xTRj78gghxPGzJCJtILLdg5o,6123
 ai_edge_torch/_convert/conversion_utils.py,sha256=Sr8qXVcTwc-ZnZmK7yxVrIOOp1S_vNrwzC0zUvLTI2o,2160
@@ -55,11 +55,13 @@ ai_edge_torch/generative/examples/__init__.py,sha256=hHLluseD2R0Hh4W6XZRIXY_dRQe
 ai_edge_torch/generative/examples/amd_llama_135m/__init__.py,sha256=hHLluseD2R0Hh4W6XZRIXY_dRQeYudjsrKGf6LZz65g,671
 ai_edge_torch/generative/examples/amd_llama_135m/amd_llama_135m.py,sha256=PTKcl-CHQnzExQSfrwG9YC0KPc8zomG7WlPabXtZLx4,2910
 ai_edge_torch/generative/examples/amd_llama_135m/convert_to_tflite.py,sha256=s2f5TJos6rSgogqeFk0qsOpI30qsR04umk9hAAZ5918,1782
-ai_edge_torch/generative/examples/amd_llama_135m/verify.py,sha256=o13NkFlBgawBsjdJup05VMUjAPvDRAmig6VyEkX8q6U,2426
+ai_edge_torch/generative/examples/amd_llama_135m/verify.py,sha256=uyBg5-trxQEjEHDZMX4qojkcsZgERUiPqIgR9n0_AY4,1311
+ai_edge_torch/generative/examples/amd_llama_135m/verify_util.py,sha256=_d4r1WgqyUqb7nPIhba8hZsrqqOEVc2AF30j1mTjQBw,2838
 ai_edge_torch/generative/examples/deepseek/__init__.py,sha256=JaAnrFoXTl3RJX97XspklkTyqOHVyAgRJsZtzNDd10c,671
 ai_edge_torch/generative/examples/deepseek/convert_to_tflite.py,sha256=xTPfT3Mt_4bMfGkrqDKatLecZOuaE0WhxXs3uAsO_uU,1749
 ai_edge_torch/generative/examples/deepseek/deepseek.py,sha256=9gUnK1IOifQyYpm03f64Mzg-afwbYY9kVWz6-ynq8zY,3014
-ai_edge_torch/generative/examples/deepseek/verify.py,sha256=iYldze-pvZGvPkkqr6zA7EmitPnH9sXkzjNVx353IcE,2403
+ai_edge_torch/generative/examples/deepseek/verify.py,sha256=HkvgEyGb-V_f6mWfyeN7Ai5uADAVQNzWvkygaKJiLAc,1344
+ai_edge_torch/generative/examples/deepseek/verify_util.py,sha256=WIaDA0Iw_AM1tzligxY9hnJpaYljoqWQ2d0UrUHppMM,2848
 ai_edge_torch/generative/examples/gemma/__init__.py,sha256=hHLluseD2R0Hh4W6XZRIXY_dRQeYudjsrKGf6LZz65g,671
 ai_edge_torch/generative/examples/gemma/convert_gemma1_to_tflite.py,sha256=t2qZTjyM2imPenb14fzbQ-CHj5Cejw4M5xfEZpgX6Uc,1748
 ai_edge_torch/generative/examples/gemma/convert_gemma2_to_tflite.py,sha256=Yj-b4S9BNxArnGjruRIymCiWrlf7ZvwiG6keTVGldk4,1816
@@ -67,29 +69,32 @@ ai_edge_torch/generative/examples/gemma/gemma1.py,sha256=wV_tr51UIwiKki8u5i-Q2YF
 ai_edge_torch/generative/examples/gemma/gemma2.py,sha256=b3zCFOjeU6T7K2PLUBABurpf7UjRIsGKkOym1wRuJOg,11630
 ai_edge_torch/generative/examples/gemma/verify_gemma1.py,sha256=ip-Gmk4CI5f0GWSdAIdrectxQWJ0t328KCsA4nfHuGg,1736
 ai_edge_torch/generative/examples/gemma/verify_gemma2.py,sha256=jhiyinOqPt5ZZjEadDRZt_wY5fiLSCpMo54PcxFaL_Q,1789
-ai_edge_torch/generative/examples/gemma/verify_util.py,sha256=n7f2nF6Lin_tDvPs0JVldsuaBzo7pAwi5YAHAhlIxQg,6139
+ai_edge_torch/generative/examples/gemma/verify_util.py,sha256=bbbdwuP4DKekvVuWHZ-jYYOQISto5ZkK1hC0r1Vhq00,7907
 ai_edge_torch/generative/examples/gemma3/__init__.py,sha256=JaAnrFoXTl3RJX97XspklkTyqOHVyAgRJsZtzNDd10c,671
 ai_edge_torch/generative/examples/gemma3/convert_gemma3_to_tflite.py,sha256=wOrOV_jxCnjrhjC8X0-uIi0D-4aQjOfXw6XaxTSrM9k,2048
 ai_edge_torch/generative/examples/gemma3/decoder.py,sha256=GC22bZRTtO8IczccYpqh5nSE0FHJK3I0M9oaofrr-Ss,15344
 ai_edge_torch/generative/examples/gemma3/gemma3.py,sha256=CPk3VJUobga0MVVIVRyWhdsrlCBWdQgF5kdSw7Yo--Y,6543
 ai_edge_torch/generative/examples/gemma3/image_encoder.py,sha256=tUOI99kdny33qcDM7-z0R6F-1aU1lZ24kG5zeLVdwow,5129
 ai_edge_torch/generative/examples/gemma3/verify_gemma3.py,sha256=v8oNXFICmVOtQxfO7IhZ8GnbvotEkDi9lzYHjoQyOso,2464
-ai_edge_torch/generative/examples/gemma3/verify_util.py,sha256=1vfAtayH_I_qTpqhzu6n9xnCuvhgTzhS8IzZviW2dJQ,9418
+ai_edge_torch/generative/examples/gemma3/verify_util.py,sha256=ZgoocQSTJqFEYbNNvBaO0Be4_bgSUEnihVm47mCTzTg,9864
 ai_edge_torch/generative/examples/hammer/__init__.py,sha256=hHLluseD2R0Hh4W6XZRIXY_dRQeYudjsrKGf6LZz65g,671
 ai_edge_torch/generative/examples/hammer/convert_to_tflite.py,sha256=XLmPuJCBJjKzMTG-mRmBX92juep2zl5yYeMrEhdqQQk,1975
 ai_edge_torch/generative/examples/hammer/hammer.py,sha256=aiGRdmJbtcePRde7l_Vte61rPh_4F-zcxNuGtg_ceTY,3649
-ai_edge_torch/generative/examples/hammer/verify.py,sha256=MkzAGkbPy4LKRhyCDm1cw-9jUt4VUxLPdwK_25fCGSE,2705
+ai_edge_torch/generative/examples/hammer/verify.py,sha256=iuKFMkI1VZc9_ESUQr32cHWHsDtYOAF702TdqhD3Ns4,1589
+ai_edge_torch/generative/examples/hammer/verify_util.py,sha256=OtmLYBd2AlliSqj_5rNZokfwoXt2pBwwjJAYRv_dKNg,2905
 ai_edge_torch/generative/examples/llama/__init__.py,sha256=hHLluseD2R0Hh4W6XZRIXY_dRQeYudjsrKGf6LZz65g,671
 ai_edge_torch/generative/examples/llama/convert_to_tflite.py,sha256=4qnMyvJHqhqf9k01wEsO23BKo6tSy2KD7sHdTGimKGg,1957
 ai_edge_torch/generative/examples/llama/llama.py,sha256=eWPFnuSxhjuk5XZmvtndu_Z1-e9NlZg7-uFfiOqJXfw,6952
-ai_edge_torch/generative/examples/llama/verify.py,sha256=X7oKQi85M789ugBrOlMvzk8eSRR3Kf1Mprfl-U-WIpo,2842
+ai_edge_torch/generative/examples/llama/verify.py,sha256=XoF_-kxdryjt0Bt_YeHnIbLfjwFxSVioTSEG75moDr8,1581
+ai_edge_torch/generative/examples/llama/verify_util.py,sha256=kDFRkly3wz0S_SIKAMAJkFuKciX3lJWj4c_4DwzV-J8,2896
 ai_edge_torch/generative/examples/moonshine/__init__.py,sha256=hHLluseD2R0Hh4W6XZRIXY_dRQeYudjsrKGf6LZz65g,671
 ai_edge_torch/generative/examples/moonshine/convert_moonshine_to_tflite.py,sha256=_GkaSkregS3NWN38UGXxj4pED5gtQGaaPZx5_CZ0TVM,1657
 ai_edge_torch/generative/examples/moonshine/moonshine.py,sha256=nZ2b8u4TmsB5sgdClgAuH8E78bcTv9RCnF9666HqP2M,3394
 ai_edge_torch/generative/examples/openelm/__init__.py,sha256=hHLluseD2R0Hh4W6XZRIXY_dRQeYudjsrKGf6LZz65g,671
 ai_edge_torch/generative/examples/openelm/convert_to_tflite.py,sha256=S7OP8PJcOQbm8AHvi_Tc3qnQuVOtjMFNlwaZQ_oirUM,1747
 ai_edge_torch/generative/examples/openelm/openelm.py,sha256=R_E0hXsg6l8ANEgBBy0R8egz3p4ONJvBmPWs6sXx63M,4692
-ai_edge_torch/generative/examples/openelm/verify.py,sha256=4W26ZtPF5Cb9mpHYuRM4b2QB_4W76zf4WV36KzexVjs,2446
+ai_edge_torch/generative/examples/openelm/verify.py,sha256=kRoNEUEsrz51PFSeTPcrYsPBQRLtUmYM3t_-Jl0oFqM,1300
+ai_edge_torch/generative/examples/openelm/verify_util.py,sha256=VA08XH1sDCqozY7CTlOnMz_UT_eyObll-LO1Q60RCRs,2790
 ai_edge_torch/generative/examples/paligemma/__init__.py,sha256=hHLluseD2R0Hh4W6XZRIXY_dRQeYudjsrKGf6LZz65g,671
 ai_edge_torch/generative/examples/paligemma/convert_to_tflite.py,sha256=Fl4k-lcpiUaJS0A1E7HVVUW7iTcZAU4FbA4KcSkO5SQ,2212
 ai_edge_torch/generative/examples/paligemma/decoder.py,sha256=GLlfbJr3ZIzmH643IwXyrG54qKEYMPRsvhU6gXXi7yg,5490
@@ -107,13 +112,15 @@ ai_edge_torch/generative/examples/phi/convert_to_tflite.py,sha256=tY5uwRu-4Jxro7
 ai_edge_torch/generative/examples/phi/phi2.py,sha256=RprdXLbdG5lDOAgN4xolZZwp74vbRHHuf_-CzjnI5cA,3602
 ai_edge_torch/generative/examples/phi/phi3.py,sha256=LW1E0C7A3IIyB5CLbVt914YJB4Fx6mbXh4xXibDHA2w,7054
 ai_edge_torch/generative/examples/phi/phi4.py,sha256=ZHA0Rq7ifgxiHC_8PJf-y7WCA7i_2SlsiGibyOMBP4s,5837
-ai_edge_torch/generative/examples/phi/verify.py,sha256=YPFCdbnfmvq38fbpBNr0kHPfSZo4p3_6WkLJAW3pLPo,2177
-ai_edge_torch/generative/examples/phi/verify_phi3.py,sha256=kVYaBVvddfQng0IyZGxyTJEzhiPO0G4VFJm2WOc2Q94,2360
-ai_edge_torch/generative/examples/phi/verify_phi4.py,sha256=BoCa5kUBRHtMQ-5ql6yD4pG4xHJMyUiQlpMOWVx-JgY,2356
+ai_edge_torch/generative/examples/phi/verify.py,sha256=fIWgqypLQ3uOQ1u5uuklYiQSJPhKCTYIBACjrp7DbMA,1346
+ai_edge_torch/generative/examples/phi/verify_phi3.py,sha256=TwIu2xUPQyMUTFdz29E2y75wfq4c1fGJnT3QfA3eS1s,1347
+ai_edge_torch/generative/examples/phi/verify_phi4.py,sha256=2MlgQrfRkhE7Dya8MIixGwpqEZYdPjQkUGB47Mt1hSI,1343
+ai_edge_torch/generative/examples/phi/verify_util.py,sha256=kRREOMSikn_BRbTDkQiXBllPZwmWHa9KUk-kK5lCkbU,2945
 ai_edge_torch/generative/examples/qwen/__init__.py,sha256=hHLluseD2R0Hh4W6XZRIXY_dRQeYudjsrKGf6LZz65g,671
 ai_edge_torch/generative/examples/qwen/convert_to_tflite.py,sha256=TnzyARHQgmWeOdYsV9WpRj5vhKGBH0kAbp3tMj8ZCYw,1998
 ai_edge_torch/generative/examples/qwen/qwen.py,sha256=EcIHVeBcJLc290TiPkPfE7jdG_VXZYKlVGf0XQXzqo8,4554
-ai_edge_torch/generative/examples/qwen/verify.py,sha256=9_AyEJTeUfvhhID64Rto2bflFPyXMFokdQLsseLUMiI,2775
+ai_edge_torch/generative/examples/qwen/verify.py,sha256=mP1SIAX2B1vFO02vRkAZC0UCyvBBxeWxK_456gG5a1s,1633
+ai_edge_torch/generative/examples/qwen/verify_util.py,sha256=jEmqYnOkOcQhOmHJrHsX0vdLq7JSahROvEBrG6n7tqg,2919
 ai_edge_torch/generative/examples/qwen_vl/__init__.py,sha256=JaAnrFoXTl3RJX97XspklkTyqOHVyAgRJsZtzNDd10c,671
 ai_edge_torch/generative/examples/qwen_vl/convert_to_tflite.py,sha256=BM-ed7KrmPwzI3MvDs2R7P-kJgE1SK_cNVqIfXhtJjs,2411
 ai_edge_torch/generative/examples/qwen_vl/decoder.py,sha256=plOi-3LltxReW_HVxhxwee_rYCQq-gsOwbGZtRsM8N8,4443
@@ -126,7 +133,8 @@ ai_edge_torch/generative/examples/smollm/__init__.py,sha256=hHLluseD2R0Hh4W6XZRI
 ai_edge_torch/generative/examples/smollm/convert_to_tflite.py,sha256=QVRX_ovqBQi8fKAG6PezaO1qoRvMGpVxNH-_sds0pf8,1997
 ai_edge_torch/generative/examples/smollm/convert_v2_to_tflite.py,sha256=rOVYSaS68_otJcGewQSconBCPD4GhDEIIyquD4dSUWc,1979
 ai_edge_torch/generative/examples/smollm/smollm.py,sha256=nQRiq6phJbtl3GAEEsJ_bPP_zrpQmiPumNEWCRrECn0,4028
-ai_edge_torch/generative/examples/smollm/verify.py,sha256=sH3rn1TbaCusPiUD5XlECiHY0rvoHIXALbk7ECOiinI,2720
+ai_edge_torch/generative/examples/smollm/verify.py,sha256=tXiAnwOnqgwyoa8dI4tCBiGUXkOMfdE9MUkkY_Bc4Ig,1603
+ai_edge_torch/generative/examples/smollm/verify_util.py,sha256=KT-eGsHFqtmu30ukC3jupXbF_qS3qx62hjLZfZt9ea8,2896
 ai_edge_torch/generative/examples/stable_diffusion/__init__.py,sha256=hHLluseD2R0Hh4W6XZRIXY_dRQeYudjsrKGf6LZz65g,671
 ai_edge_torch/generative/examples/stable_diffusion/attention.py,sha256=kDWG6MlIGa89zC5KSRcJlw2c4ITuw8KcchtfmF55f4g,3545
 ai_edge_torch/generative/examples/stable_diffusion/clip.py,sha256=lSCRZsoLjH_kqasRMwCy5IogkhyJdwcHKsPEfyxsXCQ,6112
@@ -153,7 +161,8 @@ ai_edge_torch/generative/examples/test_models/toy_model_with_kv_cache.py,sha256=
 ai_edge_torch/generative/examples/tiny_llama/__init__.py,sha256=hHLluseD2R0Hh4W6XZRIXY_dRQeYudjsrKGf6LZz65g,671
 ai_edge_torch/generative/examples/tiny_llama/convert_to_tflite.py,sha256=urWkWjOaGzV2gwMXoGEs1mfHNEXfEKgwuXmQ0lrWcbM,1761
 ai_edge_torch/generative/examples/tiny_llama/tiny_llama.py,sha256=cVNP_a_3UBML0j9ITtcITeVXqCdcC7U1JoYwir09Dk8,2936
-ai_edge_torch/generative/examples/tiny_llama/verify.py,sha256=LRu6PSw7Lqu6HGbv1tO2i0nUCqe-VkRgboA10VZ7KNg,2431
+ai_edge_torch/generative/examples/tiny_llama/verify.py,sha256=qzUsW8tJlAD9mqRxDSAcz5xSUKNlBz_DykA3PwUHMwc,1336
+ai_edge_torch/generative/examples/tiny_llama/verify_util.py,sha256=_zYGqP4HO_Stci14C7PXBNnQIT9TBa5uLUEcGfzxFvQ,2813
 ai_edge_torch/generative/fx_passes/__init__.py,sha256=PFSMsA1vfBfrV9ssBCkYJNl8Hx_bLdWjN01iyjPM5jE,1094
 ai_edge_torch/generative/fx_passes/remove_sdpa_zero_mask_pass.py,sha256=myGjal5A8yIBoqgArd2k40rZmCgD1Ya369KR7182bhI,2129
 ai_edge_torch/generative/layers/__init__.py,sha256=hHLluseD2R0Hh4W6XZRIXY_dRQeYudjsrKGf6LZz65g,671
@@ -196,7 +205,7 @@ ai_edge_torch/generative/test/utils.py,sha256=tF6aCfAGJnc9dmzCnZCEOuKNVimfWOqscv
 ai_edge_torch/generative/utilities/__init__.py,sha256=-_jxnnFnCgnTU4oTm4MnRsvL5lqhomBNdFBbqfmfHPo,720
 ai_edge_torch/generative/utilities/converter.py,sha256=VRI960xo86g6lGLc_II3vDovFMa2DGIxnAZgE2GfSiM,15530
 ai_edge_torch/generative/utilities/export_config.py,sha256=5IvR3grlMd4mWO5c_Y4x9Fk1b1xa57MzlYNE8XUaN28,2049
-ai_edge_torch/generative/utilities/loader.py,sha256=y1uSkUBiR0b9U4aoCQQk9qk7ctya_vEeY28Wc0A5e2s,15504
+ai_edge_torch/generative/utilities/loader.py,sha256=ODAdOnwQXscVPiUM6ssFWqDtD-Hl-h814X1EH1c0tuw,15969
 ai_edge_torch/generative/utilities/model_builder.py,sha256=tBfOcsI_NcneggHqkCSydYN3ZgmkzPc6nW0AJrA81wI,6461
 ai_edge_torch/generative/utilities/moonshine_loader.py,sha256=_RpFabSqtGH5PHiP3_1f6QfO14qMADUxr_HGRlVDFB0,4891
 ai_edge_torch/generative/utilities/stable_diffusion_loader.py,sha256=dqPD9qRXEWtU3ombslOC-BE2l_dMwHoCNu7NsIJhsso,36158
@@ -253,8 +262,8 @@ ai_edge_torch/testing/__init__.py,sha256=_yGgvnBZWb7T3IN3mc4x1sS4vM96HZwM8pwIcPG
 ai_edge_torch/testing/export.py,sha256=k5mGDGzwc23Z4zaIVDs8CNh-oOt64gsf9MS9NjhbPy4,3293
 ai_edge_torch/testing/model_coverage/__init__.py,sha256=5P8J6Zk5YYtDvTBucFvB9NGSRI7Gw_24WnrbhXgycEE,765
 ai_edge_torch/testing/model_coverage/model_coverage.py,sha256=UPB448aMDUyC0HNYVqio2rcJPnDN0tBQMP08J6vPYew,4718
-ai_edge_torch_nightly-0.5.0.dev20250517.dist-info/LICENSE,sha256=z8d0m5b2O9McPEK1xHG_dWgUBT6EfBDz6wA0F7xSPTA,11358
-ai_edge_torch_nightly-0.5.0.dev20250517.dist-info/METADATA,sha256=L8usKutHqIHjPswWNp7b3ynJzJiS5cTC9YWTnP82Qm8,2074
-ai_edge_torch_nightly-0.5.0.dev20250517.dist-info/WHEEL,sha256=tZoeGjtWxWRfdplE7E3d45VPlLNQnvbKiYnx7gwAy8A,92
-ai_edge_torch_nightly-0.5.0.dev20250517.dist-info/top_level.txt,sha256=5KXRaF2hwkApYxf7Y8y_tVb9aulGTlbOoNdbx1aKRkE,14
-ai_edge_torch_nightly-0.5.0.dev20250517.dist-info/RECORD,,
+ai_edge_torch_nightly-0.5.0.dev20250518.dist-info/LICENSE,sha256=z8d0m5b2O9McPEK1xHG_dWgUBT6EfBDz6wA0F7xSPTA,11358
+ai_edge_torch_nightly-0.5.0.dev20250518.dist-info/METADATA,sha256=G9SZNm9HEGhIlSjLENxNqaA7cIFNWJ83ZN8BMZF9igA,2074
+ai_edge_torch_nightly-0.5.0.dev20250518.dist-info/WHEEL,sha256=tZoeGjtWxWRfdplE7E3d45VPlLNQnvbKiYnx7gwAy8A,92
+ai_edge_torch_nightly-0.5.0.dev20250518.dist-info/top_level.txt,sha256=5KXRaF2hwkApYxf7Y8y_tVb9aulGTlbOoNdbx1aKRkE,14
+ai_edge_torch_nightly-0.5.0.dev20250518.dist-info/RECORD,,