ai-edge-litert-nightly 1.4.0.dev20250729__cp312-cp312-macosx_12_0_arm64.whl → 1.4.0.dev20250814__cp312-cp312-macosx_12_0_arm64.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of ai-edge-litert-nightly might be problematic.

Files changed (45)
  1. ai_edge_litert/__init__.py +1 -1
  2. ai_edge_litert/any_pb2.py +4 -4
  3. ai_edge_litert/api_pb2.py +4 -4
  4. ai_edge_litert/descriptor_pb2.py +310 -118
  5. ai_edge_litert/duration_pb2.py +4 -4
  6. ai_edge_litert/empty_pb2.py +4 -4
  7. ai_edge_litert/field_mask_pb2.py +4 -4
  8. ai_edge_litert/model_runtime_info_pb2.py +4 -4
  9. ai_edge_litert/plugin_pb2.py +4 -4
  10. ai_edge_litert/profiling_info_pb2.py +4 -4
  11. ai_edge_litert/source_context_pb2.py +4 -4
  12. ai_edge_litert/struct_pb2.py +4 -4
  13. ai_edge_litert/timestamp_pb2.py +4 -4
  14. ai_edge_litert/type_pb2.py +4 -4
  15. ai_edge_litert/wrappers_pb2.py +4 -4
  16. {ai_edge_litert_nightly-1.4.0.dev20250729.dist-info → ai_edge_litert_nightly-1.4.0.dev20250814.dist-info}/METADATA +1 -1
  17. ai_edge_litert_nightly-1.4.0.dev20250814.dist-info/RECORD +36 -0
  18. ai_edge_litert/aot/__init__.py +0 -0
  19. ai_edge_litert/aot/ai_pack/__init__.py +0 -0
  20. ai_edge_litert/aot/ai_pack/export_lib.py +0 -281
  21. ai_edge_litert/aot/aot_compile.py +0 -152
  22. ai_edge_litert/aot/core/__init__.py +0 -0
  23. ai_edge_litert/aot/core/apply_plugin.py +0 -146
  24. ai_edge_litert/aot/core/common.py +0 -95
  25. ai_edge_litert/aot/core/components.py +0 -93
  26. ai_edge_litert/aot/core/mlir_transforms.py +0 -36
  27. ai_edge_litert/aot/core/tflxx_util.py +0 -30
  28. ai_edge_litert/aot/core/types.py +0 -374
  29. ai_edge_litert/aot/prepare_for_npu.py +0 -152
  30. ai_edge_litert/aot/vendors/__init__.py +0 -18
  31. ai_edge_litert/aot/vendors/example/__init__.py +0 -0
  32. ai_edge_litert/aot/vendors/example/example_backend.py +0 -157
  33. ai_edge_litert/aot/vendors/fallback_backend.py +0 -128
  34. ai_edge_litert/aot/vendors/import_vendor.py +0 -132
  35. ai_edge_litert/aot/vendors/mediatek/__init__.py +0 -0
  36. ai_edge_litert/aot/vendors/mediatek/mediatek_backend.py +0 -196
  37. ai_edge_litert/aot/vendors/mediatek/target.py +0 -91
  38. ai_edge_litert/aot/vendors/qualcomm/__init__.py +0 -0
  39. ai_edge_litert/aot/vendors/qualcomm/qualcomm_backend.py +0 -161
  40. ai_edge_litert/aot/vendors/qualcomm/target.py +0 -74
  41. ai_edge_litert/libLiteRtRuntimeCApi.so +0 -0
  42. ai_edge_litert/tools/apply_plugin_main +0 -0
  43. ai_edge_litert_nightly-1.4.0.dev20250729.dist-info/RECORD +0 -61
  44. {ai_edge_litert_nightly-1.4.0.dev20250729.dist-info → ai_edge_litert_nightly-1.4.0.dev20250814.dist-info}/WHEEL +0 -0
  45. {ai_edge_litert_nightly-1.4.0.dev20250729.dist-info → ai_edge_litert_nightly-1.4.0.dev20250814.dist-info}/top_level.txt +0 -0
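
Taken together, entries 18-40 above remove the entire ai_edge_litert.aot subpackage — the AOT compilation entry points, the AI pack export helpers, and the MediaTek/Qualcomm vendor backends — and those modules are likewise absent from the new RECORD (36 entries versus 61 in the previous nightly). Code written against the 20250729 nightly that imports these modules will fail at import time on 20250814. Below is a minimal, hedged sketch of guarding for that; the module path ai_edge_litert.aot.aot_compile is taken from the file list, while the HAS_AOT flag and the fallback message are purely illustrative.

# Hedged sketch: probe whether the aot subpackage is still shipped.
# ai_edge_litert.aot.aot_compile is listed above as removed in 1.4.0.dev20250814;
# HAS_AOT and the message below are illustrative, not part of the package API.
try:
    from ai_edge_litert.aot import aot_compile  # removed in this release
    HAS_AOT = True
except ImportError:
    HAS_AOT = False

if not HAS_AOT:
    print(
        "ai_edge_litert.aot is unavailable; pin "
        "ai-edge-litert-nightly==1.4.0.dev20250729 if ahead-of-time NPU "
        "compilation is required."
    )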
ai_edge_litert/duration_pb2.py
@@ -2,7 +2,7 @@
  # Generated by the protocol buffer compiler. DO NOT EDIT!
  # NO CHECKED-IN PROTOBUF GENCODE
  # source: google/protobuf/duration.proto
- # Protobuf Python Version: 5.28.3
+ # Protobuf Python Version: 6.31.1
  """Generated protocol buffer code."""
  from google.protobuf import descriptor as _descriptor
  from google.protobuf import descriptor_pool as _descriptor_pool
@@ -11,9 +11,9 @@ from google.protobuf import symbol_database as _symbol_database
  from google.protobuf.internal import builder as _builder
  _runtime_version.ValidateProtobufRuntimeVersion(
  _runtime_version.Domain.PUBLIC,
- 5,
- 28,
- 3,
+ 6,
+ 31,
+ 1,
  '',
  'google/protobuf/duration.proto'
  )
ai_edge_litert/empty_pb2.py
@@ -2,7 +2,7 @@
  # Generated by the protocol buffer compiler. DO NOT EDIT!
  # NO CHECKED-IN PROTOBUF GENCODE
  # source: google/protobuf/empty.proto
- # Protobuf Python Version: 5.28.3
+ # Protobuf Python Version: 6.31.1
  """Generated protocol buffer code."""
  from google.protobuf import descriptor as _descriptor
  from google.protobuf import descriptor_pool as _descriptor_pool
@@ -11,9 +11,9 @@ from google.protobuf import symbol_database as _symbol_database
  from google.protobuf.internal import builder as _builder
  _runtime_version.ValidateProtobufRuntimeVersion(
  _runtime_version.Domain.PUBLIC,
- 5,
- 28,
- 3,
+ 6,
+ 31,
+ 1,
  '',
  'google/protobuf/empty.proto'
  )
ai_edge_litert/field_mask_pb2.py
@@ -2,7 +2,7 @@
  # Generated by the protocol buffer compiler. DO NOT EDIT!
  # NO CHECKED-IN PROTOBUF GENCODE
  # source: google/protobuf/field_mask.proto
- # Protobuf Python Version: 5.28.3
+ # Protobuf Python Version: 6.31.1
  """Generated protocol buffer code."""
  from google.protobuf import descriptor as _descriptor
  from google.protobuf import descriptor_pool as _descriptor_pool
@@ -11,9 +11,9 @@ from google.protobuf import symbol_database as _symbol_database
  from google.protobuf.internal import builder as _builder
  _runtime_version.ValidateProtobufRuntimeVersion(
  _runtime_version.Domain.PUBLIC,
- 5,
- 28,
- 3,
+ 6,
+ 31,
+ 1,
  '',
  'google/protobuf/field_mask.proto'
  )
ai_edge_litert/model_runtime_info_pb2.py
@@ -2,7 +2,7 @@
  # Generated by the protocol buffer compiler. DO NOT EDIT!
  # NO CHECKED-IN PROTOBUF GENCODE
  # source: tflite/profiling/proto/model_runtime_info.proto
- # Protobuf Python Version: 5.28.3
+ # Protobuf Python Version: 6.31.1
  """Generated protocol buffer code."""
  from google.protobuf import descriptor as _descriptor
  from google.protobuf import descriptor_pool as _descriptor_pool
@@ -11,9 +11,9 @@ from google.protobuf import symbol_database as _symbol_database
  from google.protobuf.internal import builder as _builder
  _runtime_version.ValidateProtobufRuntimeVersion(
  _runtime_version.Domain.PUBLIC,
- 5,
- 28,
- 3,
+ 6,
+ 31,
+ 1,
  '',
  'tflite/profiling/proto/model_runtime_info.proto'
  )
ai_edge_litert/plugin_pb2.py
@@ -2,7 +2,7 @@
  # Generated by the protocol buffer compiler. DO NOT EDIT!
  # NO CHECKED-IN PROTOBUF GENCODE
  # source: google/protobuf/compiler/plugin.proto
- # Protobuf Python Version: 5.28.3
+ # Protobuf Python Version: 6.31.1
  """Generated protocol buffer code."""
  from google.protobuf import descriptor as _descriptor
  from google.protobuf import descriptor_pool as _descriptor_pool
@@ -11,9 +11,9 @@ from google.protobuf import symbol_database as _symbol_database
  from google.protobuf.internal import builder as _builder
  _runtime_version.ValidateProtobufRuntimeVersion(
  _runtime_version.Domain.PUBLIC,
- 5,
- 28,
- 3,
+ 6,
+ 31,
+ 1,
  '',
  'google/protobuf/compiler/plugin.proto'
  )
ai_edge_litert/profiling_info_pb2.py
@@ -2,7 +2,7 @@
  # Generated by the protocol buffer compiler. DO NOT EDIT!
  # NO CHECKED-IN PROTOBUF GENCODE
  # source: tflite/profiling/proto/profiling_info.proto
- # Protobuf Python Version: 5.28.3
+ # Protobuf Python Version: 6.31.1
  """Generated protocol buffer code."""
  from google.protobuf import descriptor as _descriptor
  from google.protobuf import descriptor_pool as _descriptor_pool
@@ -11,9 +11,9 @@ from google.protobuf import symbol_database as _symbol_database
  from google.protobuf.internal import builder as _builder
  _runtime_version.ValidateProtobufRuntimeVersion(
  _runtime_version.Domain.PUBLIC,
- 5,
- 28,
- 3,
+ 6,
+ 31,
+ 1,
  '',
  'tflite/profiling/proto/profiling_info.proto'
  )
ai_edge_litert/source_context_pb2.py
@@ -2,7 +2,7 @@
  # Generated by the protocol buffer compiler. DO NOT EDIT!
  # NO CHECKED-IN PROTOBUF GENCODE
  # source: google/protobuf/source_context.proto
- # Protobuf Python Version: 5.28.3
+ # Protobuf Python Version: 6.31.1
  """Generated protocol buffer code."""
  from google.protobuf import descriptor as _descriptor
  from google.protobuf import descriptor_pool as _descriptor_pool
@@ -11,9 +11,9 @@ from google.protobuf import symbol_database as _symbol_database
  from google.protobuf.internal import builder as _builder
  _runtime_version.ValidateProtobufRuntimeVersion(
  _runtime_version.Domain.PUBLIC,
- 5,
- 28,
- 3,
+ 6,
+ 31,
+ 1,
  '',
  'google/protobuf/source_context.proto'
  )
ai_edge_litert/struct_pb2.py
@@ -2,7 +2,7 @@
  # Generated by the protocol buffer compiler. DO NOT EDIT!
  # NO CHECKED-IN PROTOBUF GENCODE
  # source: google/protobuf/struct.proto
- # Protobuf Python Version: 5.28.3
+ # Protobuf Python Version: 6.31.1
  """Generated protocol buffer code."""
  from google.protobuf import descriptor as _descriptor
  from google.protobuf import descriptor_pool as _descriptor_pool
@@ -11,9 +11,9 @@ from google.protobuf import symbol_database as _symbol_database
  from google.protobuf.internal import builder as _builder
  _runtime_version.ValidateProtobufRuntimeVersion(
  _runtime_version.Domain.PUBLIC,
- 5,
- 28,
- 3,
+ 6,
+ 31,
+ 1,
  '',
  'google/protobuf/struct.proto'
  )
ai_edge_litert/timestamp_pb2.py
@@ -2,7 +2,7 @@
  # Generated by the protocol buffer compiler. DO NOT EDIT!
  # NO CHECKED-IN PROTOBUF GENCODE
  # source: google/protobuf/timestamp.proto
- # Protobuf Python Version: 5.28.3
+ # Protobuf Python Version: 6.31.1
  """Generated protocol buffer code."""
  from google.protobuf import descriptor as _descriptor
  from google.protobuf import descriptor_pool as _descriptor_pool
@@ -11,9 +11,9 @@ from google.protobuf import symbol_database as _symbol_database
  from google.protobuf.internal import builder as _builder
  _runtime_version.ValidateProtobufRuntimeVersion(
  _runtime_version.Domain.PUBLIC,
- 5,
- 28,
- 3,
+ 6,
+ 31,
+ 1,
  '',
  'google/protobuf/timestamp.proto'
  )
ai_edge_litert/type_pb2.py
@@ -2,7 +2,7 @@
  # Generated by the protocol buffer compiler. DO NOT EDIT!
  # NO CHECKED-IN PROTOBUF GENCODE
  # source: google/protobuf/type.proto
- # Protobuf Python Version: 5.28.3
+ # Protobuf Python Version: 6.31.1
  """Generated protocol buffer code."""
  from google.protobuf import descriptor as _descriptor
  from google.protobuf import descriptor_pool as _descriptor_pool
@@ -11,9 +11,9 @@ from google.protobuf import symbol_database as _symbol_database
  from google.protobuf.internal import builder as _builder
  _runtime_version.ValidateProtobufRuntimeVersion(
  _runtime_version.Domain.PUBLIC,
- 5,
- 28,
- 3,
+ 6,
+ 31,
+ 1,
  '',
  'google/protobuf/type.proto'
  )
ai_edge_litert/wrappers_pb2.py
@@ -2,7 +2,7 @@
  # Generated by the protocol buffer compiler. DO NOT EDIT!
  # NO CHECKED-IN PROTOBUF GENCODE
  # source: google/protobuf/wrappers.proto
- # Protobuf Python Version: 5.28.3
+ # Protobuf Python Version: 6.31.1
  """Generated protocol buffer code."""
  from google.protobuf import descriptor as _descriptor
  from google.protobuf import descriptor_pool as _descriptor_pool
@@ -11,9 +11,9 @@ from google.protobuf import symbol_database as _symbol_database
  from google.protobuf.internal import builder as _builder
  _runtime_version.ValidateProtobufRuntimeVersion(
  _runtime_version.Domain.PUBLIC,
- 5,
- 28,
- 3,
+ 6,
+ 31,
+ 1,
  '',
  'google/protobuf/wrappers.proto'
  )
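
Every hunk above is the same regeneration: the vendored *_pb2 modules now carry a "Protobuf Python Version: 6.31.1" header instead of 5.28.3, and the version tuple passed to _runtime_version.ValidateProtobufRuntimeVersion(...) changes from (5, 28, 3) to (6, 31, 1). That call runs at import time, so this nightly generally needs a protobuf runtime at least as new as the 6.31.1 gencode; with an older runtime, importing any of these modules can raise a version error. A minimal, hedged pre-flight check is sketched below: the 6.31.1 threshold comes from the diff, while the coarse major-version comparison and the error message are illustrative rather than the exact rule the validator enforces.

# Hedged sketch: verify the installed protobuf runtime before importing the
# regenerated *_pb2 modules. The 6.x threshold comes from the gencode bump
# shown above; the precise compatibility window is enforced by
# ValidateProtobufRuntimeVersion itself, so treat this as a coarse probe.
from google.protobuf import __version__ as protobuf_version

major = int(protobuf_version.split(".")[0])
if major < 6:
    raise RuntimeError(
        f"protobuf {protobuf_version} predates the 6.31.1 gencode shipped in "
        "ai-edge-litert-nightly 1.4.0.dev20250814; upgrade protobuf or stay on "
        "the 1.4.0.dev20250729 nightly."
    )

from ai_edge_litert import duration_pb2  # import triggers the runtime-version check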
{ai_edge_litert_nightly-1.4.0.dev20250729.dist-info → ai_edge_litert_nightly-1.4.0.dev20250814.dist-info}/METADATA
@@ -1,6 +1,6 @@
  Metadata-Version: 2.4
  Name: ai-edge-litert-nightly
- Version: 1.4.0.dev20250729
+ Version: 1.4.0.dev20250814
  Summary: LiteRT is for mobile and embedded devices.
  Home-page: https://www.tensorflow.org/lite/
  Author: Google AI Edge Authors
ai_edge_litert_nightly-1.4.0.dev20250814.dist-info/RECORD
@@ -0,0 +1,36 @@
+ ai_edge_litert/__init__.py,sha256=_8wnWQPu-Jzh5AhwdogIQzRrnYwgDBb0b51yQ0pL8ng,34
+ ai_edge_litert/_pywrap_analyzer_wrapper.so,sha256=b208vF3tQRJUayfGJCiNN-h0JFAQHCkWx3mnQKGKSNY,50056
+ ai_edge_litert/_pywrap_litert_compiled_model_wrapper.so,sha256=R48bfhjg5-oSz8DUqDfvpztzk071ZF-snPbKHzEZMow,50104
+ ai_edge_litert/_pywrap_litert_tensor_buffer_wrapper.so,sha256=EZqmZDelToDS8vRG86Hekg2hG1pIrSMdcPtgiL6VIBg,50104
+ ai_edge_litert/_pywrap_modify_model_interface.so,sha256=-vhk4tGSksCJbVP48iEdO0rPQRjMs-XKxfItU-MqHwQ,50072
+ ai_edge_litert/_pywrap_string_util.so,sha256=Oy_WGgCX4sHhcGXI8XIeyWlw3VN1WTh4HLSNNwDJoV0,50040
+ ai_edge_litert/_pywrap_tensorflow_interpreter_wrapper.so,sha256=Fn-BHjXSwsiCyGxQakN7RnjDVQ7wIgjmurVfV5N1110,50104
+ ai_edge_litert/_pywrap_tensorflow_lite_calibration_wrapper.so,sha256=WxORp0WgNNwJ47VF4OIZ1kbtj6x2ZCEBZM_ygugST5s,50136
+ ai_edge_litert/_pywrap_tensorflow_lite_metrics_wrapper.so,sha256=IDQLjb0xB5hP_z2u-_-naBFhLXBlcm6DOFVWYRdgb_w,50104
+ ai_edge_litert/any_pb2.py,sha256=W6duyvBgx7RvePFCrJSxWagU7ddj1W9l8CsjarJJPOs,1703
+ ai_edge_litert/api_pb2.py,sha256=Jpn2ZFBihZFhhe47zMohsNMGIhJ9IL6VEFcLrTAlTao,2935
+ ai_edge_litert/compiled_model.py,sha256=xjoYYk5vHU6jP1QqjiLG5y7I5JpnBNOkl9khe6IT-Pw,7933
+ ai_edge_litert/descriptor_pb2.py,sha256=H0UB_-Xu4emXuqjimUeo6BaVsI0bkRex0ZWdEteBBRA,355639
+ ai_edge_litert/duration_pb2.py,sha256=ot6pGJwRMjEoJE0u-uoXmzqIjCLVVNb3vLSKhsf1w2c,1783
+ ai_edge_litert/empty_pb2.py,sha256=RtYfjWZz9WAe3fY5vDhjSo3i0M6kwZ8-kX9R_9vmyGc,1669
+ ai_edge_litert/field_mask_pb2.py,sha256=YVi1IasKjEQAlcIwM6HMgmW0wT6TMAXjiaiysrEY7Gc,1756
+ ai_edge_litert/format_converter_wrapper_pybind11.so,sha256=b_Nh1BlncXsqjzWwRNWo7Bv0sCAI8Afl7kMkOylrnxE,50088
+ ai_edge_litert/hardware_accelerator.py,sha256=FgouirT1Cs80mqvMvLbO9O7wiPBnQaWDB5vjFpJhtJo,800
+ ai_edge_litert/interpreter.py,sha256=4u9FfiaYkDxf_IxCjzeLWyTyxwG184uVxqg0fbj6tYg,40650
+ ai_edge_litert/metrics_interface.py,sha256=dVu6SmbnQUntPgE5o6BxHVMyemwli-7F6tDfVMGrlYI,1542
+ ai_edge_litert/metrics_portable.py,sha256=KKvR9ZOe8j2ZeBtDo_6gWJ8kENKoOawPK3LPkevnZa8,2039
+ ai_edge_litert/model_runtime_info_pb2.py,sha256=E93kYJtWnsChrdegZJbKzeFpplssBGEURTdOOfjtOxg,6370
+ ai_edge_litert/plugin_pb2.py,sha256=dDsvFbuWV2yq4ghU6XnHMW6ZrbxYG2l8DAArIoqL8PY,3514
+ ai_edge_litert/profiling_info_pb2.py,sha256=RxZo_P9siirO7ktyZImOxVKMZ7g-ienzuP7rSsF61YM,3579
+ ai_edge_litert/pywrap_genai_ops.so,sha256=jmpe56cqSgt1rFR-4mVysQmCvSfRGzg3o1Ml6_Wt-os,50024
+ ai_edge_litert/schema_py_generated.py,sha256=j1mIWi3QiiY0kVRcugNdqVcTGfRiaZmR6jb7AvwVUyM,649242
+ ai_edge_litert/source_context_pb2.py,sha256=m_GMQpv9LUK-7oKg1Vv64fTL6Bd3ZThH-fVVLtPxdqI,1775
+ ai_edge_litert/struct_pb2.py,sha256=DJ6P0fZe53upcah9J1jDo6BSClobouW9dHdCTItZO1A,2955
+ ai_edge_litert/tensor_buffer.py,sha256=3Xy7kd_aR499QrfO0YITvr_z-aU1eONga41KiDBUeew,5356
+ ai_edge_litert/timestamp_pb2.py,sha256=K7Gs_qOn2XAMZTNUUrJ1XcGf4OmYhdt858I_zDtZYzg,1793
+ ai_edge_litert/type_pb2.py,sha256=x8rTulmlk9FxtRJal7BnUsaPbn2TcLlu8D7AVoVPjMA,5065
+ ai_edge_litert/wrappers_pb2.py,sha256=4hQAZRGeaE5DyY6YQ7VfrqozPa_d_LCOBEYjxfQNlKs,2955
+ ai_edge_litert_nightly-1.4.0.dev20250814.dist-info/METADATA,sha256=uUlRPU3OhyxzwH9YEEuY1K8ltKCBP4j4HCL9umv5zQ8,1911
+ ai_edge_litert_nightly-1.4.0.dev20250814.dist-info/WHEEL,sha256=fqm6Ds3mU75lnZFd7HRercjd2pByFTGhJZf6kGlyJBs,109
+ ai_edge_litert_nightly-1.4.0.dev20250814.dist-info/top_level.txt,sha256=WcDZgG99n0a0xDS_ipL8ZWy956g1v5fVyR3FH96VDT0,15
+ ai_edge_litert_nightly-1.4.0.dev20250814.dist-info/RECORD,,
File without changes
File without changes
ai_edge_litert/aot/ai_pack/export_lib.py
@@ -1,281 +0,0 @@
- # Copyright 2025 Google LLC.
- #
- # Licensed under the Apache License, Version 2.0 (the "License");
- # you may not use this file except in compliance with the License.
- # You may obtain a copy of the License at
- #
- # http://www.apache.org/licenses/LICENSE-2.0
- #
- # Unless required by applicable law or agreed to in writing, software
- # distributed under the License is distributed on an "AS IS" BASIS,
- # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- # See the License for the specific language governing permissions and
- # limitations under the License.
- """Utility functions for exporting models to AI pack format."""
-
- import itertools
- import os
- import pathlib
- from typing import cast
-
- from ai_edge_litert.aot.core import common
- from ai_edge_litert.aot.core import types
- from ai_edge_litert.aot.vendors import fallback_backend
- from ai_edge_litert.aot.vendors.mediatek import mediatek_backend
- from ai_edge_litert.aot.vendors.mediatek import target as mtk_target
- from ai_edge_litert.aot.vendors.qualcomm import qualcomm_backend
- from ai_edge_litert.aot.vendors.qualcomm import target as qnn_target
-
- # TODO: b/407453529 - Add unittests.
-
-
- _DEVICE_TARGETING_CONFIGURATION = """<config:device-targeting-config
- xmlns:config="http://schemas.android.com/apk/config">
- {device_groups}
- </config:device-targeting-config>"""
-
- _DEVICE_GROUP_TEMPLATE = """ <config:device-group name="{device_group_name}">
- {device_selectors}
- </config:device-group>"""
-
- _DEVICE_SELECTOR_TEMPLATE = """ <config:device-selector>
- <config:system-on-chip manufacturer="{soc_man}" model="{soc_model}"/>
- </config:device-selector>"""
-
-
- def _is_mobile_device_backend(backend: types.Backend):
- target = backend.target
- if backend.id() == qualcomm_backend.QualcommBackend.id():
- target = cast(qnn_target.Target, target)
- # Non Android QNN targets.
- if target.soc_model in (
- qnn_target.SocModel.SA8255,
- qnn_target.SocModel.SA8295,
- ):
- return False
- return True
-
-
- def _export_model_files_to_ai_pack(
- compiled_models: types.CompilationResult,
- ai_pack_dir: pathlib.Path,
- ai_pack_name: str,
- litert_model_name: str,
- *,
- separate_mtk_ai_pack: bool = True,
- ):
- """Exports the model tflite files to the AI pack directory structure.
-
- Args:
- compiled_models: The compiled models to export.
- ai_pack_dir: The directory to export the AI pack to.
- ai_pack_name: The name of the AI pack.
- litert_model_name: The name of the model in the litert format.
- separate_mtk_ai_pack: Whether to separate the MTK AI pack. If True, the main
- AI pack will use the fallback model for MTK targets. The MTK AI pack will
- contain all MTK models, and empty directories for non-MTK targets.
- """
- fallback_model = None
- for backend, model in compiled_models.models_with_backend:
- if backend.target_id == fallback_backend.FallbackBackend.id():
- fallback_model = model
- assert fallback_model is not None, 'Fallback model is required.'
-
- model_export_dir = ai_pack_dir / ai_pack_name / 'src/main/assets'
- os.makedirs(model_export_dir, exist_ok=True)
- for backend, model in compiled_models.models_with_backend:
- if not _is_mobile_device_backend(backend):
- continue
- target_id = backend.target_id
- backend_id = backend.id()
- if backend_id == fallback_backend.FallbackBackend.id():
- target_id = 'other'
- elif backend_id == mediatek_backend.MediaTekBackend.id():
- target_id = backend.target_id.replace(
- mtk_target.SocManufacturer.MEDIATEK, 'Mediatek'
- )
- group_name = 'model#group_' + target_id
- export_dir = model_export_dir / group_name
- os.makedirs(export_dir, exist_ok=True)
- model_export_path = export_dir / (litert_model_name + common.DOT_TFLITE)
- if (
- separate_mtk_ai_pack
- and backend_id == mediatek_backend.MediaTekBackend.id()
- ):
- # Use the fallback model for MTK targets in main AI pack.
- model_to_export = fallback_model
- else:
- model_to_export = model
- if not model_to_export.in_memory:
- model_to_export.load()
- model_to_export.save(model_export_path, export_only=True)
-
- if separate_mtk_ai_pack:
- _export_model_files_to_mtk_ai_pack(
- compiled_models=compiled_models,
- ai_pack_dir=ai_pack_dir,
- ai_pack_name=ai_pack_name + '_mtk',
- litert_model_name=litert_model_name + '_mtk',
- )
-
-
- def _export_model_files_to_mtk_ai_pack(
- compiled_models: types.CompilationResult,
- ai_pack_dir: pathlib.Path,
- ai_pack_name: str,
- litert_model_name: str,
- ):
- """Exports the model tflite files to the MTK AI pack directory structure."""
- model_export_dir = ai_pack_dir / ai_pack_name / 'src/main/assets'
- os.makedirs(model_export_dir, exist_ok=True)
- for backend, model in compiled_models.models_with_backend:
- if not _is_mobile_device_backend(backend):
- continue
- backend_id = backend.id()
- target_id = backend.target_id
- if backend_id == fallback_backend.FallbackBackend.id():
- target_id = 'other'
- elif backend_id == mediatek_backend.MediaTekBackend.id():
- target_id = backend.target_id.replace(
- mtk_target.SocManufacturer.MEDIATEK, 'Mediatek'
- )
- group_name = 'model#group_' + target_id
- export_dir = model_export_dir / group_name
- os.makedirs(export_dir, exist_ok=True)
- if backend_id != mediatek_backend.MediaTekBackend.id():
- # Skip non-MTK targets, just create a placeholder file.
- placeholder_file = export_dir / 'placeholder.txt'
- placeholder_file.touch()
- continue
- model_export_path = export_dir / (litert_model_name + common.DOT_TFLITE)
- if not model.in_memory:
- model.load()
- model.save(model_export_path, export_only=True)
-
-
- def _build_targeting_config(compiled_backends: list[types.Backend]) -> str:
- """Builds device-targeting-config in device_targeting_configuration.xml."""
- device_groups = []
- for backend in compiled_backends:
- if not _is_mobile_device_backend(backend):
- continue
- target = backend.target
- device_group = _target_to_ai_pack_info(target)
- if device_group:
- device_groups.append(device_group)
- device_groups = '\n'.join(device_groups)
- return _DEVICE_TARGETING_CONFIGURATION.format(device_groups=device_groups)
-
-
- def _target_to_ai_pack_info(target: types.Target) -> str | None:
- """Builds the device group used in device_targeting_configuration.xml."""
- if isinstance(target, qnn_target.Target):
- group_name = str(target)
- selector = _process_qnn_target(target)
- device_selectors = [
- _DEVICE_SELECTOR_TEMPLATE.format(soc_man=man, soc_model=model)
- for man, model in selector
- ]
- device_selectors = '\n'.join(device_selectors)
- device_group = _DEVICE_GROUP_TEMPLATE.format(
- device_group_name=group_name, device_selectors=device_selectors
- )
- return device_group
- elif isinstance(target, mtk_target.Target):
- group_name = str(target).replace(
- mtk_target.SocManufacturer.MEDIATEK, 'Mediatek'
- )
- # TODO: b/407453529 - Support MTK SDK Version / OS version in selector.
- selector = _process_mtk_target(target)
- device_selector = _DEVICE_SELECTOR_TEMPLATE.format(
- soc_man=selector[0], soc_model=selector[1]
- )
- device_group = _DEVICE_GROUP_TEMPLATE.format(
- device_group_name=group_name, device_selectors=device_selector
- )
- return device_group
- elif isinstance(target, fallback_backend.FallbackTarget):
- # Don't need to have device selector for fallback target.
- return None
- else:
- print('unsupported target ', target)
- return None
-
-
- # TODO: b/407453529 - Auto-generate this function from CSVs.
- def _process_qnn_target(target: qnn_target.Target) -> list[tuple[str, str]]:
- """Returns the list of (manufacturer, model) for the given QNN target."""
- # Play cannot distinguish between Qualcomm and QTI for now.
- manufacturer = ['Qualcomm', 'QTI']
- models = [str(target.soc_model)]
- return list(itertools.product(manufacturer, models))
-
-
- # TODO: b/407453529 - Auto-generate this function from CSVs.
- def _process_mtk_target(
- target: mtk_target.Target,
- ) -> tuple[str, str]:
- """Returns tuple of (manufacturer, model) for the given MTK target."""
- # Play cannot distinguish between Qualcomm and QTI for now.
- return str(target.soc_manufacturer).replace(
- mtk_target.SocManufacturer.MEDIATEK, 'Mediatek'
- ), str(target.soc_model)
-
-
- def _write_targeting_config(
- compiled_models: types.CompilationResult, ai_pack_dir: pathlib.Path
- ) -> None:
- """Writes device_targeting_configuration.xml for the given compiled models."""
- compiled_backends = [x for x, _ in compiled_models.models_with_backend]
- targeting_config = _build_targeting_config(
- compiled_backends=compiled_backends
- )
-
- targeting_config_path = ai_pack_dir / 'device_targeting_configuration.xml'
- targeting_config_path.write_text(targeting_config)
-
-
- def export(
- compiled_models: types.CompilationResult,
- ai_pack_dir: pathlib.Path | str,
- ai_pack_name: str,
- litert_model_name: str,
- ) -> None:
- """Exports the compiled models to AI pack format.
-
- This function will export the compiled models to corresponding directory
- structure:
-
- {ai_pack_dir}/
- AiPackManifest.xml
- device_targeting_configuration.xml
- {ai_pack_name}/src/main/assets/
- model#group_target_1/
- {litert_model_name}.tflite
- model#group_target_2/
- {litert_model_name}.tflite
- model#group_target_3/
- {litert_model_name}.tflite
- model#group_other/
- {litert_model_name}.tflite
-
- Args:
- compiled_models: The compiled models to export.
- ai_pack_dir: The directory to export the AI pack to.
- ai_pack_name: The name of the AI pack.
- litert_model_name: The name of the model in the litert format.
- """
- if isinstance(ai_pack_dir, str):
- ai_pack_dir = pathlib.Path(ai_pack_dir)
-
- ai_pack_dir.mkdir(parents=True, exist_ok=True)
-
- _export_model_files_to_ai_pack(
- compiled_models=compiled_models,
- ai_pack_dir=ai_pack_dir,
- ai_pack_name=ai_pack_name,
- litert_model_name=litert_model_name,
- )
- _write_targeting_config(
- compiled_models=compiled_models, ai_pack_dir=ai_pack_dir
- )
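
For reference, the public entry point of the removed module was export(), which takes a types.CompilationResult plus AI pack naming arguments and writes the directory layout shown in its docstring, including device_targeting_configuration.xml. A hedged sketch of how it was driven against the 20250729 nightly follows; only export()'s signature is taken from the code above, and aot_compile.aot_compile() with a .tflite path is an assumed way of obtaining the CompilationResult, shown for illustration only.

# Hedged usage sketch against ai-edge-litert-nightly 1.4.0.dev20250729 (the API
# removed in this release). export()'s signature comes from the diff above;
# aot_compile.aot_compile() and its argument are assumptions for illustration.
import pathlib

from ai_edge_litert.aot import aot_compile
from ai_edge_litert.aot.ai_pack import export_lib

# Assumed: an AOT step that returns a types.CompilationResult containing a CPU
# fallback model plus per-vendor NPU models.
compiled_models = aot_compile.aot_compile("my_model.tflite")

export_lib.export(
    compiled_models=compiled_models,
    ai_pack_dir=pathlib.Path("build/ai_pack"),  # root of the generated AI pack
    ai_pack_name="my_model_pack",               # -> {ai_pack_name}/src/main/assets/...
    litert_model_name="my_model",               # basename of each exported .tflite
)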