ai-edge-litert-nightly 2.0.4.dev20251102__cp313-cp313-macosx_12_0_arm64.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of ai-edge-litert-nightly might be problematic; see the registry's advisory page for more details.

Files changed (36)
  1. ai_edge_litert/__init__.py +1 -0
  2. ai_edge_litert/_pywrap_analyzer_wrapper.so +0 -0
  3. ai_edge_litert/_pywrap_litert_compiled_model_wrapper.so +0 -0
  4. ai_edge_litert/_pywrap_litert_tensor_buffer_wrapper.so +0 -0
  5. ai_edge_litert/_pywrap_modify_model_interface.so +0 -0
  6. ai_edge_litert/_pywrap_string_util.so +0 -0
  7. ai_edge_litert/_pywrap_tensorflow_interpreter_wrapper.so +0 -0
  8. ai_edge_litert/_pywrap_tensorflow_lite_calibration_wrapper.so +0 -0
  9. ai_edge_litert/_pywrap_tensorflow_lite_metrics_wrapper.so +0 -0
  10. ai_edge_litert/any_pb2.py +37 -0
  11. ai_edge_litert/api_pb2.py +43 -0
  12. ai_edge_litert/compiled_model.py +250 -0
  13. ai_edge_litert/descriptor_pb2.py +3361 -0
  14. ai_edge_litert/duration_pb2.py +37 -0
  15. ai_edge_litert/empty_pb2.py +37 -0
  16. ai_edge_litert/field_mask_pb2.py +37 -0
  17. ai_edge_litert/format_converter_wrapper_pybind11.so +0 -0
  18. ai_edge_litert/hardware_accelerator.py +22 -0
  19. ai_edge_litert/interpreter.py +1039 -0
  20. ai_edge_litert/metrics_interface.py +48 -0
  21. ai_edge_litert/metrics_portable.py +70 -0
  22. ai_edge_litert/model_runtime_info_pb2.py +66 -0
  23. ai_edge_litert/plugin_pb2.py +46 -0
  24. ai_edge_litert/profiling_info_pb2.py +47 -0
  25. ai_edge_litert/pywrap_genai_ops.so +0 -0
  26. ai_edge_litert/schema_py_generated.py +19640 -0
  27. ai_edge_litert/source_context_pb2.py +37 -0
  28. ai_edge_litert/struct_pb2.py +47 -0
  29. ai_edge_litert/tensor_buffer.py +167 -0
  30. ai_edge_litert/timestamp_pb2.py +37 -0
  31. ai_edge_litert/type_pb2.py +53 -0
  32. ai_edge_litert/wrappers_pb2.py +53 -0
  33. ai_edge_litert_nightly-2.0.4.dev20251102.dist-info/METADATA +48 -0
  34. ai_edge_litert_nightly-2.0.4.dev20251102.dist-info/RECORD +36 -0
  35. ai_edge_litert_nightly-2.0.4.dev20251102.dist-info/WHEEL +5 -0
  36. ai_edge_litert_nightly-2.0.4.dev20251102.dist-info/top_level.txt +1 -0
@@ -0,0 +1 @@
1
+ __version__ = "2.0.4.dev20251102"
Binary file
@@ -0,0 +1,37 @@
1
+ # -*- coding: utf-8 -*-
2
+ # Generated by the protocol buffer compiler. DO NOT EDIT!
3
+ # NO CHECKED-IN PROTOBUF GENCODE
4
+ # source: google/protobuf/any.proto
5
+ # Protobuf Python Version: 6.31.1
6
+ """Generated protocol buffer code."""
7
+ from google.protobuf import descriptor as _descriptor
8
+ from google.protobuf import descriptor_pool as _descriptor_pool
9
+ from google.protobuf import runtime_version as _runtime_version
10
+ from google.protobuf import symbol_database as _symbol_database
11
+ from google.protobuf.internal import builder as _builder
12
+ _runtime_version.ValidateProtobufRuntimeVersion(
13
+ _runtime_version.Domain.PUBLIC,
14
+ 6,
15
+ 31,
16
+ 1,
17
+ '',
18
+ 'google/protobuf/any.proto'
19
+ )
20
+ # @@protoc_insertion_point(imports)
21
+
22
+ _sym_db = _symbol_database.Default()
23
+
24
+
25
+
26
+
27
+ DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x19google/protobuf/any.proto\x12\x0fgoogle.protobuf\"&\n\x03\x41ny\x12\x10\n\x08type_url\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\x0c\x42v\n\x13\x63om.google.protobufB\x08\x41nyProtoP\x01Z,google.golang.org/protobuf/types/known/anypb\xa2\x02\x03GPB\xaa\x02\x1eGoogle.Protobuf.WellKnownTypesb\x06proto3')
28
+
29
+ _globals = globals()
30
+ _builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals)
31
+ _builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'google.protobuf.any_pb2', _globals)
32
+ if not _descriptor._USE_C_DESCRIPTORS:
33
+ _globals['DESCRIPTOR']._loaded_options = None
34
+ _globals['DESCRIPTOR']._serialized_options = b'\n\023com.google.protobufB\010AnyProtoP\001Z,google.golang.org/protobuf/types/known/anypb\242\002\003GPB\252\002\036Google.Protobuf.WellKnownTypes'
35
+ _globals['_ANY']._serialized_start=46
36
+ _globals['_ANY']._serialized_end=84
37
+ # @@protoc_insertion_point(module_scope)
@@ -0,0 +1,43 @@
1
+ # -*- coding: utf-8 -*-
2
+ # Generated by the protocol buffer compiler. DO NOT EDIT!
3
+ # NO CHECKED-IN PROTOBUF GENCODE
4
+ # source: google/protobuf/api.proto
5
+ # Protobuf Python Version: 6.31.1
6
+ """Generated protocol buffer code."""
7
+ from google.protobuf import descriptor as _descriptor
8
+ from google.protobuf import descriptor_pool as _descriptor_pool
9
+ from google.protobuf import runtime_version as _runtime_version
10
+ from google.protobuf import symbol_database as _symbol_database
11
+ from google.protobuf.internal import builder as _builder
12
+ _runtime_version.ValidateProtobufRuntimeVersion(
13
+ _runtime_version.Domain.PUBLIC,
14
+ 6,
15
+ 31,
16
+ 1,
17
+ '',
18
+ 'google/protobuf/api.proto'
19
+ )
20
+ # @@protoc_insertion_point(imports)
21
+
22
+ _sym_db = _symbol_database.Default()
23
+
24
+
25
+ from google.protobuf import source_context_pb2 as google_dot_protobuf_dot_source__context__pb2
26
+ from google.protobuf import type_pb2 as google_dot_protobuf_dot_type__pb2
27
+
28
+
29
+ DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x19google/protobuf/api.proto\x12\x0fgoogle.protobuf\x1a$google/protobuf/source_context.proto\x1a\x1agoogle/protobuf/type.proto\"\x81\x02\n\x03\x41pi\x12\x0c\n\x04name\x18\x01 \x01(\t\x12(\n\x07methods\x18\x02 \x03(\x0b\x32\x17.google.protobuf.Method\x12(\n\x07options\x18\x03 \x03(\x0b\x32\x17.google.protobuf.Option\x12\x0f\n\x07version\x18\x04 \x01(\t\x12\x36\n\x0esource_context\x18\x05 \x01(\x0b\x32\x1e.google.protobuf.SourceContext\x12&\n\x06mixins\x18\x06 \x03(\x0b\x32\x16.google.protobuf.Mixin\x12\'\n\x06syntax\x18\x07 \x01(\x0e\x32\x17.google.protobuf.Syntax\"\xd5\x01\n\x06Method\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x18\n\x10request_type_url\x18\x02 \x01(\t\x12\x19\n\x11request_streaming\x18\x03 \x01(\x08\x12\x19\n\x11response_type_url\x18\x04 \x01(\t\x12\x1a\n\x12response_streaming\x18\x05 \x01(\x08\x12(\n\x07options\x18\x06 \x03(\x0b\x32\x17.google.protobuf.Option\x12\'\n\x06syntax\x18\x07 \x01(\x0e\x32\x17.google.protobuf.Syntax\"#\n\x05Mixin\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x0c\n\x04root\x18\x02 \x01(\tBv\n\x13\x63om.google.protobufB\x08\x41piProtoP\x01Z,google.golang.org/protobuf/types/known/apipb\xa2\x02\x03GPB\xaa\x02\x1eGoogle.Protobuf.WellKnownTypesb\x06proto3')
30
+
31
+ _globals = globals()
32
+ _builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals)
33
+ _builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'google.protobuf.api_pb2', _globals)
34
+ if not _descriptor._USE_C_DESCRIPTORS:
35
+ _globals['DESCRIPTOR']._loaded_options = None
36
+ _globals['DESCRIPTOR']._serialized_options = b'\n\023com.google.protobufB\010ApiProtoP\001Z,google.golang.org/protobuf/types/known/apipb\242\002\003GPB\252\002\036Google.Protobuf.WellKnownTypes'
37
+ _globals['_API']._serialized_start=113
38
+ _globals['_API']._serialized_end=370
39
+ _globals['_METHOD']._serialized_start=373
40
+ _globals['_METHOD']._serialized_end=586
41
+ _globals['_MIXIN']._serialized_start=588
42
+ _globals['_MIXIN']._serialized_end=623
43
+ # @@protoc_insertion_point(module_scope)
@@ -0,0 +1,250 @@
1
+ # Copyright 2025 Google LLC.
2
+ #
3
+ # Licensed under the Apache License, Version 2.0 (the "License");
4
+ # you may not use this file except in compliance with the License.
5
+ # You may obtain a copy of the License at
6
+ #
7
+ # http://www.apache.org/licenses/LICENSE-2.0
8
+ #
9
+ # Unless required by applicable law or agreed to in writing, software
10
+ # distributed under the License is distributed on an "AS IS" BASIS,
11
+ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12
+ # See the License for the specific language governing permissions and
13
+ # limitations under the License.
14
+
15
+ """Python wrapper for LiteRT compiled models."""
16
+
17
+ import os
18
+ from typing import Any, Dict, List
19
+
20
+ # pylint: disable=g-import-not-at-top
21
+ if not os.path.splitext(__file__)[0].endswith(
22
+ os.path.join("ai_edge_litert", "compiled_model")
23
+ ):
24
+ # This file is part of litert package.
25
+ from ai_edge_litert.litert_wrapper.compiled_model_wrapper import (
26
+ _pywrap_litert_compiled_model_wrapper as _cm,
27
+ )
28
+ from ai_edge_litert.litert_wrapper.compiled_model_wrapper.hardware_accelerator import HardwareAccelerator
29
+ from ai_edge_litert.litert_wrapper.tensor_buffer_wrapper.tensor_buffer import TensorBuffer
30
+ else:
31
+ # This file is part of ai_edge_litert package.
32
+ from ai_edge_litert import _pywrap_litert_compiled_model_wrapper as _cm
33
+ from ai_edge_litert.hardware_accelerator import HardwareAccelerator
34
+ from ai_edge_litert.tensor_buffer import TensorBuffer
35
+ # pylint: enable=g-import-not-at-top
36
+
37
+
38
+ class CompiledModel:
39
+ """Python wrapper for the C++ CompiledModelWrapper.
40
+
41
+ This class provides methods to load, inspect, and execute machine learning
42
+ models using the LiteRT runtime.
43
+ """
44
+
45
+ def __init__(self, c_model_ptr):
46
+ """Initializes the CompiledModel with a C++ model pointer.
47
+
48
+ Args:
49
+ c_model_ptr: Pointer to the underlying C++ CompiledModelWrapper.
50
+ """
51
+ self._model = c_model_ptr # Pointer to C++ CompiledModelWrapper
52
+
53
+ @classmethod
54
+ def from_file(
55
+ cls,
56
+ model_path: str,
57
+ ) -> "CompiledModel":
58
+ """Creates a CompiledModel from a model file.
59
+
60
+ Args:
61
+ model_path: Path to the model file.
62
+
63
+ Returns:
64
+ A new CompiledModel instance.
65
+ """
66
+ ptr = _cm.CreateCompiledModelFromFile(
67
+ model_path,
68
+ compiler_plugin_path="",
69
+ dispatch_library_path="",
70
+ hardware_accel=HardwareAccelerator.CPU,
71
+ )
72
+ return cls(ptr)
73
+
74
+ @classmethod
75
+ def from_buffer(
76
+ cls,
77
+ model_data: bytes,
78
+ ) -> "CompiledModel":
79
+ """Creates a CompiledModel from an in-memory buffer.
80
+
81
+ Args:
82
+ model_data: Model data as bytes.
83
+
84
+ Returns:
85
+ A new CompiledModel instance.
86
+ """
87
+ ptr = _cm.CreateCompiledModelFromBuffer(
88
+ model_data,
89
+ compiler_plugin_path="",
90
+ dispatch_library_path="",
91
+ hardware_accel=HardwareAccelerator.CPU,
92
+ )
93
+ return cls(ptr)
94
+
95
+ def get_signature_list(self) -> Dict[str, Dict[str, List[str]]]:
96
+ """Returns a dictionary of all available model signatures.
97
+
98
+ Returns:
99
+ Dictionary mapping signature names to their input/output specifications.
100
+ """
101
+ return self._model.GetSignatureList()
102
+
103
+ def get_signature_by_index(self, index: int) -> Dict[str, Any]:
104
+ """Returns signature information for the given index.
105
+
106
+ Args:
107
+ index: Index of the signature to retrieve.
108
+
109
+ Returns:
110
+ Dictionary containing signature information.
111
+ """
112
+ return self._model.GetSignatureByIndex(index)
113
+
114
+ def get_num_signatures(self) -> int:
115
+ """Returns the number of signatures in the model.
116
+
117
+ Returns:
118
+ Number of signatures.
119
+ """
120
+ return self._model.GetNumSignatures()
121
+
122
+ def get_signature_index(self, key: str) -> int:
123
+ """Returns the index for a signature name.
124
+
125
+ Args:
126
+ key: Name of the signature.
127
+
128
+ Returns:
129
+ Index of the signature, or -1 if not found.
130
+ """
131
+ return self._model.GetSignatureIndex(key)
132
+
133
+ def get_input_buffer_requirements(
134
+ self, input_index: int, signature_index: int = 0
135
+ ) -> Dict[str, Any]:
136
+ """Returns memory requirements for an input tensor.
137
+
138
+ Args:
139
+ input_index: Index of the input tensor.
140
+ signature_index: Index of the signature. Default is 0 (first signature).
141
+
142
+ Returns:
143
+ Dictionary with buffer requirements (size, alignment, etc.).
144
+ """
145
+ return self._model.GetInputBufferRequirements(signature_index, input_index)
146
+
147
+ def get_output_buffer_requirements(
148
+ self, output_index: int, signature_index: int = 0
149
+ ) -> Dict[str, Any]:
150
+ """Returns memory requirements for an output tensor.
151
+
152
+ Args:
153
+ output_index: Index of the output tensor.
154
+ signature_index: Index of the signature. Default is 0 (first signature).
155
+
156
+ Returns:
157
+ Dictionary with buffer requirements (size, alignment, etc.).
158
+ """
159
+ return self._model.GetOutputBufferRequirements(
160
+ signature_index, output_index
161
+ )
162
+
163
+ def create_input_buffer_by_name(
164
+ self, signature_key: str, input_name: str
165
+ ) -> TensorBuffer:
166
+ """Creates an input TensorBuffer for the specified signature and input name.
167
+
168
+ Args:
169
+ signature_key: Name of the signature.
170
+ input_name: Name of the input tensor.
171
+
172
+ Returns:
173
+ A TensorBuffer object for the specified input.
174
+ """
175
+ capsule = self._model.CreateInputBufferByName(signature_key, input_name)
176
+ return TensorBuffer(capsule)
177
+
178
+ def create_output_buffer_by_name(
179
+ self, signature_key: str, output_name: str
180
+ ) -> TensorBuffer:
181
+ """Creates an output TensorBuffer for the specified signature and output name.
182
+
183
+ Args:
184
+ signature_key: Name of the signature.
185
+ output_name: Name of the output tensor.
186
+
187
+ Returns:
188
+ A TensorBuffer object for the specified output.
189
+ """
190
+ capsule = self._model.CreateOutputBufferByName(signature_key, output_name)
191
+ return TensorBuffer(capsule)
192
+
193
+ def create_input_buffers(self, signature_index: int) -> List[TensorBuffer]:
194
+ """Creates TensorBuffers for all inputs of the specified signature.
195
+
196
+ Args:
197
+ signature_index: Index of the signature.
198
+
199
+ Returns:
200
+ List of TensorBuffer objects for all inputs.
201
+ """
202
+ capsule_list = self._model.CreateInputBuffers(signature_index)
203
+ return [TensorBuffer(c) for c in capsule_list]
204
+
205
+ def create_output_buffers(self, signature_index: int) -> List[TensorBuffer]:
206
+ """Creates TensorBuffers for all outputs of the specified signature.
207
+
208
+ Args:
209
+ signature_index: Index of the signature.
210
+
211
+ Returns:
212
+ List of TensorBuffer objects for all outputs.
213
+ """
214
+ capsule_list = self._model.CreateOutputBuffers(signature_index)
215
+ return [TensorBuffer(c) for c in capsule_list]
216
+
217
+ def run_by_name(
218
+ self,
219
+ signature_key: str,
220
+ input_map: Dict[str, TensorBuffer],
221
+ output_map: Dict[str, TensorBuffer],
222
+ ) -> None:
223
+ """Runs inference using the named signature and tensor maps.
224
+
225
+ Args:
226
+ signature_key: Name of the signature to execute.
227
+ input_map: Dictionary mapping input names to TensorBuffer objects.
228
+ output_map: Dictionary mapping output names to TensorBuffer objects.
229
+ """
230
+ # Convert TensorBuffer objects to raw capsules
231
+ capsule_input_map = {k: v.capsule for k, v in input_map.items()}
232
+ capsule_output_map = {k: v.capsule for k, v in output_map.items()}
233
+ self._model.RunByName(signature_key, capsule_input_map, capsule_output_map)
234
+
235
+ def run_by_index(
236
+ self,
237
+ signature_index: int,
238
+ input_buffers: List[TensorBuffer],
239
+ output_buffers: List[TensorBuffer],
240
+ ) -> None:
241
+ """Runs inference using the indexed signature and tensor lists.
242
+
243
+ Args:
244
+ signature_index: Index of the signature to execute.
245
+ input_buffers: List of input TensorBuffer objects.
246
+ output_buffers: List of output TensorBuffer objects.
247
+ """
248
+ input_capsules = [tb.capsule for tb in input_buffers]
249
+ output_capsules = [tb.capsule for tb in output_buffers]
250
+ self._model.RunByIndex(signature_index, input_capsules, output_capsules)