ai-edge-litert-nightly 1.4.0.dev20250729__cp311-cp311-macosx_12_0_arm64.whl → 1.4.0.dev20250814__cp311-cp311-macosx_12_0_arm64.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of ai-edge-litert-nightly might be problematic.
- ai_edge_litert/__init__.py +1 -1
- ai_edge_litert/any_pb2.py +4 -4
- ai_edge_litert/api_pb2.py +4 -4
- ai_edge_litert/descriptor_pb2.py +310 -118
- ai_edge_litert/duration_pb2.py +4 -4
- ai_edge_litert/empty_pb2.py +4 -4
- ai_edge_litert/field_mask_pb2.py +4 -4
- ai_edge_litert/model_runtime_info_pb2.py +4 -4
- ai_edge_litert/plugin_pb2.py +4 -4
- ai_edge_litert/profiling_info_pb2.py +4 -4
- ai_edge_litert/source_context_pb2.py +4 -4
- ai_edge_litert/struct_pb2.py +4 -4
- ai_edge_litert/timestamp_pb2.py +4 -4
- ai_edge_litert/type_pb2.py +4 -4
- ai_edge_litert/wrappers_pb2.py +4 -4
- {ai_edge_litert_nightly-1.4.0.dev20250729.dist-info → ai_edge_litert_nightly-1.4.0.dev20250814.dist-info}/METADATA +1 -1
- ai_edge_litert_nightly-1.4.0.dev20250814.dist-info/RECORD +36 -0
- ai_edge_litert/aot/__init__.py +0 -0
- ai_edge_litert/aot/ai_pack/__init__.py +0 -0
- ai_edge_litert/aot/ai_pack/export_lib.py +0 -281
- ai_edge_litert/aot/aot_compile.py +0 -152
- ai_edge_litert/aot/core/__init__.py +0 -0
- ai_edge_litert/aot/core/apply_plugin.py +0 -146
- ai_edge_litert/aot/core/common.py +0 -95
- ai_edge_litert/aot/core/components.py +0 -93
- ai_edge_litert/aot/core/mlir_transforms.py +0 -36
- ai_edge_litert/aot/core/tflxx_util.py +0 -30
- ai_edge_litert/aot/core/types.py +0 -374
- ai_edge_litert/aot/prepare_for_npu.py +0 -152
- ai_edge_litert/aot/vendors/__init__.py +0 -18
- ai_edge_litert/aot/vendors/example/__init__.py +0 -0
- ai_edge_litert/aot/vendors/example/example_backend.py +0 -157
- ai_edge_litert/aot/vendors/fallback_backend.py +0 -128
- ai_edge_litert/aot/vendors/import_vendor.py +0 -132
- ai_edge_litert/aot/vendors/mediatek/__init__.py +0 -0
- ai_edge_litert/aot/vendors/mediatek/mediatek_backend.py +0 -196
- ai_edge_litert/aot/vendors/mediatek/target.py +0 -91
- ai_edge_litert/aot/vendors/qualcomm/__init__.py +0 -0
- ai_edge_litert/aot/vendors/qualcomm/qualcomm_backend.py +0 -161
- ai_edge_litert/aot/vendors/qualcomm/target.py +0 -74
- ai_edge_litert/libLiteRtRuntimeCApi.so +0 -0
- ai_edge_litert/tools/apply_plugin_main +0 -0
- ai_edge_litert_nightly-1.4.0.dev20250729.dist-info/RECORD +0 -61
- {ai_edge_litert_nightly-1.4.0.dev20250729.dist-info → ai_edge_litert_nightly-1.4.0.dev20250814.dist-info}/WHEEL +0 -0
- {ai_edge_litert_nightly-1.4.0.dev20250729.dist-info → ai_edge_litert_nightly-1.4.0.dev20250814.dist-info}/top_level.txt +0 -0
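Most of the removed entries above belong to the ai_edge_litert.aot subpackage (the ahead-of-time compilation flow and its vendor backends), which is present in 1.4.0.dev20250729 but absent from 1.4.0.dev20250814. Downstream code that imported the AOT API from the older nightly may want to guard that import; a minimal sketch, where the fallback behaviour is illustrative and not part of either release:

```python
# Hedged sketch: the aot subpackage exists in 1.4.0.dev20250729 but not in
# 1.4.0.dev20250814, so probe for it instead of importing unconditionally.
try:
  from ai_edge_litert.aot import aot_compile  # present only in the older nightly
  HAS_AOT = True
except ImportError:
  aot_compile = None
  HAS_AOT = False

if not HAS_AOT:
  # Illustrative fallback: skip ahead-of-time compilation entirely.
  print("ai_edge_litert.aot is unavailable in this nightly; skipping AOT compile.")
```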
ai_edge_litert/aot/core/types.py
DELETED
@@ -1,374 +0,0 @@
-# Copyright 2024 The TensorFlow Authors. All Rights Reserved.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-# ==============================================================================
-
-"""Basic types used in the LiteRt AOT flow."""
-
-import abc
-from collections.abc import Iterable
-import dataclasses
-import pathlib
-import sys
-from typing import Any, MutableMapping, Protocol, Type
-
-# pylint: disable=g-importing-member
-# pylint: disable=g-import-not-at-top
-# pylint: disable=g-bad-import-order
-if sys.version_info < (3, 10):
-  from typing_extensions import TypeAlias
-else:
-  from typing import TypeAlias
-# pylint: enable=g-bad-import-order
-# pylint: enable=g-import-not-at-top
-# pylint: enable=g-importing-member
-
-
-@dataclasses.dataclass(frozen=True)
-class SubgraphPartitionStats:
-  """Subgraph partition stats."""
-
-  subgraph_index: int
-  num_ops_offloaded: int
-  num_total_ops: int
-  num_partitions_offloaded: int
-
-  def __str__(self) -> str:
-    is_full_offload = self.num_ops_offloaded == self.num_total_ops
-    return (
-        'Subgraph'
-        f' {self.subgraph_index} {"fully" if is_full_offload else "partially"}'
-        f' compiled:\t{self.num_ops_offloaded} /'
-        f' {self.num_total_ops} ops offloaded to'
-        f' {self.num_partitions_offloaded} partitions.'
-    )
-
-
-@dataclasses.dataclass(frozen=True)
-class PartitionStats:
-  """Model partition stats."""
-
-  subgraph_stats: list[SubgraphPartitionStats]
-
-  def __str__(self) -> str:
-    return '\n'.join(str(s) for s in self.subgraph_stats)
-
-
-class Model:
-  """A model.
-
-  Note: If the model is not in memory, data_ will be a path to a file on disk.
-  If the model is in memory, data_ will be the model bytes.
-
-  However, there's no guarantee that the path will be a valid path to a file
-  on disk, and/or that the file are a valid TFLite model.
-  """
-
-  data_: pathlib.Path | bytes
-  partition_stats: PartitionStats | None = None
-
-  def __init__(
-      self,
-      path: pathlib.Path | str | None = None,
-      model_bytes: bytes | None = None,
-  ):
-    if path is not None:
-      if isinstance(path, str):
-        path = pathlib.Path(path)
-      if model_bytes:
-        raise ValueError('Cannot specify both path and model_bytes.')
-      self.data_ = path
-    else:
-      if model_bytes is None:
-        raise ValueError('Cannot specify neither path nor model_bytes.')
-      self.data_ = model_bytes
-
-  @property
-  def in_memory(self) -> bool:
-    return isinstance(self.data_, bytes)
-
-  @property
-  def path(self) -> pathlib.Path:
-    if not isinstance(self.data_, pathlib.Path):
-      raise ValueError('Model is not on disk.')
-    return self.data_
-
-  @property
-  def model_bytes(self) -> bytes:
-    if not isinstance(self.data_, bytes):
-      raise ValueError('Model is not in memory.')
-    return self.data_
-
-  @classmethod
-  def create_from_path(cls, path: pathlib.Path) -> 'Model':
-    return Model(path=path, model_bytes=None)
-
-  @classmethod
-  def create_from_bytes(cls, model_bytes: bytes) -> 'Model':
-    return Model(path=None, model_bytes=model_bytes)
-
-  def set_path(self, path: pathlib.Path | str):
-    if isinstance(path, str):
-      path = pathlib.Path(path)
-    self.data_ = path
-
-  def set_bytes(self, model_bytes: bytes):
-    self.data_ = model_bytes
-
-  def load(self):
-    """Loads the model from the given path.
-
-    Raises:
-      ValueError: If the model is already in memory.
-    """
-    if not isinstance(self.data_, pathlib.Path):
-      raise ValueError('Cannot load a model that is already in memory.')
-    self.data_ = self.data_.read_bytes()
-
-  def save(self, path: pathlib.Path | str, export_only: bool = False):
-    """Saves the model to the given path from the in-memory model content.
-
-    If export_only is True, the model will be copied to the given path without
-    modifying the internal state, regardless of whether the model is already on
-    disk or in memory.
-
-    Args:
-      path: The path to save the model to.
-      export_only: Whether to only export the model without modifying the
-        internal stat (i.e. transfer the in-memory model to disk).
-
-    Raises:
-      ValueError: If export_only is False and the model is not in memory.
-    """
-    if isinstance(path, str):
-      path = pathlib.Path(path)
-    if isinstance(self.data_, pathlib.Path):
-      if not export_only:
-        raise ValueError(
-            'Cannot save a model that is not in memory. Use export_only=True'
-            ' for copying the model to a new path.'
-        )
-      with open(self.data_, 'rb') as f:
-        model_content = f.read()
-    else:
-      model_content = self.data_
-    path.write_bytes(model_content)
-    if not export_only:
-      self.data_ = path
-
-
-@dataclasses.dataclass()
-class CompilationResult:
-  """Compilation result, as a collection of compiled models."""
-
-  models_with_backend: list[tuple['Backend', Model]] = dataclasses.field(
-      default_factory=list
-  )
-  failed_backends: list[tuple['Backend', str]] = dataclasses.field(
-      default_factory=list
-  )
-
-  @property
-  def models(self) -> list[Model]:
-    return [model for _, model in self.models_with_backend]
-
-  def load(self):
-    for _, model in self.models_with_backend:
-      if not model.in_memory:
-        model.load()
-
-  def export(self, output_dir: pathlib.Path | str, model_name: str = 'model'):
-    if isinstance(output_dir, str):
-      output_dir = pathlib.Path(output_dir)
-    output_dir.mkdir(parents=True, exist_ok=True)
-    for backend, model in self.models_with_backend:
-      model.save(
-          output_dir / (model_name + backend.target_id_suffix + '.tflite'),
-          export_only=True,
-      )
-
-  def compilation_report(self) -> str:
-    """Returns a human readable compilation report."""
-    report = []
-    for backend, model in self.models_with_backend:
-      report.append(f'{backend.target_id}')
-      report.append('==========================')
-      report.append(f'Partition Stats:\n{model.partition_stats}\n')
-    report = '\n'.join(report)
-
-    failed_report = []
-    if self.failed_backends:
-      failed_report.append('==========================')
-      failed_report.append('COMPILATION FAILURES:')
-      failed_report.append('==========================')
-      for backend, error in self.failed_backends:
-        failed_report.append(f'{backend.target_id}\t{error}')
-    failed_report = '\n'.join(failed_report)
-    return '\n'.join([report, failed_report])
-
-
-class Component(Protocol):
-  """An arbitrary module in the AOT flow that inputs and outputs a Model.
-
-  For example quantizer, graph rewriter, compiler plugin etc.
-  """
-
-  @property
-  def component_name(self) -> str:
-    ...
-
-  def __call__(self, input_model: Model, output_model: Model, *args, **kwargs):
-    ...
-
-
-# A user provided configuration. This will contain all the information needed
-# to select the proper backend and run components (e.g. quant recipe,
-# backend id etc). Backends will validate and resolve configurations and are
-# ultimately responsible deciding how to configure the components.
-# NOTE: Consider a typed config approach (proto, data class, etc.)
-Config: TypeAlias = MutableMapping[str, Any]
-
-
-# Backend specific compilation configuration.
-BackendCompilationConfig: TypeAlias = MutableMapping[str, Any]
-
-
-# The following is experimental and for protyping only.
-class CompilationConfig:
-  """A typed configuration."""
-
-  target: 'Target'
-  compilation_config: BackendCompilationConfig = dataclasses.field(
-      default_factory=dict
-  )
-  quant_recipe: str | None = None
-
-  def __init__(self, target: 'Target', **kwargs: Any):
-    self.target = target
-    self.quant_recipe = kwargs.pop('quantize_recipe', None)
-    self.compilation_config = kwargs
-
-  def to_dict(self) -> dict[str, Any]:
-    ret = self.target.flatten()
-    ret['compilation_config'] = self.compilation_config
-    if self.quant_recipe is not None:
-      ret['quantize_recipe'] = self.quant_recipe
-    return ret
-
-
-class Backend(metaclass=abc.ABCMeta):
-  """A backend pertaining to a particular SoC vendor.
-
-  Mainly responsible for resolving configurations and managing vendor specific
-  resources (e.g. .so etc).
-  """
-
-  # NOTE: Only initialize through "create".
-  def __init__(self, config: Config):
-    self._config = config
-
-  @classmethod
-  @abc.abstractmethod
-  def create(cls, config: Config) -> 'Backend':
-    """Creates a backend instance.
-
-    If no target is specified, the backend will represent all targets.
-
-    Args:
-      config: The compilation configuration.
-
-    Returns:
-      The backend instance.
-    """
-
-  @classmethod
-  @abc.abstractmethod
-  def id(cls) -> str:
-    pass
-
-  @property
-  @abc.abstractmethod
-  def target(self) -> 'Target':
-    pass
-
-  @property
-  @abc.abstractmethod
-  def target_id(self) -> str:
-    pass
-
-  @property
-  def target_id_suffix(self) -> str:
-    if self.target_id:
-      return '_' + self.target_id
-    return ''
-
-  @property
-  def config(self) -> Config:
-    return self._config
-
-  @property
-  def soc_manufacturer(self) -> str:
-    """Manufacturer name or enum."""
-    raise NotImplementedError()
-
-  @property
-  def soc_model(self) -> str:
-    """Model name or enum."""
-    raise NotImplementedError()
-
-  @property
-  def shared_pass_names(self) -> list[str]:
-    """Names of shared passes."""
-    raise NotImplementedError()
-
-  @property
-  def quantize_recipe(self) -> str | None:
-    """Optional quantization recipe."""
-    return None
-
-  @abc.abstractmethod
-  def call_component(
-      self, input_model: Model, output_model: Model, component: Component
-  ):
-    pass
-
-  def specialize(self) -> Iterable['Backend']:
-    yield self
-
-
-BackendT: TypeAlias = Type[Backend]
-
-
-class Target(metaclass=abc.ABCMeta):
-  """Compilation target."""
-
-  @abc.abstractmethod
-  def __hash__(self) -> int:
-    pass
-
-  @abc.abstractmethod
-  def __eq__(self, other) -> bool:
-    pass
-
-  @abc.abstractmethod
-  def __repr__(self) -> str:
-    pass
-
-  @classmethod
-  @abc.abstractmethod
-  def backend_id(cls) -> str:
-    pass
-
-  @abc.abstractmethod
-  def flatten(self) -> dict[str, Any]:
-    return {'backend_id': self.backend_id()}
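The Model wrapper deleted above is the unit of data passed between AOT components: it holds either a pathlib.Path or the raw flatbuffer bytes and converts between the two. A short usage sketch reconstructed from the removed class (file names are placeholders):

```python
import pathlib

from ai_edge_litert.aot.core import types  # module removed in 1.4.0.dev20250814

# Wrap an on-disk flatbuffer; nothing is read until load() or save() is called.
model = types.Model.create_from_path(pathlib.Path("some_model.tflite"))
assert not model.in_memory

model.load()            # replaces the stored path with the raw model bytes
assert model.in_memory

# export_only=True writes a copy without switching the wrapper back to a path.
model.save(pathlib.Path("/tmp/some_model_copy.tflite"), export_only=True)
assert model.in_memory
```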
ai_edge_litert/aot/prepare_for_npu.py
DELETED
@@ -1,152 +0,0 @@
-# Copyright 2024 The TensorFlow Authors. All Rights Reserved.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-# ==============================================================================
-
-"""Implementations for the main public API functionalities."""
-
-import pathlib
-from typing import cast
-
-# pylint: disable=g-import-not-at-top
-# pytype: disable=import-error
-try:
-  from tqdm import auto as autotqdm
-except ImportError:
-  from tqdm.tqdm import auto as autotqdm
-# pytype: enable=import-error
-
-from ai_edge_litert.aot.core import common
-from ai_edge_litert.aot.core import components
-from ai_edge_litert.aot.core import types
-from ai_edge_litert.aot.vendors import import_vendor
-
-# pylint: enable=g-import-not-at-top
-
-
-def resolve_backend(config: types.Config) -> types.BackendT:
-  # Import the backend based on the ID.
-  backend_id = config.get("backend_id", None)
-  if backend_id is None:
-    raise ValueError("Backend ID is required.")
-  return import_vendor.import_vendor(backend_id)
-
-
-def prepare_for_npu_multiple_configs(
-    flatbuffer: types.Model,
-    output_dir: pathlib.Path,
-    configs: list[tuple[types.BackendT, types.Config]],
-    plugin: components.ApplyPluginT,
-    transforms: components.MlirTransformsT | None = None,
-    quantizer: components.AieQuantizerT | None = None,
-    keep_going: bool = False,
-) -> types.CompilationResult:
-  """Prepares a TFLite model for NPU execution."""
-  backends = []
-  for backend_class, config in configs:
-    backend = backend_class.create(config)
-    backends += list(backend.specialize())
-
-  pipeline: list[types.Component] = [
-      c for c in [transforms, quantizer, plugin] if c is not None
-  ]
-  return compile_model(flatbuffer, output_dir, backends, pipeline, keep_going)
-
-
-def prepare_for_npu(
-    flatbuffer: types.Model,
-    output_dir: pathlib.Path,
-    backend_class: types.BackendT,
-    config: types.Config,
-    plugin: components.ApplyPluginT,
-    transforms: components.MlirTransformsT | None = None,
-    quantizer: components.AieQuantizerT | None = None,
-    keep_going: bool = False,
-) -> types.CompilationResult:
-  """Prepares a TFLite model for NPU execution.
-
-  High level command that erforms various backend specific pre-processing steps
-  and then applies an NPU compiler to the given model.
-
-  Args:
-    flatbuffer: Path to the input flatbuffer file.
-    output_dir: Directory to write the output flatbuffer file.
-    backend_class: The backend to prepare the model for.
-    config: The configuration for the backend.
-    plugin: The plugin to apply to the model.
-    transforms: The transforms to apply to the model.
-    quantizer: The quantizer to apply to the model.
-    keep_going: Whether to keep going if some backends fail.
-
-  Returns:
-    List of the paths to the output flatbuffer file.
-
-  Raises:
-    ValueError: If the given path is not a valid flatbuffer file.
-  """
-
-  backend = backend_class.create(config)
-
-  pipeline: list[types.Component] = [
-      c for c in [transforms, quantizer, plugin] if c is not None
-  ]
-  backends = list(backend.specialize())
-  return compile_model(flatbuffer, output_dir, backends, pipeline, keep_going)
-
-
-def compile_model(
-    flatbuffer: types.Model,
-    output_dir: pathlib.Path,
-    backends: list[types.Backend],
-    pipeline: list[types.Component],
-    keep_going: bool = False,
-) -> types.CompilationResult:
-  """Compiles a TFLite model for NPU execution."""
-  if flatbuffer.in_memory:
-    base_name = "model"
-  else:
-    base_name = flatbuffer.path.name.removesuffix(common.DOT_TFLITE)
-  compile_models = types.CompilationResult()
-  with autotqdm.tqdm(backends, desc="Backend") as t_backends:
-    for backend in t_backends:
-      component_input = flatbuffer
-      backend = cast(types.Backend, backend)
-      input_name_pref = base_name + backend.target_id_suffix
-      t_backends.set_description(f"Compiling {backend.target_id}")
-      try:
-        for component in pipeline:
-          component = cast(types.Component, component)
-          t_backends.set_description(
-              f"Compiling {backend.target_id}: {component.component_name}"
-          )
-          component_output = types.Model.create_from_path(
-              output_dir
-              / f"{input_name_pref}_{component.component_name}{common.DOT_TFLITE}"
-          )
-          backend.call_component(component_input, component_output, component)
-          if not component_output.in_memory and not common.is_tflite(
-              component_output.path
-          ):
-            raise ValueError(
-                f"{component.component_name} failed to produce a TFLite model."
-            )
-          component_input = component_output
-        compile_models.models_with_backend.append((backend, component_input))
-      except ValueError as e:
-        if keep_going:
-          print(f"Skipping failed compilation for {backend.target}. Error: {e}")
-          compile_models.failed_backends.append((backend, str(e)))
-        else:
-          raise
-
-  return compile_models
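prepare_for_npu above is the entry point that chains the optional MLIR transforms, quantizer, and compiler-plugin components per backend. A hedged sketch of how it was driven, using the example backend shown further down in this diff; the ApplyPlugin class name is an assumption (apply_plugin.py is listed as removed but its contents are not shown here), and the file paths are placeholders:

```python
import pathlib

from ai_edge_litert.aot import prepare_for_npu as npu_lib            # removed module
from ai_edge_litert.aot.core import apply_plugin, types              # removed modules
from ai_edge_litert.aot.vendors.example import example_backend       # removed module

config: types.Config = {"backend_id": "example"}

result = npu_lib.prepare_for_npu(
    flatbuffer=types.Model.create_from_path(pathlib.Path("some_model.tflite")),
    output_dir=pathlib.Path("/tmp/npu_out"),
    backend_class=example_backend.ExampleBackend,
    config=config,
    plugin=apply_plugin.ApplyPlugin(),  # assumed constructor name, not shown in this diff
    keep_going=True,                    # record per-backend failures instead of raising
)

print(result.compilation_report())
result.export(pathlib.Path("/tmp/npu_out"), model_name="some_model")
```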
ai_edge_litert/aot/vendors/__init__.py
DELETED
@@ -1,18 +0,0 @@
-# Copyright 2024 The TensorFlow Authors. All Rights Reserved.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-# ==============================================================================
-"""Vendor backends for LiteRt."""
-
-from ai_edge_litert.aot.vendors.mediatek import mediatek_backend as _
-from ai_edge_litert.aot.vendors.qualcomm import qualcomm_backend as _
ai_edge_litert/aot/vendors/example/__init__.py
File without changes
ai_edge_litert/aot/vendors/example/example_backend.py
DELETED
@@ -1,157 +0,0 @@
-# Copyright 2024 The TensorFlow Authors. All Rights Reserved.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-# ==============================================================================
-
-"""Backend implementation for the example compiler plugin.."""
-
-import functools
-from typing import Any
-
-from ai_edge_litert.aot.core import components
-from ai_edge_litert.aot.core import types
-from ai_edge_litert.aot.vendors import import_vendor
-
-
-class ExampleTarget(types.Target):
-  """Compilation target for the example backend."""
-
-  def __init__(self, soc_manufacturer: str, soc_model: str):
-    self.soc_manufacturer = soc_manufacturer
-    self.soc_model = soc_model
-
-  def __hash__(self) -> int:
-    return hash((self.soc_manufacturer, self.soc_model))
-
-  def __eq__(self, other) -> bool:
-    return (
-        self.soc_manufacturer == other.soc_manufacturer
-        and self.soc_model == other.soc_model
-    )
-
-  def __repr__(self) -> str:
-    return f"{self.soc_manufacturer}_{self.soc_model}"
-
-  def flatten(self) -> dict[str, Any]:
-    return {
-        "soc_manufacturer": self.soc_manufacturer,
-        "soc_model": self.soc_model,
-    }
-
-  @classmethod
-  def backend_id(cls) -> str:
-    return "example"
-
-
-# Note this is not a real target so not auto-registered unless the module is
-# imported.
-@import_vendor.register_backend
-class ExampleBackend(types.Backend):
-  """Backend implementation for the example compiler plugin."""
-
-  def __init__(self, config: types.Config):
-    super().__init__(config)
-    self._compilation_config = config.get("compilation_config", None)
-
-  @classmethod
-  def target_(cls) -> ExampleTarget:
-    return ExampleTarget("ExampleSocManufacturer", "ExampleSocModel")
-
-  @property
-  def target(self) -> ExampleTarget:
-    return self.target_()
-
-  @classmethod
-  def soc_manufacturer(cls) -> str:
-    return cls.target_().soc_manufacturer
-
-  @classmethod
-  def soc_model(cls) -> str:
-    return cls.target_().soc_model
-
-  @classmethod
-  def id(cls) -> str:
-    return "example"
-
-  @property
-  def target_id(self) -> str:
-    return ""
-
-  @property
-  def shared_pass_names(self) -> list[str]:
-    return ["example-pass"]
-
-  @classmethod
-  def create(cls, config: types.Config) -> "ExampleBackend":
-    if config.get("backend_id", "") != cls.id():
-      raise ValueError("Invalid backend id")
-    return cls(config)
-
-  def call_component(
-      self,
-      input_model: types.Model,
-      output_model: types.Model,
-      component: types.Component,
-  ):
-    return _call_component(component, self, input_model, output_model)
-
-
-@functools.singledispatch
-def _call_component(
-    component: types.Component,
-    backend: ExampleBackend,
-    unused_input_model: types.Model,
-    unused_output_model: types.Model,
-):
-  raise NotImplementedError(
-      f"{backend.id()} backend does not support"
-      f" {component.component_name} component."
-  )
-
-
-@_call_component.register
-def _apply_plugin(
-    component: components.ApplyPluginT,
-    backend: ExampleBackend,
-    input_model: types.Model,
-    output_model: types.Model,
-):
-  return component(
-      input_model,
-      output_model,
-      backend.soc_manufacturer,
-      backend.soc_model,
-  )
-
-
-@_call_component.register
-def _aie_quantizer(
-    component: components.AieQuantizerT,
-    unused_backend: ExampleBackend,
-    input_model: types.Model,
-    output_model: types.Model,
-):
-  return component(
-      input_model,
-      output_model,
-  )
-
-
-@_call_component.register
-def _mlir_transforms(
-    component: components.MlirTransformsT,
-    backend: ExampleBackend,
-    input_model: types.Model,
-    output_model: types.Model,
-):
-  return component(input_model, output_model, backend.shared_pass_names)
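The example backend above shows the dispatch contract: call_component forwards to a functools.singledispatch handler keyed on the component's type, and any unregistered component type falls through to the base handler, which raises. A small sketch against the removed modules (the _NoOpComponent class is hypothetical and exists only to trigger that fallback):

```python
from ai_edge_litert.aot.core import types                       # removed module
from ai_edge_litert.aot.vendors.example import example_backend  # removed module

# create() only accepts configs addressed to its own backend id ("example").
backend = example_backend.ExampleBackend.create({"backend_id": "example"})
print(backend.target)             # ExampleSocManufacturer_ExampleSocModel
print(backend.shared_pass_names)  # ['example-pass']


class _NoOpComponent:
  """Hypothetical component that no singledispatch overload is registered for."""

  component_name = "noop"

  def __call__(self, input_model, output_model, *args, **kwargs):
    pass


try:
  backend.call_component(
      types.Model.create_from_bytes(b""),  # placeholder in-memory models
      types.Model.create_from_bytes(b""),
      _NoOpComponent(),
  )
except NotImplementedError as e:
  print(e)  # "example backend does not support noop component."
```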