bigdl_core_npu-2.5.0-cp311-cp311-win_amd64.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (223)
  1. bigdl_core_npu-2.5.0.dist-info/METADATA +35 -0
  2. bigdl_core_npu-2.5.0.dist-info/RECORD +223 -0
  3. bigdl_core_npu-2.5.0.dist-info/WHEEL +5 -0
  4. bigdl_core_npu-2.5.0.dist-info/top_level.txt +1 -0
  5. intel_npu_acceleration_library/__init__.py +24 -0
  6. intel_npu_acceleration_library/_version.py +6 -0
  7. intel_npu_acceleration_library/backend/__init__.py +37 -0
  8. intel_npu_acceleration_library/backend/base.py +215 -0
  9. intel_npu_acceleration_library/backend/bindings.py +279 -0
  10. intel_npu_acceleration_library/backend/compression.py +24 -0
  11. intel_npu_acceleration_library/backend/convolution.py +58 -0
  12. intel_npu_acceleration_library/backend/factory.py +944 -0
  13. intel_npu_acceleration_library/backend/linear.py +60 -0
  14. intel_npu_acceleration_library/backend/matmul.py +59 -0
  15. intel_npu_acceleration_library/backend/mlp.py +58 -0
  16. intel_npu_acceleration_library/backend/ops.py +141 -0
  17. intel_npu_acceleration_library/backend/qlinear.py +71 -0
  18. intel_npu_acceleration_library/backend/qmatmul.py +66 -0
  19. intel_npu_acceleration_library/backend/runtime.py +210 -0
  20. intel_npu_acceleration_library/backend/sdpa.py +107 -0
  21. intel_npu_acceleration_library/backend/tensor.py +1050 -0
  22. intel_npu_acceleration_library/backend/utils.py +70 -0
  23. intel_npu_acceleration_library/compiler.py +194 -0
  24. intel_npu_acceleration_library/device.py +230 -0
  25. intel_npu_acceleration_library/dtypes.py +122 -0
  26. intel_npu_acceleration_library/external/openvino/__init__.py +71 -0
  27. intel_npu_acceleration_library/external/openvino/_offline_transformations/__init__.py +20 -0
  28. intel_npu_acceleration_library/external/openvino/_pyopenvino.cp310-win_amd64.pyd +0 -0
  29. intel_npu_acceleration_library/external/openvino/_pyopenvino.cp311-win_amd64.pyd +0 -0
  30. intel_npu_acceleration_library/external/openvino/_pyopenvino.cp312-win_amd64.pyd +0 -0
  31. intel_npu_acceleration_library/external/openvino/_pyopenvino.cp38-win_amd64.pyd +0 -0
  32. intel_npu_acceleration_library/external/openvino/_pyopenvino.cp39-win_amd64.pyd +0 -0
  33. intel_npu_acceleration_library/external/openvino/frontend/__init__.py +34 -0
  34. intel_npu_acceleration_library/external/openvino/frontend/frontend.py +44 -0
  35. intel_npu_acceleration_library/external/openvino/frontend/onnx/__init__.py +15 -0
  36. intel_npu_acceleration_library/external/openvino/frontend/onnx/py_onnx_frontend.cp310-win_amd64.pyd +0 -0
  37. intel_npu_acceleration_library/external/openvino/frontend/onnx/py_onnx_frontend.cp311-win_amd64.pyd +0 -0
  38. intel_npu_acceleration_library/external/openvino/frontend/onnx/py_onnx_frontend.cp312-win_amd64.pyd +0 -0
  39. intel_npu_acceleration_library/external/openvino/frontend/onnx/py_onnx_frontend.cp38-win_amd64.pyd +0 -0
  40. intel_npu_acceleration_library/external/openvino/frontend/onnx/py_onnx_frontend.cp39-win_amd64.pyd +0 -0
  41. intel_npu_acceleration_library/external/openvino/frontend/paddle/__init__.py +15 -0
  42. intel_npu_acceleration_library/external/openvino/frontend/paddle/py_paddle_frontend.cp310-win_amd64.pyd +0 -0
  43. intel_npu_acceleration_library/external/openvino/frontend/paddle/py_paddle_frontend.cp311-win_amd64.pyd +0 -0
  44. intel_npu_acceleration_library/external/openvino/frontend/paddle/py_paddle_frontend.cp312-win_amd64.pyd +0 -0
  45. intel_npu_acceleration_library/external/openvino/frontend/paddle/py_paddle_frontend.cp38-win_amd64.pyd +0 -0
  46. intel_npu_acceleration_library/external/openvino/frontend/paddle/py_paddle_frontend.cp39-win_amd64.pyd +0 -0
  47. intel_npu_acceleration_library/external/openvino/frontend/pytorch/__init__.py +19 -0
  48. intel_npu_acceleration_library/external/openvino/frontend/pytorch/fx_decoder.py +352 -0
  49. intel_npu_acceleration_library/external/openvino/frontend/pytorch/gptq.py +139 -0
  50. intel_npu_acceleration_library/external/openvino/frontend/pytorch/module_extension.py +39 -0
  51. intel_npu_acceleration_library/external/openvino/frontend/pytorch/patch_model.py +98 -0
  52. intel_npu_acceleration_library/external/openvino/frontend/pytorch/py_pytorch_frontend.cp310-win_amd64.pyd +0 -0
  53. intel_npu_acceleration_library/external/openvino/frontend/pytorch/py_pytorch_frontend.cp311-win_amd64.pyd +0 -0
  54. intel_npu_acceleration_library/external/openvino/frontend/pytorch/py_pytorch_frontend.cp312-win_amd64.pyd +0 -0
  55. intel_npu_acceleration_library/external/openvino/frontend/pytorch/py_pytorch_frontend.cp38-win_amd64.pyd +0 -0
  56. intel_npu_acceleration_library/external/openvino/frontend/pytorch/py_pytorch_frontend.cp39-win_amd64.pyd +0 -0
  57. intel_npu_acceleration_library/external/openvino/frontend/pytorch/torchdynamo/backend.py +119 -0
  58. intel_npu_acceleration_library/external/openvino/frontend/pytorch/torchdynamo/backend_utils.py +85 -0
  59. intel_npu_acceleration_library/external/openvino/frontend/pytorch/torchdynamo/compile.py +141 -0
  60. intel_npu_acceleration_library/external/openvino/frontend/pytorch/torchdynamo/decompositions.py +116 -0
  61. intel_npu_acceleration_library/external/openvino/frontend/pytorch/torchdynamo/execute.py +189 -0
  62. intel_npu_acceleration_library/external/openvino/frontend/pytorch/torchdynamo/op_support.py +289 -0
  63. intel_npu_acceleration_library/external/openvino/frontend/pytorch/torchdynamo/partition.py +118 -0
  64. intel_npu_acceleration_library/external/openvino/frontend/pytorch/ts_decoder.py +536 -0
  65. intel_npu_acceleration_library/external/openvino/frontend/pytorch/utils.py +256 -0
  66. intel_npu_acceleration_library/external/openvino/frontend/tensorflow/__init__.py +16 -0
  67. intel_npu_acceleration_library/external/openvino/frontend/tensorflow/graph_iterator.py +116 -0
  68. intel_npu_acceleration_library/external/openvino/frontend/tensorflow/node_decoder.py +219 -0
  69. intel_npu_acceleration_library/external/openvino/frontend/tensorflow/py_tensorflow_frontend.cp310-win_amd64.pyd +0 -0
  70. intel_npu_acceleration_library/external/openvino/frontend/tensorflow/py_tensorflow_frontend.cp311-win_amd64.pyd +0 -0
  71. intel_npu_acceleration_library/external/openvino/frontend/tensorflow/py_tensorflow_frontend.cp312-win_amd64.pyd +0 -0
  72. intel_npu_acceleration_library/external/openvino/frontend/tensorflow/py_tensorflow_frontend.cp38-win_amd64.pyd +0 -0
  73. intel_npu_acceleration_library/external/openvino/frontend/tensorflow/py_tensorflow_frontend.cp39-win_amd64.pyd +0 -0
  74. intel_npu_acceleration_library/external/openvino/frontend/tensorflow/utils.py +460 -0
  75. intel_npu_acceleration_library/external/openvino/helpers/__init__.py +6 -0
  76. intel_npu_acceleration_library/external/openvino/helpers/packing.py +87 -0
  77. intel_npu_acceleration_library/external/openvino/preprocess/README.md +60 -0
  78. intel_npu_acceleration_library/external/openvino/preprocess/__init__.py +26 -0
  79. intel_npu_acceleration_library/external/openvino/preprocess/torchvision/__init__.py +15 -0
  80. intel_npu_acceleration_library/external/openvino/preprocess/torchvision/preprocess_converter.py +47 -0
  81. intel_npu_acceleration_library/external/openvino/preprocess/torchvision/requirements.txt +4 -0
  82. intel_npu_acceleration_library/external/openvino/preprocess/torchvision/torchvision_preprocessing.py +347 -0
  83. intel_npu_acceleration_library/external/openvino/properties/__init__.py +21 -0
  84. intel_npu_acceleration_library/external/openvino/properties/_properties.py +55 -0
  85. intel_npu_acceleration_library/external/openvino/properties/device/__init__.py +14 -0
  86. intel_npu_acceleration_library/external/openvino/properties/hint/__init__.py +15 -0
  87. intel_npu_acceleration_library/external/openvino/properties/intel_auto/__init__.py +12 -0
  88. intel_npu_acceleration_library/external/openvino/properties/intel_cpu/__init__.py +8 -0
  89. intel_npu_acceleration_library/external/openvino/properties/intel_gpu/__init__.py +12 -0
  90. intel_npu_acceleration_library/external/openvino/properties/intel_gpu/hint/__init__.py +11 -0
  91. intel_npu_acceleration_library/external/openvino/properties/log/__init__.py +11 -0
  92. intel_npu_acceleration_library/external/openvino/properties/streams/__init__.py +11 -0
  93. intel_npu_acceleration_library/external/openvino/runtime/__init__.py +85 -0
  94. intel_npu_acceleration_library/external/openvino/runtime/exceptions.py +17 -0
  95. intel_npu_acceleration_library/external/openvino/runtime/ie_api.py +631 -0
  96. intel_npu_acceleration_library/external/openvino/runtime/op/__init__.py +18 -0
  97. intel_npu_acceleration_library/external/openvino/runtime/op/util/__init__.py +22 -0
  98. intel_npu_acceleration_library/external/openvino/runtime/opset1/__init__.py +112 -0
  99. intel_npu_acceleration_library/external/openvino/runtime/opset1/ops.py +3067 -0
  100. intel_npu_acceleration_library/external/openvino/runtime/opset10/__init__.py +179 -0
  101. intel_npu_acceleration_library/external/openvino/runtime/opset10/ops.py +173 -0
  102. intel_npu_acceleration_library/external/openvino/runtime/opset11/__init__.py +179 -0
  103. intel_npu_acceleration_library/external/openvino/runtime/opset11/ops.py +107 -0
  104. intel_npu_acceleration_library/external/openvino/runtime/opset12/__init__.py +180 -0
  105. intel_npu_acceleration_library/external/openvino/runtime/opset12/ops.py +120 -0
  106. intel_npu_acceleration_library/external/openvino/runtime/opset13/__init__.py +188 -0
  107. intel_npu_acceleration_library/external/openvino/runtime/opset13/ops.py +399 -0
  108. intel_npu_acceleration_library/external/openvino/runtime/opset14/__init__.py +190 -0
  109. intel_npu_acceleration_library/external/openvino/runtime/opset14/ops.py +171 -0
  110. intel_npu_acceleration_library/external/openvino/runtime/opset15/__init__.py +10 -0
  111. intel_npu_acceleration_library/external/openvino/runtime/opset15/ops.py +85 -0
  112. intel_npu_acceleration_library/external/openvino/runtime/opset2/__init__.py +118 -0
  113. intel_npu_acceleration_library/external/openvino/runtime/opset2/ops.py +216 -0
  114. intel_npu_acceleration_library/external/openvino/runtime/opset3/__init__.py +134 -0
  115. intel_npu_acceleration_library/external/openvino/runtime/opset3/ops.py +638 -0
  116. intel_npu_acceleration_library/external/openvino/runtime/opset4/__init__.py +145 -0
  117. intel_npu_acceleration_library/external/openvino/runtime/opset4/ops.py +464 -0
  118. intel_npu_acceleration_library/external/openvino/runtime/opset5/__init__.py +152 -0
  119. intel_npu_acceleration_library/external/openvino/runtime/opset5/ops.py +372 -0
  120. intel_npu_acceleration_library/external/openvino/runtime/opset6/__init__.py +154 -0
  121. intel_npu_acceleration_library/external/openvino/runtime/opset6/ops.py +189 -0
  122. intel_npu_acceleration_library/external/openvino/runtime/opset7/__init__.py +158 -0
  123. intel_npu_acceleration_library/external/openvino/runtime/opset7/ops.py +169 -0
  124. intel_npu_acceleration_library/external/openvino/runtime/opset8/__init__.py +169 -0
  125. intel_npu_acceleration_library/external/openvino/runtime/opset8/ops.py +783 -0
  126. intel_npu_acceleration_library/external/openvino/runtime/opset9/__init__.py +175 -0
  127. intel_npu_acceleration_library/external/openvino/runtime/opset9/ops.py +341 -0
  128. intel_npu_acceleration_library/external/openvino/runtime/opset_utils.py +22 -0
  129. intel_npu_acceleration_library/external/openvino/runtime/passes/__init__.py +19 -0
  130. intel_npu_acceleration_library/external/openvino/runtime/passes/graph_rewrite.py +33 -0
  131. intel_npu_acceleration_library/external/openvino/runtime/passes/manager.py +26 -0
  132. intel_npu_acceleration_library/external/openvino/runtime/properties/__init__.py +38 -0
  133. intel_npu_acceleration_library/external/openvino/runtime/properties/hint/__init__.py +25 -0
  134. intel_npu_acceleration_library/external/openvino/runtime/utils/__init__.py +7 -0
  135. intel_npu_acceleration_library/external/openvino/runtime/utils/broadcasting.py +44 -0
  136. intel_npu_acceleration_library/external/openvino/runtime/utils/data_helpers/__init__.py +8 -0
  137. intel_npu_acceleration_library/external/openvino/runtime/utils/data_helpers/data_dispatcher.py +429 -0
  138. intel_npu_acceleration_library/external/openvino/runtime/utils/data_helpers/wrappers.py +148 -0
  139. intel_npu_acceleration_library/external/openvino/runtime/utils/decorators.py +70 -0
  140. intel_npu_acceleration_library/external/openvino/runtime/utils/input_validation.py +133 -0
  141. intel_npu_acceleration_library/external/openvino/runtime/utils/node_factory.py +127 -0
  142. intel_npu_acceleration_library/external/openvino/runtime/utils/reduction.py +25 -0
  143. intel_npu_acceleration_library/external/openvino/runtime/utils/types.py +175 -0
  144. intel_npu_acceleration_library/external/openvino/tools/__init__.py +4 -0
  145. intel_npu_acceleration_library/external/openvino/tools/benchmark/__init__.py +3 -0
  146. intel_npu_acceleration_library/external/openvino/tools/benchmark/benchmark.py +186 -0
  147. intel_npu_acceleration_library/external/openvino/tools/benchmark/main.py +695 -0
  148. intel_npu_acceleration_library/external/openvino/tools/benchmark/parameters.py +199 -0
  149. intel_npu_acceleration_library/external/openvino/tools/benchmark/utils/__init__.py +3 -0
  150. intel_npu_acceleration_library/external/openvino/tools/benchmark/utils/constants.py +26 -0
  151. intel_npu_acceleration_library/external/openvino/tools/benchmark/utils/inputs_filling.py +482 -0
  152. intel_npu_acceleration_library/external/openvino/tools/benchmark/utils/logging.py +8 -0
  153. intel_npu_acceleration_library/external/openvino/tools/benchmark/utils/statistics_report.py +296 -0
  154. intel_npu_acceleration_library/external/openvino/tools/benchmark/utils/utils.py +836 -0
  155. intel_npu_acceleration_library/external/openvino/tools/ovc/__init__.py +20 -0
  156. intel_npu_acceleration_library/external/openvino/tools/ovc/__main__.py +10 -0
  157. intel_npu_acceleration_library/external/openvino/tools/ovc/cli_parser.py +633 -0
  158. intel_npu_acceleration_library/external/openvino/tools/ovc/convert.py +102 -0
  159. intel_npu_acceleration_library/external/openvino/tools/ovc/convert_data_type.py +82 -0
  160. intel_npu_acceleration_library/external/openvino/tools/ovc/convert_impl.py +536 -0
  161. intel_npu_acceleration_library/external/openvino/tools/ovc/environment_setup_utils.py +50 -0
  162. intel_npu_acceleration_library/external/openvino/tools/ovc/error.py +49 -0
  163. intel_npu_acceleration_library/external/openvino/tools/ovc/get_ov_update_message.py +16 -0
  164. intel_npu_acceleration_library/external/openvino/tools/ovc/help.py +45 -0
  165. intel_npu_acceleration_library/external/openvino/tools/ovc/logger.py +91 -0
  166. intel_npu_acceleration_library/external/openvino/tools/ovc/main.py +35 -0
  167. intel_npu_acceleration_library/external/openvino/tools/ovc/moc_frontend/__init__.py +2 -0
  168. intel_npu_acceleration_library/external/openvino/tools/ovc/moc_frontend/analysis.py +46 -0
  169. intel_npu_acceleration_library/external/openvino/tools/ovc/moc_frontend/check_config.py +57 -0
  170. intel_npu_acceleration_library/external/openvino/tools/ovc/moc_frontend/extractor.py +447 -0
  171. intel_npu_acceleration_library/external/openvino/tools/ovc/moc_frontend/layout_utils.py +73 -0
  172. intel_npu_acceleration_library/external/openvino/tools/ovc/moc_frontend/moc_emit_ir.py +32 -0
  173. intel_npu_acceleration_library/external/openvino/tools/ovc/moc_frontend/offline_transformations.py +107 -0
  174. intel_npu_acceleration_library/external/openvino/tools/ovc/moc_frontend/paddle_frontend_utils.py +83 -0
  175. intel_npu_acceleration_library/external/openvino/tools/ovc/moc_frontend/pipeline.py +246 -0
  176. intel_npu_acceleration_library/external/openvino/tools/ovc/moc_frontend/preprocessing.py +220 -0
  177. intel_npu_acceleration_library/external/openvino/tools/ovc/moc_frontend/pytorch_frontend_utils.py +205 -0
  178. intel_npu_acceleration_library/external/openvino/tools/ovc/moc_frontend/shape_utils.py +109 -0
  179. intel_npu_acceleration_library/external/openvino/tools/ovc/moc_frontend/type_utils.py +82 -0
  180. intel_npu_acceleration_library/external/openvino/tools/ovc/ovc.py +13 -0
  181. intel_npu_acceleration_library/external/openvino/tools/ovc/telemetry_params.py +6 -0
  182. intel_npu_acceleration_library/external/openvino/tools/ovc/telemetry_stub.py +28 -0
  183. intel_npu_acceleration_library/external/openvino/tools/ovc/telemetry_utils.py +118 -0
  184. intel_npu_acceleration_library/external/openvino/tools/ovc/utils.py +109 -0
  185. intel_npu_acceleration_library/external/openvino/tools/ovc/version.py +80 -0
  186. intel_npu_acceleration_library/external/openvino/torch/__init__.py +5 -0
  187. intel_npu_acceleration_library/external/openvino/utils.py +98 -0
  188. intel_npu_acceleration_library/functional/__init__.py +8 -0
  189. intel_npu_acceleration_library/functional/scaled_dot_product_attention.py +47 -0
  190. intel_npu_acceleration_library/lib/Release/cache.json +113732 -0
  191. intel_npu_acceleration_library/lib/Release/intel_npu_acceleration_library.dll +0 -0
  192. intel_npu_acceleration_library/lib/Release/openvino.dll +0 -0
  193. intel_npu_acceleration_library/lib/Release/openvino_auto_batch_plugin.dll +0 -0
  194. intel_npu_acceleration_library/lib/Release/openvino_auto_plugin.dll +0 -0
  195. intel_npu_acceleration_library/lib/Release/openvino_c.dll +0 -0
  196. intel_npu_acceleration_library/lib/Release/openvino_hetero_plugin.dll +0 -0
  197. intel_npu_acceleration_library/lib/Release/openvino_intel_cpu_plugin.dll +0 -0
  198. intel_npu_acceleration_library/lib/Release/openvino_intel_gpu_plugin.dll +0 -0
  199. intel_npu_acceleration_library/lib/Release/openvino_intel_npu_plugin.dll +0 -0
  200. intel_npu_acceleration_library/lib/Release/openvino_ir_frontend.dll +0 -0
  201. intel_npu_acceleration_library/lib/Release/openvino_onnx_frontend.dll +0 -0
  202. intel_npu_acceleration_library/lib/Release/openvino_paddle_frontend.dll +0 -0
  203. intel_npu_acceleration_library/lib/Release/openvino_pytorch_frontend.dll +0 -0
  204. intel_npu_acceleration_library/lib/Release/openvino_tensorflow_frontend.dll +0 -0
  205. intel_npu_acceleration_library/lib/Release/openvino_tensorflow_lite_frontend.dll +0 -0
  206. intel_npu_acceleration_library/lib/Release/tbb12.dll +0 -0
  207. intel_npu_acceleration_library/lib/Release/tbb12_debug.dll +0 -0
  208. intel_npu_acceleration_library/lib/Release/tbbbind_2_5.dll +0 -0
  209. intel_npu_acceleration_library/lib/Release/tbbbind_2_5_debug.dll +0 -0
  210. intel_npu_acceleration_library/lib/Release/tbbmalloc.dll +0 -0
  211. intel_npu_acceleration_library/lib/Release/tbbmalloc_debug.dll +0 -0
  212. intel_npu_acceleration_library/lib/Release/tbbmalloc_proxy.dll +0 -0
  213. intel_npu_acceleration_library/lib/Release/tbbmalloc_proxy_debug.dll +0 -0
  214. intel_npu_acceleration_library/modelling.py +150 -0
  215. intel_npu_acceleration_library/nn/__init__.py +20 -0
  216. intel_npu_acceleration_library/nn/autograd.py +68 -0
  217. intel_npu_acceleration_library/nn/conv.py +257 -0
  218. intel_npu_acceleration_library/nn/functional.py +1207 -0
  219. intel_npu_acceleration_library/nn/linear.py +162 -0
  220. intel_npu_acceleration_library/nn/llm.py +417 -0
  221. intel_npu_acceleration_library/nn/module.py +393 -0
  222. intel_npu_acceleration_library/optimizations.py +157 -0
  223. intel_npu_acceleration_library/quantization.py +174 -0
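The hunks below reproduce a subset of the files listed above, all of them vendored OpenVINO conversion (OVC) sources shipped inside the wheel. For orientation, this is roughly how the packaged library is consumed from PyTorch; a minimal sketch only, assuming the top-level compile helper exported by intel_npu_acceleration_library/__init__.py keeps its usual signature and that a compatible Intel NPU driver is installed:

    import torch
    import intel_npu_acceleration_library

    # Any torch.nn.Module works; a single Linear layer keeps the sketch small.
    model = torch.nn.Linear(128, 64).eval()

    # Offload the model to the NPU; dtype selects the weight precision
    # (torch.int8 routes through the quantized kernels bundled in this wheel).
    npu_model = intel_npu_acceleration_library.compile(model, dtype=torch.int8)

    with torch.no_grad():
        out = npu_model(torch.rand(1, 128))
    print(out.shape)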
intel_npu_acceleration_library/external/openvino/tools/ovc/convert_impl.py
@@ -0,0 +1,536 @@
+ # Copyright (C) 2018-2024 Intel Corporation
+ # SPDX-License-Identifier: Apache-2.0
+
+ import argparse
+ import datetime
+ import logging as log
+ import os
+ import sys
+ import traceback
+ import tracemalloc
+ from collections import OrderedDict
+ from pathlib import Path
+ from typing import Iterable, Callable
+
+
+ try:
+     import openvino_telemetry as tm
+     from openvino_telemetry.backend import backend_ga4
+ except ImportError:
+     import openvino.tools.ovc.telemetry_stub as tm
+
+ from openvino.tools.ovc.moc_frontend.check_config import any_extensions_used
+ from openvino.tools.ovc.moc_frontend.pipeline import moc_pipeline
+ from openvino.tools.ovc.moc_frontend.moc_emit_ir import moc_emit_ir
+ from openvino.tools.ovc.moc_frontend.type_utils import to_ov_type
+ from openvino.tools.ovc.cli_parser import get_available_front_ends, get_common_cli_options, depersonalize, \
+     get_mo_convert_params, input_to_input_cut_info, parse_inputs
+ from openvino.tools.ovc.help import get_convert_model_help_specifics
+
+ from openvino.tools.ovc.error import Error, FrameworkError
+ from openvino.tools.ovc.get_ov_update_message import get_compression_message
+ from openvino.tools.ovc.version import VersionChecker
+ from openvino.tools.ovc.utils import check_values_equal
+ from openvino.tools.ovc.logger import init_logger
+ from openvino.tools.ovc.telemetry_utils import send_params_info, send_conversion_result, \
+     init_mo_telemetry
+ from openvino.tools.ovc.moc_frontend.pytorch_frontend_utils import get_pytorch_decoder, extract_input_info_from_example
+ from openvino.tools.ovc.moc_frontend.paddle_frontend_utils import paddle_frontend_converter
+
+ # pylint: disable=no-name-in-module,import-error
+ from openvino.frontend import FrontEndManager, OpConversionFailure, TelemetryExtension
+ from openvino.runtime import get_version as get_rt_version
+ from openvino.runtime import PartialShape
+
+ try:
+     from openvino.frontend.tensorflow.utils import create_tf_graph_iterator, type_supported_by_tf_fe, \
+         extract_model_graph # pylint: disable=no-name-in-module,import-error
+
+     tf_frontend_with_python_bindings_installed = True
+ except (ModuleNotFoundError, ImportError):
+     tf_frontend_with_python_bindings_installed = False
+
+
+ def replace_ext(name: str, old: str, new: str):
+     base, ext = os.path.splitext(name)
+     log.debug("base: {}, ext: {}".format(base, ext))
+     if ext == old:
+         return base + new
+
+
+ def print_argv(argv: argparse.Namespace):
+     print('Model Conversion arguments:')
+     props = OrderedDict()
+     props['common_args'] = get_common_cli_options(argv, argv.is_python_api_used)
+
+     framework_specifics_map = {
+         'common_args': 'Common parameters:'
+     }
+
+     lines = []
+     for key in props:
+         lines.append(framework_specifics_map[key])
+         for (op, desc) in props[key].items():
+             if isinstance(desc, list):
+                 lines.append('\t{}: \t{}'.format(desc[0], desc[1](getattr(argv, op, 'NONE'))))
+             else:
+                 lines.append('\t{}: \t{}'.format(desc, getattr(argv, op, 'NONE')))
+     print('\n'.join(lines), flush=True)
+
+
+ def check_iterable(iterable: Iterable, func: Callable):
+     for element in iterable:
+         if not func(element):
+             return False
+     return True
+
+
+ def arguments_post_parsing(argv: argparse.Namespace):
+     # TODO: This function looks similar to another one. Check for code duplicates.
+     log.debug("Model Conversion API started")
+     if not argv.is_python_api_used:
+         log.debug('Output model name would be {}{{.xml, .bin}}'.format(argv.output_model))
+
+     if is_verbose(argv):
+         print_argv(argv)
+
+     import re
+     if argv.is_python_api_used and isinstance(argv.input, str):
+         argv.input = [argv.input]
+
+     if not argv.is_python_api_used and isinstance(argv.input, str):
+         argv.input = parse_inputs(argv.input)
+
+     normalize_inputs(argv)
+     log.debug("Placeholder shapes : {}".format(argv.placeholder_shapes))
+
+     if not hasattr(argv, 'output') or argv.output is None:
+         return argv
+
+     if argv.is_python_api_used:
+         error_msg = f"output '{argv.output}' is incorrect, it should be string or a list/tuple of strings"
+         assert isinstance(argv.output, (str, list, tuple)), error_msg
+         if isinstance(argv.output, list):
+             assert check_iterable(argv.output, lambda x: isinstance(x, str)), error_msg
+         else:
+             argv.output = [argv.output]
+     else:
+         assert isinstance(argv.output, str)
+
+         error_msg = f"output '{argv.output}' is incorrect, output names should not be empty or contain spaces"
+         processed_output = re.split(r'\s*,\s*', argv.output.strip())
+         assert check_iterable(processed_output, lambda x: x.find(' ') == -1), error_msg
+         assert check_iterable(processed_output, lambda x: len(x) > 0), error_msg
+         argv.output = processed_output
+     return argv
+
+
+ def get_moc_frontends(argv: argparse.Namespace):
+     fem = argv.feManager
+
+     if not fem:
+         return None, []
+
+     available_moc_front_ends = get_available_front_ends(fem)
+     if argv.framework:
+         moc_front_end = fem.load_by_framework(argv.framework)
+         return moc_front_end, available_moc_front_ends
+     if argv.input_model:
+         if isinstance(argv.input_model, (tuple, list)) and len(argv.input_model) == 2:
+             moc_front_end = fem.load_by_model(
+                 [argv.input_model[0], argv.input_model[1]]) # TODO: Pass all input model parts
+         else:
+             moc_front_end = fem.load_by_model(argv.input_model)
+         if not moc_front_end:
+             return None, available_moc_front_ends
+         argv.framework = moc_front_end.get_name()
+     else:
+         return None, []
+
+     # This check as a workaround to skip IR frontend
+     if not moc_front_end.get_name() in available_moc_front_ends:
+         return None, available_moc_front_ends
+
+     return moc_front_end, available_moc_front_ends
+
+
+ def filtered_extensions(extensions):
+     try:
+         new_extensions = []
+         from openvino.frontend.pytorch.module_extension import ModuleExtension
+         for ext in extensions:
+             if not isinstance(ext, ModuleExtension):
+                 new_extensions.append(ext)
+         return new_extensions
+     except:
+         return extensions
+
+
+ def prepare_ir(argv: argparse.Namespace):
+     argv = arguments_post_parsing(argv)
+     t = tm.Telemetry()
+
+     if isinstance(argv.input_model, (tuple, list)) and len(argv.input_model) == 1:
+         argv.input_model = argv.input_model[0]
+
+     moc_front_end, available_moc_front_ends = get_moc_frontends(argv)
+     if moc_front_end:
+         # TODO: Should be moved to the same place where paddle and pytorch handle their objects
+         if argv.framework == 'tf' and argv.is_python_object and type_supported_by_tf_fe(argv.input_model):
+             argv.input_model = create_tf_graph_iterator(argv.input_model,
+                                                         argv.placeholder_shapes,
+                                                         argv.placeholder_data_types,
+                                                         getattr(argv, "example_input", None),
+                                                         argv.share_weights)
+         t.send_event("ovc", "conversion_method", moc_front_end.get_name() + "_frontend")
+         moc_front_end.add_extension(TelemetryExtension("ovc", t.send_event, t.send_error, t.send_stack_trace))
+         if any_extensions_used(argv):
+             for extension in filtered_extensions(argv.extension):
+                 moc_front_end.add_extension(extension)
+         ov_model = moc_pipeline(argv, moc_front_end)
+         return ov_model
+
+     if not argv.input_model:
+         raise Error('No input model is provided')
+
+     raise Error('Cannot recognize input model.')
+
+
+ def check_model_object(argv):
+     model = argv['input_model']
+     if 'tensorflow' in sys.modules:
+         if tf_frontend_with_python_bindings_installed and extract_model_graph(argv):
+             return "tf"
+     if 'torch' in sys.modules:
+         import torch
+         if isinstance(model, (torch.nn.Module, torch.jit.ScriptFunction)) or (hasattr(torch, "export") and isinstance(model, (torch.export.ExportedProgram))):
+             return "pytorch"
+         try:
+             from openvino.frontend.pytorch.ts_decoder import TorchScriptPythonDecoder
+             from openvino.frontend.pytorch.fx_decoder import TorchFXPythonDecoder
+
+             if isinstance(model, (TorchScriptPythonDecoder, TorchFXPythonDecoder)):
+                 return "pytorch"
+         except Exception as e:
+             pass
+
+     import io
+     # FIXME: Consuming any io.BytesIO object as an ONNX model is too dengerous and
+     # can conflict with others in the future (not future proof).
+     # TODO: Refer to https://onnx.ai/onnx/intro/python.html to find examples with
+     # real ONNX python objects. ONNX model has onnx.onnx_ml_pb2.ModelProto type.
+     if isinstance(model, io.BytesIO):
+         return 'onnx'
+
+     if 'paddle' in sys.modules:
+         import paddle
+         if isinstance(model, paddle.hapi.model.Model) or isinstance(model,
+                                                                     paddle.fluid.dygraph.layers.Layer) or isinstance(
+                 model, paddle.fluid.executor.Executor):
+             return "paddle"
+
+     raise Error('Unknown model type: {}'.format(type(model)))
+
+
+ def driver(argv: argparse.Namespace, non_default_params: dict):
+     init_logger('ERROR', argv.verbose)
+
+     # Log dictionary with non-default cli parameters where complex classes are excluded.
+     log.debug(str(non_default_params))
+
+     ov_model = moc_emit_ir(prepare_ir(argv), argv)
+
+     return ov_model
+
+
+ def get_non_default_params(argv, cli_parser):
+     import numbers
+     import inspect
+     from openvino.tools.ovc import convert_model
+
+     signature = inspect.signature(convert_model)
+     # make dictionary with parameters which have non-default values to be serialized in IR in rt_info
+     non_default_params = {}
+     for arg, arg_value in vars(argv).items():
+         if arg in signature.parameters and check_values_equal(arg_value, signature.parameters[arg].default):
+             continue
+         if check_values_equal(arg_value, cli_parser.get_default(arg)):
+             continue
+         value = depersonalize(arg_value, arg)
+         # Skip complex classes in params to prevent
+         # serializing it to rt_info
+         if isinstance(value, (str, bool, numbers.Number)):
+             non_default_params[arg] = value
+     return non_default_params
+
+
+ def add_line_breaks(text: str, char_num: int, line_break: str):
+     words = text.replace('\n', "\n ").split(" ")
+     cnt = 0
+     for i, w in enumerate(words):
+         cnt += len(w)
+         if '\n' in w:
+             cnt = len(w) - w.find('\n') - 1
+         if cnt > char_num:
+             if words[i][-1] not in ['\n', '\t']:
+                 words[i] = w + '\n'
+             cnt = 0
+     text = ' '.join(words).replace("\n ", "\n")
+     return line_break + text.replace("\n", line_break)
+
+
+ def show_mo_convert_help():
+     mo_convert_params = get_mo_convert_params()
+     for group_name, group in mo_convert_params.items():
+         print(group_name)
+         for param_name in group:
+             param_data = group[param_name]
+             text = param_data.description.replace(" ", '')
+             text = add_line_breaks(text, 56, "\n\t\t\t")
+             print(" :param {} {}".format(param_name, text))
+         print()
+
+
+ def input_model_is_object(input_model):
+     if input_model == ():
+         return False
+     if isinstance(input_model, (str, Path)):
+         return False
+     if isinstance(input_model, (tuple, list)):
+         return all(input_model_is_object(part) for part in input_model)
+     return True
+
+
+ def normalize_inputs(argv: argparse.Namespace):
+     """
+     repacks params passed to convert_model and wraps resulting values into dictionaries or lists.
+     After working of this method following values are set in argv:
+
+     argv.input, argv.inputs_list - list of input names. Both values are used in some parts of MO.
+     Could be good to refactor it and use only one of these values.
+
+     argv.placeholder_shapes - dictionary where key is node name, value is PartialShape,
+     or list of PartialShape if node names were not set.
+
+     argv.placeholder_data_types - dictionary where key is node name, value is node np.type,
+     or list of np.types if node names were not set.
+
+     :param argv: MO arguments
+     """
+     # Parse input to list of InputCutInfo
+     inputs = input_to_input_cut_info(argv.input)
+
+     # Make list of input names
+     input_names_list = []
+     for inp in inputs:
+         if inp.name is not None:
+             input_names_list.append(inp.name)
+     if len(input_names_list) > 0:
+         assert len(input_names_list) == len(inputs), "\"input\" parameter has unnamed inputs and named inputs. " \
+                                                      "Please either set names for all inputs, " \
+                                                      "or do not set names for all inputs."
+     argv.inputs_list = input_names_list
+     argv.input = ','.join(input_names_list)
+
+     if len(input_names_list) > 0:
+         # Named inputs case
+         shape_dict = {}
+         data_type_dict = {}
+         for inp in inputs:
+             if inp.shape is not None:
+                 # Wrap shape to PartialShape for uniformity of stored values
+                 shape_dict[inp.name] = PartialShape(inp.shape)
+             else:
+                 shape_dict[inp.name] = None
+             if inp.type is not None:
+                 # Convert type to ov.Type for uniformity of stored values
+                 data_type_dict[inp.name] = to_ov_type(inp.type)
+         argv.placeholder_shapes = shape_dict if shape_dict else None
+         argv.placeholder_data_types = data_type_dict if data_type_dict else {}
+     else:
+         # Unnamed inputs case
+         shape_list = []
+         data_type_list = []
+         for inp in inputs:
+             if inp.shape is not None:
+                 # Wrap shape to PartialShape for uniformity of stored values
+                 shape_list.append(PartialShape(inp.shape))
+             if inp.type is not None:
+                 # Convert type to ov.Type for uniformity of stored values
+                 data_type_list.append(to_ov_type(inp.type))
+         argv.placeholder_shapes = shape_list if shape_list else None
+         argv.placeholder_data_types = data_type_list if data_type_list else {}
+     if hasattr(argv, "framework") and argv.framework == "pytorch" and getattr(argv, "example_input", None) is not None:
+         extract_input_info_from_example(argv, inputs)
+
+
+ def args_to_argv(**kwargs):
+     argv = argparse.Namespace()
+     args_specifics = get_convert_model_help_specifics()
+
+     import inspect
+     from openvino.tools.ovc import convert_model
+     signature = inspect.signature(convert_model)
+     for key, value in kwargs.items():
+         if value is None and key in signature.parameters:
+             setattr(argv, key, signature.parameters[key].default)
+             continue
+         if key in args_specifics:
+             param_specifics = args_specifics[key]
+             if 'action' in param_specifics and hasattr(param_specifics['action'], 'check_value'):
+                 value = param_specifics['action'].check_value(value, key)
+             if 'type' in param_specifics:
+                 value = param_specifics['type'](value)
+         setattr(argv, key, value)
+     return argv
+
+
+ def pack_params_to_args_namespace(args: dict, cli_parser: argparse.ArgumentParser, python_api_used):
+     if python_api_used:
+         argv = args_to_argv(**args)
+
+         # get list of all available params for convert_model()
+         all_params = {}
+         for key, value in get_mo_convert_params().items():
+             all_params.update(value)
+
+         # check that there are no unknown params provided
+         for key, value in args.items():
+             if key not in all_params.keys():
+                 raise Error("Unrecognized argument: {}".format(key))
+     else:
+         argv = cli_parser.parse_args()
+     return argv
+
+
+ def is_verbose(argv, args=None):
+     if argv is not None and hasattr(argv, 'verbose') and argv.verbose:
+         return True
+     if args is not None and 'verbose' in args and args['verbose']:
+         return True
+     if '--verbose' in sys.argv:
+         return True
+     return False
+
+
+ def _convert(cli_parser: argparse.ArgumentParser, args, python_api_used):
+     start_time = datetime.datetime.now()
+     if is_verbose(None, args):
+         tracemalloc.start()
+
+     simplified_ie_version = VersionChecker().get_ie_simplified_version()
+     telemetry = init_mo_telemetry()
+     telemetry.start_session('ovc')
+     telemetry.send_event('ovc', 'version', simplified_ie_version)
+     # Initialize logger with 'ERROR' as default level to be able to form nice messages
+     # before arg parser deliver log_level requested by user
+     init_logger('ERROR', False)
+     argv = None
+     # Minimize modifications among other places in case if multiple pieces are passed as input_model
+     if python_api_used:
+         if 'input_model' not in args:
+             args['input_model'] = ()
+         if isinstance(args['input_model'], (tuple, list)) and len(args['input_model']) == 1:
+             args['input_model'] = args['input_model'][0]
+     try:
+         model_framework = None
+         inp_model_is_object = input_model_is_object(args['input_model']) if python_api_used else False
+
+         if inp_model_is_object:
+             model_framework = check_model_object(args)
+             if model_framework == "pytorch":
+                 example_inputs = None
+                 if 'example_input' in args and args['example_input'] is not None:
+                     example_inputs = args['example_input']
+                 elif 'example_inputs' in args:
+                     raise AssertionError(
+                         "'example_inputs' argument is not recognized, maybe you meant to provide 'example_input'?")
+
+                 get_pytorch_decoder(args['input_model'], example_inputs, args)
+             if model_framework == "paddle":
+                 example_inputs = None
+                 if 'example_input' in args and args['example_input'] is not None:
+                     example_inputs = args['example_input']
+
+                 outputs = None
+                 if 'output' in args and args['output'] is not None:
+                     # Once the temporary PDPD model is generated. output can be dropped.
+                     # Just swap outputs and args['output'] can reset the argv.output to `None`.
+                     # It can avoid the following `output` negative effect.
+                     outputs, args['output'] = args['output'], outputs
+                 paddle_runtime_converter = paddle_frontend_converter(args['input_model'], example_inputs,
+                                                                      outputs)
+                 pdmodel = paddle_runtime_converter.convert_paddle_to_pdmodel()
+                 args['input_model'] = pdmodel
+
+         argv = pack_params_to_args_namespace(args, cli_parser, python_api_used)
+         argv.framework = model_framework
+         argv.is_python_object = inp_model_is_object
+
+         argv.feManager = FrontEndManager()
+
+         # send telemetry with params info
+         send_params_info(argv, cli_parser)
+
+         non_default_params = get_non_default_params(argv, cli_parser)
+         argv.is_python_api_used = python_api_used
+
+         argv.framework = model_framework
+
+         ov_model = driver(argv, {"conversion_parameters": non_default_params})
+
+         if inp_model_is_object and model_framework == "paddle":
+             if paddle_runtime_converter:
+                 paddle_runtime_converter.destroy()
+
+         # add MO meta data to model
+         ov_model.set_rt_info(get_rt_version(), "Runtime_version")
+         for key, value in non_default_params.items():
+             ov_model.set_rt_info(str(value), ["conversion_parameters", str(key)])
+
+         if is_verbose(argv) or not python_api_used:
+             if 'compress_to_fp16' in argv and argv.compress_to_fp16:
+                 print(get_compression_message())
+
+         send_conversion_result('success')
+
+         if is_verbose(argv):
+             elapsed_time = datetime.datetime.now() - start_time
+             print('[ SUCCESS ] Total execution time: {:.2f} seconds. '.format(elapsed_time.total_seconds()))
+
+             _, peak_size = tracemalloc.get_traced_memory()
+             print("[ SUCCESS ] Peak memory consumption (includes only memory allocated in Python): {:.2f} MB. ".format(
+                 peak_size / (1024 * 1024)))
+             tracemalloc.stop()
+
+         return ov_model, argv
+
+     except Exception as e:
+         if is_verbose(argv) or not python_api_used:
+             if isinstance(e, (FileNotFoundError, NotADirectoryError)):
+                 log.error('File {} was not found'.format(str(e).split('No such file or directory:')[1]))
+                 log.debug(traceback.format_exc())
+             elif isinstance(e, (Error, OpConversionFailure)):
+                 log.error(e)
+                 log.debug(traceback.format_exc())
+             elif isinstance(e, FrameworkError):
+                 log.error(e, extra={'framework_error': True})
+                 log.debug(traceback.format_exc())
+             else:
+                 log.error("-------------------------------------------------")
+                 log.error("----------------- INTERNAL ERROR ----------------")
+                 log.error("Unexpected exception happened.")
+                 log.error("Please verify parameters and environment.")
+                 log.error("If you think this is a bug, please create new ticket here: ")
+                 log.error("https://github.com/openvinotoolkit/openvino/issues.")
+                 log.error("-------------- DETAILED INFORMATION -------------")
+                 log.error(str(e))
+                 log.error(traceback.format_exc())
+                 log.error("----------------- END OF REPORT -----------------")
+                 log.error("-------------------------------------------------")
+
+         send_conversion_result('fail')
+         if python_api_used:
+             raise e
+         else:
+             return None, argv
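Most of convert_impl.py is glue around the OVC argument pipeline, but a few helpers are small pure functions. As a quick illustration of how arguments_post_parsing uses check_iterable to validate the output argument, the helper is copied verbatim from the hunk above so the snippet runs standalone:

    from typing import Callable, Iterable

    def check_iterable(iterable: Iterable, func: Callable):
        # True only if func holds for every element (same logic as above).
        for element in iterable:
            if not func(element):
                return False
        return True

    # Mirrors the checks applied to argv.output in arguments_post_parsing().
    outputs = ["logits", "probabilities"]
    assert check_iterable(outputs, lambda x: isinstance(x, str))
    assert check_iterable(outputs, lambda x: x.find(' ') == -1)
    assert check_iterable(outputs, lambda x: len(x) > 0)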
intel_npu_acceleration_library/external/openvino/tools/ovc/environment_setup_utils.py
@@ -0,0 +1,50 @@
+ # Copyright (C) 2018-2024 Intel Corporation
+ # SPDX-License-Identifier: Apache-2.0
+
+ import os
+ import sys
+
+ # do not print INFO and WARNING messages from TensorFlow
+ os.environ['TF_CPP_MIN_LOG_LEVEL'] = '2'
+
+
+ def get_imported_module_version(imported_module):
+     """
+     Get imported module version
+     :return: version(str) or raise AttributeError exception
+     """
+     version_attrs = ("__version__", "VERSION", "version")
+     installed_version = None
+     for attr in version_attrs:
+         installed_version = getattr(imported_module, attr, None)
+         if isinstance(installed_version, str):
+             return installed_version
+         else:
+             installed_version = None
+
+     if installed_version is None:
+         raise AttributeError("{} module doesn't have version attribute".format(imported_module))
+     else:
+         return installed_version
+
+
+ def get_environment_setup(framework):
+     """
+     Get environment setup such as Python version, TensorFlow version
+     :param framework: framework name
+     :return: a dictionary of environment variables
+     """
+     env_setup = dict()
+     python_version = "{}.{}.{}".format(sys.version_info.major,
+                                        sys.version_info.minor,
+                                        sys.version_info.micro)
+     env_setup['python_version'] = python_version
+     try:
+         if framework == 'tf':
+             exec("import tensorflow")
+             env_setup['tensorflow'] = get_imported_module_version(sys.modules["tensorflow"])
+             exec("del tensorflow")
+     except (AttributeError, ImportError):
+         pass
+     env_setup['sys_platform'] = sys.platform
+     return env_setup
intel_npu_acceleration_library/external/openvino/tools/ovc/error.py
@@ -0,0 +1,49 @@
+ # Copyright (C) 2018-2024 Intel Corporation
+ # SPDX-License-Identifier: Apache-2.0
+
+ import re
+
+
+ class BasicError(Exception):
+     """ Base class for all exceptions in Model Conversion API
+
+     It operates like Exception but when it is converted to str,
+     it formats string as args[0].format(*args[1:]), where
+     args are arguments provided when an exception instance is
+     created.
+     """
+
+     def __str__(self):
+         if len(self.args) <= 1:
+             return Exception.__str__(self)
+         return self.args[0].format(*self.args[1:]) # pylint: disable=unsubscriptable-object
+
+
+ class FrameworkError(BasicError):
+     """ User-friendly error: raised when the error on the framework side. """
+     pass
+
+
+ class Error(BasicError):
+     """ User-friendly error: raised when the error on the user side. """
+     pass
+
+
+ class InternalError(BasicError):
+     """ Not user-friendly error: user cannot fix it and it points to the bug inside MO. """
+     pass
+
+
+ def classify_error_type(e):
+     patterns = [
+         # Example: No module named 'openvino._offline_transformations.offline_transformations_api'
+         r"No module named \'\S+\'",
+         # Example: cannot import name 'IECore' from 'openvino.inference_engine' (unknown location)
+         r"cannot import name \'\S+\'",
+     ]
+     error_message = str(e)
+     for pattern in patterns:
+         m = re.search(pattern, error_message)
+         if m:
+             return m.group(0)
+     return "undefined"
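The error classes above format their message lazily: args[0] is treated as a format string and the remaining args are substituted when the exception is converted to str. A brief sketch, again assuming the vendored module is importable as openvino.tools.ovc.error:

    from openvino.tools.ovc.error import Error, classify_error_type

    err = Error('Cannot recognize input model: {}', 'model.bin')
    print(str(err))  # -> Cannot recognize input model: model.bin

    # classify_error_type() buckets import-time failures by message pattern.
    try:
        import not_a_real_module  # hypothetical missing dependency
    except ImportError as e:
        print(classify_error_type(e))  # -> No module named 'not_a_real_module'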
intel_npu_acceleration_library/external/openvino/tools/ovc/get_ov_update_message.py
@@ -0,0 +1,16 @@
+ # Copyright (C) 2018-2024 Intel Corporation
+ # SPDX-License-Identifier: Apache-2.0
+
+ import datetime
+
+ msg_fmt = 'Check for a new version of Intel(R) Distribution of OpenVINO(TM) toolkit here {0} ' \
+           'or on https://github.com/openvinotoolkit/openvino'
+
+
+ def get_compression_message():
+     link = "https://docs.openvino.ai/2023.0/openvino_docs_MO_DG_FP16_Compression.html"
+     message = '[ INFO ] Generated IR will be compressed to FP16. ' \
+               'If you get lower accuracy, please consider disabling compression ' \
+               'by removing argument "compress_to_fp16" or set it to false "compress_to_fp16=False".\n' \
+               'Find more information about compression to FP16 at {}'.format(link)
+     return message
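get_compression_message has no side effects; _convert (see the convert_impl.py hunk above) simply prints its return value whenever compress_to_fp16 is enabled. A one-line check, under the same import-path assumption as the earlier sketches:

    from openvino.tools.ovc.get_ov_update_message import get_compression_message

    # Prints the FP16 compression hint exactly as the CLI does.
    print(get_compression_message())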