mindspore 2.4.10__cp310-none-any.whl → 2.5.0__cp310-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of mindspore might be problematic. Click here for more details.

Files changed (688)
  1. mindspore/.commit_id +1 -1
  2. mindspore/Third_Party_Open_Source_Software_Notice +39 -0
  3. mindspore/__init__.py +8 -3
  4. mindspore/_akg/akg/composite/build_module.py +6 -2
  5. mindspore/_akg/akg/utils/kernel_exec.py +2 -2
  6. mindspore/_c_dataengine.cpython-310-aarch64-linux-gnu.so +0 -0
  7. mindspore/_c_expression.cpython-310-aarch64-linux-gnu.so +0 -0
  8. mindspore/_c_mindrecord.cpython-310-aarch64-linux-gnu.so +0 -0
  9. mindspore/_checkparam.py +0 -5
  10. mindspore/_extends/parallel_compile/akg_compiler/gen_custom_op_files.py +1 -1
  11. mindspore/_extends/parse/compile_config.py +64 -0
  12. mindspore/_extends/parse/deprecated/__init__.py +0 -0
  13. mindspore/_extends/parse/deprecated/deprecated_tensor_method.py +375 -0
  14. mindspore/_extends/parse/parser.py +23 -5
  15. mindspore/_extends/parse/standard_method.py +123 -27
  16. mindspore/_extends/pijit/pijit_func_white_list.py +1 -1
  17. mindspore/amp.py +7 -1
  18. mindspore/boost/boost_cell_wrapper.py +136 -41
  19. mindspore/common/__init__.py +3 -1
  20. mindspore/common/_register_for_tensor.py +0 -1
  21. mindspore/common/_stub_tensor.py +25 -4
  22. mindspore/common/_tensor_cpp_method.py +17 -0
  23. mindspore/common/_tensor_docs.py +6132 -0
  24. mindspore/common/api.py +98 -21
  25. mindspore/common/dtype.py +34 -34
  26. mindspore/common/dump.py +2 -1
  27. mindspore/common/file_system.py +8 -3
  28. mindspore/common/generator.py +2 -0
  29. mindspore/common/hook_handle.py +3 -1
  30. mindspore/common/initializer.py +3 -4
  31. mindspore/common/lazy_inline.py +8 -2
  32. mindspore/common/mindir_util.py +10 -2
  33. mindspore/common/parameter.py +31 -15
  34. mindspore/common/tensor.py +713 -1337
  35. mindspore/communication/__init__.py +1 -1
  36. mindspore/communication/_comm_helper.py +5 -0
  37. mindspore/communication/comm_func.py +215 -173
  38. mindspore/communication/management.py +23 -20
  39. mindspore/context.py +285 -191
  40. mindspore/dataset/__init__.py +23 -19
  41. mindspore/dataset/callback/ds_callback.py +2 -1
  42. mindspore/dataset/core/config.py +84 -3
  43. mindspore/dataset/engine/cache_admin.py +3 -3
  44. mindspore/dataset/engine/cache_client.py +5 -4
  45. mindspore/dataset/engine/datasets.py +192 -149
  46. mindspore/dataset/engine/datasets_audio.py +14 -0
  47. mindspore/dataset/engine/datasets_standard_format.py +11 -11
  48. mindspore/dataset/engine/datasets_text.py +38 -1
  49. mindspore/dataset/engine/datasets_user_defined.py +100 -66
  50. mindspore/dataset/engine/datasets_vision.py +81 -8
  51. mindspore/dataset/engine/iterators.py +281 -63
  52. mindspore/dataset/engine/obs/util.py +8 -0
  53. mindspore/dataset/engine/queue.py +40 -0
  54. mindspore/dataset/engine/samplers.py +26 -2
  55. mindspore/dataset/engine/serializer_deserializer.py +1 -1
  56. mindspore/dataset/engine/validators.py +43 -11
  57. mindspore/dataset/transforms/py_transforms_util.py +17 -0
  58. mindspore/dataset/transforms/transforms.py +29 -12
  59. mindspore/dataset/vision/validators.py +1 -2
  60. mindspore/device_context/__init__.py +21 -0
  61. mindspore/device_context/ascend/__init__.py +25 -0
  62. mindspore/device_context/ascend/device.py +72 -0
  63. mindspore/device_context/ascend/op_debug.py +94 -0
  64. mindspore/device_context/ascend/op_precision.py +193 -0
  65. mindspore/device_context/ascend/op_tuning.py +127 -0
  66. mindspore/device_context/cpu/__init__.py +25 -0
  67. mindspore/device_context/cpu/device.py +62 -0
  68. mindspore/device_context/cpu/op_tuning.py +43 -0
  69. mindspore/device_context/gpu/__init__.py +21 -0
  70. mindspore/device_context/gpu/device.py +70 -0
  71. mindspore/device_context/gpu/op_precision.py +67 -0
  72. mindspore/device_context/gpu/op_tuning.py +175 -0
  73. mindspore/device_manager.py +134 -0
  74. mindspore/experimental/llm_boost/__init__.py +1 -0
  75. mindspore/experimental/llm_boost/ascend_native/__init__.py +22 -0
  76. mindspore/experimental/llm_boost/ascend_native/llama_boost_ascend_native.py +211 -0
  77. mindspore/experimental/llm_boost/ascend_native/llm_boost.py +52 -0
  78. mindspore/experimental/llm_boost/atb/boost_base.py +2 -3
  79. mindspore/experimental/llm_boost/atb/llama_boost.py +6 -1
  80. mindspore/experimental/llm_boost/register.py +1 -0
  81. mindspore/experimental/optim/adadelta.py +26 -22
  82. mindspore/experimental/optim/adam.py +3 -0
  83. mindspore/experimental/optim/lr_scheduler.py +33 -24
  84. mindspore/experimental/optim/radam.py +33 -30
  85. mindspore/hal/device.py +28 -0
  86. mindspore/hal/event.py +17 -0
  87. mindspore/hal/memory.py +94 -3
  88. mindspore/hal/stream.py +91 -6
  89. mindspore/include/api/context.h +0 -1
  90. mindspore/lib/libavcodec.so.59 +0 -0
  91. mindspore/lib/libavdevice.so.59 +0 -0
  92. mindspore/lib/libavfilter.so.8 +0 -0
  93. mindspore/lib/libavformat.so.59 +0 -0
  94. mindspore/lib/libavutil.so.57 +0 -0
  95. mindspore/lib/libdnnl.so.2 +0 -0
  96. mindspore/lib/libmindspore_backend.so +0 -0
  97. mindspore/lib/libmindspore_common.so +0 -0
  98. mindspore/lib/libmindspore_core.so +0 -0
  99. mindspore/lib/libmindspore_glog.so.0 +0 -0
  100. mindspore/lib/libmindspore_gpr.so.15 +0 -0
  101. mindspore/lib/libmindspore_grpc++.so.1 +0 -0
  102. mindspore/lib/libmindspore_grpc.so.15 +0 -0
  103. mindspore/lib/libmindspore_ops.so +0 -0
  104. mindspore/lib/libmpi_adapter.so +0 -0
  105. mindspore/lib/libmpi_collective.so +0 -0
  106. mindspore/lib/libnnacl.so +0 -0
  107. mindspore/lib/libopencv_core.so.4.5 +0 -0
  108. mindspore/lib/libps_cache.so +0 -0
  109. mindspore/lib/libswresample.so.4 +0 -0
  110. mindspore/lib/libswscale.so.6 +0 -0
  111. mindspore/lib/plugin/ascend/custom_aicore_ops/op_impl/ai_core/tbe/config/ascend910_93/aic-ascend910_93-ops-info.json +2048 -0
  112. mindspore/lib/plugin/ascend/custom_aicpu_ops/op_impl/cpu/aicpu_kernel/impl/libcust_cpu_kernels.so +0 -0
  113. mindspore/lib/plugin/ascend/custom_aicpu_ops/op_proto/libcust_op_proto.so +0 -0
  114. mindspore/lib/plugin/ascend/custom_ascendc_910/op_api/lib/libcust_opapi.so +0 -0
  115. mindspore/lib/plugin/ascend/custom_ascendc_910/op_impl/ai_core/tbe/custom_ascendc_910_impl/dynamic/decoder_kv_cache.py +1 -1
  116. mindspore/lib/plugin/ascend/custom_ascendc_910/op_impl/ai_core/tbe/custom_ascendc_910_impl/dynamic/prompt_kv_cache.py +1 -1
  117. mindspore/lib/plugin/ascend/custom_ascendc_910/op_impl/ai_core/tbe/op_tiling/lib/linux/aarch64/libcust_opmaster_rt2.0.so +0 -0
  118. mindspore/lib/plugin/ascend/custom_ascendc_910/op_impl/ai_core/tbe/op_tiling/liboptiling.so +0 -0
  119. mindspore/lib/plugin/ascend/custom_ascendc_910/op_proto/lib/linux/aarch64/libcust_opsproto_rt2.0.so +0 -0
  120. mindspore/lib/plugin/ascend/custom_ascendc_910/version.info +1 -1
  121. mindspore/lib/plugin/ascend/custom_ascendc_910b/op_api/lib/libcust_opapi.so +0 -0
  122. mindspore/lib/plugin/ascend/custom_ascendc_910b/op_impl/ai_core/tbe/config/ascend910_93/aic-ascend910_93-ops-info.json +224 -0
  123. mindspore/lib/plugin/ascend/custom_ascendc_910b/op_impl/ai_core/tbe/custom_ascendc_910b_impl/dynamic/all_finite.py +1 -1
  124. mindspore/lib/plugin/ascend/custom_ascendc_910b/op_impl/ai_core/tbe/custom_ascendc_910b_impl/dynamic/decoder_kv_cache.py +1 -1
  125. mindspore/lib/plugin/ascend/custom_ascendc_910b/op_impl/ai_core/tbe/custom_ascendc_910b_impl/dynamic/prompt_kv_cache.py +1 -1
  126. mindspore/lib/plugin/ascend/custom_ascendc_910b/op_impl/ai_core/tbe/kernel/ascend910_93/all_finite/AllFinite_52f59e2a65d9b1bb002de35c2819754a.json +78 -0
  127. mindspore/lib/plugin/ascend/custom_ascendc_910b/op_impl/ai_core/tbe/kernel/ascend910_93/all_finite/AllFinite_52f59e2a65d9b1bb002de35c2819754a.o +0 -0
  128. mindspore/lib/plugin/ascend/custom_ascendc_910b/op_impl/ai_core/tbe/kernel/ascend910_93/all_finite/AllFinite_6b5e50e30256d85838d6ce83514df20f.json +78 -0
  129. mindspore/lib/plugin/ascend/custom_ascendc_910b/op_impl/ai_core/tbe/kernel/ascend910_93/all_finite/AllFinite_6b5e50e30256d85838d6ce83514df20f.o +0 -0
  130. mindspore/lib/plugin/ascend/custom_ascendc_910b/op_impl/ai_core/tbe/kernel/ascend910_93/all_finite/AllFinite_74e4ac02880d452e3308c94af273562e.json +78 -0
  131. mindspore/lib/plugin/ascend/custom_ascendc_910b/op_impl/ai_core/tbe/kernel/ascend910_93/all_finite/AllFinite_74e4ac02880d452e3308c94af273562e.o +0 -0
  132. mindspore/lib/plugin/ascend/custom_ascendc_910b/op_impl/ai_core/tbe/kernel/ascend910_93/decoder_kv_cache/DecoderKvCache_0d5520cc587ad44ce634bf3fbcffc272.json +156 -0
  133. mindspore/lib/plugin/ascend/custom_ascendc_910b/op_impl/ai_core/tbe/kernel/ascend910_93/decoder_kv_cache/DecoderKvCache_0d5520cc587ad44ce634bf3fbcffc272.o +0 -0
  134. mindspore/lib/plugin/ascend/custom_ascendc_910b/op_impl/ai_core/tbe/kernel/ascend910_93/decoder_kv_cache/DecoderKvCache_20390d30b3c4c0d23167ccca6c030c2b.json +156 -0
  135. mindspore/lib/plugin/ascend/custom_ascendc_910b/op_impl/ai_core/tbe/kernel/ascend910_93/decoder_kv_cache/DecoderKvCache_20390d30b3c4c0d23167ccca6c030c2b.o +0 -0
  136. mindspore/lib/plugin/ascend/custom_ascendc_910b/op_impl/ai_core/tbe/kernel/ascend910_93/decoder_kv_cache/DecoderKvCache_2d151f0b1d2db51faa2968d5b67544e2.json +156 -0
  137. mindspore/lib/plugin/ascend/custom_ascendc_910b/op_impl/ai_core/tbe/kernel/ascend910_93/decoder_kv_cache/DecoderKvCache_2d151f0b1d2db51faa2968d5b67544e2.o +0 -0
  138. mindspore/lib/plugin/ascend/custom_ascendc_910b/op_impl/ai_core/tbe/kernel/ascend910_93/decoder_kv_cache/DecoderKvCache_561690ec17cc1def3d2fcf68c1b07b56.json +156 -0
  139. mindspore/lib/plugin/ascend/custom_ascendc_910b/op_impl/ai_core/tbe/kernel/ascend910_93/decoder_kv_cache/DecoderKvCache_561690ec17cc1def3d2fcf68c1b07b56.o +0 -0
  140. mindspore/lib/plugin/ascend/custom_ascendc_910b/op_impl/ai_core/tbe/kernel/ascend910_93/decoder_kv_cache/DecoderKvCache_570f9aaa99e5e773b3dd0a33784363f4.json +156 -0
  141. mindspore/lib/plugin/ascend/custom_ascendc_910b/op_impl/ai_core/tbe/kernel/ascend910_93/decoder_kv_cache/DecoderKvCache_570f9aaa99e5e773b3dd0a33784363f4.o +0 -0
  142. mindspore/lib/plugin/ascend/custom_ascendc_910b/op_impl/ai_core/tbe/kernel/ascend910_93/decoder_kv_cache/DecoderKvCache_59668a0f0764afb98fda8ab9e84126f1.json +156 -0
  143. mindspore/lib/plugin/ascend/custom_ascendc_910b/op_impl/ai_core/tbe/kernel/ascend910_93/decoder_kv_cache/DecoderKvCache_59668a0f0764afb98fda8ab9e84126f1.o +0 -0
  144. mindspore/lib/plugin/ascend/custom_ascendc_910b/op_impl/ai_core/tbe/kernel/ascend910_93/decoder_kv_cache/DecoderKvCache_91d9833e4792b70b670e4e2b916abd86.json +156 -0
  145. mindspore/lib/plugin/ascend/custom_ascendc_910b/op_impl/ai_core/tbe/kernel/ascend910_93/decoder_kv_cache/DecoderKvCache_91d9833e4792b70b670e4e2b916abd86.o +0 -0
  146. mindspore/lib/plugin/ascend/custom_ascendc_910b/op_impl/ai_core/tbe/kernel/ascend910_93/decoder_kv_cache/DecoderKvCache_c74cdc5fef094383401856f8519504af.json +156 -0
  147. mindspore/lib/plugin/ascend/custom_ascendc_910b/op_impl/ai_core/tbe/kernel/ascend910_93/decoder_kv_cache/DecoderKvCache_c74cdc5fef094383401856f8519504af.o +0 -0
  148. mindspore/lib/plugin/ascend/custom_ascendc_910b/op_impl/ai_core/tbe/kernel/ascend910_93/prompt_kv_cache/PromptKvCache_0515c7b1a4cd614449e38c5e9a7e3f8d.json +165 -0
  149. mindspore/lib/plugin/ascend/custom_ascendc_910b/op_impl/ai_core/tbe/kernel/ascend910_93/prompt_kv_cache/PromptKvCache_0515c7b1a4cd614449e38c5e9a7e3f8d.o +0 -0
  150. mindspore/lib/plugin/ascend/custom_ascendc_910b/op_impl/ai_core/tbe/kernel/ascend910_93/prompt_kv_cache/PromptKvCache_09f22d898d6358c91e7c4fc48bac48e7.json +165 -0
  151. mindspore/lib/plugin/ascend/custom_ascendc_910b/op_impl/ai_core/tbe/kernel/ascend910_93/prompt_kv_cache/PromptKvCache_09f22d898d6358c91e7c4fc48bac48e7.o +0 -0
  152. mindspore/lib/plugin/ascend/custom_ascendc_910b/op_impl/ai_core/tbe/kernel/ascend910_93/prompt_kv_cache/PromptKvCache_0cb9a6f894b925250227136e5aab7061.json +165 -0
  153. mindspore/lib/plugin/ascend/custom_ascendc_910b/op_impl/ai_core/tbe/kernel/ascend910_93/prompt_kv_cache/PromptKvCache_0cb9a6f894b925250227136e5aab7061.o +0 -0
  154. mindspore/lib/plugin/ascend/custom_ascendc_910b/op_impl/ai_core/tbe/kernel/ascend910_93/prompt_kv_cache/PromptKvCache_2fa8702ffd7ca85e9e194f62644415d5.json +165 -0
  155. mindspore/lib/plugin/ascend/custom_ascendc_910b/op_impl/ai_core/tbe/kernel/ascend910_93/prompt_kv_cache/PromptKvCache_2fa8702ffd7ca85e9e194f62644415d5.o +0 -0
  156. mindspore/lib/plugin/ascend/custom_ascendc_910b/op_impl/ai_core/tbe/kernel/ascend910_93/prompt_kv_cache/PromptKvCache_570b62f187dfd439b64613d881deedb7.json +165 -0
  157. mindspore/lib/plugin/ascend/custom_ascendc_910b/op_impl/ai_core/tbe/kernel/ascend910_93/prompt_kv_cache/PromptKvCache_570b62f187dfd439b64613d881deedb7.o +0 -0
  158. mindspore/lib/plugin/ascend/custom_ascendc_910b/op_impl/ai_core/tbe/kernel/ascend910_93/prompt_kv_cache/PromptKvCache_585218c11411ff84709b9e725b66c435.json +165 -0
  159. mindspore/lib/plugin/ascend/custom_ascendc_910b/op_impl/ai_core/tbe/kernel/ascend910_93/prompt_kv_cache/PromptKvCache_585218c11411ff84709b9e725b66c435.o +0 -0
  160. mindspore/lib/plugin/ascend/custom_ascendc_910b/op_impl/ai_core/tbe/kernel/ascend910_93/prompt_kv_cache/PromptKvCache_5c9365ccde170b358c5b126d69dae13e.json +165 -0
  161. mindspore/lib/plugin/ascend/custom_ascendc_910b/op_impl/ai_core/tbe/kernel/ascend910_93/prompt_kv_cache/PromptKvCache_5c9365ccde170b358c5b126d69dae13e.o +0 -0
  162. mindspore/lib/plugin/ascend/custom_ascendc_910b/op_impl/ai_core/tbe/kernel/ascend910_93/prompt_kv_cache/PromptKvCache_6d97c45b7c43bc16fcff8baa5dacac4e.json +165 -0
  163. mindspore/lib/plugin/ascend/custom_ascendc_910b/op_impl/ai_core/tbe/kernel/ascend910_93/prompt_kv_cache/PromptKvCache_6d97c45b7c43bc16fcff8baa5dacac4e.o +0 -0
  164. mindspore/lib/plugin/ascend/custom_ascendc_910b/op_impl/ai_core/tbe/kernel/config/ascend910_93/all_finite.json +139 -0
  165. mindspore/lib/plugin/ascend/custom_ascendc_910b/op_impl/ai_core/tbe/kernel/config/ascend910_93/binary_info_config.json +361 -0
  166. mindspore/lib/plugin/ascend/custom_ascendc_910b/op_impl/ai_core/tbe/kernel/config/ascend910_93/decoder_kv_cache.json +892 -0
  167. mindspore/lib/plugin/ascend/custom_ascendc_910b/op_impl/ai_core/tbe/kernel/config/ascend910_93/prompt_kv_cache.json +892 -0
  168. mindspore/lib/plugin/ascend/custom_ascendc_910b/op_impl/ai_core/tbe/op_tiling/lib/linux/aarch64/libcust_opmaster_rt2.0.so +0 -0
  169. mindspore/lib/plugin/ascend/custom_ascendc_910b/op_impl/ai_core/tbe/op_tiling/liboptiling.so +0 -0
  170. mindspore/lib/plugin/ascend/custom_ascendc_910b/op_proto/lib/linux/aarch64/libcust_opsproto_rt2.0.so +0 -0
  171. mindspore/lib/plugin/ascend/custom_ascendc_910b/version.info +1 -1
  172. mindspore/lib/plugin/ascend/custom_compiler/setup.py +1 -1
  173. mindspore/lib/plugin/ascend/libascend_collective.so +0 -0
  174. mindspore/lib/plugin/ascend/libdvpp_utils.so +0 -0
  175. mindspore/lib/plugin/ascend/liblowlatency_collective.so +0 -0
  176. mindspore/lib/plugin/ascend/libmindspore_cpu_kernels.so +0 -0
  177. mindspore/lib/plugin/ascend/libmindspore_internal_kernels.so +0 -0
  178. mindspore/lib/plugin/ascend/libms_ascend_native_boost.so +0 -0
  179. mindspore/lib/plugin/ascend/libms_atb_boost.so +0 -0
  180. mindspore/lib/plugin/ascend/ms_kernels_internal/asdops/device/ascend910b/bin/ascend910b.bin +957 -955
  181. mindspore/lib/plugin/ascend/ms_kernels_internal/asdops/lib/libasdops_static.a +0 -0
  182. mindspore/lib/plugin/ascend/ms_kernels_internal/asdops/lib/liblcal_static.a +0 -0
  183. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/{acme/include/base_type.h → base_type.h} +25 -20
  184. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/{cast/cast_tiling.h → internal.h} +6 -4
  185. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/internal_op.h +114 -0
  186. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/llm/boost_kernel.h +70 -0
  187. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/llm/llama_impl.h +85 -0
  188. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/llm/model_interface.h +52 -0
  189. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/llm/tensor.h +81 -0
  190. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/op_creator.h +123 -0
  191. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/op_param.h +155 -110
  192. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/{acme/include/tiling_info.h → tiling_info.h} +12 -9
  193. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/tiling_utils.h +178 -0
  194. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/lib/libadd_layer_norm_op.so +0 -0
  195. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/lib/libadd_rms_norm_op.so +0 -0
  196. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/lib/libadd_rms_norm_quant_op.so +0 -0
  197. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/lib/libapply_rotary_pos_emb_310p_op.so +0 -0
  198. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/lib/libapply_rotary_pos_emb_op.so +0 -0
  199. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/lib/libcast_op.so +0 -0
  200. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/lib/libcompare_op.so +0 -0
  201. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/lib/libgelu_op.so +0 -0
  202. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/lib/libllama_op.so +0 -0
  203. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/lib/libmatmul_op.so +0 -0
  204. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/lib/libms_kernels_internal.so +0 -0
  205. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/lib/libms_optiling.so +0 -0
  206. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/lib/libmulti_weight_matmul_kernel_op.so +0 -0
  207. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/lib/libreshape_and_cache_nz_op.so +0 -0
  208. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/lib/libreshape_and_cache_op.so +0 -0
  209. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/lib/librms_norm_op.so +0 -0
  210. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/op_kernels/ascend310p/object_kernels/internal_pp_matmul_f16_nz/internal_pp_matmul_f16_nz.o +0 -0
  211. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/op_kernels/ascend310p/object_kernels/internal_pp_matmul_f16_nz/internal_pp_matmul_f16_nz_0.o +0 -0
  212. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/op_kernels/ascend310p/object_kernels/internal_pp_matmul_i8_nz_compress/internal_pp_matmul_i8_nz_compress.o +0 -0
  213. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/op_kernels/ascend310p/object_kernels/internal_pp_matmul_i8_nz_compress/internal_pp_matmul_i8_nz_compress_0.o +0 -0
  214. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/op_kernels/ascend310p/object_kernels/internal_pp_matmul_int8_nz/internal_pp_matmul_int8_nz.o +0 -0
  215. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/op_kernels/ascend310p/object_kernels/internal_pp_matmul_int8_nz/internal_pp_matmul_int8_nz_0.o +0 -0
  216. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/op_kernels/ascend310p/so_kernels/libadd_rms_norm_quant_ascend310p.so +0 -0
  217. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/{lib/libapply_rotary_pos_emb_310p_impl.so → op_kernels/ascend310p/so_kernels/libapply_rotary_pos_emb_310p_ascend310p.so} +0 -0
  218. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/op_kernels/ascend310p/so_kernels/libcast_ascend310p.so +0 -0
  219. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/op_kernels/ascend310p/so_kernels/libcompare_ascend310p.so +0 -0
  220. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/op_kernels/ascend310p/so_kernels/libgelu_ascend310p.so +0 -0
  221. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/op_kernels/ascend310p/so_kernels/libmatmul_ascend310p.so +0 -0
  222. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/op_kernels/ascend310p/so_kernels/libreshape_and_cache_nz_ascend310p.so +0 -0
  223. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/op_kernels/ascend910b/hphol_kernels/add_rms_norm_dynamic_quant/AddRmsNormDynamicQuant_4b60f88cdc28b25a36bad2d8b0a88092.json +163 -0
  224. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/op_kernels/ascend910b/hphol_kernels/add_rms_norm_dynamic_quant/AddRmsNormDynamicQuant_4b60f88cdc28b25a36bad2d8b0a88092.o +0 -0
  225. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/op_kernels/ascend910b/hphol_kernels/add_rms_norm_dynamic_quant/AddRmsNormDynamicQuant_cde61da2bd6fededcb1ba310a6ad16ee.json +163 -0
  226. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/op_kernels/ascend910b/hphol_kernels/add_rms_norm_dynamic_quant/AddRmsNormDynamicQuant_cde61da2bd6fededcb1ba310a6ad16ee.o +0 -0
  227. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/op_kernels/ascend910b/object_kernels/flash_attention_score/flash_attention_score_bf16_bnsd_full_mix.o +0 -0
  228. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/op_kernels/ascend910b/object_kernels/flash_attention_score/flash_attention_score_bf16_bnsd_tri_mix.o +0 -0
  229. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/op_kernels/ascend910b/object_kernels/flash_attention_score/flash_attention_score_bf16_bsh_full_mix.o +0 -0
  230. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/op_kernels/ascend910b/object_kernels/flash_attention_score/flash_attention_score_bf16_bsh_tri_mix.o +0 -0
  231. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/op_kernels/ascend910b/object_kernels/flash_attention_score/flash_attention_score_fp16_bnsd_full_mix.o +0 -0
  232. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/op_kernels/ascend910b/object_kernels/flash_attention_score/flash_attention_score_fp16_bnsd_tri_mix.o +0 -0
  233. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/op_kernels/ascend910b/object_kernels/flash_attention_score/flash_attention_score_fp16_bsh_full_mix.o +0 -0
  234. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/op_kernels/ascend910b/object_kernels/flash_attention_score/flash_attention_score_fp16_bsh_tri_mix.o +0 -0
  235. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/op_kernels/ascend910b/object_kernels/internal_matmul_postfusion_mix/internal_matmul_postfusion_mix.o +0 -0
  236. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/op_kernels/ascend910b/object_kernels/internal_matmul_postfusion_mix/internal_matmul_postfusion_mix_mix_aic_0.o +0 -0
  237. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/op_kernels/ascend910b/object_kernels/internal_matmul_postfusion_mix/internal_matmul_postfusion_mix_mix_aiv_0.o +0 -0
  238. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/op_kernels/ascend910b/object_kernels/internal_multi_weight_matmul_postfusion_mix/internal_multi_weight_matmul_postfusion_mix.o +0 -0
  239. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/op_kernels/ascend910b/object_kernels/internal_multi_weight_matmul_postfusion_mix/internal_multi_weight_matmul_postfusion_mix_mix_aic_0.o +0 -0
  240. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/op_kernels/ascend910b/object_kernels/internal_multi_weight_matmul_postfusion_mix/internal_multi_weight_matmul_postfusion_mix_mix_aiv_0.o +0 -0
  241. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/op_kernels/ascend910b/{matmul_add_rmsnorm → object_kernels/matmul_add_rmsnorm}/matmul_add_rmsnorm_bf16_bf16.o +0 -0
  242. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/op_kernels/ascend910b/{matmul_add_rmsnorm → object_kernels/matmul_add_rmsnorm}/matmul_add_rmsnorm_bf16_fp16.o +0 -0
  243. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/op_kernels/ascend910b/{matmul_add_rmsnorm → object_kernels/matmul_add_rmsnorm}/matmul_add_rmsnorm_bf16_fp32.o +0 -0
  244. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/op_kernels/ascend910b/{matmul_add_rmsnorm → object_kernels/matmul_add_rmsnorm}/matmul_add_rmsnorm_fp16_bf16.o +0 -0
  245. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/op_kernels/ascend910b/{matmul_add_rmsnorm → object_kernels/matmul_add_rmsnorm}/matmul_add_rmsnorm_fp16_fp16.o +0 -0
  246. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/op_kernels/ascend910b/{matmul_add_rmsnorm → object_kernels/matmul_add_rmsnorm}/matmul_add_rmsnorm_fp16_fp32.o +0 -0
  247. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/op_kernels/ascend910b/object_kernels/paged_attention_v2/paged_attention_v2.o +0 -0
  248. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/op_kernels/ascend910b/object_kernels/paged_attention_v2/paged_attention_v2_mix_aic_0.o +0 -0
  249. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/op_kernels/ascend910b/object_kernels/paged_attention_v2/paged_attention_v2_mix_aiv_0.o +0 -0
  250. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/{lib/libadd_layer_norm_impl.so → op_kernels/ascend910b/so_kernels/libadd_layer_norm_ascend910b.so} +0 -0
  251. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/{lib/libadd_rms_norm_impl.so → op_kernels/ascend910b/so_kernels/libadd_rms_norm_ascend910b.so} +0 -0
  252. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/op_kernels/ascend910b/so_kernels/libadd_rms_norm_quant_ascend910b.so +0 -0
  253. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/{lib/libapply_rotary_pos_emb_impl.so → op_kernels/ascend910b/so_kernels/libapply_rotary_pos_emb_ascend910b.so} +0 -0
  254. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/{lib/libcast_impl.so → op_kernels/ascend910b/so_kernels/libcast_ascend910b.so} +0 -0
  255. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/{lib/libnot_equal_impl.so → op_kernels/ascend910b/so_kernels/libcompare_ascend910b.so} +0 -0
  256. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/{lib/libgelu_impl.so → op_kernels/ascend910b/so_kernels/libgelu_ascend910b.so} +0 -0
  257. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/op_kernels/ascend910b/so_kernels/libllama_ascend910b.so +0 -0
  258. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/{lib/libmatmul_impl.so → op_kernels/ascend910b/so_kernels/libmatmul_ascend910b.so} +0 -0
  259. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/{lib/libmulti_weight_matmul_kernel_impl.so → op_kernels/ascend910b/so_kernels/libmulti_weight_matmul_kernel_ascend910b.so} +0 -0
  260. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/{lib/libreshape_and_cache_impl.so → op_kernels/ascend910b/so_kernels/libreshape_and_cache_ascend910b.so} +0 -0
  261. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/{lib/librms_norm_impl.so → op_kernels/ascend910b/so_kernels/librms_norm_ascend910b.so} +0 -0
  262. mindspore/lib/plugin/ascend/ms_kernels_internal/lccl/lib/liblccl_wrapper.so +0 -0
  263. mindspore/lib/plugin/libmindspore_ascend.so.2 +0 -0
  264. mindspore/log.py +12 -0
  265. mindspore/mindrecord/__init__.py +1 -1
  266. mindspore/mindrecord/config.py +17 -316
  267. mindspore/mindrecord/filereader.py +1 -9
  268. mindspore/mindrecord/filewriter.py +5 -15
  269. mindspore/mindrecord/mindpage.py +1 -9
  270. mindspore/mint/__init__.py +824 -218
  271. mindspore/mint/distributed/__init__.py +66 -4
  272. mindspore/mint/distributed/distributed.py +2594 -44
  273. mindspore/mint/linalg/__init__.py +6 -0
  274. mindspore/mint/nn/__init__.py +473 -14
  275. mindspore/mint/nn/functional.py +486 -11
  276. mindspore/mint/nn/layer/__init__.py +17 -4
  277. mindspore/mint/nn/layer/_functions.py +330 -0
  278. mindspore/mint/nn/layer/activation.py +169 -1
  279. mindspore/mint/nn/layer/basic.py +123 -0
  280. mindspore/mint/nn/layer/conv.py +727 -0
  281. mindspore/mint/nn/layer/normalization.py +215 -19
  282. mindspore/mint/nn/layer/padding.py +797 -0
  283. mindspore/mint/nn/layer/pooling.py +170 -0
  284. mindspore/mint/optim/__init__.py +2 -1
  285. mindspore/mint/optim/adam.py +223 -0
  286. mindspore/mint/optim/adamw.py +26 -19
  287. mindspore/mint/special/__init__.py +2 -1
  288. mindspore/multiprocessing/__init__.py +5 -0
  289. mindspore/nn/cell.py +126 -19
  290. mindspore/nn/dynamic_lr.py +2 -1
  291. mindspore/nn/layer/activation.py +6 -6
  292. mindspore/nn/layer/basic.py +35 -25
  293. mindspore/nn/layer/channel_shuffle.py +3 -3
  294. mindspore/nn/layer/embedding.py +3 -3
  295. mindspore/nn/layer/normalization.py +8 -7
  296. mindspore/nn/layer/padding.py +4 -3
  297. mindspore/nn/layer/pooling.py +47 -13
  298. mindspore/nn/layer/rnn_cells.py +1 -1
  299. mindspore/nn/layer/rnns.py +2 -1
  300. mindspore/nn/layer/timedistributed.py +5 -5
  301. mindspore/nn/layer/transformer.py +48 -26
  302. mindspore/nn/learning_rate_schedule.py +5 -3
  303. mindspore/nn/loss/loss.py +31 -36
  304. mindspore/nn/optim/ada_grad.py +1 -0
  305. mindspore/nn/optim/adadelta.py +2 -2
  306. mindspore/nn/optim/adam.py +1 -1
  307. mindspore/nn/optim/lars.py +1 -4
  308. mindspore/nn/optim/optimizer.py +1 -1
  309. mindspore/nn/optim/rprop.py +2 -2
  310. mindspore/nn/optim/thor.py +2 -1
  311. mindspore/nn/utils/init.py +13 -11
  312. mindspore/nn/wrap/cell_wrapper.py +4 -6
  313. mindspore/nn/wrap/loss_scale.py +3 -4
  314. mindspore/numpy/array_creations.py +60 -62
  315. mindspore/numpy/array_ops.py +148 -143
  316. mindspore/numpy/logic_ops.py +41 -42
  317. mindspore/numpy/math_ops.py +361 -359
  318. mindspore/numpy/utils.py +16 -16
  319. mindspore/numpy/utils_const.py +4 -4
  320. mindspore/ops/__init__.py +2 -1
  321. mindspore/ops/_grad_experimental/grad_comm_ops.py +94 -13
  322. mindspore/ops/_grad_experimental/grad_debug_ops.py +6 -1
  323. mindspore/ops/_grad_experimental/grad_inner_ops.py +9 -0
  324. mindspore/ops/_grad_experimental/grad_math_ops.py +2 -1
  325. mindspore/ops/_op_impl/cpu/__init__.py +1 -0
  326. mindspore/ops/_op_impl/cpu/raise_op.py +28 -0
  327. mindspore/ops/_vmap/vmap_array_ops.py +20 -19
  328. mindspore/ops/_vmap/vmap_base.py +0 -2
  329. mindspore/ops/_vmap/vmap_grad_nn_ops.py +19 -13
  330. mindspore/ops/_vmap/vmap_math_ops.py +11 -9
  331. mindspore/ops/_vmap/vmap_nn_ops.py +20 -34
  332. mindspore/ops/auto_generate/cpp_create_prim_instance_helper.py +149 -12
  333. mindspore/ops/auto_generate/gen_arg_handler.py +0 -61
  334. mindspore/ops/auto_generate/gen_extend_func.py +554 -60
  335. mindspore/ops/auto_generate/gen_ops_def.py +1621 -115
  336. mindspore/ops/auto_generate/gen_ops_prim.py +8024 -3409
  337. mindspore/ops/auto_generate/pyboost_inner_prim.py +183 -79
  338. mindspore/ops/composite/base.py +1 -1
  339. mindspore/ops/composite/multitype_ops/_compile_utils.py +229 -30
  340. mindspore/ops/composite/multitype_ops/pow_impl.py +0 -29
  341. mindspore/ops/function/__init__.py +12 -0
  342. mindspore/ops/function/array_func.py +561 -159
  343. mindspore/ops/function/clip_func.py +64 -0
  344. mindspore/ops/function/debug_func.py +28 -20
  345. mindspore/ops/function/image_func.py +1 -1
  346. mindspore/ops/function/linalg_func.py +5 -4
  347. mindspore/ops/function/math_func.py +1659 -290
  348. mindspore/ops/function/nn_func.py +988 -317
  349. mindspore/ops/function/parameter_func.py +3 -56
  350. mindspore/ops/function/random_func.py +243 -33
  351. mindspore/ops/function/sparse_unary_func.py +1 -1
  352. mindspore/ops/functional.py +18 -5
  353. mindspore/ops/functional_overload.py +897 -0
  354. mindspore/ops/operations/__init__.py +3 -2
  355. mindspore/ops/operations/_embedding_cache_ops.py +4 -4
  356. mindspore/ops/operations/_grad_ops.py +2 -34
  357. mindspore/ops/operations/_infer_ops.py +2 -1
  358. mindspore/ops/operations/_inner_ops.py +38 -8
  359. mindspore/ops/operations/array_ops.py +45 -303
  360. mindspore/ops/operations/comm_ops.py +19 -16
  361. mindspore/ops/operations/custom_ops.py +11 -55
  362. mindspore/ops/operations/debug_ops.py +42 -47
  363. mindspore/ops/operations/inner_ops.py +6 -4
  364. mindspore/ops/operations/linalg_ops.py +3 -2
  365. mindspore/ops/operations/manually_defined/ops_def.py +185 -104
  366. mindspore/ops/operations/math_ops.py +11 -216
  367. mindspore/ops/operations/nn_ops.py +146 -308
  368. mindspore/ops/primitive.py +23 -21
  369. mindspore/ops/tensor_method.py +1669 -0
  370. mindspore/ops_generate/aclnn_kernel_register_auto_cc_generator.py +110 -0
  371. mindspore/ops_generate/add_tensor_docs_generator.py +54 -0
  372. mindspore/ops_generate/arg_handler.py +0 -61
  373. mindspore/ops_generate/auto_grad_impl_cc_generator.py +135 -0
  374. mindspore/ops_generate/auto_grad_reg_cc_generator.py +93 -0
  375. mindspore/ops_generate/base_generator.py +11 -0
  376. mindspore/ops_generate/cpp_create_prim_instance_helper_generator.py +108 -0
  377. mindspore/ops_generate/functional_map_cpp_generator.py +491 -0
  378. mindspore/ops_generate/functional_overload_py_generator.py +110 -0
  379. mindspore/ops_generate/functions_cc_generator.py +233 -0
  380. mindspore/ops_generate/gen_aclnn_implement.py +110 -114
  381. mindspore/ops_generate/gen_constants.py +157 -3
  382. mindspore/ops_generate/gen_ops.py +245 -990
  383. mindspore/ops_generate/gen_pyboost_func.py +97 -998
  384. mindspore/ops_generate/gen_utils.py +119 -33
  385. mindspore/ops_generate/lite_ops_cpp_generator.py +155 -0
  386. mindspore/ops_generate/op_api_proto.py +206 -0
  387. mindspore/ops_generate/op_def_py_generator.py +131 -0
  388. mindspore/ops_generate/op_prim_py_generator.py +480 -0
  389. mindspore/ops_generate/op_proto.py +373 -108
  390. mindspore/ops_generate/op_template_parser.py +436 -0
  391. mindspore/ops_generate/ops_def_cc_generator.py +288 -0
  392. mindspore/ops_generate/ops_def_h_generator.py +74 -0
  393. mindspore/ops_generate/ops_name_h_generator.py +68 -0
  394. mindspore/ops_generate/ops_primitive_h_generator.py +81 -0
  395. mindspore/ops_generate/pyboost_functions_cpp_generator.py +370 -0
  396. mindspore/ops_generate/pyboost_functions_h_generator.py +68 -0
  397. mindspore/ops_generate/pyboost_functions_py_generator.py +148 -0
  398. mindspore/ops_generate/pyboost_grad_function_cpp_generator.py +154 -0
  399. mindspore/ops_generate/pyboost_inner_prim_generator.py +131 -0
  400. mindspore/ops_generate/pyboost_native_grad_functions_generator.py +268 -0
  401. mindspore/ops_generate/pyboost_op_cpp_code_generator.py +851 -0
  402. mindspore/ops_generate/pyboost_overload_functions_cpp_generator.py +344 -0
  403. mindspore/ops_generate/pyboost_utils.py +92 -33
  404. mindspore/ops_generate/template.py +294 -44
  405. mindspore/ops_generate/tensor_func_reg_cpp_generator.py +422 -0
  406. mindspore/parallel/__init__.py +3 -3
  407. mindspore/parallel/_auto_parallel_context.py +24 -33
  408. mindspore/parallel/_parallel_serialization.py +13 -2
  409. mindspore/parallel/_utils.py +4 -1
  410. mindspore/parallel/algo_parameter_config.py +1 -1
  411. mindspore/parallel/checkpoint_transform.py +44 -0
  412. mindspore/parallel/cluster/process_entity/_api.py +131 -37
  413. mindspore/parallel/cluster/process_entity/_utils.py +41 -6
  414. mindspore/parallel/cluster/run.py +20 -3
  415. mindspore/parallel/parameter_broadcast.py +1 -1
  416. mindspore/parallel/shard.py +3 -0
  417. mindspore/parallel/transform_safetensors.py +119 -253
  418. mindspore/profiler/__init__.py +17 -4
  419. mindspore/profiler/analysis/__init__.py +0 -0
  420. mindspore/profiler/analysis/parser/__init__.py +0 -0
  421. mindspore/profiler/analysis/parser/ascend_cann_parser.py +166 -0
  422. mindspore/profiler/analysis/parser/base_parser.py +158 -0
  423. mindspore/profiler/analysis/parser/framework_cann_relation_parser.py +45 -0
  424. mindspore/profiler/analysis/parser/ms_framework_parser.py +142 -0
  425. mindspore/profiler/analysis/parser/ms_minddata_parser.py +145 -0
  426. mindspore/profiler/analysis/parser/timeline_assembly_factory/__init__.py +0 -0
  427. mindspore/profiler/analysis/parser/timeline_assembly_factory/ascend_timeline_assembler.py +261 -0
  428. mindspore/profiler/analysis/parser/timeline_assembly_factory/base_timeline_assembler.py +40 -0
  429. mindspore/profiler/analysis/parser/timeline_assembly_factory/trace_view_container.py +84 -0
  430. mindspore/profiler/analysis/parser/timeline_creator/__init__.py +0 -0
  431. mindspore/profiler/analysis/parser/timeline_creator/base_timeline_creator.py +44 -0
  432. mindspore/profiler/analysis/parser/timeline_creator/cpu_op_timeline_creator.py +90 -0
  433. mindspore/profiler/analysis/parser/timeline_creator/fwk_timeline_creator.py +76 -0
  434. mindspore/profiler/analysis/parser/timeline_creator/msprof_timeline_creator.py +103 -0
  435. mindspore/profiler/analysis/parser/timeline_creator/scope_layer_timeline_creator.py +134 -0
  436. mindspore/profiler/analysis/parser/timeline_event/__init__.py +0 -0
  437. mindspore/profiler/analysis/parser/timeline_event/base_event.py +233 -0
  438. mindspore/profiler/analysis/parser/timeline_event/cpu_op_event.py +47 -0
  439. mindspore/profiler/analysis/parser/timeline_event/flow_event.py +36 -0
  440. mindspore/profiler/analysis/parser/timeline_event/fwk_event.py +260 -0
  441. mindspore/profiler/analysis/parser/timeline_event/msprof_event.py +73 -0
  442. mindspore/profiler/analysis/parser/timeline_event/scope_layer_event.py +53 -0
  443. mindspore/profiler/analysis/parser/timeline_event/timeline_event_pool.py +146 -0
  444. mindspore/profiler/analysis/task_manager.py +131 -0
  445. mindspore/profiler/analysis/time_converter.py +84 -0
  446. mindspore/profiler/analysis/viewer/__init__.py +0 -0
  447. mindspore/profiler/analysis/viewer/ascend_communication_viewer.py +333 -0
  448. mindspore/profiler/analysis/viewer/ascend_integrate_viewer.py +87 -0
  449. mindspore/profiler/analysis/viewer/ascend_kernel_details_viewer.py +252 -0
  450. mindspore/profiler/analysis/viewer/ascend_memory_viewer.py +313 -0
  451. mindspore/profiler/analysis/viewer/ascend_op_memory_viewer.py +322 -0
  452. mindspore/profiler/analysis/viewer/ascend_step_trace_time_viewer.py +265 -0
  453. mindspore/profiler/analysis/viewer/ascend_timeline_viewer.py +58 -0
  454. mindspore/profiler/analysis/viewer/base_viewer.py +26 -0
  455. mindspore/profiler/analysis/viewer/ms_dataset_viewer.py +97 -0
  456. mindspore/profiler/analysis/viewer/ms_minddata_viewer.py +581 -0
  457. mindspore/profiler/analysis/work_flow.py +73 -0
  458. mindspore/profiler/common/ascend_msprof_exporter.py +138 -0
  459. mindspore/profiler/common/command_executor.py +90 -0
  460. mindspore/profiler/common/constant.py +174 -3
  461. mindspore/profiler/common/file_manager.py +208 -0
  462. mindspore/profiler/common/log.py +130 -0
  463. mindspore/profiler/common/msprof_cmd_tool.py +202 -0
  464. mindspore/profiler/common/path_manager.py +371 -0
  465. mindspore/profiler/common/process_bar.py +168 -0
  466. mindspore/profiler/common/process_pool.py +9 -3
  467. mindspore/profiler/common/profiler_context.py +476 -0
  468. mindspore/profiler/common/profiler_info.py +304 -0
  469. mindspore/profiler/common/profiler_output_path.py +284 -0
  470. mindspore/profiler/common/profiler_parameters.py +210 -0
  471. mindspore/profiler/common/profiler_path_manager.py +120 -0
  472. mindspore/profiler/common/record_function.py +76 -0
  473. mindspore/profiler/common/tlv_decoder.py +76 -0
  474. mindspore/profiler/common/util.py +75 -2
  475. mindspore/profiler/dynamic_profiler.py +270 -37
  476. mindspore/profiler/envprofiler.py +138 -0
  477. mindspore/profiler/mstx.py +199 -0
  478. mindspore/profiler/platform/__init__.py +21 -0
  479. mindspore/profiler/platform/base_profiler.py +40 -0
  480. mindspore/profiler/platform/cpu_profiler.py +124 -0
  481. mindspore/profiler/platform/gpu_profiler.py +74 -0
  482. mindspore/profiler/platform/npu_profiler.py +309 -0
  483. mindspore/profiler/profiler.py +580 -93
  484. mindspore/profiler/profiler_action_controller.py +187 -0
  485. mindspore/profiler/profiler_interface.py +114 -0
  486. mindspore/profiler/schedule.py +208 -0
  487. mindspore/rewrite/api/symbol_tree.py +1 -2
  488. mindspore/run_check/_check_version.py +2 -6
  489. mindspore/runtime/__init__.py +37 -0
  490. mindspore/runtime/device.py +27 -0
  491. mindspore/runtime/event.py +209 -0
  492. mindspore/runtime/executor.py +148 -0
  493. mindspore/runtime/memory.py +392 -0
  494. mindspore/runtime/stream.py +460 -0
  495. mindspore/runtime/thread_bind_core.py +401 -0
  496. mindspore/train/__init__.py +2 -2
  497. mindspore/train/_utils.py +53 -18
  498. mindspore/train/amp.py +8 -4
  499. mindspore/train/callback/_checkpoint.py +32 -18
  500. mindspore/train/callback/_early_stop.py +1 -1
  501. mindspore/train/callback/_flops_collector.py +105 -69
  502. mindspore/train/callback/_history.py +1 -1
  503. mindspore/train/callback/_summary_collector.py +44 -6
  504. mindspore/train/callback/_tft_register.py +31 -10
  505. mindspore/train/dataset_helper.py +11 -11
  506. mindspore/train/metrics/precision.py +4 -5
  507. mindspore/train/mind_ir_pb2.py +167 -46
  508. mindspore/train/model.py +13 -15
  509. mindspore/train/serialization.py +462 -76
  510. mindspore/train/summary/summary_record.py +1 -2
  511. mindspore/train/train_thor/model_thor.py +1 -1
  512. mindspore/utils/__init__.py +4 -2
  513. mindspore/utils/bin/dataset-cache +0 -0
  514. mindspore/utils/bin/dataset-cache-server +0 -0
  515. mindspore/utils/dryrun.py +138 -0
  516. mindspore/utils/runtime_execution_order_check.py +550 -0
  517. mindspore/version.py +1 -1
  518. {mindspore-2.4.10.dist-info → mindspore-2.5.0.dist-info}/METADATA +2 -3
  519. {mindspore-2.4.10.dist-info → mindspore-2.5.0.dist-info}/RECORD +522 -456
  520. {mindspore-2.4.10.dist-info → mindspore-2.5.0.dist-info}/entry_points.txt +1 -1
  521. mindspore/_data_dump.cpython-310-aarch64-linux-gnu.so +0 -0
  522. mindspore/bin/cache_admin +0 -0
  523. mindspore/bin/cache_server +0 -0
  524. mindspore/common/_tensor_overload.py +0 -139
  525. mindspore/lib/libmindspore_np_dtype.so +0 -0
  526. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/acme/include/acme.h +0 -24
  527. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/acme/include/acme_op.h +0 -82
  528. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/acme/include/op_creator.h +0 -113
  529. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/acme/include/op_param.h +0 -193
  530. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/acme/src/core/dtype_registry.h +0 -90
  531. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/acme/src/core/kernel_register.h +0 -46
  532. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/acme/src/core/platform/platform_configs.h +0 -89
  533. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/acme/src/core/platform/rt_funcs.h +0 -135
  534. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/acme/src/ops/host_src/add_layer_norm_op.h +0 -60
  535. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/acme/src/ops/host_src/add_rms_norm_op.h +0 -50
  536. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/acme/src/ops/host_src/add_rms_norm_quant_op.h +0 -50
  537. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/acme/src/ops/host_src/apply_rotary_pos_emb_nz_op.h +0 -42
  538. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/acme/src/ops/host_src/apply_rotary_pos_emb_op.h +0 -55
  539. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/acme/src/ops/host_src/asd_elewise_op.h +0 -34
  540. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/acme/src/ops/host_src/asd_only_ops.h +0 -94
  541. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/acme/src/ops/host_src/asd_op_base.h +0 -97
  542. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/acme/src/ops/host_src/cast_op.h +0 -52
  543. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/acme/src/ops/host_src/flash_attention_score_op.h +0 -97
  544. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/acme/src/ops/host_src/gelu_op.h +0 -44
  545. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/acme/src/ops/host_src/matmul_add_rmsnorm_op.h +0 -73
  546. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/acme/src/ops/host_src/matmul_op.h +0 -108
  547. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/acme/src/ops/host_src/multi_impls_op.h +0 -64
  548. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/acme/src/ops/host_src/multi_weight_matmul_op.h +0 -91
  549. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/acme/src/ops/host_src/paged_attention_op.h +0 -99
  550. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/acme/src/ops/host_src/reshape_and_cache_nz_op.h +0 -44
  551. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/acme/src/ops/host_src/reshape_and_cache_op.h +0 -44
  552. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/acme/src/ops/host_src/rms_norm_op.h +0 -64
  553. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/acme/src/utils/asd_utils.h +0 -179
  554. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/acme/src/utils/comm_utils.h +0 -69
  555. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/acme/src/utils/profiling_util.h +0 -366
  556. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/add/add_impl.h +0 -56
  557. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/add/kernel/add.h +0 -21
  558. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/add/tiling/add_tiling.h +0 -43
  559. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/apply_rotary_pos_emb/apply_rotary_pos_emb_impl.h +0 -46
  560. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/apply_rotary_pos_emb/kernel/apply_rotary_pos_emb.h +0 -23
  561. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/apply_rotary_pos_emb/kernel/apply_rotary_pos_emb_base.h +0 -456
  562. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/apply_rotary_pos_emb/kernel/apply_rotary_pos_emb_bf16.h +0 -217
  563. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/apply_rotary_pos_emb/kernel/apply_rotary_pos_emb_fp.h +0 -391
  564. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/apply_rotary_pos_emb/kernel/apply_rotary_pos_emb_fp16.h +0 -126
  565. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/apply_rotary_pos_emb/kernel/apply_rotary_pos_emb_fp32.h +0 -230
  566. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/apply_rotary_pos_emb/kernel/apply_rotary_pos_emb_tiling.h +0 -43
  567. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/apply_rotary_pos_emb/kernel/apply_rotary_pos_emb_value.h +0 -27
  568. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/apply_rotary_pos_emb_nz/apply_rotary_pos_emb_nz_impl.h +0 -34
  569. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/apply_rotary_pos_emb_nz/kernel/apply_rotary_pos_emb_nz.h +0 -23
  570. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/apply_rotary_pos_emb_nz/kernel/apply_rotary_pos_emb_nz_base.h +0 -460
  571. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/apply_rotary_pos_emb_nz/kernel/apply_rotary_pos_emb_nz_fp16.h +0 -116
  572. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/apply_rotary_pos_emb_nz/kernel/apply_rotary_pos_emb_nz_fp32.h +0 -230
  573. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/apply_rotary_pos_emb_nz/kernel/apply_rotary_pos_emb_nz_tiling.h +0 -43
  574. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/apply_rotary_pos_emb_nz/kernel/apply_rotary_pos_emb_nz_value.h +0 -27
  575. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/asdop/asd_op_impl.h +0 -74
  576. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/backend_param.h +0 -74
  577. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/cast/cast_impl.h +0 -48
  578. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/cast/kernel/cast_kernel.h +0 -21
  579. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/compare/compare_impl.h +0 -55
  580. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/compare/compare_tiling.h +0 -27
  581. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/compare/kernel/compare_kernel.h +0 -23
  582. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/elewise_binary/and_impl.h +0 -29
  583. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/elewise_binary/div_impl.h +0 -29
  584. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/elewise_binary/elewise_binary_impl.h +0 -48
  585. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/elewise_binary/elewise_binary_tiling.h +0 -25
  586. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/elewise_binary/kernel/and_kernel.h +0 -46
  587. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/elewise_binary/kernel/div_kernel.h +0 -46
  588. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/elewise_binary/kernel/elewise_binary_base.h +0 -260
  589. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/elewise_binary/kernel/elewise_binary_kernel.h +0 -35
  590. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/elewise_binary/kernel/max_kernel.h +0 -66
  591. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/elewise_binary/kernel/min_kernel.h +0 -66
  592. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/elewise_binary/kernel/mul_kernel.h +0 -66
  593. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/elewise_binary/kernel/or_kernel.h +0 -46
  594. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/elewise_binary/max_impl.h +0 -29
  595. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/elewise_binary/min_impl.h +0 -29
  596. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/elewise_binary/mul_impl.h +0 -29
  597. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/elewise_binary/or_impl.h +0 -29
  598. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/elewise_unary/abs_impl.h +0 -29
  599. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/elewise_unary/elewise_unary_impl.h +0 -47
  600. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/elewise_unary/elewise_unary_tiling.h +0 -24
  601. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/elewise_unary/exp_impl.h +0 -29
  602. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/elewise_unary/kernel/abs_kernel.h +0 -45
  603. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/elewise_unary/kernel/elewise_unary_base.h +0 -148
  604. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/elewise_unary/kernel/elewise_unary_kernel.h +0 -31
  605. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/elewise_unary/kernel/exp_kernel.h +0 -45
  606. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/elewise_unary/kernel/ln_kernel.h +0 -45
  607. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/elewise_unary/kernel/not_kernel.h +0 -45
  608. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/elewise_unary/kernel/reciprocal_kernel.h +0 -45
  609. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/elewise_unary/kernel/relu_kernel.h +0 -55
  610. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/elewise_unary/kernel/rsqrt_kernel.h +0 -45
  611. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/elewise_unary/kernel/sqrt_kernel.h +0 -45
  612. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/elewise_unary/ln_impl.h +0 -29
  613. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/elewise_unary/not_impl.h +0 -29
  614. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/elewise_unary/reciprocal_impl.h +0 -29
  615. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/elewise_unary/relu_impl.h +0 -29
  616. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/elewise_unary/rsqrt_impl.h +0 -29
  617. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/elewise_unary/sqrt_impl.h +0 -29
  618. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/flash_attention_score/flash_attention_score_impl.h +0 -68
  619. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/internal_kernel.h +0 -99
  620. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/internal_rtbackend.h +0 -21
  621. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/lccl/lccl_wrapper.h +0 -58
  622. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/ms_int_types.h +0 -91
  623. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/ms_int_utils.h +0 -108
  624. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/paged_attention/paged_attention_impl.h +0 -64
  625. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/param/add_param.h +0 -68
  626. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/param/attention_param.h +0 -40
  627. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/param/cast_param.h +0 -30
  628. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/param/compare_param.h +0 -31
  629. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/param/elewise_param.h +0 -41
  630. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/param/grouped_matmul_param.h +0 -40
  631. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/param/matmul_ext_param.h +0 -38
  632. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/param/matmul_qkv_param.h +0 -42
  633. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/param/sub_param.h +0 -33
  634. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/profiling_util.h +0 -377
  635. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/reshape_and_cache_nz/kernel/reshape_and_cache_nz.h +0 -24
  636. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/reshape_and_cache_nz/reshape_and_cache_nz_impl.h +0 -42
  637. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/reshape_and_cache_nz/reshape_and_cache_nz_tiling.h +0 -27
  638. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/rms_norm/rms_norm_impl.h +0 -46
  639. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/sub/kernel/sub_kernel.h +0 -20
  640. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/sub/sub_impl.h +0 -48
  641. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/sub/sub_tiling.h +0 -25
  642. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/tune_repo/matmul_table.h +0 -399
  643. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/tune_repo/utils.h +0 -41
  644. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/utils/backend.h +0 -45
  645. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/utils/elewise_tiling.h +0 -29
  646. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/utils/elewise_utils.h +0 -30
  647. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/utils/log/log.h +0 -69
  648. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/utils/log/log_core.h +0 -43
  649. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/utils/log/log_entity.h +0 -38
  650. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/utils/log/log_sink.h +0 -69
  651. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/utils/log/log_stream.h +0 -41
  652. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/utils/log/log_tiling.h +0 -71
  653. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/utils/log/log_utils.h +0 -165
  654. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/utils/math.h +0 -20
  655. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/utils/register/kernel_creator.h +0 -39
  656. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/utils/register/kernel_registry.h +0 -121
  657. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/utils/utils.h +0 -106
  658. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/lib/libAdd_impl.so +0 -0
  659. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/lib/libSub_impl.so +0 -0
  660. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/lib/libadd_rms_norm_quant_acme_impl.so +0 -0
  661. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/lib/libapply_rotary_pos_emb_310p_old_impl.so +0 -0
  662. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/lib/libapply_rotary_pos_emb_old_impl.so +0 -0
  663. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/lib/libreshape_and_cache_nz_impl.so +0 -0
  664. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/lib/libreshape_and_cache_nz_old_impl.so +0 -0
  665. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/op_kernels/ascend910b/AcmeMatMulPostFusionMixTactic/acme_matmul_postfusion_mix.json +0 -19
  666. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/op_kernels/ascend910b/AcmeMatMulPostFusionMixTactic/acme_matmul_postfusion_mix.o +0 -0
  667. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/op_kernels/ascend910b/AcmeMatMulPostFusionMixTactic/acme_matmul_postfusion_mix_mix_aic_0.o +0 -0
  668. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/op_kernels/ascend910b/AcmeMatMulPostFusionMixTactic/acme_matmul_postfusion_mix_mix_aiv_0.o +0 -0
  669. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/op_kernels/ascend910b/AcmeMultiWeightMatMulPostFusionMixTactic/acme_multi_weight_matmul_postfusion_mix.json +0 -19
  670. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/op_kernels/ascend910b/AcmeMultiWeightMatMulPostFusionMixTactic/acme_multi_weight_matmul_postfusion_mix.o +0 -0
  671. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/op_kernels/ascend910b/AcmeMultiWeightMatMulPostFusionMixTactic/acme_multi_weight_matmul_postfusion_mix_mix_aic_0.o +0 -0
  672. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/op_kernels/ascend910b/AcmeMultiWeightMatMulPostFusionMixTactic/acme_multi_weight_matmul_postfusion_mix_mix_aiv_0.o +0 -0
  673. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/op_kernels/ascend910b/flash_attention_score/flash_attention_score_bf16_bnsd_full_mix.o +0 -0
  674. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/op_kernels/ascend910b/flash_attention_score/flash_attention_score_bf16_bnsd_tri_mix.o +0 -0
  675. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/op_kernels/ascend910b/flash_attention_score/flash_attention_score_bf16_bsh_full_mix.o +0 -0
  676. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/op_kernels/ascend910b/flash_attention_score/flash_attention_score_bf16_bsh_tri_mix.o +0 -0
  677. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/op_kernels/ascend910b/flash_attention_score/flash_attention_score_fp16_bnsd_full_mix.o +0 -0
  678. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/op_kernels/ascend910b/flash_attention_score/flash_attention_score_fp16_bnsd_tri_mix.o +0 -0
  679. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/op_kernels/ascend910b/flash_attention_score/flash_attention_score_fp16_bsh_full_mix.o +0 -0
  680. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/op_kernels/ascend910b/flash_attention_score/flash_attention_score_fp16_bsh_tri_mix.o +0 -0
  681. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/op_kernels/ascend910b/paged_attention/paged_attention_bf16_bnsd_mix.o +0 -0
  682. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/op_kernels/ascend910b/paged_attention/paged_attention_bf16_bsh_mix.o +0 -0
  683. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/op_kernels/ascend910b/paged_attention/paged_attention_fp16_bnsd_mix.o +0 -0
  684. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/op_kernels/ascend910b/paged_attention/paged_attention_fp16_bsh_mix.o +0 -0
  685. mindspore/profiler/envprofiling.py +0 -254
  686. mindspore/profiler/profiling.py +0 -1926
  687. {mindspore-2.4.10.dist-info → mindspore-2.5.0.dist-info}/WHEEL +0 -0
  688. {mindspore-2.4.10.dist-info → mindspore-2.5.0.dist-info}/top_level.txt +0 -0
@@ -0,0 +1,897 @@
1
+ # Copyright 2024 Huawei Technologies Co., Ltd
2
+ #
3
+ # Licensed under the Apache License, Version 2.0 (the "License");
4
+ # you may not use this file except in compliance with the License.
5
+ # You may obtain a copy of the License at
6
+ #
7
+ # http://www.apache.org/licenses/LICENSE-2.0
8
+ #
9
+ # Unless required by applicable law or agreed to in writing, software
10
+ # distributed under the License is distributed on an "AS IS" BASIS,
11
+ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12
+ # See the License for the specific language governing permissions and
13
+ # limitations under the License.
14
+ # ============================================================================
15
+ """Holding mint APIs"""
16
+ from mindspore._c_expression import _all_gather_matmul_instance
17
+ from mindspore._c_expression import _bitwise_not_instance
18
+ from mindspore._c_expression import _clamp_instance
19
+ from mindspore._c_expression import _div_instance
20
+ from mindspore._c_expression import _empty_instance
21
+ from mindspore._c_expression import _fmod_instance
22
+ from mindspore._c_expression import _lerp_instance
23
+ from mindspore._c_expression import _matmul_reduce_scatter_instance
24
+ from mindspore._c_expression import _max_instance
25
+ from mindspore._c_expression import _min_instance
26
+ from mindspore._c_expression import _nansum_instance
27
+ from mindspore._c_expression import _remainder_instance
28
+ from mindspore._c_expression import _repeat_interleave_instance
29
+ from mindspore._c_expression import _where_instance
30
+
31
+ def all_gather_matmul(*args, **kwargs):
32
+ r"""
33
+ all_gather_matmul(input, x2, group, world_size, *, bias=None, gather_index=0, gather_output=True, comm_turn=0, trans_input=False, trans_x2=False) -> Tensor
34
+
35
+ In the TP segmentation scenario, allgather and matmul are fused, and communication and computational pipelines
36
+ are parallelized within the fusion operator.
37
+
38
+ .. math::
39
+ output = allgather(input)@x2
40
+
41
+ gather\_out = allgather(input)
42
+
43
+ .. warning::
44
+ This is an experimental API that is subject to change or deletion.
45
+
46
+ Args:
47
+ input (Tensor): The left matrix of matmul, the dtype supports float16 and bfloat16, the shape supports 2
48
+ dimensions, and the data format supports ND.
49
+ x2 (Tensor): The right matrix of matmul, the dtype needs to be consistent with ``input`` , the shape
50
+ supports 2 dimensions, and the data format supports ND.
51
+ group (str): Communication group name, can be created by ``create_group`` method, or use the default group
52
+ ``mindspore.communication.GlobalComm.WORLD_COMM_GROUP``.
53
+ world_size (int): The total number of ranks in the communication group, should be consistent with the number
54
+ of devices actually running, supporting ``2`` , ``4`` , and ``8`` .
55
+
56
+ Keyword Args:
57
+ bias (Tensor, optional): Currently only ``None`` is supported. Default: ``None`` .
58
+ gather_index (int, optional): Indicates the allgather operation object, ``0`` means gather ``input`` ,
59
+ ``1`` means gather ``x2`` . Currently only ``0`` is supported. Default: ``0`` .
60
+ gather_output (bool, optional): Indicates whether gather output is required. Default: ``True`` .
61
+ comm_turn (int, optional): Indicates the granularity of communication between ranks. Currently only ``0``
62
+ is supported. Default: ``0`` .
63
+ trans_input (bool, optional): Indicates whether ``input`` is transposed. Currently only ``False`` is
64
+ supported. Default: ``False`` .
65
+ trans_x2 (bool, optional): Indicates whether ``x2`` is transposed. Default: ``False`` .
66
+
67
+ Returns:
68
+ - output (Tensor) - The result of allgather and matmul fusion calculations.
69
+ - gather_out (Tensor) - The result of allgather. If gather_output is ``False`` , ``gather_out`` returns a
70
+ tensor with shape 0.
71
+
72
+ Note:
73
+ - When using this interface, please ensure that the driver firmware package and CANN package are both the
74
+ matching 8.0.RC2 version or a higher version, otherwise an error will be reported, such as BUS ERROR.
75
+ - The shape of ``input`` is (m, k), the shape of ``x2`` is (k, n), k is required to be equal, and the value
76
+ range of k is [256, 65535). The shape of ``output`` is (m * world_size, n), and the shape of
77
+ ``gather_out`` is (m * world_size, k).
78
+ - The common fusion operators in a model only support the same communication group.
79
+
80
+ Raises:
81
+ TypeError: Any arg is of wrong type.
82
+ RuntimeError: The dtype of ``input`` or ``x2`` is neither float16 nor bfloat16.
83
+ RuntimeError: The dtypes of ``input`` and ``x2`` are different.
84
+ RuntimeError: The shape of ``input`` or ``x2`` is not two-dimensional.
85
+ RuntimeError: The k axis of ``input`` shape and ``x2`` shape are not equal.
86
+ RuntimeError: k is less than ``256`` or greater than or equal to ``65535`` .
87
+ RuntimeError: ``bias`` is not None.
88
+ RuntimeError: ``group`` does not exist.
89
+ RuntimeError: ``world_size`` is inconsistent with the actual number of running cards.
90
+ RuntimeError: ``world_size`` is not equal to ``2`` , ``4`` , or ``8`` .
91
+ RuntimeError: ``gather_index`` is not ``0`` .
92
+ RuntimeError: ``trans_input`` is ``True`` .
93
+
94
+ Supported Platforms:
95
+ ``Ascend``
96
+
97
+ Examples:
98
+ .. note::
99
+ Before running the following examples, you need to configure the communication environment variables.
100
+
101
+ For Ascend/GPU/CPU devices, it is recommended to use the msrun startup method without any third-party or
102
+ configuration file dependencies. Please see the `msrun start up <https://www.mindspore.cn/docs/en/master/model_train/parallel/msrun_launcher.html>`_
103
+ for more details.
104
+
105
+ This example should be run with 2 devices.
106
+
107
+ >>> import mindspore as ms
108
+ >>> import numpy as np
109
+ >>> from mindspore import ops
110
+ >>> ms.communication.init()
111
+ >>> rank = ms.communication.get_rank()
112
+ >>> np.random.seed(rank)
113
+ >>> input = ms.Tensor(np.random.randn(128, 256).astype(np.float32), dtype=ms.float16)
114
+ >>> x2 = ms.Tensor(np.random.randn(256, 512).astype(np.float32), dtype=ms.float16)
115
+ >>> group = ms.communication.GlobalComm.WORLD_COMM_GROUP
116
+ >>> world_size = ms.communication.get_group_size()
117
+ >>> output, gather_out = ops.all_gather_matmul(
118
+ ... input,
119
+ ... x2,
120
+ ... group,
121
+ ... world_size,
122
+ ... bias=None,
123
+ ... gather_index=0,
124
+ ... gather_output=True,
125
+ ... comm_turn=0,
126
+ ... trans_input=False,
127
+ ... trans_x2=False,
128
+ ... )
129
+ >>> print(output.shape)
130
+ (256, 512)
131
+ >>> print(gather_out.shape)
132
+ (256, 256)
133
+ """
134
+ return _all_gather_matmul_instance(*args, **kwargs)
135
+
136
+
137
+ def bitwise_not(*args, **kwargs):
138
+ r"""
139
+ bitwise_not(input) -> Tensor
140
+
141
+ Returns bitwise `not` of the input tensor.
142
+
143
+ .. warning::
144
+ This is an experimental API that is subject to change or deletion.
145
+
146
+ Args:
147
+ input (Tensor): The input tensor must be of integral or Boolean types.
148
+
149
+ Returns:
150
+ Tensor, has the same shape and type as `input`.
151
+
152
+ Raises:
153
+ TypeError: If `input` is not a Tensor.
154
+ RuntimeError: If dtype of `input` is not int or bool.
155
+
156
+ Supported Platforms:
157
+ ``Ascend``
158
+
159
+ Examples:
160
+ >>> import mindspore
161
+ >>> import numpy as np
162
+ >>> from mindspore import Tensor, mint
163
+ >>> x = Tensor(np.array([True, False, True, False]))
164
+ >>> y = mint.bitwise_not(x)
165
+ >>> print(y)
166
+ [False True False True]
167
+ """
168
+ return _bitwise_not_instance(*args, **kwargs)
169
+
170
+
171
+ def clamp(*args, **kwargs):
172
+ r"""
173
+ clamp(input, min=None, max=None) -> Tensor
174
+
175
+ Clamps tensor values between the specified minimum value and maximum value.
176
+
177
+ Limits the value of :math:`input` to a range, whose lower limit is `min` and upper limit is `max` .
178
+
179
+ .. math::
180
+
181
+ out_i= \left\{
182
+ \begin{array}{align}
183
+ max & \text{ if } input_i\ge max \\
184
+ input_i & \text{ if } min \lt input_i \lt max \\
185
+ min & \text{ if } input_i \le min \\
186
+ \end{array}\right.
187
+
188
+ Note:
189
+ - `min` and `max` cannot be None at the same time;
190
+ - When `min` is None and `max` is not None, the elements in Tensor larger than `max` will become `max`;
191
+ - When `min` is not None and `max` is None, the elements in Tensor smaller than `min` will become `min`;
192
+ - If `min` is greater than `max`, the value of all elements in Tensor will be set to `max`;
193
+ - The data type of `input`, `min` and `max` should support implicit type conversion and cannot be bool type.
194
+
195
+ Args:
196
+ input (Tensor): Input data, which type is Tensor. Tensors of arbitrary dimensions are supported.
197
+ min (Union(Tensor, float, int), optional): The minimum value. Default: ``None`` .
198
+ max (Union(Tensor, float, int), optional): The maximum value. Default: ``None`` .
199
+
200
+ Returns:
201
+ Tensor, a clipped Tensor.
202
+ The data type and shape are the same as input.
203
+
204
+ Raises:
205
+ ValueError: If both `min` and `max` are None.
206
+ TypeError: If the type of `input` is not Tensor.
207
+ TypeError: If the type of `min` is not in None, Tensor, float or int.
208
+ TypeError: If the type of `max` is not in None, Tensor, float or int.
209
+
210
+ Supported Platforms:
211
+ ``Ascend``
212
+
213
+ Examples:
214
+ >>> # case 1: the data type of input is Tensor
215
+ >>> import mindspore
216
+ >>> from mindspore import Tensor, mint
217
+ >>> import numpy as np
218
+ >>> min_value = Tensor(5, mindspore.float32)
219
+ >>> max_value = Tensor(20, mindspore.float32)
220
+ >>> input = Tensor(np.array([[1., 25., 5., 7.], [4., 11., 6., 21.]]), mindspore.float32)
221
+ >>> output = mint.clamp(input, min_value, max_value)
222
+ >>> print(output)
223
+ [[ 5. 20. 5. 7.]
224
+ [ 5. 11. 6. 20.]]
225
+ >>> # case 2: the data type of input is number
226
+ >>> import mindspore
227
+ >>> from mindspore import Tensor, mint
228
+ >>> import numpy as np
229
+ >>> min_value = 5
230
+ >>> max_value = 20
231
+ >>> input = Tensor(np.array([[1., 25., 5., 7.], [4., 11., 6., 21.]]), mindspore.float32)
232
+ >>> output = mint.clamp(input, min_value, max_value)
233
+ >>> print(output)
234
+ [[ 5. 20. 5. 7.]
235
+ [ 5. 11. 6. 20.]]
236
+ """
237
+ return _clamp_instance(*args, **kwargs)
238
+
239
+
240
+ def clip(*args, **kwargs):
241
+ r"""
242
+ clip(input, min=None, max=None) -> Tensor
243
+
244
+ Alias for :func:`mindspore.mint.clamp`.
245
+ """
246
+ return _clamp_instance(*args, **kwargs)
247
+
248
+
249
+ def div(*args, **kwargs):
250
+ r"""
251
+ div(input, other, *, rounding_mode=None) -> Tensor
252
+
253
+ Divides each element of the `input` by the corresponding element of the `other` .
254
+
255
+ .. math::
256
+
257
+ out_{i} = input_{i} / other_{i}
258
+
259
+ .. note::
260
+ - When the two inputs have different shapes, they must be able to broadcast to a common shape.
261
+ - The two inputs can not be bool type at the same time,
262
+ [True, Tensor(True, bool\_), Tensor(np.array([True]), bool\_)] are all considered bool type.
263
+ - The two inputs comply with the implicit type conversion rules to make the data types
264
+ consistent.
265
+
266
+ Args:
267
+ input (Union[Tensor, Number, bool]): The dividend.
268
+ other (Union[Tensor, Number, bool]): The divisor.
269
+
270
+ Keyword Args:
271
+ rounding_mode (str, optional): Type of rounding applied to the result. Default: ``None`` .
272
+ Three types are defined as,
273
+
274
+ - None: Default behavior, which is the same as true division in Python or `true_divide` in NumPy.
275
+
276
+ - "floor": Rounds the division of the inputs down, which is the same as floor division in Python
277
+ or `floor_divide` in NumPy.
278
+
279
+ - "trunc": Rounds the division of the inputs towards zero, which is the same as C-style integer division.
280
+
281
+ Returns:
282
+ Tensor, the shape is the same as the one after broadcasting,
283
+ and the data type is the one with higher precision or higher digits among the two inputs.
284
+
285
+ Raises:
286
+ TypeError: If `input` and `other` is not one of the following: Tensor, Number, bool.
287
+ ValueError: If `rounding_mode` value is not None, "floor" or "trunc".
288
+
289
+ Supported Platforms:
290
+ ``Ascend``
291
+
292
+ Examples:
293
+ >>> import mindspore
294
+ >>> import numpy as np
295
+ >>> from mindspore import Tensor, mint
296
+ >>> x = Tensor(np.array([1.0, 2.0, 3.0]), mindspore.float32)
297
+ >>> y = Tensor(np.array([4.0, 5.0, 6.0]), mindspore.float32)
298
+ >>> output = mint.div(x, y)
299
+ >>> print(output)
300
+ [0.25 0.4 0.5]
301
+ """
302
+ return _div_instance(*args, **kwargs)
303
+
304
+
305
+ def divide(*args, **kwargs):
306
+ r"""
307
+ divide(input, other, *, rounding_mode=None) -> Tensor
308
+
309
+ Alias for :func:`mindspore.mint.div`.
310
+ """
311
+ return _div_instance(*args, **kwargs)
312
+
313
+
314
+ def empty(*args, **kwargs):
315
+ r"""
316
+ empty(*size, dtype=None, device=None) -> Tensor
317
+
318
+ Creates a tensor with uninitialized data, whose shape, dtype and device are described by the argument `size`,
319
+ `dtype` and `device` respectively.
320
+
321
+ .. warning::
322
+ This is an experimental API that is subject to change or deletion.
323
+
324
+ Args:
325
+ size (Union[tuple[int], list[int], int]): The specified shape of output tensor. Can be variable numbers of
326
+ positive integers or a tuple or list containing positive integers.
327
+
328
+ Keyword Args:
329
+ dtype (:class:`mindspore.dtype`, optional): The specified type of output tensor. If `dtype` is ``None`` ,
330
+ `mindspore.float32` will be used. Default: ``None`` .
331
+ device (string, optional): The specified device of the output tensor. Support ``CPU`` and ``Ascend``. If
332
+ `device = None`, `mindspore.context.device_target` will be used. Default ``None``.
333
+
334
+ Returns:
335
+ Tensor, whose dtype and size are defined by input.
336
+
337
+ Raises:
338
+ TypeError: If `size` is neither an int nor a tuple or list of int.
339
+
340
+ Supported Platforms:
341
+ ``Ascend``
342
+
343
+ Examples:
344
+ >>> import mindspore
345
+ >>> from mindspore import ops
346
+ >>> output = ops.empty((2, 3), dtype=mindspore.float32)
347
+ >>> print(output)
348
+ [[0. 0. 0.]
349
+ [0. 0. 0.]]
350
+ """
351
+ return _empty_instance(*args, **kwargs)
352
+
353
+
354
+ def fmod(*args, **kwargs):
355
+ r"""
356
+ fmod(input, other) -> Tensor
357
+
358
+ Computes the floating-point remainder of the division operation input/other.
359
+
360
+ .. math::
361
+
362
+ out = input - n * other
363
+
364
+ Where :math:`n` is :math:`input/other` with its fractional part truncated.
365
+ The returned value has the same sign as `input` and is less than `other` in magnitude.
366
+
367
+ .. warning::
368
+ This is an experimental API that is subject to change or deletion.
369
+
370
+ Args:
371
+ input (Tensor): the dividend.
372
+ other (Union[Tensor, Number]): the divisor.
373
+
374
+ Returns:
375
+ Tensor, the shape is the same as the one after broadcasting,
376
+ and the data type is the one with higher precision or higher digits among the two inputs.
377
+
378
+ Raises:
379
+ TypeError: If `input` is not a Tensor.
380
+
381
+ Supported Platforms:
382
+ ``Ascend``
383
+
384
+ Examples:
385
+ >>> import mindspore
386
+ >>> import numpy as np
387
+ >>> from mindspore import Tensor, mint
388
+ >>> input = Tensor(np.array([-4., -3.5, 0, 3.5, 4]), mindspore.float32)
389
+ >>> output = mint.fmod(input, 2.5)
390
+ >>> print(output)
391
+ [-1.5 -1. 0. 1. 1.5]
392
+ """
393
+ return _fmod_instance(*args, **kwargs)
394
+
395
+
396
+ def lerp(*args, **kwargs):
397
+ r"""
398
+ lerp(input, end, weight) -> Tensor
399
+
400
+ Perform a linear interpolation of two tensors input and end based on a float or tensor weight.
401
+
402
+ If `weight` is a tensor, the shapes of three inputs need to be broadcast;
403
+ If `weight` is a float, the shapes of `input` and `end` need to be broadcast.
404
+ If `weight` is a float and platform is Ascend, the types of `input` and `end` need to be float32.
405
+
406
+ .. warning::
407
+ This is an experimental API that is subject to change or deletion.
408
+
409
+ .. math::
410
+ output_{i} = input_{i} + weight_{i} * (end_{i} - input_{i})
411
+
412
+ Args:
413
+ input (Tensor): The tensor with the starting points. Data type must be float16 or float32.
414
+ end (Tensor): The tensor with the ending points. Data type must be the same as `input`.
415
+ weight (Union[float, Tensor]): The weight for the interpolation formula. Must be a float scalar
416
+ or a tensor with float16 or float32 data type.
417
+
418
+ Returns:
419
+ Tensor, has the same type and shape as input `input`.
420
+
421
+ Raises:
422
+ TypeError: If `input` or `end` is not a tensor.
423
+ TypeError: If `weight` is neither scalar(float) nor tensor.
424
+ TypeError: If dtype of `input` or `end` is neither float16 nor float32.
425
+ TypeError: If dtype of `weight` is neither float16 nor float32 when it is a tensor.
426
+ TypeError: If `input` and `end` have different data types.
427
+ TypeError: If `input`, `end` and `weight` have different data types when `weight` is a tensor.
428
+ ValueError: If `end` could not be broadcast to a tensor with shape of `input`.
429
+ ValueError: If `weight` could not be broadcast to tensors with shapes of `input` and `end` when it is a tensor.
430
+
431
+ Supported Platforms:
432
+ ``Ascend`` ``GPU`` ``CPU``
433
+
434
+ Examples:
435
+ >>> import mindspore
436
+ >>> import numpy as np
437
+ >>> from mindspore import Tensor, mint
438
+ >>> start = Tensor(np.array([1., 2., 3., 4.]), mindspore.float32)
439
+ >>> end = Tensor(np.array([10., 10., 10., 10.]), mindspore.float32)
440
+ >>> output = mint.lerp(start, end, 0.5)
441
+ >>> print(output)
442
+ [5.5 6. 6.5 7. ]
443
+ """
444
+ return _lerp_instance(*args, **kwargs)
445
+
446
+
447
+ def matmul_reduce_scatter(*args, **kwargs):
448
+ r"""
449
+ matmul_reduce_scatter(input, x2, group, world_size, *, reduce_op='sum', bias=None, comm_turn=0, trans_input=False, trans_x2=False) -> Tensor
450
+
451
+ In the TP segmentation scenario, matmul and reducescatter are fused, and communication and computational
452
+ pipelines are parallelized within the fusion operator.
453
+
454
+ .. math::
455
+ output = reducescatter(input@x2)
456
+
457
+ .. warning::
458
+ This is an experimental API that is subject to change or deletion.
459
+
460
+ Args:
461
+ input (Tensor): The left matrix of matmul, the dtype supports float16 and bfloat16, the shape supports 2
462
+ dimensions, and the data format supports ND.
463
+ x2 (Tensor): The right matrix of matmul, the dtype needs to be consistent with ``input`` , the shape
464
+ supports 2 dimensions, and the data format supports ND.
465
+ group (str): Communication group name, can be created by ``create_group`` method, or use the default group
466
+ ``mindspore.communication.GlobalComm.WORLD_COMM_GROUP``.
467
+ world_size (int): The total number of ranks in the communication group, should be consistent with the number
468
+ of devices actually running, supporting ``2`` , ``4`` , and ``8`` .
469
+
470
+ Keyword Args:
471
+ reduce_op (str, optional): The reduce operation type. Currently only ``'sum'`` is supported. Default:
472
+ ``'sum'`` .
473
+ bias (Tensor, optional): Currently only ``None`` is supported. Default: ``None`` .
474
+ comm_turn (int, optional): Indicates the granularity of communication between ranks. Currently only ``0``
475
+ is supported. Default: ``0`` .
476
+ trans_input (bool, optional): Indicates whether ``input`` is transposed. Currently only ``False`` is
477
+ supported. Default: ``False`` .
478
+ trans_x2 (bool, optional): Indicates whether ``x2`` is transposed. Default: ``False`` .
479
+
480
+ Returns:
481
+ - output (Tensor) - The result of matmul and reducescatter fusion calculations.
482
+
483
+ Note:
484
+ - When using this interface, please ensure that the driver firmware package and CANN package are both the
485
+ matching 8.0.RC2 version or a higher version, otherwise an error will be reported, such as BUS ERROR.
486
+ - The shape of ``input`` is (m, k), the shape of ``x2`` is (k, n), k is required to be equal, and the value
487
+ range of k is [256, 65535), and m is required to be an integer multiple of ``world_size`` . The shape of
488
+ ``output`` is (m / world_size, n).
489
+ - The common fusion operators in a model only support the same communication group.
490
+
491
+ Raises:
492
+ TypeError: Any arg is of wrong type.
493
+ RuntimeError: The dtype of ``input`` or ``x2`` is neither float16 nor bfloat16.
494
+ RuntimeError: The dtypes of ``input`` and ``x2`` are different.
495
+ RuntimeError: The shape of ``input`` or ``x2`` is not two-dimensional.
496
+ RuntimeError: The k axis of ``input`` shape and ``x2`` shape are not equal.
497
+ RuntimeError: k is less than ``256`` or greater than or equal to ``65535`` .
498
+ RuntimeError: ``bias`` is not None.
499
+ RuntimeError: ``group`` does not exist.
500
+ RuntimeError: ``world_size`` is inconsistent with the actual number of running cards.
501
+ RuntimeError: ``world_size`` is not equal to ``2`` , ``4`` , or ``8`` .
502
+ RuntimeError: ``reduce_op`` is not ``'sum'`` .
503
+ RuntimeError: ``trans_input`` is ``True`` .
504
+
505
+ Supported Platforms:
506
+ ``Ascend``
507
+
508
+ Examples:
509
+ .. note::
510
+ Before running the following examples, you need to configure the communication environment variables.
511
+
512
+ For Ascend/GPU/CPU devices, it is recommended to use the msrun startup method without any third-party or
513
+ configuration file dependencies. Please see the `msrun start up <https://www.mindspore.cn/docs/en/master/model_train/parallel/msrun_launcher.html>`_
514
+ for more details.
515
+
516
+ This example should be run with 2 devices.
517
+
518
+ >>> import mindspore as ms
519
+ >>> from mindspore import ops
520
+ >>> import numpy as np
521
+ >>> ms.communication.init()
522
+ >>> rank = ms.communication.get_rank()
523
+ >>> np.random.seed(rank)
524
+ >>> input = ms.Tensor(np.random.randn(1024, 256).astype(np.float32), dtype=ms.float16)
525
+ >>> x2 = ms.Tensor(np.random.randn(256, 512).astype(np.float32), dtype=ms.float16)
526
+ >>> group = ms.communication.GlobalComm.WORLD_COMM_GROUP
527
+ >>> world_size = ms.communication.get_group_size()
528
+ >>> reduce_op = ops.ReduceOp.SUM
529
+ >>> output = ops.matmul_reduce_scatter(
530
+ ... input,
531
+ ... x2,
532
+ ... group,
533
+ ... world_size,
534
+ ... reduce_op=reduce_op,
535
+ ... bias=None,
536
+ ... comm_turn=0,
537
+ ... trans_input=False,
538
+ ... trans_x2=False,
539
+ ... )
540
+ >>> print(output.shape)
541
+ (512, 512)
542
+ """
543
+ return _matmul_reduce_scatter_instance(*args, **kwargs)
544
+
545
+
546
+ def max(*args, **kwargs):
547
+ r"""
548
+ max(input) -> Tensor
549
+
550
+ Returns the maximum value of the input tensor.
551
+
552
+ Args:
553
+ input (Tensor): The input tensor.
554
+
555
+ Returns:
556
+ Scalar Tensor with the same dtype as `input`, the maximum value of the input.
557
+
558
+ Supported Platforms:
559
+ ``Ascend`` ``GPU`` ``CPU``
560
+
561
+ Examples:
562
+ >>> import mindspore
563
+ >>> import numpy as np
564
+ >>> from mindspore import Tensor, mint
565
+ >>> x = Tensor(np.array([0.0, 0.4, 0.6, 0.7, 0.1]), mindspore.float32)
566
+ >>> output = mint.max(x)
567
+ >>> print(output)
568
+ 0.7
569
+
570
+ .. function:: max(input, dim, keepdim=False) -> tuple(Tensor)
571
+ :noindex:
572
+
573
+ Calculates the maximum value along with the given dim for the input tensor, and returns the maximum values and
574
+ indices.
575
+
576
+ Args:
577
+ input (Tensor): The input tensor, can be any dimension. Set the shape of input tensor as
578
+ :math:`(input_1, input_2, ..., input_N)` , Complex tensor is not supported.
579
+ dim (int): The dimension to reduce.
580
+ keepdim (bool, optional): Whether to reduce dimension, if ``True`` the output will keep the same dimension as the
581
+ `input` , the output will reduce dimension if ``false``. Default: ``False``.
582
+
583
+ Returns:
584
+ tuple (Tensor), tuple of 2 tensors, containing the maximum value of the self tensor along the given
585
+ dimension `dim` and the corresponding index.
586
+
587
+ - **values** (Tensor) - The maximum value of input tensor, with the same shape as `index`, and same dtype as `input`.
588
+ - **index** (Tensor) - The index for the maximum value of the input tensor, with dtype int64. If `keepdim`
589
+ is ``True`` , the shape of output tensors is :math:`(input_1, input_2, ..., input_{dim-1}, 1, input_{dim+1}, ..., input_N)`.
590
+ Otherwise, the shape is :math:`(input_1, input_2, ..., input_{dim-1}, input_{dim+1}, ..., input_N)` .
591
+
592
+ Raises:
593
+ TypeError: If `input` is not Tensor.
594
+ TypeError: If `keepdim` is not a bool.
595
+ TypeError: If `dim` is not an int.
596
+
597
+ Supported Platforms:
598
+ ``Ascend`` ``GPU`` ``CPU``
599
+
600
+ Examples:
601
+ >>> import mindspore
602
+ >>> import numpy as np
603
+ >>> from mindspore import Tensor, mint
604
+ >>> x = Tensor(np.array([0.0, 0.4, 0.6, 0.7, 0.1]), mindspore.float32)
605
+ >>> output, index = mint.max(x, 0, keepdim=True)
606
+ >>> print(output, index)
607
+ [0.7] [3]
608
+
609
+ .. function:: max(input, other) -> Tensor
610
+ :noindex:
611
+
612
+ For details, please refer to :func:`mindspore.mint.maximum`.
613
+ """
614
+ return _max_instance(*args, **kwargs)
615
+
616
+
617
+ def min(*args, **kwargs):
618
+ r"""
619
+ min(input) -> Tensor
620
+
621
+ Returns the minimum value of the input tensor.
622
+
623
+ Args:
624
+ input (Tensor): The input tensor.
625
+
626
+ Returns:
627
+ Scalar Tensor with the same dtype as `input`, the minimum value of the input.
628
+
629
+ Supported Platforms:
630
+ ``Ascend`` ``GPU`` ``CPU``
631
+
632
+ Examples:
633
+ >>> import mindspore
634
+ >>> import numpy as np
635
+ >>> from mindspore import Tensor, mint
636
+ >>> x = Tensor(np.array([0.0, 0.4, 0.6, 0.7, 0.1]), mindspore.float32)
637
+ >>> output = mint.min(x)
638
+ >>> print(output)
639
+ 0.0
640
+
641
+ .. function:: min(input, dim, keepdim=False) -> tuple(Tensor)
642
+ :noindex:
643
+
644
+ Calculates the minimum value along with the given dim for the input tensor, and returns the minimum values and
645
+ indices.
646
+
647
+ Args:
648
+ input (Tensor): The input tensor, can be any dimension. Set the shape of input tensor as
649
+ :math:`(input_1, input_2, ..., input_N)` , Complex tensor is not supported.
650
+ dim (int): The dimension to reduce.
651
+ keepdim (bool, optional): Whether to reduce dimension, if ``True`` the output will keep the same dimension as the
652
+ input, the output will reduce dimension if ``false``. Default: ``False``.
653
+
654
+ Returns:
655
+ tuple (Tensor), tuple of 2 tensors, containing the minimum value of the self tensor along the given
656
+ dimension `dim` and the corresponding index.
657
+
658
+ - **values** (Tensor) - The minimum value of input tensor, with the same shape as `index`, and same dtype as `input`.
659
+ - **index** (Tensor) - The index for the minimum value of the input tensor, with dtype int64. If `keepdim`
660
+ is ``True`` , the shape of output tensors is :math:`(input_1, input_2, ..., input_{dim-1}, 1, input_{dim+1}, ..., input_N)`.
661
+ Otherwise, the shape is :math:`(input_1, input_2, ..., input_{dim-1}, input_{dim+1}, ..., input_N)` .
662
+
663
+ Raises:
664
+ TypeError: If `input` is not Tensor.
665
+ TypeError: If `keepdim` is not a bool.
666
+ TypeError: If `dim` is not an int.
667
+
668
+ Supported Platforms:
669
+ ``Ascend`` ``GPU`` ``CPU``
670
+
671
+ Examples:
672
+ >>> import mindspore
673
+ >>> import numpy as np
674
+ >>> from mindspore import Tensor, mint
675
+ >>> x = Tensor(np.array([0.0, 0.4, 0.6, 0.7, 0.1]), mindspore.float32)
676
+ >>> output, index = mint.min(x, 0, keepdim=True)
677
+ >>> print(output, index)
678
+ [0.0] [0]
679
+
680
+ .. function:: min(input, other) -> Tensor
681
+ :noindex:
682
+
683
+ For details, please refer to :func:`mindspore.mint.minimum`.
684
+ """
685
+ return _min_instance(*args, **kwargs)
686
+
687
+
688
+ def nansum(*args, **kwargs):
689
+ r"""
690
+ nansum(input, dim=None, keepdim=False, *, dtype=None) -> Tensor
691
+
692
+ Computes sum of `input` over a given dimension, treating NaNs as zero.
693
+
694
+ .. warning::
695
+ It is only supported on Atlas A2 Training Series Products.
696
+ This is an experimental API that is subject to change or deletion.
697
+
698
+ Args:
699
+ input (Tensor): The input Tensor.
700
+ dim (Union[int, tuple(int)], optional): The dimensions to sum.
701
+ Dim must be in the range [-rank(input), rank(input)). Default: ``None``, which indicates the sum of all
702
+ elements in a tensor.
703
+ keepdim (bool, optional): Whether the output Tensor keeps dimensions or not. Default: ``False``, indicating that no dimension is kept.
704
+
705
+ Keyword Args:
706
+ dtype (:class:`mindspore.dtype`, optional): The dtype of output Tensor. Default: ``None``.
707
+
708
+ Returns:
709
+ Tensor, the sum of input `input` in the given dimension dim, treating NaNs as zero.
710
+
711
+ - If dim is None, keepdim is False,
712
+ the output is a 0-D Tensor representing the sum of all elements in the input Tensor.
713
+ - If dim is int, set as 2, and keepdim is False,
714
+ the shape of output is :math:`(input_1, input_3, ..., input_R)`.
715
+ - If dim is tuple(int) or list(int), set as (2, 3), and keepdim is False,
716
+ the shape of output is :math:`(input_1, input_4, ..., input_R)`.
717
+
718
+ Raises:
719
+ TypeError: If `input` is not Tensor.
720
+ TypeError: If `keepdim` is not a bool.
721
+ TypeError: If the dtype of `input` or `dtype` is complex type.
722
+ ValueError: If `dim` is not in [-rank(input), rank(input)).
723
+
724
+ Supported Platforms:
725
+ ``Ascend``
726
+
727
+ Examples:
728
+ >>> import mindspore
729
+ >>> import numpy as np
730
+ >>> from mindspore import Tensor, mint
731
+ >>> x = Tensor(np.array([[float("nan"), 2, 3], [1, 2, float("nan")]]), mindspore.float32)
732
+ >>> output1 = mint.nansum(x, dim=0, keepdim=False, dtype=mindspore.float32)
733
+ >>> output2 = mint.nansum(x, dim=0, keepdim=True, dtype=mindspore.float32)
734
+ >>> print(output1)
735
+ [1. 4. 3.]
736
+ >>> print(output2)
737
+ [[1. 4. 3.]]
738
+ """
739
+ return _nansum_instance(*args, **kwargs)
740
+
741
+
742
def remainder(*args, **kwargs):
    r"""
    remainder(input, other) -> Tensor

    Computes the element-wise remainder of dividing `input` by `other`. The result takes the
    sign of the divisor, and its absolute value is strictly smaller than that of `other`.

    Broadcasting to a common shape and implicit type promotion are supported.

    .. code:: python

        remainder(input, other) == input - input.div(other, rounding_mode="floor") * other

    Note:
        Complex inputs are not supported. At least one of the inputs needs to be a tensor,
        and the two inputs cannot both be bool tensors.

    Args:
        input (Union[Tensor, numbers.Number, bool]): The dividend, which is a numbers.Number,
            a bool, or a tensor whose data type is
            `number <https://www.mindspore.cn/docs/en/master/api_python/mindspore/mindspore.dtype.html>`_ or
            `bool_ <https://www.mindspore.cn/docs/en/master/api_python/mindspore/mindspore.dtype.html>`_.
        other (Union[Tensor, numbers.Number, bool]): The divisor, which is a numbers.Number,
            a bool, or a tensor whose data type is number or bool\_ when the dividend is a tensor.
            When the dividend is Scalar, the divisor must be a Tensor whose data type is number or bool\_.

    Returns:
        Tensor, with dtype promoted and shape broadcasted.

    Raises:
        TypeError: If `input` and `other` are not of types: (tensor, tensor), (tensor, number), (tensor, bool),
            (number, tensor) or (bool, tensor).
        ValueError: If `input` and `other` are not broadcastable.

    Supported Platforms:
        ``Ascend``

    Examples:
        >>> import numpy as np
        >>> from mindspore import Tensor, mint
        >>> x = Tensor(np.array([-4.0, 5.0, 6.0]).astype(np.float32))
        >>> y = Tensor(np.array([3.0, 2.0, 3.0]).astype(np.float64))
        >>> output = mint.remainder(x, y)
        >>> print(output)
        [2. 1. 0.]
    """
    # All argument validation and overload dispatch happen inside the prebuilt
    # functional instance; this wrapper exists to carry the public docstring.
    return _remainder_instance(*args, **kwargs)
788
+
789
+
790
def repeat_interleave(*args, **kwargs):
    r"""
    repeat_interleave(input, repeats, dim=None, *, output_size=None) -> Tensor

    Repeat the elements of a tensor along a given axis, similar to :func:`mindspore.numpy.repeat`.

    .. warning::
        Only support on Atlas A2 training series.

    Args:
        input (Tensor): The tensor whose values are repeated. Must be of types: float16,
            float32, int8, uint8, int16, int32, or int64.
        repeats (Union[int, tuple, list, Tensor]): The number of times to repeat, must be positive.
        dim (int, optional): The dim along which to repeat, Default: ``None``. When `dim` is ``None``,
            the input Tensor is flattened and the output is flattened as well.

    Keyword Args:
        output_size (int, optional): Total output size for the given axis (e.g. sum of repeats),
            Default: ``None``.

    Returns:
        One tensor with values repeated along the specified dim. If input has shape
        :math:`(s1, s2, ..., sn)` and dim is i, the output will have shape :math:`(s1, s2, ...,
        si * repeats, ..., sn)`. The output type will be the same as the type of `input`.

    Supported Platforms:
        ``Ascend``

    Examples:
        >>> import mindspore
        >>> import numpy as np
        >>> from mindspore import Tensor, mint
        >>> input = Tensor(np.array([[0, 1, 2], [3, 4, 5]]), mindspore.int32)
        >>> output = mint.repeat_interleave(input, repeats=2, dim=0)
        >>> print(output)
        [[0 1 2]
         [0 1 2]
         [3 4 5]
         [3 4 5]]
    """
    # Thin public wrapper: argument parsing and overload resolution are
    # delegated to the shared functional instance.
    return _repeat_interleave_instance(*args, **kwargs)
831
+
832
+
833
def where(*args, **kwargs):
    r"""
    where(condition, input, other) -> Tensor

    Returns a tensor whose elements are taken from `input` where `condition` is true and
    from `other` elsewhere.

    .. math::
        output_i = \begin{cases} input_i,\quad &if\ condition_i \\ other_i,\quad &otherwise \end{cases}

    Args:
        condition (Tensor[bool]): If true, yield `input`, otherwise yield `other`.
        input (Union[Tensor, Scalar]): Values selected at positions where `condition` is true.
        other (Union[Tensor, Scalar]): Values selected at positions where `condition` is false.

    Returns:
        Tensor, elements are selected from `input` and `other`.

    Raises:
        TypeError: If `condition` is not a tensor.
        TypeError: If both `input` and `other` are scalars.
        ValueError: If `condition`, `input` and `other` can not broadcast to each other.

    Supported Platforms:
        ``Ascend`` ``GPU`` ``CPU``

    Examples:
        >>> import numpy as np
        >>> from mindspore import tensor, ops
        >>> from mindspore import dtype as mstype
        >>> a = tensor(np.arange(4).reshape((2, 2)), mstype.float32)
        >>> b = tensor(np.ones((2, 2)), mstype.float32)
        >>> condition = a < 3
        >>> output = ops.where(condition, a, b)
        >>> print(output)
        [[0. 1.]
         [2. 1.]]

    .. function:: where(condition) -> Tensor
        :noindex:

    Identical to :func:`mindspore.ops.nonzero` with input `condition` and `as_tuple` being True.

    Supported Platforms:
        ``Ascend``
    """
    # Both the ternary form and the single-argument (nonzero-like) form are
    # resolved by the prebuilt functional instance.
    return _where_instance(*args, **kwargs)
879
+
880
# Public API of this module: the names exported by `from <module> import *`
# and the functions documented for the `mint` functional interface.
__all__ = [
    "all_gather_matmul",
    "bitwise_not",
    "clamp",
    "clip",
    "div",
    "divide",
    "empty",
    "fmod",
    "lerp",
    "matmul_reduce_scatter",
    "max",
    "min",
    "nansum",
    "remainder",
    "repeat_interleave",
    "where",
]