mindspore 2.4.10__cp39-none-any.whl → 2.5.0__cp39-none-any.whl
This diff shows the changes between two package versions that were publicly released to a supported registry. It is provided for informational purposes only and reflects the packages as they appear in their public registries.
Potentially problematic release.
This version of mindspore might be problematic.
- mindspore/.commit_id +1 -1
- mindspore/Third_Party_Open_Source_Software_Notice +39 -0
- mindspore/__init__.py +8 -3
- mindspore/_akg/akg/composite/build_module.py +6 -2
- mindspore/_akg/akg/utils/kernel_exec.py +2 -2
- mindspore/_c_dataengine.cpython-39-aarch64-linux-gnu.so +0 -0
- mindspore/_c_expression.cpython-39-aarch64-linux-gnu.so +0 -0
- mindspore/_c_mindrecord.cpython-39-aarch64-linux-gnu.so +0 -0
- mindspore/_checkparam.py +0 -5
- mindspore/_extends/parallel_compile/akg_compiler/gen_custom_op_files.py +1 -1
- mindspore/_extends/parse/compile_config.py +64 -0
- mindspore/_extends/parse/deprecated/__init__.py +0 -0
- mindspore/_extends/parse/deprecated/deprecated_tensor_method.py +375 -0
- mindspore/_extends/parse/parser.py +23 -5
- mindspore/_extends/parse/standard_method.py +123 -27
- mindspore/_extends/pijit/pijit_func_white_list.py +1 -1
- mindspore/amp.py +7 -1
- mindspore/boost/boost_cell_wrapper.py +136 -41
- mindspore/common/__init__.py +3 -1
- mindspore/common/_register_for_tensor.py +0 -1
- mindspore/common/_stub_tensor.py +25 -4
- mindspore/common/_tensor_cpp_method.py +17 -0
- mindspore/common/_tensor_docs.py +6132 -0
- mindspore/common/api.py +98 -21
- mindspore/common/dtype.py +34 -34
- mindspore/common/dump.py +2 -1
- mindspore/common/file_system.py +8 -3
- mindspore/common/generator.py +2 -0
- mindspore/common/hook_handle.py +3 -1
- mindspore/common/initializer.py +3 -4
- mindspore/common/lazy_inline.py +8 -2
- mindspore/common/mindir_util.py +10 -2
- mindspore/common/parameter.py +31 -15
- mindspore/common/tensor.py +713 -1337
- mindspore/communication/__init__.py +1 -1
- mindspore/communication/_comm_helper.py +5 -0
- mindspore/communication/comm_func.py +215 -173
- mindspore/communication/management.py +23 -20
- mindspore/context.py +285 -191
- mindspore/dataset/__init__.py +23 -19
- mindspore/dataset/callback/ds_callback.py +2 -1
- mindspore/dataset/core/config.py +84 -3
- mindspore/dataset/engine/cache_admin.py +3 -3
- mindspore/dataset/engine/cache_client.py +5 -4
- mindspore/dataset/engine/datasets.py +192 -149
- mindspore/dataset/engine/datasets_audio.py +14 -0
- mindspore/dataset/engine/datasets_standard_format.py +11 -11
- mindspore/dataset/engine/datasets_text.py +38 -1
- mindspore/dataset/engine/datasets_user_defined.py +100 -66
- mindspore/dataset/engine/datasets_vision.py +81 -8
- mindspore/dataset/engine/iterators.py +281 -63
- mindspore/dataset/engine/obs/util.py +8 -0
- mindspore/dataset/engine/queue.py +40 -0
- mindspore/dataset/engine/samplers.py +26 -2
- mindspore/dataset/engine/serializer_deserializer.py +1 -1
- mindspore/dataset/engine/validators.py +43 -11
- mindspore/dataset/transforms/py_transforms_util.py +17 -0
- mindspore/dataset/transforms/transforms.py +29 -12
- mindspore/dataset/vision/validators.py +1 -2
- mindspore/device_context/__init__.py +21 -0
- mindspore/device_context/ascend/__init__.py +25 -0
- mindspore/device_context/ascend/device.py +72 -0
- mindspore/device_context/ascend/op_debug.py +94 -0
- mindspore/device_context/ascend/op_precision.py +193 -0
- mindspore/device_context/ascend/op_tuning.py +127 -0
- mindspore/device_context/cpu/__init__.py +25 -0
- mindspore/device_context/cpu/device.py +62 -0
- mindspore/device_context/cpu/op_tuning.py +43 -0
- mindspore/device_context/gpu/__init__.py +21 -0
- mindspore/device_context/gpu/device.py +70 -0
- mindspore/device_context/gpu/op_precision.py +67 -0
- mindspore/device_context/gpu/op_tuning.py +175 -0
- mindspore/device_manager.py +134 -0
- mindspore/experimental/llm_boost/__init__.py +1 -0
- mindspore/experimental/llm_boost/ascend_native/__init__.py +22 -0
- mindspore/experimental/llm_boost/ascend_native/llama_boost_ascend_native.py +211 -0
- mindspore/experimental/llm_boost/ascend_native/llm_boost.py +52 -0
- mindspore/experimental/llm_boost/atb/boost_base.py +2 -3
- mindspore/experimental/llm_boost/atb/llama_boost.py +6 -1
- mindspore/experimental/llm_boost/register.py +1 -0
- mindspore/experimental/optim/adadelta.py +26 -22
- mindspore/experimental/optim/adam.py +3 -0
- mindspore/experimental/optim/lr_scheduler.py +33 -24
- mindspore/experimental/optim/radam.py +33 -30
- mindspore/hal/device.py +28 -0
- mindspore/hal/event.py +17 -0
- mindspore/hal/memory.py +94 -3
- mindspore/hal/stream.py +91 -6
- mindspore/include/api/context.h +0 -1
- mindspore/lib/libavcodec.so.59 +0 -0
- mindspore/lib/libavdevice.so.59 +0 -0
- mindspore/lib/libavfilter.so.8 +0 -0
- mindspore/lib/libavformat.so.59 +0 -0
- mindspore/lib/libavutil.so.57 +0 -0
- mindspore/lib/libdnnl.so.2 +0 -0
- mindspore/lib/libmindspore_backend.so +0 -0
- mindspore/lib/libmindspore_common.so +0 -0
- mindspore/lib/libmindspore_core.so +0 -0
- mindspore/lib/libmindspore_gpr.so.15 +0 -0
- mindspore/lib/libmindspore_grpc++.so.1 +0 -0
- mindspore/lib/libmindspore_grpc.so.15 +0 -0
- mindspore/lib/libmindspore_ops.so +0 -0
- mindspore/lib/libmpi_adapter.so +0 -0
- mindspore/lib/libmpi_collective.so +0 -0
- mindspore/lib/libnnacl.so +0 -0
- mindspore/lib/libopencv_core.so.4.5 +0 -0
- mindspore/lib/libopencv_imgcodecs.so.4.5 +0 -0
- mindspore/lib/libopencv_imgproc.so.4.5 +0 -0
- mindspore/lib/libps_cache.so +0 -0
- mindspore/lib/libswresample.so.4 +0 -0
- mindspore/lib/libswscale.so.6 +0 -0
- mindspore/lib/plugin/ascend/custom_aicore_ops/op_impl/ai_core/tbe/config/ascend910_93/aic-ascend910_93-ops-info.json +2048 -0
- mindspore/lib/plugin/ascend/custom_aicpu_ops/op_impl/cpu/aicpu_kernel/impl/libcust_cpu_kernels.so +0 -0
- mindspore/lib/plugin/ascend/custom_aicpu_ops/op_proto/libcust_op_proto.so +0 -0
- mindspore/lib/plugin/ascend/custom_ascendc_910/op_api/lib/libcust_opapi.so +0 -0
- mindspore/lib/plugin/ascend/custom_ascendc_910/op_impl/ai_core/tbe/custom_ascendc_910_impl/dynamic/decoder_kv_cache.py +1 -1
- mindspore/lib/plugin/ascend/custom_ascendc_910/op_impl/ai_core/tbe/custom_ascendc_910_impl/dynamic/prompt_kv_cache.py +1 -1
- mindspore/lib/plugin/ascend/custom_ascendc_910/op_impl/ai_core/tbe/op_tiling/lib/linux/aarch64/libcust_opmaster_rt2.0.so +0 -0
- mindspore/lib/plugin/ascend/custom_ascendc_910/op_impl/ai_core/tbe/op_tiling/liboptiling.so +0 -0
- mindspore/lib/plugin/ascend/custom_ascendc_910/op_proto/lib/linux/aarch64/libcust_opsproto_rt2.0.so +0 -0
- mindspore/lib/plugin/ascend/custom_ascendc_910/version.info +1 -1
- mindspore/lib/plugin/ascend/custom_ascendc_910b/op_api/lib/libcust_opapi.so +0 -0
- mindspore/lib/plugin/ascend/custom_ascendc_910b/op_impl/ai_core/tbe/config/ascend910_93/aic-ascend910_93-ops-info.json +224 -0
- mindspore/lib/plugin/ascend/custom_ascendc_910b/op_impl/ai_core/tbe/custom_ascendc_910b_impl/dynamic/all_finite.py +1 -1
- mindspore/lib/plugin/ascend/custom_ascendc_910b/op_impl/ai_core/tbe/custom_ascendc_910b_impl/dynamic/decoder_kv_cache.py +1 -1
- mindspore/lib/plugin/ascend/custom_ascendc_910b/op_impl/ai_core/tbe/custom_ascendc_910b_impl/dynamic/prompt_kv_cache.py +1 -1
- mindspore/lib/plugin/ascend/custom_ascendc_910b/op_impl/ai_core/tbe/kernel/ascend910_93/all_finite/AllFinite_52f59e2a65d9b1bb002de35c2819754a.json +78 -0
- mindspore/lib/plugin/ascend/custom_ascendc_910b/op_impl/ai_core/tbe/kernel/ascend910_93/all_finite/AllFinite_52f59e2a65d9b1bb002de35c2819754a.o +0 -0
- mindspore/lib/plugin/ascend/custom_ascendc_910b/op_impl/ai_core/tbe/kernel/ascend910_93/all_finite/AllFinite_6b5e50e30256d85838d6ce83514df20f.json +78 -0
- mindspore/lib/plugin/ascend/custom_ascendc_910b/op_impl/ai_core/tbe/kernel/ascend910_93/all_finite/AllFinite_6b5e50e30256d85838d6ce83514df20f.o +0 -0
- mindspore/lib/plugin/ascend/custom_ascendc_910b/op_impl/ai_core/tbe/kernel/ascend910_93/all_finite/AllFinite_74e4ac02880d452e3308c94af273562e.json +78 -0
- mindspore/lib/plugin/ascend/custom_ascendc_910b/op_impl/ai_core/tbe/kernel/ascend910_93/all_finite/AllFinite_74e4ac02880d452e3308c94af273562e.o +0 -0
- mindspore/lib/plugin/ascend/custom_ascendc_910b/op_impl/ai_core/tbe/kernel/ascend910_93/decoder_kv_cache/DecoderKvCache_0d5520cc587ad44ce634bf3fbcffc272.json +156 -0
- mindspore/lib/plugin/ascend/custom_ascendc_910b/op_impl/ai_core/tbe/kernel/ascend910_93/decoder_kv_cache/DecoderKvCache_0d5520cc587ad44ce634bf3fbcffc272.o +0 -0
- mindspore/lib/plugin/ascend/custom_ascendc_910b/op_impl/ai_core/tbe/kernel/ascend910_93/decoder_kv_cache/DecoderKvCache_20390d30b3c4c0d23167ccca6c030c2b.json +156 -0
- mindspore/lib/plugin/ascend/custom_ascendc_910b/op_impl/ai_core/tbe/kernel/ascend910_93/decoder_kv_cache/DecoderKvCache_20390d30b3c4c0d23167ccca6c030c2b.o +0 -0
- mindspore/lib/plugin/ascend/custom_ascendc_910b/op_impl/ai_core/tbe/kernel/ascend910_93/decoder_kv_cache/DecoderKvCache_2d151f0b1d2db51faa2968d5b67544e2.json +156 -0
- mindspore/lib/plugin/ascend/custom_ascendc_910b/op_impl/ai_core/tbe/kernel/ascend910_93/decoder_kv_cache/DecoderKvCache_2d151f0b1d2db51faa2968d5b67544e2.o +0 -0
- mindspore/lib/plugin/ascend/custom_ascendc_910b/op_impl/ai_core/tbe/kernel/ascend910_93/decoder_kv_cache/DecoderKvCache_561690ec17cc1def3d2fcf68c1b07b56.json +156 -0
- mindspore/lib/plugin/ascend/custom_ascendc_910b/op_impl/ai_core/tbe/kernel/ascend910_93/decoder_kv_cache/DecoderKvCache_561690ec17cc1def3d2fcf68c1b07b56.o +0 -0
- mindspore/lib/plugin/ascend/custom_ascendc_910b/op_impl/ai_core/tbe/kernel/ascend910_93/decoder_kv_cache/DecoderKvCache_570f9aaa99e5e773b3dd0a33784363f4.json +156 -0
- mindspore/lib/plugin/ascend/custom_ascendc_910b/op_impl/ai_core/tbe/kernel/ascend910_93/decoder_kv_cache/DecoderKvCache_570f9aaa99e5e773b3dd0a33784363f4.o +0 -0
- mindspore/lib/plugin/ascend/custom_ascendc_910b/op_impl/ai_core/tbe/kernel/ascend910_93/decoder_kv_cache/DecoderKvCache_59668a0f0764afb98fda8ab9e84126f1.json +156 -0
- mindspore/lib/plugin/ascend/custom_ascendc_910b/op_impl/ai_core/tbe/kernel/ascend910_93/decoder_kv_cache/DecoderKvCache_59668a0f0764afb98fda8ab9e84126f1.o +0 -0
- mindspore/lib/plugin/ascend/custom_ascendc_910b/op_impl/ai_core/tbe/kernel/ascend910_93/decoder_kv_cache/DecoderKvCache_91d9833e4792b70b670e4e2b916abd86.json +156 -0
- mindspore/lib/plugin/ascend/custom_ascendc_910b/op_impl/ai_core/tbe/kernel/ascend910_93/decoder_kv_cache/DecoderKvCache_91d9833e4792b70b670e4e2b916abd86.o +0 -0
- mindspore/lib/plugin/ascend/custom_ascendc_910b/op_impl/ai_core/tbe/kernel/ascend910_93/decoder_kv_cache/DecoderKvCache_c74cdc5fef094383401856f8519504af.json +156 -0
- mindspore/lib/plugin/ascend/custom_ascendc_910b/op_impl/ai_core/tbe/kernel/ascend910_93/decoder_kv_cache/DecoderKvCache_c74cdc5fef094383401856f8519504af.o +0 -0
- mindspore/lib/plugin/ascend/custom_ascendc_910b/op_impl/ai_core/tbe/kernel/ascend910_93/prompt_kv_cache/PromptKvCache_0515c7b1a4cd614449e38c5e9a7e3f8d.json +165 -0
- mindspore/lib/plugin/ascend/custom_ascendc_910b/op_impl/ai_core/tbe/kernel/ascend910_93/prompt_kv_cache/PromptKvCache_0515c7b1a4cd614449e38c5e9a7e3f8d.o +0 -0
- mindspore/lib/plugin/ascend/custom_ascendc_910b/op_impl/ai_core/tbe/kernel/ascend910_93/prompt_kv_cache/PromptKvCache_09f22d898d6358c91e7c4fc48bac48e7.json +165 -0
- mindspore/lib/plugin/ascend/custom_ascendc_910b/op_impl/ai_core/tbe/kernel/ascend910_93/prompt_kv_cache/PromptKvCache_09f22d898d6358c91e7c4fc48bac48e7.o +0 -0
- mindspore/lib/plugin/ascend/custom_ascendc_910b/op_impl/ai_core/tbe/kernel/ascend910_93/prompt_kv_cache/PromptKvCache_0cb9a6f894b925250227136e5aab7061.json +165 -0
- mindspore/lib/plugin/ascend/custom_ascendc_910b/op_impl/ai_core/tbe/kernel/ascend910_93/prompt_kv_cache/PromptKvCache_0cb9a6f894b925250227136e5aab7061.o +0 -0
- mindspore/lib/plugin/ascend/custom_ascendc_910b/op_impl/ai_core/tbe/kernel/ascend910_93/prompt_kv_cache/PromptKvCache_2fa8702ffd7ca85e9e194f62644415d5.json +165 -0
- mindspore/lib/plugin/ascend/custom_ascendc_910b/op_impl/ai_core/tbe/kernel/ascend910_93/prompt_kv_cache/PromptKvCache_2fa8702ffd7ca85e9e194f62644415d5.o +0 -0
- mindspore/lib/plugin/ascend/custom_ascendc_910b/op_impl/ai_core/tbe/kernel/ascend910_93/prompt_kv_cache/PromptKvCache_570b62f187dfd439b64613d881deedb7.json +165 -0
- mindspore/lib/plugin/ascend/custom_ascendc_910b/op_impl/ai_core/tbe/kernel/ascend910_93/prompt_kv_cache/PromptKvCache_570b62f187dfd439b64613d881deedb7.o +0 -0
- mindspore/lib/plugin/ascend/custom_ascendc_910b/op_impl/ai_core/tbe/kernel/ascend910_93/prompt_kv_cache/PromptKvCache_585218c11411ff84709b9e725b66c435.json +165 -0
- mindspore/lib/plugin/ascend/custom_ascendc_910b/op_impl/ai_core/tbe/kernel/ascend910_93/prompt_kv_cache/PromptKvCache_585218c11411ff84709b9e725b66c435.o +0 -0
- mindspore/lib/plugin/ascend/custom_ascendc_910b/op_impl/ai_core/tbe/kernel/ascend910_93/prompt_kv_cache/PromptKvCache_5c9365ccde170b358c5b126d69dae13e.json +165 -0
- mindspore/lib/plugin/ascend/custom_ascendc_910b/op_impl/ai_core/tbe/kernel/ascend910_93/prompt_kv_cache/PromptKvCache_5c9365ccde170b358c5b126d69dae13e.o +0 -0
- mindspore/lib/plugin/ascend/custom_ascendc_910b/op_impl/ai_core/tbe/kernel/ascend910_93/prompt_kv_cache/PromptKvCache_6d97c45b7c43bc16fcff8baa5dacac4e.json +165 -0
- mindspore/lib/plugin/ascend/custom_ascendc_910b/op_impl/ai_core/tbe/kernel/ascend910_93/prompt_kv_cache/PromptKvCache_6d97c45b7c43bc16fcff8baa5dacac4e.o +0 -0
- mindspore/lib/plugin/ascend/custom_ascendc_910b/op_impl/ai_core/tbe/kernel/config/ascend910_93/all_finite.json +139 -0
- mindspore/lib/plugin/ascend/custom_ascendc_910b/op_impl/ai_core/tbe/kernel/config/ascend910_93/binary_info_config.json +361 -0
- mindspore/lib/plugin/ascend/custom_ascendc_910b/op_impl/ai_core/tbe/kernel/config/ascend910_93/decoder_kv_cache.json +892 -0
- mindspore/lib/plugin/ascend/custom_ascendc_910b/op_impl/ai_core/tbe/kernel/config/ascend910_93/prompt_kv_cache.json +892 -0
- mindspore/lib/plugin/ascend/custom_ascendc_910b/op_impl/ai_core/tbe/op_tiling/lib/linux/aarch64/libcust_opmaster_rt2.0.so +0 -0
- mindspore/lib/plugin/ascend/custom_ascendc_910b/op_impl/ai_core/tbe/op_tiling/liboptiling.so +0 -0
- mindspore/lib/plugin/ascend/custom_ascendc_910b/op_proto/lib/linux/aarch64/libcust_opsproto_rt2.0.so +0 -0
- mindspore/lib/plugin/ascend/custom_ascendc_910b/version.info +1 -1
- mindspore/lib/plugin/ascend/custom_compiler/setup.py +1 -1
- mindspore/lib/plugin/ascend/libascend_collective.so +0 -0
- mindspore/lib/plugin/ascend/libdvpp_utils.so +0 -0
- mindspore/lib/plugin/ascend/liblowlatency_collective.so +0 -0
- mindspore/lib/plugin/ascend/libmindspore_cpu_kernels.so +0 -0
- mindspore/lib/plugin/ascend/libmindspore_internal_kernels.so +0 -0
- mindspore/lib/plugin/ascend/libms_ascend_native_boost.so +0 -0
- mindspore/lib/plugin/ascend/libms_atb_boost.so +0 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/asdops/device/ascend910b/bin/ascend910b.bin +957 -955
- mindspore/lib/plugin/ascend/ms_kernels_internal/asdops/lib/libasdops_static.a +0 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/asdops/lib/liblcal_static.a +0 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/{acme/include/base_type.h → base_type.h} +25 -20
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/{cast/cast_tiling.h → internal.h} +6 -4
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/internal_op.h +114 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/llm/boost_kernel.h +70 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/llm/llama_impl.h +85 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/llm/model_interface.h +52 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/llm/tensor.h +81 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/op_creator.h +123 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/op_param.h +155 -110
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/{acme/include/tiling_info.h → tiling_info.h} +12 -9
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/tiling_utils.h +178 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/lib/libadd_layer_norm_op.so +0 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/lib/libadd_rms_norm_op.so +0 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/lib/libadd_rms_norm_quant_op.so +0 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/lib/libapply_rotary_pos_emb_310p_op.so +0 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/lib/libapply_rotary_pos_emb_op.so +0 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/lib/libcast_op.so +0 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/lib/libcompare_op.so +0 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/lib/libgelu_op.so +0 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/lib/libllama_op.so +0 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/lib/libmatmul_op.so +0 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/lib/libms_kernels_internal.so +0 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/lib/libms_optiling.so +0 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/lib/libmulti_weight_matmul_kernel_op.so +0 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/lib/libreshape_and_cache_nz_op.so +0 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/lib/libreshape_and_cache_op.so +0 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/lib/librms_norm_op.so +0 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/op_kernels/ascend310p/object_kernels/internal_pp_matmul_f16_nz/internal_pp_matmul_f16_nz.o +0 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/op_kernels/ascend310p/object_kernels/internal_pp_matmul_f16_nz/internal_pp_matmul_f16_nz_0.o +0 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/op_kernels/ascend310p/object_kernels/internal_pp_matmul_i8_nz_compress/internal_pp_matmul_i8_nz_compress.o +0 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/op_kernels/ascend310p/object_kernels/internal_pp_matmul_i8_nz_compress/internal_pp_matmul_i8_nz_compress_0.o +0 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/op_kernels/ascend310p/object_kernels/internal_pp_matmul_int8_nz/internal_pp_matmul_int8_nz.o +0 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/op_kernels/ascend310p/object_kernels/internal_pp_matmul_int8_nz/internal_pp_matmul_int8_nz_0.o +0 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/op_kernels/ascend310p/so_kernels/libadd_rms_norm_quant_ascend310p.so +0 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/{lib/libapply_rotary_pos_emb_310p_impl.so → op_kernels/ascend310p/so_kernels/libapply_rotary_pos_emb_310p_ascend310p.so} +0 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/op_kernels/ascend310p/so_kernels/libcast_ascend310p.so +0 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/op_kernels/ascend310p/so_kernels/libcompare_ascend310p.so +0 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/op_kernels/ascend310p/so_kernels/libgelu_ascend310p.so +0 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/op_kernels/ascend310p/so_kernels/libmatmul_ascend310p.so +0 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/op_kernels/ascend310p/so_kernels/libreshape_and_cache_nz_ascend310p.so +0 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/op_kernels/ascend910b/hphol_kernels/add_rms_norm_dynamic_quant/AddRmsNormDynamicQuant_4b60f88cdc28b25a36bad2d8b0a88092.json +163 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/op_kernels/ascend910b/hphol_kernels/add_rms_norm_dynamic_quant/AddRmsNormDynamicQuant_4b60f88cdc28b25a36bad2d8b0a88092.o +0 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/op_kernels/ascend910b/hphol_kernels/add_rms_norm_dynamic_quant/AddRmsNormDynamicQuant_cde61da2bd6fededcb1ba310a6ad16ee.json +163 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/op_kernels/ascend910b/hphol_kernels/add_rms_norm_dynamic_quant/AddRmsNormDynamicQuant_cde61da2bd6fededcb1ba310a6ad16ee.o +0 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/op_kernels/ascend910b/object_kernels/flash_attention_score/flash_attention_score_bf16_bnsd_full_mix.o +0 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/op_kernels/ascend910b/object_kernels/flash_attention_score/flash_attention_score_bf16_bnsd_tri_mix.o +0 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/op_kernels/ascend910b/object_kernels/flash_attention_score/flash_attention_score_bf16_bsh_full_mix.o +0 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/op_kernels/ascend910b/object_kernels/flash_attention_score/flash_attention_score_bf16_bsh_tri_mix.o +0 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/op_kernels/ascend910b/object_kernels/flash_attention_score/flash_attention_score_fp16_bnsd_full_mix.o +0 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/op_kernels/ascend910b/object_kernels/flash_attention_score/flash_attention_score_fp16_bnsd_tri_mix.o +0 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/op_kernels/ascend910b/object_kernels/flash_attention_score/flash_attention_score_fp16_bsh_full_mix.o +0 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/op_kernels/ascend910b/object_kernels/flash_attention_score/flash_attention_score_fp16_bsh_tri_mix.o +0 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/op_kernels/ascend910b/object_kernels/internal_matmul_postfusion_mix/internal_matmul_postfusion_mix.o +0 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/op_kernels/ascend910b/object_kernels/internal_matmul_postfusion_mix/internal_matmul_postfusion_mix_mix_aic_0.o +0 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/op_kernels/ascend910b/object_kernels/internal_matmul_postfusion_mix/internal_matmul_postfusion_mix_mix_aiv_0.o +0 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/op_kernels/ascend910b/object_kernels/internal_multi_weight_matmul_postfusion_mix/internal_multi_weight_matmul_postfusion_mix.o +0 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/op_kernels/ascend910b/object_kernels/internal_multi_weight_matmul_postfusion_mix/internal_multi_weight_matmul_postfusion_mix_mix_aic_0.o +0 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/op_kernels/ascend910b/object_kernels/internal_multi_weight_matmul_postfusion_mix/internal_multi_weight_matmul_postfusion_mix_mix_aiv_0.o +0 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/op_kernels/ascend910b/{matmul_add_rmsnorm → object_kernels/matmul_add_rmsnorm}/matmul_add_rmsnorm_bf16_bf16.o +0 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/op_kernels/ascend910b/{matmul_add_rmsnorm → object_kernels/matmul_add_rmsnorm}/matmul_add_rmsnorm_bf16_fp16.o +0 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/op_kernels/ascend910b/{matmul_add_rmsnorm → object_kernels/matmul_add_rmsnorm}/matmul_add_rmsnorm_bf16_fp32.o +0 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/op_kernels/ascend910b/{matmul_add_rmsnorm → object_kernels/matmul_add_rmsnorm}/matmul_add_rmsnorm_fp16_bf16.o +0 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/op_kernels/ascend910b/{matmul_add_rmsnorm → object_kernels/matmul_add_rmsnorm}/matmul_add_rmsnorm_fp16_fp16.o +0 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/op_kernels/ascend910b/{matmul_add_rmsnorm → object_kernels/matmul_add_rmsnorm}/matmul_add_rmsnorm_fp16_fp32.o +0 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/op_kernels/ascend910b/object_kernels/paged_attention_v2/paged_attention_v2.o +0 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/op_kernels/ascend910b/object_kernels/paged_attention_v2/paged_attention_v2_mix_aic_0.o +0 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/op_kernels/ascend910b/object_kernels/paged_attention_v2/paged_attention_v2_mix_aiv_0.o +0 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/{lib/libadd_layer_norm_impl.so → op_kernels/ascend910b/so_kernels/libadd_layer_norm_ascend910b.so} +0 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/{lib/libadd_rms_norm_impl.so → op_kernels/ascend910b/so_kernels/libadd_rms_norm_ascend910b.so} +0 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/op_kernels/ascend910b/so_kernels/libadd_rms_norm_quant_ascend910b.so +0 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/{lib/libapply_rotary_pos_emb_impl.so → op_kernels/ascend910b/so_kernels/libapply_rotary_pos_emb_ascend910b.so} +0 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/{lib/libcast_impl.so → op_kernels/ascend910b/so_kernels/libcast_ascend910b.so} +0 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/{lib/libnot_equal_impl.so → op_kernels/ascend910b/so_kernels/libcompare_ascend910b.so} +0 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/{lib/libgelu_impl.so → op_kernels/ascend910b/so_kernels/libgelu_ascend910b.so} +0 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/op_kernels/ascend910b/so_kernels/libllama_ascend910b.so +0 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/{lib/libmatmul_impl.so → op_kernels/ascend910b/so_kernels/libmatmul_ascend910b.so} +0 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/{lib/libmulti_weight_matmul_kernel_impl.so → op_kernels/ascend910b/so_kernels/libmulti_weight_matmul_kernel_ascend910b.so} +0 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/{lib/libreshape_and_cache_impl.so → op_kernels/ascend910b/so_kernels/libreshape_and_cache_ascend910b.so} +0 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/{lib/librms_norm_impl.so → op_kernels/ascend910b/so_kernels/librms_norm_ascend910b.so} +0 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/lccl/lib/liblccl_wrapper.so +0 -0
- mindspore/lib/plugin/libmindspore_ascend.so.2 +0 -0
- mindspore/log.py +12 -0
- mindspore/mindrecord/__init__.py +1 -1
- mindspore/mindrecord/config.py +17 -316
- mindspore/mindrecord/filereader.py +1 -9
- mindspore/mindrecord/filewriter.py +5 -15
- mindspore/mindrecord/mindpage.py +1 -9
- mindspore/mint/__init__.py +824 -218
- mindspore/mint/distributed/__init__.py +66 -4
- mindspore/mint/distributed/distributed.py +2594 -44
- mindspore/mint/linalg/__init__.py +6 -0
- mindspore/mint/nn/__init__.py +473 -14
- mindspore/mint/nn/functional.py +486 -11
- mindspore/mint/nn/layer/__init__.py +17 -4
- mindspore/mint/nn/layer/_functions.py +330 -0
- mindspore/mint/nn/layer/activation.py +169 -1
- mindspore/mint/nn/layer/basic.py +123 -0
- mindspore/mint/nn/layer/conv.py +727 -0
- mindspore/mint/nn/layer/normalization.py +215 -19
- mindspore/mint/nn/layer/padding.py +797 -0
- mindspore/mint/nn/layer/pooling.py +170 -0
- mindspore/mint/optim/__init__.py +2 -1
- mindspore/mint/optim/adam.py +223 -0
- mindspore/mint/optim/adamw.py +26 -19
- mindspore/mint/special/__init__.py +2 -1
- mindspore/multiprocessing/__init__.py +5 -0
- mindspore/nn/cell.py +126 -19
- mindspore/nn/dynamic_lr.py +2 -1
- mindspore/nn/layer/activation.py +6 -6
- mindspore/nn/layer/basic.py +35 -25
- mindspore/nn/layer/channel_shuffle.py +3 -3
- mindspore/nn/layer/embedding.py +3 -3
- mindspore/nn/layer/normalization.py +8 -7
- mindspore/nn/layer/padding.py +4 -3
- mindspore/nn/layer/pooling.py +47 -13
- mindspore/nn/layer/rnn_cells.py +1 -1
- mindspore/nn/layer/rnns.py +2 -1
- mindspore/nn/layer/timedistributed.py +5 -5
- mindspore/nn/layer/transformer.py +48 -26
- mindspore/nn/learning_rate_schedule.py +5 -3
- mindspore/nn/loss/loss.py +31 -36
- mindspore/nn/optim/ada_grad.py +1 -0
- mindspore/nn/optim/adadelta.py +2 -2
- mindspore/nn/optim/adam.py +1 -1
- mindspore/nn/optim/lars.py +1 -4
- mindspore/nn/optim/optimizer.py +1 -1
- mindspore/nn/optim/rprop.py +2 -2
- mindspore/nn/optim/thor.py +2 -1
- mindspore/nn/utils/init.py +13 -11
- mindspore/nn/wrap/cell_wrapper.py +4 -6
- mindspore/nn/wrap/loss_scale.py +3 -4
- mindspore/numpy/array_creations.py +60 -62
- mindspore/numpy/array_ops.py +148 -143
- mindspore/numpy/logic_ops.py +41 -42
- mindspore/numpy/math_ops.py +361 -359
- mindspore/numpy/utils.py +16 -16
- mindspore/numpy/utils_const.py +4 -4
- mindspore/ops/__init__.py +2 -1
- mindspore/ops/_grad_experimental/grad_comm_ops.py +94 -13
- mindspore/ops/_grad_experimental/grad_debug_ops.py +6 -1
- mindspore/ops/_grad_experimental/grad_inner_ops.py +9 -0
- mindspore/ops/_grad_experimental/grad_math_ops.py +2 -1
- mindspore/ops/_op_impl/cpu/__init__.py +1 -0
- mindspore/ops/_op_impl/cpu/raise_op.py +28 -0
- mindspore/ops/_vmap/vmap_array_ops.py +20 -19
- mindspore/ops/_vmap/vmap_base.py +0 -2
- mindspore/ops/_vmap/vmap_grad_nn_ops.py +19 -13
- mindspore/ops/_vmap/vmap_math_ops.py +11 -9
- mindspore/ops/_vmap/vmap_nn_ops.py +20 -34
- mindspore/ops/auto_generate/cpp_create_prim_instance_helper.py +149 -12
- mindspore/ops/auto_generate/gen_arg_handler.py +0 -61
- mindspore/ops/auto_generate/gen_extend_func.py +554 -60
- mindspore/ops/auto_generate/gen_ops_def.py +1621 -115
- mindspore/ops/auto_generate/gen_ops_prim.py +8024 -3409
- mindspore/ops/auto_generate/pyboost_inner_prim.py +183 -79
- mindspore/ops/composite/base.py +1 -1
- mindspore/ops/composite/multitype_ops/_compile_utils.py +229 -30
- mindspore/ops/composite/multitype_ops/pow_impl.py +0 -29
- mindspore/ops/function/__init__.py +12 -0
- mindspore/ops/function/array_func.py +561 -159
- mindspore/ops/function/clip_func.py +64 -0
- mindspore/ops/function/debug_func.py +28 -20
- mindspore/ops/function/image_func.py +1 -1
- mindspore/ops/function/linalg_func.py +5 -4
- mindspore/ops/function/math_func.py +1659 -290
- mindspore/ops/function/nn_func.py +988 -317
- mindspore/ops/function/parameter_func.py +3 -56
- mindspore/ops/function/random_func.py +243 -33
- mindspore/ops/function/sparse_unary_func.py +1 -1
- mindspore/ops/functional.py +18 -5
- mindspore/ops/functional_overload.py +897 -0
- mindspore/ops/operations/__init__.py +3 -2
- mindspore/ops/operations/_embedding_cache_ops.py +4 -4
- mindspore/ops/operations/_grad_ops.py +2 -34
- mindspore/ops/operations/_infer_ops.py +2 -1
- mindspore/ops/operations/_inner_ops.py +38 -8
- mindspore/ops/operations/array_ops.py +45 -303
- mindspore/ops/operations/comm_ops.py +19 -16
- mindspore/ops/operations/custom_ops.py +11 -55
- mindspore/ops/operations/debug_ops.py +42 -47
- mindspore/ops/operations/inner_ops.py +6 -4
- mindspore/ops/operations/linalg_ops.py +3 -2
- mindspore/ops/operations/manually_defined/ops_def.py +185 -104
- mindspore/ops/operations/math_ops.py +11 -216
- mindspore/ops/operations/nn_ops.py +146 -308
- mindspore/ops/primitive.py +23 -21
- mindspore/ops/tensor_method.py +1669 -0
- mindspore/ops_generate/aclnn_kernel_register_auto_cc_generator.py +110 -0
- mindspore/ops_generate/add_tensor_docs_generator.py +54 -0
- mindspore/ops_generate/arg_handler.py +0 -61
- mindspore/ops_generate/auto_grad_impl_cc_generator.py +135 -0
- mindspore/ops_generate/auto_grad_reg_cc_generator.py +93 -0
- mindspore/ops_generate/base_generator.py +11 -0
- mindspore/ops_generate/cpp_create_prim_instance_helper_generator.py +108 -0
- mindspore/ops_generate/functional_map_cpp_generator.py +491 -0
- mindspore/ops_generate/functional_overload_py_generator.py +110 -0
- mindspore/ops_generate/functions_cc_generator.py +233 -0
- mindspore/ops_generate/gen_aclnn_implement.py +110 -114
- mindspore/ops_generate/gen_constants.py +157 -3
- mindspore/ops_generate/gen_ops.py +245 -990
- mindspore/ops_generate/gen_pyboost_func.py +97 -998
- mindspore/ops_generate/gen_utils.py +119 -33
- mindspore/ops_generate/lite_ops_cpp_generator.py +155 -0
- mindspore/ops_generate/op_api_proto.py +206 -0
- mindspore/ops_generate/op_def_py_generator.py +131 -0
- mindspore/ops_generate/op_prim_py_generator.py +480 -0
- mindspore/ops_generate/op_proto.py +373 -108
- mindspore/ops_generate/op_template_parser.py +436 -0
- mindspore/ops_generate/ops_def_cc_generator.py +288 -0
- mindspore/ops_generate/ops_def_h_generator.py +74 -0
- mindspore/ops_generate/ops_name_h_generator.py +68 -0
- mindspore/ops_generate/ops_primitive_h_generator.py +81 -0
- mindspore/ops_generate/pyboost_functions_cpp_generator.py +370 -0
- mindspore/ops_generate/pyboost_functions_h_generator.py +68 -0
- mindspore/ops_generate/pyboost_functions_py_generator.py +148 -0
- mindspore/ops_generate/pyboost_grad_function_cpp_generator.py +154 -0
- mindspore/ops_generate/pyboost_inner_prim_generator.py +131 -0
- mindspore/ops_generate/pyboost_native_grad_functions_generator.py +268 -0
- mindspore/ops_generate/pyboost_op_cpp_code_generator.py +851 -0
- mindspore/ops_generate/pyboost_overload_functions_cpp_generator.py +344 -0
- mindspore/ops_generate/pyboost_utils.py +92 -33
- mindspore/ops_generate/template.py +294 -44
- mindspore/ops_generate/tensor_func_reg_cpp_generator.py +422 -0
- mindspore/parallel/__init__.py +3 -3
- mindspore/parallel/_auto_parallel_context.py +24 -33
- mindspore/parallel/_parallel_serialization.py +13 -2
- mindspore/parallel/_utils.py +4 -1
- mindspore/parallel/algo_parameter_config.py +1 -1
- mindspore/parallel/checkpoint_transform.py +44 -0
- mindspore/parallel/cluster/process_entity/_api.py +131 -37
- mindspore/parallel/cluster/process_entity/_utils.py +41 -6
- mindspore/parallel/cluster/run.py +20 -3
- mindspore/parallel/parameter_broadcast.py +1 -1
- mindspore/parallel/shard.py +3 -0
- mindspore/parallel/transform_safetensors.py +119 -253
- mindspore/profiler/__init__.py +17 -4
- mindspore/profiler/analysis/__init__.py +0 -0
- mindspore/profiler/analysis/parser/__init__.py +0 -0
- mindspore/profiler/analysis/parser/ascend_cann_parser.py +166 -0
- mindspore/profiler/analysis/parser/base_parser.py +158 -0
- mindspore/profiler/analysis/parser/framework_cann_relation_parser.py +45 -0
- mindspore/profiler/analysis/parser/ms_framework_parser.py +142 -0
- mindspore/profiler/analysis/parser/ms_minddata_parser.py +145 -0
- mindspore/profiler/analysis/parser/timeline_assembly_factory/__init__.py +0 -0
- mindspore/profiler/analysis/parser/timeline_assembly_factory/ascend_timeline_assembler.py +261 -0
- mindspore/profiler/analysis/parser/timeline_assembly_factory/base_timeline_assembler.py +40 -0
- mindspore/profiler/analysis/parser/timeline_assembly_factory/trace_view_container.py +84 -0
- mindspore/profiler/analysis/parser/timeline_creator/__init__.py +0 -0
- mindspore/profiler/analysis/parser/timeline_creator/base_timeline_creator.py +44 -0
- mindspore/profiler/analysis/parser/timeline_creator/cpu_op_timeline_creator.py +90 -0
- mindspore/profiler/analysis/parser/timeline_creator/fwk_timeline_creator.py +76 -0
- mindspore/profiler/analysis/parser/timeline_creator/msprof_timeline_creator.py +103 -0
- mindspore/profiler/analysis/parser/timeline_creator/scope_layer_timeline_creator.py +134 -0
- mindspore/profiler/analysis/parser/timeline_event/__init__.py +0 -0
- mindspore/profiler/analysis/parser/timeline_event/base_event.py +233 -0
- mindspore/profiler/analysis/parser/timeline_event/cpu_op_event.py +47 -0
- mindspore/profiler/analysis/parser/timeline_event/flow_event.py +36 -0
- mindspore/profiler/analysis/parser/timeline_event/fwk_event.py +260 -0
- mindspore/profiler/analysis/parser/timeline_event/msprof_event.py +73 -0
- mindspore/profiler/analysis/parser/timeline_event/scope_layer_event.py +53 -0
- mindspore/profiler/analysis/parser/timeline_event/timeline_event_pool.py +146 -0
- mindspore/profiler/analysis/task_manager.py +131 -0
- mindspore/profiler/analysis/time_converter.py +84 -0
- mindspore/profiler/analysis/viewer/__init__.py +0 -0
- mindspore/profiler/analysis/viewer/ascend_communication_viewer.py +333 -0
- mindspore/profiler/analysis/viewer/ascend_integrate_viewer.py +87 -0
- mindspore/profiler/analysis/viewer/ascend_kernel_details_viewer.py +252 -0
- mindspore/profiler/analysis/viewer/ascend_memory_viewer.py +313 -0
- mindspore/profiler/analysis/viewer/ascend_op_memory_viewer.py +322 -0
- mindspore/profiler/analysis/viewer/ascend_step_trace_time_viewer.py +265 -0
- mindspore/profiler/analysis/viewer/ascend_timeline_viewer.py +58 -0
- mindspore/profiler/analysis/viewer/base_viewer.py +26 -0
- mindspore/profiler/analysis/viewer/ms_dataset_viewer.py +97 -0
- mindspore/profiler/analysis/viewer/ms_minddata_viewer.py +581 -0
- mindspore/profiler/analysis/work_flow.py +73 -0
- mindspore/profiler/common/ascend_msprof_exporter.py +138 -0
- mindspore/profiler/common/command_executor.py +90 -0
- mindspore/profiler/common/constant.py +174 -3
- mindspore/profiler/common/file_manager.py +208 -0
- mindspore/profiler/common/log.py +130 -0
- mindspore/profiler/common/msprof_cmd_tool.py +202 -0
- mindspore/profiler/common/path_manager.py +371 -0
- mindspore/profiler/common/process_bar.py +168 -0
- mindspore/profiler/common/process_pool.py +9 -3
- mindspore/profiler/common/profiler_context.py +476 -0
- mindspore/profiler/common/profiler_info.py +304 -0
- mindspore/profiler/common/profiler_output_path.py +284 -0
- mindspore/profiler/common/profiler_parameters.py +210 -0
- mindspore/profiler/common/profiler_path_manager.py +120 -0
- mindspore/profiler/common/record_function.py +76 -0
- mindspore/profiler/common/tlv_decoder.py +76 -0
- mindspore/profiler/common/util.py +75 -2
- mindspore/profiler/dynamic_profiler.py +270 -37
- mindspore/profiler/envprofiler.py +138 -0
- mindspore/profiler/mstx.py +199 -0
- mindspore/profiler/platform/__init__.py +21 -0
- mindspore/profiler/platform/base_profiler.py +40 -0
- mindspore/profiler/platform/cpu_profiler.py +124 -0
- mindspore/profiler/platform/gpu_profiler.py +74 -0
- mindspore/profiler/platform/npu_profiler.py +309 -0
- mindspore/profiler/profiler.py +580 -93
- mindspore/profiler/profiler_action_controller.py +187 -0
- mindspore/profiler/profiler_interface.py +114 -0
- mindspore/profiler/schedule.py +208 -0
- mindspore/rewrite/api/symbol_tree.py +1 -2
- mindspore/run_check/_check_version.py +2 -6
- mindspore/runtime/__init__.py +37 -0
- mindspore/runtime/device.py +27 -0
- mindspore/runtime/event.py +209 -0
- mindspore/runtime/executor.py +148 -0
- mindspore/runtime/memory.py +392 -0
- mindspore/runtime/stream.py +460 -0
- mindspore/runtime/thread_bind_core.py +401 -0
- mindspore/train/__init__.py +2 -2
- mindspore/train/_utils.py +53 -18
- mindspore/train/amp.py +8 -4
- mindspore/train/callback/_checkpoint.py +32 -18
- mindspore/train/callback/_early_stop.py +1 -1
- mindspore/train/callback/_flops_collector.py +105 -69
- mindspore/train/callback/_history.py +1 -1
- mindspore/train/callback/_summary_collector.py +44 -6
- mindspore/train/callback/_tft_register.py +31 -10
- mindspore/train/dataset_helper.py +11 -11
- mindspore/train/metrics/precision.py +4 -5
- mindspore/train/mind_ir_pb2.py +167 -46
- mindspore/train/model.py +13 -15
- mindspore/train/serialization.py +462 -76
- mindspore/train/summary/summary_record.py +1 -2
- mindspore/train/train_thor/model_thor.py +1 -1
- mindspore/utils/__init__.py +4 -2
- mindspore/utils/bin/dataset-cache +0 -0
- mindspore/utils/bin/dataset-cache-server +0 -0
- mindspore/utils/dryrun.py +138 -0
- mindspore/utils/runtime_execution_order_check.py +550 -0
- mindspore/version.py +1 -1
- {mindspore-2.4.10.dist-info → mindspore-2.5.0.dist-info}/METADATA +2 -3
- {mindspore-2.4.10.dist-info → mindspore-2.5.0.dist-info}/RECORD +523 -457
- {mindspore-2.4.10.dist-info → mindspore-2.5.0.dist-info}/entry_points.txt +1 -1
- mindspore/_data_dump.cpython-39-aarch64-linux-gnu.so +0 -0
- mindspore/bin/cache_admin +0 -0
- mindspore/bin/cache_server +0 -0
- mindspore/common/_tensor_overload.py +0 -139
- mindspore/lib/libmindspore_np_dtype.so +0 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/acme/include/acme.h +0 -24
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/acme/include/acme_op.h +0 -82
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/acme/include/op_creator.h +0 -113
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/acme/include/op_param.h +0 -193
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/acme/src/core/dtype_registry.h +0 -90
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/acme/src/core/kernel_register.h +0 -46
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/acme/src/core/platform/platform_configs.h +0 -89
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/acme/src/core/platform/rt_funcs.h +0 -135
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/acme/src/ops/host_src/add_layer_norm_op.h +0 -60
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/acme/src/ops/host_src/add_rms_norm_op.h +0 -50
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/acme/src/ops/host_src/add_rms_norm_quant_op.h +0 -50
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/acme/src/ops/host_src/apply_rotary_pos_emb_nz_op.h +0 -42
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/acme/src/ops/host_src/apply_rotary_pos_emb_op.h +0 -55
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/acme/src/ops/host_src/asd_elewise_op.h +0 -34
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/acme/src/ops/host_src/asd_only_ops.h +0 -94
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/acme/src/ops/host_src/asd_op_base.h +0 -97
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/acme/src/ops/host_src/cast_op.h +0 -52
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/acme/src/ops/host_src/flash_attention_score_op.h +0 -97
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/acme/src/ops/host_src/gelu_op.h +0 -44
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/acme/src/ops/host_src/matmul_add_rmsnorm_op.h +0 -73
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/acme/src/ops/host_src/matmul_op.h +0 -108
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/acme/src/ops/host_src/multi_impls_op.h +0 -64
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/acme/src/ops/host_src/multi_weight_matmul_op.h +0 -91
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/acme/src/ops/host_src/paged_attention_op.h +0 -99
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/acme/src/ops/host_src/reshape_and_cache_nz_op.h +0 -44
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/acme/src/ops/host_src/reshape_and_cache_op.h +0 -44
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/acme/src/ops/host_src/rms_norm_op.h +0 -64
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/acme/src/utils/asd_utils.h +0 -179
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/acme/src/utils/comm_utils.h +0 -69
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/acme/src/utils/profiling_util.h +0 -366
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/add/add_impl.h +0 -56
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/add/kernel/add.h +0 -21
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/add/tiling/add_tiling.h +0 -43
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/apply_rotary_pos_emb/apply_rotary_pos_emb_impl.h +0 -46
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/apply_rotary_pos_emb/kernel/apply_rotary_pos_emb.h +0 -23
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/apply_rotary_pos_emb/kernel/apply_rotary_pos_emb_base.h +0 -456
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/apply_rotary_pos_emb/kernel/apply_rotary_pos_emb_bf16.h +0 -217
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/apply_rotary_pos_emb/kernel/apply_rotary_pos_emb_fp.h +0 -391
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/apply_rotary_pos_emb/kernel/apply_rotary_pos_emb_fp16.h +0 -126
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/apply_rotary_pos_emb/kernel/apply_rotary_pos_emb_fp32.h +0 -230
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/apply_rotary_pos_emb/kernel/apply_rotary_pos_emb_tiling.h +0 -43
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/apply_rotary_pos_emb/kernel/apply_rotary_pos_emb_value.h +0 -27
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/apply_rotary_pos_emb_nz/apply_rotary_pos_emb_nz_impl.h +0 -34
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/apply_rotary_pos_emb_nz/kernel/apply_rotary_pos_emb_nz.h +0 -23
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/apply_rotary_pos_emb_nz/kernel/apply_rotary_pos_emb_nz_base.h +0 -460
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/apply_rotary_pos_emb_nz/kernel/apply_rotary_pos_emb_nz_fp16.h +0 -116
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/apply_rotary_pos_emb_nz/kernel/apply_rotary_pos_emb_nz_fp32.h +0 -230
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/apply_rotary_pos_emb_nz/kernel/apply_rotary_pos_emb_nz_tiling.h +0 -43
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/apply_rotary_pos_emb_nz/kernel/apply_rotary_pos_emb_nz_value.h +0 -27
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/asdop/asd_op_impl.h +0 -74
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/backend_param.h +0 -74
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/cast/cast_impl.h +0 -48
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/cast/kernel/cast_kernel.h +0 -21
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/compare/compare_impl.h +0 -55
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/compare/compare_tiling.h +0 -27
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/compare/kernel/compare_kernel.h +0 -23
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/elewise_binary/and_impl.h +0 -29
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/elewise_binary/div_impl.h +0 -29
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/elewise_binary/elewise_binary_impl.h +0 -48
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/elewise_binary/elewise_binary_tiling.h +0 -25
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/elewise_binary/kernel/and_kernel.h +0 -46
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/elewise_binary/kernel/div_kernel.h +0 -46
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/elewise_binary/kernel/elewise_binary_base.h +0 -260
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/elewise_binary/kernel/elewise_binary_kernel.h +0 -35
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/elewise_binary/kernel/max_kernel.h +0 -66
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/elewise_binary/kernel/min_kernel.h +0 -66
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/elewise_binary/kernel/mul_kernel.h +0 -66
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/elewise_binary/kernel/or_kernel.h +0 -46
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/elewise_binary/max_impl.h +0 -29
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/elewise_binary/min_impl.h +0 -29
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/elewise_binary/mul_impl.h +0 -29
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/elewise_binary/or_impl.h +0 -29
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/elewise_unary/abs_impl.h +0 -29
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/elewise_unary/elewise_unary_impl.h +0 -47
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/elewise_unary/elewise_unary_tiling.h +0 -24
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/elewise_unary/exp_impl.h +0 -29
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/elewise_unary/kernel/abs_kernel.h +0 -45
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/elewise_unary/kernel/elewise_unary_base.h +0 -148
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/elewise_unary/kernel/elewise_unary_kernel.h +0 -31
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/elewise_unary/kernel/exp_kernel.h +0 -45
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/elewise_unary/kernel/ln_kernel.h +0 -45
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/elewise_unary/kernel/not_kernel.h +0 -45
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/elewise_unary/kernel/reciprocal_kernel.h +0 -45
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/elewise_unary/kernel/relu_kernel.h +0 -55
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/elewise_unary/kernel/rsqrt_kernel.h +0 -45
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/elewise_unary/kernel/sqrt_kernel.h +0 -45
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/elewise_unary/ln_impl.h +0 -29
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/elewise_unary/not_impl.h +0 -29
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/elewise_unary/reciprocal_impl.h +0 -29
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/elewise_unary/relu_impl.h +0 -29
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/elewise_unary/rsqrt_impl.h +0 -29
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/elewise_unary/sqrt_impl.h +0 -29
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/flash_attention_score/flash_attention_score_impl.h +0 -68
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/internal_kernel.h +0 -99
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/internal_rtbackend.h +0 -21
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/lccl/lccl_wrapper.h +0 -58
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/ms_int_types.h +0 -91
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/ms_int_utils.h +0 -108
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/paged_attention/paged_attention_impl.h +0 -64
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/param/add_param.h +0 -68
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/param/attention_param.h +0 -40
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/param/cast_param.h +0 -30
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/param/compare_param.h +0 -31
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/param/elewise_param.h +0 -41
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/param/grouped_matmul_param.h +0 -40
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/param/matmul_ext_param.h +0 -38
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/param/matmul_qkv_param.h +0 -42
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/param/sub_param.h +0 -33
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/profiling_util.h +0 -377
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/reshape_and_cache_nz/kernel/reshape_and_cache_nz.h +0 -24
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/reshape_and_cache_nz/reshape_and_cache_nz_impl.h +0 -42
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/reshape_and_cache_nz/reshape_and_cache_nz_tiling.h +0 -27
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/rms_norm/rms_norm_impl.h +0 -46
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/sub/kernel/sub_kernel.h +0 -20
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/sub/sub_impl.h +0 -48
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/sub/sub_tiling.h +0 -25
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/tune_repo/matmul_table.h +0 -399
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/tune_repo/utils.h +0 -41
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/utils/backend.h +0 -45
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/utils/elewise_tiling.h +0 -29
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/utils/elewise_utils.h +0 -30
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/utils/log/log.h +0 -69
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/utils/log/log_core.h +0 -43
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/utils/log/log_entity.h +0 -38
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/utils/log/log_sink.h +0 -69
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/utils/log/log_stream.h +0 -41
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/utils/log/log_tiling.h +0 -71
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/utils/log/log_utils.h +0 -165
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/utils/math.h +0 -20
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/utils/register/kernel_creator.h +0 -39
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/utils/register/kernel_registry.h +0 -121
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/utils/utils.h +0 -106
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/lib/libAdd_impl.so +0 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/lib/libSub_impl.so +0 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/lib/libadd_rms_norm_quant_acme_impl.so +0 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/lib/libapply_rotary_pos_emb_310p_old_impl.so +0 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/lib/libapply_rotary_pos_emb_old_impl.so +0 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/lib/libreshape_and_cache_nz_impl.so +0 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/lib/libreshape_and_cache_nz_old_impl.so +0 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/op_kernels/ascend910b/AcmeMatMulPostFusionMixTactic/acme_matmul_postfusion_mix.json +0 -19
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/op_kernels/ascend910b/AcmeMatMulPostFusionMixTactic/acme_matmul_postfusion_mix.o +0 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/op_kernels/ascend910b/AcmeMatMulPostFusionMixTactic/acme_matmul_postfusion_mix_mix_aic_0.o +0 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/op_kernels/ascend910b/AcmeMatMulPostFusionMixTactic/acme_matmul_postfusion_mix_mix_aiv_0.o +0 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/op_kernels/ascend910b/AcmeMultiWeightMatMulPostFusionMixTactic/acme_multi_weight_matmul_postfusion_mix.json +0 -19
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/op_kernels/ascend910b/AcmeMultiWeightMatMulPostFusionMixTactic/acme_multi_weight_matmul_postfusion_mix.o +0 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/op_kernels/ascend910b/AcmeMultiWeightMatMulPostFusionMixTactic/acme_multi_weight_matmul_postfusion_mix_mix_aic_0.o +0 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/op_kernels/ascend910b/AcmeMultiWeightMatMulPostFusionMixTactic/acme_multi_weight_matmul_postfusion_mix_mix_aiv_0.o +0 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/op_kernels/ascend910b/flash_attention_score/flash_attention_score_bf16_bnsd_full_mix.o +0 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/op_kernels/ascend910b/flash_attention_score/flash_attention_score_bf16_bnsd_tri_mix.o +0 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/op_kernels/ascend910b/flash_attention_score/flash_attention_score_bf16_bsh_full_mix.o +0 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/op_kernels/ascend910b/flash_attention_score/flash_attention_score_bf16_bsh_tri_mix.o +0 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/op_kernels/ascend910b/flash_attention_score/flash_attention_score_fp16_bnsd_full_mix.o +0 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/op_kernels/ascend910b/flash_attention_score/flash_attention_score_fp16_bnsd_tri_mix.o +0 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/op_kernels/ascend910b/flash_attention_score/flash_attention_score_fp16_bsh_full_mix.o +0 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/op_kernels/ascend910b/flash_attention_score/flash_attention_score_fp16_bsh_tri_mix.o +0 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/op_kernels/ascend910b/paged_attention/paged_attention_bf16_bnsd_mix.o +0 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/op_kernels/ascend910b/paged_attention/paged_attention_bf16_bsh_mix.o +0 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/op_kernels/ascend910b/paged_attention/paged_attention_fp16_bnsd_mix.o +0 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/op_kernels/ascend910b/paged_attention/paged_attention_fp16_bsh_mix.o +0 -0
- mindspore/profiler/envprofiling.py +0 -254
- mindspore/profiler/profiling.py +0 -1926
- {mindspore-2.4.10.dist-info → mindspore-2.5.0.dist-info}/WHEEL +0 -0
- {mindspore-2.4.10.dist-info → mindspore-2.5.0.dist-info}/top_level.txt +0 -0
mindspore/mint/nn/functional.py
CHANGED
@@ -15,6 +15,10 @@
 """mint nn functional."""
 from __future__ import absolute_import
 import mindspore.ops as ops
+import mindspore.mint as mint
+from mindspore import log as logger
+from mindspore import _checkparam as validator
+from mindspore.ops.primitive import constexpr
 from mindspore.ops.function.nn_func import max_pool2d_ext as max_pool2d
 from mindspore.ops.functional import (
     conv_transpose2d,
@@ -42,6 +46,9 @@ from mindspore.ops.functional import layer_norm
 
 # 11
 from mindspore.ops.functional import relu
+
+from mindspore.ops.function.nn_func import relu_
+
 # 12
 
 # 13
@@ -49,7 +56,7 @@ from mindspore.ops.functional import relu
 # 14
 from mindspore.ops.function.nn_func import dropout_ext as dropout
 # 15
-
+from mindspore.ops.function.nn_func import conv2d_ext as conv2d
 # 16
 from mindspore.ops.function.nn_func import log_softmax_ext as log_softmax
 # 18
@@ -59,7 +66,7 @@ from mindspore.ops.auto_generate import prelu
 # 20
 
 # 21
-
+from mindspore.ops.function.nn_func import conv3d_ext as conv3d
 # 22
 
 # 23
@@ -196,7 +203,7 @@ from mindspore.ops.functional import embedding
 # 88
 
 # 89
-
+from mindspore.ops.auto_generate import avg_pool1d_ext as avg_pool1d
 # 90
 from mindspore.ops.function.nn_func import avg_pool2d_ext as avg_pool2d
 # 91
@@ -219,6 +226,8 @@ from mindspore.ops.function.math_func import tanh
 from mindspore.ops.auto_generate import selu_ext as selu # pylint: disable=W0611
 # 100
 from mindspore.ops.auto_generate import softshrink # pylint: disable=W0611
+# 152
+from mindspore.ops.auto_generate import adaptive_avg_pool3d_ext
 # 220
 from mindspore.ops.function.nn_func import hardshrink # pylint: disable=W0611
 # 221
@@ -230,6 +239,9 @@ from mindspore.ops.auto_generate import mish_ext as mish # pylint: disable=W061
 # 238
 from mindspore.ops.auto_generate import l1_loss_ext as l1_loss # pylint: disable=W0611
 
+#254
+from mindspore.ops.auto_generate import max_unpool2d_ext as max_unpool2d
+
 # 257
 
 # 258
@@ -241,12 +253,127 @@ from mindspore.ops.function.nn_func import mse_loss_ext as mse_loss
 # 324
 from mindspore.ops.auto_generate import elu_ext as elu
 
+# 421
+from mindspore.ops.auto_generate import flatten_ext as flatten
+
+# 426
+from mindspore.ops.function.clip_func import clamp
+# 427
+from mindspore.ops.function.math_func import norm_ext
+# 428
+from mindspore.ops.functional import broadcast_to
+# 536
+from mindspore.ops.function.nn_func import glu_ext as glu
+# 537
+from mindspore.ops.auto_generate import hardtanh as hardtanh_op
+from mindspore.ops.auto_generate import inplace_hardtanh as hardtanh_
 # 556
 from mindspore.ops.function.nn_func import logsigmoid_ext as logsigmoid
 
 from mindspore.ops.auto_generate import adaptive_avg_pool1d
 
 from mindspore.ops.functional import adaptive_avg_pool2d_ext as adaptive_avg_pool2d
+from mindspore.ops.function.nn_func import cross_entropy_ext as cross_entropy
+from mindspore.ops.function.nn_func import nll_loss_ext as nll_loss
+
+
+def hardtanh(input, min_val=-1.0, max_val=1.0, inplace=False):
+    r"""
+    Applies the hardtanh activation function element-wise. The activation function is defined as:
+
+    .. math::
+        \text{hardtanh}(input) = \begin{cases}
+            max\_val, & \text{ if } input > max\_val \\
+            min\_val, & \text{ if } input < min\_val \\
+            input, & \text{ otherwise. }
+        \end{cases}
+
+    Linear region range :math:`[min\_val, max\_val]` can be adjusted using `min_val` and `max_val`.
+
+    Hardtanh Activation Function Graph:
+
+    .. image:: ../images/Hardtanh.png
+        :align: center
+
+    .. warning::
+        This is an experimental optimizer API that is subject to change.
+
+    Args:
+        input (Tensor): Input Tensor.
+        min_val (Union[bool, int, float], optional): Minimum value of the linear region range. Default: ``-1.0`` .
+        max_val (Union[bool, int, float], optional): Maximum value of the linear region range. Default: ``1.0`` .
+        inplace (bool, optional): Whether to apply erasing inplace. Default: ``False``.
+
+    Returns:
+        Tensor, with the same dtype and shape as `input`.
+
+    Raises:
+        TypeError: If `input` is not a Tensor.
+        TypeError: If dtype of `input` is not one of: int8, int16, int32, int64, uint8, float16, float32, bfloat16.
+        TypeError: If dtype of `min_val` is neither float nor int.
+        TypeError: If dtype of `max_val` is neither float nor int.
+
+    Supported Platforms:
+        ``Ascend``
+
+    Examples:
+        >>> import mindspore
+        >>> from mindspore import Tensor, mint
+        >>> x = Tensor([-1, -2, 0, 2, 1], mindspore.float16)
+        >>> output = mint.nn.functional.hardtanh(x, min_val=-1.0, max_val=1.0, inplace=False)
+        >>> print(output)
+        [-1. -1. 0. 1. 1.]
+    """
+    if inplace:
+        return hardtanh_(input, min_val, max_val)
+    return hardtanh_op(input, min_val, max_val)
+
+
+def relu6(input, inplace=False):
+    r"""
+    Computes ReLU (Rectified Linear Unit) upper bounded by 6 of input tensors element-wise.
+
+    .. math::
+
+        \text{ReLU6}(input) = \min(\max(0,input), 6)
+
+    It returns :math:`\min(\max(0,input), 6)` element-wise.
+
+    ReLU6 Activation Function Graph:
+
+    .. image:: ../images/ReLU6.png
+        :align: center
+
+    .. warning::
+        This is an experimental optimizer API that is subject to change.
+
+    Args:
+        input (Tensor): input Tensor. Dtype is in int8, int16, int32, int64, uint8, float16, float32, bfloat16.
+        inplace (bool, optional): Whether to apply erasing inplace. Default: ``False``.
+
+    Returns:
+        Tensor, with the same dtype and shape as the `input`.
+
+    Raises:
+        TypeError: If `input` is not a Tensor.
+        TypeError: If dtype of `input` is not one of: int8, int16, int32, int64, uint8, float16, float32, bfloat16.
+
+    Supported Platforms:
+        ``Ascend``
+
+    Examples:
+        >>> import mindspore
+        >>> import numpy as np
+        >>> from mindspore import Tensor, mint
+        >>> x = Tensor(np.array([[-1.0, 4.0, -8.0], [2.0, -5.0, 9.0]]), mindspore.float32)
+        >>> result = mint.nn.functional.relu6(x)
+        >>> print(result)
+        [[0. 4. 0.]
+         [2. 0. 6.]]
+    """
+    if inplace:
+        return hardtanh_(input, 0, 6)
+    return hardtanh_op(input, 0, 6)
 
 
 def binary_cross_entropy(input, target, weight=None, reduction='mean'):
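
The relu6 wrapper added above simply dispatches to the hardtanh kernels with the clamp bounds fixed at (0, 6). A minimal NumPy sketch of that equivalence (an illustrative reference only, not the MindSpore kernels; hardtanh_ref and relu6_ref are hypothetical helper names):

    import numpy as np

    def hardtanh_ref(x, min_val=-1.0, max_val=1.0):
        # Piecewise definition from the hardtanh docstring:
        # max_val if x > max_val, min_val if x < min_val, else x.
        return np.clip(x, min_val, max_val)

    def relu6_ref(x):
        # ReLU6(x) = min(max(0, x), 6), i.e. hardtanh with bounds (0, 6).
        return np.minimum(np.maximum(x, 0.0), 6.0)

    x = np.array([[-1.0, 4.0, -8.0], [2.0, -5.0, 9.0]], dtype=np.float32)
    assert np.array_equal(relu6_ref(x), hardtanh_ref(x, 0.0, 6.0))
    print(relu6_ref(x))   # [[0. 4. 0.]
                          #  [2. 0. 6.]]

The printed values match the output shown in the relu6 docstring example.
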
@@ -323,7 +450,7 @@ def binary_cross_entropy_with_logits(input, target, weight=None, reduction='mean
     r"""
     Adds sigmoid activation function to `input` as logits, and uses this logits to compute binary cross entropy
     between the logits and the target.
-    Consistent with the function of
+    Consistent with the function of :func:`mindspore.ops.binary_cross_entropy_with_logits` .
 
     Sets input `input` as :math:`X`, input `target` as :math:`Y`, input `weight` as :math:`W`, output as :math:`L`.
     Then,
@@ -453,6 +580,334 @@ def one_hot(tensor, num_classes=-1):
     return ops.function.array_func.one_hot_ext(tensor, num_classes)
 
 
+def smooth_l1_loss(input, target, reduction='mean', beta=1.0):
+    r"""
+    Computes smooth L1 loss, a robust L1 loss.
+
+    SmoothL1Loss is a Loss similar to MSELoss but less sensitive to outliers as described in the
+    `Fast R-CNN <https://arxiv.org/abs/1504.08083>`_ by Ross Girshick.
+
+    Given two inputs :math:`x,\ y` of length :math:`N`, the SmoothL1Loss can be described
+    as follows:
+
+    .. math::
+        L_{i} =
+        \begin{cases}
+        \frac{0.5 (x_i - y_i)^{2}}{\text{beta}}, & \text{if } |x_i - y_i| < \text{beta} \\
+        |x_i - y_i| - 0.5 * \text{beta}, & \text{otherwise. }
+        \end{cases}
+
+    If `reduction` is not `none`, then:
+
+    .. math::
+        L =
+        \begin{cases}
+            \operatorname{mean}(L_{i}), & \text{if reduction} = \text{'mean';}\\
+            \operatorname{sum}(L_{i}), & \text{if reduction} = \text{'sum'.}
+        \end{cases}
+
+    Here :math:`\text{beta}` controls the point where the loss function changes from quadratic to linear.
+    :math:`\text{beta} \geq 0` , its default value is ``1.0`` . :math:`N` is the batch size.
+
+    .. warning::
+        This is an experimental optimizer API that is subject to change.
+
+    Note:
+        - Arg `input` and `target` comply with the implicit type conversion rules to make the data types consistent.
+          If they have different data types, the lower precision data type will be converted to relatively the
+          highest precision data type.
+
+    Args:
+        input (Tensor): Tensor of shape :math:`(N, *)` where :math:`*` means, any number of additional dimensions.
+            Supported dtypes:
+
+            - Ascend: float16, float32, bfloat16.
+
+        target (Tensor): Ground truth data, tensor of shape :math:`(N, *)`, same shape as the `input`.
+            Supported dtypes:
+
+            - Ascend: float16, float32, bfloat16.
+
+        reduction (str, optional): Apply specific reduction method to the output: ``'none'`` , ``'mean'`` ,
+            ``'sum'`` . Default: ``'mean'`` .
+
+            - ``'none'``: no reduction will be applied.
+            - ``'mean'``: compute the mean of elements in the output.
+            - ``'sum'``: the output elements will be summed.
+        beta (number, optional): A parameter used to control the point where the function will change between
+            L1 to L2 loss. The value should be greater than or equal to zero. Default: ``1.0`` .
+
+    Returns:
+        Tensor, the data type is the same as `input`.
+        If `reduction` is ``'none'``, then output is a tensor with the same shape as `input`.
+        Otherwise, the shape of output tensor is :math:`()`.
+
+    Raises:
+        TypeError: If `input` or `target` is not a Tensor.
+        RuntimeError: If dtype of `input` or `target` is not one of float16, float32, bfloat16.
+        ValueError: If shape of `input` is not the same as `target`.
+        ValueError: If `reduction` is not one of ``'none'``, ``'mean'``, ``'sum'``.
+        TypeError: If `beta` is not a float, int or bool.
+        RuntimeError: If `beta` is less than 0.
+
+    Supported Platforms:
+        ``Ascend``
+
+    Examples:
+        >>> import mindspore
+        >>> import numpy as np
+        >>> from mindspore import Tensor, ops
+        >>> input = Tensor(np.array([2, 2, 3]), mindspore.float32)
+        >>> target = Tensor(np.array([2, 2, 2]), mindspore.float32)
+        >>> beta = 1.0
+        >>> reduction_1 = 'none'
+        >>> output = ops.nn.functional.smooth_l1_loss(input, target, reduction_1, beta)
+        >>> print(output)
+        [0. 0. 0.5]
+        >>> reduction_2 = 'mean'
+        >>> output = ops.nn.functional.smooth_l1_loss(input, target, reduction_2, beta)
+        >>> print(output)
+        0.16666667
+        >>> reduction_3 = 'sum'
+        >>> output = ops.nn.functional.smooth_l1_loss(input, target, reduction_3, beta)
+        >>> print(output)
+        0.5
+    """
+    return ops.function.smooth_l1_loss(input, target, beta, reduction)
+
+
+@constexpr
+def log_warning(msg):
+    """Adds warning to logger."""
+    logger.warning(msg)
+
+
+def dropout2d(input, p=0.5, training=True):
+    r"""
+    During training, randomly zeroes some channels of the input tensor with probability `p`
+    from a Bernoulli distribution(For a 4-dimensional tensor with a shape of :math:`NCHW`,
+    the channel feature map refers to a 2-dimensional feature map with the shape of :math:`HW`).
+
+    For example, the :math:`j\_th` channel of the :math:`i\_th` sample in the batched input is a to-be-processed
+    `2D` tensor input[i,j].
+    Each channel will be zeroed out independently on every forward call which based on Bernoulli distribution
+    probability `p`.
+    The parper `Dropout: A Simple Way to Prevent Neural Networks from Overfitting
+    <http://www.cs.toronto.edu/~rsalakhu/papers/srivastava14a.pdf>`_ mentioned this technology, and it is proved that
+    it can effectively reduce over fitting and prevent neuronal coadaptation.
+    For more details, refer to `Improving neural networks by preventing co-adaptation of feature detectors
+    <https://arxiv.org/pdf/1207.0580.pdf>`_ .
+
+    `dropout2d` can improve the independence between channel feature maps.
+
+    .. warning::
+        This is an experimental API that is subject to change or deletion.
+
+    Args:
+        input (Tensor): A `4D` tensor with shape :math:`(N, C, H, W)`, where `N` is the batch size, `C` is the number
+            of channels, `H` is the feature height, and `W` is the feature width.
+        p (float): The dropping probability of a channel, between 0 and 1, e.g. `p` = 0.8,
+            which means dropping out 80% of channels. Default: ``0.5`` .
+        training(bool): If `training` is True, applying dropout, otherwise, not applying. Default: ``True`` .
+
+    Returns:
+        Tensor, output, with the same shape and data type as `input`.
+
+    Raises:
+        TypeError: If `input` is not a Tensor.
+        TypeError: If the data type of `p` is not float.
+        ValueError: If `p` is out of the range `[0.0, 1.0]`.
+
+    Supported Platforms:
+        ``Ascend``
+
+    Examples:
+        >>> import mindspore
+        >>> import numpy as np
+        >>> from mindspore import Tensor, mint
+        >>> input = Tensor(np.ones([2, 1, 2, 3]), mindspore.float32)
+        >>> output = mint.nn.functional.dropout2d(input, 0.5)
+        >>> print(output.shape)
+        (2, 1, 2, 3)
+    """
+    def dropout2d_impl_(input, p, training):
+        if p == 0 or not training or input.numel() == 0:
+            return input
+
+        if p == 1:
+            return mint.mul(input, mint.zeros((), dtype=input.dtype))
+
+        if input.ndim < 2:
+            raise ValueError(f'For dropout2d, input size after unsqueeze must be greater or equal to 2')
+
+        if ops.is_sequence_shape_unknown(input.shape):
+            input_tensor_shape = ops.TensorShape()(input)
+            nosie_tensor_shape = mint.ones_like(input_tensor_shape)
+            nosie_tensor_shape[0] = input_tensor_shape[0]
+            nosie_tensor_shape[1] = input_tensor_shape[1]
+            nosie_shape = ops.TensorToTuple()(nosie_tensor_shape)
+        else:
+            nosie_shape = input.shape[:2] + tuple(1 for _ in range(len(input.shape) - 2))
+        nosie = mint.full(nosie_shape, 1 - p, dtype=input.dtype)
+        nosie = mint.bernoulli(nosie)
+        nosie = mint.div(nosie, 1 - p)
+
+        return mint.mul(input, nosie)
+
+    validator.check_float_range(p, 0.0, 1.0, validator.INC_BOTH, "p", "dropout2d")
+    validator.check_bool(training, "training", "dropout2d")
+
+    if input.ndim not in (3, 4):
+        log_warning(f"dropout2d receviced a {input.ndim}-D input which is not recommended. Please use dropout instead.")
+
+    is_batched = input.ndim == 4
+    if not is_batched:
+        input_shape = input.shape
+        if ops.is_sequence_shape_unknown(input.shape):
+            input_shape = ops.TensorToTuple()(ops.TensorShape()(input))
+        input = input.reshape((1, *input_shape))
+        result = dropout2d_impl_(input, p, training)
+        result = result.reshape(input_shape)
+    else:
+        result = dropout2d_impl_(input, p, training)
+
+    return result
+
+
+def normalize(input, p=2.0, dim=1, eps=1e-12):
+    r"""
+    Perform normalization of inputs over specified dimension
+
+    For a tensor input of sizes :math:`(n_{0},..., n_{dim},..., n_{k})`, each :math:`n_{dim}` -element vector `v`
+    along dimension `dim` is transformed as
+
+    .. math::
+        v=\frac{v}{\max(\left \| v \right \| _{p},\epsilon )}
+
+    With the default arguments it uses the Euclidean norm over vectors along dimension ``1`` for normalization.
+
+    .. warning::
+        This is an experimental API that is subject to change or deletion.
+
+    Args:
+        input (Tensor): input tensor of any shape.
+        p (float): the exponent value in the norm formulation. default: ``2``.
+        dim (int): the dimension to reduce. default: ``1``.
+        eps (float): small value to avoid division by zero. default: ``1e-12``.
+
+    Returns:
+        Tensor, shape and data type are the same as input.
+
+    Supported Platforms:
+        ``Ascend``
+
+    Examples:
+        >>> import mindspore
+        >>> import numpy as np
+        >>> from mindspore import Tensor, mint
+        >>> tensor = Tensor(np.array([[0, 1, 2], [3, 4, 5], [6, 7, 8]]), mindspore.float32)
+        >>> output = mint.nn.functional.normalize(tensor)
+        >>> print(output)
+        [[0.0000 0.4472 0.8944]
+         [0.4243 0.5657 0.7071]
+         [0.4915 0.5735 0.6554]]
+    """
+    denom = broadcast_to(clamp(norm_ext(input, p, dim, keepdim=True), min=eps), input.shape)
+    return input / denom
+
+
+def upsample(input, size=None, scale_factor=None, mode="nearest", align_corners=None):
+    r"""
+    Samples `input` by the given `size` or `scale_factor`.
+
+    .. warning::
+        This is an experimental API that is subject to change or deletion.
+
+    Refer to :func:`mindspore.mint.nn.functional.interpolate` for more details.
+
+    Supported Platforms:
+        ``Ascend``
+    """
+    return interpolate(input, size, scale_factor, mode, align_corners)
+
+
+def adaptive_avg_pool3d(input, output_size):
+    r"""
+    Performs 3D adaptive average pooling on a multi-plane input signal.
+    That is, for any input size, the size of the specified output is :math:`(D, H, W)`.
+    The number of output features is equal to the number of input planes.
+
+    Suppose the last 3 dimension size of x is :math:`(inD, inH, inW)`, the last 3 dimension size of output is
+    :math:`(outD, outH, outW)`.
+
+    .. math::
+        \begin{array}{ll} \\
+            \forall \quad od \in [0,outD-1], oh \in [0,outH-1], ow \in [0,outW-1]\\
+            output[od,oh,ow] = \\
+            \qquad mean(x[istartD:iendD+1,istartH:iendH+1,istartW:iendW+1])\\
+            where,\\
+            \qquad istartD= \left\lceil \frac{od * inD}{outD} \right\rceil \\
+            \qquad iendD=\left\lfloor \frac{(od+1)* inD}{outD} \right\rfloor \\
+            \qquad istartH=\left\lceil \frac{oh * inH}{outH} \right\rceil \\
+            \qquad iendH=\left\lfloor \frac{(oh+1) * inH}{outH} \right\rfloor \\
+            \qquad istartW=\left\lceil \frac{ow * inW}{outW} \right\rceil \\
+            \qquad iendW=\left\lfloor \frac{(ow+1) * inW}{outW} \right\rfloor
+        \end{array}
+
+    .. warning::
+        For Ascend, it is only supported on Atlas A2 Training Series Products.
+        This is an experimental optimizer API that is subject to change or deletion.
+
+    Args:
+        input (Tensor): The input of adaptive_avg_pool3d, which is a 4D or 5D Tensor.
+        output_size (Union[int, tuple]): The target output size. `output_size` can be a tuple :math:`(D, H, W)`,
+            or an int D for :math:`(D, D, D)`. :math:`D`, :math:`H` and :math:`W` can be int or None
+            which means the output size is the same as that of the input.
+
+    Returns:
+        Tensor, with the same type as the `input`.
+
+    Raises:
+        TypeError: If `input` is not a Tensor.
+        ValueError: If the dimension of `input` is not 4D or 5D.
+        ValueError: If `output_size` value is not positive.
+
+    Supported Platforms:
+        ``Ascend``
+
+    Examples:
+        >>> import mindspore
+        >>> import numpy as np
+        >>> from mindspore import Tensor, mint
+        >>> # case 1: output_size=(3, 3, 4)
+        >>> output_size=(3, 3, 4)
+        >>> input_val = np.random.randn(4, 3, 5, 6, 7)
+        >>> input = Tensor(input_val, mindspore.float32)
+        >>> output = mint.nn.functional.adaptive_avg_pool3d(input, output_size)
+        >>> print(output.shape)
+        (4, 3, 3, 3, 4)
+        >>> # case 2: output_size=4
+        >>> output_size=5
+        >>> input_val = np.random.randn(2, 3, 8, 6, 12)
+        >>> input = Tensor(input_val, mindspore.float32)
+        >>> output = mint.nn.functional.adaptive_avg_pool3d(input, output_size)
+        >>> print(output.shape)
+        (2, 3, 5, 5, 5)
+        >>> # case 3: output_size=(None, 4, 5)
+        >>> output_size=(None, 4, 5)
+        >>> input_val = np.random.randn(4, 1, 9, 10, 8)
+        >>> input = Tensor(input_val, mindspore.float32)
+        >>> output = mint.nn.functional.adaptive_avg_pool3d(input, output_size)
+        >>> print(output.shape)
+        (4, 1, 9, 4, 5)
+    """
+    validator.check_value_type("output_size", output_size, [int, tuple, list], "adaptive_avg_pool3d")
+    if isinstance(output_size, int):
+        output_size = (output_size, output_size, output_size)
+    output_size = tuple(-1 if val is None else val for val in output_size)
+    return adaptive_avg_pool3d_ext(input, output_size)
+
+
 __all__ = [
     'conv_transpose2d',
     'max_pool2d',
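
The dropout2d added above builds its Bernoulli mask with shape (N, C, 1, 1), so a whole channel is either kept or zeroed, and kept channels are rescaled by 1/(1-p). A small NumPy sketch of that noise-shape logic (an assumed reference re-derivation, not the mint code path; dropout2d_ref is a hypothetical name):

    import numpy as np

    def dropout2d_ref(x, p=0.5, training=True, seed=0):
        # Mirror of the noise-shape trick in dropout2d_impl_ above:
        # the mask has shape (N, C, 1, 1) and broadcasts over H and W.
        if not training or p == 0.0:
            return x
        if p == 1.0:
            return np.zeros_like(x)
        noise_shape = x.shape[:2] + (1,) * (x.ndim - 2)
        keep = np.random.default_rng(seed).random(noise_shape) < (1.0 - p)
        return x * keep / (1.0 - p)

    x = np.ones((2, 3, 4, 4), dtype=np.float32)
    y = dropout2d_ref(x, p=0.5)
    flat = y.reshape(2, 3, -1)
    # Every channel is uniformly 0.0 (dropped) or 2.0 (kept and rescaled by 1/(1-p)),
    # so the per-channel min and max coincide.
    print(flat.min(axis=-1))
    print(flat.max(axis=-1))
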
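The one-line body of normalize above divides the input by broadcast(max(||input||_p along dim, eps)). A NumPy re-derivation (assumed reference math only, not the MindSpore ops) that reproduces the values printed in its docstring example:

    import numpy as np

    def normalize_ref(x, p=2.0, dim=1, eps=1e-12):
        # denom = max(p-norm along dim, eps), broadcast back to x's shape.
        norm = np.power(np.sum(np.abs(x) ** p, axis=dim, keepdims=True), 1.0 / p)
        return x / np.maximum(norm, eps)

    x = np.array([[0, 1, 2], [3, 4, 5], [6, 7, 8]], dtype=np.float32)
    print(np.round(normalize_ref(x), 4))
    # approximately:
    # [[0.     0.4472 0.8944]
    #  [0.4243 0.5657 0.7071]
    #  [0.4915 0.5735 0.6554]]
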
@@ -473,11 +928,14 @@ __all__ = [
     # 8
     'layer_norm',
     # 9
-
+    'upsample',
     # 10
 
     # 11
     'relu',
+
+    'relu_',
+
     # 12
 
     # 13
@@ -485,7 +943,7 @@ __all__ = [
     # 14
     'dropout',
     # 15
-
+    'conv2d',
     # 16
     'log_softmax',
     # 17
@@ -495,9 +953,10 @@ __all__ = [
     # 19
     'binary_cross_entropy',
     # 20
-
+    'cross_entropy',
     # 21
-
+    'conv3d',
+    'nll_loss',
     # 22
 
     # 23
@@ -633,7 +1092,7 @@ __all__ = [
     # 88
 
     # 89
-
+    'avg_pool1d',
     # 90
     'avg_pool2d',
     # 91
@@ -656,6 +1115,14 @@ __all__ = [
 
     # 100
 
+    # 152
+    'adaptive_avg_pool3d',
+    # 254
+    'max_unpool2d',
+
+    # 312
+    'normalize',
+
     # 323
 
     # 324
@@ -674,6 +1141,14 @@ __all__ = [
     'adaptive_avg_pool1d',
 
     'adaptive_avg_pool2d',
-
-
+    # 393
+    'dropout2d',
+    # 421
+    'flatten',
+    # 536
+    'glu',
+    # 537
+    'hardtanh',
+    'hardtanh_',
+    'relu6',
 ]
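
The __all__ hunks above extend the public surface of mindspore.mint.nn.functional. A quick sanity check, assuming the 2.5.0 wheel is installed, that the newly listed names are exported (the name list below is copied from the additions above):

    from mindspore.mint.nn import functional as F

    new_names = ['upsample', 'relu_', 'conv2d', 'conv3d', 'cross_entropy', 'nll_loss',
                 'avg_pool1d', 'adaptive_avg_pool3d', 'max_unpool2d', 'normalize',
                 'dropout2d', 'flatten', 'glu', 'hardtanh', 'hardtanh_', 'relu6']
    missing = [name for name in new_names if name not in F.__all__]
    print('missing from __all__:', missing)   # expected: []
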
@@ -27,13 +27,26 @@ from mindspore.mint.nn.layer.normalization import BatchNorm1d
 from mindspore.mint.nn.layer.normalization import BatchNorm2d
 from mindspore.mint.nn.layer.normalization import BatchNorm3d
 from mindspore.mint.nn.layer.normalization import LayerNorm
+from mindspore.mint.nn.layer.normalization import SyncBatchNorm
 from mindspore.mint.nn.layer.activation import LogSigmoid
 from mindspore.mint.nn.layer.activation import SiLU
+from mindspore.mint.nn.layer.basic import Dropout2d
 from mindspore.mint.nn.layer.pooling import AdaptiveAvgPool1d
 from mindspore.mint.nn.layer.pooling import AdaptiveAvgPool2d
+from mindspore.mint.nn.layer.pooling import AdaptiveAvgPool3d
 
 
-__all__ = [
-
-
-
+__all__ = [
+    'GroupNorm',
+    'BatchNorm1d',
+    'BatchNorm2d',
+    'BatchNorm3d',
+    'LayerNorm',
+    'LogSigmoid',
+    'SiLU',
+    'Dropout2d',
+    'AdaptiveAvgPool1d',
+    'AdaptiveAvgPool2d',
+    'AdaptiveAvgPool3d',
+    'SyncBatchNorm',
+]