mindspore-2.4.10-cp39-cp39-manylinux1_x86_64.whl → mindspore-2.5.0-cp39-cp39-manylinux1_x86_64.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of mindspore might be problematic. Click here for more details.
- mindspore/.commit_id +1 -1
- mindspore/Third_Party_Open_Source_Software_Notice +39 -0
- mindspore/__init__.py +8 -3
- mindspore/_akg/akg/composite/build_module.py +6 -2
- mindspore/_akg/akg/utils/kernel_exec.py +2 -2
- mindspore/_c_dataengine.cpython-39-x86_64-linux-gnu.so +0 -0
- mindspore/_c_expression.cpython-39-x86_64-linux-gnu.so +0 -0
- mindspore/_c_mindrecord.cpython-39-x86_64-linux-gnu.so +0 -0
- mindspore/_checkparam.py +0 -5
- mindspore/_extends/parallel_compile/akg_compiler/gen_custom_op_files.py +1 -1
- mindspore/_extends/parse/compile_config.py +64 -0
- mindspore/_extends/parse/deprecated/__init__.py +0 -0
- mindspore/_extends/parse/deprecated/deprecated_tensor_method.py +375 -0
- mindspore/_extends/parse/parser.py +23 -5
- mindspore/_extends/parse/standard_method.py +123 -27
- mindspore/_extends/pijit/pijit_func_white_list.py +1 -1
- mindspore/amp.py +7 -1
- mindspore/boost/boost_cell_wrapper.py +136 -41
- mindspore/common/__init__.py +3 -1
- mindspore/common/_register_for_tensor.py +0 -1
- mindspore/common/_stub_tensor.py +25 -4
- mindspore/common/_tensor_cpp_method.py +17 -0
- mindspore/common/_tensor_docs.py +6132 -0
- mindspore/common/api.py +98 -21
- mindspore/common/dtype.py +34 -34
- mindspore/common/dump.py +2 -1
- mindspore/common/file_system.py +8 -3
- mindspore/common/generator.py +2 -0
- mindspore/common/hook_handle.py +3 -1
- mindspore/common/initializer.py +3 -4
- mindspore/common/lazy_inline.py +8 -2
- mindspore/common/mindir_util.py +10 -2
- mindspore/common/parameter.py +31 -15
- mindspore/common/tensor.py +713 -1337
- mindspore/communication/__init__.py +1 -1
- mindspore/communication/_comm_helper.py +5 -0
- mindspore/communication/comm_func.py +215 -173
- mindspore/communication/management.py +23 -20
- mindspore/context.py +285 -191
- mindspore/dataset/__init__.py +23 -19
- mindspore/dataset/callback/ds_callback.py +2 -1
- mindspore/dataset/core/config.py +84 -3
- mindspore/dataset/engine/cache_admin.py +3 -3
- mindspore/dataset/engine/cache_client.py +5 -4
- mindspore/dataset/engine/datasets.py +192 -149
- mindspore/dataset/engine/datasets_audio.py +14 -0
- mindspore/dataset/engine/datasets_standard_format.py +11 -11
- mindspore/dataset/engine/datasets_text.py +38 -1
- mindspore/dataset/engine/datasets_user_defined.py +100 -66
- mindspore/dataset/engine/datasets_vision.py +81 -8
- mindspore/dataset/engine/iterators.py +281 -63
- mindspore/dataset/engine/obs/util.py +8 -0
- mindspore/dataset/engine/queue.py +40 -0
- mindspore/dataset/engine/samplers.py +26 -2
- mindspore/dataset/engine/serializer_deserializer.py +1 -1
- mindspore/dataset/engine/validators.py +43 -11
- mindspore/dataset/transforms/py_transforms_util.py +17 -0
- mindspore/dataset/transforms/transforms.py +29 -12
- mindspore/dataset/vision/validators.py +1 -2
- mindspore/device_context/__init__.py +21 -0
- mindspore/device_context/ascend/__init__.py +25 -0
- mindspore/device_context/ascend/device.py +72 -0
- mindspore/device_context/ascend/op_debug.py +94 -0
- mindspore/device_context/ascend/op_precision.py +193 -0
- mindspore/device_context/ascend/op_tuning.py +127 -0
- mindspore/device_context/cpu/__init__.py +25 -0
- mindspore/device_context/cpu/device.py +62 -0
- mindspore/device_context/cpu/op_tuning.py +43 -0
- mindspore/device_context/gpu/__init__.py +21 -0
- mindspore/device_context/gpu/device.py +70 -0
- mindspore/device_context/gpu/op_precision.py +67 -0
- mindspore/device_context/gpu/op_tuning.py +175 -0
- mindspore/device_manager.py +134 -0
- mindspore/experimental/llm_boost/__init__.py +1 -0
- mindspore/experimental/llm_boost/ascend_native/__init__.py +22 -0
- mindspore/experimental/llm_boost/ascend_native/llama_boost_ascend_native.py +211 -0
- mindspore/experimental/llm_boost/ascend_native/llm_boost.py +52 -0
- mindspore/experimental/llm_boost/atb/boost_base.py +2 -3
- mindspore/experimental/llm_boost/atb/llama_boost.py +6 -1
- mindspore/experimental/llm_boost/register.py +1 -0
- mindspore/experimental/optim/adadelta.py +26 -22
- mindspore/experimental/optim/adam.py +3 -0
- mindspore/experimental/optim/lr_scheduler.py +33 -24
- mindspore/experimental/optim/radam.py +33 -30
- mindspore/hal/device.py +28 -0
- mindspore/hal/event.py +17 -0
- mindspore/hal/memory.py +94 -3
- mindspore/hal/stream.py +91 -6
- mindspore/include/api/context.h +0 -1
- mindspore/lib/libavcodec.so.59 +0 -0
- mindspore/lib/libavdevice.so.59 +0 -0
- mindspore/lib/libavfilter.so.8 +0 -0
- mindspore/lib/libavformat.so.59 +0 -0
- mindspore/lib/libavutil.so.57 +0 -0
- mindspore/lib/libdnnl.so.2 +0 -0
- mindspore/lib/libmindspore_backend.so +0 -0
- mindspore/lib/libmindspore_common.so +0 -0
- mindspore/lib/libmindspore_core.so +0 -0
- mindspore/lib/libmindspore_glog.so.0 +0 -0
- mindspore/lib/libmindspore_gpr.so.15 +0 -0
- mindspore/lib/libmindspore_grpc++.so.1 +0 -0
- mindspore/lib/libmindspore_grpc.so.15 +0 -0
- mindspore/lib/libmindspore_ops.so +0 -0
- mindspore/lib/libmpi_adapter.so +0 -0
- mindspore/lib/libmpi_collective.so +0 -0
- mindspore/lib/libnnacl.so +0 -0
- mindspore/lib/libopencv_core.so.4.5 +0 -0
- mindspore/lib/libopencv_imgcodecs.so.4.5 +0 -0
- mindspore/lib/libopencv_imgproc.so.4.5 +0 -0
- mindspore/lib/libps_cache.so +0 -0
- mindspore/lib/libswresample.so.4 +0 -0
- mindspore/lib/libswscale.so.6 +0 -0
- mindspore/lib/plugin/ascend/custom_aicore_ops/op_impl/ai_core/tbe/config/ascend910_93/aic-ascend910_93-ops-info.json +2048 -0
- mindspore/lib/plugin/ascend/custom_aicpu_ops/op_impl/cpu/aicpu_kernel/impl/libcust_cpu_kernels.so +0 -0
- mindspore/lib/plugin/ascend/custom_aicpu_ops/op_proto/libcust_op_proto.so +0 -0
- mindspore/lib/plugin/ascend/custom_ascendc_910/op_api/lib/libcust_opapi.so +0 -0
- mindspore/lib/plugin/ascend/custom_ascendc_910/op_impl/ai_core/tbe/custom_ascendc_910_impl/dynamic/decoder_kv_cache.py +1 -1
- mindspore/lib/plugin/ascend/custom_ascendc_910/op_impl/ai_core/tbe/custom_ascendc_910_impl/dynamic/prompt_kv_cache.py +1 -1
- mindspore/lib/plugin/ascend/custom_ascendc_910/op_impl/ai_core/tbe/op_tiling/lib/linux/x86_64/libcust_opmaster_rt2.0.so +0 -0
- mindspore/lib/plugin/ascend/custom_ascendc_910/op_impl/ai_core/tbe/op_tiling/liboptiling.so +0 -0
- mindspore/lib/plugin/ascend/custom_ascendc_910/op_proto/lib/linux/x86_64/libcust_opsproto_rt2.0.so +0 -0
- mindspore/lib/plugin/ascend/custom_ascendc_910/version.info +1 -1
- mindspore/lib/plugin/ascend/custom_ascendc_910b/op_api/lib/libcust_opapi.so +0 -0
- mindspore/lib/plugin/ascend/custom_ascendc_910b/op_impl/ai_core/tbe/config/ascend910_93/aic-ascend910_93-ops-info.json +224 -0
- mindspore/lib/plugin/ascend/custom_ascendc_910b/op_impl/ai_core/tbe/custom_ascendc_910b_impl/dynamic/all_finite.py +1 -1
- mindspore/lib/plugin/ascend/custom_ascendc_910b/op_impl/ai_core/tbe/custom_ascendc_910b_impl/dynamic/decoder_kv_cache.py +1 -1
- mindspore/lib/plugin/ascend/custom_ascendc_910b/op_impl/ai_core/tbe/custom_ascendc_910b_impl/dynamic/prompt_kv_cache.py +1 -1
- mindspore/lib/plugin/ascend/custom_ascendc_910b/op_impl/ai_core/tbe/kernel/ascend910_93/all_finite/AllFinite_52f59e2a65d9b1bb002de35c2819754a.json +78 -0
- mindspore/lib/plugin/ascend/custom_ascendc_910b/op_impl/ai_core/tbe/kernel/ascend910_93/all_finite/AllFinite_52f59e2a65d9b1bb002de35c2819754a.o +0 -0
- mindspore/lib/plugin/ascend/custom_ascendc_910b/op_impl/ai_core/tbe/kernel/ascend910_93/all_finite/AllFinite_6b5e50e30256d85838d6ce83514df20f.json +78 -0
- mindspore/lib/plugin/ascend/custom_ascendc_910b/op_impl/ai_core/tbe/kernel/ascend910_93/all_finite/AllFinite_6b5e50e30256d85838d6ce83514df20f.o +0 -0
- mindspore/lib/plugin/ascend/custom_ascendc_910b/op_impl/ai_core/tbe/kernel/ascend910_93/all_finite/AllFinite_74e4ac02880d452e3308c94af273562e.json +78 -0
- mindspore/lib/plugin/ascend/custom_ascendc_910b/op_impl/ai_core/tbe/kernel/ascend910_93/all_finite/AllFinite_74e4ac02880d452e3308c94af273562e.o +0 -0
- mindspore/lib/plugin/ascend/custom_ascendc_910b/op_impl/ai_core/tbe/kernel/ascend910_93/decoder_kv_cache/DecoderKvCache_0d5520cc587ad44ce634bf3fbcffc272.json +156 -0
- mindspore/lib/plugin/ascend/custom_ascendc_910b/op_impl/ai_core/tbe/kernel/ascend910_93/decoder_kv_cache/DecoderKvCache_0d5520cc587ad44ce634bf3fbcffc272.o +0 -0
- mindspore/lib/plugin/ascend/custom_ascendc_910b/op_impl/ai_core/tbe/kernel/ascend910_93/decoder_kv_cache/DecoderKvCache_20390d30b3c4c0d23167ccca6c030c2b.json +156 -0
- mindspore/lib/plugin/ascend/custom_ascendc_910b/op_impl/ai_core/tbe/kernel/ascend910_93/decoder_kv_cache/DecoderKvCache_20390d30b3c4c0d23167ccca6c030c2b.o +0 -0
- mindspore/lib/plugin/ascend/custom_ascendc_910b/op_impl/ai_core/tbe/kernel/ascend910_93/decoder_kv_cache/DecoderKvCache_2d151f0b1d2db51faa2968d5b67544e2.json +156 -0
- mindspore/lib/plugin/ascend/custom_ascendc_910b/op_impl/ai_core/tbe/kernel/ascend910_93/decoder_kv_cache/DecoderKvCache_2d151f0b1d2db51faa2968d5b67544e2.o +0 -0
- mindspore/lib/plugin/ascend/custom_ascendc_910b/op_impl/ai_core/tbe/kernel/ascend910_93/decoder_kv_cache/DecoderKvCache_561690ec17cc1def3d2fcf68c1b07b56.json +156 -0
- mindspore/lib/plugin/ascend/custom_ascendc_910b/op_impl/ai_core/tbe/kernel/ascend910_93/decoder_kv_cache/DecoderKvCache_561690ec17cc1def3d2fcf68c1b07b56.o +0 -0
- mindspore/lib/plugin/ascend/custom_ascendc_910b/op_impl/ai_core/tbe/kernel/ascend910_93/decoder_kv_cache/DecoderKvCache_570f9aaa99e5e773b3dd0a33784363f4.json +156 -0
- mindspore/lib/plugin/ascend/custom_ascendc_910b/op_impl/ai_core/tbe/kernel/ascend910_93/decoder_kv_cache/DecoderKvCache_570f9aaa99e5e773b3dd0a33784363f4.o +0 -0
- mindspore/lib/plugin/ascend/custom_ascendc_910b/op_impl/ai_core/tbe/kernel/ascend910_93/decoder_kv_cache/DecoderKvCache_59668a0f0764afb98fda8ab9e84126f1.json +156 -0
- mindspore/lib/plugin/ascend/custom_ascendc_910b/op_impl/ai_core/tbe/kernel/ascend910_93/decoder_kv_cache/DecoderKvCache_59668a0f0764afb98fda8ab9e84126f1.o +0 -0
- mindspore/lib/plugin/ascend/custom_ascendc_910b/op_impl/ai_core/tbe/kernel/ascend910_93/decoder_kv_cache/DecoderKvCache_91d9833e4792b70b670e4e2b916abd86.json +156 -0
- mindspore/lib/plugin/ascend/custom_ascendc_910b/op_impl/ai_core/tbe/kernel/ascend910_93/decoder_kv_cache/DecoderKvCache_91d9833e4792b70b670e4e2b916abd86.o +0 -0
- mindspore/lib/plugin/ascend/custom_ascendc_910b/op_impl/ai_core/tbe/kernel/ascend910_93/decoder_kv_cache/DecoderKvCache_c74cdc5fef094383401856f8519504af.json +156 -0
- mindspore/lib/plugin/ascend/custom_ascendc_910b/op_impl/ai_core/tbe/kernel/ascend910_93/decoder_kv_cache/DecoderKvCache_c74cdc5fef094383401856f8519504af.o +0 -0
- mindspore/lib/plugin/ascend/custom_ascendc_910b/op_impl/ai_core/tbe/kernel/ascend910_93/prompt_kv_cache/PromptKvCache_0515c7b1a4cd614449e38c5e9a7e3f8d.json +165 -0
- mindspore/lib/plugin/ascend/custom_ascendc_910b/op_impl/ai_core/tbe/kernel/ascend910_93/prompt_kv_cache/PromptKvCache_0515c7b1a4cd614449e38c5e9a7e3f8d.o +0 -0
- mindspore/lib/plugin/ascend/custom_ascendc_910b/op_impl/ai_core/tbe/kernel/ascend910_93/prompt_kv_cache/PromptKvCache_09f22d898d6358c91e7c4fc48bac48e7.json +165 -0
- mindspore/lib/plugin/ascend/custom_ascendc_910b/op_impl/ai_core/tbe/kernel/ascend910_93/prompt_kv_cache/PromptKvCache_09f22d898d6358c91e7c4fc48bac48e7.o +0 -0
- mindspore/lib/plugin/ascend/custom_ascendc_910b/op_impl/ai_core/tbe/kernel/ascend910_93/prompt_kv_cache/PromptKvCache_0cb9a6f894b925250227136e5aab7061.json +165 -0
- mindspore/lib/plugin/ascend/custom_ascendc_910b/op_impl/ai_core/tbe/kernel/ascend910_93/prompt_kv_cache/PromptKvCache_0cb9a6f894b925250227136e5aab7061.o +0 -0
- mindspore/lib/plugin/ascend/custom_ascendc_910b/op_impl/ai_core/tbe/kernel/ascend910_93/prompt_kv_cache/PromptKvCache_2fa8702ffd7ca85e9e194f62644415d5.json +165 -0
- mindspore/lib/plugin/ascend/custom_ascendc_910b/op_impl/ai_core/tbe/kernel/ascend910_93/prompt_kv_cache/PromptKvCache_2fa8702ffd7ca85e9e194f62644415d5.o +0 -0
- mindspore/lib/plugin/ascend/custom_ascendc_910b/op_impl/ai_core/tbe/kernel/ascend910_93/prompt_kv_cache/PromptKvCache_570b62f187dfd439b64613d881deedb7.json +165 -0
- mindspore/lib/plugin/ascend/custom_ascendc_910b/op_impl/ai_core/tbe/kernel/ascend910_93/prompt_kv_cache/PromptKvCache_570b62f187dfd439b64613d881deedb7.o +0 -0
- mindspore/lib/plugin/ascend/custom_ascendc_910b/op_impl/ai_core/tbe/kernel/ascend910_93/prompt_kv_cache/PromptKvCache_585218c11411ff84709b9e725b66c435.json +165 -0
- mindspore/lib/plugin/ascend/custom_ascendc_910b/op_impl/ai_core/tbe/kernel/ascend910_93/prompt_kv_cache/PromptKvCache_585218c11411ff84709b9e725b66c435.o +0 -0
- mindspore/lib/plugin/ascend/custom_ascendc_910b/op_impl/ai_core/tbe/kernel/ascend910_93/prompt_kv_cache/PromptKvCache_5c9365ccde170b358c5b126d69dae13e.json +165 -0
- mindspore/lib/plugin/ascend/custom_ascendc_910b/op_impl/ai_core/tbe/kernel/ascend910_93/prompt_kv_cache/PromptKvCache_5c9365ccde170b358c5b126d69dae13e.o +0 -0
- mindspore/lib/plugin/ascend/custom_ascendc_910b/op_impl/ai_core/tbe/kernel/ascend910_93/prompt_kv_cache/PromptKvCache_6d97c45b7c43bc16fcff8baa5dacac4e.json +165 -0
- mindspore/lib/plugin/ascend/custom_ascendc_910b/op_impl/ai_core/tbe/kernel/ascend910_93/prompt_kv_cache/PromptKvCache_6d97c45b7c43bc16fcff8baa5dacac4e.o +0 -0
- mindspore/lib/plugin/ascend/custom_ascendc_910b/op_impl/ai_core/tbe/kernel/config/ascend910_93/all_finite.json +139 -0
- mindspore/lib/plugin/ascend/custom_ascendc_910b/op_impl/ai_core/tbe/kernel/config/ascend910_93/binary_info_config.json +361 -0
- mindspore/lib/plugin/ascend/custom_ascendc_910b/op_impl/ai_core/tbe/kernel/config/ascend910_93/decoder_kv_cache.json +892 -0
- mindspore/lib/plugin/ascend/custom_ascendc_910b/op_impl/ai_core/tbe/kernel/config/ascend910_93/prompt_kv_cache.json +892 -0
- mindspore/lib/plugin/ascend/custom_ascendc_910b/op_impl/ai_core/tbe/op_tiling/lib/linux/x86_64/libcust_opmaster_rt2.0.so +0 -0
- mindspore/lib/plugin/ascend/custom_ascendc_910b/op_impl/ai_core/tbe/op_tiling/liboptiling.so +0 -0
- mindspore/lib/plugin/ascend/custom_ascendc_910b/op_proto/lib/linux/x86_64/libcust_opsproto_rt2.0.so +0 -0
- mindspore/lib/plugin/ascend/custom_ascendc_910b/version.info +1 -1
- mindspore/lib/plugin/ascend/custom_compiler/setup.py +1 -1
- mindspore/lib/plugin/ascend/libascend_collective.so +0 -0
- mindspore/lib/plugin/ascend/libdvpp_utils.so +0 -0
- mindspore/lib/plugin/ascend/liblowlatency_collective.so +0 -0
- mindspore/lib/plugin/ascend/libmindspore_cpu_kernels.so +0 -0
- mindspore/lib/plugin/ascend/libmindspore_internal_kernels.so +0 -0
- mindspore/lib/plugin/ascend/libms_ascend_native_boost.so +0 -0
- mindspore/lib/plugin/ascend/libms_atb_boost.so +0 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/asdops/device/ascend910b/bin/ascend910b.bin +960 -958
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/{acme/include/base_type.h → base_type.h} +25 -20
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/{cast/cast_tiling.h → internal.h} +6 -4
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/internal_op.h +114 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/llm/boost_kernel.h +70 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/llm/llama_impl.h +85 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/llm/model_interface.h +52 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/llm/tensor.h +81 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/op_creator.h +123 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/op_param.h +155 -110
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/{acme/include/tiling_info.h → tiling_info.h} +12 -9
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/tiling_utils.h +178 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/lib/libadd_layer_norm_op.so +0 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/lib/libadd_rms_norm_op.so +0 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/lib/libadd_rms_norm_quant_op.so +0 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/lib/libapply_rotary_pos_emb_310p_op.so +0 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/lib/libapply_rotary_pos_emb_op.so +0 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/lib/libcast_op.so +0 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/lib/libcompare_op.so +0 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/lib/libgelu_op.so +0 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/lib/libllama_op.so +0 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/lib/libmatmul_op.so +0 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/lib/libms_kernels_internal.so +0 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/lib/libms_optiling.so +0 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/lib/libmulti_weight_matmul_kernel_op.so +0 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/lib/libreshape_and_cache_nz_op.so +0 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/lib/libreshape_and_cache_op.so +0 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/lib/librms_norm_op.so +0 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/op_kernels/ascend310p/object_kernels/internal_pp_matmul_f16_nz/internal_pp_matmul_f16_nz.o +0 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/op_kernels/ascend310p/object_kernels/internal_pp_matmul_f16_nz/internal_pp_matmul_f16_nz_0.o +0 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/op_kernels/ascend310p/object_kernels/internal_pp_matmul_i8_nz_compress/internal_pp_matmul_i8_nz_compress.o +0 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/op_kernels/ascend310p/object_kernels/internal_pp_matmul_i8_nz_compress/internal_pp_matmul_i8_nz_compress_0.o +0 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/op_kernels/ascend310p/object_kernels/internal_pp_matmul_int8_nz/internal_pp_matmul_int8_nz.o +0 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/op_kernels/ascend310p/object_kernels/internal_pp_matmul_int8_nz/internal_pp_matmul_int8_nz_0.o +0 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/op_kernels/ascend310p/so_kernels/libadd_rms_norm_quant_ascend310p.so +0 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/{lib/libapply_rotary_pos_emb_310p_impl.so → op_kernels/ascend310p/so_kernels/libapply_rotary_pos_emb_310p_ascend310p.so} +0 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/op_kernels/ascend310p/so_kernels/libcast_ascend310p.so +0 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/op_kernels/ascend310p/so_kernels/libcompare_ascend310p.so +0 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/op_kernels/ascend310p/so_kernels/libgelu_ascend310p.so +0 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/op_kernels/ascend310p/so_kernels/libmatmul_ascend310p.so +0 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/op_kernels/ascend310p/so_kernels/libreshape_and_cache_nz_ascend310p.so +0 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/op_kernels/ascend910b/hphol_kernels/add_rms_norm_dynamic_quant/AddRmsNormDynamicQuant_4b60f88cdc28b25a36bad2d8b0a88092.json +163 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/op_kernels/ascend910b/hphol_kernels/add_rms_norm_dynamic_quant/AddRmsNormDynamicQuant_4b60f88cdc28b25a36bad2d8b0a88092.o +0 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/op_kernels/ascend910b/hphol_kernels/add_rms_norm_dynamic_quant/AddRmsNormDynamicQuant_cde61da2bd6fededcb1ba310a6ad16ee.json +163 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/op_kernels/ascend910b/hphol_kernels/add_rms_norm_dynamic_quant/AddRmsNormDynamicQuant_cde61da2bd6fededcb1ba310a6ad16ee.o +0 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/op_kernels/ascend910b/object_kernels/flash_attention_score/flash_attention_score_bf16_bnsd_full_mix.o +0 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/op_kernels/ascend910b/object_kernels/flash_attention_score/flash_attention_score_bf16_bnsd_tri_mix.o +0 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/op_kernels/ascend910b/object_kernels/flash_attention_score/flash_attention_score_bf16_bsh_full_mix.o +0 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/op_kernels/ascend910b/object_kernels/flash_attention_score/flash_attention_score_bf16_bsh_tri_mix.o +0 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/op_kernels/ascend910b/object_kernels/flash_attention_score/flash_attention_score_fp16_bnsd_full_mix.o +0 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/op_kernels/ascend910b/object_kernels/flash_attention_score/flash_attention_score_fp16_bnsd_tri_mix.o +0 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/op_kernels/ascend910b/object_kernels/flash_attention_score/flash_attention_score_fp16_bsh_full_mix.o +0 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/op_kernels/ascend910b/object_kernels/flash_attention_score/flash_attention_score_fp16_bsh_tri_mix.o +0 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/op_kernels/ascend910b/object_kernels/internal_matmul_postfusion_mix/internal_matmul_postfusion_mix.o +0 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/op_kernels/ascend910b/object_kernels/internal_matmul_postfusion_mix/internal_matmul_postfusion_mix_mix_aic_0.o +0 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/op_kernels/ascend910b/object_kernels/internal_matmul_postfusion_mix/internal_matmul_postfusion_mix_mix_aiv_0.o +0 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/op_kernels/ascend910b/object_kernels/internal_multi_weight_matmul_postfusion_mix/internal_multi_weight_matmul_postfusion_mix.o +0 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/op_kernels/ascend910b/object_kernels/internal_multi_weight_matmul_postfusion_mix/internal_multi_weight_matmul_postfusion_mix_mix_aic_0.o +0 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/op_kernels/ascend910b/object_kernels/internal_multi_weight_matmul_postfusion_mix/internal_multi_weight_matmul_postfusion_mix_mix_aiv_0.o +0 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/op_kernels/ascend910b/object_kernels/matmul_add_rmsnorm/matmul_add_rmsnorm_bf16_bf16.o +0 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/op_kernels/ascend910b/object_kernels/matmul_add_rmsnorm/matmul_add_rmsnorm_bf16_fp16.o +0 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/op_kernels/ascend910b/object_kernels/matmul_add_rmsnorm/matmul_add_rmsnorm_bf16_fp32.o +0 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/op_kernels/ascend910b/object_kernels/matmul_add_rmsnorm/matmul_add_rmsnorm_fp16_bf16.o +0 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/op_kernels/ascend910b/object_kernels/matmul_add_rmsnorm/matmul_add_rmsnorm_fp16_fp16.o +0 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/op_kernels/ascend910b/object_kernels/matmul_add_rmsnorm/matmul_add_rmsnorm_fp16_fp32.o +0 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/op_kernels/ascend910b/object_kernels/paged_attention_v2/paged_attention_v2.o +0 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/op_kernels/ascend910b/object_kernels/paged_attention_v2/paged_attention_v2_mix_aic_0.o +0 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/op_kernels/ascend910b/object_kernels/paged_attention_v2/paged_attention_v2_mix_aiv_0.o +0 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/op_kernels/ascend910b/so_kernels/libadd_layer_norm_ascend910b.so +0 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/{lib/libadd_rms_norm_impl.so → op_kernels/ascend910b/so_kernels/libadd_rms_norm_ascend910b.so} +0 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/op_kernels/ascend910b/so_kernels/libadd_rms_norm_quant_ascend910b.so +0 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/{lib/libapply_rotary_pos_emb_impl.so → op_kernels/ascend910b/so_kernels/libapply_rotary_pos_emb_ascend910b.so} +0 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/{lib/libcast_impl.so → op_kernels/ascend910b/so_kernels/libcast_ascend910b.so} +0 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/{lib/libnot_equal_impl.so → op_kernels/ascend910b/so_kernels/libcompare_ascend910b.so} +0 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/{lib/libgelu_impl.so → op_kernels/ascend910b/so_kernels/libgelu_ascend910b.so} +0 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/op_kernels/ascend910b/so_kernels/libllama_ascend910b.so +0 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/{lib/libmatmul_impl.so → op_kernels/ascend910b/so_kernels/libmatmul_ascend910b.so} +0 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/{lib/libmulti_weight_matmul_kernel_impl.so → op_kernels/ascend910b/so_kernels/libmulti_weight_matmul_kernel_ascend910b.so} +0 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/{lib/libreshape_and_cache_impl.so → op_kernels/ascend910b/so_kernels/libreshape_and_cache_ascend910b.so} +0 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/{lib/librms_norm_impl.so → op_kernels/ascend910b/so_kernels/librms_norm_ascend910b.so} +0 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/lccl/lib/liblccl_wrapper.so +0 -0
- mindspore/lib/plugin/gpu/libcuda_ops.so.10 +0 -0
- mindspore/lib/plugin/gpu/libcuda_ops.so.11 +0 -0
- mindspore/lib/plugin/gpu10.1/libnccl.so.2 +0 -0
- mindspore/lib/plugin/gpu10.1/libnvidia_collective.so +0 -0
- mindspore/lib/plugin/gpu11.1/libnccl.so.2 +0 -0
- mindspore/lib/plugin/gpu11.1/libnvidia_collective.so +0 -0
- mindspore/lib/plugin/gpu11.6/libnccl.so.2 +0 -0
- mindspore/lib/plugin/gpu11.6/libnvidia_collective.so +0 -0
- mindspore/lib/plugin/libmindspore_ascend.so.2 +0 -0
- mindspore/lib/plugin/libmindspore_gpu.so.10.1 +0 -0
- mindspore/lib/plugin/libmindspore_gpu.so.11.1 +0 -0
- mindspore/lib/plugin/libmindspore_gpu.so.11.6 +0 -0
- mindspore/log.py +12 -0
- mindspore/mindrecord/__init__.py +1 -1
- mindspore/mindrecord/config.py +17 -316
- mindspore/mindrecord/filereader.py +1 -9
- mindspore/mindrecord/filewriter.py +5 -15
- mindspore/mindrecord/mindpage.py +1 -9
- mindspore/mint/__init__.py +824 -218
- mindspore/mint/distributed/__init__.py +66 -4
- mindspore/mint/distributed/distributed.py +2594 -44
- mindspore/mint/linalg/__init__.py +6 -0
- mindspore/mint/nn/__init__.py +473 -14
- mindspore/mint/nn/functional.py +486 -11
- mindspore/mint/nn/layer/__init__.py +17 -4
- mindspore/mint/nn/layer/_functions.py +330 -0
- mindspore/mint/nn/layer/activation.py +169 -1
- mindspore/mint/nn/layer/basic.py +123 -0
- mindspore/mint/nn/layer/conv.py +727 -0
- mindspore/mint/nn/layer/normalization.py +215 -19
- mindspore/mint/nn/layer/padding.py +797 -0
- mindspore/mint/nn/layer/pooling.py +170 -0
- mindspore/mint/optim/__init__.py +2 -1
- mindspore/mint/optim/adam.py +223 -0
- mindspore/mint/optim/adamw.py +26 -19
- mindspore/mint/special/__init__.py +2 -1
- mindspore/multiprocessing/__init__.py +5 -0
- mindspore/nn/cell.py +126 -19
- mindspore/nn/dynamic_lr.py +2 -1
- mindspore/nn/layer/activation.py +6 -6
- mindspore/nn/layer/basic.py +35 -25
- mindspore/nn/layer/channel_shuffle.py +3 -3
- mindspore/nn/layer/embedding.py +3 -3
- mindspore/nn/layer/normalization.py +8 -7
- mindspore/nn/layer/padding.py +4 -3
- mindspore/nn/layer/pooling.py +47 -13
- mindspore/nn/layer/rnn_cells.py +1 -1
- mindspore/nn/layer/rnns.py +2 -1
- mindspore/nn/layer/timedistributed.py +5 -5
- mindspore/nn/layer/transformer.py +48 -26
- mindspore/nn/learning_rate_schedule.py +5 -3
- mindspore/nn/loss/loss.py +31 -36
- mindspore/nn/optim/ada_grad.py +1 -0
- mindspore/nn/optim/adadelta.py +2 -2
- mindspore/nn/optim/adam.py +1 -1
- mindspore/nn/optim/lars.py +1 -4
- mindspore/nn/optim/optimizer.py +1 -1
- mindspore/nn/optim/rprop.py +2 -2
- mindspore/nn/optim/thor.py +2 -1
- mindspore/nn/utils/init.py +13 -11
- mindspore/nn/wrap/cell_wrapper.py +4 -6
- mindspore/nn/wrap/loss_scale.py +3 -4
- mindspore/numpy/array_creations.py +60 -62
- mindspore/numpy/array_ops.py +148 -143
- mindspore/numpy/logic_ops.py +41 -42
- mindspore/numpy/math_ops.py +361 -359
- mindspore/numpy/utils.py +16 -16
- mindspore/numpy/utils_const.py +4 -4
- mindspore/ops/__init__.py +2 -1
- mindspore/ops/_grad_experimental/grad_comm_ops.py +94 -13
- mindspore/ops/_grad_experimental/grad_debug_ops.py +6 -1
- mindspore/ops/_grad_experimental/grad_inner_ops.py +9 -0
- mindspore/ops/_grad_experimental/grad_math_ops.py +2 -1
- mindspore/ops/_op_impl/cpu/__init__.py +1 -0
- mindspore/ops/_op_impl/cpu/raise_op.py +28 -0
- mindspore/ops/_vmap/vmap_array_ops.py +20 -19
- mindspore/ops/_vmap/vmap_base.py +0 -2
- mindspore/ops/_vmap/vmap_grad_nn_ops.py +19 -13
- mindspore/ops/_vmap/vmap_math_ops.py +11 -9
- mindspore/ops/_vmap/vmap_nn_ops.py +20 -34
- mindspore/ops/auto_generate/cpp_create_prim_instance_helper.py +149 -12
- mindspore/ops/auto_generate/gen_arg_handler.py +0 -61
- mindspore/ops/auto_generate/gen_extend_func.py +554 -60
- mindspore/ops/auto_generate/gen_ops_def.py +1621 -115
- mindspore/ops/auto_generate/gen_ops_prim.py +8024 -3409
- mindspore/ops/auto_generate/pyboost_inner_prim.py +183 -79
- mindspore/ops/composite/base.py +1 -1
- mindspore/ops/composite/multitype_ops/_compile_utils.py +229 -30
- mindspore/ops/composite/multitype_ops/pow_impl.py +0 -29
- mindspore/ops/function/__init__.py +12 -0
- mindspore/ops/function/array_func.py +561 -159
- mindspore/ops/function/clip_func.py +64 -0
- mindspore/ops/function/debug_func.py +28 -20
- mindspore/ops/function/image_func.py +1 -1
- mindspore/ops/function/linalg_func.py +5 -4
- mindspore/ops/function/math_func.py +1659 -290
- mindspore/ops/function/nn_func.py +988 -317
- mindspore/ops/function/parameter_func.py +3 -56
- mindspore/ops/function/random_func.py +243 -33
- mindspore/ops/function/sparse_unary_func.py +1 -1
- mindspore/ops/functional.py +18 -5
- mindspore/ops/functional_overload.py +897 -0
- mindspore/ops/operations/__init__.py +3 -2
- mindspore/ops/operations/_embedding_cache_ops.py +4 -4
- mindspore/ops/operations/_grad_ops.py +2 -34
- mindspore/ops/operations/_infer_ops.py +2 -1
- mindspore/ops/operations/_inner_ops.py +38 -8
- mindspore/ops/operations/array_ops.py +45 -303
- mindspore/ops/operations/comm_ops.py +19 -16
- mindspore/ops/operations/custom_ops.py +11 -55
- mindspore/ops/operations/debug_ops.py +42 -47
- mindspore/ops/operations/inner_ops.py +6 -4
- mindspore/ops/operations/linalg_ops.py +3 -2
- mindspore/ops/operations/manually_defined/ops_def.py +185 -104
- mindspore/ops/operations/math_ops.py +11 -216
- mindspore/ops/operations/nn_ops.py +146 -308
- mindspore/ops/primitive.py +23 -21
- mindspore/ops/tensor_method.py +1669 -0
- mindspore/ops_generate/aclnn_kernel_register_auto_cc_generator.py +110 -0
- mindspore/ops_generate/add_tensor_docs_generator.py +54 -0
- mindspore/ops_generate/arg_handler.py +0 -61
- mindspore/ops_generate/auto_grad_impl_cc_generator.py +135 -0
- mindspore/ops_generate/auto_grad_reg_cc_generator.py +93 -0
- mindspore/ops_generate/base_generator.py +11 -0
- mindspore/ops_generate/cpp_create_prim_instance_helper_generator.py +108 -0
- mindspore/ops_generate/functional_map_cpp_generator.py +491 -0
- mindspore/ops_generate/functional_overload_py_generator.py +110 -0
- mindspore/ops_generate/functions_cc_generator.py +233 -0
- mindspore/ops_generate/gen_aclnn_implement.py +110 -114
- mindspore/ops_generate/gen_constants.py +157 -3
- mindspore/ops_generate/gen_ops.py +245 -990
- mindspore/ops_generate/gen_pyboost_func.py +97 -998
- mindspore/ops_generate/gen_utils.py +119 -33
- mindspore/ops_generate/lite_ops_cpp_generator.py +155 -0
- mindspore/ops_generate/op_api_proto.py +206 -0
- mindspore/ops_generate/op_def_py_generator.py +131 -0
- mindspore/ops_generate/op_prim_py_generator.py +480 -0
- mindspore/ops_generate/op_proto.py +373 -108
- mindspore/ops_generate/op_template_parser.py +436 -0
- mindspore/ops_generate/ops_def_cc_generator.py +288 -0
- mindspore/ops_generate/ops_def_h_generator.py +74 -0
- mindspore/ops_generate/ops_name_h_generator.py +68 -0
- mindspore/ops_generate/ops_primitive_h_generator.py +81 -0
- mindspore/ops_generate/pyboost_functions_cpp_generator.py +370 -0
- mindspore/ops_generate/pyboost_functions_h_generator.py +68 -0
- mindspore/ops_generate/pyboost_functions_py_generator.py +148 -0
- mindspore/ops_generate/pyboost_grad_function_cpp_generator.py +154 -0
- mindspore/ops_generate/pyboost_inner_prim_generator.py +131 -0
- mindspore/ops_generate/pyboost_native_grad_functions_generator.py +268 -0
- mindspore/ops_generate/pyboost_op_cpp_code_generator.py +851 -0
- mindspore/ops_generate/pyboost_overload_functions_cpp_generator.py +344 -0
- mindspore/ops_generate/pyboost_utils.py +92 -33
- mindspore/ops_generate/template.py +294 -44
- mindspore/ops_generate/tensor_func_reg_cpp_generator.py +422 -0
- mindspore/parallel/__init__.py +3 -3
- mindspore/parallel/_auto_parallel_context.py +24 -33
- mindspore/parallel/_parallel_serialization.py +13 -2
- mindspore/parallel/_utils.py +4 -1
- mindspore/parallel/algo_parameter_config.py +1 -1
- mindspore/parallel/checkpoint_transform.py +44 -0
- mindspore/parallel/cluster/process_entity/_api.py +131 -37
- mindspore/parallel/cluster/process_entity/_utils.py +41 -6
- mindspore/parallel/cluster/run.py +20 -3
- mindspore/parallel/parameter_broadcast.py +1 -1
- mindspore/parallel/shard.py +3 -0
- mindspore/parallel/transform_safetensors.py +119 -253
- mindspore/profiler/__init__.py +17 -4
- mindspore/profiler/analysis/__init__.py +0 -0
- mindspore/profiler/analysis/parser/__init__.py +0 -0
- mindspore/profiler/analysis/parser/ascend_cann_parser.py +166 -0
- mindspore/profiler/analysis/parser/base_parser.py +158 -0
- mindspore/profiler/analysis/parser/framework_cann_relation_parser.py +45 -0
- mindspore/profiler/analysis/parser/ms_framework_parser.py +142 -0
- mindspore/profiler/analysis/parser/ms_minddata_parser.py +145 -0
- mindspore/profiler/analysis/parser/timeline_assembly_factory/__init__.py +0 -0
- mindspore/profiler/analysis/parser/timeline_assembly_factory/ascend_timeline_assembler.py +261 -0
- mindspore/profiler/analysis/parser/timeline_assembly_factory/base_timeline_assembler.py +40 -0
- mindspore/profiler/analysis/parser/timeline_assembly_factory/trace_view_container.py +84 -0
- mindspore/profiler/analysis/parser/timeline_creator/__init__.py +0 -0
- mindspore/profiler/analysis/parser/timeline_creator/base_timeline_creator.py +44 -0
- mindspore/profiler/analysis/parser/timeline_creator/cpu_op_timeline_creator.py +90 -0
- mindspore/profiler/analysis/parser/timeline_creator/fwk_timeline_creator.py +76 -0
- mindspore/profiler/analysis/parser/timeline_creator/msprof_timeline_creator.py +103 -0
- mindspore/profiler/analysis/parser/timeline_creator/scope_layer_timeline_creator.py +134 -0
- mindspore/profiler/analysis/parser/timeline_event/__init__.py +0 -0
- mindspore/profiler/analysis/parser/timeline_event/base_event.py +233 -0
- mindspore/profiler/analysis/parser/timeline_event/cpu_op_event.py +47 -0
- mindspore/profiler/analysis/parser/timeline_event/flow_event.py +36 -0
- mindspore/profiler/analysis/parser/timeline_event/fwk_event.py +260 -0
- mindspore/profiler/analysis/parser/timeline_event/msprof_event.py +73 -0
- mindspore/profiler/analysis/parser/timeline_event/scope_layer_event.py +53 -0
- mindspore/profiler/analysis/parser/timeline_event/timeline_event_pool.py +146 -0
- mindspore/profiler/analysis/task_manager.py +131 -0
- mindspore/profiler/analysis/time_converter.py +84 -0
- mindspore/profiler/analysis/viewer/__init__.py +0 -0
- mindspore/profiler/analysis/viewer/ascend_communication_viewer.py +333 -0
- mindspore/profiler/analysis/viewer/ascend_integrate_viewer.py +87 -0
- mindspore/profiler/analysis/viewer/ascend_kernel_details_viewer.py +252 -0
- mindspore/profiler/analysis/viewer/ascend_memory_viewer.py +313 -0
- mindspore/profiler/analysis/viewer/ascend_op_memory_viewer.py +322 -0
- mindspore/profiler/analysis/viewer/ascend_step_trace_time_viewer.py +265 -0
- mindspore/profiler/analysis/viewer/ascend_timeline_viewer.py +58 -0
- mindspore/profiler/analysis/viewer/base_viewer.py +26 -0
- mindspore/profiler/analysis/viewer/ms_dataset_viewer.py +97 -0
- mindspore/profiler/analysis/viewer/ms_minddata_viewer.py +581 -0
- mindspore/profiler/analysis/work_flow.py +73 -0
- mindspore/profiler/common/ascend_msprof_exporter.py +138 -0
- mindspore/profiler/common/command_executor.py +90 -0
- mindspore/profiler/common/constant.py +174 -3
- mindspore/profiler/common/file_manager.py +208 -0
- mindspore/profiler/common/log.py +130 -0
- mindspore/profiler/common/msprof_cmd_tool.py +202 -0
- mindspore/profiler/common/path_manager.py +371 -0
- mindspore/profiler/common/process_bar.py +168 -0
- mindspore/profiler/common/process_pool.py +9 -3
- mindspore/profiler/common/profiler_context.py +476 -0
- mindspore/profiler/common/profiler_info.py +304 -0
- mindspore/profiler/common/profiler_output_path.py +284 -0
- mindspore/profiler/common/profiler_parameters.py +210 -0
- mindspore/profiler/common/profiler_path_manager.py +120 -0
- mindspore/profiler/common/record_function.py +76 -0
- mindspore/profiler/common/tlv_decoder.py +76 -0
- mindspore/profiler/common/util.py +75 -2
- mindspore/profiler/dynamic_profiler.py +270 -37
- mindspore/profiler/envprofiler.py +138 -0
- mindspore/profiler/mstx.py +199 -0
- mindspore/profiler/platform/__init__.py +21 -0
- mindspore/profiler/platform/base_profiler.py +40 -0
- mindspore/profiler/platform/cpu_profiler.py +124 -0
- mindspore/profiler/platform/gpu_profiler.py +74 -0
- mindspore/profiler/platform/npu_profiler.py +309 -0
- mindspore/profiler/profiler.py +580 -93
- mindspore/profiler/profiler_action_controller.py +187 -0
- mindspore/profiler/profiler_interface.py +114 -0
- mindspore/profiler/schedule.py +208 -0
- mindspore/rewrite/api/symbol_tree.py +1 -2
- mindspore/run_check/_check_version.py +2 -6
- mindspore/runtime/__init__.py +37 -0
- mindspore/runtime/device.py +27 -0
- mindspore/runtime/event.py +209 -0
- mindspore/runtime/executor.py +148 -0
- mindspore/runtime/memory.py +392 -0
- mindspore/runtime/stream.py +460 -0
- mindspore/runtime/thread_bind_core.py +401 -0
- mindspore/train/__init__.py +2 -2
- mindspore/train/_utils.py +53 -18
- mindspore/train/amp.py +8 -4
- mindspore/train/callback/_checkpoint.py +32 -18
- mindspore/train/callback/_early_stop.py +1 -1
- mindspore/train/callback/_flops_collector.py +105 -69
- mindspore/train/callback/_history.py +1 -1
- mindspore/train/callback/_summary_collector.py +44 -6
- mindspore/train/callback/_tft_register.py +31 -10
- mindspore/train/dataset_helper.py +11 -11
- mindspore/train/metrics/precision.py +4 -5
- mindspore/train/mind_ir_pb2.py +167 -46
- mindspore/train/model.py +13 -15
- mindspore/train/serialization.py +462 -76
- mindspore/train/summary/summary_record.py +1 -2
- mindspore/train/train_thor/model_thor.py +1 -1
- mindspore/utils/__init__.py +4 -2
- mindspore/utils/bin/dataset-cache +0 -0
- mindspore/utils/bin/dataset-cache-server +0 -0
- mindspore/utils/dryrun.py +138 -0
- mindspore/utils/runtime_execution_order_check.py +550 -0
- mindspore/version.py +1 -1
- {mindspore-2.4.10.dist-info → mindspore-2.5.0.dist-info}/METADATA +2 -3
- {mindspore-2.4.10.dist-info → mindspore-2.5.0.dist-info}/RECORD +533 -467
- {mindspore-2.4.10.dist-info → mindspore-2.5.0.dist-info}/entry_points.txt +1 -1
- mindspore/_data_dump.cpython-39-x86_64-linux-gnu.so +0 -0
- mindspore/bin/cache_admin +0 -0
- mindspore/bin/cache_server +0 -0
- mindspore/common/_tensor_overload.py +0 -139
- mindspore/lib/libmindspore_np_dtype.so +0 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/acme/include/acme.h +0 -24
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/acme/include/acme_op.h +0 -82
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/acme/include/op_creator.h +0 -113
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/acme/include/op_param.h +0 -193
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/acme/src/core/dtype_registry.h +0 -90
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/acme/src/core/kernel_register.h +0 -46
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/acme/src/core/platform/platform_configs.h +0 -89
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/acme/src/core/platform/rt_funcs.h +0 -135
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/acme/src/ops/host_src/add_layer_norm_op.h +0 -60
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/acme/src/ops/host_src/add_rms_norm_op.h +0 -50
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/acme/src/ops/host_src/add_rms_norm_quant_op.h +0 -50
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/acme/src/ops/host_src/apply_rotary_pos_emb_nz_op.h +0 -42
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/acme/src/ops/host_src/apply_rotary_pos_emb_op.h +0 -55
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/acme/src/ops/host_src/asd_elewise_op.h +0 -34
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/acme/src/ops/host_src/asd_only_ops.h +0 -94
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/acme/src/ops/host_src/asd_op_base.h +0 -97
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/acme/src/ops/host_src/cast_op.h +0 -52
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/acme/src/ops/host_src/flash_attention_score_op.h +0 -97
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/acme/src/ops/host_src/gelu_op.h +0 -44
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/acme/src/ops/host_src/matmul_add_rmsnorm_op.h +0 -73
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/acme/src/ops/host_src/matmul_op.h +0 -108
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/acme/src/ops/host_src/multi_impls_op.h +0 -64
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/acme/src/ops/host_src/multi_weight_matmul_op.h +0 -91
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/acme/src/ops/host_src/paged_attention_op.h +0 -99
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/acme/src/ops/host_src/reshape_and_cache_nz_op.h +0 -44
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/acme/src/ops/host_src/reshape_and_cache_op.h +0 -44
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/acme/src/ops/host_src/rms_norm_op.h +0 -64
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/acme/src/utils/asd_utils.h +0 -179
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/acme/src/utils/comm_utils.h +0 -69
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/acme/src/utils/profiling_util.h +0 -366
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/add/add_impl.h +0 -56
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/add/kernel/add.h +0 -21
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/add/tiling/add_tiling.h +0 -43
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/apply_rotary_pos_emb/apply_rotary_pos_emb_impl.h +0 -46
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/apply_rotary_pos_emb/kernel/apply_rotary_pos_emb.h +0 -23
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/apply_rotary_pos_emb/kernel/apply_rotary_pos_emb_base.h +0 -456
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/apply_rotary_pos_emb/kernel/apply_rotary_pos_emb_bf16.h +0 -217
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/apply_rotary_pos_emb/kernel/apply_rotary_pos_emb_fp.h +0 -391
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/apply_rotary_pos_emb/kernel/apply_rotary_pos_emb_fp16.h +0 -126
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/apply_rotary_pos_emb/kernel/apply_rotary_pos_emb_fp32.h +0 -230
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/apply_rotary_pos_emb/kernel/apply_rotary_pos_emb_tiling.h +0 -43
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/apply_rotary_pos_emb/kernel/apply_rotary_pos_emb_value.h +0 -27
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/apply_rotary_pos_emb_nz/apply_rotary_pos_emb_nz_impl.h +0 -34
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/apply_rotary_pos_emb_nz/kernel/apply_rotary_pos_emb_nz.h +0 -23
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/apply_rotary_pos_emb_nz/kernel/apply_rotary_pos_emb_nz_base.h +0 -460
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/apply_rotary_pos_emb_nz/kernel/apply_rotary_pos_emb_nz_fp16.h +0 -116
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/apply_rotary_pos_emb_nz/kernel/apply_rotary_pos_emb_nz_fp32.h +0 -230
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/apply_rotary_pos_emb_nz/kernel/apply_rotary_pos_emb_nz_tiling.h +0 -43
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/apply_rotary_pos_emb_nz/kernel/apply_rotary_pos_emb_nz_value.h +0 -27
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/asdop/asd_op_impl.h +0 -74
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/backend_param.h +0 -74
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/cast/cast_impl.h +0 -48
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/cast/kernel/cast_kernel.h +0 -21
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/compare/compare_impl.h +0 -55
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/compare/compare_tiling.h +0 -27
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/compare/kernel/compare_kernel.h +0 -23
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/elewise_binary/and_impl.h +0 -29
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/elewise_binary/div_impl.h +0 -29
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/elewise_binary/elewise_binary_impl.h +0 -48
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/elewise_binary/elewise_binary_tiling.h +0 -25
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/elewise_binary/kernel/and_kernel.h +0 -46
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/elewise_binary/kernel/div_kernel.h +0 -46
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/elewise_binary/kernel/elewise_binary_base.h +0 -260
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/elewise_binary/kernel/elewise_binary_kernel.h +0 -35
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/elewise_binary/kernel/max_kernel.h +0 -66
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/elewise_binary/kernel/min_kernel.h +0 -66
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/elewise_binary/kernel/mul_kernel.h +0 -66
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/elewise_binary/kernel/or_kernel.h +0 -46
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/elewise_binary/max_impl.h +0 -29
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/elewise_binary/min_impl.h +0 -29
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/elewise_binary/mul_impl.h +0 -29
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/elewise_binary/or_impl.h +0 -29
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/elewise_unary/abs_impl.h +0 -29
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/elewise_unary/elewise_unary_impl.h +0 -47
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/elewise_unary/elewise_unary_tiling.h +0 -24
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/elewise_unary/exp_impl.h +0 -29
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/elewise_unary/kernel/abs_kernel.h +0 -45
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/elewise_unary/kernel/elewise_unary_base.h +0 -148
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/elewise_unary/kernel/elewise_unary_kernel.h +0 -31
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/elewise_unary/kernel/exp_kernel.h +0 -45
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/elewise_unary/kernel/ln_kernel.h +0 -45
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/elewise_unary/kernel/not_kernel.h +0 -45
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/elewise_unary/kernel/reciprocal_kernel.h +0 -45
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/elewise_unary/kernel/relu_kernel.h +0 -55
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/elewise_unary/kernel/rsqrt_kernel.h +0 -45
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/elewise_unary/kernel/sqrt_kernel.h +0 -45
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/elewise_unary/ln_impl.h +0 -29
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/elewise_unary/not_impl.h +0 -29
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/elewise_unary/reciprocal_impl.h +0 -29
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/elewise_unary/relu_impl.h +0 -29
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/elewise_unary/rsqrt_impl.h +0 -29
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/elewise_unary/sqrt_impl.h +0 -29
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/flash_attention_score/flash_attention_score_impl.h +0 -68
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/internal_kernel.h +0 -99
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/internal_rtbackend.h +0 -21
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/lccl/lccl_wrapper.h +0 -58
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/ms_int_types.h +0 -91
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/ms_int_utils.h +0 -108
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/paged_attention/paged_attention_impl.h +0 -64
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/param/add_param.h +0 -68
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/param/attention_param.h +0 -40
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/param/cast_param.h +0 -30
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/param/compare_param.h +0 -31
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/param/elewise_param.h +0 -41
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/param/grouped_matmul_param.h +0 -40
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/param/matmul_ext_param.h +0 -38
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/param/matmul_qkv_param.h +0 -42
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/param/sub_param.h +0 -33
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/profiling_util.h +0 -377
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/reshape_and_cache_nz/kernel/reshape_and_cache_nz.h +0 -24
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/reshape_and_cache_nz/reshape_and_cache_nz_impl.h +0 -42
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/reshape_and_cache_nz/reshape_and_cache_nz_tiling.h +0 -27
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/rms_norm/rms_norm_impl.h +0 -46
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/sub/kernel/sub_kernel.h +0 -20
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/sub/sub_impl.h +0 -48
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/sub/sub_tiling.h +0 -25
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/tune_repo/matmul_table.h +0 -399
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/tune_repo/utils.h +0 -41
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/utils/backend.h +0 -45
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/utils/elewise_tiling.h +0 -29
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/utils/elewise_utils.h +0 -30
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/utils/log/log.h +0 -69
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/utils/log/log_core.h +0 -43
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/utils/log/log_entity.h +0 -38
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/utils/log/log_sink.h +0 -69
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/utils/log/log_stream.h +0 -41
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/utils/log/log_tiling.h +0 -71
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/utils/log/log_utils.h +0 -165
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/utils/math.h +0 -20
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/utils/register/kernel_creator.h +0 -39
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/utils/register/kernel_registry.h +0 -121
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/utils/utils.h +0 -106
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/lib/libAdd_impl.so +0 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/lib/libSub_impl.so +0 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/lib/libadd_layer_norm_impl.so +0 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/lib/libadd_rms_norm_quant_acme_impl.so +0 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/lib/libapply_rotary_pos_emb_310p_old_impl.so +0 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/lib/libapply_rotary_pos_emb_old_impl.so +0 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/lib/libreshape_and_cache_nz_impl.so +0 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/lib/libreshape_and_cache_nz_old_impl.so +0 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/op_kernels/ascend910b/AcmeMatMulPostFusionMixTactic/acme_matmul_postfusion_mix.json +0 -19
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/op_kernels/ascend910b/AcmeMatMulPostFusionMixTactic/acme_matmul_postfusion_mix.o +0 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/op_kernels/ascend910b/AcmeMatMulPostFusionMixTactic/acme_matmul_postfusion_mix_mix_aic_0.o +0 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/op_kernels/ascend910b/AcmeMatMulPostFusionMixTactic/acme_matmul_postfusion_mix_mix_aiv_0.o +0 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/op_kernels/ascend910b/AcmeMultiWeightMatMulPostFusionMixTactic/acme_multi_weight_matmul_postfusion_mix.json +0 -19
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/op_kernels/ascend910b/AcmeMultiWeightMatMulPostFusionMixTactic/acme_multi_weight_matmul_postfusion_mix.o +0 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/op_kernels/ascend910b/AcmeMultiWeightMatMulPostFusionMixTactic/acme_multi_weight_matmul_postfusion_mix_mix_aic_0.o +0 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/op_kernels/ascend910b/AcmeMultiWeightMatMulPostFusionMixTactic/acme_multi_weight_matmul_postfusion_mix_mix_aiv_0.o +0 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/op_kernels/ascend910b/flash_attention_score/flash_attention_score_bf16_bnsd_full_mix.o +0 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/op_kernels/ascend910b/flash_attention_score/flash_attention_score_bf16_bnsd_tri_mix.o +0 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/op_kernels/ascend910b/flash_attention_score/flash_attention_score_bf16_bsh_full_mix.o +0 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/op_kernels/ascend910b/flash_attention_score/flash_attention_score_bf16_bsh_tri_mix.o +0 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/op_kernels/ascend910b/flash_attention_score/flash_attention_score_fp16_bnsd_full_mix.o +0 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/op_kernels/ascend910b/flash_attention_score/flash_attention_score_fp16_bnsd_tri_mix.o +0 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/op_kernels/ascend910b/flash_attention_score/flash_attention_score_fp16_bsh_full_mix.o +0 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/op_kernels/ascend910b/flash_attention_score/flash_attention_score_fp16_bsh_tri_mix.o +0 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/op_kernels/ascend910b/matmul_add_rmsnorm/matmul_add_rmsnorm_bf16_bf16.o +0 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/op_kernels/ascend910b/matmul_add_rmsnorm/matmul_add_rmsnorm_bf16_fp16.o +0 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/op_kernels/ascend910b/matmul_add_rmsnorm/matmul_add_rmsnorm_bf16_fp32.o +0 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/op_kernels/ascend910b/matmul_add_rmsnorm/matmul_add_rmsnorm_fp16_bf16.o +0 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/op_kernels/ascend910b/matmul_add_rmsnorm/matmul_add_rmsnorm_fp16_fp16.o +0 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/op_kernels/ascend910b/matmul_add_rmsnorm/matmul_add_rmsnorm_fp16_fp32.o +0 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/op_kernels/ascend910b/paged_attention/paged_attention_bf16_bnsd_mix.o +0 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/op_kernels/ascend910b/paged_attention/paged_attention_bf16_bsh_mix.o +0 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/op_kernels/ascend910b/paged_attention/paged_attention_fp16_bnsd_mix.o +0 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/op_kernels/ascend910b/paged_attention/paged_attention_fp16_bsh_mix.o +0 -0
- mindspore/profiler/envprofiling.py +0 -254
- mindspore/profiler/profiling.py +0 -1926
- {mindspore-2.4.10.dist-info → mindspore-2.5.0.dist-info}/WHEEL +0 -0
- {mindspore-2.4.10.dist-info → mindspore-2.5.0.dist-info}/top_level.txt +0 -0
mindspore/nn/cell.py
CHANGED
|
@@ -60,7 +60,7 @@ class Cell(Cell_):
|
|
|
60
60
|
.. note::
|
|
61
61
|
Cell is the inference mode by default. For a class that inherits a Cell,
|
|
62
62
|
if the training and inference have different structures, the subclass performs the inference branch by default.
|
|
63
|
-
To set the training mode, refer to
|
|
63
|
+
To set the training mode, refer to :func:`mindspore.nn.Cell.set_train` .
|
|
64
64
|
|
|
65
65
|
.. warning::
|
|
66
66
|
In the subclass of Cell, it's not allowed to define a method named 'cast' and not allowed to define an attribute
|
|
@@ -105,7 +105,8 @@ class Cell(Cell_):
|
|
|
105
105
|
'_func_graph_flags', '_parameter_layout_dict', '_params_list', '_phase', '_bprop_debug',
|
|
106
106
|
'_forward_pre_hook', '_forward_hook', '_backward_pre_hook', '_backward_hook',
|
|
107
107
|
'_cell_backward_pre_hook', '_cell_backward_hook', '_is_run', '_param_prefix',
|
|
108
|
-
'_attr_synced', 'pynative', 'requires_grad', 'cell_type'
|
|
108
|
+
'_attr_synced', 'pynative', 'requires_grad', 'cell_type',
|
|
109
|
+
'_parameters_forward_hook', '_parameters_backward_hook']
|
|
109
110
|
total_instance_count = 0
|
|
110
111
|
|
|
111
112
|
def __init__(self, auto_prefix=True, flags=None):
|
|
@@ -143,6 +144,8 @@ class Cell(Cell_):
|
|
|
143
144
|
|
|
144
145
|
# call gc to release GE session resources used by non-used cell objects
|
|
145
146
|
if os.getenv('GC_COLLECT_IN_CELL') == '1':
|
|
147
|
+
logger.warning("The convenient environment 'GC_COLLECT_IN_CELL' is deprecated from version 2.5 "
|
|
148
|
+
"and will be removed in a future version.")
|
|
146
149
|
gc.collect()
|
|
147
150
|
|
|
148
151
|
if flags:
|
|
@@ -158,6 +161,10 @@ class Cell(Cell_):
|
|
|
158
161
|
self._cell_backward_hook = None
|
|
159
162
|
self._is_recursion_hook = False
|
|
160
163
|
|
|
164
|
+
# parameters hook
|
|
165
|
+
self._parameters_forward_hook = None
|
|
166
|
+
self._parameters_backward_hook = None
|
|
167
|
+
|
|
161
168
|
self.cell_type = None
|
|
162
169
|
self.cast = Cast()
|
|
163
170
|
self._has_config_recompute = False
|
|
@@ -492,14 +499,17 @@ class Cell(Cell_):
|
|
|
492
499
|
if self._forward_pre_hook:
|
|
493
500
|
inputs = self._run_forward_pre_hook(inputs)
|
|
494
501
|
|
|
495
|
-
if self.
|
|
496
|
-
output = self._backward_hook_construct(*inputs, **kwargs)
|
|
497
|
-
elif self._shard_fn is not None:
|
|
502
|
+
if self._shard_fn is not None:
|
|
498
503
|
output = self._shard_fn(*inputs, **kwargs)
|
|
499
|
-
elif
|
|
500
|
-
|
|
501
|
-
|
|
502
|
-
|
|
504
|
+
elif _pynative_executor.requires_grad():
|
|
505
|
+
if self._backward_hook:
|
|
506
|
+
output = self._backward_hook_construct(*inputs, **kwargs)
|
|
507
|
+
elif self._recompute_cell is not None:
|
|
508
|
+
output = self._recompute_cell(*inputs, **kwargs)
|
|
509
|
+
elif self.has_bprop:
|
|
510
|
+
output = self._call_custom_bprop(*inputs, **kwargs)
|
|
511
|
+
else:
|
|
512
|
+
output = self.construct(*inputs, **kwargs)
|
|
503
513
|
else:
|
|
504
514
|
output = self.construct(*inputs, **kwargs)
|
|
505
515
|
|
|
@@ -598,7 +608,7 @@ class Cell(Cell_):
|
|
|
598
608
|
strategy for others will be set by sharding propagation.
|
|
599
609
|
in_strategy and out_strategy define the input and output layout respectively.
|
|
600
610
|
in_strategy/out_strategy should be a tuple, each element of which corresponds to the desired layout of
|
|
601
|
-
this input/output, which can refer to the description of
|
|
611
|
+
this input/output, which can refer to the description of :func:`mindspore.ops.Primitive.shard`.
|
|
602
612
|
The parallel strategies of remaining operators are derived from the strategy specified by the input and output.
|
|
603
613
|
|
|
604
614
|
Note:
|
|
@@ -1350,7 +1360,7 @@ class Cell(Cell_):
|
|
|
1350
1360
|
def _updata(param):
|
|
1351
1361
|
if param in replace:
|
|
1352
1362
|
return replace.get(param)
|
|
1353
|
-
new_p = param.init_data(None, set_sliced=
|
|
1363
|
+
new_p = param.init_data(None, set_sliced=param.sliced)
|
|
1354
1364
|
replace[param] = new_p
|
|
1355
1365
|
return new_p
|
|
1356
1366
|
|
|
@@ -1822,6 +1832,9 @@ class Cell(Cell_):
|
|
|
1822
1832
|
if not hasattr(self, "_func_graph_flags"):
|
|
1823
1833
|
self._func_graph_flags = {}
|
|
1824
1834
|
self._func_graph_flags.update({**flags})
|
|
1835
|
+
if context._get_mode() == context.PYNATIVE_MODE and self._func_graph_flags.get("output_no_recompute"):
|
|
1836
|
+
raise TypeError("Recompute is not supported in PyNative mode currently, you can use "
|
|
1837
|
+
"'context.set_context(mode=context.GRAPH_MODE)' or @jit to set graph mode.")
|
|
1825
1838
|
self.__dict__.update({**flags})
|
|
1826
1839
|
self._add_mixed_precision_flag(**flags)
|
|
1827
1840
|
return self
|
|
@@ -1955,9 +1968,8 @@ class Cell(Cell_):
|
|
|
1955
1968
|
|
|
1956
1969
|
def set_grad(self, requires_grad=True):
|
|
1957
1970
|
"""
|
|
1958
|
-
Sets the cell flag for gradient.
|
|
1959
|
-
|
|
1960
|
-
network is executed.
|
|
1971
|
+
Sets the cell flag for gradient.
|
|
1972
|
+
|
|
1961
1973
|
|
|
1962
1974
|
Args:
|
|
1963
1975
|
requires_grad (bool): Specifies if the net need to grad, if it is
|
|
@@ -2217,6 +2229,8 @@ class Cell(Cell_):
|
|
|
2217
2229
|
(Tensor(shape=[1], dtype=Float32, value= [ 2.00000000e+00]), Tensor(shape=[1], dtype=Float32,
|
|
2218
2230
|
value= [ 2.00000000e+00]))
|
|
2219
2231
|
"""
|
|
2232
|
+
if self.has_bprop:
|
|
2233
|
+
return HookHandle()
|
|
2220
2234
|
if context._get_mode() == context.GRAPH_MODE:
|
|
2221
2235
|
return HookHandle()
|
|
2222
2236
|
if not check_hook_fn("register_forward_hook", hook_fn):
|
|
@@ -2334,9 +2348,12 @@ class Cell(Cell_):
|
|
|
2334
2348
|
Supported Platforms:
|
|
2335
2349
|
``Ascend`` ``GPU`` ``CPU``
|
|
2336
2350
|
"""
|
|
2337
|
-
ret = self._cell_backward_pre_hook(outputs)
|
|
2338
2351
|
if isinstance(outputs, tuple):
|
|
2339
|
-
|
|
2352
|
+
ret = self._cell_backward_pre_hook(*outputs)
|
|
2353
|
+
else:
|
|
2354
|
+
ret = self._cell_backward_pre_hook(outputs)
|
|
2355
|
+
if isinstance(outputs, tuple):
|
|
2356
|
+
if len(outputs) == 1:
|
|
2340
2357
|
ret = (ret,)
|
|
2341
2358
|
if len(ret) != len(outputs):
|
|
2342
2359
|
raise TypeError(
|
|
@@ -2452,9 +2469,14 @@ class Cell(Cell_):
|
|
|
2452
2469
|
outputs = self.construct(*outputs, **kwargs)
|
|
2453
2470
|
else:
|
|
2454
2471
|
outputs = self.construct(outputs, **kwargs)
|
|
2455
|
-
|
|
2456
|
-
|
|
2457
|
-
|
|
2472
|
+
if isinstance(outputs, tuple):
|
|
2473
|
+
new_outputs = self._cell_backward_hook(*outputs)
|
|
2474
|
+
else:
|
|
2475
|
+
new_outputs = self._cell_backward_hook(outputs)
|
|
2476
|
+
# if outputs is (X,) and new_outpus is X
|
|
2477
|
+
if isinstance(outputs, tuple) and len(outputs) == 1:
|
|
2478
|
+
new_outputs = (new_outputs,)
|
|
2479
|
+
return new_outputs
|
|
2458
2480
|
|
|
2459
2481
|
def set_param_ps(self, recurse=True, init_in_server=False):
|
|
2460
2482
|
"""
|
|
@@ -2699,6 +2721,91 @@ class Cell(Cell_):
|
|
|
2699
2721
|
for cell in self.cells():
|
|
2700
2722
|
cell._add_recompute_flag()
|
|
2701
2723
|
|
|
2724
|
+
def _register_parameters_hook(self, forward_hook=None, backward_hook=None, all=False):
|
|
2725
|
+
"""
|
|
2726
|
+
Register the forward hook for parameters and register the backward hook for the corresponding gradient.
|
|
2727
|
+
|
|
2728
|
+
.. warning::
|
|
2729
|
+
This is an experimental prototype that is subject to change and/or deletion.
|
|
2730
|
+
|
|
2731
|
+
Note:
|
|
2732
|
+
- The `_register_parameters_hook(forward_hook, backward_hook)` only work in graph mode
|
|
2733
|
+
- The `forward_hook` must be defined as the following code.
|
|
2734
|
+
`parameters`: the tuple of the trainble parameters of the Cell, each element in the tuple shuould be
|
|
2735
|
+
in the format of `(param_name, Parameter)`.
|
|
2736
|
+
- The `forward_hook` should have the following signature:
|
|
2737
|
+
forward_hook(parameters) -> None.
|
|
2738
|
+
- The `backward_hook` must be defined as the following code.
|
|
2739
|
+
`gradients`: the tuple of the gradients corresponding to the trainble parameters of the Cell, each
|
|
2740
|
+
element in the tuple shuould be in the format of `(param_name, gradient)`.
|
|
2741
|
+
- The `backward_hook` should have the following signature:
|
|
2742
|
+
backward_hook(parameters) -> New gradients.
|
|
2743
|
+
|
|
2744
|
+
Args:
|
|
2745
|
+
forward_hook (function, optional): Python function or ``None``, Forward hook function. Default: ``None``
|
|
2746
|
+
backward_hook (function, optional): Python function or ``None``, Backward hook function. Default ``None``
|
|
2747
|
+
all (bool, optional): bool, whether to set hooks for all sub cells recursively. Default: ``False``
|
|
2748
|
+
|
|
2749
|
+
Returns:
|
|
2750
|
+
None
|
|
2751
|
+
|
|
2752
|
+
Raises:
|
|
2753
|
+
RuntimeError: If the `forward_hook` or `backward_hook ` has unspoorted syntax under GRAPH MODE.
|
|
2754
|
+
TypeError: If the `forward_hook` or `backward_hook` is not defined as required.
|
|
2755
|
+
|
|
2756
|
+
Supported Platforms:
|
|
2757
|
+
``Ascend`` ``GPU`` ``CPU``
|
|
2758
|
+
|
|
2759
|
+
Examples:
|
|
2760
|
+
>>> import mindspore as ms
|
|
2761
|
+
>>> from mindspore import Tensor, nn, ops, Parameter
|
|
2762
|
+
>>>
|
|
2763
|
+
>>> ms.set_context(mode=ms.GRAPH_MODE)
|
|
2764
|
+
>>> def parameter_hook(parameters):
|
|
2765
|
+
... print("--- enter parameter hook ---")
|
|
2766
|
+
... for name, param in parameters:
|
|
2767
|
+
... print (name, param)
|
|
2768
|
+
... print("--- leave parameter hook ---")
|
|
2769
|
+
...
|
|
2770
|
+
>>> def gradient_hook(gradients):
|
|
2771
|
+
... print("--- enter gradient hook ---")
|
|
2772
|
+
... outs = []
|
|
2773
|
+
... for name, gradient in gradients:
|
|
2774
|
+
... print(name, gradient)
|
|
2775
|
+
... outs.append(gradient * 2) # double gradient
|
|
2776
|
+
... print("--- leave gradient hook ---")
|
|
2777
|
+
... return outs
|
|
2778
|
+
...
|
|
2779
|
+
>>> class Net(nn.Cell):
|
|
2780
|
+
... def __init__(self)
|
|
2781
|
+
... super(Net, self).__init__()
|
|
2782
|
+
... self.w = Parameter(Tensor(np.array([3.0], np.float32)), name='w')
|
|
2783
|
+
... def construct(self, x):
|
|
2784
|
+
... return self.w * x
|
|
2785
|
+
...
|
|
2786
|
+
>>> grad = ops.GradOperation(get_by_list=True)
|
|
2787
|
+
>>> net = Net()
|
|
2788
|
+
>>> net._register_parameters_hook(forward_hook=parameter_hook, backward_hook=gradient_hook)
|
|
2789
|
+
>>> x = Tensor(np.array([4.0]).astype(np.float32))
|
|
2790
|
+
>>> output = grad(net, net.trainable_params())(x)
|
|
2791
|
+
--- enter parameter hook ---
|
|
2792
|
+
w
|
|
2793
|
+
Tensor(shape=[1], dtype=Float32, value=[ 3.00000000e+00])
|
|
2794
|
+
--- leave parameter hook ---
|
|
2795
|
+
--- enter gradient hook ---
|
|
2796
|
+
w
|
|
2797
|
+
Tensor(shape=[1], dtype=Float32, value=[ 4.00000000e+00])
|
|
2798
|
+
--- leave gradient hook ---
|
|
2799
|
+
>>> print("doubled grad: ", output)
|
|
2800
|
+
doubled grad: (Tensor(shape=[1], dtype=Float32, value=[ 8.00000000e+00]),)
|
|
2801
|
+
"""
|
|
2802
|
+
if not all:
|
|
2803
|
+
self._parameters_forward_hook = forward_hook
|
|
2804
|
+
self._parameters_backward_hook = backward_hook
|
|
2805
|
+
else:
|
|
2806
|
+
for _, cell in self.cells_and_names():
|
|
2807
|
+
cell._parameters_forward_hook = forward_hook
|
|
2808
|
+
cell._parameters_backward_hook = backward_hook
|
|
2702
2809
|
|
|
2703
2810
|
class GraphCell(Cell):
|
|
2704
2811
|
"""
|
mindspore/nn/dynamic_lr.py
CHANGED
|
@@ -224,7 +224,8 @@ def inverse_decay_lr(learning_rate, decay_rate, total_step, step_per_epoch, deca
|
|
|
224
224
|
total_step (int): The total number of steps.
|
|
225
225
|
step_per_epoch (int): The number of steps in per epoch.
|
|
226
226
|
decay_epoch (int): Number of epochs to decay over.
|
|
227
|
-
is_stair (bool): If true, learning rate is decayed once every `decay_epoch` times.
|
|
227
|
+
is_stair (bool): If true, learning rate is decayed once every `decay_epoch` times. If False, the learning rate
|
|
228
|
+
decays for every epoch. Default: ``False`` .
|
|
228
229
|
|
|
229
230
|
Returns:
|
|
230
231
|
list[float]. The size of list is `total_step`.
|
mindspore/nn/layer/activation.py
CHANGED
|
@@ -179,7 +179,7 @@ class Softmax2d(Cell):
|
|
|
179
179
|
r"""
|
|
180
180
|
Softmax function applied to 2D features data.
|
|
181
181
|
|
|
182
|
-
Applies `Softmax` to each location
|
|
182
|
+
Applies `Softmax` to each location with an input Tensor of shape :math:`(C, H, W)` .
|
|
183
183
|
|
|
184
184
|
Inputs:
|
|
185
185
|
- **x** (Tensor) - Tensor of shape :math:`(N, C_{in}, H_{in}, W_{in})` or :math:`(C_{in}, H_{in}, W_{in})`.
|
|
@@ -1273,9 +1273,9 @@ class PReLUExt(Cell):
|
|
|
1273
1273
|
no channel dim and the number of channels = 1.
|
|
1274
1274
|
|
|
1275
1275
|
Args:
|
|
1276
|
-
num_parameters (int): number of `w` to learn. Although it takes an int as input,
|
|
1276
|
+
num_parameters (int, optional): number of `w` to learn. Although it takes an int as input,
|
|
1277
1277
|
there is only two legitimate values: 1, or the number of channels at Tensor `input`. Default: ``1`` .
|
|
1278
|
-
init (float): the initial value of `w`. Default: ``0.25`` .
|
|
1278
|
+
init (float, optional): the initial value of `w`. Default: ``0.25`` .
|
|
1279
1279
|
dtype (mindspore.dtype, optional): the type of `w`. Default: ``None`` . Supported data type
|
|
1280
1280
|
is {float16, float32, bfloat16}.
|
|
1281
1281
|
|
|
@@ -1320,7 +1320,7 @@ class HSwish(Cell):
|
|
|
1320
1320
|
Hard swish is defined as:
|
|
1321
1321
|
|
|
1322
1322
|
.. math::
|
|
1323
|
-
\text{
|
|
1323
|
+
\text{HSwish}(input) =
|
|
1324
1324
|
\begin{cases}
|
|
1325
1325
|
0, & \text{ if } input \leq -3, \\
|
|
1326
1326
|
input, & \text{ if } input \geq +3, \\
|
|
@@ -1372,7 +1372,7 @@ class HSigmoid(Cell):
|
|
|
1372
1372
|
Hard Sigmoid is defined as:
|
|
1373
1373
|
|
|
1374
1374
|
.. math::
|
|
1375
|
-
\text{
|
|
1375
|
+
\text{HSigmoid}(input) =
|
|
1376
1376
|
\begin{cases}
|
|
1377
1377
|
0, & \text{ if } input \leq -3, \\
|
|
1378
1378
|
1, & \text{ if } input \geq +3, \\
|
|
@@ -1578,7 +1578,7 @@ class HShrink(Cell):
|
|
|
1578
1578
|
The formula is defined as follows:
|
|
1579
1579
|
|
|
1580
1580
|
.. math::
|
|
1581
|
-
\text{
|
|
1581
|
+
\text{HShrink}(x) =
|
|
1582
1582
|
\begin{cases}
|
|
1583
1583
|
x, & \text{ if } x > \lambda \\
|
|
1584
1584
|
x, & \text{ if } x < -\lambda \\
|
mindspore/nn/layer/basic.py
CHANGED
|
@@ -25,10 +25,9 @@ from mindspore.ops.composite.multitype_ops import _constexpr_utils as const_util
|
|
|
25
25
|
from mindspore.common.seed import _get_graph_seed
|
|
26
26
|
from mindspore.common.tensor import Tensor
|
|
27
27
|
from mindspore.common.initializer import initializer, HeUniform, Uniform
|
|
28
|
+
from mindspore import ops
|
|
28
29
|
from mindspore.ops import operations as P
|
|
29
30
|
from mindspore.ops import functional as F
|
|
30
|
-
from mindspore.ops.function.nn_func import interpolate_ext
|
|
31
|
-
from mindspore.ops.auto_generate import unfold_ext
|
|
32
31
|
from mindspore.ops.operations import _inner_ops as inner
|
|
33
32
|
from mindspore.ops.primitive import constexpr, Primitive, _primexpr
|
|
34
33
|
from mindspore.common.parameter import Parameter
|
|
@@ -37,7 +36,6 @@ from mindspore import _checkparam as Validator
|
|
|
37
36
|
from mindspore.nn.cell import Cell
|
|
38
37
|
from mindspore.nn.layer.activation import get_activation
|
|
39
38
|
from mindspore.common._decorator import deprecated
|
|
40
|
-
from mindspore.ops.auto_generate import dropout_ext_op, fold_ext
|
|
41
39
|
from mindspore.common.generator import default_generator
|
|
42
40
|
|
|
43
41
|
__all__ = ['Dropout', 'Flatten', 'Dense', 'Linear', 'ClipByNorm', 'Norm', 'OneHot', 'Pad', 'Unfold', 'Tril', 'Triu',
|
|
@@ -140,6 +138,7 @@ class Dropout(Cell):
|
|
|
140
138
|
|
|
141
139
|
Inputs:
|
|
142
140
|
- **x** (Tensor) - The input of Dropout with data type of float16 or float32.
|
|
141
|
+
The shape of `x` cannot be less than 1.
|
|
143
142
|
|
|
144
143
|
Outputs:
|
|
145
144
|
Tensor, output tensor with the same shape as the `x`.
|
|
@@ -225,8 +224,9 @@ class DropoutExt(Cell):
|
|
|
225
224
|
- Parameter `p` means the probability of the element of the input tensor to be zeroed.
|
|
226
225
|
|
|
227
226
|
Args:
|
|
228
|
-
p (float): The dropout rate of input neurons, E.g. `p` =0.9, dropping out 90% of input neurons.
|
|
227
|
+
p (float, optional): The dropout rate of input neurons, E.g. `p` =0.9, dropping out 90% of input neurons.
|
|
229
228
|
Default: ``0.5`` .
|
|
229
|
+
inplace (bool, optional): If set to ``True`` , will do this operation in-place. Default: ``False`` .
|
|
230
230
|
|
|
231
231
|
Inputs:
|
|
232
232
|
- **x** (Tensor) - The input of Dropout.
|
|
@@ -253,18 +253,23 @@ class DropoutExt(Cell):
|
|
|
253
253
|
(2, 2, 3)
|
|
254
254
|
"""
|
|
255
255
|
|
|
256
|
-
def __init__(self, p=0.5):
|
|
256
|
+
def __init__(self, p=0.5, inplace=False):
|
|
257
257
|
"""Initialize DropoutExt."""
|
|
258
258
|
super(DropoutExt, self).__init__()
|
|
259
259
|
self.p = p
|
|
260
|
-
self.
|
|
260
|
+
self.inplace = inplace
|
|
261
|
+
self.generator_step = Tensor(12, mstype.int64)
|
|
261
262
|
|
|
262
263
|
def construct(self, x):
|
|
263
264
|
if not self.training or self.p == 0:
|
|
264
265
|
return x
|
|
265
266
|
|
|
266
267
|
seed, offset = default_generator._step(self.generator_step) # pylint: disable=protected-access
|
|
267
|
-
out, _ = dropout_ext_op(x, self.p, seed, offset)
|
|
268
|
+
out, _ = ops.auto_generate.dropout_ext_op(x, self.p, seed, offset)
|
|
269
|
+
|
|
270
|
+
if self.inplace:
|
|
271
|
+
x.copy_(out)
|
|
272
|
+
return x
|
|
268
273
|
return out
|
|
269
274
|
|
|
270
275
|
|
|
@@ -479,6 +484,9 @@ class UpsampleExt(Cell):
|
|
|
479
484
|
r"""
|
|
480
485
|
For details, please refer to :func:`mindspore.mint.nn.functional.interpolate`.
|
|
481
486
|
|
|
487
|
+
.. warning::
|
|
488
|
+
This is an experimental API that is subject to change or deletion.
|
|
489
|
+
|
|
482
490
|
Supported Platforms:
|
|
483
491
|
``Ascend``
|
|
484
492
|
|
|
@@ -511,8 +519,8 @@ class UpsampleExt(Cell):
|
|
|
511
519
|
self.recompute_scale_factor = recompute_scale_factor
|
|
512
520
|
|
|
513
521
|
def construct(self, input):
|
|
514
|
-
out = interpolate_ext(input, self.size, self.scale_factor, self.mode,
|
|
515
|
-
|
|
522
|
+
out = ops.function.nn_func.interpolate_ext(input, self.size, self.scale_factor, self.mode,
|
|
523
|
+
self.align_corners, self.recompute_scale_factor)
|
|
516
524
|
return out
|
|
517
525
|
|
|
518
526
|
|
|
@@ -626,7 +634,7 @@ class Dense(Cell):
|
|
|
626
634
|
with the same data type as the :math:`X` created by the layer (only if has_bias is True).
|
|
627
635
|
|
|
628
636
|
.. warning::
|
|
629
|
-
In
|
|
637
|
+
In PyNative mode, if `bias` is ``False`` , the `x` cannot be greater than 6D.
|
|
630
638
|
|
|
631
639
|
Args:
|
|
632
640
|
in_channels (int): The number of channels in the input space.
|
|
@@ -660,7 +668,7 @@ class Dense(Cell):
|
|
|
660
668
|
is not equal to `out_channels` or shape[1] of `weight_init` is not equal to `in_channels`.
|
|
661
669
|
ValueError: If length of shape of `bias_init` is not equal to 1
|
|
662
670
|
or shape[0] of `bias_init` is not equal to `out_channels`.
|
|
663
|
-
RuntimeError: If `bias` is ``False`` and `x` is greater than 6D in
|
|
671
|
+
RuntimeError: If `bias` is ``False`` and `x` is greater than 6D in PyNative mode.
|
|
664
672
|
|
|
665
673
|
Supported Platforms:
|
|
666
674
|
``Ascend`` ``GPU`` ``CPU``
|
|
@@ -762,24 +770,26 @@ class Linear(Cell):
|
|
|
762
770
|
.. math::
|
|
763
771
|
\text{outputs} = X * kernel + bias
|
|
764
772
|
|
|
765
|
-
.. warning::
|
|
766
|
-
In PYNATIVE mode, if `bias` is ``False`` , the `x` cannot be greater than 6D.
|
|
767
|
-
|
|
768
773
|
where :math:`X` is the input tensors, :math:`\text{kernel}` is a weight matrix with the same
|
|
769
774
|
data type as the :math:`X` created by the layer, and :math:`\text{bias}` is a bias vector
|
|
770
|
-
with the same data type as the :math:`X` created by the layer (only if
|
|
775
|
+
with the same data type as the :math:`X` created by the layer (only if the parameter `bias` is True).
|
|
776
|
+
|
|
777
|
+
.. warning::
|
|
778
|
+
In PyNative mode, if `bias` is ``False`` , the `x` cannot be greater than 6D.
|
|
771
779
|
|
|
772
780
|
Args:
|
|
773
781
|
in_features (int): The number of features in the input space.
|
|
774
782
|
out_features (int): The number of features in the output space.
|
|
775
|
-
bias (bool): Specifies whether the layer uses a bias vector :math:`\text{bias}`. Default: ``True``.
|
|
776
|
-
weight_init (Union[Tensor, str, Initializer, numbers.Number]):
|
|
783
|
+
bias (bool, optional): Specifies whether the layer uses a bias vector :math:`\text{bias}`. Default: ``True``.
|
|
784
|
+
weight_init (Union[Tensor, str, Initializer, numbers.Number], optional):
|
|
785
|
+
The trainable weight_init parameter. The dtype
|
|
777
786
|
is same as `x`. The values of str refer to the function `initializer`. Default: ``None`` ,
|
|
778
787
|
weight will be initialized using HeUniform.
|
|
779
|
-
bias_init (Union[Tensor, str, Initializer, numbers.Number]):
|
|
788
|
+
bias_init (Union[Tensor, str, Initializer, numbers.Number], optional):
|
|
789
|
+
The trainable bias_init parameter. The dtype is
|
|
780
790
|
same as `x`. The values of str refer to the function `initializer`. Default: ``None`` ,
|
|
781
791
|
bias will be initialized using Uniform.
|
|
782
|
-
dtype (:class:`mindspore.dtype
|
|
792
|
+
dtype (:class:`mindspore.dtype`, optional): Data type of Parameter. Default: ``None`` .
|
|
783
793
|
If `dtype` is ``None`` , `dtype` is set to ``mstype.float32`` when initializing the method.
|
|
784
794
|
When `weight_init` is Tensor, Parameter has the same data type as `weight_init` ,
|
|
785
795
|
in other cases, Parameter has the same data type as `dtype`, the same goes for `bias_init`.
|
|
@@ -798,7 +808,7 @@ class Linear(Cell):
|
|
|
798
808
|
is not equal to `out_features` or shape[1] of `weight_init` is not equal to `in_features`.
|
|
799
809
|
ValueError: If length of shape of `bias_init` is not equal to 1
|
|
800
810
|
or shape[0] of `bias_init` is not equal to `out_features`.
|
|
801
|
-
RuntimeError: If `bias` is ``False`` and `x` is greater than 6D in
|
|
811
|
+
RuntimeError: If `bias` is ``False`` and `x` is greater than 6D in PyNative mode.
|
|
802
812
|
|
|
803
813
|
Supported Platforms:
|
|
804
814
|
``Ascend`` ``GPU`` ``CPU``
|
|
@@ -806,10 +816,10 @@ class Linear(Cell):
|
|
|
806
816
|
Examples:
|
|
807
817
|
>>> import mindspore
|
|
808
818
|
>>> from mindspore import Tensor
|
|
809
|
-
>>> from mindspore import
|
|
819
|
+
>>> from mindspore import mint
|
|
810
820
|
>>> import numpy as np
|
|
811
821
|
>>> x = Tensor(np.array([[180, 234, 154], [244, 48, 247]]), mindspore.float32)
|
|
812
|
-
>>> net =
|
|
822
|
+
>>> net = mint.nn.Linear(3, 4)
|
|
813
823
|
>>> output = net(x)
|
|
814
824
|
>>> print(output.shape)
|
|
815
825
|
(2, 4)
|
|
@@ -1285,7 +1295,7 @@ class UnfoldExt(Cell):
|
|
|
1285
1295
|
self.stride = stride
|
|
1286
1296
|
|
|
1287
1297
|
def construct(self, input):
|
|
1288
|
-
return unfold_ext(input, self.kernel_size, self.dilation, self.padding, self.stride)
|
|
1298
|
+
return ops.auto_generate.unfold_ext(input, self.kernel_size, self.dilation, self.padding, self.stride)
|
|
1289
1299
|
|
|
1290
1300
|
|
|
1291
1301
|
class Fold(Cell):
|
|
@@ -1316,8 +1326,8 @@ class Fold(Cell):
|
|
|
1316
1326
|
self.stride = stride
|
|
1317
1327
|
|
|
1318
1328
|
def construct(self, input):
|
|
1319
|
-
return fold_ext(input, self.output_size, self.kernel_size,
|
|
1320
|
-
|
|
1329
|
+
return ops.auto_generate.fold_ext(input, self.output_size, self.kernel_size,
|
|
1330
|
+
self.dilation, self.padding, self.stride)
|
|
1321
1331
|
|
|
1322
1332
|
|
|
1323
1333
|
@_primexpr
|
|
@@ -21,9 +21,9 @@ __all__ = ['ChannelShuffle']
|
|
|
21
21
|
|
|
22
22
|
class ChannelShuffle(Cell):
|
|
23
23
|
r"""
|
|
24
|
-
Divide the channels
|
|
25
|
-
|
|
26
|
-
|
|
24
|
+
Divide the channels in a tensor of shape :math:`(*, C, H, W)` into :math:`g` group and
|
|
25
|
+
rearrange them as :math:`(*, \frac{C}{g}, g, H*W)`, while retaining the original tensor
|
|
26
|
+
shape in the final output.
|
|
27
27
|
|
|
28
28
|
Args:
|
|
29
29
|
groups (int): Number of groups to divide channels in, must be greater than 0.
|
mindspore/nn/layer/embedding.py
CHANGED
|
@@ -179,7 +179,7 @@ class EmbeddingExt(Cell):
|
|
|
179
179
|
`[-num_embeddings, num_embeddings)` if it's not ``None``. Default ``None``.
|
|
180
180
|
max_norm (float, optional): If the value is not None, firstly get the p-norm result of the embedding
|
|
181
181
|
vector specified by `input` where p is specified by `norm_type`; if the result is larger then `max_norm`,
|
|
182
|
-
update the embedding vector
|
|
182
|
+
update the embedding vector with :math:`\frac{max\_norm}{result+1e^{-7}}`. Default ``None``.
|
|
183
183
|
norm_type (float, optional): Indicated the value of p in p-norm. Default ``2.0``.
|
|
184
184
|
scale_grad_by_freq (bool, optional): If ``True`` the gradients will be scaled by the inverse of frequency
|
|
185
185
|
of the index in `input`. Default ``False``.
|
|
@@ -193,8 +193,8 @@ class EmbeddingExt(Cell):
|
|
|
193
193
|
not None. Default: ``None``.
|
|
194
194
|
|
|
195
195
|
Variables:
|
|
196
|
-
weight (Parameter)
|
|
197
|
-
|
|
196
|
+
- **weight** (Parameter) - The learnable weights of this module of shape (num_embeddings, embedding_dim), which
|
|
197
|
+
initialized from :math:`{N}(\text{sigma=1.0}, \text{mean=0.0})` or `_weight` .
|
|
198
198
|
|
|
199
199
|
Inputs:
|
|
200
200
|
- **input** (Tensor) - The indices used to lookup in the embedding vector. The data type must be
|
|
@@ -36,7 +36,6 @@ from mindspore.common import dtype as mstype
|
|
|
36
36
|
from mindspore.parallel._utils import _is_in_auto_parallel_mode
|
|
37
37
|
from mindspore.nn.cell import Cell
|
|
38
38
|
from mindspore import log as logger
|
|
39
|
-
from mindspore.ops import group_norm
|
|
40
39
|
|
|
41
40
|
__all__ = ['BatchNorm1d', 'BatchNorm2d', 'BatchNorm3d', 'LayerNorm', 'LayerNormExt', 'GroupNorm',
|
|
42
41
|
'SyncBatchNorm', 'InstanceNorm1d', 'InstanceNorm2d', 'InstanceNorm3d']
|
|
@@ -795,13 +794,15 @@ class LayerNormExt(Cell):
|
|
|
795
794
|
This is an experimental API that is subject to change or deletion.
|
|
796
795
|
|
|
797
796
|
Args:
|
|
798
|
-
normalized_shape (Union(tuple[int], list[int], int)): The normalized shape of `x` for LayerNorm
|
|
799
|
-
eps (float): A value added to the denominator for numerical stability(:math:`\epsilon`).
|
|
800
|
-
|
|
797
|
+
normalized_shape (Union(tuple[int], list[int], int)): The normalized shape of `x` for LayerNorm.
|
|
798
|
+
eps (float, optional): A value added to the denominator for numerical stability( :math:`\epsilon` ).
|
|
799
|
+
Default: ``1e-5`` .
|
|
800
|
+
elementwise_affine (bool, optional): Whether affine transformation is required.
|
|
801
|
+
When this parameter is set to ``True``,
|
|
801
802
|
the weight parameter is initialized to 1 and the offset is initialized to 0. Default: ``True``.
|
|
802
|
-
bias (bool): If set to ``False``, the layer will not learn an additive bias (only relevant if
|
|
803
|
+
bias (bool, optional): If set to ``False``, the layer will not learn an additive bias (only relevant if
|
|
803
804
|
`elementwise_affine` is ``True``). Default: ``True``.
|
|
804
|
-
dtype (:class:`mindspore.dtype
|
|
805
|
+
dtype (:class:`mindspore.dtype`, optional): Dtype of Parameters. Default: ``None`` .
|
|
805
806
|
|
|
806
807
|
Inputs:
|
|
807
808
|
- **x** (Tensor) - The shape is :math:`(N, *)`, where :math:`*` is equal to normalized_shape.
|
|
@@ -1248,7 +1249,7 @@ class GroupNorm(Cell):
|
|
|
1248
1249
|
|
|
1249
1250
|
def _cal_output(self, x):
|
|
1250
1251
|
"""calculate groupnorm output"""
|
|
1251
|
-
return group_norm(x, self.num_groups, self.gamma.to(x.dtype), self.beta.to(x.dtype), self.eps)
|
|
1252
|
+
return ops.group_norm(x, self.num_groups, self.gamma.to(x.dtype), self.beta.to(x.dtype), self.eps)
|
|
1252
1253
|
|
|
1253
1254
|
@staticmethod
|
|
1254
1255
|
@_primexpr
|
mindspore/nn/layer/padding.py
CHANGED
|
@@ -442,7 +442,7 @@ class _ReflectionPadNd(Cell):
|
|
|
442
442
|
|
|
443
443
|
class ReflectionPad1d(_ReflectionPadNd):
|
|
444
444
|
r"""
|
|
445
|
-
Using a given padding to do reflection pad on the given tensor.
|
|
445
|
+
Using a given padding to do reflection pad on the given tensor. 1d means the dimension of padding is 1-dimension.
|
|
446
446
|
|
|
447
447
|
Args:
|
|
448
448
|
padding (union[int, tuple]): The padding size to pad the last dimension of input tensor.
|
|
@@ -490,7 +490,7 @@ class ReflectionPad1d(_ReflectionPadNd):
|
|
|
490
490
|
|
|
491
491
|
class ReflectionPad2d(_ReflectionPadNd):
|
|
492
492
|
r"""
|
|
493
|
-
Using a given padding to do reflection pad the given tensor.
|
|
493
|
+
Using a given padding to do reflection pad the given tensor. 2d means the dimension of padding is 2-dimension.
|
|
494
494
|
|
|
495
495
|
Args:
|
|
496
496
|
padding (union[int, tuple]): The padding size to pad the input tensor.
|
|
@@ -542,7 +542,8 @@ class ReflectionPad2d(_ReflectionPadNd):
|
|
|
542
542
|
|
|
543
543
|
class ReflectionPad3d(_ReflectionPadNd):
|
|
544
544
|
r"""
|
|
545
|
-
Pad the given tensor in a reflecting way using the input boundaries as the axis of symmetry.
|
|
545
|
+
Pad the given tensor in a reflecting way using the input boundaries as the axis of symmetry. 3d means the dimension
|
|
546
|
+
of padding is 3-dimension.
|
|
546
547
|
|
|
547
548
|
Note:
|
|
548
549
|
ReflectionPad3d has not supported 5D tensor yet.
|