mindspore-2.4.10-cp311-none-any.whl → mindspore-2.5.0-cp311-none-any.whl
This diff compares the contents of two publicly available package versions released to one of the supported registries. It is provided for informational purposes only and reflects the changes between those versions as they appear in their respective public registries.
Note: this release of mindspore has been flagged as potentially problematic.
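A file-level listing like the one below can be reproduced directly from the two wheels with the Python standard library, since a wheel is an ordinary zip archive. The following is a minimal illustrative sketch, not part of this diff page; the wheel filenames are placeholders.

import zipfile

OLD_WHL = "mindspore-2.4.10-cp311-none-any.whl"   # placeholder path
NEW_WHL = "mindspore-2.5.0-cp311-none-any.whl"    # placeholder path

def wheel_members(path):
    # A wheel is a zip archive; namelist() returns every file it contains.
    with zipfile.ZipFile(path) as whl:
        return set(whl.namelist())

old_files = wheel_members(OLD_WHL)
new_files = wheel_members(NEW_WHL)

for name in sorted(new_files - old_files):
    print("added:  ", name)
for name in sorted(old_files - new_files):
    print("removed:", name)
print("files present in both versions:", len(old_files & new_files))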
- mindspore/.commit_id +1 -1
- mindspore/Third_Party_Open_Source_Software_Notice +39 -0
- mindspore/__init__.py +8 -3
- mindspore/_akg/akg/composite/build_module.py +6 -2
- mindspore/_akg/akg/utils/kernel_exec.py +2 -2
- mindspore/_c_dataengine.cpython-311-aarch64-linux-gnu.so +0 -0
- mindspore/_c_expression.cpython-311-aarch64-linux-gnu.so +0 -0
- mindspore/_c_mindrecord.cpython-311-aarch64-linux-gnu.so +0 -0
- mindspore/_checkparam.py +0 -5
- mindspore/_extends/parallel_compile/akg_compiler/gen_custom_op_files.py +1 -1
- mindspore/_extends/parse/compile_config.py +64 -0
- mindspore/_extends/parse/deprecated/__init__.py +0 -0
- mindspore/_extends/parse/deprecated/deprecated_tensor_method.py +375 -0
- mindspore/_extends/parse/parser.py +23 -5
- mindspore/_extends/parse/standard_method.py +123 -27
- mindspore/_extends/pijit/pijit_func_white_list.py +1 -1
- mindspore/amp.py +7 -1
- mindspore/boost/boost_cell_wrapper.py +136 -41
- mindspore/common/__init__.py +3 -1
- mindspore/common/_register_for_tensor.py +0 -1
- mindspore/common/_stub_tensor.py +25 -4
- mindspore/common/_tensor_cpp_method.py +17 -0
- mindspore/common/_tensor_docs.py +6132 -0
- mindspore/common/api.py +98 -21
- mindspore/common/dtype.py +34 -34
- mindspore/common/dump.py +2 -1
- mindspore/common/file_system.py +8 -3
- mindspore/common/generator.py +2 -0
- mindspore/common/hook_handle.py +3 -1
- mindspore/common/initializer.py +3 -4
- mindspore/common/lazy_inline.py +8 -2
- mindspore/common/mindir_util.py +10 -2
- mindspore/common/parameter.py +31 -15
- mindspore/common/tensor.py +713 -1337
- mindspore/communication/__init__.py +1 -1
- mindspore/communication/_comm_helper.py +5 -0
- mindspore/communication/comm_func.py +215 -173
- mindspore/communication/management.py +23 -20
- mindspore/context.py +285 -191
- mindspore/dataset/__init__.py +23 -19
- mindspore/dataset/callback/ds_callback.py +2 -1
- mindspore/dataset/core/config.py +84 -3
- mindspore/dataset/engine/cache_admin.py +3 -3
- mindspore/dataset/engine/cache_client.py +5 -4
- mindspore/dataset/engine/datasets.py +192 -149
- mindspore/dataset/engine/datasets_audio.py +14 -0
- mindspore/dataset/engine/datasets_standard_format.py +11 -11
- mindspore/dataset/engine/datasets_text.py +38 -1
- mindspore/dataset/engine/datasets_user_defined.py +100 -66
- mindspore/dataset/engine/datasets_vision.py +81 -8
- mindspore/dataset/engine/iterators.py +281 -63
- mindspore/dataset/engine/obs/util.py +8 -0
- mindspore/dataset/engine/queue.py +40 -0
- mindspore/dataset/engine/samplers.py +26 -2
- mindspore/dataset/engine/serializer_deserializer.py +1 -1
- mindspore/dataset/engine/validators.py +43 -11
- mindspore/dataset/transforms/py_transforms_util.py +17 -0
- mindspore/dataset/transforms/transforms.py +29 -12
- mindspore/dataset/vision/validators.py +1 -2
- mindspore/device_context/__init__.py +21 -0
- mindspore/device_context/ascend/__init__.py +25 -0
- mindspore/device_context/ascend/device.py +72 -0
- mindspore/device_context/ascend/op_debug.py +94 -0
- mindspore/device_context/ascend/op_precision.py +193 -0
- mindspore/device_context/ascend/op_tuning.py +127 -0
- mindspore/device_context/cpu/__init__.py +25 -0
- mindspore/device_context/cpu/device.py +62 -0
- mindspore/device_context/cpu/op_tuning.py +43 -0
- mindspore/device_context/gpu/__init__.py +21 -0
- mindspore/device_context/gpu/device.py +70 -0
- mindspore/device_context/gpu/op_precision.py +67 -0
- mindspore/device_context/gpu/op_tuning.py +175 -0
- mindspore/device_manager.py +134 -0
- mindspore/experimental/llm_boost/__init__.py +1 -0
- mindspore/experimental/llm_boost/ascend_native/__init__.py +22 -0
- mindspore/experimental/llm_boost/ascend_native/llama_boost_ascend_native.py +211 -0
- mindspore/experimental/llm_boost/ascend_native/llm_boost.py +52 -0
- mindspore/experimental/llm_boost/atb/boost_base.py +2 -3
- mindspore/experimental/llm_boost/atb/llama_boost.py +6 -1
- mindspore/experimental/llm_boost/register.py +1 -0
- mindspore/experimental/optim/adadelta.py +26 -22
- mindspore/experimental/optim/adam.py +3 -0
- mindspore/experimental/optim/lr_scheduler.py +33 -24
- mindspore/experimental/optim/radam.py +33 -30
- mindspore/hal/device.py +28 -0
- mindspore/hal/event.py +17 -0
- mindspore/hal/memory.py +94 -3
- mindspore/hal/stream.py +91 -6
- mindspore/include/api/context.h +0 -1
- mindspore/lib/libavcodec.so.59 +0 -0
- mindspore/lib/libavdevice.so.59 +0 -0
- mindspore/lib/libavfilter.so.8 +0 -0
- mindspore/lib/libavformat.so.59 +0 -0
- mindspore/lib/libavutil.so.57 +0 -0
- mindspore/lib/libdnnl.so.2 +0 -0
- mindspore/lib/libmindspore_backend.so +0 -0
- mindspore/lib/libmindspore_common.so +0 -0
- mindspore/lib/libmindspore_core.so +0 -0
- mindspore/lib/libmindspore_glog.so.0 +0 -0
- mindspore/lib/libmindspore_gpr.so.15 +0 -0
- mindspore/lib/libmindspore_grpc++.so.1 +0 -0
- mindspore/lib/libmindspore_grpc.so.15 +0 -0
- mindspore/lib/libmindspore_ops.so +0 -0
- mindspore/lib/libmpi_adapter.so +0 -0
- mindspore/lib/libmpi_collective.so +0 -0
- mindspore/lib/libnnacl.so +0 -0
- mindspore/lib/libopencv_core.so.4.5 +0 -0
- mindspore/lib/libopencv_imgcodecs.so.4.5 +0 -0
- mindspore/lib/libopencv_imgproc.so.4.5 +0 -0
- mindspore/lib/libps_cache.so +0 -0
- mindspore/lib/libswresample.so.4 +0 -0
- mindspore/lib/libswscale.so.6 +0 -0
- mindspore/lib/plugin/ascend/custom_aicore_ops/op_impl/ai_core/tbe/config/ascend910_93/aic-ascend910_93-ops-info.json +2048 -0
- mindspore/lib/plugin/ascend/custom_aicpu_ops/op_impl/cpu/aicpu_kernel/impl/libcust_cpu_kernels.so +0 -0
- mindspore/lib/plugin/ascend/custom_aicpu_ops/op_proto/libcust_op_proto.so +0 -0
- mindspore/lib/plugin/ascend/custom_ascendc_910/op_api/lib/libcust_opapi.so +0 -0
- mindspore/lib/plugin/ascend/custom_ascendc_910/op_impl/ai_core/tbe/custom_ascendc_910_impl/dynamic/decoder_kv_cache.py +1 -1
- mindspore/lib/plugin/ascend/custom_ascendc_910/op_impl/ai_core/tbe/custom_ascendc_910_impl/dynamic/prompt_kv_cache.py +1 -1
- mindspore/lib/plugin/ascend/custom_ascendc_910/op_impl/ai_core/tbe/op_tiling/lib/linux/aarch64/libcust_opmaster_rt2.0.so +0 -0
- mindspore/lib/plugin/ascend/custom_ascendc_910/op_impl/ai_core/tbe/op_tiling/liboptiling.so +0 -0
- mindspore/lib/plugin/ascend/custom_ascendc_910/op_proto/lib/linux/aarch64/libcust_opsproto_rt2.0.so +0 -0
- mindspore/lib/plugin/ascend/custom_ascendc_910/version.info +1 -1
- mindspore/lib/plugin/ascend/custom_ascendc_910b/op_api/lib/libcust_opapi.so +0 -0
- mindspore/lib/plugin/ascend/custom_ascendc_910b/op_impl/ai_core/tbe/config/ascend910_93/aic-ascend910_93-ops-info.json +224 -0
- mindspore/lib/plugin/ascend/custom_ascendc_910b/op_impl/ai_core/tbe/custom_ascendc_910b_impl/dynamic/all_finite.py +1 -1
- mindspore/lib/plugin/ascend/custom_ascendc_910b/op_impl/ai_core/tbe/custom_ascendc_910b_impl/dynamic/decoder_kv_cache.py +1 -1
- mindspore/lib/plugin/ascend/custom_ascendc_910b/op_impl/ai_core/tbe/custom_ascendc_910b_impl/dynamic/prompt_kv_cache.py +1 -1
- mindspore/lib/plugin/ascend/custom_ascendc_910b/op_impl/ai_core/tbe/kernel/ascend910_93/all_finite/AllFinite_52f59e2a65d9b1bb002de35c2819754a.json +78 -0
- mindspore/lib/plugin/ascend/custom_ascendc_910b/op_impl/ai_core/tbe/kernel/ascend910_93/all_finite/AllFinite_52f59e2a65d9b1bb002de35c2819754a.o +0 -0
- mindspore/lib/plugin/ascend/custom_ascendc_910b/op_impl/ai_core/tbe/kernel/ascend910_93/all_finite/AllFinite_6b5e50e30256d85838d6ce83514df20f.json +78 -0
- mindspore/lib/plugin/ascend/custom_ascendc_910b/op_impl/ai_core/tbe/kernel/ascend910_93/all_finite/AllFinite_6b5e50e30256d85838d6ce83514df20f.o +0 -0
- mindspore/lib/plugin/ascend/custom_ascendc_910b/op_impl/ai_core/tbe/kernel/ascend910_93/all_finite/AllFinite_74e4ac02880d452e3308c94af273562e.json +78 -0
- mindspore/lib/plugin/ascend/custom_ascendc_910b/op_impl/ai_core/tbe/kernel/ascend910_93/all_finite/AllFinite_74e4ac02880d452e3308c94af273562e.o +0 -0
- mindspore/lib/plugin/ascend/custom_ascendc_910b/op_impl/ai_core/tbe/kernel/ascend910_93/decoder_kv_cache/DecoderKvCache_0d5520cc587ad44ce634bf3fbcffc272.json +156 -0
- mindspore/lib/plugin/ascend/custom_ascendc_910b/op_impl/ai_core/tbe/kernel/ascend910_93/decoder_kv_cache/DecoderKvCache_0d5520cc587ad44ce634bf3fbcffc272.o +0 -0
- mindspore/lib/plugin/ascend/custom_ascendc_910b/op_impl/ai_core/tbe/kernel/ascend910_93/decoder_kv_cache/DecoderKvCache_20390d30b3c4c0d23167ccca6c030c2b.json +156 -0
- mindspore/lib/plugin/ascend/custom_ascendc_910b/op_impl/ai_core/tbe/kernel/ascend910_93/decoder_kv_cache/DecoderKvCache_20390d30b3c4c0d23167ccca6c030c2b.o +0 -0
- mindspore/lib/plugin/ascend/custom_ascendc_910b/op_impl/ai_core/tbe/kernel/ascend910_93/decoder_kv_cache/DecoderKvCache_2d151f0b1d2db51faa2968d5b67544e2.json +156 -0
- mindspore/lib/plugin/ascend/custom_ascendc_910b/op_impl/ai_core/tbe/kernel/ascend910_93/decoder_kv_cache/DecoderKvCache_2d151f0b1d2db51faa2968d5b67544e2.o +0 -0
- mindspore/lib/plugin/ascend/custom_ascendc_910b/op_impl/ai_core/tbe/kernel/ascend910_93/decoder_kv_cache/DecoderKvCache_561690ec17cc1def3d2fcf68c1b07b56.json +156 -0
- mindspore/lib/plugin/ascend/custom_ascendc_910b/op_impl/ai_core/tbe/kernel/ascend910_93/decoder_kv_cache/DecoderKvCache_561690ec17cc1def3d2fcf68c1b07b56.o +0 -0
- mindspore/lib/plugin/ascend/custom_ascendc_910b/op_impl/ai_core/tbe/kernel/ascend910_93/decoder_kv_cache/DecoderKvCache_570f9aaa99e5e773b3dd0a33784363f4.json +156 -0
- mindspore/lib/plugin/ascend/custom_ascendc_910b/op_impl/ai_core/tbe/kernel/ascend910_93/decoder_kv_cache/DecoderKvCache_570f9aaa99e5e773b3dd0a33784363f4.o +0 -0
- mindspore/lib/plugin/ascend/custom_ascendc_910b/op_impl/ai_core/tbe/kernel/ascend910_93/decoder_kv_cache/DecoderKvCache_59668a0f0764afb98fda8ab9e84126f1.json +156 -0
- mindspore/lib/plugin/ascend/custom_ascendc_910b/op_impl/ai_core/tbe/kernel/ascend910_93/decoder_kv_cache/DecoderKvCache_59668a0f0764afb98fda8ab9e84126f1.o +0 -0
- mindspore/lib/plugin/ascend/custom_ascendc_910b/op_impl/ai_core/tbe/kernel/ascend910_93/decoder_kv_cache/DecoderKvCache_91d9833e4792b70b670e4e2b916abd86.json +156 -0
- mindspore/lib/plugin/ascend/custom_ascendc_910b/op_impl/ai_core/tbe/kernel/ascend910_93/decoder_kv_cache/DecoderKvCache_91d9833e4792b70b670e4e2b916abd86.o +0 -0
- mindspore/lib/plugin/ascend/custom_ascendc_910b/op_impl/ai_core/tbe/kernel/ascend910_93/decoder_kv_cache/DecoderKvCache_c74cdc5fef094383401856f8519504af.json +156 -0
- mindspore/lib/plugin/ascend/custom_ascendc_910b/op_impl/ai_core/tbe/kernel/ascend910_93/decoder_kv_cache/DecoderKvCache_c74cdc5fef094383401856f8519504af.o +0 -0
- mindspore/lib/plugin/ascend/custom_ascendc_910b/op_impl/ai_core/tbe/kernel/ascend910_93/prompt_kv_cache/PromptKvCache_0515c7b1a4cd614449e38c5e9a7e3f8d.json +165 -0
- mindspore/lib/plugin/ascend/custom_ascendc_910b/op_impl/ai_core/tbe/kernel/ascend910_93/prompt_kv_cache/PromptKvCache_0515c7b1a4cd614449e38c5e9a7e3f8d.o +0 -0
- mindspore/lib/plugin/ascend/custom_ascendc_910b/op_impl/ai_core/tbe/kernel/ascend910_93/prompt_kv_cache/PromptKvCache_09f22d898d6358c91e7c4fc48bac48e7.json +165 -0
- mindspore/lib/plugin/ascend/custom_ascendc_910b/op_impl/ai_core/tbe/kernel/ascend910_93/prompt_kv_cache/PromptKvCache_09f22d898d6358c91e7c4fc48bac48e7.o +0 -0
- mindspore/lib/plugin/ascend/custom_ascendc_910b/op_impl/ai_core/tbe/kernel/ascend910_93/prompt_kv_cache/PromptKvCache_0cb9a6f894b925250227136e5aab7061.json +165 -0
- mindspore/lib/plugin/ascend/custom_ascendc_910b/op_impl/ai_core/tbe/kernel/ascend910_93/prompt_kv_cache/PromptKvCache_0cb9a6f894b925250227136e5aab7061.o +0 -0
- mindspore/lib/plugin/ascend/custom_ascendc_910b/op_impl/ai_core/tbe/kernel/ascend910_93/prompt_kv_cache/PromptKvCache_2fa8702ffd7ca85e9e194f62644415d5.json +165 -0
- mindspore/lib/plugin/ascend/custom_ascendc_910b/op_impl/ai_core/tbe/kernel/ascend910_93/prompt_kv_cache/PromptKvCache_2fa8702ffd7ca85e9e194f62644415d5.o +0 -0
- mindspore/lib/plugin/ascend/custom_ascendc_910b/op_impl/ai_core/tbe/kernel/ascend910_93/prompt_kv_cache/PromptKvCache_570b62f187dfd439b64613d881deedb7.json +165 -0
- mindspore/lib/plugin/ascend/custom_ascendc_910b/op_impl/ai_core/tbe/kernel/ascend910_93/prompt_kv_cache/PromptKvCache_570b62f187dfd439b64613d881deedb7.o +0 -0
- mindspore/lib/plugin/ascend/custom_ascendc_910b/op_impl/ai_core/tbe/kernel/ascend910_93/prompt_kv_cache/PromptKvCache_585218c11411ff84709b9e725b66c435.json +165 -0
- mindspore/lib/plugin/ascend/custom_ascendc_910b/op_impl/ai_core/tbe/kernel/ascend910_93/prompt_kv_cache/PromptKvCache_585218c11411ff84709b9e725b66c435.o +0 -0
- mindspore/lib/plugin/ascend/custom_ascendc_910b/op_impl/ai_core/tbe/kernel/ascend910_93/prompt_kv_cache/PromptKvCache_5c9365ccde170b358c5b126d69dae13e.json +165 -0
- mindspore/lib/plugin/ascend/custom_ascendc_910b/op_impl/ai_core/tbe/kernel/ascend910_93/prompt_kv_cache/PromptKvCache_5c9365ccde170b358c5b126d69dae13e.o +0 -0
- mindspore/lib/plugin/ascend/custom_ascendc_910b/op_impl/ai_core/tbe/kernel/ascend910_93/prompt_kv_cache/PromptKvCache_6d97c45b7c43bc16fcff8baa5dacac4e.json +165 -0
- mindspore/lib/plugin/ascend/custom_ascendc_910b/op_impl/ai_core/tbe/kernel/ascend910_93/prompt_kv_cache/PromptKvCache_6d97c45b7c43bc16fcff8baa5dacac4e.o +0 -0
- mindspore/lib/plugin/ascend/custom_ascendc_910b/op_impl/ai_core/tbe/kernel/config/ascend910_93/all_finite.json +139 -0
- mindspore/lib/plugin/ascend/custom_ascendc_910b/op_impl/ai_core/tbe/kernel/config/ascend910_93/binary_info_config.json +361 -0
- mindspore/lib/plugin/ascend/custom_ascendc_910b/op_impl/ai_core/tbe/kernel/config/ascend910_93/decoder_kv_cache.json +892 -0
- mindspore/lib/plugin/ascend/custom_ascendc_910b/op_impl/ai_core/tbe/kernel/config/ascend910_93/prompt_kv_cache.json +892 -0
- mindspore/lib/plugin/ascend/custom_ascendc_910b/op_impl/ai_core/tbe/op_tiling/lib/linux/aarch64/libcust_opmaster_rt2.0.so +0 -0
- mindspore/lib/plugin/ascend/custom_ascendc_910b/op_impl/ai_core/tbe/op_tiling/liboptiling.so +0 -0
- mindspore/lib/plugin/ascend/custom_ascendc_910b/op_proto/lib/linux/aarch64/libcust_opsproto_rt2.0.so +0 -0
- mindspore/lib/plugin/ascend/custom_ascendc_910b/version.info +1 -1
- mindspore/lib/plugin/ascend/custom_compiler/setup.py +1 -1
- mindspore/lib/plugin/ascend/libascend_collective.so +0 -0
- mindspore/lib/plugin/ascend/libdvpp_utils.so +0 -0
- mindspore/lib/plugin/ascend/liblowlatency_collective.so +0 -0
- mindspore/lib/plugin/ascend/libmindspore_cpu_kernels.so +0 -0
- mindspore/lib/plugin/ascend/libmindspore_internal_kernels.so +0 -0
- mindspore/lib/plugin/ascend/libms_ascend_native_boost.so +0 -0
- mindspore/lib/plugin/ascend/libms_atb_boost.so +0 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/asdops/device/ascend910b/bin/ascend910b.bin +957 -955
- mindspore/lib/plugin/ascend/ms_kernels_internal/asdops/lib/libasdops_static.a +0 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/asdops/lib/liblcal_static.a +0 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/{acme/include/base_type.h → base_type.h} +25 -20
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/{cast/cast_tiling.h → internal.h} +6 -4
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/internal_op.h +114 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/llm/boost_kernel.h +70 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/llm/llama_impl.h +85 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/llm/model_interface.h +52 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/llm/tensor.h +81 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/op_creator.h +123 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/op_param.h +155 -110
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/{acme/include/tiling_info.h → tiling_info.h} +12 -9
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/tiling_utils.h +178 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/lib/libadd_layer_norm_op.so +0 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/lib/libadd_rms_norm_op.so +0 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/lib/libadd_rms_norm_quant_op.so +0 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/lib/libapply_rotary_pos_emb_310p_op.so +0 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/lib/libapply_rotary_pos_emb_op.so +0 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/lib/libcast_op.so +0 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/lib/libcompare_op.so +0 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/lib/libgelu_op.so +0 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/lib/libllama_op.so +0 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/lib/libmatmul_op.so +0 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/lib/libms_kernels_internal.so +0 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/lib/libms_optiling.so +0 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/lib/libmulti_weight_matmul_kernel_op.so +0 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/lib/libreshape_and_cache_nz_op.so +0 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/lib/libreshape_and_cache_op.so +0 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/lib/librms_norm_op.so +0 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/op_kernels/ascend310p/object_kernels/internal_pp_matmul_f16_nz/internal_pp_matmul_f16_nz.o +0 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/op_kernels/ascend310p/object_kernels/internal_pp_matmul_f16_nz/internal_pp_matmul_f16_nz_0.o +0 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/op_kernels/ascend310p/object_kernels/internal_pp_matmul_i8_nz_compress/internal_pp_matmul_i8_nz_compress.o +0 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/op_kernels/ascend310p/object_kernels/internal_pp_matmul_i8_nz_compress/internal_pp_matmul_i8_nz_compress_0.o +0 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/op_kernels/ascend310p/object_kernels/internal_pp_matmul_int8_nz/internal_pp_matmul_int8_nz.o +0 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/op_kernels/ascend310p/object_kernels/internal_pp_matmul_int8_nz/internal_pp_matmul_int8_nz_0.o +0 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/op_kernels/ascend310p/so_kernels/libadd_rms_norm_quant_ascend310p.so +0 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/{lib/libapply_rotary_pos_emb_310p_impl.so → op_kernels/ascend310p/so_kernels/libapply_rotary_pos_emb_310p_ascend310p.so} +0 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/op_kernels/ascend310p/so_kernels/libcast_ascend310p.so +0 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/op_kernels/ascend310p/so_kernels/libcompare_ascend310p.so +0 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/op_kernels/ascend310p/so_kernels/libgelu_ascend310p.so +0 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/op_kernels/ascend310p/so_kernels/libmatmul_ascend310p.so +0 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/op_kernels/ascend310p/so_kernels/libreshape_and_cache_nz_ascend310p.so +0 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/op_kernels/ascend910b/hphol_kernels/add_rms_norm_dynamic_quant/AddRmsNormDynamicQuant_4b60f88cdc28b25a36bad2d8b0a88092.json +163 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/op_kernels/ascend910b/hphol_kernels/add_rms_norm_dynamic_quant/AddRmsNormDynamicQuant_4b60f88cdc28b25a36bad2d8b0a88092.o +0 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/op_kernels/ascend910b/hphol_kernels/add_rms_norm_dynamic_quant/AddRmsNormDynamicQuant_cde61da2bd6fededcb1ba310a6ad16ee.json +163 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/op_kernels/ascend910b/hphol_kernels/add_rms_norm_dynamic_quant/AddRmsNormDynamicQuant_cde61da2bd6fededcb1ba310a6ad16ee.o +0 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/op_kernels/ascend910b/object_kernels/flash_attention_score/flash_attention_score_bf16_bnsd_full_mix.o +0 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/op_kernels/ascend910b/object_kernels/flash_attention_score/flash_attention_score_bf16_bnsd_tri_mix.o +0 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/op_kernels/ascend910b/object_kernels/flash_attention_score/flash_attention_score_bf16_bsh_full_mix.o +0 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/op_kernels/ascend910b/object_kernels/flash_attention_score/flash_attention_score_bf16_bsh_tri_mix.o +0 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/op_kernels/ascend910b/object_kernels/flash_attention_score/flash_attention_score_fp16_bnsd_full_mix.o +0 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/op_kernels/ascend910b/object_kernels/flash_attention_score/flash_attention_score_fp16_bnsd_tri_mix.o +0 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/op_kernels/ascend910b/object_kernels/flash_attention_score/flash_attention_score_fp16_bsh_full_mix.o +0 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/op_kernels/ascend910b/object_kernels/flash_attention_score/flash_attention_score_fp16_bsh_tri_mix.o +0 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/op_kernels/ascend910b/object_kernels/internal_matmul_postfusion_mix/internal_matmul_postfusion_mix.o +0 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/op_kernels/ascend910b/object_kernels/internal_matmul_postfusion_mix/internal_matmul_postfusion_mix_mix_aic_0.o +0 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/op_kernels/ascend910b/object_kernels/internal_matmul_postfusion_mix/internal_matmul_postfusion_mix_mix_aiv_0.o +0 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/op_kernels/ascend910b/object_kernels/internal_multi_weight_matmul_postfusion_mix/internal_multi_weight_matmul_postfusion_mix.o +0 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/op_kernels/ascend910b/object_kernels/internal_multi_weight_matmul_postfusion_mix/internal_multi_weight_matmul_postfusion_mix_mix_aic_0.o +0 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/op_kernels/ascend910b/object_kernels/internal_multi_weight_matmul_postfusion_mix/internal_multi_weight_matmul_postfusion_mix_mix_aiv_0.o +0 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/op_kernels/ascend910b/{matmul_add_rmsnorm → object_kernels/matmul_add_rmsnorm}/matmul_add_rmsnorm_bf16_bf16.o +0 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/op_kernels/ascend910b/{matmul_add_rmsnorm → object_kernels/matmul_add_rmsnorm}/matmul_add_rmsnorm_bf16_fp16.o +0 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/op_kernels/ascend910b/{matmul_add_rmsnorm → object_kernels/matmul_add_rmsnorm}/matmul_add_rmsnorm_bf16_fp32.o +0 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/op_kernels/ascend910b/{matmul_add_rmsnorm → object_kernels/matmul_add_rmsnorm}/matmul_add_rmsnorm_fp16_bf16.o +0 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/op_kernels/ascend910b/{matmul_add_rmsnorm → object_kernels/matmul_add_rmsnorm}/matmul_add_rmsnorm_fp16_fp16.o +0 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/op_kernels/ascend910b/{matmul_add_rmsnorm → object_kernels/matmul_add_rmsnorm}/matmul_add_rmsnorm_fp16_fp32.o +0 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/op_kernels/ascend910b/object_kernels/paged_attention_v2/paged_attention_v2.o +0 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/op_kernels/ascend910b/object_kernels/paged_attention_v2/paged_attention_v2_mix_aic_0.o +0 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/op_kernels/ascend910b/object_kernels/paged_attention_v2/paged_attention_v2_mix_aiv_0.o +0 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/{lib/libadd_layer_norm_impl.so → op_kernels/ascend910b/so_kernels/libadd_layer_norm_ascend910b.so} +0 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/{lib/libadd_rms_norm_impl.so → op_kernels/ascend910b/so_kernels/libadd_rms_norm_ascend910b.so} +0 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/op_kernels/ascend910b/so_kernels/libadd_rms_norm_quant_ascend910b.so +0 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/{lib/libapply_rotary_pos_emb_impl.so → op_kernels/ascend910b/so_kernels/libapply_rotary_pos_emb_ascend910b.so} +0 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/{lib/libcast_impl.so → op_kernels/ascend910b/so_kernels/libcast_ascend910b.so} +0 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/{lib/libnot_equal_impl.so → op_kernels/ascend910b/so_kernels/libcompare_ascend910b.so} +0 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/{lib/libgelu_impl.so → op_kernels/ascend910b/so_kernels/libgelu_ascend910b.so} +0 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/op_kernels/ascend910b/so_kernels/libllama_ascend910b.so +0 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/{lib/libmatmul_impl.so → op_kernels/ascend910b/so_kernels/libmatmul_ascend910b.so} +0 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/{lib/libmulti_weight_matmul_kernel_impl.so → op_kernels/ascend910b/so_kernels/libmulti_weight_matmul_kernel_ascend910b.so} +0 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/{lib/libreshape_and_cache_impl.so → op_kernels/ascend910b/so_kernels/libreshape_and_cache_ascend910b.so} +0 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/{lib/librms_norm_impl.so → op_kernels/ascend910b/so_kernels/librms_norm_ascend910b.so} +0 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/lccl/lib/liblccl_wrapper.so +0 -0
- mindspore/lib/plugin/libmindspore_ascend.so.2 +0 -0
- mindspore/log.py +12 -0
- mindspore/mindrecord/__init__.py +1 -1
- mindspore/mindrecord/config.py +17 -316
- mindspore/mindrecord/filereader.py +1 -9
- mindspore/mindrecord/filewriter.py +5 -15
- mindspore/mindrecord/mindpage.py +1 -9
- mindspore/mint/__init__.py +824 -218
- mindspore/mint/distributed/__init__.py +66 -4
- mindspore/mint/distributed/distributed.py +2594 -44
- mindspore/mint/linalg/__init__.py +6 -0
- mindspore/mint/nn/__init__.py +473 -14
- mindspore/mint/nn/functional.py +486 -11
- mindspore/mint/nn/layer/__init__.py +17 -4
- mindspore/mint/nn/layer/_functions.py +330 -0
- mindspore/mint/nn/layer/activation.py +169 -1
- mindspore/mint/nn/layer/basic.py +123 -0
- mindspore/mint/nn/layer/conv.py +727 -0
- mindspore/mint/nn/layer/normalization.py +215 -19
- mindspore/mint/nn/layer/padding.py +797 -0
- mindspore/mint/nn/layer/pooling.py +170 -0
- mindspore/mint/optim/__init__.py +2 -1
- mindspore/mint/optim/adam.py +223 -0
- mindspore/mint/optim/adamw.py +26 -19
- mindspore/mint/special/__init__.py +2 -1
- mindspore/multiprocessing/__init__.py +5 -0
- mindspore/nn/cell.py +126 -19
- mindspore/nn/dynamic_lr.py +2 -1
- mindspore/nn/layer/activation.py +6 -6
- mindspore/nn/layer/basic.py +35 -25
- mindspore/nn/layer/channel_shuffle.py +3 -3
- mindspore/nn/layer/embedding.py +3 -3
- mindspore/nn/layer/normalization.py +8 -7
- mindspore/nn/layer/padding.py +4 -3
- mindspore/nn/layer/pooling.py +47 -13
- mindspore/nn/layer/rnn_cells.py +1 -1
- mindspore/nn/layer/rnns.py +2 -1
- mindspore/nn/layer/timedistributed.py +5 -5
- mindspore/nn/layer/transformer.py +48 -26
- mindspore/nn/learning_rate_schedule.py +5 -3
- mindspore/nn/loss/loss.py +31 -36
- mindspore/nn/optim/ada_grad.py +1 -0
- mindspore/nn/optim/adadelta.py +2 -2
- mindspore/nn/optim/adam.py +1 -1
- mindspore/nn/optim/lars.py +1 -4
- mindspore/nn/optim/optimizer.py +1 -1
- mindspore/nn/optim/rprop.py +2 -2
- mindspore/nn/optim/thor.py +2 -1
- mindspore/nn/utils/init.py +13 -11
- mindspore/nn/wrap/cell_wrapper.py +4 -6
- mindspore/nn/wrap/loss_scale.py +3 -4
- mindspore/numpy/array_creations.py +60 -62
- mindspore/numpy/array_ops.py +148 -143
- mindspore/numpy/logic_ops.py +41 -42
- mindspore/numpy/math_ops.py +361 -359
- mindspore/numpy/utils.py +16 -16
- mindspore/numpy/utils_const.py +4 -4
- mindspore/ops/__init__.py +2 -1
- mindspore/ops/_grad_experimental/grad_comm_ops.py +94 -13
- mindspore/ops/_grad_experimental/grad_debug_ops.py +6 -1
- mindspore/ops/_grad_experimental/grad_inner_ops.py +9 -0
- mindspore/ops/_grad_experimental/grad_math_ops.py +2 -1
- mindspore/ops/_op_impl/cpu/__init__.py +1 -0
- mindspore/ops/_op_impl/cpu/raise_op.py +28 -0
- mindspore/ops/_vmap/vmap_array_ops.py +20 -19
- mindspore/ops/_vmap/vmap_base.py +0 -2
- mindspore/ops/_vmap/vmap_grad_nn_ops.py +19 -13
- mindspore/ops/_vmap/vmap_math_ops.py +11 -9
- mindspore/ops/_vmap/vmap_nn_ops.py +20 -34
- mindspore/ops/auto_generate/cpp_create_prim_instance_helper.py +149 -12
- mindspore/ops/auto_generate/gen_arg_handler.py +0 -61
- mindspore/ops/auto_generate/gen_extend_func.py +554 -60
- mindspore/ops/auto_generate/gen_ops_def.py +1621 -115
- mindspore/ops/auto_generate/gen_ops_prim.py +8024 -3409
- mindspore/ops/auto_generate/pyboost_inner_prim.py +183 -79
- mindspore/ops/composite/base.py +1 -1
- mindspore/ops/composite/multitype_ops/_compile_utils.py +229 -30
- mindspore/ops/composite/multitype_ops/pow_impl.py +0 -29
- mindspore/ops/function/__init__.py +12 -0
- mindspore/ops/function/array_func.py +561 -159
- mindspore/ops/function/clip_func.py +64 -0
- mindspore/ops/function/debug_func.py +28 -20
- mindspore/ops/function/image_func.py +1 -1
- mindspore/ops/function/linalg_func.py +5 -4
- mindspore/ops/function/math_func.py +1659 -290
- mindspore/ops/function/nn_func.py +988 -317
- mindspore/ops/function/parameter_func.py +3 -56
- mindspore/ops/function/random_func.py +243 -33
- mindspore/ops/function/sparse_unary_func.py +1 -1
- mindspore/ops/functional.py +18 -5
- mindspore/ops/functional_overload.py +897 -0
- mindspore/ops/operations/__init__.py +3 -2
- mindspore/ops/operations/_embedding_cache_ops.py +4 -4
- mindspore/ops/operations/_grad_ops.py +2 -34
- mindspore/ops/operations/_infer_ops.py +2 -1
- mindspore/ops/operations/_inner_ops.py +38 -8
- mindspore/ops/operations/array_ops.py +45 -303
- mindspore/ops/operations/comm_ops.py +19 -16
- mindspore/ops/operations/custom_ops.py +11 -55
- mindspore/ops/operations/debug_ops.py +42 -47
- mindspore/ops/operations/inner_ops.py +6 -4
- mindspore/ops/operations/linalg_ops.py +3 -2
- mindspore/ops/operations/manually_defined/ops_def.py +185 -104
- mindspore/ops/operations/math_ops.py +11 -216
- mindspore/ops/operations/nn_ops.py +146 -308
- mindspore/ops/primitive.py +23 -21
- mindspore/ops/tensor_method.py +1669 -0
- mindspore/ops_generate/aclnn_kernel_register_auto_cc_generator.py +110 -0
- mindspore/ops_generate/add_tensor_docs_generator.py +54 -0
- mindspore/ops_generate/arg_handler.py +0 -61
- mindspore/ops_generate/auto_grad_impl_cc_generator.py +135 -0
- mindspore/ops_generate/auto_grad_reg_cc_generator.py +93 -0
- mindspore/ops_generate/base_generator.py +11 -0
- mindspore/ops_generate/cpp_create_prim_instance_helper_generator.py +108 -0
- mindspore/ops_generate/functional_map_cpp_generator.py +491 -0
- mindspore/ops_generate/functional_overload_py_generator.py +110 -0
- mindspore/ops_generate/functions_cc_generator.py +233 -0
- mindspore/ops_generate/gen_aclnn_implement.py +110 -114
- mindspore/ops_generate/gen_constants.py +157 -3
- mindspore/ops_generate/gen_ops.py +245 -990
- mindspore/ops_generate/gen_pyboost_func.py +97 -998
- mindspore/ops_generate/gen_utils.py +119 -33
- mindspore/ops_generate/lite_ops_cpp_generator.py +155 -0
- mindspore/ops_generate/op_api_proto.py +206 -0
- mindspore/ops_generate/op_def_py_generator.py +131 -0
- mindspore/ops_generate/op_prim_py_generator.py +480 -0
- mindspore/ops_generate/op_proto.py +373 -108
- mindspore/ops_generate/op_template_parser.py +436 -0
- mindspore/ops_generate/ops_def_cc_generator.py +288 -0
- mindspore/ops_generate/ops_def_h_generator.py +74 -0
- mindspore/ops_generate/ops_name_h_generator.py +68 -0
- mindspore/ops_generate/ops_primitive_h_generator.py +81 -0
- mindspore/ops_generate/pyboost_functions_cpp_generator.py +370 -0
- mindspore/ops_generate/pyboost_functions_h_generator.py +68 -0
- mindspore/ops_generate/pyboost_functions_py_generator.py +148 -0
- mindspore/ops_generate/pyboost_grad_function_cpp_generator.py +154 -0
- mindspore/ops_generate/pyboost_inner_prim_generator.py +131 -0
- mindspore/ops_generate/pyboost_native_grad_functions_generator.py +268 -0
- mindspore/ops_generate/pyboost_op_cpp_code_generator.py +851 -0
- mindspore/ops_generate/pyboost_overload_functions_cpp_generator.py +344 -0
- mindspore/ops_generate/pyboost_utils.py +92 -33
- mindspore/ops_generate/template.py +294 -44
- mindspore/ops_generate/tensor_func_reg_cpp_generator.py +422 -0
- mindspore/parallel/__init__.py +3 -3
- mindspore/parallel/_auto_parallel_context.py +24 -33
- mindspore/parallel/_parallel_serialization.py +13 -2
- mindspore/parallel/_utils.py +4 -1
- mindspore/parallel/algo_parameter_config.py +1 -1
- mindspore/parallel/checkpoint_transform.py +44 -0
- mindspore/parallel/cluster/process_entity/_api.py +131 -37
- mindspore/parallel/cluster/process_entity/_utils.py +41 -6
- mindspore/parallel/cluster/run.py +20 -3
- mindspore/parallel/parameter_broadcast.py +1 -1
- mindspore/parallel/shard.py +3 -0
- mindspore/parallel/transform_safetensors.py +119 -253
- mindspore/profiler/__init__.py +17 -4
- mindspore/profiler/analysis/__init__.py +0 -0
- mindspore/profiler/analysis/parser/__init__.py +0 -0
- mindspore/profiler/analysis/parser/ascend_cann_parser.py +166 -0
- mindspore/profiler/analysis/parser/base_parser.py +158 -0
- mindspore/profiler/analysis/parser/framework_cann_relation_parser.py +45 -0
- mindspore/profiler/analysis/parser/ms_framework_parser.py +142 -0
- mindspore/profiler/analysis/parser/ms_minddata_parser.py +145 -0
- mindspore/profiler/analysis/parser/timeline_assembly_factory/__init__.py +0 -0
- mindspore/profiler/analysis/parser/timeline_assembly_factory/ascend_timeline_assembler.py +261 -0
- mindspore/profiler/analysis/parser/timeline_assembly_factory/base_timeline_assembler.py +40 -0
- mindspore/profiler/analysis/parser/timeline_assembly_factory/trace_view_container.py +84 -0
- mindspore/profiler/analysis/parser/timeline_creator/__init__.py +0 -0
- mindspore/profiler/analysis/parser/timeline_creator/base_timeline_creator.py +44 -0
- mindspore/profiler/analysis/parser/timeline_creator/cpu_op_timeline_creator.py +90 -0
- mindspore/profiler/analysis/parser/timeline_creator/fwk_timeline_creator.py +76 -0
- mindspore/profiler/analysis/parser/timeline_creator/msprof_timeline_creator.py +103 -0
- mindspore/profiler/analysis/parser/timeline_creator/scope_layer_timeline_creator.py +134 -0
- mindspore/profiler/analysis/parser/timeline_event/__init__.py +0 -0
- mindspore/profiler/analysis/parser/timeline_event/base_event.py +233 -0
- mindspore/profiler/analysis/parser/timeline_event/cpu_op_event.py +47 -0
- mindspore/profiler/analysis/parser/timeline_event/flow_event.py +36 -0
- mindspore/profiler/analysis/parser/timeline_event/fwk_event.py +260 -0
- mindspore/profiler/analysis/parser/timeline_event/msprof_event.py +73 -0
- mindspore/profiler/analysis/parser/timeline_event/scope_layer_event.py +53 -0
- mindspore/profiler/analysis/parser/timeline_event/timeline_event_pool.py +146 -0
- mindspore/profiler/analysis/task_manager.py +131 -0
- mindspore/profiler/analysis/time_converter.py +84 -0
- mindspore/profiler/analysis/viewer/__init__.py +0 -0
- mindspore/profiler/analysis/viewer/ascend_communication_viewer.py +333 -0
- mindspore/profiler/analysis/viewer/ascend_integrate_viewer.py +87 -0
- mindspore/profiler/analysis/viewer/ascend_kernel_details_viewer.py +252 -0
- mindspore/profiler/analysis/viewer/ascend_memory_viewer.py +313 -0
- mindspore/profiler/analysis/viewer/ascend_op_memory_viewer.py +322 -0
- mindspore/profiler/analysis/viewer/ascend_step_trace_time_viewer.py +265 -0
- mindspore/profiler/analysis/viewer/ascend_timeline_viewer.py +58 -0
- mindspore/profiler/analysis/viewer/base_viewer.py +26 -0
- mindspore/profiler/analysis/viewer/ms_dataset_viewer.py +97 -0
- mindspore/profiler/analysis/viewer/ms_minddata_viewer.py +581 -0
- mindspore/profiler/analysis/work_flow.py +73 -0
- mindspore/profiler/common/ascend_msprof_exporter.py +138 -0
- mindspore/profiler/common/command_executor.py +90 -0
- mindspore/profiler/common/constant.py +174 -3
- mindspore/profiler/common/file_manager.py +208 -0
- mindspore/profiler/common/log.py +130 -0
- mindspore/profiler/common/msprof_cmd_tool.py +202 -0
- mindspore/profiler/common/path_manager.py +371 -0
- mindspore/profiler/common/process_bar.py +168 -0
- mindspore/profiler/common/process_pool.py +9 -3
- mindspore/profiler/common/profiler_context.py +476 -0
- mindspore/profiler/common/profiler_info.py +304 -0
- mindspore/profiler/common/profiler_output_path.py +284 -0
- mindspore/profiler/common/profiler_parameters.py +210 -0
- mindspore/profiler/common/profiler_path_manager.py +120 -0
- mindspore/profiler/common/record_function.py +76 -0
- mindspore/profiler/common/tlv_decoder.py +76 -0
- mindspore/profiler/common/util.py +75 -2
- mindspore/profiler/dynamic_profiler.py +270 -37
- mindspore/profiler/envprofiler.py +138 -0
- mindspore/profiler/mstx.py +199 -0
- mindspore/profiler/platform/__init__.py +21 -0
- mindspore/profiler/platform/base_profiler.py +40 -0
- mindspore/profiler/platform/cpu_profiler.py +124 -0
- mindspore/profiler/platform/gpu_profiler.py +74 -0
- mindspore/profiler/platform/npu_profiler.py +309 -0
- mindspore/profiler/profiler.py +580 -93
- mindspore/profiler/profiler_action_controller.py +187 -0
- mindspore/profiler/profiler_interface.py +114 -0
- mindspore/profiler/schedule.py +208 -0
- mindspore/rewrite/api/symbol_tree.py +1 -2
- mindspore/run_check/_check_version.py +2 -6
- mindspore/runtime/__init__.py +37 -0
- mindspore/runtime/device.py +27 -0
- mindspore/runtime/event.py +209 -0
- mindspore/runtime/executor.py +148 -0
- mindspore/runtime/memory.py +392 -0
- mindspore/runtime/stream.py +460 -0
- mindspore/runtime/thread_bind_core.py +401 -0
- mindspore/train/__init__.py +2 -2
- mindspore/train/_utils.py +53 -18
- mindspore/train/amp.py +8 -4
- mindspore/train/callback/_checkpoint.py +32 -18
- mindspore/train/callback/_early_stop.py +1 -1
- mindspore/train/callback/_flops_collector.py +105 -69
- mindspore/train/callback/_history.py +1 -1
- mindspore/train/callback/_summary_collector.py +44 -6
- mindspore/train/callback/_tft_register.py +31 -10
- mindspore/train/dataset_helper.py +11 -11
- mindspore/train/metrics/precision.py +4 -5
- mindspore/train/mind_ir_pb2.py +167 -46
- mindspore/train/model.py +13 -15
- mindspore/train/serialization.py +462 -76
- mindspore/train/summary/summary_record.py +1 -2
- mindspore/train/train_thor/model_thor.py +1 -1
- mindspore/utils/__init__.py +4 -2
- mindspore/utils/bin/dataset-cache +0 -0
- mindspore/utils/bin/dataset-cache-server +0 -0
- mindspore/utils/dryrun.py +138 -0
- mindspore/utils/runtime_execution_order_check.py +550 -0
- mindspore/version.py +1 -1
- {mindspore-2.4.10.dist-info → mindspore-2.5.0.dist-info}/METADATA +2 -3
- {mindspore-2.4.10.dist-info → mindspore-2.5.0.dist-info}/RECORD +524 -458
- {mindspore-2.4.10.dist-info → mindspore-2.5.0.dist-info}/entry_points.txt +1 -1
- mindspore/_data_dump.cpython-311-aarch64-linux-gnu.so +0 -0
- mindspore/bin/cache_admin +0 -0
- mindspore/bin/cache_server +0 -0
- mindspore/common/_tensor_overload.py +0 -139
- mindspore/lib/libmindspore_np_dtype.so +0 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/acme/include/acme.h +0 -24
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/acme/include/acme_op.h +0 -82
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/acme/include/op_creator.h +0 -113
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/acme/include/op_param.h +0 -193
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/acme/src/core/dtype_registry.h +0 -90
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/acme/src/core/kernel_register.h +0 -46
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/acme/src/core/platform/platform_configs.h +0 -89
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/acme/src/core/platform/rt_funcs.h +0 -135
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/acme/src/ops/host_src/add_layer_norm_op.h +0 -60
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/acme/src/ops/host_src/add_rms_norm_op.h +0 -50
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/acme/src/ops/host_src/add_rms_norm_quant_op.h +0 -50
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/acme/src/ops/host_src/apply_rotary_pos_emb_nz_op.h +0 -42
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/acme/src/ops/host_src/apply_rotary_pos_emb_op.h +0 -55
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/acme/src/ops/host_src/asd_elewise_op.h +0 -34
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/acme/src/ops/host_src/asd_only_ops.h +0 -94
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/acme/src/ops/host_src/asd_op_base.h +0 -97
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/acme/src/ops/host_src/cast_op.h +0 -52
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/acme/src/ops/host_src/flash_attention_score_op.h +0 -97
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/acme/src/ops/host_src/gelu_op.h +0 -44
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/acme/src/ops/host_src/matmul_add_rmsnorm_op.h +0 -73
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/acme/src/ops/host_src/matmul_op.h +0 -108
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/acme/src/ops/host_src/multi_impls_op.h +0 -64
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/acme/src/ops/host_src/multi_weight_matmul_op.h +0 -91
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/acme/src/ops/host_src/paged_attention_op.h +0 -99
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/acme/src/ops/host_src/reshape_and_cache_nz_op.h +0 -44
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/acme/src/ops/host_src/reshape_and_cache_op.h +0 -44
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/acme/src/ops/host_src/rms_norm_op.h +0 -64
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/acme/src/utils/asd_utils.h +0 -179
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/acme/src/utils/comm_utils.h +0 -69
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/acme/src/utils/profiling_util.h +0 -366
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/add/add_impl.h +0 -56
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/add/kernel/add.h +0 -21
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/add/tiling/add_tiling.h +0 -43
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/apply_rotary_pos_emb/apply_rotary_pos_emb_impl.h +0 -46
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/apply_rotary_pos_emb/kernel/apply_rotary_pos_emb.h +0 -23
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/apply_rotary_pos_emb/kernel/apply_rotary_pos_emb_base.h +0 -456
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/apply_rotary_pos_emb/kernel/apply_rotary_pos_emb_bf16.h +0 -217
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/apply_rotary_pos_emb/kernel/apply_rotary_pos_emb_fp.h +0 -391
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/apply_rotary_pos_emb/kernel/apply_rotary_pos_emb_fp16.h +0 -126
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/apply_rotary_pos_emb/kernel/apply_rotary_pos_emb_fp32.h +0 -230
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/apply_rotary_pos_emb/kernel/apply_rotary_pos_emb_tiling.h +0 -43
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/apply_rotary_pos_emb/kernel/apply_rotary_pos_emb_value.h +0 -27
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/apply_rotary_pos_emb_nz/apply_rotary_pos_emb_nz_impl.h +0 -34
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/apply_rotary_pos_emb_nz/kernel/apply_rotary_pos_emb_nz.h +0 -23
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/apply_rotary_pos_emb_nz/kernel/apply_rotary_pos_emb_nz_base.h +0 -460
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/apply_rotary_pos_emb_nz/kernel/apply_rotary_pos_emb_nz_fp16.h +0 -116
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/apply_rotary_pos_emb_nz/kernel/apply_rotary_pos_emb_nz_fp32.h +0 -230
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/apply_rotary_pos_emb_nz/kernel/apply_rotary_pos_emb_nz_tiling.h +0 -43
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/apply_rotary_pos_emb_nz/kernel/apply_rotary_pos_emb_nz_value.h +0 -27
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/asdop/asd_op_impl.h +0 -74
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/backend_param.h +0 -74
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/cast/cast_impl.h +0 -48
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/cast/kernel/cast_kernel.h +0 -21
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/compare/compare_impl.h +0 -55
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/compare/compare_tiling.h +0 -27
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/compare/kernel/compare_kernel.h +0 -23
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/elewise_binary/and_impl.h +0 -29
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/elewise_binary/div_impl.h +0 -29
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/elewise_binary/elewise_binary_impl.h +0 -48
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/elewise_binary/elewise_binary_tiling.h +0 -25
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/elewise_binary/kernel/and_kernel.h +0 -46
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/elewise_binary/kernel/div_kernel.h +0 -46
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/elewise_binary/kernel/elewise_binary_base.h +0 -260
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/elewise_binary/kernel/elewise_binary_kernel.h +0 -35
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/elewise_binary/kernel/max_kernel.h +0 -66
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/elewise_binary/kernel/min_kernel.h +0 -66
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/elewise_binary/kernel/mul_kernel.h +0 -66
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/elewise_binary/kernel/or_kernel.h +0 -46
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/elewise_binary/max_impl.h +0 -29
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/elewise_binary/min_impl.h +0 -29
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/elewise_binary/mul_impl.h +0 -29
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/elewise_binary/or_impl.h +0 -29
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/elewise_unary/abs_impl.h +0 -29
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/elewise_unary/elewise_unary_impl.h +0 -47
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/elewise_unary/elewise_unary_tiling.h +0 -24
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/elewise_unary/exp_impl.h +0 -29
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/elewise_unary/kernel/abs_kernel.h +0 -45
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/elewise_unary/kernel/elewise_unary_base.h +0 -148
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/elewise_unary/kernel/elewise_unary_kernel.h +0 -31
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/elewise_unary/kernel/exp_kernel.h +0 -45
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/elewise_unary/kernel/ln_kernel.h +0 -45
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/elewise_unary/kernel/not_kernel.h +0 -45
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/elewise_unary/kernel/reciprocal_kernel.h +0 -45
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/elewise_unary/kernel/relu_kernel.h +0 -55
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/elewise_unary/kernel/rsqrt_kernel.h +0 -45
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/elewise_unary/kernel/sqrt_kernel.h +0 -45
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/elewise_unary/ln_impl.h +0 -29
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/elewise_unary/not_impl.h +0 -29
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/elewise_unary/reciprocal_impl.h +0 -29
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/elewise_unary/relu_impl.h +0 -29
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/elewise_unary/rsqrt_impl.h +0 -29
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/elewise_unary/sqrt_impl.h +0 -29
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/flash_attention_score/flash_attention_score_impl.h +0 -68
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/internal_kernel.h +0 -99
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/internal_rtbackend.h +0 -21
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/lccl/lccl_wrapper.h +0 -58
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/ms_int_types.h +0 -91
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/ms_int_utils.h +0 -108
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/paged_attention/paged_attention_impl.h +0 -64
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/param/add_param.h +0 -68
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/param/attention_param.h +0 -40
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/param/cast_param.h +0 -30
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/param/compare_param.h +0 -31
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/param/elewise_param.h +0 -41
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/param/grouped_matmul_param.h +0 -40
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/param/matmul_ext_param.h +0 -38
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/param/matmul_qkv_param.h +0 -42
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/param/sub_param.h +0 -33
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/profiling_util.h +0 -377
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/reshape_and_cache_nz/kernel/reshape_and_cache_nz.h +0 -24
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/reshape_and_cache_nz/reshape_and_cache_nz_impl.h +0 -42
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/reshape_and_cache_nz/reshape_and_cache_nz_tiling.h +0 -27
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/rms_norm/rms_norm_impl.h +0 -46
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/sub/kernel/sub_kernel.h +0 -20
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/sub/sub_impl.h +0 -48
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/sub/sub_tiling.h +0 -25
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/tune_repo/matmul_table.h +0 -399
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/tune_repo/utils.h +0 -41
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/utils/backend.h +0 -45
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/utils/elewise_tiling.h +0 -29
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/utils/elewise_utils.h +0 -30
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/utils/log/log.h +0 -69
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/utils/log/log_core.h +0 -43
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/utils/log/log_entity.h +0 -38
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/utils/log/log_sink.h +0 -69
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/utils/log/log_stream.h +0 -41
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/utils/log/log_tiling.h +0 -71
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/utils/log/log_utils.h +0 -165
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/utils/math.h +0 -20
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/utils/register/kernel_creator.h +0 -39
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/utils/register/kernel_registry.h +0 -121
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/utils/utils.h +0 -106
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/lib/libAdd_impl.so +0 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/lib/libSub_impl.so +0 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/lib/libadd_rms_norm_quant_acme_impl.so +0 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/lib/libapply_rotary_pos_emb_310p_old_impl.so +0 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/lib/libapply_rotary_pos_emb_old_impl.so +0 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/lib/libreshape_and_cache_nz_impl.so +0 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/lib/libreshape_and_cache_nz_old_impl.so +0 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/op_kernels/ascend910b/AcmeMatMulPostFusionMixTactic/acme_matmul_postfusion_mix.json +0 -19
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/op_kernels/ascend910b/AcmeMatMulPostFusionMixTactic/acme_matmul_postfusion_mix.o +0 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/op_kernels/ascend910b/AcmeMatMulPostFusionMixTactic/acme_matmul_postfusion_mix_mix_aic_0.o +0 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/op_kernels/ascend910b/AcmeMatMulPostFusionMixTactic/acme_matmul_postfusion_mix_mix_aiv_0.o +0 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/op_kernels/ascend910b/AcmeMultiWeightMatMulPostFusionMixTactic/acme_multi_weight_matmul_postfusion_mix.json +0 -19
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/op_kernels/ascend910b/AcmeMultiWeightMatMulPostFusionMixTactic/acme_multi_weight_matmul_postfusion_mix.o +0 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/op_kernels/ascend910b/AcmeMultiWeightMatMulPostFusionMixTactic/acme_multi_weight_matmul_postfusion_mix_mix_aic_0.o +0 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/op_kernels/ascend910b/AcmeMultiWeightMatMulPostFusionMixTactic/acme_multi_weight_matmul_postfusion_mix_mix_aiv_0.o +0 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/op_kernels/ascend910b/flash_attention_score/flash_attention_score_bf16_bnsd_full_mix.o +0 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/op_kernels/ascend910b/flash_attention_score/flash_attention_score_bf16_bnsd_tri_mix.o +0 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/op_kernels/ascend910b/flash_attention_score/flash_attention_score_bf16_bsh_full_mix.o +0 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/op_kernels/ascend910b/flash_attention_score/flash_attention_score_bf16_bsh_tri_mix.o +0 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/op_kernels/ascend910b/flash_attention_score/flash_attention_score_fp16_bnsd_full_mix.o +0 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/op_kernels/ascend910b/flash_attention_score/flash_attention_score_fp16_bnsd_tri_mix.o +0 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/op_kernels/ascend910b/flash_attention_score/flash_attention_score_fp16_bsh_full_mix.o +0 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/op_kernels/ascend910b/flash_attention_score/flash_attention_score_fp16_bsh_tri_mix.o +0 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/op_kernels/ascend910b/paged_attention/paged_attention_bf16_bnsd_mix.o +0 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/op_kernels/ascend910b/paged_attention/paged_attention_bf16_bsh_mix.o +0 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/op_kernels/ascend910b/paged_attention/paged_attention_fp16_bnsd_mix.o +0 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/op_kernels/ascend910b/paged_attention/paged_attention_fp16_bsh_mix.o +0 -0
- mindspore/profiler/envprofiling.py +0 -254
- mindspore/profiler/profiling.py +0 -1926
- {mindspore-2.4.10.dist-info → mindspore-2.5.0.dist-info}/WHEEL +0 -0
- {mindspore-2.4.10.dist-info → mindspore-2.5.0.dist-info}/top_level.txt +0 -0

mindspore/dataset/engine/datasets.py

@@ -35,7 +35,6 @@ import stat
 import subprocess
 import warnings

-import gc
 import time
 import uuid
 import multiprocessing

@@ -53,8 +52,8 @@ import mindspore._c_dataengine as cde
 from mindspore._c_expression import typing

 from mindspore import log as logger
-from mindspore.parallel._ps_context import _is_role_pserver, _is_role_sched, _get_ps_context
-
+from mindspore.parallel._ps_context import _is_role_pserver, _is_role_sched, _get_ps_context, \
+    _enable_distributed_mindrt
 from mindspore.dataset.engine.offload import GetOffloadModel

 import mindspore.dataset.transforms.c_transforms as c_transforms

@@ -74,13 +73,14 @@ from .validators import check_batch, check_shuffle, check_map, check_filter, che
     check_save, check_tuple_iterator, check_dict_iterator, check_schema, check_to_device_send, check_padded_batch, \
     check_total_batch, check_sync_update
 from ..core.config import get_callback_timeout, _init_device_info, get_enable_shared_mem, get_num_parallel_workers, \
-    get_enable_watchdog, get_seed, set_seed, get_debug_mode, get_multiprocessing_timeout_interval,
+    get_enable_watchdog, get_seed, set_seed, get_debug_mode, get_multiprocessing_timeout_interval, \
+    _get_debug_hook_list, get_multiprocessing_start_method
 from ..core.datatypes import mstype_to_detype
 from ..core.validator_helpers import replace_none
 from ..core.py_util_helpers import ExceptionHandler
 from ..transforms.py_transforms_util import FuncWrapper, Implementation
 from ..vision.transforms import ToNumpy
-from ...mindrecord.config import _get_enc_key, _get_enc_mode,
+from ...mindrecord.config import _get_enc_key, _get_enc_mode, encrypt

 try:
     context = import_module("mindspore.context")

@@ -403,6 +403,7 @@ class Dataset:
         parent = self.parent
         self.parent = []
         dataset = copy.deepcopy(self)
+        dataset = self.pre_process(dataset)
         global _OP_NAME
         _OP_NAME = Dataset._get_operator_id(dataset)
         ir_tree = dataset.parse_tree(getter_mode)

@@ -410,6 +411,19 @@
         _init_device_info()
         return ir_tree, dataset

+    def pre_process(self, dataset):
+        """Insert batch operation for GeneratorDataset with batch_sampler."""
+        if hasattr(dataset, "has_batch_sampler") and dataset.has_batch_sampler:
+            original_parent = dataset.parent
+            dataset.parent = []
+            dataset = dataset.batch(batch_size=-1, num_parallel_workers=dataset.num_parallel_workers,
+                                    per_batch_map=dataset.collate_fn)
+            dataset.parent = original_parent
+        else:
+            for index in range(len(dataset.children)):
+                dataset.children[index] = self.pre_process(dataset.children[index])
+        return dataset
+
     def parse_tree(self, getter_mode=False):
         """
         Internal method to parse the API tree into an IR tree.

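The `pre_process` step added above rewrites a `GeneratorDataset` that was built with a batch sampler into an explicit `batch()` operation whose `per_batch_map` is the user-supplied `collate_fn`. A minimal usage sketch, assuming `GeneratorDataset` exposes `batch_sampler`/`collate_fn` parameters in 2.5.0 (only the `has_batch_sampler`/`collate_fn` attributes are visible in this hunk) and that `collate_fn` follows the `per_batch_map` calling convention it is wired into:

    import numpy as np
    import mindspore.dataset as ds

    class Digits:
        """Random-access source: 10 rows, each holding one int32 value."""
        def __getitem__(self, index):
            return (np.array([index], dtype=np.int32),)
        def __len__(self):
            return 10

    class PairSampler:
        """Hypothetical batch sampler yielding index lists of size 2."""
        def __iter__(self):
            for start in range(0, 10, 2):
                yield [start, start + 1]
        def __len__(self):
            return 5

    def collate(col, batch_info):
        # per_batch_map convention: a list of per-sample arrays in, one batched column out
        return (np.stack(col),)

    dataset = ds.GeneratorDataset(Digits(), ["data"], batch_sampler=PairSampler(), collate_fn=collate)
    for (batch,) in dataset.create_tuple_iterator(output_numpy=True):
        print(batch.shape)  # expected (2, 1)
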
@@ -1557,8 +1571,8 @@ class Dataset:
             >>> d1 = ds.GeneratorDataset(generator_1d, ["data"], shuffle=False)
             >>> d1.save('/path/to/save_file')
         """
-        if
-            raise RuntimeError("When encode mode
+        if _get_enc_key() is not None and num_files > 1:
+            raise RuntimeError("When encode mode is enabled, " +
                                "the automatic sharding function is unavailable.")

         ir_tree, api_tree = self.create_ir_tree()

@@ -1571,10 +1585,6 @@

         consumer.Save()

-        if _get_hash_mode() is not None:
-            append_hash_to_file(file_name)
-            append_hash_to_file(file_name + ".db")
-
         if _get_enc_key() is not None:
             encrypt(file_name, _get_enc_key(), _get_enc_mode())
             encrypt(file_name + ".db", _get_enc_key(), _get_enc_mode())

@@ -2747,8 +2757,8 @@ class BatchDataset(UnionBaseDataset):
             if self.num_parallel_workers is None:
                 self.num_parallel_workers = get_num_parallel_workers()

-            self.process_pool = _PythonMultiprocessing(
-                self.max_rowsize)
+            self.process_pool = _PythonMultiprocessing(get_multiprocessing_start_method(), self.num_parallel_workers,
+                                                       str(self), [self.per_batch_map], self.max_rowsize)
             # Wrap per_batch_map into _PythonCallable
             self.per_batch_map = _PythonCallable(self.per_batch_map, 0, self.process_pool)
         else:

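For orientation, `per_batch_map` is the user hook that this worker pool executes when `python_multiprocessing=True` is passed to `batch()`. A short, self-contained sketch of that user-facing pattern (the padding logic and column name are illustrative only, not taken from the diff):

    import numpy as np
    import mindspore.dataset as ds

    def variable_length_rows():
        for i in range(8):
            yield (np.ones(i + 1, dtype=np.float32),)

    def bucket_pad(col, batch_info):
        # per_batch_map receives one list per input column plus BatchInfo;
        # pad every sample to the longest sample in this batch.
        max_len = max(arr.shape[0] for arr in col)
        return ([np.pad(arr, (0, max_len - arr.shape[0])) for arr in col],)

    dataset = ds.GeneratorDataset(variable_length_rows, ["data"], shuffle=False)
    dataset = dataset.batch(4, per_batch_map=bucket_pad, input_columns=["data"],
                            python_multiprocessing=True, num_parallel_workers=2)
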
@@ -3200,9 +3210,21 @@ def _worker_loop(operations, pipe, worker_id):


 def worker_target(operations, worker_id):
+    logger.info("Multiprocessing start method: {}".format(multiprocessing.get_start_method()))
     return lambda pipe: _worker_loop(operations, pipe, worker_id)


+class WorkerTarget:
+    def __init__(self, operations, pipe, worker_id):
+        self.operations = operations
+        self.pipe = pipe
+        self.worker_id = worker_id
+        logger.info("Multiprocessing start method: {}".format(multiprocessing.get_start_method()))
+
+    def __call__(self):
+        return _worker_loop(self.operations, self.pipe, self.worker_id)
+
+
 class _MPWorker(multiprocessing.Process):
     """
     Worker process for multiprocessing.

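The new `WorkerTarget` class exists because the closure returned by `worker_target` cannot be pickled, and the `spawn` start method needs a picklable process target. A minimal illustration with plain `multiprocessing` (not MindSpore internals):

    import multiprocessing as mp

    class Target:
        """Top-level, picklable callable: works as a Process target under spawn."""
        def __init__(self, worker_id):
            self.worker_id = worker_id

        def __call__(self):
            print("worker", self.worker_id, "started")

    if __name__ == "__main__":
        ctx = mp.get_context("spawn")
        p = ctx.Process(target=Target(0), daemon=True)  # a lambda here would fail to pickle
        p.start()
        p.join()
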
@@ -3257,6 +3279,12 @@ class _MPWorker(multiprocessing.Process):

             logger.info(f"Closing worker with PID: {self.pid}")
             self.pipe.master_close()
+
+            process_dir = os.path.join('/proc', str(self.pid))
+            while self.is_alive() and os.path.exists(process_dir):
+                logger.info("Waiting for worker {} closed ...".format(self.pid))
+                time.sleep(0.001)
+
             # del the handle which hold by master
             del self.pipe.in_queue
             del self.pipe.res_queue

@@ -3276,6 +3304,41 @@ class _MPWorker(multiprocessing.Process):
             return False


+def worker_is_alive(worker):
+    """Check the subprocess worker status in spawn mode"""
+    try:
+        return worker.is_alive()
+    except ValueError:
+        return False
+
+
+def close_worker(worker, pipe):
+    """Close the subprocess worker in spawn mode"""
+    try:
+        if worker_is_alive(worker):
+            # release the eager executor which is used by current process
+            transforms.transforms.clean_unused_executors()
+
+            logger.info(f"Closing worker with PID: {worker.pid}")
+            pipe.master_close()
+
+            process_dir = os.path.join('/proc', str(worker.pid))
+            while worker_is_alive(worker) and os.path.exists(process_dir):
+                logger.info("Waiting for worker {} closed ...".format(worker.pid))
+                time.sleep(0.5)
+
+            # del the handle which hold by master
+            del pipe.in_queue
+            del pipe.res_queue
+            worker.terminate()
+            worker.join()
+            worker.close()
+    except ValueError:
+        # Process has been closed already
+        return
+    return
+
+
 class _PythonMultiprocessing(cde.PythonMultiprocessingRuntime):
     """
     A wrapper to multiprocessing.pool that performs cleanup and ensure proper termination of forked processes.

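`worker_is_alive` swallows `ValueError` because `multiprocessing.Process.is_alive()` raises it once the process object has been `close()`d. A quick reproduction of that behaviour:

    import multiprocessing as mp

    if __name__ == "__main__":
        p = mp.Process(target=print, args=("hello",))
        p.start()
        p.join()
        p.close()           # releases the Process object's resources
        try:
            p.is_alive()    # raises ValueError: process object is closed
        except ValueError:
            print("process object already closed")
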
@@ -3302,10 +3365,11 @@ class _PythonMultiprocessing(cde.PythonMultiprocessingRuntime):
             self.origin_hook(ex_type, value, tb)
             self.mp_pool_exit_preprocess()

-    def __init__(self,
+    def __init__(self, start_method, num_parallel_workers, op_name, operations, max_rowsize=(-1, -1)):
         super(_PythonMultiprocessing, self).__init__()
-        self.
+        self.start_method = start_method  # python multiprocssing start method: fork / spawn
         self.num_parallel_workers = num_parallel_workers
+        self.op_name = op_name
         self.operations = operations
         self.max_rowsize = max_rowsize

@@ -3316,8 +3380,7 @@ class _PythonMultiprocessing(cde.PythonMultiprocessingRuntime):
         self.queues_map = {}
         self.next_queue = 0

-        self.
-        self.watch_dog = None
+        self.cleaning_process = None
         self.ppid = None
         self.hook = None
         self.warning_ctl = None

@@ -3331,60 +3394,6 @@ class _PythonMultiprocessing(cde.PythonMultiprocessingRuntime):
         except TypeError:
             pass

-    # This wait function is for cleaning zombie subprocesses
-    @staticmethod
-    def wait_pid():
-        """
-        This function is used by the main process to release subprocess resources.
-        """
-        try:
-            while True:
-                child_pid, _ = os.waitpid(-1, os.WNOHANG)
-                if child_pid == 0:
-                    break
-        except OSError:
-            # waitpid may fail for some reason, so we ignore this error
-            pass
-
-    # Dataset need watch_dog thread to monitoring fork multiprocessing,
-    # and thread can't be a member function otherwise python won't collect and release resources.
-    @staticmethod
-    def _watch_dog(eot, workers):
-        """
-        This thread is for monitoring subprocesses forked by GeneratorDataset/map/batch
-        """
-        if not isinstance(workers, list):
-            raise TypeError("[Internal Error] The 2nd parameter of watch dog thread should be list of process, "
-                            "but got {}.".format(type(workers)))
-
-        while not eot.is_set():
-            # Monitoring and count how many subprocesses already exit
-            clear_subprocess_timeout = _PythonMultiprocessing._monitor_subprocess_exit(workers)
-            # If find subprocess exit, we will wait for 30s and do some waitpid operations
-            if clear_subprocess_timeout > 0:
-                start = time.time()
-                while time.time() - start < clear_subprocess_timeout:
-                    # We need to distinguishing get_dataset_size or train finished normally and hang scenario.
-                    # If get_dataset_size or train finished normally, _stop_subprocess can be execute and
-                    # self.need_abort can be set to True. If main process is hang in get(), self.need_abort
-                    # will never set to True, then we wait for 30s and kill main process
-                    if eot.is_set():
-                        return
-                    # Sometimes subprocess may be zombie, so in 30s we can wait and do some useful tasks(waitpid).
-                    _PythonMultiprocessing.wait_pid()
-                # multiprocessing.queue may hang in .get() forever when put() process was killed.
-                # We have to exit main process otherwise main process will hang.
-                _PythonMultiprocessing._terminate_processes(workers)
-                logger.critical("The subprocess of dataset may exit unexpected or be killed, "
-                                "main process will exit. If this is not an artificial operation, you can use "
-                                "ds.config.set_enable_watchdog(False) to block this error.")
-                os.kill(os.getpid(), signal.SIGTERM)
-            # sleep to release GIL
-            time.sleep(1)
-
-        # release the workers
-        del workers
-
     @staticmethod
     def _terminate_processes(processes):
         """Terminate subprocesses"""

@@ -3401,45 +3410,12 @@ class _PythonMultiprocessing(cde.PythonMultiprocessingRuntime):
             # We don't use w.join because join can only used in main process or join will raise an error.
             p._popen.wait()  # pylint: disable=W0212

-    # Monitor the exit number of subprocesses
-    @staticmethod
-    def _monitor_subprocess_exit(workers):
-        """
-        To monitor whether process is exit.
-
-        Args:
-            workers (list of multiprocessing.Process): multiprocessing.Process.
-
-        Returns:
-            int, the timeout(in seconds) when process exit.
-        """
-        for w in workers:
-            try:
-                exit_code = w.exitcode
-                if exit_code is not None:
-                    # For kill -9, we can exit quickly
-                    if exit_code == -9:
-                        return 1
-                    # For kill -15, we still exit after 30s
-                    if exit_code == -15:
-                        return 30
-                # In some cases the subprocess has been killed but the exitcode is still None.
-                # So we use os.kill(pid, 0) to check if it is alive.
-                subprocess_alive = _PythonMultiprocessing.is_process_alive(w.pid)
-                if not subprocess_alive:
-                    # Like kill -15, we wait 30s before exit
-                    return 30
-            except ValueError:
-                # process has been closed already
-                return 0
-        return 0
-
     @staticmethod
     def is_process_alive(pid):
         """
         Check if the process is alive or not.
         Note: We hit a deadlock when we use psutil or w.exitcode to check whether a process is alive.
-        Instead we use os.kill(ppid, 0).
+        Instead, we use os.kill(ppid, 0).

         Args:
             pid: pid of the process to be checked

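The docstring above only changes wording; the probe it describes is the usual signal-0 check, roughly:

    import errno
    import os

    def probe(pid):
        # Signal 0 delivers nothing; it only checks whether the PID can be signalled.
        try:
            os.kill(pid, 0)
        except OSError as err:
            # EPERM means the process exists but belongs to another user.
            return err.errno == errno.EPERM
        return True
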
@@ -3466,6 +3442,8 @@ class _PythonMultiprocessing(cde.PythonMultiprocessingRuntime):
             quit_signal: The flag of quit.
         """
         signal.signal(signal.SIGINT, signal.SIG_IGN)
+        # Initialize C++ side signal handlers
+        cde.register_worker_handlers()
         while _PythonMultiprocessing.is_process_alive(ppid):
             if quit_signal.is_set():
                 return

@@ -3477,6 +3455,8 @@ class _PythonMultiprocessing(cde.PythonMultiprocessingRuntime):

             time.sleep(0.1)

+        logger.info("Clean process detects that the main process {} has exited, begin to terminate the "
+                    "worker process(es): {}".format(ppid, [worker.pid for worker in workers]))
         _PythonMultiprocessing._terminate_processes(workers)
         del workers
         os.kill(os.getpid(), signal.SIGTERM)

@@ -3493,10 +3473,10 @@ class _PythonMultiprocessing(cde.PythonMultiprocessingRuntime):
         """
         self.python_threads_to_workers = {}
         self.op_id = op_id
-        logger.info("Launching new Python
+        logger.info("Launching new Python multiprocessing pool for Op: " + str(self.op_id))
         if self.is_mp_enabled():
             message = "Launching a new Python multiprocessing pool while a pool already exists!" + \
-
+                      " The existing pool will be terminated first."
             logger.warning(message)
             self.terminate()
             self.reset()

@@ -3515,29 +3495,44 @@ class _PythonMultiprocessing(cde.PythonMultiprocessingRuntime):
         if self.workers is not None:
             raise Exception("Pool was already created, close it first.")

-        # Let gc collect unreferenced memory to avoid child processes in the pool to do it
-        gc.collect()
-
-        # Construct python worker processes
         self.workers = []
+        self.pipes = []
+        self.check_interval = get_multiprocessing_timeout_interval()
         self.warning_ctl = multiprocessing.Value('i', 0)
-
-
-
-
+        if self.start_method == "fork":
+            # Construct python worker processes
+            for worker_id in range(self.num_parallel_workers):
+                worker = _MPWorker(self.operations, self.warning_ctl, self.max_rowsize, worker_id)
+                worker.start()
+                self.workers.append(worker)
+        else:
+            multiprocessing.set_start_method(self.start_method, True)
+
+            # Construct python worker processes
+            for worker_id in range(self.num_parallel_workers):
+                shared_memory = get_enable_shared_mem()
+                pipe = Pipe(self.warning_ctl, shared_memory=shared_memory, max_rowsize=self.max_rowsize)
+                self.check_interval = get_multiprocessing_timeout_interval()
+                worker = multiprocessing.Process(target=WorkerTarget(self.operations, pipe, worker_id),
+                                                 name="MapWorker" + str(worker_id), daemon=True)
+                self.workers.append(worker)
+                self.pipes.append(pipe)
+                worker.start()
+
+            multiprocessing.set_start_method("fork", True)

-        logger.info("
+        logger.info("Launch worker process(es): {}".format(self.get_pids()))

         self.hook = _PythonMultiprocessing._ExceptHookHandler()

-        #
-        self.
+        # Launch a clean process and register worker processes to be monitored by the watch dog.
+        self._launch_monitor()

         atexit.register(self.terminate)

     def terminate(self):
-        #
-        self.
+        # abort the monitor first and then close all the workers
+        self._abort_monitor()
         self.close_all_workers()
         if hasattr(self, "warning_ctl"):
             del self.warning_ctl

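Worker construction now branches on `self.start_method`: `fork` keeps the old `_MPWorker` path, while any other value temporarily switches Python's start method, spawns `multiprocessing.Process` workers driven by `WorkerTarget`, then restores `fork`. A usage sketch, assuming the dataset config exposes a setter named `set_multiprocessing_start_method` to pair with the `get_multiprocessing_start_method` imported above:

    import numpy as np
    import mindspore.dataset as ds

    def double(x):
        return x * 2

    # Assumed 2.5.0 config API paired with get_multiprocessing_start_method.
    ds.config.set_multiprocessing_start_method("spawn")

    dataset = ds.NumpySlicesDataset(np.arange(100, dtype=np.int32), column_names=["data"])
    # In spawn mode the operations are pickled into the worker process, so they
    # must be top-level callables rather than lambdas or closures.
    dataset = dataset.map(operations=double, python_multiprocessing=True,
                          num_parallel_workers=2)
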
@@ -3596,15 +3591,48 @@ class _PythonMultiprocessing(cde.PythonMultiprocessingRuntime):

         # todo check_iterator_cleanup
         if self.is_running() and check_iterator_cleanup() is False:
-
+            if self.start_method == "fork":
+                return self.workers[worker_id].execute(idx, *args)
+            # spawn mode
+            self.pipes[worker_id].master_send(idx, args)
+            time_s = time.time()
+            wait_count = 1
+            while True:
+                cost_time = time.time() - time_s
+                if cost_time / self.check_interval >= wait_count:
+                    wait_count += 1
+                    logger.warning("It has been waiting for " + "%.3f" % cost_time + "s because the sub-process "
+                                   "worker of the map operation is hanging. "
+                                   "Check whether the user defined data transform is too slow or the "
+                                   "output data is too large. You can also set the timeout interval by "
+                                   "ds.config.set_multiprocessing_timeout_interval to adjust the output frequency "
+                                   "of this log.")
+                    pid = self.workers[worker_id].pid
+                    logger.warning("Map worker subprocess ID {} is stuck.".format(pid))
+                    install_status, _ = subprocess.getstatusoutput("py-spy --version")
+                    if install_status == 0:
+                        stack = subprocess.getoutput("py-spy dump -p {} -l".format(pid))
+                        logger.warning("Map worker subprocess stack:\n{}".format(stack))
+                    else:
+                        logger.warning("Please `pip install py-spy` to get the stacks of the stuck process.")
+                try:
+                    res = self.pipes[worker_id].master_receive()
+                except queue.Empty:
+                    continue
+                if res is None:
+                    # receive finish signal
+                    return None
+                if isinstance(res, ExceptionHandler):
+                    res.reraise()
+                return res

         return None

-    def
+    def _launch_monitor(self):
         """
-
-        The
-        The
+        Launch a clean process and register subprocess to be monitored by the watch dog.
+        The clean process will clean up subprocesses when main process exited.
+        The watch dog will clean up subprocesses and main process when any subprocess exited.
         """
         if platform.system().lower() != 'windows':
             self.eof = multiprocessing.Event()

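The hang diagnostics above are driven by `ds.config.set_multiprocessing_timeout_interval`, and the stack dump is only produced when the external `py-spy` tool is installed (`pip install py-spy`). For example, to warn every ten minutes instead of the default interval:

    import mindspore.dataset as ds

    # Emit the "worker ... is hanging" warning (and the optional py-spy stack dump)
    # every 600 seconds instead of the default interval.
    ds.config.set_multiprocessing_timeout_interval(600)
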
@@ -3613,38 +3641,45 @@ class _PythonMultiprocessing(cde.PythonMultiprocessingRuntime):
                                                              args=(self.ppid, self.workers, self.eof),
                                                              daemon=True)
             self.cleaning_process.start()
+            logger.info("Launch clean process {} to monitor worker "
+                        "process(es): {}".format(self.cleaning_process.pid, self.get_pids()))

             if get_enable_watchdog():
-
-
-
-
-
-
-
-
-                if
-                self.
-
-    def abort_watchdog(self):
-        if hasattr(self, 'watch_dog') and self.watch_dog is not None and hasattr(self, 'eot') and self.eot is not None:
-            self._abort_watchdog()
+                worker_ids = [worker.pid for worker in self.workers]
+                worker_ids.append(self.cleaning_process.pid)
+                cde.register_worker_pids(id(self), set(worker_ids))
+
+    def _abort_monitor(self):
+        """Deregister workers monitored by the watch dog and join clean process."""
+        if get_enable_watchdog():
+            cde.deregister_worker_pids(id(self))
+        if hasattr(self, 'eof') and self.eof is not None:
+            self.eof.set()
         if hasattr(self, 'cleaning_process') and self.cleaning_process is not None:
-
-
-
+            # let the quit event notify the cleaning process to exit
+            self.cleaning_process.join(timeout=5)
+            if self.cleaning_process.is_alive():
+                # if the cleaning process did not exit, it may hang, try to terminate it
+                _PythonMultiprocessing._terminate_processes([self.cleaning_process])
             del self.cleaning_process

     def is_running(self):
         if hasattr(self, 'workers') and self.workers is not None:
-
+            if self.start_method == "fork":
+                return all([w.is_alive() for w in self.workers])
+            return all([worker_is_alive(w) for w in self.workers])
         return False

     def close_all_workers(self):
         """Close all the subprocess workers"""
         if hasattr(self, 'workers') and self.workers is not None:
-
-            w.
+            if self.start_method == "fork":
+                for w in self.workers:
+                    w.close()
+            else:
+                for i, w in enumerate(self.workers):
+                    close_worker(w, self.pipes[i])
+
             check_interval = get_multiprocessing_timeout_interval()
             for w in self.workers:
                 try:

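The monitor registered here stays behind the existing watchdog switch; when the watchdog is disabled, the `register_worker_pids` call above is skipped as well:

    import mindspore.dataset as ds

    # Turn off the dataset watch dog if killing the main process on an
    # unexpected worker exit is not the desired behaviour.
    ds.config.set_enable_watchdog(False)
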
@@ -3660,8 +3695,12 @@ class _PythonMultiprocessing(cde.PythonMultiprocessingRuntime):
                        continue
                    raise e
                try:
-                    if
-
+                    if self.start_method == "fork":
+                        if w.is_alive():
+                            os.close(subprocess_file_descriptor)
+                    else:
+                        if worker_is_alive(w):
+                            os.close(subprocess_file_descriptor)
                except OSError as e:
                    # Maybe the file descriptor had been released, so ignore the 'Bad file descriptor'
                    if "Bad file descriptor" not in str(e):

@@ -3670,6 +3709,8 @@ class _PythonMultiprocessing(cde.PythonMultiprocessingRuntime):
            # use clear to release the handle which is better than self.workers = None
            self.workers.clear()
            self.workers = None
+           self.pipes.clear()
+           self.pipes = None
            self.pids = None


@@ -3915,8 +3956,9 @@ class MapDataset(UnionBaseDataset):
                    callable_list.append(op)

            if callable_list:
-                self.process_pool = _PythonMultiprocessing(
-                    self.
+                self.process_pool = _PythonMultiprocessing(get_multiprocessing_start_method(),
+                                                           self.num_parallel_workers, str(self),
+                                                           callable_list, self.max_rowsize)
            # Pass #2
            idx = 0
            for op in self.operations:

@@ -4142,6 +4184,7 @@ class ConcatDataset(UnionBaseDataset):
                if isinstance(c, ConcatDataset):
                    c.use_sampler(sampler)
                set_child(c)
+
        set_child(self)

        return

mindspore/dataset/engine/datasets_audio.py

@@ -60,6 +60,8 @@ class CMUArcticDataset(MappableDataset, AudioBaseDataset):
         num_shards (int, optional): Number of shards that the dataset will be divided into.
             Default: ``None``, no dividing. When this argument is specified, `num_samples`
             reflects the max sample number of per shard.
+            Used in `data parallel training <https://www.mindspore.cn/docs/en/master/model_train/
+            parallel/data_parallel.html#data-parallel-mode-loads-datasets>`_ .
         shard_id (int, optional): The shard ID within `num_shards` . Default: ``None``, will use ``0``. This
             argument can only be specified when `num_shards` is also specified.
         cache (DatasetCache, optional): Use tensor caching service to speed up dataset processing. More details:

@@ -177,6 +179,8 @@ class GTZANDataset(MappableDataset, AudioBaseDataset):
             dataset. Default: ``None`` , expected order behavior shown in the table below.
         num_shards (int, optional): Number of shards that the dataset will be divided into. Default: ``None`` .
             When this argument is specified, `num_samples` reflects the max sample number of per shard.
+            Used in `data parallel training <https://www.mindspore.cn/docs/en/master/model_train/
+            parallel/data_parallel.html#data-parallel-mode-loads-datasets>`_ .
         shard_id (int, optional): The shard ID within `num_shards` . Default: ``None`` . This
             argument can only be specified when `num_shards` is also specified.
         cache (DatasetCache, optional): Use tensor caching service to speed up dataset processing. More details:

@@ -295,6 +299,8 @@ class LibriTTSDataset(MappableDataset, AudioBaseDataset):
             dataset. Default: ``None`` , expected order behavior shown in the table below.
         num_shards (int, optional): Number of shards that the dataset will be divided into. Default: ``None`` .
             When this argument is specified, `num_samples` reflects the max sample number of per shard.
+            Used in `data parallel training <https://www.mindspore.cn/docs/en/master/model_train/
+            parallel/data_parallel.html#data-parallel-mode-loads-datasets>`_ .
         shard_id (int, optional): The shard ID within `num_shards` . Default: ``None`` . This
             argument can only be specified when `num_shards` is also specified.
         cache (DatasetCache, optional): Use tensor caching service to speed up dataset processing. More details:

@@ -422,6 +428,8 @@ class LJSpeechDataset(MappableDataset, AudioBaseDataset):
         num_shards (int, optional): Number of shards that the dataset will be divided into.
             Default: ``None`` . When this argument is specified, `num_samples` reflects
             the maximum sample number of per shard.
+            Used in `data parallel training <https://www.mindspore.cn/docs/en/master/model_train/
+            parallel/data_parallel.html#data-parallel-mode-loads-datasets>`_ .
         shard_id (int, optional): The shard ID within `num_shards` . Default: ``None`` . This
             argument can only be specified when `num_shards` is also specified.
         cache (DatasetCache, optional): Use tensor caching service to speed up dataset processing. More details:

@@ -545,6 +553,8 @@ class SpeechCommandsDataset(MappableDataset, AudioBaseDataset):
             Default: ``None`` , expected order behavior shown in the table below.
         num_shards (int, optional): Number of shards that the dataset will be divided into. Default: ``None`` .
             When this argument is specified, `num_samples` reflects the maximum sample number of per shard.
+            Used in `data parallel training <https://www.mindspore.cn/docs/en/master/model_train/
+            parallel/data_parallel.html#data-parallel-mode-loads-datasets>`_ .
         shard_id (int, optional): The shard ID within `num_shards` . Default: ``None`` .
             This argument can only be specified when `num_shards` is also specified.
         cache (DatasetCache, optional): Use tensor caching service to speed up dataset processing. More details:

@@ -658,6 +668,8 @@ class TedliumDataset(MappableDataset, AudioBaseDataset):
         num_shards (int, optional): Number of shards that the dataset will be divided
             into. Default: ``None`` . When this argument is specified, `num_samples` reflects
             the maximum sample number of per shard.
+            Used in `data parallel training <https://www.mindspore.cn/docs/en/master/model_train/
+            parallel/data_parallel.html#data-parallel-mode-loads-datasets>`_ .
         shard_id (int, optional): The shard ID within `num_shards` . Default: ``None`` . This
             argument can only be specified when `num_shards` is also specified.
         cache (DatasetCache, optional): Use tensor caching service to speed up dataset processing. More details:

@@ -838,6 +850,8 @@ class YesNoDataset(MappableDataset, AudioBaseDataset):
             dataset. Default: ``None`` , expected order behavior shown in the table below.
         num_shards (int, optional): Number of shards that the dataset will be divided into. Default: ``None`` .
             When this argument is specified, `num_samples` reflects the maximum sample number of per shard.
+            Used in `data parallel training <https://www.mindspore.cn/docs/en/master/model_train/
+            parallel/data_parallel.html#data-parallel-mode-loads-datasets>`_ .
         shard_id (int, optional): The shard ID within `num_shards` . Default: ``None`` . This argument can only
             be specified when `num_shards` is also specified.
         cache (DatasetCache, optional): Use tensor caching service to speed up dataset processing. More details:

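All seven audio dataset docstrings gain the same pointer to data parallel training; the pattern they link to is sharding by rank. A minimal sketch, meant to run under a distributed launcher (the dataset path is a placeholder):

    import mindspore.dataset as ds
    from mindspore.communication import init, get_rank, get_group_size

    # Each rank loads only its own shard of the dataset.
    init()
    dataset = ds.CMUArcticDataset("/path/to/cmu_arctic",
                                  num_shards=get_group_size(), shard_id=get_rank())
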