mindspore 2.4.1-cp311-cp311-manylinux1_x86_64.whl → 2.5.0-cp311-cp311-manylinux1_x86_64.whl
This diff compares the contents of two publicly released package versions as published to a supported registry. It is provided for informational purposes only and reflects the packages exactly as they appear in their respective public registries.
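Such a file-level comparison can be reproduced locally by downloading both wheels and diffing their archive manifests. The sketch below uses only pip and the Python standard library; it is an approximation of the registry's tooling (it reports per-file line totals rather than true added/removed line counts), and the "wheels" download directory is just an illustrative choice.

    # Minimal sketch: rebuild a file-level manifest comparison of the two wheels.
    # Assumes pip can resolve these exact versions for the current interpreter/platform;
    # line totals stand in for the +added/-removed counts shown in the listing below.
    import pathlib
    import subprocess
    import sys
    import zipfile

    def download(version, dest="wheels"):
        subprocess.run(
            [sys.executable, "-m", "pip", "download", f"mindspore=={version}",
             "--no-deps", "--only-binary", ":all:", "-d", dest],
            check=True)
        return next(pathlib.Path(dest).glob(f"mindspore-{version}-*.whl"))

    def manifest(whl_path):
        """Map each archive member to its line count (0 for binary members)."""
        entries = {}
        with zipfile.ZipFile(whl_path) as zf:
            for name in zf.namelist():
                data = zf.read(name)
                try:
                    entries[name] = data.decode("utf-8").count("\n")
                except UnicodeDecodeError:
                    entries[name] = 0  # binary artifact, rendered as "+0 -0"
        return entries

    old, new = manifest(download("2.4.1")), manifest(download("2.5.0"))
    for name in sorted(set(old) | set(new)):
        if old.get(name) != new.get(name):
            print(f"- {name} +{new.get(name, 0)} -{old.get(name, 0)}")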
Potentially problematic release: this version of mindspore might be problematic.
- mindspore/.commit_id +1 -1
- mindspore/Third_Party_Open_Source_Software_Notice +39 -0
- mindspore/__init__.py +8 -3
- mindspore/_akg/akg/composite/build_module.py +6 -2
- mindspore/_akg/akg/utils/kernel_exec.py +2 -2
- mindspore/_c_dataengine.cpython-311-x86_64-linux-gnu.so +0 -0
- mindspore/_c_expression.cpython-311-x86_64-linux-gnu.so +0 -0
- mindspore/_c_mindrecord.cpython-311-x86_64-linux-gnu.so +0 -0
- mindspore/_checkparam.py +0 -5
- mindspore/_extends/parallel_compile/akg_compiler/gen_custom_op_files.py +1 -1
- mindspore/_extends/parse/compile_config.py +64 -0
- mindspore/_extends/parse/deprecated/__init__.py +0 -0
- mindspore/_extends/parse/deprecated/deprecated_tensor_method.py +375 -0
- mindspore/_extends/parse/parser.py +23 -5
- mindspore/_extends/parse/standard_method.py +123 -27
- mindspore/_extends/pijit/pijit_func_white_list.py +1 -1
- mindspore/amp.py +7 -1
- mindspore/boost/boost_cell_wrapper.py +136 -41
- mindspore/common/__init__.py +3 -1
- mindspore/common/_register_for_tensor.py +0 -1
- mindspore/common/_stub_tensor.py +25 -4
- mindspore/common/_tensor_cpp_method.py +17 -0
- mindspore/common/_tensor_docs.py +6132 -0
- mindspore/common/api.py +99 -25
- mindspore/common/dtype.py +34 -34
- mindspore/common/dump.py +2 -1
- mindspore/common/file_system.py +8 -1
- mindspore/common/generator.py +2 -0
- mindspore/common/hook_handle.py +3 -1
- mindspore/common/initializer.py +3 -4
- mindspore/common/lazy_inline.py +8 -2
- mindspore/common/mindir_util.py +10 -2
- mindspore/common/parameter.py +30 -27
- mindspore/common/tensor.py +713 -1337
- mindspore/communication/__init__.py +1 -1
- mindspore/communication/_comm_helper.py +10 -0
- mindspore/communication/comm_func.py +215 -173
- mindspore/communication/management.py +23 -20
- mindspore/context.py +292 -193
- mindspore/dataset/__init__.py +23 -19
- mindspore/dataset/callback/ds_callback.py +2 -1
- mindspore/dataset/core/config.py +84 -3
- mindspore/dataset/engine/cache_admin.py +3 -3
- mindspore/dataset/engine/cache_client.py +5 -4
- mindspore/dataset/engine/datasets.py +192 -149
- mindspore/dataset/engine/datasets_audio.py +14 -0
- mindspore/dataset/engine/datasets_standard_format.py +28 -11
- mindspore/dataset/engine/datasets_text.py +38 -1
- mindspore/dataset/engine/datasets_user_defined.py +125 -65
- mindspore/dataset/engine/datasets_vision.py +81 -8
- mindspore/dataset/engine/iterators.py +281 -63
- mindspore/dataset/engine/obs/util.py +8 -0
- mindspore/dataset/engine/queue.py +40 -0
- mindspore/dataset/engine/samplers.py +26 -2
- mindspore/dataset/engine/serializer_deserializer.py +1 -1
- mindspore/dataset/engine/validators.py +43 -11
- mindspore/dataset/transforms/py_transforms_util.py +17 -0
- mindspore/dataset/transforms/transforms.py +29 -12
- mindspore/dataset/vision/validators.py +1 -2
- mindspore/device_context/__init__.py +21 -0
- mindspore/device_context/ascend/__init__.py +25 -0
- mindspore/device_context/ascend/device.py +72 -0
- mindspore/device_context/ascend/op_debug.py +94 -0
- mindspore/device_context/ascend/op_precision.py +193 -0
- mindspore/device_context/ascend/op_tuning.py +127 -0
- mindspore/device_context/cpu/__init__.py +25 -0
- mindspore/device_context/cpu/device.py +62 -0
- mindspore/device_context/cpu/op_tuning.py +43 -0
- mindspore/device_context/gpu/__init__.py +21 -0
- mindspore/device_context/gpu/device.py +70 -0
- mindspore/device_context/gpu/op_precision.py +67 -0
- mindspore/device_context/gpu/op_tuning.py +175 -0
- mindspore/device_manager.py +134 -0
- mindspore/experimental/llm_boost/__init__.py +3 -2
- mindspore/experimental/llm_boost/ascend_native/__init__.py +22 -0
- mindspore/experimental/llm_boost/ascend_native/llama_boost_ascend_native.py +211 -0
- mindspore/experimental/llm_boost/ascend_native/llm_boost.py +52 -0
- mindspore/experimental/llm_boost/atb/boost_base.py +239 -64
- mindspore/experimental/llm_boost/atb/llama_boost.py +52 -30
- mindspore/experimental/llm_boost/atb/qwen_boost.py +47 -24
- mindspore/experimental/llm_boost/register.py +1 -0
- mindspore/experimental/optim/adadelta.py +26 -22
- mindspore/experimental/optim/adam.py +3 -0
- mindspore/experimental/optim/lr_scheduler.py +33 -24
- mindspore/experimental/optim/radam.py +33 -30
- mindspore/hal/device.py +28 -0
- mindspore/hal/event.py +17 -0
- mindspore/hal/memory.py +94 -3
- mindspore/hal/stream.py +91 -6
- mindspore/include/api/context.h +1 -2
- mindspore/include/dataset/constants.h +2 -2
- mindspore/lib/libavcodec.so.59 +0 -0
- mindspore/lib/libavdevice.so.59 +0 -0
- mindspore/lib/libavfilter.so.8 +0 -0
- mindspore/lib/libavformat.so.59 +0 -0
- mindspore/lib/libavutil.so.57 +0 -0
- mindspore/lib/libdnnl.so.2 +0 -0
- mindspore/lib/libmindspore_backend.so +0 -0
- mindspore/lib/libmindspore_common.so +0 -0
- mindspore/lib/libmindspore_core.so +0 -0
- mindspore/lib/libmindspore_glog.so.0 +0 -0
- mindspore/lib/libmindspore_gpr.so.15 +0 -0
- mindspore/lib/libmindspore_grpc++.so.1 +0 -0
- mindspore/lib/libmindspore_grpc.so.15 +0 -0
- mindspore/lib/libmindspore_ops.so +0 -0
- mindspore/lib/libmpi_adapter.so +0 -0
- mindspore/lib/libmpi_collective.so +0 -0
- mindspore/lib/libnnacl.so +0 -0
- mindspore/lib/libopencv_core.so.4.5 +0 -0
- mindspore/lib/libopencv_imgcodecs.so.4.5 +0 -0
- mindspore/lib/libopencv_imgproc.so.4.5 +0 -0
- mindspore/lib/libps_cache.so +0 -0
- mindspore/lib/libswresample.so.4 +0 -0
- mindspore/lib/libswscale.so.6 +0 -0
- mindspore/lib/plugin/ascend/custom_aicore_ops/op_impl/ai_core/tbe/config/ascend910_93/aic-ascend910_93-ops-info.json +2048 -0
- mindspore/lib/plugin/ascend/custom_aicore_ops/op_proto/libop_proto.so +0 -0
- mindspore/lib/plugin/ascend/custom_aicpu_ops/op_impl/cpu/aicpu_kernel/impl/libcust_cpu_kernels.so +0 -0
- mindspore/lib/plugin/ascend/custom_aicpu_ops/op_proto/libcust_op_proto.so +0 -0
- mindspore/lib/plugin/ascend/custom_ascendc_910/framework/npu_supported_ops.json +10 -0
- mindspore/lib/plugin/ascend/custom_ascendc_910/op_api/lib/libcust_opapi.so +0 -0
- mindspore/lib/plugin/ascend/custom_ascendc_910/op_impl/ai_core/tbe/config/ascend910/aic-ascend910-ops-info.json +182 -0
- mindspore/lib/plugin/ascend/{custom_ascendc_ops/op_impl/ai_core/tbe/custom_ascendc_ops_impl → custom_ascendc_910/op_impl/ai_core/tbe/custom_ascendc_910_impl}/dynamic/decoder_kv_cache.py +51 -16
- mindspore/lib/plugin/ascend/{custom_ascendc_ops/op_impl/ai_core/tbe/custom_ascendc_ops_impl → custom_ascendc_910/op_impl/ai_core/tbe/custom_ascendc_910_impl}/dynamic/prompt_kv_cache.py +51 -16
- mindspore/lib/plugin/ascend/custom_ascendc_910/op_impl/ai_core/tbe/kernel/ascend910/decoder_kv_cache/DecoderKvCache_0d5520cc587ad44ce634bf3fbcffc272.json +158 -0
- mindspore/lib/plugin/ascend/custom_ascendc_910/op_impl/ai_core/tbe/kernel/ascend910/decoder_kv_cache/DecoderKvCache_0d5520cc587ad44ce634bf3fbcffc272.o +0 -0
- mindspore/lib/plugin/ascend/custom_ascendc_910/op_impl/ai_core/tbe/kernel/ascend910/decoder_kv_cache/DecoderKvCache_20390d30b3c4c0d23167ccca6c030c2b.json +158 -0
- mindspore/lib/plugin/ascend/custom_ascendc_910/op_impl/ai_core/tbe/kernel/ascend910/decoder_kv_cache/DecoderKvCache_20390d30b3c4c0d23167ccca6c030c2b.o +0 -0
- mindspore/lib/plugin/ascend/custom_ascendc_910/op_impl/ai_core/tbe/kernel/ascend910/decoder_kv_cache/DecoderKvCache_2d151f0b1d2db51faa2968d5b67544e2.json +158 -0
- mindspore/lib/plugin/ascend/custom_ascendc_910/op_impl/ai_core/tbe/kernel/ascend910/decoder_kv_cache/DecoderKvCache_2d151f0b1d2db51faa2968d5b67544e2.o +0 -0
- mindspore/lib/plugin/ascend/custom_ascendc_910/op_impl/ai_core/tbe/kernel/ascend910/decoder_kv_cache/DecoderKvCache_561690ec17cc1def3d2fcf68c1b07b56.json +158 -0
- mindspore/lib/plugin/ascend/custom_ascendc_910/op_impl/ai_core/tbe/kernel/ascend910/decoder_kv_cache/DecoderKvCache_561690ec17cc1def3d2fcf68c1b07b56.o +0 -0
- mindspore/lib/plugin/ascend/custom_ascendc_910/op_impl/ai_core/tbe/kernel/ascend910/decoder_kv_cache/DecoderKvCache_570f9aaa99e5e773b3dd0a33784363f4.json +158 -0
- mindspore/lib/plugin/ascend/custom_ascendc_910/op_impl/ai_core/tbe/kernel/ascend910/decoder_kv_cache/DecoderKvCache_570f9aaa99e5e773b3dd0a33784363f4.o +0 -0
- mindspore/lib/plugin/ascend/custom_ascendc_910/op_impl/ai_core/tbe/kernel/ascend910/decoder_kv_cache/DecoderKvCache_59668a0f0764afb98fda8ab9e84126f1.json +158 -0
- mindspore/lib/plugin/ascend/custom_ascendc_910/op_impl/ai_core/tbe/kernel/ascend910/decoder_kv_cache/DecoderKvCache_59668a0f0764afb98fda8ab9e84126f1.o +0 -0
- mindspore/lib/plugin/ascend/custom_ascendc_910/op_impl/ai_core/tbe/kernel/ascend910/decoder_kv_cache/DecoderKvCache_91d9833e4792b70b670e4e2b916abd86.json +158 -0
- mindspore/lib/plugin/ascend/custom_ascendc_910/op_impl/ai_core/tbe/kernel/ascend910/decoder_kv_cache/DecoderKvCache_91d9833e4792b70b670e4e2b916abd86.o +0 -0
- mindspore/lib/plugin/ascend/custom_ascendc_910/op_impl/ai_core/tbe/kernel/ascend910/decoder_kv_cache/DecoderKvCache_c74cdc5fef094383401856f8519504af.json +158 -0
- mindspore/lib/plugin/ascend/custom_ascendc_910/op_impl/ai_core/tbe/kernel/ascend910/decoder_kv_cache/DecoderKvCache_c74cdc5fef094383401856f8519504af.o +0 -0
- mindspore/lib/plugin/ascend/custom_ascendc_910/op_impl/ai_core/tbe/kernel/ascend910/prompt_kv_cache/PromptKvCache_0515c7b1a4cd614449e38c5e9a7e3f8d.json +167 -0
- mindspore/lib/plugin/ascend/custom_ascendc_910/op_impl/ai_core/tbe/kernel/ascend910/prompt_kv_cache/PromptKvCache_0515c7b1a4cd614449e38c5e9a7e3f8d.o +0 -0
- mindspore/lib/plugin/ascend/custom_ascendc_910/op_impl/ai_core/tbe/kernel/ascend910/prompt_kv_cache/PromptKvCache_09f22d898d6358c91e7c4fc48bac48e7.json +167 -0
- mindspore/lib/plugin/ascend/custom_ascendc_910/op_impl/ai_core/tbe/kernel/ascend910/prompt_kv_cache/PromptKvCache_09f22d898d6358c91e7c4fc48bac48e7.o +0 -0
- mindspore/lib/plugin/ascend/custom_ascendc_910/op_impl/ai_core/tbe/kernel/ascend910/prompt_kv_cache/PromptKvCache_0cb9a6f894b925250227136e5aab7061.json +167 -0
- mindspore/lib/plugin/ascend/custom_ascendc_910/op_impl/ai_core/tbe/kernel/ascend910/prompt_kv_cache/PromptKvCache_0cb9a6f894b925250227136e5aab7061.o +0 -0
- mindspore/lib/plugin/ascend/custom_ascendc_910/op_impl/ai_core/tbe/kernel/ascend910/prompt_kv_cache/PromptKvCache_2fa8702ffd7ca85e9e194f62644415d5.json +167 -0
- mindspore/lib/plugin/ascend/custom_ascendc_910/op_impl/ai_core/tbe/kernel/ascend910/prompt_kv_cache/PromptKvCache_2fa8702ffd7ca85e9e194f62644415d5.o +0 -0
- mindspore/lib/plugin/ascend/custom_ascendc_910/op_impl/ai_core/tbe/kernel/ascend910/prompt_kv_cache/PromptKvCache_570b62f187dfd439b64613d881deedb7.json +167 -0
- mindspore/lib/plugin/ascend/custom_ascendc_910/op_impl/ai_core/tbe/kernel/ascend910/prompt_kv_cache/PromptKvCache_570b62f187dfd439b64613d881deedb7.o +0 -0
- mindspore/lib/plugin/ascend/custom_ascendc_910/op_impl/ai_core/tbe/kernel/ascend910/prompt_kv_cache/PromptKvCache_585218c11411ff84709b9e725b66c435.json +167 -0
- mindspore/lib/plugin/ascend/custom_ascendc_910/op_impl/ai_core/tbe/kernel/ascend910/prompt_kv_cache/PromptKvCache_585218c11411ff84709b9e725b66c435.o +0 -0
- mindspore/lib/plugin/ascend/custom_ascendc_910/op_impl/ai_core/tbe/kernel/ascend910/prompt_kv_cache/PromptKvCache_5c9365ccde170b358c5b126d69dae13e.json +167 -0
- mindspore/lib/plugin/ascend/custom_ascendc_910/op_impl/ai_core/tbe/kernel/ascend910/prompt_kv_cache/PromptKvCache_5c9365ccde170b358c5b126d69dae13e.o +0 -0
- mindspore/lib/plugin/ascend/custom_ascendc_910/op_impl/ai_core/tbe/kernel/ascend910/prompt_kv_cache/PromptKvCache_6d97c45b7c43bc16fcff8baa5dacac4e.json +167 -0
- mindspore/lib/plugin/ascend/custom_ascendc_910/op_impl/ai_core/tbe/kernel/ascend910/prompt_kv_cache/PromptKvCache_6d97c45b7c43bc16fcff8baa5dacac4e.o +0 -0
- mindspore/lib/plugin/ascend/custom_ascendc_910/op_impl/ai_core/tbe/kernel/config/ascend910/binary_info_config.json +302 -0
- mindspore/lib/plugin/ascend/custom_ascendc_910/op_impl/ai_core/tbe/kernel/config/ascend910/decoder_kv_cache.json +892 -0
- mindspore/lib/plugin/ascend/custom_ascendc_910/op_impl/ai_core/tbe/kernel/config/ascend910/prompt_kv_cache.json +892 -0
- mindspore/lib/plugin/ascend/custom_ascendc_910/op_impl/ai_core/tbe/op_tiling/lib/linux/x86_64/libcust_opmaster_rt2.0.so +0 -0
- mindspore/lib/plugin/ascend/custom_ascendc_910/op_impl/ai_core/tbe/op_tiling/liboptiling.so +0 -0
- mindspore/lib/plugin/ascend/custom_ascendc_910/op_proto/inc/op_proto.h +33 -0
- mindspore/lib/plugin/ascend/custom_ascendc_910/op_proto/lib/linux/x86_64/libcust_opsproto_rt2.0.so +0 -0
- mindspore/lib/plugin/ascend/custom_ascendc_910/version.info +1 -0
- mindspore/lib/plugin/ascend/custom_ascendc_910b/framework/npu_supported_ops.json +14 -0
- mindspore/lib/plugin/ascend/custom_ascendc_910b/op_api/include/aclnn_decoder_kv_cache.h +59 -0
- mindspore/lib/plugin/ascend/custom_ascendc_910b/op_api/include/aclnn_prompt_kv_cache.h +59 -0
- mindspore/lib/plugin/ascend/custom_ascendc_910b/op_api/lib/libcust_opapi.so +0 -0
- mindspore/lib/plugin/ascend/{custom_ascendc_ops/op_impl/ai_core/tbe/custom_ascendc_ops_impl → custom_ascendc_910b/op_impl/ai_core/tbe/custom_ascendc_910b_impl}/dynamic/all_finite.py +51 -16
- mindspore/lib/plugin/ascend/custom_ascendc_910b/op_impl/ai_core/tbe/custom_ascendc_910b_impl/dynamic/decoder_kv_cache.cpp +192 -0
- mindspore/lib/plugin/ascend/custom_ascendc_910b/op_impl/ai_core/tbe/custom_ascendc_910b_impl/dynamic/decoder_kv_cache.py +215 -0
- mindspore/lib/plugin/ascend/custom_ascendc_910b/op_impl/ai_core/tbe/custom_ascendc_910b_impl/dynamic/prompt_kv_cache.cpp +274 -0
- mindspore/lib/plugin/ascend/custom_ascendc_910b/op_impl/ai_core/tbe/custom_ascendc_910b_impl/dynamic/prompt_kv_cache.py +215 -0
- mindspore/lib/plugin/ascend/custom_ascendc_910b/op_impl/ai_core/tbe/kernel/ascend310p/all_finite/AllFinite_52f59e2a65d9b1bb002de35c2819754a.json +80 -0
- mindspore/lib/plugin/ascend/custom_ascendc_910b/op_impl/ai_core/tbe/kernel/ascend310p/all_finite/AllFinite_52f59e2a65d9b1bb002de35c2819754a.o +0 -0
- mindspore/lib/plugin/ascend/custom_ascendc_910b/op_impl/ai_core/tbe/kernel/ascend310p/all_finite/AllFinite_6b5e50e30256d85838d6ce83514df20f.json +80 -0
- mindspore/lib/plugin/ascend/custom_ascendc_910b/op_impl/ai_core/tbe/kernel/ascend310p/all_finite/AllFinite_6b5e50e30256d85838d6ce83514df20f.o +0 -0
- mindspore/lib/plugin/ascend/custom_ascendc_910b/op_impl/ai_core/tbe/kernel/ascend310p/all_finite/AllFinite_74e4ac02880d452e3308c94af273562e.json +80 -0
- mindspore/lib/plugin/ascend/custom_ascendc_910b/op_impl/ai_core/tbe/kernel/ascend310p/all_finite/AllFinite_74e4ac02880d452e3308c94af273562e.o +0 -0
- mindspore/lib/plugin/ascend/custom_ascendc_910b/op_impl/ai_core/tbe/kernel/ascend310p/decoder_kv_cache/DecoderKvCache_0d5520cc587ad44ce634bf3fbcffc272.json +158 -0
- mindspore/lib/plugin/ascend/custom_ascendc_910b/op_impl/ai_core/tbe/kernel/ascend310p/decoder_kv_cache/DecoderKvCache_0d5520cc587ad44ce634bf3fbcffc272.o +0 -0
- mindspore/lib/plugin/ascend/custom_ascendc_910b/op_impl/ai_core/tbe/kernel/ascend310p/decoder_kv_cache/DecoderKvCache_20390d30b3c4c0d23167ccca6c030c2b.json +158 -0
- mindspore/lib/plugin/ascend/custom_ascendc_910b/op_impl/ai_core/tbe/kernel/ascend310p/decoder_kv_cache/DecoderKvCache_20390d30b3c4c0d23167ccca6c030c2b.o +0 -0
- mindspore/lib/plugin/ascend/custom_ascendc_910b/op_impl/ai_core/tbe/kernel/ascend310p/decoder_kv_cache/DecoderKvCache_2d151f0b1d2db51faa2968d5b67544e2.json +158 -0
- mindspore/lib/plugin/ascend/custom_ascendc_910b/op_impl/ai_core/tbe/kernel/ascend310p/decoder_kv_cache/DecoderKvCache_2d151f0b1d2db51faa2968d5b67544e2.o +0 -0
- mindspore/lib/plugin/ascend/custom_ascendc_910b/op_impl/ai_core/tbe/kernel/ascend310p/decoder_kv_cache/DecoderKvCache_561690ec17cc1def3d2fcf68c1b07b56.json +158 -0
- mindspore/lib/plugin/ascend/custom_ascendc_910b/op_impl/ai_core/tbe/kernel/ascend310p/decoder_kv_cache/DecoderKvCache_561690ec17cc1def3d2fcf68c1b07b56.o +0 -0
- mindspore/lib/plugin/ascend/custom_ascendc_910b/op_impl/ai_core/tbe/kernel/ascend310p/decoder_kv_cache/DecoderKvCache_570f9aaa99e5e773b3dd0a33784363f4.json +158 -0
- mindspore/lib/plugin/ascend/custom_ascendc_910b/op_impl/ai_core/tbe/kernel/ascend310p/decoder_kv_cache/DecoderKvCache_570f9aaa99e5e773b3dd0a33784363f4.o +0 -0
- mindspore/lib/plugin/ascend/custom_ascendc_910b/op_impl/ai_core/tbe/kernel/ascend310p/decoder_kv_cache/DecoderKvCache_59668a0f0764afb98fda8ab9e84126f1.json +158 -0
- mindspore/lib/plugin/ascend/custom_ascendc_910b/op_impl/ai_core/tbe/kernel/ascend310p/decoder_kv_cache/DecoderKvCache_59668a0f0764afb98fda8ab9e84126f1.o +0 -0
- mindspore/lib/plugin/ascend/custom_ascendc_910b/op_impl/ai_core/tbe/kernel/ascend310p/decoder_kv_cache/DecoderKvCache_91d9833e4792b70b670e4e2b916abd86.json +158 -0
- mindspore/lib/plugin/ascend/custom_ascendc_910b/op_impl/ai_core/tbe/kernel/ascend310p/decoder_kv_cache/DecoderKvCache_91d9833e4792b70b670e4e2b916abd86.o +0 -0
- mindspore/lib/plugin/ascend/custom_ascendc_910b/op_impl/ai_core/tbe/kernel/ascend310p/decoder_kv_cache/DecoderKvCache_c74cdc5fef094383401856f8519504af.json +158 -0
- mindspore/lib/plugin/ascend/custom_ascendc_910b/op_impl/ai_core/tbe/kernel/ascend310p/decoder_kv_cache/DecoderKvCache_c74cdc5fef094383401856f8519504af.o +0 -0
- mindspore/lib/plugin/ascend/custom_ascendc_910b/op_impl/ai_core/tbe/kernel/ascend310p/prompt_kv_cache/PromptKvCache_0515c7b1a4cd614449e38c5e9a7e3f8d.json +167 -0
- mindspore/lib/plugin/ascend/custom_ascendc_910b/op_impl/ai_core/tbe/kernel/ascend310p/prompt_kv_cache/PromptKvCache_0515c7b1a4cd614449e38c5e9a7e3f8d.o +0 -0
- mindspore/lib/plugin/ascend/custom_ascendc_910b/op_impl/ai_core/tbe/kernel/ascend310p/prompt_kv_cache/PromptKvCache_09f22d898d6358c91e7c4fc48bac48e7.json +167 -0
- mindspore/lib/plugin/ascend/custom_ascendc_910b/op_impl/ai_core/tbe/kernel/ascend310p/prompt_kv_cache/PromptKvCache_09f22d898d6358c91e7c4fc48bac48e7.o +0 -0
- mindspore/lib/plugin/ascend/custom_ascendc_910b/op_impl/ai_core/tbe/kernel/ascend310p/prompt_kv_cache/PromptKvCache_0cb9a6f894b925250227136e5aab7061.json +167 -0
- mindspore/lib/plugin/ascend/custom_ascendc_910b/op_impl/ai_core/tbe/kernel/ascend310p/prompt_kv_cache/PromptKvCache_0cb9a6f894b925250227136e5aab7061.o +0 -0
- mindspore/lib/plugin/ascend/custom_ascendc_910b/op_impl/ai_core/tbe/kernel/ascend310p/prompt_kv_cache/PromptKvCache_2fa8702ffd7ca85e9e194f62644415d5.json +167 -0
- mindspore/lib/plugin/ascend/custom_ascendc_910b/op_impl/ai_core/tbe/kernel/ascend310p/prompt_kv_cache/PromptKvCache_2fa8702ffd7ca85e9e194f62644415d5.o +0 -0
- mindspore/lib/plugin/ascend/custom_ascendc_910b/op_impl/ai_core/tbe/kernel/ascend310p/prompt_kv_cache/PromptKvCache_570b62f187dfd439b64613d881deedb7.json +167 -0
- mindspore/lib/plugin/ascend/custom_ascendc_910b/op_impl/ai_core/tbe/kernel/ascend310p/prompt_kv_cache/PromptKvCache_570b62f187dfd439b64613d881deedb7.o +0 -0
- mindspore/lib/plugin/ascend/custom_ascendc_910b/op_impl/ai_core/tbe/kernel/ascend310p/prompt_kv_cache/PromptKvCache_585218c11411ff84709b9e725b66c435.json +167 -0
- mindspore/lib/plugin/ascend/custom_ascendc_910b/op_impl/ai_core/tbe/kernel/ascend310p/prompt_kv_cache/PromptKvCache_585218c11411ff84709b9e725b66c435.o +0 -0
- mindspore/lib/plugin/ascend/custom_ascendc_910b/op_impl/ai_core/tbe/kernel/ascend310p/prompt_kv_cache/PromptKvCache_5c9365ccde170b358c5b126d69dae13e.json +167 -0
- mindspore/lib/plugin/ascend/custom_ascendc_910b/op_impl/ai_core/tbe/kernel/ascend310p/prompt_kv_cache/PromptKvCache_5c9365ccde170b358c5b126d69dae13e.o +0 -0
- mindspore/lib/plugin/ascend/custom_ascendc_910b/op_impl/ai_core/tbe/kernel/ascend310p/prompt_kv_cache/PromptKvCache_6d97c45b7c43bc16fcff8baa5dacac4e.json +167 -0
- mindspore/lib/plugin/ascend/custom_ascendc_910b/op_impl/ai_core/tbe/kernel/ascend310p/prompt_kv_cache/PromptKvCache_6d97c45b7c43bc16fcff8baa5dacac4e.o +0 -0
- mindspore/lib/plugin/ascend/custom_ascendc_910b/op_impl/ai_core/tbe/kernel/ascend910_93/all_finite/AllFinite_52f59e2a65d9b1bb002de35c2819754a.json +78 -0
- mindspore/lib/plugin/ascend/{custom_ascendc_ops/op_impl/ai_core/tbe/kernel/ascend910b/all_finite/AllFinite_f55e0ebaad1f2f572e43677336992fa0.o → custom_ascendc_910b/op_impl/ai_core/tbe/kernel/ascend910_93/all_finite/AllFinite_52f59e2a65d9b1bb002de35c2819754a.o} +0 -0
- mindspore/lib/plugin/ascend/custom_ascendc_910b/op_impl/ai_core/tbe/kernel/ascend910_93/all_finite/AllFinite_6b5e50e30256d85838d6ce83514df20f.json +78 -0
- mindspore/lib/plugin/ascend/{custom_ascendc_ops/op_impl/ai_core/tbe/kernel/ascend910b/all_finite/AllFinite_576ceaeef5870c451cab59af55ea46ad.o → custom_ascendc_910b/op_impl/ai_core/tbe/kernel/ascend910_93/all_finite/AllFinite_6b5e50e30256d85838d6ce83514df20f.o} +0 -0
- mindspore/lib/plugin/ascend/custom_ascendc_910b/op_impl/ai_core/tbe/kernel/ascend910_93/all_finite/AllFinite_74e4ac02880d452e3308c94af273562e.json +78 -0
- mindspore/lib/plugin/ascend/{custom_ascendc_ops/op_impl/ai_core/tbe/kernel/ascend910b/all_finite/AllFinite_86a73ff6e28d734c96bb8d3054f7dd18.o → custom_ascendc_910b/op_impl/ai_core/tbe/kernel/ascend910_93/all_finite/AllFinite_74e4ac02880d452e3308c94af273562e.o} +0 -0
- mindspore/lib/plugin/ascend/custom_ascendc_910b/op_impl/ai_core/tbe/kernel/ascend910_93/decoder_kv_cache/DecoderKvCache_0d5520cc587ad44ce634bf3fbcffc272.json +156 -0
- mindspore/lib/plugin/ascend/custom_ascendc_910b/op_impl/ai_core/tbe/kernel/ascend910_93/decoder_kv_cache/DecoderKvCache_0d5520cc587ad44ce634bf3fbcffc272.o +0 -0
- mindspore/lib/plugin/ascend/custom_ascendc_910b/op_impl/ai_core/tbe/kernel/ascend910_93/decoder_kv_cache/DecoderKvCache_20390d30b3c4c0d23167ccca6c030c2b.json +156 -0
- mindspore/lib/plugin/ascend/custom_ascendc_910b/op_impl/ai_core/tbe/kernel/ascend910_93/decoder_kv_cache/DecoderKvCache_20390d30b3c4c0d23167ccca6c030c2b.o +0 -0
- mindspore/lib/plugin/ascend/custom_ascendc_910b/op_impl/ai_core/tbe/kernel/ascend910_93/decoder_kv_cache/DecoderKvCache_2d151f0b1d2db51faa2968d5b67544e2.json +156 -0
- mindspore/lib/plugin/ascend/custom_ascendc_910b/op_impl/ai_core/tbe/kernel/ascend910_93/decoder_kv_cache/DecoderKvCache_2d151f0b1d2db51faa2968d5b67544e2.o +0 -0
- mindspore/lib/plugin/ascend/custom_ascendc_910b/op_impl/ai_core/tbe/kernel/ascend910_93/decoder_kv_cache/DecoderKvCache_561690ec17cc1def3d2fcf68c1b07b56.json +156 -0
- mindspore/lib/plugin/ascend/custom_ascendc_910b/op_impl/ai_core/tbe/kernel/ascend910_93/decoder_kv_cache/DecoderKvCache_561690ec17cc1def3d2fcf68c1b07b56.o +0 -0
- mindspore/lib/plugin/ascend/custom_ascendc_910b/op_impl/ai_core/tbe/kernel/ascend910_93/decoder_kv_cache/DecoderKvCache_570f9aaa99e5e773b3dd0a33784363f4.json +156 -0
- mindspore/lib/plugin/ascend/custom_ascendc_910b/op_impl/ai_core/tbe/kernel/ascend910_93/decoder_kv_cache/DecoderKvCache_570f9aaa99e5e773b3dd0a33784363f4.o +0 -0
- mindspore/lib/plugin/ascend/custom_ascendc_910b/op_impl/ai_core/tbe/kernel/ascend910_93/decoder_kv_cache/DecoderKvCache_59668a0f0764afb98fda8ab9e84126f1.json +156 -0
- mindspore/lib/plugin/ascend/custom_ascendc_910b/op_impl/ai_core/tbe/kernel/ascend910_93/decoder_kv_cache/DecoderKvCache_59668a0f0764afb98fda8ab9e84126f1.o +0 -0
- mindspore/lib/plugin/ascend/custom_ascendc_910b/op_impl/ai_core/tbe/kernel/ascend910_93/decoder_kv_cache/DecoderKvCache_91d9833e4792b70b670e4e2b916abd86.json +156 -0
- mindspore/lib/plugin/ascend/custom_ascendc_910b/op_impl/ai_core/tbe/kernel/ascend910_93/decoder_kv_cache/DecoderKvCache_91d9833e4792b70b670e4e2b916abd86.o +0 -0
- mindspore/lib/plugin/ascend/custom_ascendc_910b/op_impl/ai_core/tbe/kernel/ascend910_93/decoder_kv_cache/DecoderKvCache_c74cdc5fef094383401856f8519504af.json +156 -0
- mindspore/lib/plugin/ascend/custom_ascendc_910b/op_impl/ai_core/tbe/kernel/ascend910_93/decoder_kv_cache/DecoderKvCache_c74cdc5fef094383401856f8519504af.o +0 -0
- mindspore/lib/plugin/ascend/custom_ascendc_910b/op_impl/ai_core/tbe/kernel/ascend910_93/prompt_kv_cache/PromptKvCache_0515c7b1a4cd614449e38c5e9a7e3f8d.json +165 -0
- mindspore/lib/plugin/ascend/custom_ascendc_910b/op_impl/ai_core/tbe/kernel/ascend910_93/prompt_kv_cache/PromptKvCache_0515c7b1a4cd614449e38c5e9a7e3f8d.o +0 -0
- mindspore/lib/plugin/ascend/custom_ascendc_910b/op_impl/ai_core/tbe/kernel/ascend910_93/prompt_kv_cache/PromptKvCache_09f22d898d6358c91e7c4fc48bac48e7.json +165 -0
- mindspore/lib/plugin/ascend/custom_ascendc_910b/op_impl/ai_core/tbe/kernel/ascend910_93/prompt_kv_cache/PromptKvCache_09f22d898d6358c91e7c4fc48bac48e7.o +0 -0
- mindspore/lib/plugin/ascend/custom_ascendc_910b/op_impl/ai_core/tbe/kernel/ascend910_93/prompt_kv_cache/PromptKvCache_0cb9a6f894b925250227136e5aab7061.json +165 -0
- mindspore/lib/plugin/ascend/custom_ascendc_910b/op_impl/ai_core/tbe/kernel/ascend910_93/prompt_kv_cache/PromptKvCache_0cb9a6f894b925250227136e5aab7061.o +0 -0
- mindspore/lib/plugin/ascend/custom_ascendc_910b/op_impl/ai_core/tbe/kernel/ascend910_93/prompt_kv_cache/PromptKvCache_2fa8702ffd7ca85e9e194f62644415d5.json +165 -0
- mindspore/lib/plugin/ascend/custom_ascendc_910b/op_impl/ai_core/tbe/kernel/ascend910_93/prompt_kv_cache/PromptKvCache_2fa8702ffd7ca85e9e194f62644415d5.o +0 -0
- mindspore/lib/plugin/ascend/custom_ascendc_910b/op_impl/ai_core/tbe/kernel/ascend910_93/prompt_kv_cache/PromptKvCache_570b62f187dfd439b64613d881deedb7.json +165 -0
- mindspore/lib/plugin/ascend/custom_ascendc_910b/op_impl/ai_core/tbe/kernel/ascend910_93/prompt_kv_cache/PromptKvCache_570b62f187dfd439b64613d881deedb7.o +0 -0
- mindspore/lib/plugin/ascend/custom_ascendc_910b/op_impl/ai_core/tbe/kernel/ascend910_93/prompt_kv_cache/PromptKvCache_585218c11411ff84709b9e725b66c435.json +165 -0
- mindspore/lib/plugin/ascend/custom_ascendc_910b/op_impl/ai_core/tbe/kernel/ascend910_93/prompt_kv_cache/PromptKvCache_585218c11411ff84709b9e725b66c435.o +0 -0
- mindspore/lib/plugin/ascend/custom_ascendc_910b/op_impl/ai_core/tbe/kernel/ascend910_93/prompt_kv_cache/PromptKvCache_5c9365ccde170b358c5b126d69dae13e.json +165 -0
- mindspore/lib/plugin/ascend/custom_ascendc_910b/op_impl/ai_core/tbe/kernel/ascend910_93/prompt_kv_cache/PromptKvCache_5c9365ccde170b358c5b126d69dae13e.o +0 -0
- mindspore/lib/plugin/ascend/custom_ascendc_910b/op_impl/ai_core/tbe/kernel/ascend910_93/prompt_kv_cache/PromptKvCache_6d97c45b7c43bc16fcff8baa5dacac4e.json +165 -0
- mindspore/lib/plugin/ascend/custom_ascendc_910b/op_impl/ai_core/tbe/kernel/ascend910_93/prompt_kv_cache/PromptKvCache_6d97c45b7c43bc16fcff8baa5dacac4e.o +0 -0
- mindspore/lib/plugin/ascend/custom_ascendc_910b/op_impl/ai_core/tbe/kernel/ascend910b/all_finite/AllFinite_52f59e2a65d9b1bb002de35c2819754a.json +78 -0
- mindspore/lib/plugin/ascend/custom_ascendc_910b/op_impl/ai_core/tbe/kernel/ascend910b/all_finite/AllFinite_52f59e2a65d9b1bb002de35c2819754a.o +0 -0
- mindspore/lib/plugin/ascend/custom_ascendc_910b/op_impl/ai_core/tbe/kernel/ascend910b/all_finite/AllFinite_6b5e50e30256d85838d6ce83514df20f.json +78 -0
- mindspore/lib/plugin/ascend/custom_ascendc_910b/op_impl/ai_core/tbe/kernel/ascend910b/all_finite/AllFinite_6b5e50e30256d85838d6ce83514df20f.o +0 -0
- mindspore/lib/plugin/ascend/custom_ascendc_910b/op_impl/ai_core/tbe/kernel/ascend910b/all_finite/AllFinite_74e4ac02880d452e3308c94af273562e.json +78 -0
- mindspore/lib/plugin/ascend/custom_ascendc_910b/op_impl/ai_core/tbe/kernel/ascend910b/all_finite/AllFinite_74e4ac02880d452e3308c94af273562e.o +0 -0
- mindspore/lib/plugin/ascend/custom_ascendc_910b/op_impl/ai_core/tbe/kernel/ascend910b/decoder_kv_cache/DecoderKvCache_0d5520cc587ad44ce634bf3fbcffc272.json +156 -0
- mindspore/lib/plugin/ascend/custom_ascendc_910b/op_impl/ai_core/tbe/kernel/ascend910b/decoder_kv_cache/DecoderKvCache_0d5520cc587ad44ce634bf3fbcffc272.o +0 -0
- mindspore/lib/plugin/ascend/custom_ascendc_910b/op_impl/ai_core/tbe/kernel/ascend910b/decoder_kv_cache/DecoderKvCache_20390d30b3c4c0d23167ccca6c030c2b.json +156 -0
- mindspore/lib/plugin/ascend/custom_ascendc_910b/op_impl/ai_core/tbe/kernel/ascend910b/decoder_kv_cache/DecoderKvCache_20390d30b3c4c0d23167ccca6c030c2b.o +0 -0
- mindspore/lib/plugin/ascend/custom_ascendc_910b/op_impl/ai_core/tbe/kernel/ascend910b/decoder_kv_cache/DecoderKvCache_2d151f0b1d2db51faa2968d5b67544e2.json +156 -0
- mindspore/lib/plugin/ascend/custom_ascendc_910b/op_impl/ai_core/tbe/kernel/ascend910b/decoder_kv_cache/DecoderKvCache_2d151f0b1d2db51faa2968d5b67544e2.o +0 -0
- mindspore/lib/plugin/ascend/custom_ascendc_910b/op_impl/ai_core/tbe/kernel/ascend910b/decoder_kv_cache/DecoderKvCache_561690ec17cc1def3d2fcf68c1b07b56.json +156 -0
- mindspore/lib/plugin/ascend/custom_ascendc_910b/op_impl/ai_core/tbe/kernel/ascend910b/decoder_kv_cache/DecoderKvCache_561690ec17cc1def3d2fcf68c1b07b56.o +0 -0
- mindspore/lib/plugin/ascend/custom_ascendc_910b/op_impl/ai_core/tbe/kernel/ascend910b/decoder_kv_cache/DecoderKvCache_570f9aaa99e5e773b3dd0a33784363f4.json +156 -0
- mindspore/lib/plugin/ascend/custom_ascendc_910b/op_impl/ai_core/tbe/kernel/ascend910b/decoder_kv_cache/DecoderKvCache_570f9aaa99e5e773b3dd0a33784363f4.o +0 -0
- mindspore/lib/plugin/ascend/custom_ascendc_910b/op_impl/ai_core/tbe/kernel/ascend910b/decoder_kv_cache/DecoderKvCache_59668a0f0764afb98fda8ab9e84126f1.json +156 -0
- mindspore/lib/plugin/ascend/custom_ascendc_910b/op_impl/ai_core/tbe/kernel/ascend910b/decoder_kv_cache/DecoderKvCache_59668a0f0764afb98fda8ab9e84126f1.o +0 -0
- mindspore/lib/plugin/ascend/custom_ascendc_910b/op_impl/ai_core/tbe/kernel/ascend910b/decoder_kv_cache/DecoderKvCache_91d9833e4792b70b670e4e2b916abd86.json +156 -0
- mindspore/lib/plugin/ascend/custom_ascendc_910b/op_impl/ai_core/tbe/kernel/ascend910b/decoder_kv_cache/DecoderKvCache_91d9833e4792b70b670e4e2b916abd86.o +0 -0
- mindspore/lib/plugin/ascend/custom_ascendc_910b/op_impl/ai_core/tbe/kernel/ascend910b/decoder_kv_cache/DecoderKvCache_c74cdc5fef094383401856f8519504af.json +156 -0
- mindspore/lib/plugin/ascend/custom_ascendc_910b/op_impl/ai_core/tbe/kernel/ascend910b/decoder_kv_cache/DecoderKvCache_c74cdc5fef094383401856f8519504af.o +0 -0
- mindspore/lib/plugin/ascend/custom_ascendc_910b/op_impl/ai_core/tbe/kernel/ascend910b/prompt_kv_cache/PromptKvCache_0515c7b1a4cd614449e38c5e9a7e3f8d.json +165 -0
- mindspore/lib/plugin/ascend/custom_ascendc_910b/op_impl/ai_core/tbe/kernel/ascend910b/prompt_kv_cache/PromptKvCache_0515c7b1a4cd614449e38c5e9a7e3f8d.o +0 -0
- mindspore/lib/plugin/ascend/custom_ascendc_910b/op_impl/ai_core/tbe/kernel/ascend910b/prompt_kv_cache/PromptKvCache_09f22d898d6358c91e7c4fc48bac48e7.json +165 -0
- mindspore/lib/plugin/ascend/custom_ascendc_910b/op_impl/ai_core/tbe/kernel/ascend910b/prompt_kv_cache/PromptKvCache_09f22d898d6358c91e7c4fc48bac48e7.o +0 -0
- mindspore/lib/plugin/ascend/custom_ascendc_910b/op_impl/ai_core/tbe/kernel/ascend910b/prompt_kv_cache/PromptKvCache_0cb9a6f894b925250227136e5aab7061.json +165 -0
- mindspore/lib/plugin/ascend/custom_ascendc_910b/op_impl/ai_core/tbe/kernel/ascend910b/prompt_kv_cache/PromptKvCache_0cb9a6f894b925250227136e5aab7061.o +0 -0
- mindspore/lib/plugin/ascend/custom_ascendc_910b/op_impl/ai_core/tbe/kernel/ascend910b/prompt_kv_cache/PromptKvCache_2fa8702ffd7ca85e9e194f62644415d5.json +165 -0
- mindspore/lib/plugin/ascend/custom_ascendc_910b/op_impl/ai_core/tbe/kernel/ascend910b/prompt_kv_cache/PromptKvCache_2fa8702ffd7ca85e9e194f62644415d5.o +0 -0
- mindspore/lib/plugin/ascend/custom_ascendc_910b/op_impl/ai_core/tbe/kernel/ascend910b/prompt_kv_cache/PromptKvCache_570b62f187dfd439b64613d881deedb7.json +165 -0
- mindspore/lib/plugin/ascend/custom_ascendc_910b/op_impl/ai_core/tbe/kernel/ascend910b/prompt_kv_cache/PromptKvCache_570b62f187dfd439b64613d881deedb7.o +0 -0
- mindspore/lib/plugin/ascend/custom_ascendc_910b/op_impl/ai_core/tbe/kernel/ascend910b/prompt_kv_cache/PromptKvCache_585218c11411ff84709b9e725b66c435.json +165 -0
- mindspore/lib/plugin/ascend/custom_ascendc_910b/op_impl/ai_core/tbe/kernel/ascend910b/prompt_kv_cache/PromptKvCache_585218c11411ff84709b9e725b66c435.o +0 -0
- mindspore/lib/plugin/ascend/custom_ascendc_910b/op_impl/ai_core/tbe/kernel/ascend910b/prompt_kv_cache/PromptKvCache_5c9365ccde170b358c5b126d69dae13e.json +165 -0
- mindspore/lib/plugin/ascend/custom_ascendc_910b/op_impl/ai_core/tbe/kernel/ascend910b/prompt_kv_cache/PromptKvCache_5c9365ccde170b358c5b126d69dae13e.o +0 -0
- mindspore/lib/plugin/ascend/custom_ascendc_910b/op_impl/ai_core/tbe/kernel/ascend910b/prompt_kv_cache/PromptKvCache_6d97c45b7c43bc16fcff8baa5dacac4e.json +165 -0
- mindspore/lib/plugin/ascend/custom_ascendc_910b/op_impl/ai_core/tbe/kernel/ascend910b/prompt_kv_cache/PromptKvCache_6d97c45b7c43bc16fcff8baa5dacac4e.o +0 -0
- mindspore/lib/plugin/ascend/custom_ascendc_910b/op_impl/ai_core/tbe/kernel/config/ascend310p/all_finite.json +139 -0
- mindspore/lib/plugin/ascend/custom_ascendc_910b/op_impl/ai_core/tbe/kernel/config/ascend310p/binary_info_config.json +361 -0
- mindspore/lib/plugin/ascend/custom_ascendc_910b/op_impl/ai_core/tbe/kernel/config/ascend310p/decoder_kv_cache.json +892 -0
- mindspore/lib/plugin/ascend/custom_ascendc_910b/op_impl/ai_core/tbe/kernel/config/ascend310p/prompt_kv_cache.json +892 -0
- mindspore/lib/plugin/ascend/custom_ascendc_910b/op_impl/ai_core/tbe/kernel/config/ascend910_93/all_finite.json +139 -0
- mindspore/lib/plugin/ascend/custom_ascendc_910b/op_impl/ai_core/tbe/kernel/config/ascend910_93/binary_info_config.json +361 -0
- mindspore/lib/plugin/ascend/custom_ascendc_910b/op_impl/ai_core/tbe/kernel/config/ascend910_93/decoder_kv_cache.json +892 -0
- mindspore/lib/plugin/ascend/custom_ascendc_910b/op_impl/ai_core/tbe/kernel/config/ascend910_93/prompt_kv_cache.json +892 -0
- mindspore/lib/plugin/ascend/custom_ascendc_910b/op_impl/ai_core/tbe/kernel/config/ascend910b/all_finite.json +139 -0
- mindspore/lib/plugin/ascend/custom_ascendc_910b/op_impl/ai_core/tbe/kernel/config/ascend910b/binary_info_config.json +361 -0
- mindspore/lib/plugin/ascend/custom_ascendc_910b/op_impl/ai_core/tbe/kernel/config/ascend910b/decoder_kv_cache.json +892 -0
- mindspore/lib/plugin/ascend/custom_ascendc_910b/op_impl/ai_core/tbe/kernel/config/ascend910b/prompt_kv_cache.json +892 -0
- mindspore/lib/plugin/ascend/custom_ascendc_910b/op_impl/ai_core/tbe/op_tiling/lib/linux/x86_64/libcust_opmaster_rt2.0.so +0 -0
- mindspore/lib/plugin/ascend/custom_ascendc_910b/op_impl/ai_core/tbe/op_tiling/liboptiling.so +0 -0
- mindspore/lib/plugin/ascend/custom_ascendc_910b/op_proto/lib/linux/x86_64/libcust_opsproto_rt2.0.so +0 -0
- mindspore/lib/plugin/ascend/custom_ascendc_910b/version.info +1 -0
- mindspore/lib/plugin/ascend/custom_compiler/setup.py +1 -1
- mindspore/lib/plugin/ascend/libascend_collective.so +0 -0
- mindspore/lib/plugin/ascend/libdvpp_utils.so +0 -0
- mindspore/lib/plugin/ascend/libhccl_plugin.so +0 -0
- mindspore/lib/plugin/ascend/liblowlatency_collective.so +0 -0
- mindspore/lib/plugin/ascend/libmindspore_cpu_kernels.so +0 -0
- mindspore/lib/plugin/ascend/libmindspore_internal_kernels.so +0 -0
- mindspore/lib/plugin/ascend/libms_ascend_native_boost.so +0 -0
- mindspore/lib/plugin/ascend/libms_atb_boost.so +0 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/asdops/bin/PkgInspect +0 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/asdops/bin/op_man +0 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/asdops/device/ascend910b/bin/ascend910b.bin +960 -958
- mindspore/lib/plugin/ascend/ms_kernels_internal/asdops/host/libasdops_cann_host.so +0 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/asdops/host/libasdops_host.so +0 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/asdops/lib/libasdops.so +0 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/asdops/lib/libasdops_static.a +0 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/{acme/include/base_type.h → base_type.h} +25 -20
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/{cast/cast_tiling.h → internal.h} +6 -4
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/internal_op.h +114 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/llm/boost_kernel.h +70 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/llm/llama_impl.h +85 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/llm/model_interface.h +52 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/llm/tensor.h +81 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/op_creator.h +123 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/op_param.h +155 -110
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/{acme/include/tiling_info.h → tiling_info.h} +12 -9
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/tiling_utils.h +178 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/lib/libadd_layer_norm_op.so +0 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/lib/libadd_rms_norm_op.so +0 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/lib/libadd_rms_norm_quant_op.so +0 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/lib/libapply_rotary_pos_emb_310p_op.so +0 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/lib/libapply_rotary_pos_emb_op.so +0 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/lib/libcast_op.so +0 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/lib/libcompare_op.so +0 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/lib/libgelu_op.so +0 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/lib/libllama_op.so +0 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/lib/libmatmul_op.so +0 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/lib/libms_kernels_internal.so +0 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/lib/libms_optiling.so +0 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/lib/libmulti_weight_matmul_kernel_op.so +0 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/lib/libreshape_and_cache_nz_op.so +0 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/lib/libreshape_and_cache_op.so +0 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/lib/librms_norm_op.so +0 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/op_kernels/ascend310p/object_kernels/internal_pp_matmul_f16_nz/internal_pp_matmul_f16_nz.o +0 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/op_kernels/ascend310p/object_kernels/internal_pp_matmul_f16_nz/internal_pp_matmul_f16_nz_0.o +0 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/op_kernels/ascend310p/object_kernels/internal_pp_matmul_i8_nz_compress/internal_pp_matmul_i8_nz_compress.o +0 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/op_kernels/ascend310p/object_kernels/internal_pp_matmul_i8_nz_compress/internal_pp_matmul_i8_nz_compress_0.o +0 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/op_kernels/ascend310p/object_kernels/internal_pp_matmul_int8_nz/internal_pp_matmul_int8_nz.o +0 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/op_kernels/ascend310p/object_kernels/internal_pp_matmul_int8_nz/internal_pp_matmul_int8_nz_0.o +0 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/op_kernels/ascend310p/so_kernels/libadd_rms_norm_quant_ascend310p.so +0 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/{lib/libapply_rotary_pos_emb_310p_impl.so → op_kernels/ascend310p/so_kernels/libapply_rotary_pos_emb_310p_ascend310p.so} +0 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/op_kernels/ascend310p/so_kernels/libcast_ascend310p.so +0 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/op_kernels/ascend310p/so_kernels/libcompare_ascend310p.so +0 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/op_kernels/ascend310p/so_kernels/libgelu_ascend310p.so +0 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/op_kernels/ascend310p/so_kernels/libmatmul_ascend310p.so +0 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/op_kernels/ascend310p/so_kernels/libreshape_and_cache_nz_ascend310p.so +0 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/op_kernels/ascend910b/hphol_kernels/add_rms_norm_dynamic_quant/AddRmsNormDynamicQuant_4b60f88cdc28b25a36bad2d8b0a88092.json +163 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/op_kernels/ascend910b/hphol_kernels/add_rms_norm_dynamic_quant/AddRmsNormDynamicQuant_4b60f88cdc28b25a36bad2d8b0a88092.o +0 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/op_kernels/ascend910b/hphol_kernels/add_rms_norm_dynamic_quant/AddRmsNormDynamicQuant_cde61da2bd6fededcb1ba310a6ad16ee.json +163 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/op_kernels/ascend910b/hphol_kernels/add_rms_norm_dynamic_quant/AddRmsNormDynamicQuant_cde61da2bd6fededcb1ba310a6ad16ee.o +0 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/op_kernels/ascend910b/object_kernels/flash_attention_score/flash_attention_score_bf16_bnsd_full_mix.o +0 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/op_kernels/ascend910b/object_kernels/flash_attention_score/flash_attention_score_bf16_bnsd_tri_mix.o +0 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/op_kernels/ascend910b/object_kernels/flash_attention_score/flash_attention_score_bf16_bsh_full_mix.o +0 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/op_kernels/ascend910b/object_kernels/flash_attention_score/flash_attention_score_bf16_bsh_tri_mix.o +0 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/op_kernels/ascend910b/object_kernels/flash_attention_score/flash_attention_score_fp16_bnsd_full_mix.o +0 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/op_kernels/ascend910b/object_kernels/flash_attention_score/flash_attention_score_fp16_bnsd_tri_mix.o +0 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/op_kernels/ascend910b/object_kernels/flash_attention_score/flash_attention_score_fp16_bsh_full_mix.o +0 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/op_kernels/ascend910b/object_kernels/flash_attention_score/flash_attention_score_fp16_bsh_tri_mix.o +0 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/op_kernels/ascend910b/object_kernels/internal_matmul_postfusion_mix/internal_matmul_postfusion_mix.o +0 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/op_kernels/ascend910b/object_kernels/internal_matmul_postfusion_mix/internal_matmul_postfusion_mix_mix_aic_0.o +0 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/op_kernels/ascend910b/object_kernels/internal_matmul_postfusion_mix/internal_matmul_postfusion_mix_mix_aiv_0.o +0 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/op_kernels/ascend910b/object_kernels/internal_multi_weight_matmul_postfusion_mix/internal_multi_weight_matmul_postfusion_mix.o +0 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/op_kernels/ascend910b/object_kernels/internal_multi_weight_matmul_postfusion_mix/internal_multi_weight_matmul_postfusion_mix_mix_aic_0.o +0 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/op_kernels/ascend910b/object_kernels/internal_multi_weight_matmul_postfusion_mix/internal_multi_weight_matmul_postfusion_mix_mix_aiv_0.o +0 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/op_kernels/ascend910b/object_kernels/matmul_add_rmsnorm/matmul_add_rmsnorm_bf16_bf16.o +0 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/op_kernels/ascend910b/object_kernels/matmul_add_rmsnorm/matmul_add_rmsnorm_bf16_fp16.o +0 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/op_kernels/ascend910b/object_kernels/matmul_add_rmsnorm/matmul_add_rmsnorm_bf16_fp32.o +0 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/op_kernels/ascend910b/object_kernels/matmul_add_rmsnorm/matmul_add_rmsnorm_fp16_bf16.o +0 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/op_kernels/ascend910b/object_kernels/matmul_add_rmsnorm/matmul_add_rmsnorm_fp16_fp16.o +0 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/op_kernels/ascend910b/object_kernels/matmul_add_rmsnorm/matmul_add_rmsnorm_fp16_fp32.o +0 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/op_kernels/ascend910b/object_kernels/paged_attention_v2/paged_attention_v2.o +0 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/op_kernels/ascend910b/object_kernels/paged_attention_v2/paged_attention_v2_mix_aic_0.o +0 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/op_kernels/ascend910b/object_kernels/paged_attention_v2/paged_attention_v2_mix_aiv_0.o +0 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/op_kernels/ascend910b/so_kernels/libadd_layer_norm_ascend910b.so +0 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/{lib/libadd_rms_norm_impl.so → op_kernels/ascend910b/so_kernels/libadd_rms_norm_ascend910b.so} +0 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/op_kernels/ascend910b/so_kernels/libadd_rms_norm_quant_ascend910b.so +0 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/{lib/libapply_rotary_pos_emb_impl.so → op_kernels/ascend910b/so_kernels/libapply_rotary_pos_emb_ascend910b.so} +0 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/{lib/libcast_impl.so → op_kernels/ascend910b/so_kernels/libcast_ascend910b.so} +0 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/{lib/libnot_equal_impl.so → op_kernels/ascend910b/so_kernels/libcompare_ascend910b.so} +0 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/{lib/libgelu_impl.so → op_kernels/ascend910b/so_kernels/libgelu_ascend910b.so} +0 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/op_kernels/ascend910b/so_kernels/libllama_ascend910b.so +0 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/{lib/libmatmul_impl.so → op_kernels/ascend910b/so_kernels/libmatmul_ascend910b.so} +0 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/{lib/libmulti_weight_matmul_kernel_impl.so → op_kernels/ascend910b/so_kernels/libmulti_weight_matmul_kernel_ascend910b.so} +0 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/{lib/libreshape_and_cache_impl.so → op_kernels/ascend910b/so_kernels/libreshape_and_cache_ascend910b.so} +0 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/{lib/librms_norm_impl.so → op_kernels/ascend910b/so_kernels/librms_norm_ascend910b.so} +0 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/lccl/lib/liblccl_wrapper.so +0 -0
- mindspore/lib/plugin/gpu/libcuda_ops.so.10 +0 -0
- mindspore/lib/plugin/gpu/libcuda_ops.so.11 +0 -0
- mindspore/lib/plugin/gpu10.1/libnccl.so.2 +0 -0
- mindspore/lib/plugin/gpu10.1/libnvidia_collective.so +0 -0
- mindspore/lib/plugin/gpu11.1/libnccl.so.2 +0 -0
- mindspore/lib/plugin/gpu11.1/libnvidia_collective.so +0 -0
- mindspore/lib/plugin/gpu11.6/libnccl.so.2 +0 -0
- mindspore/lib/plugin/gpu11.6/libnvidia_collective.so +0 -0
- mindspore/lib/plugin/libmindspore_ascend.so.2 +0 -0
- mindspore/lib/plugin/libmindspore_gpu.so.10.1 +0 -0
- mindspore/lib/plugin/libmindspore_gpu.so.11.1 +0 -0
- mindspore/lib/plugin/libmindspore_gpu.so.11.6 +0 -0
- mindspore/log.py +12 -0
- mindspore/mindrecord/__init__.py +1 -1
- mindspore/mindrecord/config.py +17 -316
- mindspore/mindrecord/filereader.py +1 -9
- mindspore/mindrecord/filewriter.py +5 -15
- mindspore/mindrecord/mindpage.py +1 -9
- mindspore/mint/__init__.py +824 -218
- mindspore/mint/distributed/__init__.py +66 -4
- mindspore/mint/distributed/distributed.py +2594 -44
- mindspore/mint/linalg/__init__.py +6 -0
- mindspore/mint/nn/__init__.py +473 -14
- mindspore/mint/nn/functional.py +486 -11
- mindspore/mint/nn/layer/__init__.py +17 -4
- mindspore/mint/nn/layer/_functions.py +330 -0
- mindspore/mint/nn/layer/activation.py +169 -1
- mindspore/mint/nn/layer/basic.py +123 -0
- mindspore/mint/nn/layer/conv.py +727 -0
- mindspore/mint/nn/layer/normalization.py +215 -19
- mindspore/mint/nn/layer/padding.py +797 -0
- mindspore/mint/nn/layer/pooling.py +170 -0
- mindspore/mint/optim/__init__.py +2 -1
- mindspore/mint/optim/adam.py +223 -0
- mindspore/mint/optim/adamw.py +26 -19
- mindspore/mint/special/__init__.py +2 -1
- mindspore/multiprocessing/__init__.py +5 -0
- mindspore/nn/__init__.py +2 -0
- mindspore/nn/cell.py +142 -21
- mindspore/nn/dynamic_lr.py +2 -1
- mindspore/nn/layer/activation.py +6 -6
- mindspore/nn/layer/basic.py +35 -25
- mindspore/nn/layer/channel_shuffle.py +3 -3
- mindspore/nn/layer/conv.py +3 -0
- mindspore/nn/layer/embedding.py +3 -3
- mindspore/nn/layer/normalization.py +8 -7
- mindspore/nn/layer/padding.py +4 -3
- mindspore/nn/layer/pooling.py +55 -23
- mindspore/nn/layer/rnn_cells.py +1 -1
- mindspore/nn/layer/rnns.py +2 -1
- mindspore/nn/layer/timedistributed.py +5 -5
- mindspore/nn/layer/transformer.py +48 -26
- mindspore/nn/learning_rate_schedule.py +5 -3
- mindspore/nn/loss/loss.py +31 -36
- mindspore/nn/optim/ada_grad.py +1 -0
- mindspore/nn/optim/adadelta.py +2 -2
- mindspore/nn/optim/adam.py +1 -1
- mindspore/nn/optim/lars.py +1 -4
- mindspore/nn/optim/optimizer.py +1 -1
- mindspore/nn/optim/rprop.py +2 -2
- mindspore/nn/optim/thor.py +2 -1
- mindspore/nn/utils/__init__.py +22 -0
- mindspore/nn/utils/init.py +73 -0
- mindspore/nn/wrap/cell_wrapper.py +4 -6
- mindspore/nn/wrap/loss_scale.py +3 -4
- mindspore/numpy/array_creations.py +60 -62
- mindspore/numpy/array_ops.py +148 -143
- mindspore/numpy/logic_ops.py +41 -42
- mindspore/numpy/math_ops.py +361 -359
- mindspore/numpy/utils.py +16 -16
- mindspore/numpy/utils_const.py +4 -4
- mindspore/ops/__init__.py +2 -1
- mindspore/ops/_grad_experimental/grad_comm_ops.py +107 -8
- mindspore/ops/_grad_experimental/grad_debug_ops.py +6 -1
- mindspore/ops/_grad_experimental/grad_inner_ops.py +9 -0
- mindspore/ops/_grad_experimental/grad_math_ops.py +2 -1
- mindspore/ops/_op_impl/cpu/__init__.py +1 -0
- mindspore/ops/_op_impl/cpu/raise_op.py +28 -0
- mindspore/ops/_vmap/vmap_array_ops.py +20 -19
- mindspore/ops/_vmap/vmap_base.py +0 -2
- mindspore/ops/_vmap/vmap_grad_nn_ops.py +19 -13
- mindspore/ops/_vmap/vmap_math_ops.py +11 -9
- mindspore/ops/_vmap/vmap_nn_ops.py +20 -34
- mindspore/ops/auto_generate/cpp_create_prim_instance_helper.py +149 -12
- mindspore/ops/auto_generate/gen_arg_handler.py +0 -61
- mindspore/ops/auto_generate/gen_extend_func.py +554 -60
- mindspore/ops/auto_generate/gen_ops_def.py +1621 -115
- mindspore/ops/auto_generate/gen_ops_prim.py +8027 -3411
- mindspore/ops/auto_generate/pyboost_inner_prim.py +183 -79
- mindspore/ops/composite/base.py +1 -1
- mindspore/ops/composite/multitype_ops/_compile_utils.py +229 -30
- mindspore/ops/composite/multitype_ops/pow_impl.py +0 -29
- mindspore/ops/function/__init__.py +12 -0
- mindspore/ops/function/array_func.py +561 -159
- mindspore/ops/function/clip_func.py +64 -0
- mindspore/ops/function/debug_func.py +28 -20
- mindspore/ops/function/image_func.py +1 -1
- mindspore/ops/function/linalg_func.py +5 -4
- mindspore/ops/function/math_func.py +1664 -294
- mindspore/ops/function/nn_func.py +988 -317
- mindspore/ops/function/parameter_func.py +3 -56
- mindspore/ops/function/random_func.py +243 -33
- mindspore/ops/function/sparse_unary_func.py +1 -1
- mindspore/ops/functional.py +18 -5
- mindspore/ops/functional_overload.py +897 -0
- mindspore/ops/operations/__init__.py +3 -2
- mindspore/ops/operations/_embedding_cache_ops.py +4 -4
- mindspore/ops/operations/_grad_ops.py +2 -34
- mindspore/ops/operations/_infer_ops.py +2 -1
- mindspore/ops/operations/_inner_ops.py +38 -8
- mindspore/ops/operations/array_ops.py +45 -303
- mindspore/ops/operations/comm_ops.py +23 -17
- mindspore/ops/operations/custom_ops.py +7 -49
- mindspore/ops/operations/debug_ops.py +42 -47
- mindspore/ops/operations/inner_ops.py +6 -4
- mindspore/ops/operations/linalg_ops.py +3 -2
- mindspore/ops/operations/manually_defined/ops_def.py +185 -104
- mindspore/ops/operations/math_ops.py +11 -216
- mindspore/ops/operations/nn_ops.py +153 -310
- mindspore/ops/primitive.py +23 -21
- mindspore/ops/tensor_method.py +1669 -0
- mindspore/ops_generate/aclnn_kernel_register_auto_cc_generator.py +110 -0
- mindspore/ops_generate/add_tensor_docs_generator.py +54 -0
- mindspore/ops_generate/arg_handler.py +0 -61
- mindspore/ops_generate/auto_grad_impl_cc_generator.py +135 -0
- mindspore/ops_generate/auto_grad_reg_cc_generator.py +93 -0
- mindspore/ops_generate/base_generator.py +11 -0
- mindspore/ops_generate/cpp_create_prim_instance_helper_generator.py +108 -0
- mindspore/ops_generate/functional_map_cpp_generator.py +491 -0
- mindspore/ops_generate/functional_overload_py_generator.py +110 -0
- mindspore/ops_generate/functions_cc_generator.py +233 -0
- mindspore/ops_generate/gen_aclnn_implement.py +110 -114
- mindspore/ops_generate/gen_constants.py +157 -3
- mindspore/ops_generate/gen_ops.py +245 -990
- mindspore/ops_generate/gen_pyboost_func.py +97 -998
- mindspore/ops_generate/gen_utils.py +119 -33
- mindspore/ops_generate/lite_ops_cpp_generator.py +155 -0
- mindspore/ops_generate/op_api_proto.py +206 -0
- mindspore/ops_generate/op_def_py_generator.py +131 -0
- mindspore/ops_generate/op_prim_py_generator.py +480 -0
- mindspore/ops_generate/op_proto.py +373 -108
- mindspore/ops_generate/op_template_parser.py +436 -0
- mindspore/ops_generate/ops_def_cc_generator.py +288 -0
- mindspore/ops_generate/ops_def_h_generator.py +74 -0
- mindspore/ops_generate/ops_name_h_generator.py +68 -0
- mindspore/ops_generate/ops_primitive_h_generator.py +81 -0
- mindspore/ops_generate/pyboost_functions_cpp_generator.py +370 -0
- mindspore/ops_generate/pyboost_functions_h_generator.py +68 -0
- mindspore/ops_generate/pyboost_functions_py_generator.py +148 -0
- mindspore/ops_generate/pyboost_grad_function_cpp_generator.py +154 -0
- mindspore/ops_generate/pyboost_inner_prim_generator.py +131 -0
- mindspore/ops_generate/pyboost_native_grad_functions_generator.py +268 -0
- mindspore/ops_generate/pyboost_op_cpp_code_generator.py +851 -0
- mindspore/ops_generate/pyboost_overload_functions_cpp_generator.py +344 -0
- mindspore/ops_generate/pyboost_utils.py +92 -33
- mindspore/ops_generate/template.py +294 -44
- mindspore/ops_generate/tensor_func_reg_cpp_generator.py +422 -0
- mindspore/parallel/__init__.py +3 -3
- mindspore/parallel/_auto_parallel_context.py +44 -34
- mindspore/parallel/_cell_wrapper.py +22 -3
- mindspore/parallel/_parallel_serialization.py +13 -2
- mindspore/parallel/_utils.py +4 -2
- mindspore/parallel/algo_parameter_config.py +1 -1
- mindspore/parallel/checkpoint_transform.py +44 -0
- mindspore/parallel/cluster/process_entity/_api.py +131 -37
- mindspore/parallel/cluster/process_entity/_utils.py +41 -6
- mindspore/parallel/cluster/run.py +20 -3
- mindspore/parallel/parameter_broadcast.py +1 -1
- mindspore/parallel/shard.py +3 -0
- mindspore/parallel/transform_safetensors.py +119 -253
- mindspore/profiler/__init__.py +17 -4
- mindspore/profiler/analysis/__init__.py +0 -0
- mindspore/profiler/analysis/parser/__init__.py +0 -0
- mindspore/profiler/analysis/parser/ascend_cann_parser.py +166 -0
- mindspore/profiler/analysis/parser/base_parser.py +158 -0
- mindspore/profiler/analysis/parser/framework_cann_relation_parser.py +45 -0
- mindspore/profiler/analysis/parser/ms_framework_parser.py +142 -0
- mindspore/profiler/analysis/parser/ms_minddata_parser.py +145 -0
- mindspore/profiler/analysis/parser/timeline_assembly_factory/__init__.py +0 -0
- mindspore/profiler/analysis/parser/timeline_assembly_factory/ascend_timeline_assembler.py +261 -0
- mindspore/profiler/analysis/parser/timeline_assembly_factory/base_timeline_assembler.py +40 -0
- mindspore/profiler/analysis/parser/timeline_assembly_factory/trace_view_container.py +84 -0
- mindspore/profiler/analysis/parser/timeline_creator/__init__.py +0 -0
- mindspore/profiler/analysis/parser/timeline_creator/base_timeline_creator.py +44 -0
- mindspore/profiler/analysis/parser/timeline_creator/cpu_op_timeline_creator.py +90 -0
- mindspore/profiler/analysis/parser/timeline_creator/fwk_timeline_creator.py +76 -0
- mindspore/profiler/analysis/parser/timeline_creator/msprof_timeline_creator.py +103 -0
- mindspore/profiler/analysis/parser/timeline_creator/scope_layer_timeline_creator.py +134 -0
- mindspore/profiler/analysis/parser/timeline_event/__init__.py +0 -0
- mindspore/profiler/analysis/parser/timeline_event/base_event.py +233 -0
- mindspore/profiler/analysis/parser/timeline_event/cpu_op_event.py +47 -0
- mindspore/profiler/analysis/parser/timeline_event/flow_event.py +36 -0
- mindspore/profiler/analysis/parser/timeline_event/fwk_event.py +260 -0
- mindspore/profiler/analysis/parser/timeline_event/msprof_event.py +73 -0
- mindspore/profiler/analysis/parser/timeline_event/scope_layer_event.py +53 -0
- mindspore/profiler/analysis/parser/timeline_event/timeline_event_pool.py +146 -0
- mindspore/profiler/analysis/task_manager.py +131 -0
- mindspore/profiler/analysis/time_converter.py +84 -0
- mindspore/profiler/analysis/viewer/__init__.py +0 -0
- mindspore/profiler/analysis/viewer/ascend_communication_viewer.py +333 -0
- mindspore/profiler/analysis/viewer/ascend_integrate_viewer.py +87 -0
- mindspore/profiler/analysis/viewer/ascend_kernel_details_viewer.py +252 -0
- mindspore/profiler/analysis/viewer/ascend_memory_viewer.py +313 -0
- mindspore/profiler/analysis/viewer/ascend_op_memory_viewer.py +322 -0
- mindspore/profiler/analysis/viewer/ascend_step_trace_time_viewer.py +265 -0
- mindspore/profiler/analysis/viewer/ascend_timeline_viewer.py +58 -0
- mindspore/profiler/analysis/viewer/base_viewer.py +26 -0
- mindspore/profiler/analysis/viewer/ms_dataset_viewer.py +97 -0
- mindspore/profiler/analysis/viewer/ms_minddata_viewer.py +581 -0
- mindspore/profiler/analysis/work_flow.py +73 -0
- mindspore/profiler/common/ascend_msprof_exporter.py +138 -0
- mindspore/profiler/common/command_executor.py +90 -0
- mindspore/profiler/common/constant.py +174 -3
- mindspore/profiler/common/file_manager.py +208 -0
- mindspore/profiler/common/log.py +130 -0
- mindspore/profiler/common/msprof_cmd_tool.py +202 -0
- mindspore/profiler/common/path_manager.py +371 -0
- mindspore/profiler/common/process_bar.py +168 -0
- mindspore/profiler/common/process_pool.py +9 -3
- mindspore/profiler/common/profiler_context.py +476 -0
- mindspore/profiler/common/profiler_info.py +304 -0
- mindspore/profiler/common/profiler_output_path.py +284 -0
- mindspore/profiler/common/profiler_parameters.py +210 -0
- mindspore/profiler/common/profiler_path_manager.py +120 -0
- mindspore/profiler/common/record_function.py +76 -0
- mindspore/profiler/common/tlv_decoder.py +76 -0
- mindspore/profiler/common/util.py +75 -2
- mindspore/profiler/dynamic_profiler.py +270 -37
- mindspore/profiler/envprofiler.py +138 -0
- mindspore/profiler/mstx.py +199 -0
- mindspore/profiler/platform/__init__.py +21 -0
- mindspore/profiler/platform/base_profiler.py +40 -0
- mindspore/profiler/platform/cpu_profiler.py +124 -0
- mindspore/profiler/platform/gpu_profiler.py +74 -0
- mindspore/profiler/platform/npu_profiler.py +309 -0
- mindspore/profiler/profiler.py +580 -93
- mindspore/profiler/profiler_action_controller.py +187 -0
- mindspore/profiler/profiler_interface.py +114 -0
- mindspore/profiler/schedule.py +208 -0
- mindspore/rewrite/api/symbol_tree.py +1 -2
- mindspore/run_check/_check_version.py +18 -13
- mindspore/runtime/__init__.py +37 -0
- mindspore/runtime/device.py +27 -0
- mindspore/runtime/event.py +209 -0
- mindspore/runtime/executor.py +148 -0
- mindspore/runtime/memory.py +392 -0
- mindspore/runtime/stream.py +460 -0
- mindspore/runtime/thread_bind_core.py +401 -0
- mindspore/train/__init__.py +2 -2
- mindspore/train/_utils.py +53 -18
- mindspore/train/amp.py +8 -4
- mindspore/train/callback/_checkpoint.py +32 -18
- mindspore/train/callback/_early_stop.py +1 -1
- mindspore/train/callback/_flops_collector.py +105 -69
- mindspore/train/callback/_history.py +1 -1
- mindspore/train/callback/_summary_collector.py +44 -6
- mindspore/train/callback/_tft_register.py +37 -15
- mindspore/train/dataset_helper.py +11 -11
- mindspore/train/metrics/precision.py +4 -5
- mindspore/train/mind_ir_pb2.py +167 -46
- mindspore/train/model.py +13 -14
- mindspore/train/serialization.py +461 -72
- mindspore/train/summary/summary_record.py +1 -2
- mindspore/train/train_thor/model_thor.py +1 -1
- mindspore/utils/__init__.py +4 -2
- mindspore/utils/bin/dataset-cache +0 -0
- mindspore/utils/bin/dataset-cache-server +0 -0
- mindspore/utils/dryrun.py +138 -0
- mindspore/utils/runtime_execution_order_check.py +550 -0
- mindspore/version.py +1 -1
- {mindspore-2.4.1.dist-info → mindspore-2.5.0.dist-info}/METADATA +3 -4
- {mindspore-2.4.1.dist-info → mindspore-2.5.0.dist-info}/RECORD +683 -490
- {mindspore-2.4.1.dist-info → mindspore-2.5.0.dist-info}/entry_points.txt +1 -1
- mindspore/_data_dump.cpython-311-x86_64-linux-gnu.so +0 -0
- mindspore/bin/cache_admin +0 -0
- mindspore/bin/cache_server +0 -0
- mindspore/common/_tensor_overload.py +0 -139
- mindspore/lib/libmindspore_np_dtype.so +0 -0
- mindspore/lib/plugin/ascend/custom_ascendc_ops/op_api/lib/libcust_opapi.so +0 -0
- mindspore/lib/plugin/ascend/custom_ascendc_ops/op_impl/ai_core/tbe/kernel/ascend910b/all_finite/AllFinite_576ceaeef5870c451cab59af55ea46ad.json +0 -58
- mindspore/lib/plugin/ascend/custom_ascendc_ops/op_impl/ai_core/tbe/kernel/ascend910b/all_finite/AllFinite_86a73ff6e28d734c96bb8d3054f7dd18.json +0 -58
- mindspore/lib/plugin/ascend/custom_ascendc_ops/op_impl/ai_core/tbe/kernel/ascend910b/all_finite/AllFinite_f55e0ebaad1f2f572e43677336992fa0.json +0 -58
- mindspore/lib/plugin/ascend/custom_ascendc_ops/op_impl/ai_core/tbe/kernel/config/ascend910b/all_finite.json +0 -109
- mindspore/lib/plugin/ascend/custom_ascendc_ops/op_impl/ai_core/tbe/kernel/config/ascend910b/binary_info_config.json +0 -38
- mindspore/lib/plugin/ascend/custom_ascendc_ops/op_impl/ai_core/tbe/op_tiling/lib/linux/x86_64/libcust_opmaster_rt2.0.so +0 -0
- mindspore/lib/plugin/ascend/custom_ascendc_ops/op_impl/ai_core/tbe/op_tiling/liboptiling.so +0 -0
- mindspore/lib/plugin/ascend/custom_ascendc_ops/op_proto/lib/linux/x86_64/libcust_opsproto_rt2.0.so +0 -0
- mindspore/lib/plugin/ascend/custom_ascendc_ops/version.info +0 -1
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/acme/include/acme.h +0 -24
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/acme/include/acme_op.h +0 -82
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/acme/include/op_creator.h +0 -113
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/acme/include/op_param.h +0 -193
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/acme/src/core/dtype_registry.h +0 -90
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/acme/src/core/kernel_register.h +0 -46
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/acme/src/core/platform/platform_configs.h +0 -89
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/acme/src/core/platform/rt_funcs.h +0 -135
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/acme/src/ops/host_src/add_layer_norm_op.h +0 -60
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/acme/src/ops/host_src/add_rms_norm_op.h +0 -50
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/acme/src/ops/host_src/add_rms_norm_quant_op.h +0 -50
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/acme/src/ops/host_src/apply_rotary_pos_emb_nz_op.h +0 -42
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/acme/src/ops/host_src/apply_rotary_pos_emb_op.h +0 -55
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/acme/src/ops/host_src/asd_elewise_op.h +0 -34
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/acme/src/ops/host_src/asd_only_ops.h +0 -94
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/acme/src/ops/host_src/asd_op_base.h +0 -97
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/acme/src/ops/host_src/cast_op.h +0 -52
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/acme/src/ops/host_src/flash_attention_score_op.h +0 -92
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/acme/src/ops/host_src/gelu_op.h +0 -44
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/acme/src/ops/host_src/matmul_add_rmsnorm_op.h +0 -73
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/acme/src/ops/host_src/matmul_op.h +0 -108
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/acme/src/ops/host_src/multi_impls_op.h +0 -64
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/acme/src/ops/host_src/multi_weight_matmul_op.h +0 -91
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/acme/src/ops/host_src/paged_attention_op.h +0 -99
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/acme/src/ops/host_src/reshape_and_cache_nz_op.h +0 -44
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/acme/src/ops/host_src/reshape_and_cache_op.h +0 -44
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/acme/src/ops/host_src/rms_norm_op.h +0 -64
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/acme/src/utils/asd_utils.h +0 -179
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/acme/src/utils/comm_utils.h +0 -69
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/acme/src/utils/profiling_util.h +0 -366
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/add/add_impl.h +0 -56
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/add/kernel/add.h +0 -21
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/add/tiling/add_tiling.h +0 -43
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/apply_rotary_pos_emb/apply_rotary_pos_emb_impl.h +0 -46
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/apply_rotary_pos_emb/kernel/apply_rotary_pos_emb.h +0 -23
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/apply_rotary_pos_emb/kernel/apply_rotary_pos_emb_base.h +0 -456
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/apply_rotary_pos_emb/kernel/apply_rotary_pos_emb_bf16.h +0 -217
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/apply_rotary_pos_emb/kernel/apply_rotary_pos_emb_fp.h +0 -391
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/apply_rotary_pos_emb/kernel/apply_rotary_pos_emb_fp16.h +0 -126
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/apply_rotary_pos_emb/kernel/apply_rotary_pos_emb_fp32.h +0 -230
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/apply_rotary_pos_emb/kernel/apply_rotary_pos_emb_tiling.h +0 -43
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/apply_rotary_pos_emb/kernel/apply_rotary_pos_emb_value.h +0 -27
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/apply_rotary_pos_emb_nz/apply_rotary_pos_emb_nz_impl.h +0 -34
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/apply_rotary_pos_emb_nz/kernel/apply_rotary_pos_emb_nz.h +0 -23
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/apply_rotary_pos_emb_nz/kernel/apply_rotary_pos_emb_nz_base.h +0 -460
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/apply_rotary_pos_emb_nz/kernel/apply_rotary_pos_emb_nz_fp16.h +0 -116
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/apply_rotary_pos_emb_nz/kernel/apply_rotary_pos_emb_nz_fp32.h +0 -230
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/apply_rotary_pos_emb_nz/kernel/apply_rotary_pos_emb_nz_tiling.h +0 -43
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/apply_rotary_pos_emb_nz/kernel/apply_rotary_pos_emb_nz_value.h +0 -27
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/asdop/asd_op_impl.h +0 -74
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/backend_param.h +0 -74
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/cast/cast_impl.h +0 -48
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/cast/kernel/cast_kernel.h +0 -21
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/compare/compare_impl.h +0 -55
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/compare/compare_tiling.h +0 -27
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/compare/kernel/compare_kernel.h +0 -23
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/elewise_binary/and_impl.h +0 -29
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/elewise_binary/div_impl.h +0 -29
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/elewise_binary/elewise_binary_impl.h +0 -48
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/elewise_binary/elewise_binary_tiling.h +0 -25
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/elewise_binary/kernel/and_kernel.h +0 -46
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/elewise_binary/kernel/div_kernel.h +0 -46
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/elewise_binary/kernel/elewise_binary_base.h +0 -260
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/elewise_binary/kernel/elewise_binary_kernel.h +0 -35
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/elewise_binary/kernel/max_kernel.h +0 -66
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/elewise_binary/kernel/min_kernel.h +0 -66
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/elewise_binary/kernel/mul_kernel.h +0 -66
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/elewise_binary/kernel/or_kernel.h +0 -46
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/elewise_binary/max_impl.h +0 -29
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/elewise_binary/min_impl.h +0 -29
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/elewise_binary/mul_impl.h +0 -29
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/elewise_binary/or_impl.h +0 -29
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/elewise_unary/abs_impl.h +0 -29
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/elewise_unary/elewise_unary_impl.h +0 -47
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/elewise_unary/elewise_unary_tiling.h +0 -24
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/elewise_unary/exp_impl.h +0 -29
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/elewise_unary/kernel/abs_kernel.h +0 -45
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/elewise_unary/kernel/elewise_unary_base.h +0 -148
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/elewise_unary/kernel/elewise_unary_kernel.h +0 -31
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/elewise_unary/kernel/exp_kernel.h +0 -45
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/elewise_unary/kernel/ln_kernel.h +0 -45
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/elewise_unary/kernel/not_kernel.h +0 -45
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/elewise_unary/kernel/reciprocal_kernel.h +0 -45
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/elewise_unary/kernel/relu_kernel.h +0 -55
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/elewise_unary/kernel/rsqrt_kernel.h +0 -45
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/elewise_unary/kernel/sqrt_kernel.h +0 -45
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/elewise_unary/ln_impl.h +0 -29
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/elewise_unary/not_impl.h +0 -29
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/elewise_unary/reciprocal_impl.h +0 -29
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/elewise_unary/relu_impl.h +0 -29
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/elewise_unary/rsqrt_impl.h +0 -29
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/elewise_unary/sqrt_impl.h +0 -29
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/flash_attention_score/flash_attention_score_impl.h +0 -68
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/internal_kernel.h +0 -99
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/internal_rtbackend.h +0 -21
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/lccl/lccl_wrapper.h +0 -58
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/ms_int_types.h +0 -91
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/ms_int_utils.h +0 -108
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/paged_attention/paged_attention_impl.h +0 -64
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/param/add_param.h +0 -68
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/param/attention_param.h +0 -40
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/param/cast_param.h +0 -30
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/param/compare_param.h +0 -31
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/param/elewise_param.h +0 -41
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/param/grouped_matmul_param.h +0 -40
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/param/matmul_ext_param.h +0 -38
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/param/matmul_qkv_param.h +0 -42
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/param/sub_param.h +0 -33
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/profiling_util.h +0 -377
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/reshape_and_cache_nz/kernel/reshape_and_cache_nz.h +0 -24
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/reshape_and_cache_nz/reshape_and_cache_nz_impl.h +0 -42
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/reshape_and_cache_nz/reshape_and_cache_nz_tiling.h +0 -27
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/rms_norm/rms_norm_impl.h +0 -46
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/sub/kernel/sub_kernel.h +0 -20
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/sub/sub_impl.h +0 -48
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/sub/sub_tiling.h +0 -25
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/tune_repo/matmul_table.h +0 -399
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/tune_repo/utils.h +0 -41
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/utils/backend.h +0 -45
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/utils/elewise_tiling.h +0 -29
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/utils/elewise_utils.h +0 -30
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/utils/log/log.h +0 -69
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/utils/log/log_core.h +0 -43
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/utils/log/log_entity.h +0 -38
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/utils/log/log_sink.h +0 -69
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/utils/log/log_stream.h +0 -41
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/utils/log/log_tiling.h +0 -71
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/utils/log/log_utils.h +0 -165
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/utils/math.h +0 -20
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/utils/register/kernel_creator.h +0 -39
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/utils/register/kernel_registry.h +0 -121
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/utils/utils.h +0 -106
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/lib/libAdd_impl.so +0 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/lib/libSub_impl.so +0 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/lib/libadd_layer_norm_impl.so +0 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/lib/libadd_rms_norm_quant_acme_impl.so +0 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/lib/libapply_rotary_pos_emb_310p_old_impl.so +0 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/lib/libapply_rotary_pos_emb_old_impl.so +0 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/lib/libreshape_and_cache_nz_impl.so +0 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/lib/libreshape_and_cache_nz_old_impl.so +0 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/op_kernels/ascend910b/AcmeMatMulPostFusionMixTactic/acme_matmul_postfusion_mix.json +0 -19
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/op_kernels/ascend910b/AcmeMatMulPostFusionMixTactic/acme_matmul_postfusion_mix.o +0 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/op_kernels/ascend910b/AcmeMatMulPostFusionMixTactic/acme_matmul_postfusion_mix_mix_aic_0.o +0 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/op_kernels/ascend910b/AcmeMatMulPostFusionMixTactic/acme_matmul_postfusion_mix_mix_aiv_0.o +0 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/op_kernels/ascend910b/AcmeMultiWeightMatMulPostFusionMixTactic/acme_multi_weight_matmul_postfusion_mix.json +0 -19
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/op_kernels/ascend910b/AcmeMultiWeightMatMulPostFusionMixTactic/acme_multi_weight_matmul_postfusion_mix.o +0 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/op_kernels/ascend910b/AcmeMultiWeightMatMulPostFusionMixTactic/acme_multi_weight_matmul_postfusion_mix_mix_aic_0.o +0 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/op_kernels/ascend910b/AcmeMultiWeightMatMulPostFusionMixTactic/acme_multi_weight_matmul_postfusion_mix_mix_aiv_0.o +0 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/op_kernels/ascend910b/flash_attention_score/flash_attention_score_bf16_bnsd_full_mix.o +0 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/op_kernels/ascend910b/flash_attention_score/flash_attention_score_bf16_bnsd_tri_mix.o +0 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/op_kernels/ascend910b/flash_attention_score/flash_attention_score_bf16_bsh_full_mix.o +0 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/op_kernels/ascend910b/flash_attention_score/flash_attention_score_bf16_bsh_tri_mix.o +0 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/op_kernels/ascend910b/flash_attention_score/flash_attention_score_fp16_bnsd_full_mix.o +0 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/op_kernels/ascend910b/flash_attention_score/flash_attention_score_fp16_bnsd_tri_mix.o +0 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/op_kernels/ascend910b/flash_attention_score/flash_attention_score_fp16_bsh_full_mix.o +0 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/op_kernels/ascend910b/flash_attention_score/flash_attention_score_fp16_bsh_tri_mix.o +0 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/op_kernels/ascend910b/matmul_add_rmsnorm/matmul_add_rmsnorm_bf16_bf16.o +0 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/op_kernels/ascend910b/matmul_add_rmsnorm/matmul_add_rmsnorm_bf16_fp16.o +0 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/op_kernels/ascend910b/matmul_add_rmsnorm/matmul_add_rmsnorm_bf16_fp32.o +0 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/op_kernels/ascend910b/matmul_add_rmsnorm/matmul_add_rmsnorm_fp16_bf16.o +0 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/op_kernels/ascend910b/matmul_add_rmsnorm/matmul_add_rmsnorm_fp16_fp16.o +0 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/op_kernels/ascend910b/matmul_add_rmsnorm/matmul_add_rmsnorm_fp16_fp32.o +0 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/op_kernels/ascend910b/paged_attention/paged_attention_bf16_bnsd_mix.o +0 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/op_kernels/ascend910b/paged_attention/paged_attention_bf16_bsh_mix.o +0 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/op_kernels/ascend910b/paged_attention/paged_attention_fp16_bnsd_mix.o +0 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/op_kernels/ascend910b/paged_attention/paged_attention_fp16_bsh_mix.o +0 -0
- mindspore/profiler/envprofiling.py +0 -254
- mindspore/profiler/profiling.py +0 -1926
- /mindspore/lib/plugin/ascend/{custom_ascendc_ops → custom_ascendc_910}/op_api/include/aclnn_decoder_kv_cache.h +0 -0
- /mindspore/lib/plugin/ascend/{custom_ascendc_ops → custom_ascendc_910}/op_api/include/aclnn_prompt_kv_cache.h +0 -0
- /mindspore/lib/plugin/ascend/{custom_ascendc_ops/op_impl/ai_core/tbe/custom_ascendc_ops_impl → custom_ascendc_910/op_impl/ai_core/tbe/custom_ascendc_910_impl}/dynamic/decoder_kv_cache.cpp +0 -0
- /mindspore/lib/plugin/ascend/{custom_ascendc_ops/op_impl/ai_core/tbe/custom_ascendc_ops_impl → custom_ascendc_910/op_impl/ai_core/tbe/custom_ascendc_910_impl}/dynamic/prompt_kv_cache.cpp +0 -0
- /mindspore/lib/plugin/ascend/{custom_ascendc_ops → custom_ascendc_910b}/op_api/include/aclnn_all_finite.h +0 -0
- /mindspore/lib/plugin/ascend/{custom_ascendc_ops → custom_ascendc_910b}/op_impl/ai_core/tbe/config/ascend310p/aic-ascend310p-ops-info.json +0 -0
- /mindspore/lib/plugin/ascend/{custom_ascendc_ops/op_impl/ai_core/tbe/config/ascend910/aic-ascend910-ops-info.json → custom_ascendc_910b/op_impl/ai_core/tbe/config/ascend910_93/aic-ascend910_93-ops-info.json} +0 -0
- /mindspore/lib/plugin/ascend/{custom_ascendc_ops → custom_ascendc_910b}/op_impl/ai_core/tbe/config/ascend910b/aic-ascend910b-ops-info.json +0 -0
- /mindspore/lib/plugin/ascend/{custom_ascendc_ops/op_impl/ai_core/tbe/custom_ascendc_ops_impl → custom_ascendc_910b/op_impl/ai_core/tbe/custom_ascendc_910b_impl}/dynamic/all_finite.cpp +0 -0
- /mindspore/lib/plugin/ascend/{custom_ascendc_ops → custom_ascendc_910b}/op_proto/inc/op_proto.h +0 -0
- {mindspore-2.4.1.dist-info → mindspore-2.5.0.dist-info}/WHEEL +0 -0
- {mindspore-2.4.1.dist-info → mindspore-2.5.0.dist-info}/top_level.txt +0 -0
mindspore/context.py
CHANGED
@@ -26,7 +26,7 @@ from collections import namedtuple
 from types import FunctionType
 
 from mindspore import log as logger
-from mindspore._c_expression import MSContext, ms_ctx_param
+from mindspore._c_expression import MSContext, ms_ctx_param, CollectiveManager
 from mindspore import _checkparam as Validator
 from mindspore._checkparam import args_type_check
 from mindspore.parallel._auto_parallel_context import _set_auto_parallel_context, _get_auto_parallel_context, \
@@ -254,9 +254,9 @@ class _Context:
 
     def set_exec_order(self, exec_order):
         """
-        The execution order mode, support "bfs", "dfs"
+        The execution order mode, support "bfs", "dfs".
         """
-        exec_order_modes = ["bfs", "dfs"
+        exec_order_modes = ["bfs", "dfs"]
         if exec_order not in exec_order_modes:
             raise ValueError(f"For 'context.set_context', the argument 'exec_order' must be one of "
                              f"{exec_order_modes}, but got {exec_order}.")
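The `exec_order` value is exposed through `set_context` (it also appears in the parameter table later in this diff). A minimal usage sketch, assuming only the two string values validated above:

    import mindspore as ms

    # Only "bfs" and "dfs" pass the validation in set_exec_order().
    ms.set_context(exec_order="bfs")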
@@ -289,6 +289,11 @@ class _Context:
         if deterministic not in deterministic_options:
             raise ValueError(f"For 'context.set_context', the argument 'deterministic' must be one of "
                              f"{deterministic_options}, but got {deterministic}.")
+
+        # Must wait for all async created groups to be initialized so that
+        # deterministic feature could be consistent between all processes.
+        CollectiveManager.get_instance().wait_all_comm_init()
+
         self.set_param(ms_ctx_param.deterministic, deterministic)
 
         hccl_deterministic = os.getenv("HCCL_DETERMINISTIC")
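A hedged sketch of the call ordering this change implies for distributed jobs: the 2.5.0 docstring later in this diff suggests enabling deterministic mode before creating communication groups, and the hunk above makes `set_context` wait for any asynchronously created groups first.

    import mindspore as ms
    from mindspore import communication

    ms.set_context(deterministic="ON")   # in 2.5.0 this waits for async-created comm groups
    communication.init()                 # the global communication group is created afterwards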
@@ -846,6 +851,8 @@ class _Context:
                 (ms_ctx_param.enable_allreduce_slice_to_reducescatter, bool),
             "enable_interleave_split_concat_branch":
                 (ms_ctx_param.enable_interleave_split_concat_branch, bool),
+            "enable_interleave_parallel_branch":
+                (ms_ctx_param.enable_interleave_parallel_branch, bool),
             "enable_offloading_packed_experts": (ms_ctx_param.enable_offloading_packed_experts, bool),
             "compute_communicate_fusion_level":
                 (ms_ctx_param.compute_communicate_fusion_level, int),
@@ -936,6 +943,8 @@ def set_auto_parallel_context(**kwargs):
                                 \ group_ckpt_save_file
                                 \ auto_pipeline
                                 \ dump_local_norm
+                                \ dump_local_norm_path
+                                \ dump_device_local_norm
     =========================== ===========================
 
     Args:
@@ -991,7 +1000,8 @@ def set_auto_parallel_context(**kwargs):
             dataset_strategy="data_parallel" is equal to full_batch=False, dataset_strategy="full_batch" is
             equal to full_batch=True. For execution mode is 'GRAPH_MODE' and dataset load into net by model
             parallel strategy likes ds_stra ((1, 8), (1, 8)), it requires using
-            set_auto_parallel_context(dataset_strategy=ds_stra).
+            set_auto_parallel_context(dataset_strategy=ds_stra). The dataset sharding strategy is not
+            affected by the currently configured parallel mode.
         enable_parallel_optimizer (bool): This is a developing feature, which shards the weight update computation for
             data parallel training in the benefit of time and memory saving. Currently, auto and semi auto
             parallel mode support all optimizers in both Ascend and GPU. Data parallel mode only supports
@@ -1015,7 +1025,7 @@ def set_auto_parallel_context(**kwargs):
 
             - pipeline_interleave(bool): Indicates whether to enable the interleaved execution mode.
             - pipeline_scheduler(str): Indicates the scheduling mode for pipeline parallelism. Only support
-              ``gpipe/1f1b``.
+              ``gpipe/1f1b/seqpipe``.
         parallel_optimizer_config (dict): A dict contains the keys and values for setting the parallel optimizer
             configure. The configure provides more detailed behavior control about parallel training
             when parallel optimizer is enabled. The configure will be effective when we use
@@ -1090,6 +1100,11 @@ def set_auto_parallel_context(**kwargs):
         dump_local_norm (bool): Whether to dump local_norm value, when the `parallel_mode` is set to
             ``semi_auto_parallel`` or ``auto_parallel``.
             Default: ``False`` .
+        dump_local_norm_path (str): The path to save dump files of local_norm value.
+            Default: ``''`` .
+        dump_device_local_norm (bool): Whether to dump device_local_norm value, when the `parallel_mode` is set to
+            ``semi_auto_parallel`` or ``auto_parallel``.
+            Default: ``False`` .
 
     Raises:
         ValueError: If input key is not attribute in auto parallel context.
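A minimal sketch of the new dump switches documented above; the parameter names come from the docstring in this hunk, while the path and mode values are illustrative only:

    import mindspore as ms

    ms.set_auto_parallel_context(parallel_mode="semi_auto_parallel",
                                 dump_local_norm=True,
                                 dump_local_norm_path="./local_norm_dump",
                                 dump_device_local_norm=True)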
@@ -1165,8 +1180,10 @@ def reset_auto_parallel_context():
     - pipeline_stages: 1.
     - pipeline_result_broadcast: False.
     - fusion_threshold: 64.
-    - dump_local_norm: False.
     - auto_pipeline: False.
+    - dump_local_norm: False.
+    - dump_local_norm_path: ''.
+    - dump_device_local_norm: False.
 
     Examples:
         >>> import mindspore as ms
@@ -1179,7 +1196,8 @@ def reset_auto_parallel_context():
 @args_type_check(offload_config=dict)
 def set_offload_context(offload_config):
     r"""
-    Configure heterogeneous training detailed parameters to adjust the offload strategy.
+    Configure heterogeneous training detailed parameters to adjust the offload strategy. This function is deprecated and
+    will be removed in future versions.
 
     Note:
         The offload configuration is only used if the memory offload feature is enabled
@@ -1220,7 +1238,8 @@ def set_offload_context(offload_config):
 def get_offload_context():
     """
     Gets the offload configuration parameters. Configure through interface mindspore.set_offload_context().
-    If the user is not set, the default configuration is obtained.
+    If the user is not set, the default configuration is obtained. This function is deprecated and will be removed in
+    future versions.
 
     Returns:
         Dict, heterogeneous training offload detailed configuration parameters.
@@ -1235,8 +1254,6 @@ def get_offload_context():
 def _check_target_specific_cfgs(device, arg_key):
     """Checking whether a config is suitable for a specified device"""
     device_cfgs = {
-        'enable_graph_kernel': ['Ascend', 'GPU', 'CPU'],
-        'graph_kernel_flags': ['Ascend', 'GPU', 'CPU'],
         'enable_reduce_precision': ['Ascend'],
         'print_file_path': ['Ascend'],
         'variable_memory_max_size': ['Ascend'],
@@ -1275,9 +1292,52 @@ def _check_key(key):
         raise ValueError(f"Please set '{key}' through parameter ascend_config")
 
 
+def _check_context_deprecated(key):
+    """Checking whether a context key will be deprecated."""
+    deprecated_context_dict = {'save_graphs': 'env MS_DEV_SAVE_GRAPHS',
+                               'save_graphs_path': 'env MS_DEV_SAVE_GRAPHS_PATH',
+                               'precompile_only': 'env MS_DEV_PRECOMPILE_ONLY',
+                               'check_bprop': '',
+                               'max_call_depth': 'api mindspore.set_recursion_limit()',
+                               'grad_for_scalar': 'tensor derivative',
+                               'enable_compile_cache': 'env MS_COMPILER_CACHE_ENABLE',
+                               'enable_cache_path': 'env MS_COMPILER_CACHE_PATH',
+                               'debug_level': '',
+                               'device_target': 'api mindspore.set_device()',
+                               'device_id': 'api mindspore.set_device()',
+                               'deterministic': 'api mindspore.set_deterministic()',
+                               'inter_op_parallel_num': 'api mindspore.runtime.dispatch_threads_num()',
+                               'pynative_synchronize': 'api mindspore.runtime.launch_blocking()',
+                               'max_device_memory': 'api mindspore.runtime.set_memory()',
+                               'variable_memory_max_size': 'api mindspore.runtime.set_memory()',
+                               'mempool_block_size': 'api mindspore.runtime.set_memory()',
+                               'memory_optimize_level': 'api mindspore.runtime.set_memory()',
+                               'ascend_config': '''api mindspore.device_context.ascend.op_precision.precision_mode(),
+                               mindspore.device_context.ascend.op_precision.op_precision_mode(),
+                               mindspore.device_context.ascend.op_precision.matmul_allow_hf32(),
+                               mindspore.device_context.ascend.op_precision.conv_allow_hf32(),
+                               mindspore.device_context.ascend.op_tuning.op_compile()''',
+                               'aoe_tune_mode': 'api mindspore.device_context.ascend.op_tuning.aoe_tune_mode()',
+                               'aoe_config': 'api mindspore.device_context.ascend.op_tuning.aoe_job_type()',
+                               'op_timeout': 'api mindspore.device_context.ascend.op_debug.execute_timeout()',
+                               'op_debug_option': 'api mindspore.device_context.ascend.op_debug.debug_option()',
+                               'gpu_config': '''api mindspore.device_context.gpu.op_precision.conv_allow_tf32(),
+                               mindspore.device_context.gpu.op_precision.matmul_allow_tf32(),
+                               mindspore.device_context.gpu.op_precision.conv_fprop_algo(),
+                               mindspore.device_context.gpu.op_precision.conv_wgrad_algo(),
+                               mindspore.device_context.gpu.op_precision.conv_dgrad_algo()''',
+                               'runtime_num_threads': 'api mindspore.device_context.cpu.op_tuning.threads_num()',
+                               'memory_offload': "`device` parameter of `mindspore.Parameter`"}
+    if key in deprecated_context_dict:
+        log = f"For 'context.set_context', the parameter '{key}' will be deprecated and removed in a future version."
+        if deprecated_context_dict.get(key) != '':
+            log += f" Please use the {deprecated_context_dict.get(key)} instead."
+        logger.warning(log)
+
+
 @args_type_check(mode=int, precompile_only=bool, device_target=str, device_id=int, save_graphs=(bool, int),
-                 save_graphs_path=str,
-
+                 save_graphs_path=str, aoe_tune_mode=str, aoe_config=dict,
+                 enable_reduce_precision=bool, variable_memory_max_size=str,
                  enable_auto_mixed_precision=bool, inter_op_parallel_num=int,
                  enable_graph_kernel=bool, reserve_class_name_in_scope=bool, check_bprop=bool,
                  max_device_memory=str, print_file_path=str, max_call_depth=int, env_config_path=str,
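A hedged migration sketch based on the mapping encoded in `_check_context_deprecated` above. The replacement module and function names are taken from that dict; the exact call signatures are not part of this diff and are assumed here:

    import mindspore as ms

    # 2.4.x style (now logs a deprecation warning via _check_context_deprecated):
    ms.set_context(device_target="Ascend", device_id=0, max_device_memory="28GB")

    # 2.5.0 style suggested by the mapping (argument forms are assumptions):
    ms.set_device("Ascend", 0)                # replaces device_target / device_id
    ms.runtime.set_memory(max_size="28GB")    # replaces max_device_memory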
@@ -1286,7 +1346,7 @@ def _check_key(key):
                 op_timeout=int, deterministic=str, ascend_config=dict, jit_syntax_level=int, debug_level=int,
                 jit_enable_inplace_ops=bool, gpu_config=dict, jit_config=dict, enable_compile_cache=bool)
 def set_context(**kwargs):
-    """
+    r"""
     Set context for running environment.
 
     Context should be configured before running your program. If there is no configuration,
@@ -1297,105 +1357,127 @@ def set_context(**kwargs):
         The mode is not recommended to be changed after net was initialized because the implementations of some
         operations are different in graph mode and pynative mode. Default: ``PYNATIVE_MODE`` .
 
-    Some configurations are device specific,
-    [... 82 removed lines: the previous parameter table in this docstring; cell contents not recoverable from this rendering ...]
+    Some configurations are device specific, and some parameters will be deprecated and removed in the future version
+    (marked ``D`` in the second column), please use the replacement in the fourth column.
+    see the below table for details:
+
+    +-------------------------+------------------------------+---------------------------+----------------------------+
+    | Function Classification | Configuration Parameters     | Hardware Platform Support | Replacement                |
+    +=========================+==============================+===========================+============================+
+    | System Configuration    | device_id (D)                | CPU/GPU/Ascend            | :func:`~.set_device`       |
+    |                         +------------------------------+---------------------------+----------------------------+
+    |                         | device_target (D)            | CPU/GPU/Ascend            | :func:`~.set_device`       |
+    |                         +------------------------------+---------------------------+----------------------------+
+    |                         | max_device_memory(D)         | GPU/Ascend                | :func:`~.set_memory`       |
+    |                         +------------------------------+---------------------------+----------------------------+
+    |                         | variable_memory_max_size (D) | Ascend                    | :func:`~.set_memory`       |
+    |                         +------------------------------+---------------------------+----------------------------+
+    |                         | mempool_block_size (D)       | GPU/Ascend                | :func:`~.set_memory`       |
+    |                         +------------------------------+---------------------------+----------------------------+
+    |                         | op_timeout (D)               | Ascend                    | :func:`~.execute_timeout`  |
+    +-------------------------+------------------------------+---------------------------+----------------------------+
+    | Debug Configuration     | save_graphs (D)              | CPU/GPU/Ascend            | MS_DEV_SAVE_GRAPHS         |
+    |                         +------------------------------+---------------------------+----------------------------+
+    |                         | save_graphs_path (D)         | CPU/GPU/Ascend            | MS_DEV_SAVE_GRAPHS_PATH    |
+    |                         +------------------------------+---------------------------+----------------------------+
+    |                         | deterministic (D)            | Ascend                    |:func:`~.set_deterministic` |
+    |                         +------------------------------+---------------------------+----------------------------+
+    |                         | print_file_path              | Ascend                    | NA                         |
+    |                         +------------------------------+---------------------------+----------------------------+
+    |                         | env_config_path              | CPU/GPU/Ascend            | NA                         |
+    |                         +------------------------------+---------------------------+----------------------------+
+    |                         | precompile_only (D)          | CPU/GPU/Ascend            | MS_DEV_PRECOMPILE_ONLY     |
+    |                         +------------------------------+---------------------------+----------------------------+
+    |                         | reserve_class_name_in_scope  | CPU/GPU/Ascend            | NA                         |
+    |                         +------------------------------+---------------------------+----------------------------+
+    |                         | pynative_synchronize (D)     | CPU/GPU/Ascend            | :func:`~.launch_blocking`  |
+    |                         +------------------------------+---------------------------+----------------------------+
+    |                         | debug_level (D)              | CPU/GPU/Ascend            | NA                         |
+    +-------------------------+------------------------------+---------------------------+----------------------------+
+    | Executive Control       | mode                         | CPU/GPU/Ascend            | NA                         |
+    |                         +------------------------------+---------------------------+----------------------------+
+    |                         | enable_reduce_precision      | Ascend                    | NA                         |
+    |                         +------------------------------+---------------------------+----------------------------+
+    |                         | aoe_tune_mode (D)            | Ascend                    | :func:`~.aoe_tune_mode`    |
+    |                         +------------------------------+---------------------------+----------------------------+
+    |                         | aoe_config (D)               | Ascend                    | :func:`~.aoe_job_type`     |
+    |                         +------------------------------+---------------------------+----------------------------+
+    |                         | check_bprop (D)              | CPU/GPU/Ascend            | NA                         |
+    |                         +------------------------------+---------------------------+----------------------------+
+    |                         | max_call_depth (D)           | CPU/GPU/Ascend            | :func:`~.set_recur\        |
+    |                         |                              |                           | sion_limit`                |
+    |                         +------------------------------+---------------------------+----------------------------+
+    |                         | grad_for_scalar (D)          | CPU/GPU/Ascend            | derivative                 |
+    |                         +------------------------------+---------------------------+----------------------------+
+    |                         | enable_compile_cache (D)     | CPU/GPU/Ascend            | MS_COMPILER_CACHE_ENABLE   |
+    |                         +------------------------------+---------------------------+----------------------------+
+    |                         | inter_op_parallel_num (D)    | CPU/GPU/Ascend            | :func:`~.dispatch\         |
+    |                         |                              |                           | _threads_num`              |
+    |                         +------------------------------+---------------------------+----------------------------+
+    |                         | runtime_num_threads (D)      | CPU/GPU/Ascend            | :func:`~.threads_num`      |
+    |                         +------------------------------+---------------------------+----------------------------+
+    |                         | compile_cache_path           | CPU/GPU/Ascend            | NA                         |
+    |                         +------------------------------+---------------------------+----------------------------+
+    |                         | disable_format_transform     | GPU                       | NA                         |
+    |                         +------------------------------+---------------------------+----------------------------+
+    |                         | support_binary               | CPU/GPU/Ascend            | NA                         |
+    |                         +------------------------------+---------------------------+----------------------------+
+    |                         | memory_optimize_level (D)    | CPU/GPU/Ascend            | :func:`~.set_memory`       |
+    |                         +------------------------------+---------------------------+----------------------------+
+    |                         | memory_offload               | GPU/Ascend                | NA                         |
+    |                         +------------------------------+---------------------------+----------------------------+
+    |                         | ascend_config (D)            | Ascend                    | :func:`~.precision_mode`   |
+    |                         |                              |                           |                            |
+    |                         |                              |                           | :func:`~.op_precision_mode`|
+    |                         |                              |                           |                            |
+    |                         |                              |                           | :func:`~.matmul_allow_hf32`|
+    |                         |                              |                           |                            |
+    |                         |                              |                           | :func:`~.conv_allow_hf32`  |
+    |                         |                              |                           |                            |
+    |                         |                              |                           | :func:`~.op_compile`       |
+    |                         |                              |                           |                            |
+    |                         |                              |                           | :func:`~.debug_option`     |
+    |                         +------------------------------+---------------------------+----------------------------+
+    |                         | jit_syntax_level             | CPU/GPU/Ascend            | NA                         |
+    |                         +------------------------------+---------------------------+----------------------------+
+    |                         | gpu_config (D)               | GPU                       | :func:`~.conv_allow_tf32`  |
+    |                         |                              |                           |                            |
+    |                         |                              |                           | :func:`~.matmul_allow_tf32`|
+    |                         |                              |                           |                            |
+    |                         |                              |                           | :func:`~.conv_fprop_algo`  |
+    |                         |                              |                           |                            |
+    |                         |                              |                           | :func:`~.conv_wgrad_algo`  |
+    |                         |                              |                           |                            |
+    |                         |                              |                           | :func:`~.conv_dgrad_algo`  |
+    |                         +------------------------------+---------------------------+----------------------------+
+    |                         | jit_config                   | CPU/GPU/Ascend            | NA                         |
+    |                         +------------------------------+---------------------------+----------------------------+
+    |                         | exec_order                   | Ascend                    | NA                         |
+    +-------------------------+------------------------------+---------------------------+----------------------------+
 
     Args:
         device_id (int): ID of the target device, the value must be in [0, device_num_per_host-1],
-            while device_num_per_host should be no more than 4096. Default: ``0`` .
+            while device_num_per_host should be no more than 4096. Default: ``0`` . This parameter will be deprecated
+            and will be removed in future versions.Please use api :func:`mindspore.set_device`
+            with 'device_target' instead.
         device_target (str): The target device to run, support "Ascend", "GPU", and "CPU".
-            If device target is not set, the version of MindSpore package is used.
+            If device target is not set, the version of MindSpore package is used. This parameter will be deprecated
+            and will be removed in future versions.Please use api :func:`mindspore.set_device`
+            with 'device_id' instead.
         max_device_memory (str): Set the maximum memory available for devices. The format is "xxGB".
             Default: ``"1024GB"`` . The actual used memory size is the minimum of the available memory of the device
             and max_device_memory. 'max_device_memory' should be set before the program runs. When virtual memory is
             enabled, a too small 'max_device_memory' will cause frequent defragmentation, affecting performance.
-
-
+            This parameter will be deprecated and will be removed in future versions. Please use the
+            api :func:`mindspore.runtime.set_memory` instead.
+        variable_memory_max_size (str): This parameter will be deprecated and will be removed in future versions. Please
+            use the api :func:`mindspore.runtime.set_memory` instead.
         mempool_block_size (str): It takes effect when virtual memory is turned off, set the size of the memory pool
             block for devices. The format is "xxGB". Default: ``"1GB"`` . Minimum size is "1G". The actual used memory
             block size is the minimum of the available memory of the device and mempool_block_size. When there is
             enough memory, the memory will be expanded by this value.
+            This parameter will be deprecated and will be removed in future versions. Please use the
+            api :func:`mindspore.runtime.set_memory` instead.
         op_timeout (int): Set the maximum duration of executing an operator in seconds.
             If the execution time exceeds this value, system will terminate the task.
             0 means endless wait. The defaults for AI Core and AICPU operators vary on different hardware.
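A hedged sketch of the replacements named in the table above for a few commonly used keys. The module paths and function names come from the table; the argument forms are assumptions, since the new signatures are not part of this diff:

    import mindspore as ms

    ms.set_deterministic(True)                              # instead of set_context(deterministic="ON")
    ms.runtime.launch_blocking()                            # instead of set_context(pynative_synchronize=True)
    ms.device_context.ascend.op_debug.execute_timeout(900)  # instead of set_context(op_timeout=900)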
@@ -1403,6 +1485,8 @@ def set_context(**kwargs):
             please refer to `Ascend Community document about aclrtSetOpExecuteTimeOut
             <https://www.hiascend.com/document/detail/en/CANNCommunityEdition/600alphaX/infacldevg/aclcppdevg/aclcppdevg_03_0069.html>`_.
             Default: ``900`` .
+            This parameter will be deprecated and will be removed in future versions. Please use the
+            api :func:`mindspore.device_context.ascend.op_debug.execute_timeout` instead.
         save_graphs (bool or int): Whether to save intermediate compilation graphs. Default: ``0`` .
             Available values are:
 
@@ -1417,10 +1501,14 @@ def set_context(**kwargs):
             When the `save_graphs` attribute is set as ``True`` , ``1`` , ``2`` or ``3`` , attribute of
             `save_graphs_path` is used to set the intermediate compilation graph storage path. By default, the graphs
             are saved in the current directory.
+            This parameter will be deprecated and removed in a future version. Please use the environment variable
+            `MS_DEV_SAVE_GRAPHS` instead.
         save_graphs_path (str): Path to save graphs. Default: ``"."``.
             If the specified directory does not exist, the system will automatically create the directory.
             During distributed training, graphs will be saved to the directory of
             `save_graphs_path/rank_${rank_id}/`. `rank_id` is the ID of the current device in the cluster.
+            This parameter will be deprecated and removed in a future version. Please use the environment variable
+            `MS_DEV_SAVE_GRAPHS_PATH` instead.
         deterministic (str): Whether to enable op run in deterministic mode. The value must be in the
             range of ['ON', 'OFF'], and the default value is ``'OFF'`` .
 
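For completeness, a minimal sketch of the environment-variable replacements mentioned above, set from Python before MindSpore is imported; the variable names come from the docstring, the values are illustrative:

    import os

    os.environ["MS_DEV_SAVE_GRAPHS"] = "1"           # replaces set_context(save_graphs=...)
    os.environ["MS_DEV_SAVE_GRAPHS_PATH"] = "./ir"   # replaces set_context(save_graphs_path=...)

    import mindspore as ms  # import after the variables are set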
@@ -1430,96 +1518,37 @@ def set_context(**kwargs):
|
|
|
1430
1518
|
When deterministic mode is on, model ops will be deterministic in Ascend. This means that if op run
|
|
1431
1519
|
multiple times with the same inputs on the same hardware, it will have the exact same outputs each time.
|
|
1432
1520
|
This is useful for debugging models.
|
|
1433
|
-
|
|
1434
|
-
|
|
1435
|
-
|
|
1436
|
-
|
|
1437
|
-
|
|
1438
|
-
|
|
1439
|
-
|
|
1440
|
-
When print data to file, the total output bytes of single print must be less then 2GB(limited by
|
|
1441
|
-
protobuf).
|
|
1442
|
-
env_config_path (str): Config path for DFX.
|
|
1443
|
-
Through mindspore.set_context(env_config_path="./mindspore_config.json")
|
|
1444
|
-
|
|
1445
|
-
configure RDR:
|
|
1446
|
-
|
|
1447
|
-
- enable: controls whether the RDR is enabled to collect the key data during training and
|
|
1448
|
-
save key data in the fault scenario. When set to ``true`` , the RDR will be turned on.
|
|
1449
|
-
When set to ``false`` , the RDR will be turned off.
|
|
1450
|
-
- mode: sets the mode of RDR on exporting data. When set to ``1`` , the RDR only exports data
|
|
1451
|
-
in the fault scenario. When set to ``2`` , the RDR exports data in the fault scenario and the
|
|
1452
|
-
normal end scenario. Default: ``1`` .
|
|
1453
|
-
- path: sets the path where RDR saves data. The current path must be absolute.
|
|
1454
|
-
|
|
1455
|
-
Memory reuse:
|
|
1456
|
-
|
|
1457
|
-
- mem_Reuse: controls whether the memory reuse function is turned on. When set to ``True`` ,
|
|
1458
|
-
the memory reuse function is turned on. When set to ``False`` , the memory reuse function is turned off.
|
|
1521
|
+
In distributed scenario, we suggest user to set deterministic mode before
|
|
1522
|
+
calling :func:`mindspore.communication.init` to enable deterministic operation for
|
|
1523
|
+
communication operators in the global communication group.
|
|
1524
|
+
This parameter will be deprecated and will be removed in
|
|
1525
|
+
future versions. Please use the api :func:`mindspore.set_deterministic` instead.
|
|
1526
|
+
print_file_path (str): This parameter will be deprecated and will be removed in future versions.
|
|
1527
|
+
env_config_path (str): This parameter will be deprecated and will be removed in future versions.
|
|

 precompile_only (bool): Whether to only precompile the network. Default: ``False`` .
 If set to ``True`` , the network will only be compiled, not executed.
-
-
-
-For example:
-
-Default/net-Net1/net-Net2 (reserve_class_name_in_scope=True)
-
-Default/net/net (reserve_class_name_in_scope=False)
-
+This parameter will be deprecated and removed in a future version. Please use the environment variable
+`MS_DEV_PRECOMPILE_ONLY` instead.
+reserve_class_name_in_scope (bool): This parameter will be deprecated and will be removed in future versions.
 pynative_synchronize (bool): Whether to enable synchronous execution of the device in PyNative mode.
 Default: ``False`` . When the value is set to ``False`` , the operator is executed asynchronously on the
 device. When an error occurs in the execution of the operator, the specific error script code location
 cannot be located, when the value is set to ``True`` , the operator is executed synchronously on the
 device. It will reduce the execution performance of the program. At this time, when an error occurs in the
 execution of the operator, the location of the error script code can be located according to the call stack
-of the error.
+of the error. This parameter will be deprecated and will be removed in future versions. Please use
+the api :func:`mindspore.runtime.launch_blocking` instead.
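The `pynative_synchronize` switch above has a runtime-level replacement. A minimal sketch, assuming :func:`mindspore.runtime.launch_blocking` takes no arguments:

    from mindspore import runtime

    # Replaces ms.set_context(pynative_synchronize=True): operators launch synchronously,
    # so a failing operator is reported at the offending line of the script.
    runtime.launch_blocking()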
 mode (int): Running in GRAPH_MODE(0) or PYNATIVE_MODE(1).
 Both modes support all backends. Default: ``PYNATIVE_MODE`` .
-enable_graph_kernel (bool): Whether to enable graph kernel fusion to optimize network execution performance.
-Default: ``False`` .
-Indicates whether to enable image-computing convergence to optimize network execution performance.
-If enable_graph_kernel is set to ``True`` , acceleration can be enabled.
-For details of graph kernel fusion, please check
-`Enabling Graph Kernel Fusion
-<https://www.mindspore.cn/docs/en/master/model_train/optimize/graph_fusion_engine.html>`_.
-graph_kernel_flags (str):
-Optimization options of graph kernel fusion, and the priority is higher when it conflicts
-with enable_graph_kernel. Only for experienced users.
-For example,
-
-.. code-block::
-
-mindspore.set_context(graph_kernel_flags="--opt_level=2 --dump_as_text")
-
-Some general options:
-
-- opt_level: Set the optimization level.
-Default: ``2`` . Graph kernel fusion can be enabled equivalently by setting opt_level greater than 0.
-Available values are:
-
-- 0: disables graph kernel fusion;
-- 1: enables the basic fusion of operators;
-- 2: includes all optimizations of level 1,
-and turns on more optimizations such as CSE, arithmetic simplification and so on;
-- 3: includes all optimizations of level 2, and turns on more optimizations such as SitchingFusion,
-ParallelFusion and so on. Optimizations of this level are radical and unstable in some scenarios.
-Be caution when using this level.
-
-- dump_as_text: dumps detail info as text files. Default: ``False`` .
-- enable_cluster_ops: Add user-specified operator to the set of operators involved in fusion. For example,
-by setting ``--enable_cluster_ops=MatMul``, MatMul operator can be included in the fusion process.
-- enable_pass/disable_pass: Enable/disable user-specified custom fusion passes. See details in
-`Custom Fusion Pass
-<https://www.mindspore.cn/docs/en/master/model_train/custom_program/fusion_pass.html>`_.
-
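The removed graph-kernel keys correspond to the replacements that the new deprecation warnings later in this file point at. A minimal sketch, assuming `MS_DEV_GRAPH_KERNEL_FLAGS` accepts the same flag string that `graph_kernel_flags` used to take:

    import os
    import mindspore as ms

    # graph_kernel_flags moves to an environment variable; set it before graph compilation.
    os.environ["MS_DEV_GRAPH_KERNEL_FLAGS"] = "--opt_level=2 --dump_as_text"

    # enable_graph_kernel=True is expressed through the O1 jit level instead.
    ms.set_context(jit_config={"jit_level": "O1"})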
 enable_reduce_precision (bool): Whether to enable precision reduction.
 If the operator does not support the user-specified precision, the precision will
 be changed automatically. Default: ``True`` .
 aoe_tune_mode (str): AOE tuning mode setting, which is not set by default.
 When set to ``"online"`` , the tuning in online function is turned on.
 When set to ``"offline"`` , ge graph will be save for offline tuning.
+This parameter will be deprecated and will be removed in future versions. Please use the
+api :func:`mindspore.device_context.ascend.op_tuning.aoe_tune_mode` instead.
 aoe_config (dict): Set the parameters specific to Ascend Optimization Engine. It is not set by default.

 - job_type (str): Mode type setting, default value is ``"2"``.
@@ -1527,34 +1556,48 @@ def set_context(**kwargs):
 - ``"1"``: subgraph tuning;
 - ``"2"``: operator tuning.

+This parameter will be deprecated and will be removed in future versions. Please use the
+api :func:`mindspore.device_context.ascend.op_tuning.aoe_job_type` instead.
+
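Migration sketch for the AOE keys, assuming the new setters accept the same string values as the old `aoe_tune_mode` key and the `job_type` entry of `aoe_config` ("online"/"offline" and "1"/"2" respectively):

    from mindspore.device_context.ascend import op_tuning

    op_tuning.aoe_tune_mode("online")  # was ms.set_context(aoe_tune_mode="online")
    op_tuning.aoe_job_type("2")        # was ms.set_context(aoe_config={"job_type": "2"})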
 check_bprop (bool): Whether to check back propagation nodes. The checking ensures that the shape and dtype
 of back propagation node outputs is the same as input parameters. Default: ``False`` .
+This parameter will be deprecated and removed in a future version.
 max_call_depth (int): Specify the maximum depth of function call. Must be positive integer. Default: ``1000`` .
 The max_call_depth parameter needs to be set when the nested call is too deep or the number
 of subgraphs is too large. If max_call_depth is set larger than before, the system max stack depth should be
 set larger too, otherwise a `core dumped` exception may be raised because of system stack overflow.
+This parameter will be deprecated and removed in a future version. Please use the api
+:func:`mindspore.set_recursion_limit` instead.
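For `max_call_depth`, the replacement is a top-level function. A minimal sketch, assuming :func:`mindspore.set_recursion_limit` accepts the same positive integer:

    import mindspore as ms

    # Was ms.set_context(max_call_depth=2000); raise it when nesting or subgraph count is large.
    ms.set_recursion_limit(2000)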
 grad_for_scalar (bool): Whether to get gradient for scalar. Default: ``False`` .
 When grad_for_scalar is set to ``True`` , the function's scalar input can be derived.
 The default value is ``False`` . Because the back-end does not support scaling operations currently,
 this interface only supports simple operations that can be deduced by the front-end.
-
-
-
-
-
-
+This parameter will be deprecated and removed in a future version. Please take the tensor derivative.
+enable_compile_cache (bool): Whether to save or load the compiled cache of the graph.
+After enable_compile_cache is set to ``True`` , during the first execution, a compilation cache is
+generated and exported to a MINDIR file. When the network is executed again, if enable_compile_cache is
+still set to ``True`` and the network scripts are not changed, the compile cache is loaded.
+Note that only limited automatic detection for the changes of python scripts is supported by now,
+which means that there is a correctness risk. Default: ``False`` .
 Currently, do not support the graph which is larger than 2G after compiled.
 This is an experimental prototype that is subject to change and/or deletion.
+This parameter will be deprecated and removed in a future version. Please use the environment variable
+`MS_COMPILER_CACHE_ENABLE` instead.
 compile_cache_path (str): Path to save the compile cache. Default: ``"."``.
 If the specified directory does not exist, the system will automatically create the directory.
 The cache will be saved to the directory of `compile_cache_path/rank_${rank_id}/`. The `rank_id` is
 the ID of the current device in the cluster.
+This parameter will be deprecated and removed in a future version. Please use the environment variable
+`MS_COMPILER_CACHE_PATH` instead.
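The compile-cache switches move to environment variables. A minimal sketch, assuming `MS_COMPILER_CACHE_ENABLE` takes "1"/"0" and `MS_COMPILER_CACHE_PATH` a directory, mirroring the old context keys; both must be set before the process compiles its first graph:

    import os

    os.environ["MS_COMPILER_CACHE_ENABLE"] = "1"             # was enable_compile_cache=True
    os.environ["MS_COMPILER_CACHE_PATH"] = "./graph_cache"   # was compile_cache_path="./graph_cache"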
 inter_op_parallel_num(int): The thread number of op parallel at the same time. Default value is ``0`` ,
-which means use the default num.
+which means use the default num. This parameter will be deprecated and will be removed in future versions.
+Please use the api :func:`mindspore.runtime.dispatch_threads_num` instead.
 runtime_num_threads(int): The thread pool number of cpu kernel used in runtime,
 which must bigger than or equal to 0. Default value is ``30`` , if you run many processes at
 the same time, you should set the value smaller to avoid thread contention. If set runtime_num_threads to 1,
 the runtime asynchronous pipeline capability cannot be enabled, which may affect performance.
+This parameter will be deprecated and will be removed in future versions. Please use the
+api :func:`mindspore.device_context.cpu.op_tuning.threads_num` instead.
 disable_format_transform (bool): Whether to disable the automatic format transform function from NCHW to NHWC.
 When the network training performance of fp16 is worse than fp32, `disable_format_transform` can be set to
 ``True`` to try to improve training performance. Default: ``False`` .
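Thread-related keys split between the runtime and the CPU device context. A minimal sketch, assuming both new setters take the same integers as the old keys:

    from mindspore import runtime
    from mindspore.device_context.cpu import op_tuning

    runtime.dispatch_threads_num(4)   # was ms.set_context(inter_op_parallel_num=4)
    op_tuning.threads_num(10)         # was ms.set_context(runtime_num_threads=10)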
@@ -1562,6 +1605,7 @@ def set_context(**kwargs):
 in graph mode, coulde set 'support_binary' to be ``True`` , and run once .py file. It would save the source
 of the interfaces would be compiled by MindSpore to the interfaces definition .py file that should be
 guaranteed to be writable. Then compile the .py file to the .pyc or .so file, and could run in Graph mode.
+Currently, this config option only supports stand_alone.
 memory_optimize_level (str): The memory optimize level.
 On Ascend hardware platform, default: ``O1``, on other hardware platforms, default: ``O0``.
 The value must be in ['O0', 'O1'].
@@ -1569,6 +1613,10 @@ def set_context(**kwargs):
 - O0: priority performance option, disable SOMAS (Safe Optimized Memory Allocation Solver)
 and some other memory optimizations.
 - O1: priority memory option, enable SOMAS and some other memory optimizations.
+
+This parameter will be deprecated and will be removed in future versions. Please use the
+api :func:`mindspore.runtime.set_memory` instead.
+
 memory_offload (str): Whether to enable the memory offload function. When it is enabled, the idle data will be
 temporarily copied to the host side in the case of insufficient device memory. The value must be in the
 range of ['ON', 'OFF'], and the default value is ``'OFF'`` .
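For the memory optimize level, the pointer is :func:`mindspore.runtime.set_memory`. A minimal sketch; the keyword name `optimize_level` is an assumption based on the old context key, not confirmed by this diff:

    from mindspore import runtime

    # Was ms.set_context(memory_optimize_level="O1"); enables SOMAS-style memory optimizations.
    runtime.set_memory(optimize_level="O1")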
@@ -1577,6 +1625,10 @@ def set_context(**kwargs):
 when the graph compilation level is not 'O0'; This parameter does not take effect when
 memory_optimize_level is set 'O1'.
 - OFF: Turn off the memory Offload function.
+
+This parameter is deprecated and will be removed in future versions. Please use the `device` parameter
+of `mindspore.Parameter` instead.
+
 ascend_config (dict): Set the parameters specific to Ascend hardware platform. It is not set by default.
 The default value of `precision_mode`, `jit_compile` and
 `atomic_clean_policy` are experimental parameters, may change in the future.
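Offload of individual parameters is now steered through `mindspore.Parameter`. A minimal, hypothetical sketch; the value `"CPU"` for the `device` argument is an assumption, not confirmed by this diff:

    import numpy as np
    import mindspore as ms

    # Hypothetical: keep this parameter's storage on the host instead of device memory.
    w = ms.Parameter(ms.Tensor(np.zeros((1024, 1024), np.float32)), name="w", device="CPU")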
@@ -1602,10 +1654,15 @@ def set_context(**kwargs):
 - allow_mix_precision_bf16: Automatic mixing precision, facing the whole network operator, according to
 the built-in optimization strategy, automatically reduces the precision of some operators to bfloat16.

+This parameter will be deprecated and will be removed in future versions. Please use the
+api :func:`mindspore.device_context.ascend.op_precision.precision_mode` instead.
+
 - jit_compile (bool): Whether to select online compilation. When set to 'True', online compilation is
 prioritized. When set to 'False', compiled operator binary files are prioritized to improve compilation
 performance. The default settings are online compilation for static shape, and compiled operator binary
 files for dynamic shape.
+This parameter will be deprecated and will be removed in future versions. Please use the
+api :func:`mindspore.device_context.ascend.op_tuning.op_compile` instead.
 - atomic_clean_policy (int): The policy for cleaning memory occupied by atomic operators in the network.
 Default: ``1`` .

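Migration sketch for the two Ascend keys above, assuming the new setters take the same values as the old `ascend_config` entries (a precision-mode string and a bool):

    from mindspore.device_context.ascend import op_precision, op_tuning

    op_precision.precision_mode("allow_mix_precision")  # was ascend_config={"precision_mode": ...}
    op_tuning.op_compile(True)                           # was ascend_config={"jit_compile": True}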
@@ -1616,9 +1673,13 @@ def set_context(**kwargs):
 - matmul_allow_hf32 (bool): Whether to convert FP32 to HF32 for Matmul operators. Default value: ``False``.
 This is an experimental prototype that is subject to change and/or deletion.
 For detailed information, please refer to `Ascend community <https://www.hiascend.com/>`_ .
+This parameter will be deprecated and will be removed in future versions. Please use the
+api :func:`mindspore.device_context.ascend.op_precision.matmul_allow_hf32` instead.
 - conv_allow_hf32 (bool): Whether to convert FP32 to HF32 for Conv operators. Default value: ``True``.
 This is an experimental prototype that is subject to change and/or deletion.
 For detailed information, please refer to `Ascend community <https://www.hiascend.com/>`_ .
+This parameter will be deprecated and will be removed in future versions. Please use the
+api :func:`mindspore.device_context.ascend.op_precision.conv_allow_hf32` instead.
 - exception_dump (str): Enable exception dump for Ascend operators, providing the input and output data for
 failing Ascend operators. The value can be ``"0"`` , ``"1"`` and ``"2"``. For ``"0"`` , exception dump is
 turned off; for ``"1"``, all inputs and outputs will be dumped for AICore exception operators;
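Likewise for the HF32 switches, assuming the new setters take plain booleans:

    from mindspore.device_context.ascend import op_precision

    op_precision.matmul_allow_hf32(False)  # was ascend_config={"matmul_allow_hf32": False}
    op_precision.conv_allow_hf32(True)     # was ascend_config={"conv_allow_hf32": True}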
@@ -1626,15 +1687,20 @@ def set_context(**kwargs):
 but improving performance. Default: ``"2"`` .
 - op_precision_mode (str): Path to config file of op precision mode. For detailed information, please refer
 to `Ascend community <https://www.hiascend.com/>`_ .
+This parameter will be deprecated and will be removed in future versions. Please use the
+api :func:`mindspore.device_context.ascend.op_precision.op_precision_mode` instead.
 - op_debug_option (str): Enable debugging options for Ascend operators, default not enabled.
 The value currently only supports being set to ``"oom"``.

 - ``"oom"``: When there is a memory out of bounds during the execution of an operator,
 AscendCL will return an error code of ``EZ9999``.

+This parameter will be deprecated and will be removed in future versions. Please use the
+api :func:`mindspore.device_context.ascend.op_debug.debug_option` instead.
+
 - ge_options (dict): Set options for CANN. The options are divided into two categories: global and session.
 This is an experimental prototype that is subject to change and/or deletion.
-For detailed information, please refer to `Ascend community <https://www.hiascend.com/document/detail/zh/canncommercial/
+For detailed information, please refer to `Ascend community <https://www.hiascend.com/document/detail/zh/canncommercial/80RC3/apiref/ascendgraphapi/atlasgeapi_07_0146.html>`_ .
 The configuration options in `ge_options` may be duplicated with the options in `ascend_config`. If the
 same configuration options are set in both `ascend_config` and `ge_options`, the one set in `ge_options`
 shall prevail.
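Sketch for the remaining two deprecations in this hunk, assuming the new setters take the same values as the old `ascend_config` entries; the config-file path below is only a placeholder:

    from mindspore.device_context.ascend import op_debug, op_precision

    op_precision.op_precision_mode("./op_precision.ini")  # was ascend_config={"op_precision_mode": ...}
    op_debug.debug_option("oom")                           # was ascend_config={"op_debug_option": "oom"}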
@@ -1658,11 +1724,11 @@ def set_context(**kwargs):
 Default: False.
 - enable_grad_comm_opt (bool): Enable overlap between dx ops and data parallel communication ops if True.
 Currently, do not support
-`
+`O2 <https://www.mindspore.cn/docs/en/master/api_python/mindspore/mindspore.JitConfig.html>`_
 Default: False.
 - enable_opt_shard_comm_opt (bool): Enable overlap between forward ops
 and optimizer parallel allgather communication if True. Currently, do not support
-`
+`O2 <https://www.mindspore.cn/docs/en/master/api_python/mindspore/mindspore.JitConfig.html>`_
 Default: False.
 - compute_communicate_fusion_level (int): Enable the fusion between compute and communicate.
 Default: ``0``. Note: This function must be used with Ascend Training Solution 24.0.RC2 or later.
@@ -1695,6 +1761,10 @@ def set_context(**kwargs):
 used in MoE parallel scenario. After splitting the input data, each slice of data is processed by the
 MoE module, and then the branch results are concatenated. When the optimization is enable,
 communication and computation will be executed in parallel between branches. Default: ``False``.
+- enable_interleave_parallel_branch (bool): Enable communication computation parallel optimization
+for parallel branches with ``parallel_branch`` attribute in branches merge node. It is typically
+used in MoE parallel scenario with routed and shared expert. When the optimization is enabled,
+communication and computation will be executed in parallel between branches. Default: ``False``.
 - host_scheduling_max_threshold(int): The max threshold to control whether the dynamic shape process is
 used when run the static graph, the default value is 0. When the number of operations in the static graph
 is less than the max threshold, this graph will be executed in dynamic shape process. In large model
@@ -1716,6 +1786,7 @@ def set_context(**kwargs):
 compiling performance.
 - ``DEBUG``: Used for debugging when errors occur, more information will be record in compiling process.

+This parameter will be deprecated and removed in a future version.
 gpu_config (dict): Set the parameters specific to gpu hardware platform. It is not set by default.
 Currently, only setting `conv_fprop_algo` and `conv_dgrad_algo` and `conv_wgrad_algo` and `conv_allow_tf32`
 and `matmul_allow_tf32` are supported on GPU hardware platform.
@@ -1744,6 +1815,10 @@ def set_context(**kwargs):
 sized workspace is needed to store intermediate results.
 - winograd_nonfused: This algorithm uses the Winograd Transform approach to compute the convolution. A
 significant workspace may be needed to store intermediate results.
+
+This parameter will be deprecated and will be removed in future versions. Please use the
+api :func:`mindspore.device_context.gpu.op_tuning.conv_fprop_algo` instead.
+
 - conv_dgrad_algo (str): Specifies convolution data grad algorithm and the default value is 'normal',
 The value range is as follows:

@@ -1763,6 +1838,10 @@ def set_context(**kwargs):
 sized workspace is needed to store intermediate results. The results are deterministic.
 - winograd_nonfused: This algorithm uses the Winograd Transform approach to compute the convolution.
 A significant workspace may be needed to store intermediate results. The results are deterministic.
+
+This parameter will be deprecated and will be removed in future versions. Please use the
+api :func:`mindspore.device_context.gpu.op_tuning.conv_dgrad_algo` instead.
+
 - conv_wgrad_algo (str): Specifies convolution filter grad algorithm and the default value is 'normal',
 The value range is as follows:

@@ -1782,10 +1861,18 @@ def set_context(**kwargs):
 - fft_tiling: This algorithm uses the Fast-Fourier Transform approach but splits the inputs into tiles.
 A significant memory workspace is needed to store intermediate results but less than fft for large size
 images. The results are deterministic.
+
+This parameter will be deprecated and will be removed in future versions. Please use the
+api :func:`mindspore.device_context.gpu.op_tuning.conv_wgrad_algo` instead.
+
 - conv_allow_tf32 (bool): The flag below controls to allow Tensor core TF32 computation on CUDNN and the
 default value is ``True``.
+This parameter will be deprecated and will be removed in future versions. Please use the
+api :func:`mindspore.device_context.gpu.op_precision.conv_allow_tf32` instead.
 - matmul_allow_tf32 (bool): The flag below controls to allow Tensor core TF32 computation on CUBLAS and the
 default value is ``False``.
+This parameter will be deprecated and will be removed in future versions. Please use the
+api :func:`mindspore.device_context.gpu.op_precision.matmul_allow_tf32` instead.

 jit_config (dict): Set the global jit config for compile, take effect in network defined in Cell or jit
 decorators. It is not set by default.
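The GPU keys split between `op_tuning` (convolution algorithm selection) and `op_precision` (TF32 switches). A minimal sketch, assuming each setter takes the same value as the corresponding `gpu_config` entry:

    from mindspore.device_context.gpu import op_precision, op_tuning

    op_tuning.conv_fprop_algo("performance")  # was gpu_config={"conv_fprop_algo": "performance"}
    op_tuning.conv_dgrad_algo("normal")
    op_tuning.conv_wgrad_algo("normal")
    op_precision.conv_allow_tf32(True)        # was gpu_config={"conv_allow_tf32": True}
    op_precision.matmul_allow_tf32(False)     # was gpu_config={"matmul_allow_tf32": False}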
@@ -1793,7 +1880,7 @@ def set_context(**kwargs):
 When both exist simultaneously, the global jit config will not overwrite the local network's jit config.

 - jit_level (str): Used to control the compilation optimization level. Default: ``""`` , The framework
-automatically selects the execution method based on product,
+automatically selects the execution method based on product, Atlas training product is O2, and all other
 products are O0. In addition, The option of the dynamic shape must be O0 or O1, O2 is not supported.
 The value range is as follows:

@@ -1808,17 +1895,14 @@ def set_context(**kwargs):
 - ``"on"``: Enable infer mode, get better infer performance.
 - ``"off"``: Disable infer mode, use forward to infer, performance is not good.

-exec_order (str): Set the sorting method for operator execution in GRAPH_MODE Currently, only
-methods are supported: bfs and
+exec_order (str): Set the sorting method for operator execution in GRAPH_MODE. Currently, only two sorting
+methods are supported: bfs and dfs, and the default method is bfs.

 - ``"bfs"``: The default sorting method, breadth priority, good communication masking, relatively good
 performance.
 - ``"dfs"``: An optional sorting method, depth-first sorting. The performance is relatively worse than that
 of bfs execution order, but it occupies less memory. It is recommended to try dfs in scenarios where other
 execution orders run out of memory (OOM).
-- ``"gpto"``: An optional sorting method. This method combines multiple execution orders and selects a
-method with relatively good performance. There may be some performance gains in scenarios with multiple
-replicas running in parallel.

 Raises:
 ValueError: If input key is not an attribute in context.
@@ -1831,8 +1915,6 @@ def set_context(**kwargs):
 >>> ms.set_context(device_id=0)
 >>> ms.set_context(save_graphs=True, save_graphs_path="./model.ms")
 >>> ms.set_context(enable_reduce_precision=True)
->>> ms.set_context(enable_graph_kernel=True)
->>> ms.set_context(graph_kernel_flags="--opt_level=2 --dump_as_text")
 >>> ms.set_context(reserve_class_name_in_scope=True)
 >>> ms.set_context(variable_memory_max_size="6GB")
 >>> ms.set_context(aoe_tune_mode="online")
@@ -1862,7 +1944,7 @@ def set_context(**kwargs):
 >>> ms.set_context(gpu_config={"conv_fprop_algo": "performance", "conv_allow_tf32": True,
 ... "matmul_allow_tf32": True})
 >>> ms.set_context(jit_config={"jit_level": "O0"})
->>> ms.set_context(exec_order="
+>>> ms.set_context(exec_order="bfs")
 """
 ctx = _context()
 # set device target first
@@ -1872,14 +1954,22 @@ def set_context(**kwargs):
     _check_ascend_device_context_initialized(device, kwargs)

     for key, value in kwargs.items():
+        _check_context_deprecated(key)
         if key in ('enable_sparse', 'auto_tune_mode'):
             logger.warning(f"For 'context.set_context', '{key}' parameter is deprecated, "
                            "and will be removed in the next version.")
             continue
-        if key in ('enable_auto_mixed_precision',
+        if key in ('enable_auto_mixed_precision',):
             logger.warning(f"For 'context.set_context', '{key}' parameter is deprecated. "
                            "For details, please see the interface parameter API comments")
             continue
+        if key == "print_file_path":
+            logger.warning(f"For 'context.set_context', '{key}' parameter is deprecated due to changes in the behavior"
+                           f" of the print operator. Recommend not using this parameter and"
+                           f" directly viewing the screen output.")
+        if key in ('reserve_class_name_in_scope', 'env_config_path'):
+            logger.warning(f"For 'context.set_context', '{key}' parameter is deprecated, "
+                           "and will be removed in the next version.")
         _check_key(key)
         if key == 'save_graphs':
             if value is True:
@@ -1898,6 +1988,14 @@ def set_context(**kwargs):
             setattr(ctx, key, value)
             ctx.set_param(ms_ctx_param.__members__[key], int(value))
             continue
+        if key == 'enable_graph_kernel':
+            logger.warning(f"For 'context.set_context', '{key}' parameter is deprecated, "
+                           "and will be removed in the next version. "
+                           "Please use jit_config={'jit_level': 'O1'} instead.")
+        if key == 'graph_kernel_flags':
+            logger.warning(f"For 'context.set_context', '{key}' parameter is deprecated, "
+                           "and will be removed in the next version. "
+                           "Please use environ variable 'MS_DEV_GRAPH_KERNEL_FLAGS' instead.")
         if not _check_target_specific_cfgs(device, key):
             continue
         if key in ctx.setters:
@@ -1915,6 +2013,7 @@ def set_context(**kwargs):


 def get_context(attr_key):
+
     """
     Get context attribute value according to the input key.
     If some attributes are not set, they will be automatically obtained.