mindspore-2.3.0-cp39-none-any.whl → mindspore-2.3.0rc2-cp39-none-any.whl
This diff covers the published contents of two publicly available releases of the package, exactly as they appear in their public registry. It is provided for informational purposes only.
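For readers who want to reproduce a listing like this locally, the sketch below compares two wheels using only the standard library. It is a minimal illustration, not the tool that generated this page; the wheel filenames are assumptions based on the release names above.

```python
# Minimal sketch: diff the Python sources shared by two wheels.
# The wheel paths are assumptions; download both files from PyPI first.
import difflib
import zipfile

old_whl = zipfile.ZipFile("mindspore-2.3.0-cp39-none-any.whl")
new_whl = zipfile.ZipFile("mindspore-2.3.0rc2-cp39-none-any.whl")

old_names, new_names = set(old_whl.namelist()), set(new_whl.namelist())
print("only in 2.3.0:", len(old_names - new_names), "files")
print("only in 2.3.0rc2:", len(new_names - old_names), "files")

for name in sorted(old_names & new_names):
    if not name.endswith(".py"):
        continue  # binary members (.so, .o, .bin) would be compared by size or hash instead
    old_src = old_whl.read(name).decode("utf-8", "replace").splitlines()
    new_src = new_whl.read(name).decode("utf-8", "replace").splitlines()
    for line in difflib.unified_diff(old_src, new_src,
                                     fromfile=name, tofile=name, lineterm=""):
        print(line)
```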
- mindspore/.commit_id +1 -1
- mindspore/Third_Party_Open_Source_Software_Notice +0 -1512
- mindspore/__init__.py +1 -2
- mindspore/_c_dataengine.cpython-39-aarch64-linux-gnu.so +0 -0
- mindspore/_c_expression.cpython-39-aarch64-linux-gnu.so +0 -0
- mindspore/_c_mindrecord.cpython-39-aarch64-linux-gnu.so +0 -0
- mindspore/_checkparam.py +25 -5
- mindspore/_extends/graph_kernel/model/graph_parallel.py +1 -1
- mindspore/_extends/parse/__init__.py +2 -2
- mindspore/_extends/parse/compile_config.py +0 -29
- mindspore/_extends/parse/namespace.py +2 -2
- mindspore/_extends/parse/parser.py +5 -21
- mindspore/_extends/parse/resources.py +7 -5
- mindspore/_extends/parse/standard_method.py +59 -40
- mindspore/_mindspore_offline_debug.cpython-39-aarch64-linux-gnu.so +0 -0
- mindspore/amp.py +5 -26
- mindspore/bin/cache_admin +0 -0
- mindspore/bin/cache_server +0 -0
- mindspore/boost/adasum.py +1 -1
- mindspore/boost/base.py +1 -1
- mindspore/boost/boost_cell_wrapper.py +1 -1
- mindspore/boost/grad_freeze.py +2 -2
- mindspore/boost/less_batch_normalization.py +6 -9
- mindspore/common/__init__.py +1 -8
- mindspore/common/_register_for_tensor.py +9 -8
- mindspore/common/api.py +65 -275
- mindspore/common/dtype.py +4 -8
- mindspore/common/dump.py +5 -2
- mindspore/common/jit_config.py +1 -1
- mindspore/common/lazy_inline.py +2 -14
- mindspore/common/parameter.py +15 -14
- mindspore/common/recompute.py +5 -20
- mindspore/common/sparse_tensor.py +6 -21
- mindspore/common/tensor.py +52 -100
- mindspore/communication/__init__.py +11 -6
- mindspore/communication/management.py +94 -92
- mindspore/context.py +18 -180
- mindspore/dataset/engine/datasets.py +46 -69
- mindspore/dataset/engine/datasets_user_defined.py +53 -72
- mindspore/dataset/engine/datasets_vision.py +2 -2
- mindspore/dataset/engine/queue.py +38 -56
- mindspore/dataset/engine/validators.py +5 -11
- mindspore/dataset/vision/__init__.py +5 -5
- mindspore/dataset/vision/c_transforms.py +5 -5
- mindspore/dataset/vision/py_transforms_util.py +1 -1
- mindspore/dataset/vision/transforms.py +46 -591
- mindspore/dataset/vision/utils.py +1 -121
- mindspore/dataset/vision/validators.py +3 -9
- mindspore/hal/__init__.py +1 -7
- mindspore/hal/device.py +1 -1
- mindspore/include/api/model.h +0 -3
- mindspore/include/dataset/vision.h +2 -54
- mindspore/include/mindapi/base/types.h +0 -1
- mindspore/lib/libdnnl.so.2 +0 -0
- mindspore/lib/libmindspore.so +0 -0
- mindspore/lib/libmindspore_backend.so +0 -0
- mindspore/lib/libmindspore_common.so +0 -0
- mindspore/lib/libmindspore_core.so +0 -0
- mindspore/lib/libmindspore_glog.so.0 +0 -0
- mindspore/lib/libmindspore_gpr.so.15 +0 -0
- mindspore/lib/libmindspore_grpc++.so.1 +0 -0
- mindspore/lib/libmindspore_grpc.so.15 +0 -0
- mindspore/lib/libmindspore_shared_lib.so +0 -0
- mindspore/lib/libmpi_adapter.so +0 -0
- mindspore/lib/libmpi_collective.so +0 -0
- mindspore/lib/libnnacl.so +0 -0
- mindspore/lib/libopencv_core.so.4.5 +0 -0
- mindspore/lib/libps_cache.so +0 -0
- mindspore/lib/plugin/ascend/custom_aicore_ops/op_impl/ai_core/tbe/config/ascend310p/aic-ascend310p-ops-info.json +0 -35
- mindspore/lib/plugin/ascend/custom_aicore_ops/op_impl/ai_core/tbe/custom_aicore_ops_impl/kv_cache_mgr.py +0 -2
- mindspore/lib/plugin/ascend/custom_aicore_ops/op_impl/vector_core/tbe/custom_aicore_ops_impl/kv_cache_mgr.py +0 -2
- mindspore/lib/plugin/ascend/custom_aicpu_ops/op_impl/cpu/aicpu_kernel/impl/libcust_cpu_kernels.so +0 -0
- mindspore/lib/plugin/ascend/custom_aicpu_ops/op_impl/cpu/config/cust_aicpu_kernel.json +0 -72
- mindspore/lib/plugin/ascend/custom_aicpu_ops/op_proto/libcust_op_proto.so +0 -0
- mindspore/lib/plugin/ascend/custom_ascendc_ops/op_api/include/{aclnn_all_finite.h → aclnn_add_custom.h} +11 -9
- mindspore/lib/plugin/ascend/custom_ascendc_ops/op_api/include/aclnn_decoder_kv_cache.h +1 -1
- mindspore/lib/plugin/ascend/custom_ascendc_ops/op_api/include/aclnn_prompt_kv_cache.h +1 -1
- mindspore/lib/plugin/ascend/custom_ascendc_ops/op_api/lib/libcust_opapi.so +0 -0
- mindspore/lib/plugin/ascend/custom_ascendc_ops/op_impl/ai_core/tbe/config/ascend310p/aic-ascend310p-ops-info.json +12 -184
- mindspore/lib/plugin/ascend/custom_ascendc_ops/op_impl/ai_core/tbe/config/ascend910/aic-ascend910-ops-info.json +15 -7
- mindspore/lib/plugin/ascend/custom_ascendc_ops/op_impl/ai_core/tbe/config/ascend910b/aic-ascend910b-ops-info.json +15 -7
- mindspore/lib/plugin/ascend/custom_ascendc_ops/op_impl/ai_core/tbe/custom_ascendc_ops_impl/dynamic/add_custom.cpp +81 -0
- mindspore/lib/plugin/ascend/custom_ascendc_ops/op_impl/ai_core/tbe/custom_ascendc_ops_impl/dynamic/add_custom.py +134 -0
- mindspore/lib/plugin/ascend/custom_ascendc_ops/op_impl/ai_core/tbe/custom_ascendc_ops_impl/dynamic/decoder_kv_cache.py +31 -77
- mindspore/lib/plugin/ascend/custom_ascendc_ops/op_impl/ai_core/tbe/custom_ascendc_ops_impl/dynamic/prompt_kv_cache.py +31 -77
- mindspore/lib/plugin/ascend/custom_ascendc_ops/op_impl/ai_core/tbe/op_tiling/lib/linux/aarch64/libcust_opmaster_rt2.0.so +0 -0
- mindspore/lib/plugin/ascend/custom_ascendc_ops/op_impl/ai_core/tbe/op_tiling/liboptiling.so +0 -0
- mindspore/lib/plugin/ascend/custom_ascendc_ops/op_proto/inc/op_proto.h +5 -4
- mindspore/lib/plugin/ascend/custom_ascendc_ops/op_proto/lib/linux/aarch64/libcust_opsproto_rt2.0.so +0 -0
- mindspore/lib/plugin/ascend/libascend_collective.so +0 -0
- mindspore/lib/plugin/ascend/libdvpp_utils.so +0 -0
- mindspore/lib/plugin/ascend/libhccl_plugin.so +0 -0
- mindspore/lib/plugin/ascend/liblowlatency_collective.so +0 -0
- mindspore/lib/plugin/ascend/libmindspore_cpu_kernels.so +0 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/asdops/bin/DeviceBin +0 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/asdops/bin/PkgInspect +0 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/asdops/bin/op_man +0 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/asdops/device/ascend910b/bin/ascend910b.bin +286 -275
- mindspore/lib/plugin/ascend/ms_kernels_internal/asdops/host/libasdops_cann_host.so +0 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/asdops/host/libasdops_host.so +0 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/asdops/lib/libasdops.so +0 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/asdops/lib/libasdops_static.a +0 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/add/add_impl.h +0 -1
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/apply_rotary_pos_emb/apply_rotary_pos_emb_impl.h +0 -1
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/asdop/asd_op_impl.h +0 -3
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/backend_param.h +0 -5
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/cast/cast_tiling.h +45 -1
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/compare/compare_impl.h +0 -1
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/flash_attention_score/flash_attention_score_impl.h +4 -8
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/flash_attention_score/flash_attention_score_tiling.h +4 -11
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/flash_attention_score/kernel/flash_attention_score_mix_hwsync.h +0 -18
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/internal_kernel.h +0 -6
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/internal_rtbackend.h +75 -1
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/matmul/kernel/matmul.h +5 -5
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/matmul/matmul_impl.h +3 -18
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/matmul_common/pp_matmul_common_tiling.h +5 -5
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/matmul_common/pp_matmul_info.h +2 -2
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/matmul_common/tiling_data.h +3 -36
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/matmul_stridedslice/kernel/matmul_stridedslice_fusion.h +2 -2
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/matmul_stridedslice/matmul_stridedslice_fusion_impl.h +4 -22
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/op_param.h +2 -16
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/paged_attention/kernel/paged_attention_mix_hwsync.h +3 -1
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/paged_attention/paged_attention_impl.h +4 -5
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/paged_attention/paged_attention_tiling.h +4 -9
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/param/attention_param.h +2 -5
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/param/matmul_ext_param.h +0 -1
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/param/matmul_qkv_param.h +4 -10
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/param/sub_param.h +12 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/rms_norm/rms_norm_impl.h +0 -1
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/sub/sub_impl.h +0 -1
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/tune_repo/matmul_table.h +1 -1
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/utils/backend.h +2 -10
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/utils/elewise_utils.h +1 -5
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/utils/log/log.h +0 -1
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/utils/log/log_tiling.h +0 -17
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/utils/math.h +7 -2
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/lib/libAdd_impl.so +0 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/lib/libSub_impl.so +0 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/lib/libadd_layernorm_impl.so +0 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/lib/libadd_rms_norm_impl.so +0 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/lib/libapply_rotary_pos_emb_impl.so +0 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/lib/libcast_impl.so +0 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/lib/libgelu_impl.so +0 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/lib/libmatmul_impl.so +0 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/lib/libmatmul_stridedslice_fusion_impl.so +0 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/lib/libms_kernels_internal.so +0 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/lib/libnot_equal_impl.so +0 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/lib/libreshape_and_cache_impl.so +0 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/lib/librms_norm_impl.so +0 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/op_kernels/ascend910b/BSAttention/flash_attention_score_bf16_bnsd_full_mix.o +0 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/op_kernels/ascend910b/BSAttention/flash_attention_score_bf16_bnsd_tri_mix.o +0 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/op_kernels/ascend910b/BSAttention/flash_attention_score_bf16_bsh_full_mix.o +0 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/op_kernels/ascend910b/BSAttention/flash_attention_score_bf16_bsh_tri_mix.o +0 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/op_kernels/ascend910b/BSAttention/flash_attention_score_fp16_bnsd_full_mix.o +0 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/op_kernels/ascend910b/BSAttention/flash_attention_score_fp16_bnsd_tri_mix.o +0 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/op_kernels/ascend910b/BSAttention/flash_attention_score_fp16_bsh_full_mix.o +0 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/op_kernels/ascend910b/BSAttention/flash_attention_score_fp16_bsh_tri_mix.o +0 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/op_kernels/ascend910b/BSAttention/paged_attention_bf16_bnsd_full_mix.o +0 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/op_kernels/ascend910b/BSAttention/paged_attention_bf16_bsh_full_mix.o +0 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/op_kernels/ascend910b/BSAttention/paged_attention_fp16_bnsd_full_mix.o +0 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/op_kernels/ascend910b/BSAttention/paged_attention_fp16_bsh_full_mix.o +0 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/lccl/lib/liblcal.so +0 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/lccl/lib/liblccl_wrapper.so +0 -0
- mindspore/lib/plugin/libmindspore_ascend.so.2 +0 -0
- mindspore/mindrecord/filewriter.py +2 -2
- mindspore/mint/__init__.py +40 -720
- mindspore/mint/nn/__init__.py +7 -89
- mindspore/mint/nn/functional.py +16 -165
- mindspore/mint/optim/adamw.py +16 -15
- mindspore/nn/__init__.py +2 -0
- mindspore/nn/cell.py +98 -97
- mindspore/nn/extend/basic.py +2 -2
- mindspore/nn/extend/embedding.py +1 -1
- mindspore/nn/extend/layer/normalization.py +5 -7
- mindspore/nn/generator.py +297 -0
- mindspore/nn/layer/activation.py +3 -4
- mindspore/nn/layer/basic.py +16 -79
- mindspore/nn/layer/conv.py +8 -17
- mindspore/nn/layer/embedding.py +4 -1
- mindspore/nn/layer/math.py +1 -1
- mindspore/nn/layer/normalization.py +1 -1
- mindspore/nn/layer/pooling.py +0 -5
- mindspore/nn/layer/rnn_cells.py +2 -2
- mindspore/nn/loss/loss.py +19 -19
- mindspore/nn/optim/adasum.py +1 -1
- mindspore/nn/optim/sgd.py +2 -3
- mindspore/nn/probability/distribution/exponential.py +1 -1
- mindspore/nn/probability/distribution/geometric.py +1 -1
- mindspore/nn/probability/distribution/logistic.py +1 -1
- mindspore/nn/wrap/cell_wrapper.py +1 -25
- mindspore/nn/wrap/loss_scale.py +1 -24
- mindspore/numpy/array_ops.py +1 -5
- mindspore/numpy/dtypes.py +3 -3
- mindspore/numpy/math_ops.py +8 -8
- mindspore/ops/__init__.py +1 -1
- mindspore/ops/_grad_experimental/grad_comm_ops.py +16 -75
- mindspore/ops/_vmap/vmap_array_ops.py +0 -27
- mindspore/ops/_vmap/vmap_math_ops.py +1 -29
- mindspore/ops/_vmap/vmap_nn_ops.py +18 -19
- mindspore/ops/auto_generate/cpp_create_prim_instance_helper.py +8 -34
- mindspore/ops/auto_generate/gen_arg_dtype_cast.py +9 -2
- mindspore/ops/auto_generate/gen_arg_handler.py +0 -26
- mindspore/ops/auto_generate/gen_extend_func.py +27 -603
- mindspore/ops/auto_generate/gen_ops_def.py +203 -993
- mindspore/ops/auto_generate/gen_ops_prim.py +402 -1946
- mindspore/ops/auto_generate/pyboost_inner_prim.py +20 -90
- mindspore/ops/composite/base.py +6 -3
- mindspore/ops/composite/math_ops.py +1 -1
- mindspore/ops/composite/multitype_ops/_compile_utils.py +17 -24
- mindspore/ops/composite/multitype_ops/_constexpr_utils.py +1 -1
- mindspore/ops/extend/__init__.py +3 -2
- mindspore/ops/extend/array_func.py +51 -10
- mindspore/ops/extend/nn_func.py +78 -2
- mindspore/ops/function/__init__.py +13 -8
- mindspore/ops/function/array_func.py +179 -455
- mindspore/ops/function/clip_func.py +1 -1
- mindspore/ops/function/grad/grad_func.py +3 -3
- mindspore/ops/function/math_func.py +103 -117
- mindspore/ops/function/nn_func.py +163 -275
- mindspore/ops/function/other_func.py +2 -2
- mindspore/ops/function/random_func.py +69 -202
- mindspore/ops/function/sparse_func.py +4 -4
- mindspore/ops/functional.py +327 -332
- mindspore/ops/operations/__init__.py +3 -13
- mindspore/ops/operations/_grad_ops.py +27 -3
- mindspore/ops/operations/_inner_ops.py +356 -53
- mindspore/ops/operations/_rl_inner_ops.py +2 -2
- mindspore/ops/operations/_tensor_array.py +8 -8
- mindspore/ops/operations/array_ops.py +65 -82
- mindspore/ops/operations/comm_ops.py +93 -784
- mindspore/ops/operations/custom_ops.py +28 -51
- mindspore/ops/operations/debug_ops.py +4 -4
- mindspore/ops/operations/inner_ops.py +2 -2
- mindspore/ops/operations/manually_defined/ops_def.py +4 -304
- mindspore/ops/operations/math_ops.py +50 -3
- mindspore/ops/operations/nn_ops.py +247 -14
- mindspore/ops/operations/other_ops.py +3 -3
- mindspore/ops/operations/random_ops.py +1 -1
- mindspore/ops/operations/sparse_ops.py +1 -1
- mindspore/ops/primitive.py +8 -9
- mindspore/ops/silent_check.py +5 -5
- mindspore/ops_generate/arg_dtype_cast.py +9 -2
- mindspore/ops_generate/arg_handler.py +0 -26
- mindspore/ops_generate/gen_aclnn_implement.py +4 -1
- mindspore/ops_generate/gen_ops.py +4 -26
- mindspore/ops_generate/gen_pyboost_func.py +12 -41
- mindspore/ops_generate/gen_utils.py +0 -21
- mindspore/ops_generate/pyboost_utils.py +2 -7
- mindspore/ops_generate/template.py +0 -1
- mindspore/parallel/_auto_parallel_context.py +1 -21
- mindspore/parallel/_tensor.py +5 -0
- mindspore/parallel/_transformer/transformer.py +1 -1
- mindspore/parallel/_utils.py +1 -15
- mindspore/parallel/algo_parameter_config.py +3 -1
- mindspore/parallel/checkpoint_transform.py +9 -12
- mindspore/parallel/cluster/process_entity/_api.py +29 -28
- mindspore/parallel/cluster/process_entity/_utils.py +3 -13
- mindspore/parallel/cluster/run.py +16 -13
- mindspore/parallel/parameter_broadcast.py +2 -2
- mindspore/parallel/shard.py +17 -31
- mindspore/profiler/__init__.py +2 -3
- mindspore/profiler/common/util.py +2 -107
- mindspore/profiler/envprofiling.py +1 -1
- mindspore/profiler/parser/ascend_analysis/constant.py +21 -8
- mindspore/profiler/parser/ascend_analysis/file_manager.py +0 -82
- mindspore/profiler/parser/ascend_analysis/function_event.py +28 -43
- mindspore/profiler/parser/ascend_analysis/fwk_cann_parser.py +27 -49
- mindspore/profiler/parser/ascend_analysis/fwk_file_parser.py +10 -15
- mindspore/profiler/parser/ascend_analysis/msprof_timeline_parser.py +20 -25
- mindspore/profiler/parser/ascend_analysis/profiler_info_parser.py +5 -5
- mindspore/profiler/parser/ascend_analysis/trace_event_manager.py +1 -10
- mindspore/profiler/parser/ascend_hccl_generator.py +1 -4
- mindspore/profiler/parser/ascend_msprof_exporter.py +22 -43
- mindspore/profiler/parser/ascend_timeline_generator.py +5 -7
- mindspore/profiler/parser/minddata_parser.py +3 -72
- mindspore/profiler/profiling.py +59 -176
- mindspore/rewrite/api/node.py +1 -1
- mindspore/rewrite/common/namespace.py +5 -5
- mindspore/rewrite/parsers/assign_parser.py +0 -2
- mindspore/rewrite/parsers/class_def_parser.py +4 -8
- mindspore/run_check/_check_version.py +1 -1
- mindspore/scipy/fft.py +3 -1
- mindspore/scipy/linalg.py +3 -2
- mindspore/scipy/ops.py +3 -5
- mindspore/scipy/optimize/__init__.py +2 -2
- mindspore/train/__init__.py +4 -4
- mindspore/train/anf_ir_pb2.py +2 -8
- mindspore/train/callback/__init__.py +2 -5
- mindspore/train/callback/_backup_and_restore.py +2 -2
- mindspore/train/callback/_checkpoint.py +16 -104
- mindspore/train/callback/_landscape.py +1 -1
- mindspore/train/callback/_time_monitor.py +1 -1
- mindspore/train/data_sink.py +4 -5
- mindspore/train/dataset_helper.py +20 -45
- mindspore/train/model.py +38 -266
- mindspore/train/serialization.py +105 -256
- mindspore/train/summary/_summary_adapter.py +1 -1
- mindspore/version.py +1 -1
- {mindspore-2.3.0.dist-info → mindspore-2.3.0rc2.dist-info}/METADATA +2 -2
- {mindspore-2.3.0.dist-info → mindspore-2.3.0rc2.dist-info}/RECORD +303 -420
- mindspore/_extends/pijit/__init__.py +0 -23
- mindspore/_extends/pijit/pijit_func_white_list.py +0 -343
- mindspore/common/file_system.py +0 -48
- mindspore/common/generator.py +0 -260
- mindspore/common/no_inline.py +0 -54
- mindspore/common/np_dtype.py +0 -25
- mindspore/communication/comm_func.py +0 -1140
- mindspore/hal/memory.py +0 -326
- mindspore/lib/libavcodec.so.59 +0 -0
- mindspore/lib/libavdevice.so.59 +0 -0
- mindspore/lib/libavfilter.so.8 +0 -0
- mindspore/lib/libavformat.so.59 +0 -0
- mindspore/lib/libavutil.so.57 +0 -0
- mindspore/lib/libmindspore_np_dtype.so +0 -0
- mindspore/lib/libswresample.so.4 +0 -0
- mindspore/lib/libswscale.so.6 +0 -0
- mindspore/lib/plugin/ascend/custom_ascendc_ops/op_impl/ai_core/tbe/custom_ascendc_ops_impl/dynamic/all_finite.cpp +0 -326
- mindspore/lib/plugin/ascend/custom_ascendc_ops/op_impl/ai_core/tbe/custom_ascendc_ops_impl/dynamic/all_finite.py +0 -180
- mindspore/lib/plugin/ascend/custom_ascendc_ops/op_impl/ai_core/tbe/kernel/ascend910b/all_finite/AllFinite_576ceaeef5870c451cab59af55ea46ad.json +0 -58
- mindspore/lib/plugin/ascend/custom_ascendc_ops/op_impl/ai_core/tbe/kernel/ascend910b/all_finite/AllFinite_576ceaeef5870c451cab59af55ea46ad.o +0 -0
- mindspore/lib/plugin/ascend/custom_ascendc_ops/op_impl/ai_core/tbe/kernel/ascend910b/all_finite/AllFinite_86a73ff6e28d734c96bb8d3054f7dd18.json +0 -58
- mindspore/lib/plugin/ascend/custom_ascendc_ops/op_impl/ai_core/tbe/kernel/ascend910b/all_finite/AllFinite_86a73ff6e28d734c96bb8d3054f7dd18.o +0 -0
- mindspore/lib/plugin/ascend/custom_ascendc_ops/op_impl/ai_core/tbe/kernel/ascend910b/all_finite/AllFinite_f55e0ebaad1f2f572e43677336992fa0.json +0 -58
- mindspore/lib/plugin/ascend/custom_ascendc_ops/op_impl/ai_core/tbe/kernel/ascend910b/all_finite/AllFinite_f55e0ebaad1f2f572e43677336992fa0.o +0 -0
- mindspore/lib/plugin/ascend/custom_ascendc_ops/op_impl/ai_core/tbe/kernel/config/ascend910b/all_finite.json +0 -109
- mindspore/lib/plugin/ascend/custom_ascendc_ops/op_impl/ai_core/tbe/kernel/config/ascend910b/binary_info_config.json +0 -38
- mindspore/lib/plugin/ascend/custom_compiler/OWNERS +0 -12
- mindspore/lib/plugin/ascend/custom_compiler/setup.py +0 -255
- mindspore/lib/plugin/ascend/custom_compiler/start.sh +0 -26
- mindspore/lib/plugin/ascend/custom_compiler/template.json +0 -40
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/acme/include/acme.h +0 -24
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/acme/include/acme_op.h +0 -69
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/acme/include/base_type.h +0 -133
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/acme/include/op_creator.h +0 -32
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/acme/include/op_param.h +0 -35
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/acme/include/tiling_info.h +0 -60
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/acme/src/core/kernel_register.h +0 -37
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/acme/src/core/platform/platform_configs.h +0 -89
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/acme/src/core/platform/rt_funcs.h +0 -135
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/acme/src/ops/host_src/add_op.h +0 -34
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/acme/src/ops/host_src/asd_backoff_base.h +0 -62
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/acme/src/ops/host_src/asd_elewise_op.h +0 -33
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/acme/src/ops/host_src/asd_ops.h +0 -88
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/acme/src/ops/host_src/asd_pa_op.h +0 -45
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/acme/src/ops/host_src/cast_op.h +0 -52
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/acme/src/ops/host_src/matmul_op.h +0 -95
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/acme/src/utils/asd_utils.h +0 -84
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/acme/src/utils/comm_utils.h +0 -61
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/apply_rotary_pos_emb/kernel/apply_rotary_pos_emb_fp32.h +0 -224
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/elewise_binary/and_impl.h +0 -29
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/elewise_binary/div_impl.h +0 -29
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/elewise_binary/elewise_binary_impl.h +0 -48
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/elewise_binary/elewise_binary_tiling.h +0 -25
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/elewise_binary/kernel/and_kernel.h +0 -46
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/elewise_binary/kernel/div_kernel.h +0 -46
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/elewise_binary/kernel/elewise_binary_base.h +0 -260
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/elewise_binary/kernel/elewise_binary_kernel.h +0 -35
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/elewise_binary/kernel/max_kernel.h +0 -66
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/elewise_binary/kernel/min_kernel.h +0 -66
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/elewise_binary/kernel/mul_kernel.h +0 -66
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/elewise_binary/kernel/or_kernel.h +0 -46
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/elewise_binary/max_impl.h +0 -29
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/elewise_binary/min_impl.h +0 -29
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/elewise_binary/mul_impl.h +0 -29
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/elewise_binary/or_impl.h +0 -29
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/elewise_unary/abs_impl.h +0 -29
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/elewise_unary/elewise_unary_impl.h +0 -47
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/elewise_unary/elewise_unary_tiling.h +0 -24
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/elewise_unary/exp_impl.h +0 -29
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/elewise_unary/kernel/abs_kernel.h +0 -45
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/elewise_unary/kernel/elewise_unary_base.h +0 -148
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/elewise_unary/kernel/elewise_unary_kernel.h +0 -31
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/elewise_unary/kernel/exp_kernel.h +0 -45
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/elewise_unary/kernel/ln_kernel.h +0 -45
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/elewise_unary/kernel/not_kernel.h +0 -45
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/elewise_unary/kernel/reciprocal_kernel.h +0 -45
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/elewise_unary/kernel/relu_kernel.h +0 -55
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/elewise_unary/kernel/rsqrt_kernel.h +0 -45
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/elewise_unary/kernel/sqrt_kernel.h +0 -45
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/elewise_unary/ln_impl.h +0 -29
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/elewise_unary/not_impl.h +0 -29
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/elewise_unary/reciprocal_impl.h +0 -29
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/elewise_unary/relu_impl.h +0 -29
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/elewise_unary/rsqrt_impl.h +0 -29
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/elewise_unary/sqrt_impl.h +0 -29
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/grouped_matmul/grouped_matmul_impl.h +0 -45
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/grouped_matmul/grouped_matmul_tiling.h +0 -187
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/grouped_matmul/kernel/grouped_matmul.h +0 -245
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/grouped_matmul/kernel/grouped_matmul_interface.h +0 -24
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/grouped_matmul/kernel/grouped_matmul_utils.h +0 -111
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/grouped_matmul/tiling_data.h +0 -54
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/param/compare_param.h +0 -31
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/param/elewise_param.h +0 -41
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/param/grouped_matmul_param.h +0 -40
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/profiling_util.h +0 -364
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/utils/log/log_utils.h +0 -69
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/utils/register/kernel_creator.h +0 -39
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/utils/register/kernel_registry.h +0 -114
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/utils/utils.h +0 -98
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/op_kernels/ascend910b/MatMulPostFusionMixTactic/matmul_postfusion_mix.json +0 -19
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/op_kernels/ascend910b/MatMulPostFusionMixTactic/matmul_postfusion_mix.o +0 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/op_kernels/ascend910b/MatMulPostFusionMixTactic/matmul_postfusion_mix_mix_aic_0.o +0 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/op_kernels/ascend910b/MatMulPostFusionMixTactic/matmul_postfusion_mix_mix_aiv_0.o +0 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/op_kernels/ascend910b/MultiMatMulPostFusionMixTactic/multi_matmul_postfusion_mix.json +0 -19
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/op_kernels/ascend910b/MultiMatMulPostFusionMixTactic/multi_matmul_postfusion_mix.o +0 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/op_kernels/ascend910b/MultiMatMulPostFusionMixTactic/multi_matmul_postfusion_mix_mix_aic_0.o +0 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/op_kernels/ascend910b/MultiMatMulPostFusionMixTactic/multi_matmul_postfusion_mix_mix_aiv_0.o +0 -0
- mindspore/mint/linalg/__init__.py +0 -22
- mindspore/nn/layer/embedding_service.py +0 -531
- mindspore/nn/layer/embedding_service_layer.py +0 -393
- mindspore/ops/function/reshard_func.py +0 -102
- mindspore/ops/operations/_infer_ops.py +0 -19
- mindspore/ops/operations/reshard_ops.py +0 -53
- mindspore/profiler/common/process_pool.py +0 -41
- mindspore/profiler/common/singleton.py +0 -28
- mindspore/profiler/parser/ascend_integrate_generator.py +0 -42
- mindspore/profiler/parser/ascend_memory_generator.py +0 -185
- mindspore/train/callback/_cluster_monitor.py +0 -201
- mindspore/train/callback/_flops_collector.py +0 -238
- mindspore/train/callback/_mindio_ttp.py +0 -443
- {mindspore-2.3.0.dist-info → mindspore-2.3.0rc2.dist-info}/WHEEL +0 -0
- {mindspore-2.3.0.dist-info → mindspore-2.3.0rc2.dist-info}/entry_points.txt +0 -0
- {mindspore-2.3.0.dist-info → mindspore-2.3.0rc2.dist-info}/top_level.txt +0 -0
@@ -174,7 +174,7 @@ class Custom(ops.PrimitiveWithInfer):
 
         - "hybrid": supports ["GPU", "CPU"].
         - "akg": supports ["GPU", "CPU"].
-        - "aot": supports ["GPU", "CPU"
+        - "aot": supports ["GPU", "CPU"].
         - "pyfunc": supports ["CPU"].
         - "julia": supports ["CPU"].
 
@@ -193,7 +193,7 @@ class Custom(ops.PrimitiveWithInfer):
 
     1. for "aot":
 
-
+        Currently "aot" supports GPU/CPU(linux only) platform.
         "aot" means ahead of time, in which case Custom directly launches user defined "xxx.so" file as an
         operator. Users need to compile a handwriting "xxx.cu"/"xxx.cc" file into "xxx.so" ahead of time,
         and offer the path of the file along with a function name.
@@ -249,21 +249,6 @@ class Custom(ops.PrimitiveWithInfer):
         (ex. Custom(func="./reorganize.so:CustomReorganize", out_shape=[1], out_dtype=mstype.float32,
         "aot"))
 
-        b) ASCEND platform
-        Before using Custom operators on the ASCEND platform, users must first develop custom operators
-        based on Ascend C and compile them. For operator development, you can refer to the tutorial on
-        `Quick Start for End-to-End Operator Development
-        <https://www.hiascend.com/document/detail/zh/canncommercial/70RC1/operatordev/Ascendcopdevg/atlas_ascendc_10_0022.html>`_,
-        and for compiling custom operators, you can use the `Offline Compilation of Ascend C Custom Operators
-        <https://www.mindspore.cn/tutorials/experts/en/master/operation/op_custom_ascendc.html>` tool.
-        When passing the operator's name into the func parameter, taking AddCustom as an example for the
-        name given in the custom operator implementation, there are several ways to use it:
-
-        - Usin TBE: func="AddCustom"
-        - Using AclNN: func="aclnnAddCustom"
-        - Inferring the shape of the operator through C++ derivation: func="infer_shape.cc:aclnnAddCustom",
-          where infer_shape.cc is the shape derivation implemented in C++.
-
     2. for "julia":
 
         Currently "julia" supports CPU(linux only) platform.
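The calling pattern shown at line 249 of the docstring is the whole "aot" contract: a `file:symbol` string plus explicit output shape and dtype. A minimal sketch, assuming `./reorganize.so` has been compiled ahead of time as the docstring describes:

```python
# Sketch of an "aot" Custom op, mirroring the docstring example above.
# ./reorganize.so and CustomReorganize are the docstring's placeholders.
import mindspore as ms
from mindspore import ops

reorganize = ops.Custom(func="./reorganize.so:CustomReorganize",
                        out_shape=[1],
                        out_dtype=ms.float32,
                        func_type="aot")
```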
@@ -338,7 +323,7 @@ class Custom(ops.PrimitiveWithInfer):
         or the attributes of `func` differs in different targets.
 
     Supported Platforms:
-        ``GPU`` ``CPU``
+        ``GPU`` ``CPU``
 
     Examples:
         >>> import numpy as np
@@ -417,7 +402,6 @@ class Custom(ops.PrimitiveWithInfer):
         self.func_source_str = ""
         self._func_compile_attrs = {}
         self._is_ms_kernel = False
-        self.out_shape = out_shape
 
         self._check_platform()
         self._check_func()
@@ -433,6 +417,7 @@ class Custom(ops.PrimitiveWithInfer):
             add_pyfunc(func_id, self.func)
             self.add_prim_attr("fn_id", func_id)
 
+        self.out_shape = out_shape
         if self.out_shape is None and self.func_type == "aot":
             self.add_prim_attr("cpp_infer_shape", True)
         self.out_dtype = out_dtype
@@ -518,7 +503,6 @@ class Custom(ops.PrimitiveWithInfer):
         return out
 
     def get_bprop(self):
-        """return back propagation function"""
         return self.bprop
 
     def _set_akg_kernel_type(self):
@@ -547,37 +531,7 @@ class Custom(ops.PrimitiveWithInfer):
                 raise Exception("{}, function {} is not found in source file {}!"
                                 .format(self.log_prefix, func, source_file))
 
-    def _check_aot_func(self):
-        """Check the source code and bin lib for aot type custom op"""
-        if not isinstance(self.func, str):
-            raise TypeError("{}, 'func' must be of type str, but got {}".format(
-                self.log_prefix, type(self.func)))
-        file_name_list = self.func.split(":")
-        if len(file_name_list) != 2:
-            if callable(self.out_shape):
-                return
-            raise TypeError(
-                "{}, 'func' should be like 'file_name:func_name', but got {}".format(
-                    self.log_prefix, self.func))
-        file_path = os.path.abspath(file_name_list[0])
-        if os.environ.get('MS_CUSTOM_AOT_WHITE_LIST') is None:
-            if Custom.custom_aot_warning:
-                logger.info("{}, no white list is set and it might cause problems. "
-                            "Set the legal path of the file in MS_CUSTOM_AOT_WHITE_LIST"
-                            .format(self.log_prefix))
-                Custom.custom_aot_warning = False
-        else:
-            legal_path = os.path.abspath(os.environ.get('MS_CUSTOM_AOT_WHITE_LIST'))
-            if legal_path not in file_path:
-                raise TypeError(
-                    "{}, the legal path for the file is {}, but the file is {}".format(
-                        self.log_prefix, legal_path, file_path))
-        if file_path.endswith(("cu", "cpp", "cc")):
-            file_path = _compile_aot(file_path)
-            self.func = file_path + ":" + file_name_list[1]
-
     def _check_platform(self):
-        """check the platform"""
         if platform.system() != 'Linux':
             raise Exception("Custom op only supported on Linux platform currently.")
 
@@ -587,7 +541,30 @@ class Custom(ops.PrimitiveWithInfer):
             raise ValueError("{}, 'func_type' must be one of {}, but got {}"
                              .format(self.log_prefix, self.supported_func_type, self.func_type))
         if self.func_type == "aot":
-            self._check_aot_func()
+            if not isinstance(self.func, str):
+                raise TypeError("{}, 'func' must be of type str, but got {}".format(
+                    self.log_prefix, type(self.func)))
+            file_name_list = self.func.split(":")
+            if len(file_name_list) != 2:
+                raise TypeError(
+                    "{}, 'func' should be like 'file_name:func_name', but got {}".format(
+                        self.log_prefix, self.func))
+            file_path = os.path.abspath(file_name_list[0])
+            if os.environ.get('MS_CUSTOM_AOT_WHITE_LIST') is None:
+                if Custom.custom_aot_warning:
+                    logger.info("{}, no white list is set and it might cause problems. "
+                                "Set the legal path of the file in MS_CUSTOM_AOT_WHITE_LIST"
+                                .format(self.log_prefix))
+                    Custom.custom_aot_warning = False
+            else:
+                legal_path = os.path.abspath(os.environ.get('MS_CUSTOM_AOT_WHITE_LIST'))
+                if legal_path not in file_path:
+                    raise TypeError(
+                        "{}, the legal path for the file is {}, but the file is {}".format(
+                            self.log_prefix, legal_path, file_path))
+            if file_path.endswith(("cu", "cpp", "cc")):
+                file_path = _compile_aot(file_path)
+                self.func = file_path + ":" + file_name_list[1]
 
         elif self.func_type == "julia":
             self._check_julia_func()
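The block inlined above is the only consumer of `MS_CUSTOM_AOT_WHITE_LIST`: left unset, it logs a one-time info message and proceeds; when set, it rejects any `func` path outside the given directory. A sketch of the intended setup (the directory is a placeholder):

```python
# Placeholder directory: confine aot Custom sources/libraries to one path
# so the whitelist check above passes silently.
import os

os.environ["MS_CUSTOM_AOT_WHITE_LIST"] = "/opt/my_custom_kernels"
# func="/opt/my_custom_kernels/reorganize.so:CustomReorganize"  -> accepted
# func="/tmp/untrusted.so:Hook"                                 -> TypeError
```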
@@ -80,7 +80,7 @@ class ScalarSummary(Primitive):
     Examples:
         >>> import mindspore
         >>> import mindspore.nn as nn
-        >>>
+        >>> import mindspore.ops as ops
         >>> from mindspore import Tensor, set_context
         >>>
         >>>
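All of the Summary hunks in this file restore the same missing `import mindspore.ops as ops` line in the docstring examples. For context, the pattern those examples build toward looks roughly like this (a sketch of the documented usage, not verbatim package content):

```python
# Sketch of the ScalarSummary docstring pattern: record a named scalar
# from inside a Cell so a SummaryRecord callback can pick it up.
import mindspore.nn as nn
import mindspore.ops as ops

class SummaryDemo(nn.Cell):
    def __init__(self):
        super(SummaryDemo, self).__init__()
        self.summary = ops.ScalarSummary()  # (tag, scalar tensor) -> side effect
        self.add = ops.Add()

    def construct(self, x, y):
        self.summary("x", x)  # first argument is the tag string
        return self.add(x, y)
```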
@@ -137,7 +137,7 @@ class ImageSummary(Primitive):
     Examples:
 
         >>> import mindspore.nn as nn
-        >>>
+        >>> import mindspore.ops as ops
         >>>
         >>>
         >>> class Net(nn.Cell):
@@ -189,7 +189,7 @@ class TensorSummary(Primitive):
     Examples:
         >>> import mindspore
         >>> import mindspore.nn as nn
-        >>>
+        >>> import mindspore.ops as ops
         >>> from mindspore import Tensor, set_context
         >>>
         >>>
@@ -329,7 +329,7 @@ class HistogramSummary(Primitive):
     Examples:
         >>> import mindspore
         >>> import mindspore.nn as nn
-        >>>
+        >>> import mindspore.ops as ops
         >>> from mindspore import Tensor, set_context
         >>>
         >>>
@@ -412,7 +412,7 @@ class FusedCastAdamWeightDecay(PrimitiveWithInfer):
         >>> import numpy as np
         >>> import mindspore as ms
         >>> import mindspore.nn as nn
-        >>>
+        >>> import mindspore.ops as ops
         >>> from mindspore import Tensor, Parameter
         >>> from mindspore import dtype as mstype
         >>> class Net(nn.Cell):
@@ -542,7 +542,7 @@ class FusedAdaFactor(PrimitiveWithInfer):
         >>> import numpy as np
         >>> import mindspore as ms
         >>> import mindspore.nn as nn
-        >>>
+        >>> import mindspore.ops as ops
         >>> from mindspore import Tensor, Parameter
         >>> from mindspore import dtype as mstype
         >>> param_shape = [2, 3, 2]
@@ -1181,309 +1181,6 @@ class Cast(Primitive):
             return output
         return _convert_stub(pyboost_cast(self, [input_x, dtype_to_type_id('Cast', 'dtype', dtype)]))
 
-
-def to_sequence(val):
-    """
-    to_sequence
-    """
-    if isinstance(val, (tuple, list)):
-        return val
-    return (val,)
-
-
-class EmbeddingTableExport(Primitive):
-    """
-    EmbeddingTableExport
-    """
-
-    @prim_attr_register
-    def __init__(self, embedding_dim, value_total_len, export_mode="all",
-                 only_var_flag=False, file_type="bin", table_name=(),
-                 filter_export_flag=False, steps_to_live_list=()):
-        """Initialize EmbeddingTableExport"""
-        self.add_prim_attr("_process_node_engine_id", "PS")
-
-
-class EmbeddingTableImport(Primitive):
-    """
-    EmbeddingTableImport
-    """
-
-    @prim_attr_register
-    def __init__(self, embedding_dim, value_total_len,
-                 only_var_flag=False, file_type="bin", table_name=()):
-        """Initialize EmbeddingTableImport"""
-        self.add_prim_attr("_process_node_engine_id", "PS")
-
-
-class EmbeddingComputeVarImport(Primitive):
-    """
-    EmbeddingComputeVarImport
-    """
-
-    @prim_attr_register
-    def __init__(self, table_name=()):
-        """Initialize EmbeddingComputeVarImport"""
-        self.add_prim_attr("_process_node_engine_id", "PS")
-
-
-class EmbeddingComputeVarExport(Primitive):
-    """
-    EmbeddingComputeVarExport
-    """
-
-    @prim_attr_register
-    def __init__(self, table_name=()):
-        """Initialize EmbeddingComputeVarExport"""
-        self.add_prim_attr("_process_node_engine_id", "PS")
-
-
-class InitEmbeddingHashmap(Primitive):
-    """
-    InitEmbeddingHashmap
-    """
-    @prim_attr_register
-    def __init__(self, value_total_len, embedding_dim, _table_id,
-                 bucket_size=0, dtype=mstype.float32, initializer_mode="",
-                 constant_valu=0., min=-2., max=2., mu=0., sigma=1., seed=0,
-                 seed2=0, filter_mode="no_filter", optimizer_mode="",
-                 optimizer_params=()):
-        self.add_prim_attr("_process_node_engine_id", "PS")
-
-
-def init_embedding_hashmap(table_id, value_total_len, embedding_dim, _table_id,
-                           bucket_size=0, dtype=mstype.float32, initializer_mode='',
-                           constant_value=0.0, min=-2.0, max=2.0, mu=0.0, sigma=1.0,
-                           seed=0, seed2=0, filter_mode='no_filter',
-                           optimizer_mode='', optimizer_params=()):
-    """
-    init_embedding_hashmap
-    """
-    op = _get_cache_prim(InitEmbeddingHashmap)(value_total_len, embedding_dim, _table_id,
-                                               bucket_size, dtype, initializer_mode,
-                                               constant_value, min, max, mu, sigma, seed,
-                                               seed2, filter_mode, optimizer_mode, optimizer_params)
-    return op(table_id)
-
-
-class InitPartitionMap(Primitive):
-    """
-    InitPartitionMap
-    """
-    @prim_attr_register
-    def __init__(self, _embedding_dim, _max_key_num,
-                 _ps_num=1, partition_num=65537):
-        self.add_prim_attr("_process_node_engine_id", "PS")
-
-
-def init_partition_map(ps_num, ps_ids, _embedding_dim, _max_key_num,
-                       _ps_num=1, partition_num=65537):
-    """
-    init_partition_map
-    """
-    op = _get_cache_prim(InitPartitionMap)(_embedding_dim, _max_key_num, _ps_num, partition_num)
-    return op(ps_num, ps_ids)
-
-
-class EmbeddingApplyAdam(Primitive):
-    """
-    EmbeddingApplyAdam
-    """
-    @prim_attr_register
-    def __init__(self, embedding_dim, _max_key_num, mask_zero=(0,),
-                 padding_key=(0,), padding_key_mask=(1,),
-                 completion_key=(0,), completion_key_mask=(1,)):
-        self.add_prim_attr("_process_node_engine_id", "PS")
-
-
-class EmbeddingApplyAdamW(Primitive):
-    """
-    EmbeddingApplyAdam
-    """
-    @prim_attr_register
-    def __init__(self, embedding_dim, _max_key_num, amsgrad=(0,),
-                 maximize=(0,), mask_zero=(0,), padding_key=(0,),
-                 padding_key_mask=(1,), completion_key=(0,), completion_key_mask=(1,)):
-        self.add_prim_attr("_process_node_engine_id", "PS")
-
-
-class EmbeddingApplyAdaGrad(Primitive):
-    """
-    EmbeddingApplyAdaGrad
-    """
-    @prim_attr_register
-    def __init__(self, embedding_dim, _max_key_num, mask_zero=(0,),
-                 padding_key=(0,), padding_key_mask=(1,),
-                 completion_key=(0,), completion_key_mask=(1,)):
-        self.add_prim_attr("_process_node_engine_id", "PS")
-
-
-class EmbeddingApplyFtrl(Primitive):
-    """
-    EmbeddingApplyFtrl
-    """
-    @prim_attr_register
-    def __init__(self, embedding_dim, _max_key_num, mask_zero=(0,),
-                 padding_key=(0,), padding_key_mask=(1,),
-                 completion_key=(0,), completion_key_mask=(1,)):
-        self.add_prim_attr("_process_node_engine_id", "PS")
-
-
-class EmbeddingTableFind(Primitive):
-    """
-    EmbeddingTableFind
-    """
-    @prim_attr_register
-    def __init__(self, embedding_dim, _embedding_dim, _max_key_num,
-                 _table_id, default_value=(-1.), _use_counter_filter=0):
-        self.add_prim_attr("_process_node_engine_id", "PS")
-        self.add_prim_attr("_execute_times", 2)
-
-
-def embedding_table_find(table_id, keys, embedding_dim, _max_key_num,
-                         _table_id, default_value=(-1.0,), _use_counter_filter=0):
-    r"""
-    embedding_table_find
-    """
-    _embedding_dim = embedding_dim if isinstance(embedding_dim, int) else embedding_dim[_table_id]
-    op = _get_cache_prim(EmbeddingTableFind)(to_sequence(embedding_dim), _embedding_dim,
-                                             _max_key_num, _table_id,
-                                             to_sequence(default_value),
-                                             _use_counter_filter)
-    return op(table_id, keys)
-
-
-class EmbeddingTableFindAndInit(Primitive):
-    """
-    EmbeddingTableFindAndInit
-    """
-    @prim_attr_register
-    def __init__(self, embedding_dim, value_total_len, _embedding_dim, _table_id,
-                 _max_key_num, initializer_mode=("random_uniform",),
-                 constant_value=(0.,), min=(-2.,), max=(2.,), mu=(0.,),
-                 sigma=(1.,), seed=(0,), seed2=(0,),
-                 filter_mode=("no_filter",), filter_freq=(0,),
-                 default_key_or_value=(0,), default_key=(0,),
-                 default_value=(0.,), completion_key=(0,),
-                 completion_key_mask=(1,), optimizer_mode=(),
-                 optimizer_params=(), _use_counter_filter=0,
-                 backward_mode="adam",
-                 backward_int_params=((0,), (0,), (0,), (1,)),
-                 backward_float_params=(0.9, 0.99, 0.001, 0.9, 0.999, 1e-08)):
-        self.add_prim_attr("_process_node_engine_id", "PS")
-        self.add_prim_attr("_execute_times", 2)
-
-
-def embedding_table_find_and_init(table_id, keys, max_grad_norm, parameter, embedding_dim,
-                                  value_total_len, _table_id, _max_key_num,
-                                  initializer_mode=('random_uniform',), constant_value=(0.,),
-                                  min=(-2.,), max=(2.,), mu=(0.,), sigma=(1.,), seed=(0,),
-                                  seed2=(0,), filter_mode=("no_filter",),
-                                  filter_freq=(0,), default_key_or_value=(0,),
-                                  default_key=(0,), default_value=(0.,),
-                                  completion_key=(0,), completion_key_mask=(1,),
-                                  optimizer_mode=(), optimizer_params=(), _use_counter_filter=0,
-                                  backward_mode="adam", backward_int_params=((0,), (0,), (0,), (1,)),
-                                  backward_float_params=(0.9, 0.99, 0.001, 0.9, 0.999, 1e-08)):
-    """
-    embedding_table_find_and_init
-
-    backward_int_params (Union[tuple[tuple[int]], list[list[int]]]):
-        - when the backward_mode is 'adam', 'ftrl' or 'adagrad',
-          it means [[global_step], mask_zero, padding_key, padding_key_mask]
-        - when the backward_mode is 'adamw', it means:
-          [[global_step], amsgrad, maximize, mask_zero, padding_key, padding_key_mask]
-    backward_float_params (Union[tuple[float], list[float]]):
-        - when the backward_mode is 'adam', it means:
-          [beta1_power, beta2_power, lr, beta1, beta2, epsilon]
-        - when the backward_mode is 'ftrl', it means:
-          [lr, lr_power, lambda1, lambda2]
-        - when the backward_mode is 'adamw', it means:
-          [beta1_power, beta2_power, lr, weight_decay, beta1, beta2, epsilon]
-        - when the backward_mode is 'adagrad', it means [lr,]
-    """
-    _embedding_dim = embedding_dim if isinstance(embedding_dim, int) else embedding_dim[_table_id]
-    op = _get_cache_prim(EmbeddingTableFindAndInit)(to_sequence(embedding_dim), to_sequence(value_total_len),
-                                                    _embedding_dim, _table_id, _max_key_num,
-                                                    to_sequence(initializer_mode),
-                                                    to_sequence(constant_value), to_sequence(min),
-                                                    to_sequence(max), to_sequence(mu),
-                                                    to_sequence(sigma), to_sequence(seed),
-                                                    to_sequence(seed2), to_sequence(filter_mode),
-                                                    to_sequence(filter_freq), to_sequence(default_key_or_value),
-                                                    to_sequence(default_key), to_sequence(default_value),
-                                                    to_sequence(completion_key), to_sequence(completion_key_mask),
-                                                    to_sequence(optimizer_mode), to_sequence(optimizer_params),
-                                                    _use_counter_filter,
-                                                    backward_mode, backward_int_params, backward_float_params)
-    return op(table_id, keys, max_grad_norm, parameter)
-
-
-class FakeRemoteLookupUniqued(Primitive):
-
-    """
-    FakeRemoteLookupUniqued
-    """
-    @prim_attr_register
-    def __init__(self, embedding_dim, value_total_len, _embedding_dim, _table_id,
-                 _max_key_num, initializer_mode=('random_uniform',), constant_value=(0.,),
-                 min=(-2.,), max=(2.,), mu=(0.,), sigma=(1.,), seed=(0,), seed2=(0,),
-                 filter_mode=("no_filter",), filter_freq=(0,),
-                 default_key_or_value=(0,), default_key=(0,), default_value=(0.,),
-                 completion_key=(0,), completion_key_mask=(1,),
-                 optimizer_mode=(), optimizer_params=(), _use_counter_filter=0,
-                 backward_mode="adam", backward_int_params=((0,), (0,), (0,), (1,)),
-                 backward_float_params=(0.9, 0.99, 0.001, 0.9, 0.999, 1e-08)):
-        self.add_prim_attr("_process_node_engine_id", "PS")
-        self.add_prim_attr("_execute_times", 2)
-
-
-def fake_remote_lookup_uniqued(table_id, keys, actual_keys_num, unique_indices,
-                               key_count, max_grad_norm, parameter,
-                               embedding_dim, value_total_len, _table_id, _max_key_num,
-                               initializer_mode=('random_uniform',), constant_value=(0.,),
-                               min=(-2.,), max=(2.,), mu=(0.,), sigma=(1.,), seed=(0,),
-                               seed2=(0,), filter_mode=("no_filter",),
-                               filter_freq=(0,), default_key_or_value=(0,),
-                               default_key=(0,), default_value=(0.,),
-                               completion_key=(0,), completion_key_mask=(1,),
-                               optimizer_mode=(), optimizer_params=(), _use_counter_filter=0,
-                               backward_mode='adam', backward_int_params=((0,), (0,), (0,), (1,)),
-                               backward_float_params=(0.9, 0.99, 0.001, 0.9, 0.999, 1e-08)):
-    """
-    fake_remote_lookup_uniqued
-
-    backward_mode (str): determine the optimizer used by backpropagation,
-        valid values are ["adam", "adamw", "adagrad", "ftrl"]
-    backward_int_params (Union[tuple[tuple[int]], list[list[int]]]):
-        - when the backward_mode is 'adam', 'ftrl' or 'adagrad',
-          it means [[global_step], mask_zero, padding_key, padding_key_mask]
-        - when the backward_mode is 'adamw', it means:
-          [[global_step], amsgrad, maximize, mask_zero, padding_key, padding_key_mask]
-    backward_float_params (Union[tuple[float], list[float]]):
-        - when the backward_mode is 'adam', it means:
-          [beta1_power, beta2_power, lr, beta1, beta2, epsilon]
-        - when the backward_mode is 'ftrl', it means:
-          [lr, lr_power, lambda1, lambda2]
-        - when the backward_mode is 'adamw', it means:
-          [beta1_power, beta2_power, lr, weight_decay, beta1, beta2, epsilon]
-        - when the backward_mode is 'adagrad', it means [lr,]
-    """
-    _embedding_dim = embedding_dim if isinstance(embedding_dim, int) else embedding_dim[_table_id]
-    op = _get_cache_prim(FakeRemoteLookupUniqued)(to_sequence(embedding_dim), to_sequence(value_total_len),
-                                                  _embedding_dim, _table_id, _max_key_num,
-                                                  to_sequence(initializer_mode), to_sequence(constant_value),
-                                                  to_sequence(min), to_sequence(max), to_sequence(mu),
-                                                  to_sequence(sigma), to_sequence(seed), to_sequence(seed2),
-                                                  to_sequence(filter_mode), to_sequence(filter_freq),
-                                                  to_sequence(default_key_or_value), to_sequence(default_key),
-                                                  to_sequence(default_value), to_sequence(completion_key),
-                                                  to_sequence(completion_key_mask), to_sequence(optimizer_mode),
-                                                  to_sequence(optimizer_params), _use_counter_filter,
-                                                  backward_mode, backward_int_params, backward_float_params)
-    return op(table_id, keys, actual_keys_num, unique_indices, key_count, max_grad_norm, parameter)
-
-
 # Following is Python Infer Value.
 # A valid infer value function should be:
 #
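Every removed wrapper above funnels its scalar-or-sequence arguments through the removed `to_sequence` helper before handing them to `_get_cache_prim`, so a plain scalar and a one-element tuple hit the same primitive cache entry. Its behavior, restated from the removed code:

```python
# Logic copied from the removed to_sequence helper above.
def to_sequence(val):
    if isinstance(val, (tuple, list)):
        return val
    return (val,)

assert to_sequence(3) == (3,)                    # scalar -> 1-tuple
assert to_sequence((0.9, 0.99)) == (0.9, 0.99)   # sequences pass through unchanged
```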
@@ -1622,6 +1319,9 @@ def infer_value_for_Cast(x, dst_type_enum=None):
     if isinstance(x, (int, float)):
         value = Tensor(np.array(x).astype(np_dst_type), dtype=dst_type)
     else:
+        if x.dtype == mstype.bfloat16:
+            cpu_cast = Cast().set_device("CPU")
+            x = cpu_cast(x, mstype.float32)
         value = Tensor_(x.asnumpy().astype(np_dst_type), dtype=dst_type)
     return value
 
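The three added lines work around the fact that numpy has no native bfloat16 dtype, so `asnumpy()` cannot be applied to a bfloat16 tensor directly; the value is first cast to float32 on CPU. The same pattern in isolation, a sketch using only the APIs visible in the hunk:

```python
# Sketch of the bfloat16 workaround added above: cast to float32 on CPU
# before converting to a numpy array.
import mindspore as ms
from mindspore import Tensor, ops

x = Tensor([1.5, 2.5], ms.bfloat16)
cpu_cast = ops.Cast().set_device("CPU")  # same call as in the hunk
x32 = cpu_cast(x, ms.float32)
print(x32.asnumpy())  # now representable: array([1.5, 2.5], dtype=float32)
```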
@@ -1877,7 +1577,7 @@ def flash_attention_score(query, key, value, head_num, real_shift=None, drop_mas
 
     .. math::
         \begin{array}{ll} \\
-            y = Dropout(Softmax(Mask(scale_value \mul (real_shift + query * key), attn_mask), -1),
+            y = Dropout(Softmax(Mask(scale_value \mul (real_shift + query * key), attn_mask), -1), keep_prob) \\
             \mul value \\
         \end{array}
 
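The fix restores the `keep_prob` argument and the line continuation that the 2.3.0 docstring dropped. Rendered, the repaired formula reads (a cleaned transcription of the hunk above, nothing new):

```latex
y = \mathrm{Dropout}\Big(\mathrm{Softmax}\big(\mathrm{Mask}(scale\_value \cdot (real\_shift + query \cdot key),\ attn\_mask),\ -1\big),\ keep\_prob\Big) \cdot value
```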
@@ -40,7 +40,7 @@ from ..auto_generate import (Add, Addcdiv, Addcmul, ReduceMean, ReduceSum, Reduc
                              LinSpace, MatrixDeterminant, LogMatrixDeterminant, Erfinv, Conj,
                              Real, Complex, Angle, MatrixExp, CholeskyInverse, Trace, Cholesky,
                              FFTWithSize, NextAfter, NanToNum, Eig, Qr, Roll, Maximum, Div, DivMod, CumProd,
-                             CumSum, Less, LessEqual, AssignAdd, IsFinite,
+                             CumSum, Less, LessEqual, AssignAdd, IsFinite, TanhGrad)
 
 
 def _infer_shape_reduce(x, axis, keep_dims, prim_name):
@@ -2664,7 +2664,7 @@ class SquareSumAll(Primitive):
     Examples:
         >>> import numpy as np
         >>> import mindspore
-        >>>
+        >>> import mindspore.ops as ops
         >>> from mindspore import Tensor
         >>> x = Tensor(np.array([0, 0, 2, 0]), mindspore.float32)
         >>> y = Tensor(np.array([0, 0, 2, 4]), mindspore.float32)
@@ -3881,6 +3881,53 @@ class Igammac(Primitive):
         self.init_prim_io_names(inputs=['a', 'x'], outputs=['z'])
 
 
+class IsClose(Primitive):
+    r"""
+    Returns a tensor of Boolean values indicating whether two input tensors
+    are element-wise equal within a given tolerance.
+
+    Refer to :func:`mindspore.ops.isclose` for more details.
+
+    Args:
+        rtol(float, optional): Relative tolerance. Default: ``1e-05`` .
+        atol(float, optional): Absolute tolerance. Default: ``1e-08`` .
+        equal_nan(bool, optional): If ``True`` , then two NaNs will be considered equal. Default: ``True`` .
+
+    Inputs:
+        - **input** (Tensor) - First tensor to compare, with data type belongs to float32, float16, int32.
+        - **other** (Tensor) - Second tensor to compare, with data type belongs to float32, float16, int32.
+
+    Outputs:
+        Tensor, with the same shape as `input` and `other` after broadcasting, its dtype is bool.
+
+    Supported Platforms:
+        ``Ascend`` ``GPU`` ``CPU``
+
+    Examples:
+        >>> import mindspore
+        >>> import numpy as np
+        >>> from mindspore import Tensor
+        >>> from mindspore.ops import IsClose
+        >>> input = Tensor(np.array([1.3, 2.1, 3.2, 4.1, 5.1]), mindspore.float16)
+        >>> other = Tensor(np.array([1.3, 3.3, 2.3, 3.1, 5.1]), mindspore.float16)
+        >>> isclose = IsClose()
+        >>> output = isclose(input, other)
+        >>> print(output)
+        [ True False False False True]
+    """
+
+    @prim_attr_register
+    def __init__(self, rtol=1e-05, atol=1e-08, equal_nan=True):
+        """Initialize IsClose"""
+        validator.check_value_type('rtol', rtol, [float], self.name)
+        validator.check_value_type('atol', atol, [float], self.name)
+        validator.check_value_type('equal_nan', equal_nan, [bool], self.name)
+        if context.get_context("device_target") == "Ascend" and not equal_nan:
+            raise ValueError("For IsClose, the `equal_nan` must be True on Ascend, but got False.")
+        validator.check_non_negative_float(rtol, 'rtol', self.name)
+        validator.check_non_negative_float(atol, 'atol', self.name)
+
+
 class MatrixSolve(Primitive):
     """
     Solves systems of linear equations.
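`IsClose` documents the usual closeness test, matching :func:`mindspore.ops.isclose` and numpy's reference semantics: elementwise, `|input - other| <= atol + rtol * |other|` after broadcasting. The docstring example reproduces with numpy:

```python
# Check the IsClose docstring example against numpy's isclose, which
# applies |a - b| <= atol + rtol * |b| elementwise.
import numpy as np

a = np.array([1.3, 2.1, 3.2, 4.1, 5.1], dtype=np.float16)
b = np.array([1.3, 3.3, 2.3, 3.1, 5.1], dtype=np.float16)
print(np.isclose(a, b, rtol=1e-05, atol=1e-08, equal_nan=True))
# [ True False False False  True]
```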
@@ -4322,7 +4369,7 @@ class Cross(Primitive):
         >>> import numpy as np
         >>> from mindspore import Tensor
         >>> from mindspore import dtype as mstype
-        >>>
+        >>> import mindspore.ops as ops
         >>> cross = ops.Cross(dim = 0)
         >>> x1 = Tensor([1, 2, 3], mstype.int8)
         >>> x2 = Tensor([1, 2, 3], mstype.int8)