mindspore 2.3.0__cp39-none-any.whl → 2.3.0rc2__cp39-none-any.whl
This diff compares the contents of two package versions that have been publicly released to a supported registry. It is provided for informational purposes only and reflects the changes between those versions as they appear in the public registry.
Potentially problematic release.
This version of mindspore might be problematic.
- mindspore/.commit_id +1 -1
- mindspore/Third_Party_Open_Source_Software_Notice +0 -1512
- mindspore/__init__.py +1 -2
- mindspore/_c_dataengine.cpython-39-aarch64-linux-gnu.so +0 -0
- mindspore/_c_expression.cpython-39-aarch64-linux-gnu.so +0 -0
- mindspore/_c_mindrecord.cpython-39-aarch64-linux-gnu.so +0 -0
- mindspore/_checkparam.py +25 -5
- mindspore/_extends/graph_kernel/model/graph_parallel.py +1 -1
- mindspore/_extends/parse/__init__.py +2 -2
- mindspore/_extends/parse/compile_config.py +0 -29
- mindspore/_extends/parse/namespace.py +2 -2
- mindspore/_extends/parse/parser.py +5 -21
- mindspore/_extends/parse/resources.py +7 -5
- mindspore/_extends/parse/standard_method.py +59 -40
- mindspore/_mindspore_offline_debug.cpython-39-aarch64-linux-gnu.so +0 -0
- mindspore/amp.py +5 -26
- mindspore/bin/cache_admin +0 -0
- mindspore/bin/cache_server +0 -0
- mindspore/boost/adasum.py +1 -1
- mindspore/boost/base.py +1 -1
- mindspore/boost/boost_cell_wrapper.py +1 -1
- mindspore/boost/grad_freeze.py +2 -2
- mindspore/boost/less_batch_normalization.py +6 -9
- mindspore/common/__init__.py +1 -8
- mindspore/common/_register_for_tensor.py +9 -8
- mindspore/common/api.py +65 -275
- mindspore/common/dtype.py +4 -8
- mindspore/common/dump.py +5 -2
- mindspore/common/jit_config.py +1 -1
- mindspore/common/lazy_inline.py +2 -14
- mindspore/common/parameter.py +15 -14
- mindspore/common/recompute.py +5 -20
- mindspore/common/sparse_tensor.py +6 -21
- mindspore/common/tensor.py +52 -100
- mindspore/communication/__init__.py +11 -6
- mindspore/communication/management.py +94 -92
- mindspore/context.py +18 -180
- mindspore/dataset/engine/datasets.py +46 -69
- mindspore/dataset/engine/datasets_user_defined.py +53 -72
- mindspore/dataset/engine/datasets_vision.py +2 -2
- mindspore/dataset/engine/queue.py +38 -56
- mindspore/dataset/engine/validators.py +5 -11
- mindspore/dataset/vision/__init__.py +5 -5
- mindspore/dataset/vision/c_transforms.py +5 -5
- mindspore/dataset/vision/py_transforms_util.py +1 -1
- mindspore/dataset/vision/transforms.py +46 -591
- mindspore/dataset/vision/utils.py +1 -121
- mindspore/dataset/vision/validators.py +3 -9
- mindspore/hal/__init__.py +1 -7
- mindspore/hal/device.py +1 -1
- mindspore/include/api/model.h +0 -3
- mindspore/include/dataset/vision.h +2 -54
- mindspore/include/mindapi/base/types.h +0 -1
- mindspore/lib/libdnnl.so.2 +0 -0
- mindspore/lib/libmindspore.so +0 -0
- mindspore/lib/libmindspore_backend.so +0 -0
- mindspore/lib/libmindspore_common.so +0 -0
- mindspore/lib/libmindspore_core.so +0 -0
- mindspore/lib/libmindspore_glog.so.0 +0 -0
- mindspore/lib/libmindspore_gpr.so.15 +0 -0
- mindspore/lib/libmindspore_grpc++.so.1 +0 -0
- mindspore/lib/libmindspore_grpc.so.15 +0 -0
- mindspore/lib/libmindspore_shared_lib.so +0 -0
- mindspore/lib/libmpi_adapter.so +0 -0
- mindspore/lib/libmpi_collective.so +0 -0
- mindspore/lib/libnnacl.so +0 -0
- mindspore/lib/libopencv_core.so.4.5 +0 -0
- mindspore/lib/libps_cache.so +0 -0
- mindspore/lib/plugin/ascend/custom_aicore_ops/op_impl/ai_core/tbe/config/ascend310p/aic-ascend310p-ops-info.json +0 -35
- mindspore/lib/plugin/ascend/custom_aicore_ops/op_impl/ai_core/tbe/custom_aicore_ops_impl/kv_cache_mgr.py +0 -2
- mindspore/lib/plugin/ascend/custom_aicore_ops/op_impl/vector_core/tbe/custom_aicore_ops_impl/kv_cache_mgr.py +0 -2
- mindspore/lib/plugin/ascend/custom_aicpu_ops/op_impl/cpu/aicpu_kernel/impl/libcust_cpu_kernels.so +0 -0
- mindspore/lib/plugin/ascend/custom_aicpu_ops/op_impl/cpu/config/cust_aicpu_kernel.json +0 -72
- mindspore/lib/plugin/ascend/custom_aicpu_ops/op_proto/libcust_op_proto.so +0 -0
- mindspore/lib/plugin/ascend/custom_ascendc_ops/op_api/include/{aclnn_all_finite.h → aclnn_add_custom.h} +11 -9
- mindspore/lib/plugin/ascend/custom_ascendc_ops/op_api/include/aclnn_decoder_kv_cache.h +1 -1
- mindspore/lib/plugin/ascend/custom_ascendc_ops/op_api/include/aclnn_prompt_kv_cache.h +1 -1
- mindspore/lib/plugin/ascend/custom_ascendc_ops/op_api/lib/libcust_opapi.so +0 -0
- mindspore/lib/plugin/ascend/custom_ascendc_ops/op_impl/ai_core/tbe/config/ascend310p/aic-ascend310p-ops-info.json +12 -184
- mindspore/lib/plugin/ascend/custom_ascendc_ops/op_impl/ai_core/tbe/config/ascend910/aic-ascend910-ops-info.json +15 -7
- mindspore/lib/plugin/ascend/custom_ascendc_ops/op_impl/ai_core/tbe/config/ascend910b/aic-ascend910b-ops-info.json +15 -7
- mindspore/lib/plugin/ascend/custom_ascendc_ops/op_impl/ai_core/tbe/custom_ascendc_ops_impl/dynamic/add_custom.cpp +81 -0
- mindspore/lib/plugin/ascend/custom_ascendc_ops/op_impl/ai_core/tbe/custom_ascendc_ops_impl/dynamic/add_custom.py +134 -0
- mindspore/lib/plugin/ascend/custom_ascendc_ops/op_impl/ai_core/tbe/custom_ascendc_ops_impl/dynamic/decoder_kv_cache.py +31 -77
- mindspore/lib/plugin/ascend/custom_ascendc_ops/op_impl/ai_core/tbe/custom_ascendc_ops_impl/dynamic/prompt_kv_cache.py +31 -77
- mindspore/lib/plugin/ascend/custom_ascendc_ops/op_impl/ai_core/tbe/op_tiling/lib/linux/aarch64/libcust_opmaster_rt2.0.so +0 -0
- mindspore/lib/plugin/ascend/custom_ascendc_ops/op_impl/ai_core/tbe/op_tiling/liboptiling.so +0 -0
- mindspore/lib/plugin/ascend/custom_ascendc_ops/op_proto/inc/op_proto.h +5 -4
- mindspore/lib/plugin/ascend/custom_ascendc_ops/op_proto/lib/linux/aarch64/libcust_opsproto_rt2.0.so +0 -0
- mindspore/lib/plugin/ascend/libascend_collective.so +0 -0
- mindspore/lib/plugin/ascend/libdvpp_utils.so +0 -0
- mindspore/lib/plugin/ascend/libhccl_plugin.so +0 -0
- mindspore/lib/plugin/ascend/liblowlatency_collective.so +0 -0
- mindspore/lib/plugin/ascend/libmindspore_cpu_kernels.so +0 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/asdops/bin/DeviceBin +0 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/asdops/bin/PkgInspect +0 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/asdops/bin/op_man +0 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/asdops/device/ascend910b/bin/ascend910b.bin +286 -275
- mindspore/lib/plugin/ascend/ms_kernels_internal/asdops/host/libasdops_cann_host.so +0 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/asdops/host/libasdops_host.so +0 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/asdops/lib/libasdops.so +0 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/asdops/lib/libasdops_static.a +0 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/add/add_impl.h +0 -1
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/apply_rotary_pos_emb/apply_rotary_pos_emb_impl.h +0 -1
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/asdop/asd_op_impl.h +0 -3
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/backend_param.h +0 -5
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/cast/cast_tiling.h +45 -1
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/compare/compare_impl.h +0 -1
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/flash_attention_score/flash_attention_score_impl.h +4 -8
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/flash_attention_score/flash_attention_score_tiling.h +4 -11
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/flash_attention_score/kernel/flash_attention_score_mix_hwsync.h +0 -18
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/internal_kernel.h +0 -6
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/internal_rtbackend.h +75 -1
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/matmul/kernel/matmul.h +5 -5
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/matmul/matmul_impl.h +3 -18
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/matmul_common/pp_matmul_common_tiling.h +5 -5
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/matmul_common/pp_matmul_info.h +2 -2
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/matmul_common/tiling_data.h +3 -36
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/matmul_stridedslice/kernel/matmul_stridedslice_fusion.h +2 -2
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/matmul_stridedslice/matmul_stridedslice_fusion_impl.h +4 -22
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/op_param.h +2 -16
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/paged_attention/kernel/paged_attention_mix_hwsync.h +3 -1
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/paged_attention/paged_attention_impl.h +4 -5
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/paged_attention/paged_attention_tiling.h +4 -9
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/param/attention_param.h +2 -5
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/param/matmul_ext_param.h +0 -1
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/param/matmul_qkv_param.h +4 -10
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/param/sub_param.h +12 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/rms_norm/rms_norm_impl.h +0 -1
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/sub/sub_impl.h +0 -1
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/tune_repo/matmul_table.h +1 -1
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/utils/backend.h +2 -10
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/utils/elewise_utils.h +1 -5
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/utils/log/log.h +0 -1
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/utils/log/log_tiling.h +0 -17
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/utils/math.h +7 -2
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/lib/libAdd_impl.so +0 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/lib/libSub_impl.so +0 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/lib/libadd_layernorm_impl.so +0 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/lib/libadd_rms_norm_impl.so +0 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/lib/libapply_rotary_pos_emb_impl.so +0 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/lib/libcast_impl.so +0 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/lib/libgelu_impl.so +0 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/lib/libmatmul_impl.so +0 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/lib/libmatmul_stridedslice_fusion_impl.so +0 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/lib/libms_kernels_internal.so +0 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/lib/libnot_equal_impl.so +0 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/lib/libreshape_and_cache_impl.so +0 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/lib/librms_norm_impl.so +0 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/op_kernels/ascend910b/BSAttention/flash_attention_score_bf16_bnsd_full_mix.o +0 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/op_kernels/ascend910b/BSAttention/flash_attention_score_bf16_bnsd_tri_mix.o +0 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/op_kernels/ascend910b/BSAttention/flash_attention_score_bf16_bsh_full_mix.o +0 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/op_kernels/ascend910b/BSAttention/flash_attention_score_bf16_bsh_tri_mix.o +0 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/op_kernels/ascend910b/BSAttention/flash_attention_score_fp16_bnsd_full_mix.o +0 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/op_kernels/ascend910b/BSAttention/flash_attention_score_fp16_bnsd_tri_mix.o +0 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/op_kernels/ascend910b/BSAttention/flash_attention_score_fp16_bsh_full_mix.o +0 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/op_kernels/ascend910b/BSAttention/flash_attention_score_fp16_bsh_tri_mix.o +0 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/op_kernels/ascend910b/BSAttention/paged_attention_bf16_bnsd_full_mix.o +0 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/op_kernels/ascend910b/BSAttention/paged_attention_bf16_bsh_full_mix.o +0 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/op_kernels/ascend910b/BSAttention/paged_attention_fp16_bnsd_full_mix.o +0 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/op_kernels/ascend910b/BSAttention/paged_attention_fp16_bsh_full_mix.o +0 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/lccl/lib/liblcal.so +0 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/lccl/lib/liblccl_wrapper.so +0 -0
- mindspore/lib/plugin/libmindspore_ascend.so.2 +0 -0
- mindspore/mindrecord/filewriter.py +2 -2
- mindspore/mint/__init__.py +40 -720
- mindspore/mint/nn/__init__.py +7 -89
- mindspore/mint/nn/functional.py +16 -165
- mindspore/mint/optim/adamw.py +16 -15
- mindspore/nn/__init__.py +2 -0
- mindspore/nn/cell.py +98 -97
- mindspore/nn/extend/basic.py +2 -2
- mindspore/nn/extend/embedding.py +1 -1
- mindspore/nn/extend/layer/normalization.py +5 -7
- mindspore/nn/generator.py +297 -0
- mindspore/nn/layer/activation.py +3 -4
- mindspore/nn/layer/basic.py +16 -79
- mindspore/nn/layer/conv.py +8 -17
- mindspore/nn/layer/embedding.py +4 -1
- mindspore/nn/layer/math.py +1 -1
- mindspore/nn/layer/normalization.py +1 -1
- mindspore/nn/layer/pooling.py +0 -5
- mindspore/nn/layer/rnn_cells.py +2 -2
- mindspore/nn/loss/loss.py +19 -19
- mindspore/nn/optim/adasum.py +1 -1
- mindspore/nn/optim/sgd.py +2 -3
- mindspore/nn/probability/distribution/exponential.py +1 -1
- mindspore/nn/probability/distribution/geometric.py +1 -1
- mindspore/nn/probability/distribution/logistic.py +1 -1
- mindspore/nn/wrap/cell_wrapper.py +1 -25
- mindspore/nn/wrap/loss_scale.py +1 -24
- mindspore/numpy/array_ops.py +1 -5
- mindspore/numpy/dtypes.py +3 -3
- mindspore/numpy/math_ops.py +8 -8
- mindspore/ops/__init__.py +1 -1
- mindspore/ops/_grad_experimental/grad_comm_ops.py +16 -75
- mindspore/ops/_vmap/vmap_array_ops.py +0 -27
- mindspore/ops/_vmap/vmap_math_ops.py +1 -29
- mindspore/ops/_vmap/vmap_nn_ops.py +18 -19
- mindspore/ops/auto_generate/cpp_create_prim_instance_helper.py +8 -34
- mindspore/ops/auto_generate/gen_arg_dtype_cast.py +9 -2
- mindspore/ops/auto_generate/gen_arg_handler.py +0 -26
- mindspore/ops/auto_generate/gen_extend_func.py +27 -603
- mindspore/ops/auto_generate/gen_ops_def.py +203 -993
- mindspore/ops/auto_generate/gen_ops_prim.py +402 -1946
- mindspore/ops/auto_generate/pyboost_inner_prim.py +20 -90
- mindspore/ops/composite/base.py +6 -3
- mindspore/ops/composite/math_ops.py +1 -1
- mindspore/ops/composite/multitype_ops/_compile_utils.py +17 -24
- mindspore/ops/composite/multitype_ops/_constexpr_utils.py +1 -1
- mindspore/ops/extend/__init__.py +3 -2
- mindspore/ops/extend/array_func.py +51 -10
- mindspore/ops/extend/nn_func.py +78 -2
- mindspore/ops/function/__init__.py +13 -8
- mindspore/ops/function/array_func.py +179 -455
- mindspore/ops/function/clip_func.py +1 -1
- mindspore/ops/function/grad/grad_func.py +3 -3
- mindspore/ops/function/math_func.py +103 -117
- mindspore/ops/function/nn_func.py +163 -275
- mindspore/ops/function/other_func.py +2 -2
- mindspore/ops/function/random_func.py +69 -202
- mindspore/ops/function/sparse_func.py +4 -4
- mindspore/ops/functional.py +327 -332
- mindspore/ops/operations/__init__.py +3 -13
- mindspore/ops/operations/_grad_ops.py +27 -3
- mindspore/ops/operations/_inner_ops.py +356 -53
- mindspore/ops/operations/_rl_inner_ops.py +2 -2
- mindspore/ops/operations/_tensor_array.py +8 -8
- mindspore/ops/operations/array_ops.py +65 -82
- mindspore/ops/operations/comm_ops.py +93 -784
- mindspore/ops/operations/custom_ops.py +28 -51
- mindspore/ops/operations/debug_ops.py +4 -4
- mindspore/ops/operations/inner_ops.py +2 -2
- mindspore/ops/operations/manually_defined/ops_def.py +4 -304
- mindspore/ops/operations/math_ops.py +50 -3
- mindspore/ops/operations/nn_ops.py +247 -14
- mindspore/ops/operations/other_ops.py +3 -3
- mindspore/ops/operations/random_ops.py +1 -1
- mindspore/ops/operations/sparse_ops.py +1 -1
- mindspore/ops/primitive.py +8 -9
- mindspore/ops/silent_check.py +5 -5
- mindspore/ops_generate/arg_dtype_cast.py +9 -2
- mindspore/ops_generate/arg_handler.py +0 -26
- mindspore/ops_generate/gen_aclnn_implement.py +4 -1
- mindspore/ops_generate/gen_ops.py +4 -26
- mindspore/ops_generate/gen_pyboost_func.py +12 -41
- mindspore/ops_generate/gen_utils.py +0 -21
- mindspore/ops_generate/pyboost_utils.py +2 -7
- mindspore/ops_generate/template.py +0 -1
- mindspore/parallel/_auto_parallel_context.py +1 -21
- mindspore/parallel/_tensor.py +5 -0
- mindspore/parallel/_transformer/transformer.py +1 -1
- mindspore/parallel/_utils.py +1 -15
- mindspore/parallel/algo_parameter_config.py +3 -1
- mindspore/parallel/checkpoint_transform.py +9 -12
- mindspore/parallel/cluster/process_entity/_api.py +29 -28
- mindspore/parallel/cluster/process_entity/_utils.py +3 -13
- mindspore/parallel/cluster/run.py +16 -13
- mindspore/parallel/parameter_broadcast.py +2 -2
- mindspore/parallel/shard.py +17 -31
- mindspore/profiler/__init__.py +2 -3
- mindspore/profiler/common/util.py +2 -107
- mindspore/profiler/envprofiling.py +1 -1
- mindspore/profiler/parser/ascend_analysis/constant.py +21 -8
- mindspore/profiler/parser/ascend_analysis/file_manager.py +0 -82
- mindspore/profiler/parser/ascend_analysis/function_event.py +28 -43
- mindspore/profiler/parser/ascend_analysis/fwk_cann_parser.py +27 -49
- mindspore/profiler/parser/ascend_analysis/fwk_file_parser.py +10 -15
- mindspore/profiler/parser/ascend_analysis/msprof_timeline_parser.py +20 -25
- mindspore/profiler/parser/ascend_analysis/profiler_info_parser.py +5 -5
- mindspore/profiler/parser/ascend_analysis/trace_event_manager.py +1 -10
- mindspore/profiler/parser/ascend_hccl_generator.py +1 -4
- mindspore/profiler/parser/ascend_msprof_exporter.py +22 -43
- mindspore/profiler/parser/ascend_timeline_generator.py +5 -7
- mindspore/profiler/parser/minddata_parser.py +3 -72
- mindspore/profiler/profiling.py +59 -176
- mindspore/rewrite/api/node.py +1 -1
- mindspore/rewrite/common/namespace.py +5 -5
- mindspore/rewrite/parsers/assign_parser.py +0 -2
- mindspore/rewrite/parsers/class_def_parser.py +4 -8
- mindspore/run_check/_check_version.py +1 -1
- mindspore/scipy/fft.py +3 -1
- mindspore/scipy/linalg.py +3 -2
- mindspore/scipy/ops.py +3 -5
- mindspore/scipy/optimize/__init__.py +2 -2
- mindspore/train/__init__.py +4 -4
- mindspore/train/anf_ir_pb2.py +2 -8
- mindspore/train/callback/__init__.py +2 -5
- mindspore/train/callback/_backup_and_restore.py +2 -2
- mindspore/train/callback/_checkpoint.py +16 -104
- mindspore/train/callback/_landscape.py +1 -1
- mindspore/train/callback/_time_monitor.py +1 -1
- mindspore/train/data_sink.py +4 -5
- mindspore/train/dataset_helper.py +20 -45
- mindspore/train/model.py +38 -266
- mindspore/train/serialization.py +105 -256
- mindspore/train/summary/_summary_adapter.py +1 -1
- mindspore/version.py +1 -1
- {mindspore-2.3.0.dist-info → mindspore-2.3.0rc2.dist-info}/METADATA +2 -2
- {mindspore-2.3.0.dist-info → mindspore-2.3.0rc2.dist-info}/RECORD +303 -420
- mindspore/_extends/pijit/__init__.py +0 -23
- mindspore/_extends/pijit/pijit_func_white_list.py +0 -343
- mindspore/common/file_system.py +0 -48
- mindspore/common/generator.py +0 -260
- mindspore/common/no_inline.py +0 -54
- mindspore/common/np_dtype.py +0 -25
- mindspore/communication/comm_func.py +0 -1140
- mindspore/hal/memory.py +0 -326
- mindspore/lib/libavcodec.so.59 +0 -0
- mindspore/lib/libavdevice.so.59 +0 -0
- mindspore/lib/libavfilter.so.8 +0 -0
- mindspore/lib/libavformat.so.59 +0 -0
- mindspore/lib/libavutil.so.57 +0 -0
- mindspore/lib/libmindspore_np_dtype.so +0 -0
- mindspore/lib/libswresample.so.4 +0 -0
- mindspore/lib/libswscale.so.6 +0 -0
- mindspore/lib/plugin/ascend/custom_ascendc_ops/op_impl/ai_core/tbe/custom_ascendc_ops_impl/dynamic/all_finite.cpp +0 -326
- mindspore/lib/plugin/ascend/custom_ascendc_ops/op_impl/ai_core/tbe/custom_ascendc_ops_impl/dynamic/all_finite.py +0 -180
- mindspore/lib/plugin/ascend/custom_ascendc_ops/op_impl/ai_core/tbe/kernel/ascend910b/all_finite/AllFinite_576ceaeef5870c451cab59af55ea46ad.json +0 -58
- mindspore/lib/plugin/ascend/custom_ascendc_ops/op_impl/ai_core/tbe/kernel/ascend910b/all_finite/AllFinite_576ceaeef5870c451cab59af55ea46ad.o +0 -0
- mindspore/lib/plugin/ascend/custom_ascendc_ops/op_impl/ai_core/tbe/kernel/ascend910b/all_finite/AllFinite_86a73ff6e28d734c96bb8d3054f7dd18.json +0 -58
- mindspore/lib/plugin/ascend/custom_ascendc_ops/op_impl/ai_core/tbe/kernel/ascend910b/all_finite/AllFinite_86a73ff6e28d734c96bb8d3054f7dd18.o +0 -0
- mindspore/lib/plugin/ascend/custom_ascendc_ops/op_impl/ai_core/tbe/kernel/ascend910b/all_finite/AllFinite_f55e0ebaad1f2f572e43677336992fa0.json +0 -58
- mindspore/lib/plugin/ascend/custom_ascendc_ops/op_impl/ai_core/tbe/kernel/ascend910b/all_finite/AllFinite_f55e0ebaad1f2f572e43677336992fa0.o +0 -0
- mindspore/lib/plugin/ascend/custom_ascendc_ops/op_impl/ai_core/tbe/kernel/config/ascend910b/all_finite.json +0 -109
- mindspore/lib/plugin/ascend/custom_ascendc_ops/op_impl/ai_core/tbe/kernel/config/ascend910b/binary_info_config.json +0 -38
- mindspore/lib/plugin/ascend/custom_compiler/OWNERS +0 -12
- mindspore/lib/plugin/ascend/custom_compiler/setup.py +0 -255
- mindspore/lib/plugin/ascend/custom_compiler/start.sh +0 -26
- mindspore/lib/plugin/ascend/custom_compiler/template.json +0 -40
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/acme/include/acme.h +0 -24
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/acme/include/acme_op.h +0 -69
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/acme/include/base_type.h +0 -133
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/acme/include/op_creator.h +0 -32
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/acme/include/op_param.h +0 -35
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/acme/include/tiling_info.h +0 -60
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/acme/src/core/kernel_register.h +0 -37
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/acme/src/core/platform/platform_configs.h +0 -89
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/acme/src/core/platform/rt_funcs.h +0 -135
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/acme/src/ops/host_src/add_op.h +0 -34
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/acme/src/ops/host_src/asd_backoff_base.h +0 -62
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/acme/src/ops/host_src/asd_elewise_op.h +0 -33
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/acme/src/ops/host_src/asd_ops.h +0 -88
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/acme/src/ops/host_src/asd_pa_op.h +0 -45
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/acme/src/ops/host_src/cast_op.h +0 -52
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/acme/src/ops/host_src/matmul_op.h +0 -95
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/acme/src/utils/asd_utils.h +0 -84
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/acme/src/utils/comm_utils.h +0 -61
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/apply_rotary_pos_emb/kernel/apply_rotary_pos_emb_fp32.h +0 -224
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/elewise_binary/and_impl.h +0 -29
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/elewise_binary/div_impl.h +0 -29
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/elewise_binary/elewise_binary_impl.h +0 -48
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/elewise_binary/elewise_binary_tiling.h +0 -25
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/elewise_binary/kernel/and_kernel.h +0 -46
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/elewise_binary/kernel/div_kernel.h +0 -46
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/elewise_binary/kernel/elewise_binary_base.h +0 -260
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/elewise_binary/kernel/elewise_binary_kernel.h +0 -35
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/elewise_binary/kernel/max_kernel.h +0 -66
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/elewise_binary/kernel/min_kernel.h +0 -66
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/elewise_binary/kernel/mul_kernel.h +0 -66
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/elewise_binary/kernel/or_kernel.h +0 -46
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/elewise_binary/max_impl.h +0 -29
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/elewise_binary/min_impl.h +0 -29
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/elewise_binary/mul_impl.h +0 -29
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/elewise_binary/or_impl.h +0 -29
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/elewise_unary/abs_impl.h +0 -29
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/elewise_unary/elewise_unary_impl.h +0 -47
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/elewise_unary/elewise_unary_tiling.h +0 -24
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/elewise_unary/exp_impl.h +0 -29
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/elewise_unary/kernel/abs_kernel.h +0 -45
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/elewise_unary/kernel/elewise_unary_base.h +0 -148
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/elewise_unary/kernel/elewise_unary_kernel.h +0 -31
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/elewise_unary/kernel/exp_kernel.h +0 -45
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/elewise_unary/kernel/ln_kernel.h +0 -45
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/elewise_unary/kernel/not_kernel.h +0 -45
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/elewise_unary/kernel/reciprocal_kernel.h +0 -45
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/elewise_unary/kernel/relu_kernel.h +0 -55
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/elewise_unary/kernel/rsqrt_kernel.h +0 -45
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/elewise_unary/kernel/sqrt_kernel.h +0 -45
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/elewise_unary/ln_impl.h +0 -29
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/elewise_unary/not_impl.h +0 -29
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/elewise_unary/reciprocal_impl.h +0 -29
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/elewise_unary/relu_impl.h +0 -29
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/elewise_unary/rsqrt_impl.h +0 -29
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/elewise_unary/sqrt_impl.h +0 -29
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/grouped_matmul/grouped_matmul_impl.h +0 -45
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/grouped_matmul/grouped_matmul_tiling.h +0 -187
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/grouped_matmul/kernel/grouped_matmul.h +0 -245
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/grouped_matmul/kernel/grouped_matmul_interface.h +0 -24
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/grouped_matmul/kernel/grouped_matmul_utils.h +0 -111
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/grouped_matmul/tiling_data.h +0 -54
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/param/compare_param.h +0 -31
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/param/elewise_param.h +0 -41
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/param/grouped_matmul_param.h +0 -40
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/profiling_util.h +0 -364
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/utils/log/log_utils.h +0 -69
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/utils/register/kernel_creator.h +0 -39
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/utils/register/kernel_registry.h +0 -114
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/utils/utils.h +0 -98
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/op_kernels/ascend910b/MatMulPostFusionMixTactic/matmul_postfusion_mix.json +0 -19
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/op_kernels/ascend910b/MatMulPostFusionMixTactic/matmul_postfusion_mix.o +0 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/op_kernels/ascend910b/MatMulPostFusionMixTactic/matmul_postfusion_mix_mix_aic_0.o +0 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/op_kernels/ascend910b/MatMulPostFusionMixTactic/matmul_postfusion_mix_mix_aiv_0.o +0 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/op_kernels/ascend910b/MultiMatMulPostFusionMixTactic/multi_matmul_postfusion_mix.json +0 -19
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/op_kernels/ascend910b/MultiMatMulPostFusionMixTactic/multi_matmul_postfusion_mix.o +0 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/op_kernels/ascend910b/MultiMatMulPostFusionMixTactic/multi_matmul_postfusion_mix_mix_aic_0.o +0 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/op_kernels/ascend910b/MultiMatMulPostFusionMixTactic/multi_matmul_postfusion_mix_mix_aiv_0.o +0 -0
- mindspore/mint/linalg/__init__.py +0 -22
- mindspore/nn/layer/embedding_service.py +0 -531
- mindspore/nn/layer/embedding_service_layer.py +0 -393
- mindspore/ops/function/reshard_func.py +0 -102
- mindspore/ops/operations/_infer_ops.py +0 -19
- mindspore/ops/operations/reshard_ops.py +0 -53
- mindspore/profiler/common/process_pool.py +0 -41
- mindspore/profiler/common/singleton.py +0 -28
- mindspore/profiler/parser/ascend_integrate_generator.py +0 -42
- mindspore/profiler/parser/ascend_memory_generator.py +0 -185
- mindspore/train/callback/_cluster_monitor.py +0 -201
- mindspore/train/callback/_flops_collector.py +0 -238
- mindspore/train/callback/_mindio_ttp.py +0 -443
- {mindspore-2.3.0.dist-info → mindspore-2.3.0rc2.dist-info}/WHEEL +0 -0
- {mindspore-2.3.0.dist-info → mindspore-2.3.0rc2.dist-info}/entry_points.txt +0 -0
- {mindspore-2.3.0.dist-info → mindspore-2.3.0rc2.dist-info}/top_level.txt +0 -0
mindspore/lib/plugin/ascend/custom_compiler/setup.py
DELETED
@@ -1,255 +0,0 @@
-# Copyright 2024 Huawei Technologies Co., Ltd
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-# ============================================================================
-"""setup package for custom compiler tool"""
-import argparse
-import json
-import os
-import subprocess
-import shutil
-from mindspore import log as logger
-
-OP_HOST = "op_host"
-OP_KERNEL = "op_kernel"
-SUFFIX_CPP = "cpp"
-SUFFIX_H = "h"
-CONFIG_KEY_CONFIGUREPRESET = "configurePresets"
-CONFIG_KEY_VALUE = "value"
-CONFIG_KEY_VARIABLE = "cacheVariables"
-CONFIG_KEY_CANN_PATH = "ASCEND_CANN_PACKAGE_PATH"
-CONFIG_KEY_VENDOR_NAME = "vendor_name"
-CONFIG_KEY_COMPUTE_UNIT = "ASCEND_COMPUTE_UNIT"
-
-
-def get_config():
-    """get config from user"""
-    parser = argparse.ArgumentParser()
-    parser.add_argument("-o", "--op_host_path", type=str, required=True)
-    parser.add_argument("-k", "--op_kernel_path", type=str, required=True)
-    parser.add_argument("--soc_version", type=str, default="")
-    parser.add_argument("--ascend_cann_package_path", type=str, default="")
-    parser.add_argument("--vendor_name", type=str, default="customize")
-    parser.add_argument("--install_path", type=str, default="")
-    parser.add_argument("-c", "--clear", action="store_true")
-    parser.add_argument("-i", "--install", action="store_true")
-    return parser.parse_args()
-
-
-class CustomOOC():
-    """
-    Custom Operator Offline Compilation
-    """
-
-    def __init__(self, args):
-        self.args = args
-        self.ori_cmake_preset = ""
-        script_path = os.path.abspath(__file__)
-        dir_path, _ = os.path.split(script_path)
-        self.current_path = dir_path
-        self.custom_project = os.path.join(dir_path, "CustomProject")
-
-    def check_args(self):
-        """check config"""
-        if not os.path.isdir(self.args.op_host_path):
-            raise ValueError(
-                f"Config error! op host path [{self.args.op_host_path}] is not exist,"
-                f" please check your set --op_host_path")
-
-        if not os.path.isdir(self.args.op_kernel_path):
-            raise ValueError(
-                f"Config error! op kernel path [{self.args.op_kernel_path}] is not exist, "
-                f"please check your set --op_kernel_path")
-
-        if self.args.soc_version != "":
-            support_soc_version = {"ascend310p", "ascend310b", "ascend910", "ascend910b", "ascend910c"}
-            for item in self.args.soc_version.split(';'):
-                if item not in support_soc_version:
-                    raise ValueError(
-                        f"Config error! Unsupported soc version {self.args.soc_version}! "
-                        f"Please check your set --soc_version and use ';' to separate multiple soc_versions, "
-                        f"support soc version is {support_soc_version}")
-
-        if self.args.ascend_cann_package_path != "":
-            if not os.path.isdir(self.args.ascend_cann_package_path):
-                raise ValueError(
-                    f"Config error! ascend cann package path [{self.args.ascend_cann_package_path}] is not valid path, "
-                    f"please check your set --ascend_cann_package_path")
-
-        if self.args.install or self.args.install_path != "":
-            if self.args.install_path == "":
-                opp_path = os.environ.get('ASCEND_OPP_PATH')
-                if opp_path is None:
-                    raise ValueError(
-                        "Config error! Can not find install path, please set install path by --install_path")
-                self.args.install_path = opp_path
-
-            if not os.path.isdir(self.args.install_path):
-                raise ValueError(
-                    f"Install path [{self.args.install_path}] is not valid path, please check your set"
-                    f" --install_path is set correctly")
-
-    def generate_compile_project(self):
-        """generate compile project by msopgen"""
-        if os.path.exists(self.custom_project) and os.path.isdir(self.custom_project):
-            shutil.rmtree(self.custom_project)
-        command = ['msopgen', '-h']
-        result = subprocess.run(command, shell=False, stderr=subprocess.STDOUT)
-        if result.returncode != 0:
-            raise RuntimeError(
-                "[msopgen] is not existed, Please check if the [toolkit] is installed in the current environment.")
-        log_fd = os.open("generate.log", os.O_WRONLY | os.O_CREAT | os.O_TRUNC, 0o700)
-        log_file = os.fdopen(log_fd, "w")
-        command = ['msopgen', 'gen', '-i', './template.json', '-c', 'ai_core-Ascend310P1', '-lan', 'cpp', '-out',
-                   self.custom_project]
-        result = subprocess.run(command, shell=False, stdout=log_file, stderr=subprocess.STDOUT)
-        log_file.close()
-        if result.returncode == 0:
-            logger.info("Generate custom project successfully!")
-        else:
-            with open('generate.log', 'r') as file:
-                for line in file:
-                    logger.error(line.strip())
-            raise RuntimeError("Generate custom project failed!")
-        with open(os.path.join(self.custom_project, 'CMakePresets.json'), 'r', encoding='utf-8') as f:
-            data = json.load(f)
-        data[CONFIG_KEY_CONFIGUREPRESET][0][CONFIG_KEY_VARIABLE][CONFIG_KEY_COMPUTE_UNIT][
-            CONFIG_KEY_VALUE] = "ascend310p;ascend310b;ascend910;ascend910b"
-        with os.fdopen(
-                os.open(os.path.join(self.custom_project, 'CMakePresets.json'), os.O_WRONLY | os.O_CREAT | os.O_TRUNC,
-                        0o700), "w") as f:
-            json.dump(data, f, ensure_ascii=False, indent=4)
-        ascend_suffix = {SUFFIX_CPP, SUFFIX_H}
-        for item in os.listdir(os.path.join(self.custom_project, OP_HOST)):
-            if item.split('.')[-1] in ascend_suffix:
-                os.remove(os.path.join(self.custom_project, OP_HOST, item))
-
-        for item in os.listdir(os.path.join(self.custom_project, OP_KERNEL)):
-            if item.split('.')[-1] in ascend_suffix:
-                os.remove(os.path.join(self.custom_project, OP_KERNEL, item))
-
-    def compile_config(self):
-        """create CMakePresets.json by config"""
-        with open(os.path.join(self.custom_project, 'CMakePresets.json'), 'r', encoding='utf-8') as f:
-            data = json.load(f)
-        self.ori_cmake_preset = data
-        if self.args.ascend_cann_package_path != "":
-            cann_package_path = self.args.ascend_cann_package_path
-        else:
-            cann_package_path = os.environ.get('ASCEND_AICPU_PATH')
-            if cann_package_path is None:
-                raise ValueError("Config error! Can not find cann package path, "
-                                 "please set cann package path by --ascend_cann_package_path.")
-        if not os.path.isdir(cann_package_path):
-            logger.error(f"The path '{cann_package_path}' is not a valid path.")
-        logger.info("ASCEND_CANN_PACKAGE_PATH is {}".format(cann_package_path))
-        data[CONFIG_KEY_CONFIGUREPRESET][0][CONFIG_KEY_VARIABLE][CONFIG_KEY_CANN_PATH][
-            CONFIG_KEY_VALUE] = cann_package_path
-
-        if self.args.soc_version != "":
-            data[CONFIG_KEY_CONFIGUREPRESET][0][CONFIG_KEY_VARIABLE][CONFIG_KEY_COMPUTE_UNIT][
-                CONFIG_KEY_VALUE] = self.args.soc_version
-
-        data[CONFIG_KEY_CONFIGUREPRESET][0][CONFIG_KEY_VARIABLE][CONFIG_KEY_VENDOR_NAME][
-            CONFIG_KEY_VALUE] = self.args.vendor_name
-
-        with os.fdopen(
-                os.open(os.path.join(self.custom_project, 'CMakePresets.json'), os.O_WRONLY | os.O_CREAT | os.O_TRUNC,
-                        0o700), "w") as f:
-            json.dump(data, f, ensure_ascii=False, indent=4)
-
-    def clear(self):
-        """clear log and build out"""
-        if self.args.clear:
-            command = ['rm', '-rf', 'build_out', 'install.log', 'build.log', 'generate.log']
-            result = subprocess.run(command, shell=False, stderr=subprocess.STDOUT)
-            if result.returncode == 0:
-                logger.info("Delete build_out install.log build.log successfully!")
-            else:
-                logger.error("Delete failed with return code: {} ".format(result.returncode))
-                logger.error("Error output:\n{}".format(result.stderr))
-                raise RuntimeError("Delete failed!")
-
-    def install_custom(self):
-        """install custom run"""
-        if self.args.install or self.args.install_path != "":
-            logger.info("Install custom opp run in {}".format(self.args.install_path))
-            os.environ['ASCEND_CUSTOM_OPP_PATH'] = self.args.install_path
-            result = subprocess.run(['bash', self.custom_project + '/build_out/*.run'], stdout=os.fdopen(
-                os.open("install.log", os.O_WRONLY | os.O_CREAT | os.O_TRUNC, 0o700), "w"),
-                                    stderr=subprocess.STDOUT)
-            if result.returncode == 0:
-                logger.info("Install custom run opp successfully!")
-                logger.info(
-                    "Please set [source ASCEND_CUSTOM_OPP_PATH={}/vendors/{}:$ASCEND_CUSTOM_OPP_PATH] to "
-                    "make the custom operator effective in the current path.".format(
-                        self.args.install_path, self.args.vendor_name))
-            else:
-                with open('install.log', 'r') as file:
-                    for line in file:
-                        logger.error(line.strip())
-                raise RuntimeError("Install failed!")
-
-    def copy_src(self):
-        """copy new src code"""
-        ascend_suffix = {SUFFIX_CPP, SUFFIX_H}
-        for item in os.listdir(self.args.op_host_path):
-            if item.split('.')[-1] in ascend_suffix:
-                item_path = os.path.join(self.args.op_host_path, item)
-                target_path = os.path.join(self.custom_project, OP_HOST, item)
-                if os.path.isfile(item_path):
-                    shutil.copy(item_path, target_path)
-        for item in os.listdir(self.args.op_kernel_path):
-            if item.split('.')[-1] in ascend_suffix:
-                item_path = os.path.join(self.args.op_kernel_path, item)
-                target_path = os.path.join(self.custom_project, OP_KERNEL, item)
-                if os.path.isfile(item_path):
-                    shutil.copy(item_path, target_path)
-
-        for root, _, files in os.walk(self.custom_project):
-            for f in files:
-                _, file_extension = os.path.splitext(f)
-                if file_extension == ".sh":
-                    os.chmod(os.path.join(root, f), 0o700)
-
-    def compile_custom(self):
-        """compile custom op"""
-        self.copy_src()
-        log_fd = os.open("build.log", os.O_WRONLY | os.O_CREAT | os.O_TRUNC, 0o700)
-        log_file = os.fdopen(log_fd, "w")
-        result = subprocess.run(['bash', 'start.sh', self.custom_project],
-                                stdout=log_file,
-                                stderr=subprocess.STDOUT)
-        log_file.close()
-        if result.returncode == 0:
-            logger.info("Compile custom op successfully!")
-        else:
-            with open('build.log', 'r') as file:
-                for line in file:
-                    logger.debug(line.strip())
-            raise RuntimeError("Compile failed! Please see build.log in current directory for detail info.")
-
-    def compile(self):
-        """compile op"""
-        self.check_args()
-        self.generate_compile_project()
-        self.compile_config()
-        self.compile_custom()
-        self.install_custom()
-        self.clear()
-
-
-if __name__ == "__main__":
-    config = get_config()
-    custom_ooc = CustomOOC(config)
-    custom_ooc.compile()
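For context, the removed helper above was driven from the command line; the following is a minimal sketch of how it would be invoked, based only on the get_config() arguments shown in the deleted file. The script location, source directories, and SoC value are placeholders, not paths shipped in either wheel.

# Hypothetical invocation of the removed offline-compilation helper.
# Flag names come from get_config() above; all paths and the SoC value are placeholders.
import subprocess

subprocess.run(
    [
        "python", "setup.py",
        "-o", "./my_op/op_host",        # --op_host_path: directory with host-side .cpp/.h sources
        "-k", "./my_op/op_kernel",      # --op_kernel_path: directory with kernel-side .cpp/.h sources
        "--soc_version", "ascend910b",  # must be one of the SoCs accepted by check_args()
        "--vendor_name", "customize",
        "-i",                           # install the generated custom-opp .run package
    ],
    check=True,
)

Under those assumptions, the helper would generate a CustomProject skeleton with msopgen, copy the given sources into op_host/op_kernel, build them through start.sh, and install the resulting .run package.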
mindspore/lib/plugin/ascend/custom_compiler/start.sh
DELETED
@@ -1,26 +0,0 @@
-#!/bin/bash
-# Copyright 2024 Huawei Technologies Co., Ltd
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-# ============================================================================
-
-expected_args=1
-args_num=$#
-if [ "$args_num" -ne "$expected_args" ]; then
-  echo "Error: Incorrect number of arguments $args_num"
-  exit 1
-fi
-project_path=$1
-unset ASCEND_CUSTOM_OPP_PATH
-cd ${project_path}
-bash build.sh
mindspore/lib/plugin/ascend/custom_compiler/template.json
DELETED
@@ -1,40 +0,0 @@
-[
-    {
-        "op": "AddCustom",
-        "language":"cpp",
-        "input_desc": [
-            {
-                "name": "x",
-                "param_type": "required",
-                "format": [
-                    "ND"
-                ],
-                "type": [
-                    "fp16"
-                ]
-            },
-            {
-                "name": "y",
-                "param_type": "required",
-                "format": [
-                    "ND"
-                ],
-                "type": [
-                    "fp16"
-                ]
-            }
-        ],
-        "output_desc": [
-            {
-                "name": "z",
-                "param_type": "required",
-                "format": [
-                    "ND"
-                ],
-                "type": [
-                    "fp16"
-                ]
-            }
-        ]
-    }
-]
mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/acme/include/acme.h
DELETED
@@ -1,24 +0,0 @@
-/**
- * Copyright 2024 Huawei Technologies Co., Ltd
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#ifndef MS_KERNELS_INTERNAL_KERNEL_ACME_ACME_H_
-#define MS_KERNELS_INTERNAL_KERNEL_ACME_ACME_H_
-
-#include "acme/include/op_param.h"
-#include "acme/include/op_creator.h"
-#include "acme/include/tiling_info.h"
-
-#endif  // MS_KERNELS_INTERNAL_KERNEL_ACME_ACME_H_
mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/acme/include/acme_op.h
DELETED
@@ -1,69 +0,0 @@
-/**
- * Copyright 2024 Huawei Technologies Co., Ltd
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#ifndef MS_KERNELS_INTERNAL_KERNEL_ACME_ACME_OP_H_
-#define MS_KERNELS_INTERNAL_KERNEL_ACME_ACME_OP_H_
-
-#include <vector>
-#include "acme/include/base_type.h"
-#include "acme/include/tiling_info.h"
-
-namespace mindspore {
-namespace acme {
-class AcmeOp {
- public:
-  AcmeOp(const InputsImmutableInfoList &inputs_ii, const OutputsImmutableInfoList &outputs_ii);
-  virtual ~AcmeOp() = default;
-
-  AcmeStatus Init();
-
-  virtual AcmeStatus UpdateShape(const ShapeInfoList &inputs_shape, const ShapeInfoList &outputs_shape);
-
-  size_t GetTilingSize() const;
-  virtual std::vector<size_t> GetWorkspaceSize() const;
-
-  virtual void SetTilingInfo(const TilingInfoPtr &tiling_info);
-
-  AcmeStatus Launch(const InputsAddrList &input_ptrs, const OutputsAddrList &output_ptrs, const WsAddrList &ws_ptrs,
-                    void *stream);
-  AcmeStatus Tiling(RawHostAddr host_ptr, HostRunInfoPtr *run_info_ptr);
-  virtual std::string DumpTiling(const RawHostAddr host_ptr) const = 0;
-
-  virtual ShapeInfoList InferShape(const ShapeInfoList &inputs_shape) const = 0;
-
-  virtual AcmeStatus TilingFromTuning(const RawDeviceAddr tiling_addr);
-  virtual bool IsSupported(const InputDataTypes &dtypes);
-
- protected:
-  virtual AcmeStatus InitImpl();
-  virtual AcmeStatus TilingImpl(RawHostAddr host_ptr, HostRunInfoPtr *run_info_ptr) = 0;
-  virtual AcmeStatus LaunchImpl(const InputsAddrList &input_ptrs, const OutputsAddrList &output_ptrs,
-                                const WsAddrList &ws_ptrs, void *stream) = 0;
-  void SetHostRunInfoComm(const HostRunInfoComm &, HostRunInfoPtr *);
-  InputsDescList inputs_;
-  OutputsDescList outputs_;
-  size_t tiling_size_{0};
-  std::vector<size_t> ws_size_;
-  RawDeviceAddr tiling_device_addr_{nullptr};
-  std::string soc_;
-  HostRunInfoCommPtr host_run_info_comm_ptr_{nullptr};
-};
-
-using AcmeOpPtr = std::shared_ptr<AcmeOp>;
-}  // namespace acme
-}  // namespace mindspore
-
-#endif  // MS_KERNELS_INTERNAL_KERNEL_ACME_ACME_OP_H_
mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/acme/include/base_type.h
DELETED
@@ -1,133 +0,0 @@
-/**
- * Copyright 2024 Huawei Technologies Co., Ltd
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#ifndef MS_KERNELS_INTERNAL_KERNEL_ACME_BASE_TYPE_H_
-#define MS_KERNELS_INTERNAL_KERNEL_ACME_BASE_TYPE_H_
-
-#include <vector>
-#include <cstdint>
-#include <memory>
-
-namespace mindspore {
-namespace acme {
-using ShapeInfo = std::vector<int64_t>;
-
-enum DataType : int {
-  kTypeUnknown = 0,
-  kTypeFloat16,
-  kTypeFloat32,
-  kTypeFloat64,
-  kTypeInt8,
-  kTypeInt16,
-  kTypeInt32,
-  kTypeInt64,
-  kTypeUint8,
-  kTypeUint16,
-  kTypeUint32,
-  kTypeUint64,
-  kTypeBF16,
-  kTypeBool,
-  kTypeComplex64,
-  kTypeComplex128,
-};
-
-enum TensorFormat : int { kFormatUnknown, kFormatND, kFormatNCHW, kFormatNHWC };
-
-enum AcmeStatus {
-  kAcmeOk = 0,
-  kAcmeError,
-};
-
-class ArgImmutableInfo {
- public:
-  ArgImmutableInfo(DataType type, TensorFormat format) : d_type_(type), format_(format) {}
-  ArgImmutableInfo() {}
-  ~ArgImmutableInfo() = default;
-
-  void SetDtype(DataType type) { d_type_ = type; }
-
-  DataType GetDtype() const { return d_type_; }
-
-  void SetFormat(TensorFormat format) { format_ = format; }
-
-  TensorFormat GetFormat() const { return format_; }
-
- private:
-  DataType d_type_{kTypeUnknown};
-  TensorFormat format_{kFormatUnknown};
-};
-
-class ArgDesc {
- public:
-  ArgDesc(const ArgImmutableInfo &arg_ii) : immutable_info_(arg_ii) {}
-  ArgDesc(DataType type, TensorFormat format) : immutable_info_(type, format) {}
-  ArgDesc(const ShapeInfo &shape, DataType type, TensorFormat format) : shape_(shape), immutable_info_(type, format) {}
-
-  ~ArgDesc() = default;
-  const ShapeInfo &GetShape() const { return shape_; }
-
-  void SetShape(const ShapeInfo &shape) { shape_ = shape; }
-
-  void SetDtype(DataType type) { immutable_info_.SetDtype(type); }
-
-  DataType GetDtype() const {
-    return immutable_info_.GetDtype();
-    ;
-  }
-
-  void SetFormat(TensorFormat format) { immutable_info_.SetFormat(format); }
-
-  TensorFormat GetFormat() const {
-    return immutable_info_.GetFormat();
-    ;
-  }
-
-  const ArgImmutableInfo &GetImmutableInfo() const { return immutable_info_; }
-
-  size_t ElementNum() const {
-    if (shape_.empty()) {
-      return 0;
-    }
-
-    size_t num = 1;
-    for (auto s : shape_) {
-      num *= static_cast<size_t>(s);
-    }
-
-    return num;
-  }
-
- private:
-  ShapeInfo shape_{0};
-  ArgImmutableInfo immutable_info_;
-};
-using ArgDescPtr = std::shared_ptr<ArgDesc>;
-
-using InputsDescList = std::vector<ArgDesc>;
-using OutputsDescList = std::vector<ArgDesc>;
-using InputsImmutableInfoList = std::vector<ArgImmutableInfo>;
-using OutputsImmutableInfoList = std::vector<ArgImmutableInfo>;
-using InputDataTypes = std::vector<DataType>;
-using RawDeviceAddr = void *;
-using RawHostAddr = void *;
-using InputsAddrList = std::vector<RawDeviceAddr>;
-using OutputsAddrList = std::vector<RawDeviceAddr>;
-using WsAddrList = std::vector<RawDeviceAddr>;
-using ShapeInfoList = std::vector<ShapeInfo>;
-}  // namespace acme
-}  // namespace mindspore
-
-#endif  // MS_KERNELS_INTERNAL_KERNEL_ACME_BASE_TYPE_H_
mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/acme/include/op_creator.h
DELETED
@@ -1,32 +0,0 @@
-/**
- * Copyright 2024 Huawei Technologies Co., Ltd
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#ifndef MS_KERNELS_INTERNAL_KERNEL_ACME_OP_CREATOR_H_
-#define MS_KERNELS_INTERNAL_KERNEL_ACME_OP_CREATOR_H_
-
-#include "acme/include/acme_op.h"
-#include "acme/include/op_param.h"
-
-namespace mindspore {
-namespace acme {
-AcmeOpPtr CreateMatmulOp(const InputsImmutableInfoList &inputs_ii, const OutputsImmutableInfoList &outputs_ii,
-                         const MatmulParam &param);
-AcmeOpPtr CreateAddOp(const InputsImmutableInfoList &inputs_ii, const OutputsImmutableInfoList &outputs_ii);
-AcmeOpPtr CreateCastOp(const InputsImmutableInfoList &inputs_ii, const OutputsImmutableInfoList &outputs_ii);
-}  // namespace acme
-}  // namespace mindspore
-
-#endif  // MS_KERNELS_INTERNAL_KERNEL_ACME_OP_CREATOR_H_
mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/acme/include/op_param.h
DELETED
@@ -1,35 +0,0 @@
-/**
- * Copyright 2024 Huawei Technologies Co., Ltd
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#ifndef MS_KERNELS_INTERNAL_KERNEL_ACME_OP_PARAM_H_
-#define MS_KERNELS_INTERNAL_KERNEL_ACME_OP_PARAM_H_
-
-#include <stdint.h>
-
-namespace mindspore {
-namespace acme {
-struct MatmulParam {
-  bool transpose_a{false};
-  bool transpose_b{false};
-  bool with_relu{false};
-  bool with_gelu{false};
-  bool with_bias{false};
-  bool with_bias_fastgelu{false};
-};
-}  // namespace acme
-}  // namespace mindspore
-
-#endif  // MS_KERNELS_INTERNAL_KERNEL_ACME_OP_PARAM_H_
mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/acme/include/tiling_info.h
DELETED
@@ -1,60 +0,0 @@
-/**
- * Copyright 2024 Huawei Technologies Co., Ltd
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#ifndef MS_KERNELS_INTERNAL_KERNEL_ACME_TILING_INFO_H_
-#define MS_KERNELS_INTERNAL_KERNEL_ACME_TILING_INFO_H_
-
-#include "acme/include/base_type.h"
-
-namespace mindspore {
-namespace acme {
-class HostRunInfo {
- public:
-  HostRunInfo() = default;
-  virtual ~HostRunInfo() = default;
-};
-using HostRunInfoPtr = std::shared_ptr<HostRunInfo>;
-
-class HostRunInfoComm : public HostRunInfo {
- public:
-  HostRunInfoComm() = default;
-  explicit HostRunInfoComm(size_t block_dim) : block_dims_(block_dim) {}
-  ~HostRunInfoComm() = default;
-  uint32_t block_dims_{0};
-  uint64_t any_value0_{0};
-  uint64_t any_value1_{0};
-  uint64_t any_value2_{0};
-  uint64_t any_value3_{0};
-  uint64_t any_value4_{0};
-  uint64_t any_value5_{0};
-};
-using HostRunInfoCommPtr = std::shared_ptr<HostRunInfoComm>;
-
-class TilingInfo {
- public:
-  TilingInfo() = default;
-  TilingInfo(RawDeviceAddr tiling_addr, const HostRunInfoPtr &host_run_info)
-      : tiling_addr_(tiling_addr), host_run_info_{host_run_info} {}
-  ~TilingInfo() = default;
-
-  RawDeviceAddr tiling_addr_{nullptr};
-  HostRunInfoPtr host_run_info_{nullptr};
-};
-using TilingInfoPtr = std::shared_ptr<TilingInfo>;
-}  // namespace acme
-}  // namespace mindspore
-
-#endif  // MS_KERNELS_INTERNAL_KERNEL_ACME_TILING_INFO_H_