mindspore 2.3.0__cp39-none-any.whl → 2.3.0rc2__cp39-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of mindspore might be problematic.
- mindspore/.commit_id +1 -1
- mindspore/Third_Party_Open_Source_Software_Notice +0 -1512
- mindspore/__init__.py +1 -2
- mindspore/_c_dataengine.cpython-39-aarch64-linux-gnu.so +0 -0
- mindspore/_c_expression.cpython-39-aarch64-linux-gnu.so +0 -0
- mindspore/_c_mindrecord.cpython-39-aarch64-linux-gnu.so +0 -0
- mindspore/_checkparam.py +25 -5
- mindspore/_extends/graph_kernel/model/graph_parallel.py +1 -1
- mindspore/_extends/parse/__init__.py +2 -2
- mindspore/_extends/parse/compile_config.py +0 -29
- mindspore/_extends/parse/namespace.py +2 -2
- mindspore/_extends/parse/parser.py +5 -21
- mindspore/_extends/parse/resources.py +7 -5
- mindspore/_extends/parse/standard_method.py +59 -40
- mindspore/_mindspore_offline_debug.cpython-39-aarch64-linux-gnu.so +0 -0
- mindspore/amp.py +5 -26
- mindspore/bin/cache_admin +0 -0
- mindspore/bin/cache_server +0 -0
- mindspore/boost/adasum.py +1 -1
- mindspore/boost/base.py +1 -1
- mindspore/boost/boost_cell_wrapper.py +1 -1
- mindspore/boost/grad_freeze.py +2 -2
- mindspore/boost/less_batch_normalization.py +6 -9
- mindspore/common/__init__.py +1 -8
- mindspore/common/_register_for_tensor.py +9 -8
- mindspore/common/api.py +65 -275
- mindspore/common/dtype.py +4 -8
- mindspore/common/dump.py +5 -2
- mindspore/common/jit_config.py +1 -1
- mindspore/common/lazy_inline.py +2 -14
- mindspore/common/parameter.py +15 -14
- mindspore/common/recompute.py +5 -20
- mindspore/common/sparse_tensor.py +6 -21
- mindspore/common/tensor.py +52 -100
- mindspore/communication/__init__.py +11 -6
- mindspore/communication/management.py +94 -92
- mindspore/context.py +18 -180
- mindspore/dataset/engine/datasets.py +46 -69
- mindspore/dataset/engine/datasets_user_defined.py +53 -72
- mindspore/dataset/engine/datasets_vision.py +2 -2
- mindspore/dataset/engine/queue.py +38 -56
- mindspore/dataset/engine/validators.py +5 -11
- mindspore/dataset/vision/__init__.py +5 -5
- mindspore/dataset/vision/c_transforms.py +5 -5
- mindspore/dataset/vision/py_transforms_util.py +1 -1
- mindspore/dataset/vision/transforms.py +46 -591
- mindspore/dataset/vision/utils.py +1 -121
- mindspore/dataset/vision/validators.py +3 -9
- mindspore/hal/__init__.py +1 -7
- mindspore/hal/device.py +1 -1
- mindspore/include/api/model.h +0 -3
- mindspore/include/dataset/vision.h +2 -54
- mindspore/include/mindapi/base/types.h +0 -1
- mindspore/lib/libdnnl.so.2 +0 -0
- mindspore/lib/libmindspore.so +0 -0
- mindspore/lib/libmindspore_backend.so +0 -0
- mindspore/lib/libmindspore_common.so +0 -0
- mindspore/lib/libmindspore_core.so +0 -0
- mindspore/lib/libmindspore_glog.so.0 +0 -0
- mindspore/lib/libmindspore_gpr.so.15 +0 -0
- mindspore/lib/libmindspore_grpc++.so.1 +0 -0
- mindspore/lib/libmindspore_grpc.so.15 +0 -0
- mindspore/lib/libmindspore_shared_lib.so +0 -0
- mindspore/lib/libmpi_adapter.so +0 -0
- mindspore/lib/libmpi_collective.so +0 -0
- mindspore/lib/libnnacl.so +0 -0
- mindspore/lib/libopencv_core.so.4.5 +0 -0
- mindspore/lib/libps_cache.so +0 -0
- mindspore/lib/plugin/ascend/custom_aicore_ops/op_impl/ai_core/tbe/config/ascend310p/aic-ascend310p-ops-info.json +0 -35
- mindspore/lib/plugin/ascend/custom_aicore_ops/op_impl/ai_core/tbe/custom_aicore_ops_impl/kv_cache_mgr.py +0 -2
- mindspore/lib/plugin/ascend/custom_aicore_ops/op_impl/vector_core/tbe/custom_aicore_ops_impl/kv_cache_mgr.py +0 -2
- mindspore/lib/plugin/ascend/custom_aicpu_ops/op_impl/cpu/aicpu_kernel/impl/libcust_cpu_kernels.so +0 -0
- mindspore/lib/plugin/ascend/custom_aicpu_ops/op_impl/cpu/config/cust_aicpu_kernel.json +0 -72
- mindspore/lib/plugin/ascend/custom_aicpu_ops/op_proto/libcust_op_proto.so +0 -0
- mindspore/lib/plugin/ascend/custom_ascendc_ops/op_api/include/{aclnn_all_finite.h → aclnn_add_custom.h} +11 -9
- mindspore/lib/plugin/ascend/custom_ascendc_ops/op_api/include/aclnn_decoder_kv_cache.h +1 -1
- mindspore/lib/plugin/ascend/custom_ascendc_ops/op_api/include/aclnn_prompt_kv_cache.h +1 -1
- mindspore/lib/plugin/ascend/custom_ascendc_ops/op_api/lib/libcust_opapi.so +0 -0
- mindspore/lib/plugin/ascend/custom_ascendc_ops/op_impl/ai_core/tbe/config/ascend310p/aic-ascend310p-ops-info.json +12 -184
- mindspore/lib/plugin/ascend/custom_ascendc_ops/op_impl/ai_core/tbe/config/ascend910/aic-ascend910-ops-info.json +15 -7
- mindspore/lib/plugin/ascend/custom_ascendc_ops/op_impl/ai_core/tbe/config/ascend910b/aic-ascend910b-ops-info.json +15 -7
- mindspore/lib/plugin/ascend/custom_ascendc_ops/op_impl/ai_core/tbe/custom_ascendc_ops_impl/dynamic/add_custom.cpp +81 -0
- mindspore/lib/plugin/ascend/custom_ascendc_ops/op_impl/ai_core/tbe/custom_ascendc_ops_impl/dynamic/add_custom.py +134 -0
- mindspore/lib/plugin/ascend/custom_ascendc_ops/op_impl/ai_core/tbe/custom_ascendc_ops_impl/dynamic/decoder_kv_cache.py +31 -77
- mindspore/lib/plugin/ascend/custom_ascendc_ops/op_impl/ai_core/tbe/custom_ascendc_ops_impl/dynamic/prompt_kv_cache.py +31 -77
- mindspore/lib/plugin/ascend/custom_ascendc_ops/op_impl/ai_core/tbe/op_tiling/lib/linux/aarch64/libcust_opmaster_rt2.0.so +0 -0
- mindspore/lib/plugin/ascend/custom_ascendc_ops/op_impl/ai_core/tbe/op_tiling/liboptiling.so +0 -0
- mindspore/lib/plugin/ascend/custom_ascendc_ops/op_proto/inc/op_proto.h +5 -4
- mindspore/lib/plugin/ascend/custom_ascendc_ops/op_proto/lib/linux/aarch64/libcust_opsproto_rt2.0.so +0 -0
- mindspore/lib/plugin/ascend/libascend_collective.so +0 -0
- mindspore/lib/plugin/ascend/libdvpp_utils.so +0 -0
- mindspore/lib/plugin/ascend/libhccl_plugin.so +0 -0
- mindspore/lib/plugin/ascend/liblowlatency_collective.so +0 -0
- mindspore/lib/plugin/ascend/libmindspore_cpu_kernels.so +0 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/asdops/bin/DeviceBin +0 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/asdops/bin/PkgInspect +0 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/asdops/bin/op_man +0 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/asdops/device/ascend910b/bin/ascend910b.bin +286 -275
- mindspore/lib/plugin/ascend/ms_kernels_internal/asdops/host/libasdops_cann_host.so +0 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/asdops/host/libasdops_host.so +0 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/asdops/lib/libasdops.so +0 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/asdops/lib/libasdops_static.a +0 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/add/add_impl.h +0 -1
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/apply_rotary_pos_emb/apply_rotary_pos_emb_impl.h +0 -1
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/asdop/asd_op_impl.h +0 -3
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/backend_param.h +0 -5
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/cast/cast_tiling.h +45 -1
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/compare/compare_impl.h +0 -1
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/flash_attention_score/flash_attention_score_impl.h +4 -8
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/flash_attention_score/flash_attention_score_tiling.h +4 -11
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/flash_attention_score/kernel/flash_attention_score_mix_hwsync.h +0 -18
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/internal_kernel.h +0 -6
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/internal_rtbackend.h +75 -1
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/matmul/kernel/matmul.h +5 -5
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/matmul/matmul_impl.h +3 -18
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/matmul_common/pp_matmul_common_tiling.h +5 -5
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/matmul_common/pp_matmul_info.h +2 -2
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/matmul_common/tiling_data.h +3 -36
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/matmul_stridedslice/kernel/matmul_stridedslice_fusion.h +2 -2
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/matmul_stridedslice/matmul_stridedslice_fusion_impl.h +4 -22
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/op_param.h +2 -16
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/paged_attention/kernel/paged_attention_mix_hwsync.h +3 -1
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/paged_attention/paged_attention_impl.h +4 -5
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/paged_attention/paged_attention_tiling.h +4 -9
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/param/attention_param.h +2 -5
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/param/matmul_ext_param.h +0 -1
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/param/matmul_qkv_param.h +4 -10
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/param/sub_param.h +12 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/rms_norm/rms_norm_impl.h +0 -1
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/sub/sub_impl.h +0 -1
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/tune_repo/matmul_table.h +1 -1
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/utils/backend.h +2 -10
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/utils/elewise_utils.h +1 -5
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/utils/log/log.h +0 -1
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/utils/log/log_tiling.h +0 -17
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/utils/math.h +7 -2
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/lib/libAdd_impl.so +0 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/lib/libSub_impl.so +0 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/lib/libadd_layernorm_impl.so +0 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/lib/libadd_rms_norm_impl.so +0 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/lib/libapply_rotary_pos_emb_impl.so +0 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/lib/libcast_impl.so +0 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/lib/libgelu_impl.so +0 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/lib/libmatmul_impl.so +0 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/lib/libmatmul_stridedslice_fusion_impl.so +0 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/lib/libms_kernels_internal.so +0 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/lib/libnot_equal_impl.so +0 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/lib/libreshape_and_cache_impl.so +0 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/lib/librms_norm_impl.so +0 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/op_kernels/ascend910b/BSAttention/flash_attention_score_bf16_bnsd_full_mix.o +0 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/op_kernels/ascend910b/BSAttention/flash_attention_score_bf16_bnsd_tri_mix.o +0 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/op_kernels/ascend910b/BSAttention/flash_attention_score_bf16_bsh_full_mix.o +0 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/op_kernels/ascend910b/BSAttention/flash_attention_score_bf16_bsh_tri_mix.o +0 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/op_kernels/ascend910b/BSAttention/flash_attention_score_fp16_bnsd_full_mix.o +0 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/op_kernels/ascend910b/BSAttention/flash_attention_score_fp16_bnsd_tri_mix.o +0 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/op_kernels/ascend910b/BSAttention/flash_attention_score_fp16_bsh_full_mix.o +0 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/op_kernels/ascend910b/BSAttention/flash_attention_score_fp16_bsh_tri_mix.o +0 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/op_kernels/ascend910b/BSAttention/paged_attention_bf16_bnsd_full_mix.o +0 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/op_kernels/ascend910b/BSAttention/paged_attention_bf16_bsh_full_mix.o +0 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/op_kernels/ascend910b/BSAttention/paged_attention_fp16_bnsd_full_mix.o +0 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/op_kernels/ascend910b/BSAttention/paged_attention_fp16_bsh_full_mix.o +0 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/lccl/lib/liblcal.so +0 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/lccl/lib/liblccl_wrapper.so +0 -0
- mindspore/lib/plugin/libmindspore_ascend.so.2 +0 -0
- mindspore/mindrecord/filewriter.py +2 -2
- mindspore/mint/__init__.py +40 -720
- mindspore/mint/nn/__init__.py +7 -89
- mindspore/mint/nn/functional.py +16 -165
- mindspore/mint/optim/adamw.py +16 -15
- mindspore/nn/__init__.py +2 -0
- mindspore/nn/cell.py +98 -97
- mindspore/nn/extend/basic.py +2 -2
- mindspore/nn/extend/embedding.py +1 -1
- mindspore/nn/extend/layer/normalization.py +5 -7
- mindspore/nn/generator.py +297 -0
- mindspore/nn/layer/activation.py +3 -4
- mindspore/nn/layer/basic.py +16 -79
- mindspore/nn/layer/conv.py +8 -17
- mindspore/nn/layer/embedding.py +4 -1
- mindspore/nn/layer/math.py +1 -1
- mindspore/nn/layer/normalization.py +1 -1
- mindspore/nn/layer/pooling.py +0 -5
- mindspore/nn/layer/rnn_cells.py +2 -2
- mindspore/nn/loss/loss.py +19 -19
- mindspore/nn/optim/adasum.py +1 -1
- mindspore/nn/optim/sgd.py +2 -3
- mindspore/nn/probability/distribution/exponential.py +1 -1
- mindspore/nn/probability/distribution/geometric.py +1 -1
- mindspore/nn/probability/distribution/logistic.py +1 -1
- mindspore/nn/wrap/cell_wrapper.py +1 -25
- mindspore/nn/wrap/loss_scale.py +1 -24
- mindspore/numpy/array_ops.py +1 -5
- mindspore/numpy/dtypes.py +3 -3
- mindspore/numpy/math_ops.py +8 -8
- mindspore/ops/__init__.py +1 -1
- mindspore/ops/_grad_experimental/grad_comm_ops.py +16 -75
- mindspore/ops/_vmap/vmap_array_ops.py +0 -27
- mindspore/ops/_vmap/vmap_math_ops.py +1 -29
- mindspore/ops/_vmap/vmap_nn_ops.py +18 -19
- mindspore/ops/auto_generate/cpp_create_prim_instance_helper.py +8 -34
- mindspore/ops/auto_generate/gen_arg_dtype_cast.py +9 -2
- mindspore/ops/auto_generate/gen_arg_handler.py +0 -26
- mindspore/ops/auto_generate/gen_extend_func.py +27 -603
- mindspore/ops/auto_generate/gen_ops_def.py +203 -993
- mindspore/ops/auto_generate/gen_ops_prim.py +402 -1946
- mindspore/ops/auto_generate/pyboost_inner_prim.py +20 -90
- mindspore/ops/composite/base.py +6 -3
- mindspore/ops/composite/math_ops.py +1 -1
- mindspore/ops/composite/multitype_ops/_compile_utils.py +17 -24
- mindspore/ops/composite/multitype_ops/_constexpr_utils.py +1 -1
- mindspore/ops/extend/__init__.py +3 -2
- mindspore/ops/extend/array_func.py +51 -10
- mindspore/ops/extend/nn_func.py +78 -2
- mindspore/ops/function/__init__.py +13 -8
- mindspore/ops/function/array_func.py +179 -455
- mindspore/ops/function/clip_func.py +1 -1
- mindspore/ops/function/grad/grad_func.py +3 -3
- mindspore/ops/function/math_func.py +103 -117
- mindspore/ops/function/nn_func.py +163 -275
- mindspore/ops/function/other_func.py +2 -2
- mindspore/ops/function/random_func.py +69 -202
- mindspore/ops/function/sparse_func.py +4 -4
- mindspore/ops/functional.py +327 -332
- mindspore/ops/operations/__init__.py +3 -13
- mindspore/ops/operations/_grad_ops.py +27 -3
- mindspore/ops/operations/_inner_ops.py +356 -53
- mindspore/ops/operations/_rl_inner_ops.py +2 -2
- mindspore/ops/operations/_tensor_array.py +8 -8
- mindspore/ops/operations/array_ops.py +65 -82
- mindspore/ops/operations/comm_ops.py +93 -784
- mindspore/ops/operations/custom_ops.py +28 -51
- mindspore/ops/operations/debug_ops.py +4 -4
- mindspore/ops/operations/inner_ops.py +2 -2
- mindspore/ops/operations/manually_defined/ops_def.py +4 -304
- mindspore/ops/operations/math_ops.py +50 -3
- mindspore/ops/operations/nn_ops.py +247 -14
- mindspore/ops/operations/other_ops.py +3 -3
- mindspore/ops/operations/random_ops.py +1 -1
- mindspore/ops/operations/sparse_ops.py +1 -1
- mindspore/ops/primitive.py +8 -9
- mindspore/ops/silent_check.py +5 -5
- mindspore/ops_generate/arg_dtype_cast.py +9 -2
- mindspore/ops_generate/arg_handler.py +0 -26
- mindspore/ops_generate/gen_aclnn_implement.py +4 -1
- mindspore/ops_generate/gen_ops.py +4 -26
- mindspore/ops_generate/gen_pyboost_func.py +12 -41
- mindspore/ops_generate/gen_utils.py +0 -21
- mindspore/ops_generate/pyboost_utils.py +2 -7
- mindspore/ops_generate/template.py +0 -1
- mindspore/parallel/_auto_parallel_context.py +1 -21
- mindspore/parallel/_tensor.py +5 -0
- mindspore/parallel/_transformer/transformer.py +1 -1
- mindspore/parallel/_utils.py +1 -15
- mindspore/parallel/algo_parameter_config.py +3 -1
- mindspore/parallel/checkpoint_transform.py +9 -12
- mindspore/parallel/cluster/process_entity/_api.py +29 -28
- mindspore/parallel/cluster/process_entity/_utils.py +3 -13
- mindspore/parallel/cluster/run.py +16 -13
- mindspore/parallel/parameter_broadcast.py +2 -2
- mindspore/parallel/shard.py +17 -31
- mindspore/profiler/__init__.py +2 -3
- mindspore/profiler/common/util.py +2 -107
- mindspore/profiler/envprofiling.py +1 -1
- mindspore/profiler/parser/ascend_analysis/constant.py +21 -8
- mindspore/profiler/parser/ascend_analysis/file_manager.py +0 -82
- mindspore/profiler/parser/ascend_analysis/function_event.py +28 -43
- mindspore/profiler/parser/ascend_analysis/fwk_cann_parser.py +27 -49
- mindspore/profiler/parser/ascend_analysis/fwk_file_parser.py +10 -15
- mindspore/profiler/parser/ascend_analysis/msprof_timeline_parser.py +20 -25
- mindspore/profiler/parser/ascend_analysis/profiler_info_parser.py +5 -5
- mindspore/profiler/parser/ascend_analysis/trace_event_manager.py +1 -10
- mindspore/profiler/parser/ascend_hccl_generator.py +1 -4
- mindspore/profiler/parser/ascend_msprof_exporter.py +22 -43
- mindspore/profiler/parser/ascend_timeline_generator.py +5 -7
- mindspore/profiler/parser/minddata_parser.py +3 -72
- mindspore/profiler/profiling.py +59 -176
- mindspore/rewrite/api/node.py +1 -1
- mindspore/rewrite/common/namespace.py +5 -5
- mindspore/rewrite/parsers/assign_parser.py +0 -2
- mindspore/rewrite/parsers/class_def_parser.py +4 -8
- mindspore/run_check/_check_version.py +1 -1
- mindspore/scipy/fft.py +3 -1
- mindspore/scipy/linalg.py +3 -2
- mindspore/scipy/ops.py +3 -5
- mindspore/scipy/optimize/__init__.py +2 -2
- mindspore/train/__init__.py +4 -4
- mindspore/train/anf_ir_pb2.py +2 -8
- mindspore/train/callback/__init__.py +2 -5
- mindspore/train/callback/_backup_and_restore.py +2 -2
- mindspore/train/callback/_checkpoint.py +16 -104
- mindspore/train/callback/_landscape.py +1 -1
- mindspore/train/callback/_time_monitor.py +1 -1
- mindspore/train/data_sink.py +4 -5
- mindspore/train/dataset_helper.py +20 -45
- mindspore/train/model.py +38 -266
- mindspore/train/serialization.py +105 -256
- mindspore/train/summary/_summary_adapter.py +1 -1
- mindspore/version.py +1 -1
- {mindspore-2.3.0.dist-info → mindspore-2.3.0rc2.dist-info}/METADATA +2 -2
- {mindspore-2.3.0.dist-info → mindspore-2.3.0rc2.dist-info}/RECORD +303 -420
- mindspore/_extends/pijit/__init__.py +0 -23
- mindspore/_extends/pijit/pijit_func_white_list.py +0 -343
- mindspore/common/file_system.py +0 -48
- mindspore/common/generator.py +0 -260
- mindspore/common/no_inline.py +0 -54
- mindspore/common/np_dtype.py +0 -25
- mindspore/communication/comm_func.py +0 -1140
- mindspore/hal/memory.py +0 -326
- mindspore/lib/libavcodec.so.59 +0 -0
- mindspore/lib/libavdevice.so.59 +0 -0
- mindspore/lib/libavfilter.so.8 +0 -0
- mindspore/lib/libavformat.so.59 +0 -0
- mindspore/lib/libavutil.so.57 +0 -0
- mindspore/lib/libmindspore_np_dtype.so +0 -0
- mindspore/lib/libswresample.so.4 +0 -0
- mindspore/lib/libswscale.so.6 +0 -0
- mindspore/lib/plugin/ascend/custom_ascendc_ops/op_impl/ai_core/tbe/custom_ascendc_ops_impl/dynamic/all_finite.cpp +0 -326
- mindspore/lib/plugin/ascend/custom_ascendc_ops/op_impl/ai_core/tbe/custom_ascendc_ops_impl/dynamic/all_finite.py +0 -180
- mindspore/lib/plugin/ascend/custom_ascendc_ops/op_impl/ai_core/tbe/kernel/ascend910b/all_finite/AllFinite_576ceaeef5870c451cab59af55ea46ad.json +0 -58
- mindspore/lib/plugin/ascend/custom_ascendc_ops/op_impl/ai_core/tbe/kernel/ascend910b/all_finite/AllFinite_576ceaeef5870c451cab59af55ea46ad.o +0 -0
- mindspore/lib/plugin/ascend/custom_ascendc_ops/op_impl/ai_core/tbe/kernel/ascend910b/all_finite/AllFinite_86a73ff6e28d734c96bb8d3054f7dd18.json +0 -58
- mindspore/lib/plugin/ascend/custom_ascendc_ops/op_impl/ai_core/tbe/kernel/ascend910b/all_finite/AllFinite_86a73ff6e28d734c96bb8d3054f7dd18.o +0 -0
- mindspore/lib/plugin/ascend/custom_ascendc_ops/op_impl/ai_core/tbe/kernel/ascend910b/all_finite/AllFinite_f55e0ebaad1f2f572e43677336992fa0.json +0 -58
- mindspore/lib/plugin/ascend/custom_ascendc_ops/op_impl/ai_core/tbe/kernel/ascend910b/all_finite/AllFinite_f55e0ebaad1f2f572e43677336992fa0.o +0 -0
- mindspore/lib/plugin/ascend/custom_ascendc_ops/op_impl/ai_core/tbe/kernel/config/ascend910b/all_finite.json +0 -109
- mindspore/lib/plugin/ascend/custom_ascendc_ops/op_impl/ai_core/tbe/kernel/config/ascend910b/binary_info_config.json +0 -38
- mindspore/lib/plugin/ascend/custom_compiler/OWNERS +0 -12
- mindspore/lib/plugin/ascend/custom_compiler/setup.py +0 -255
- mindspore/lib/plugin/ascend/custom_compiler/start.sh +0 -26
- mindspore/lib/plugin/ascend/custom_compiler/template.json +0 -40
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/acme/include/acme.h +0 -24
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/acme/include/acme_op.h +0 -69
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/acme/include/base_type.h +0 -133
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/acme/include/op_creator.h +0 -32
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/acme/include/op_param.h +0 -35
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/acme/include/tiling_info.h +0 -60
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/acme/src/core/kernel_register.h +0 -37
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/acme/src/core/platform/platform_configs.h +0 -89
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/acme/src/core/platform/rt_funcs.h +0 -135
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/acme/src/ops/host_src/add_op.h +0 -34
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/acme/src/ops/host_src/asd_backoff_base.h +0 -62
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/acme/src/ops/host_src/asd_elewise_op.h +0 -33
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/acme/src/ops/host_src/asd_ops.h +0 -88
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/acme/src/ops/host_src/asd_pa_op.h +0 -45
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/acme/src/ops/host_src/cast_op.h +0 -52
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/acme/src/ops/host_src/matmul_op.h +0 -95
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/acme/src/utils/asd_utils.h +0 -84
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/acme/src/utils/comm_utils.h +0 -61
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/apply_rotary_pos_emb/kernel/apply_rotary_pos_emb_fp32.h +0 -224
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/elewise_binary/and_impl.h +0 -29
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/elewise_binary/div_impl.h +0 -29
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/elewise_binary/elewise_binary_impl.h +0 -48
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/elewise_binary/elewise_binary_tiling.h +0 -25
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/elewise_binary/kernel/and_kernel.h +0 -46
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/elewise_binary/kernel/div_kernel.h +0 -46
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/elewise_binary/kernel/elewise_binary_base.h +0 -260
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/elewise_binary/kernel/elewise_binary_kernel.h +0 -35
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/elewise_binary/kernel/max_kernel.h +0 -66
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/elewise_binary/kernel/min_kernel.h +0 -66
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/elewise_binary/kernel/mul_kernel.h +0 -66
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/elewise_binary/kernel/or_kernel.h +0 -46
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/elewise_binary/max_impl.h +0 -29
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/elewise_binary/min_impl.h +0 -29
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/elewise_binary/mul_impl.h +0 -29
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/elewise_binary/or_impl.h +0 -29
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/elewise_unary/abs_impl.h +0 -29
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/elewise_unary/elewise_unary_impl.h +0 -47
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/elewise_unary/elewise_unary_tiling.h +0 -24
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/elewise_unary/exp_impl.h +0 -29
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/elewise_unary/kernel/abs_kernel.h +0 -45
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/elewise_unary/kernel/elewise_unary_base.h +0 -148
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/elewise_unary/kernel/elewise_unary_kernel.h +0 -31
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/elewise_unary/kernel/exp_kernel.h +0 -45
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/elewise_unary/kernel/ln_kernel.h +0 -45
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/elewise_unary/kernel/not_kernel.h +0 -45
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/elewise_unary/kernel/reciprocal_kernel.h +0 -45
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/elewise_unary/kernel/relu_kernel.h +0 -55
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/elewise_unary/kernel/rsqrt_kernel.h +0 -45
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/elewise_unary/kernel/sqrt_kernel.h +0 -45
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/elewise_unary/ln_impl.h +0 -29
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/elewise_unary/not_impl.h +0 -29
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/elewise_unary/reciprocal_impl.h +0 -29
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/elewise_unary/relu_impl.h +0 -29
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/elewise_unary/rsqrt_impl.h +0 -29
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/elewise_unary/sqrt_impl.h +0 -29
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/grouped_matmul/grouped_matmul_impl.h +0 -45
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/grouped_matmul/grouped_matmul_tiling.h +0 -187
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/grouped_matmul/kernel/grouped_matmul.h +0 -245
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/grouped_matmul/kernel/grouped_matmul_interface.h +0 -24
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/grouped_matmul/kernel/grouped_matmul_utils.h +0 -111
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/grouped_matmul/tiling_data.h +0 -54
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/param/compare_param.h +0 -31
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/param/elewise_param.h +0 -41
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/param/grouped_matmul_param.h +0 -40
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/profiling_util.h +0 -364
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/utils/log/log_utils.h +0 -69
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/utils/register/kernel_creator.h +0 -39
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/utils/register/kernel_registry.h +0 -114
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/utils/utils.h +0 -98
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/op_kernels/ascend910b/MatMulPostFusionMixTactic/matmul_postfusion_mix.json +0 -19
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/op_kernels/ascend910b/MatMulPostFusionMixTactic/matmul_postfusion_mix.o +0 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/op_kernels/ascend910b/MatMulPostFusionMixTactic/matmul_postfusion_mix_mix_aic_0.o +0 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/op_kernels/ascend910b/MatMulPostFusionMixTactic/matmul_postfusion_mix_mix_aiv_0.o +0 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/op_kernels/ascend910b/MultiMatMulPostFusionMixTactic/multi_matmul_postfusion_mix.json +0 -19
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/op_kernels/ascend910b/MultiMatMulPostFusionMixTactic/multi_matmul_postfusion_mix.o +0 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/op_kernels/ascend910b/MultiMatMulPostFusionMixTactic/multi_matmul_postfusion_mix_mix_aic_0.o +0 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/op_kernels/ascend910b/MultiMatMulPostFusionMixTactic/multi_matmul_postfusion_mix_mix_aiv_0.o +0 -0
- mindspore/mint/linalg/__init__.py +0 -22
- mindspore/nn/layer/embedding_service.py +0 -531
- mindspore/nn/layer/embedding_service_layer.py +0 -393
- mindspore/ops/function/reshard_func.py +0 -102
- mindspore/ops/operations/_infer_ops.py +0 -19
- mindspore/ops/operations/reshard_ops.py +0 -53
- mindspore/profiler/common/process_pool.py +0 -41
- mindspore/profiler/common/singleton.py +0 -28
- mindspore/profiler/parser/ascend_integrate_generator.py +0 -42
- mindspore/profiler/parser/ascend_memory_generator.py +0 -185
- mindspore/train/callback/_cluster_monitor.py +0 -201
- mindspore/train/callback/_flops_collector.py +0 -238
- mindspore/train/callback/_mindio_ttp.py +0 -443
- {mindspore-2.3.0.dist-info → mindspore-2.3.0rc2.dist-info}/WHEEL +0 -0
- {mindspore-2.3.0.dist-info → mindspore-2.3.0rc2.dist-info}/entry_points.txt +0 -0
- {mindspore-2.3.0.dist-info → mindspore-2.3.0rc2.dist-info}/top_level.txt +0 -0
mindspore/lib/plugin/ascend/custom_ascendc_ops/op_impl/ai_core/tbe/config/ascend310p/aic-ascend310p-ops-info.json

@@ -1,5 +1,5 @@
 {
-"
+"AddCustom":{
 "dynamicCompileStatic":{
 "flag":"true"
 },
@@ -13,209 +13,37 @@
 "flag":"true"
 },
 "input0":{
-"dtype":"float16,float32,
+"dtype":"float16,float32,int32",
 "format":"ND,ND,ND",
-"name":"
+"name":"x",
 "paramType":"required",
 "shape":"all",
 "unknownshape_format":"ND,ND,ND"
 },
-"
-"
-},
-"opFile":{
-"value":"all_finite"
-},
-"opInterface":{
-"value":"all_finite"
-},
-"output0":{
-"dtype":"bool,bool,bool",
+"input1":{
+"dtype":"float16,float32,int32",
 "format":"ND,ND,ND",
-"name":"
+"name":"y",
 "paramType":"required",
 "shape":"all",
 "unknownshape_format":"ND,ND,ND"
 },
-"precision_reduce":{
-"flag":"true"
-}
-},
-"DecoderKvCache":{
-"dynamicCompileStatic":{
-"flag":"true"
-},
-"dynamicFormat":{
-"flag":"true"
-},
-"dynamicRankSupport":{
-"flag":"true"
-},
-"dynamicShapeSupport":{
-"flag":"true"
-},
-"input0":{
-"dtype":"float16,float32,int8,int16,int32,uint8,uint16,bfloat16",
-"format":"ND,ND,ND,ND,ND,ND,ND,ND",
-"name":"cache",
-"paramType":"required",
-"shape":"all",
-"unknownshape_format":"ND,ND,ND,ND,ND,ND,ND,ND"
-},
-"input1":{
-"dtype":"float16,float32,int8,int16,int32,uint8,uint16,bfloat16",
-"format":"ND,ND,ND,ND,ND,ND,ND,ND",
-"name":"update",
-"paramType":"required",
-"shape":"all",
-"unknownshape_format":"ND,ND,ND,ND,ND,ND,ND,ND"
-},
-"input2":{
-"dtype":"int64,int64,int64,int64,int64,int64,int64,int64",
-"format":"ND,ND,ND,ND,ND,ND,ND,ND",
-"name":"valid_seq_len",
-"paramType":"required",
-"shape":"all",
-"unknownshape_format":"ND,ND,ND,ND,ND,ND,ND,ND"
-},
-"input3":{
-"dtype":"int64,int64,int64,int64,int64,int64,int64,int64",
-"format":"ND,ND,ND,ND,ND,ND,ND,ND",
-"name":"batch_index",
-"paramType":"required",
-"shape":"all",
-"unknownshape_format":"ND,ND,ND,ND,ND,ND,ND,ND"
-},
-"input4":{
-"dtype":"int64,int64,int64,int64,int64,int64,int64,int64",
-"format":"ND,ND,ND,ND,ND,ND,ND,ND",
-"name":"seq_len_axis",
-"paramType":"required",
-"shape":"all",
-"unknownshape_format":"ND,ND,ND,ND,ND,ND,ND,ND"
-},
-"input5":{
-"dtype":"int64,int64,int64,int64,int64,int64,int64,int64",
-"format":"ND,ND,ND,ND,ND,ND,ND,ND",
-"name":"new_max_seq_len",
-"paramType":"required",
-"shape":"all",
-"unknownshape_format":"ND,ND,ND,ND,ND,ND,ND,ND"
-},
-"input6":{
-"dtype":"int64,int64,int64,int64,int64,int64,int64,int64",
-"format":"ND,ND,ND,ND,ND,ND,ND,ND",
-"name":"cur_max_seq_len",
-"paramType":"required",
-"shape":"all",
-"unknownshape_format":"ND,ND,ND,ND,ND,ND,ND,ND"
-},
-"needCheckSupport":{
-"flag":"false"
-},
-"opFile":{
-"value":"decoder_kv_cache"
-},
-"opInterface":{
-"value":"decoder_kv_cache"
-},
-"output0":{
-"dtype":"float16,float32,int8,int16,int32,uint8,uint16,bfloat16",
-"format":"ND,ND,ND,ND,ND,ND,ND,ND",
-"name":"out",
-"paramType":"required",
-"shape":"all",
-"unknownshape_format":"ND,ND,ND,ND,ND,ND,ND,ND"
-},
-"precision_reduce":{
-"flag":"true"
-}
-},
-"PromptKvCache":{
-"dynamicCompileStatic":{
-"flag":"true"
-},
-"dynamicFormat":{
-"flag":"true"
-},
-"dynamicRankSupport":{
-"flag":"true"
-},
-"dynamicShapeSupport":{
-"flag":"true"
-},
-"input0":{
-"dtype":"float16,float32,int8,int16,int32,uint8,uint16,bfloat16",
-"format":"ND,ND,ND,ND,ND,ND,ND,ND",
-"name":"cache",
-"paramType":"required",
-"shape":"all",
-"unknownshape_format":"ND,ND,ND,ND,ND,ND,ND,ND"
-},
-"input1":{
-"dtype":"float16,float32,int8,int16,int32,uint8,uint16,bfloat16",
-"format":"ND,ND,ND,ND,ND,ND,ND,ND",
-"name":"update",
-"paramType":"required",
-"shape":"all",
-"unknownshape_format":"ND,ND,ND,ND,ND,ND,ND,ND"
-},
-"input2":{
-"dtype":"int64,int64,int64,int64,int64,int64,int64,int64",
-"format":"ND,ND,ND,ND,ND,ND,ND,ND",
-"name":"valid_seq_len",
-"paramType":"required",
-"shape":"all",
-"unknownshape_format":"ND,ND,ND,ND,ND,ND,ND,ND"
-},
-"input3":{
-"dtype":"int64,int64,int64,int64,int64,int64,int64,int64",
-"format":"ND,ND,ND,ND,ND,ND,ND,ND",
-"name":"batch_index",
-"paramType":"required",
-"shape":"all",
-"unknownshape_format":"ND,ND,ND,ND,ND,ND,ND,ND"
-},
-"input4":{
-"dtype":"int64,int64,int64,int64,int64,int64,int64,int64",
-"format":"ND,ND,ND,ND,ND,ND,ND,ND",
-"name":"seq_len_axis",
-"paramType":"required",
-"shape":"all",
-"unknownshape_format":"ND,ND,ND,ND,ND,ND,ND,ND"
-},
-"input5":{
-"dtype":"int64,int64,int64,int64,int64,int64,int64,int64",
-"format":"ND,ND,ND,ND,ND,ND,ND,ND",
-"name":"new_max_seq_len",
-"paramType":"required",
-"shape":"all",
-"unknownshape_format":"ND,ND,ND,ND,ND,ND,ND,ND"
-},
-"input6":{
-"dtype":"int64,int64,int64,int64,int64,int64,int64,int64",
-"format":"ND,ND,ND,ND,ND,ND,ND,ND",
-"name":"cur_max_seq_len",
-"paramType":"required",
-"shape":"all",
-"unknownshape_format":"ND,ND,ND,ND,ND,ND,ND,ND"
-},
 "needCheckSupport":{
 "flag":"false"
 },
 "opFile":{
-"value":"
+"value":"add_custom"
 },
 "opInterface":{
-"value":"
+"value":"add_custom"
 },
 "output0":{
-"dtype":"float16,float32,
-"format":"ND,ND,ND
-"name":"
+"dtype":"float16,float32,int32",
+"format":"ND,ND,ND",
+"name":"z",
 "paramType":"required",
 "shape":"all",
-"unknownshape_format":"ND,ND,ND
+"unknownshape_format":"ND,ND,ND"
 },
 "precision_reduce":{
 "flag":"true"
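
Because the hunks above interleave the new entry with context lines inherited from the entries it replaces, the result is hard to read off the diff directly. Below is a sketch of the AddCustom registration entry as it stands after the diff is applied, reassembled as a Python dict purely for readability; the values come from the "+" and unchanged context lines above, while lines 6-12 of the JSON file fall outside the hunks shown and are omitted here.

# Sketch only: AddCustom op-info entry reconstructed from the visible hunk lines.
ADD_CUSTOM_OP_INFO = {
    "AddCustom": {
        "dynamicCompileStatic": {"flag": "true"},
        # lines 6-12 of the file are not shown in the diff and are omitted
        "input0": {"dtype": "float16,float32,int32", "format": "ND,ND,ND",
                   "name": "x", "paramType": "required", "shape": "all",
                   "unknownshape_format": "ND,ND,ND"},
        "input1": {"dtype": "float16,float32,int32", "format": "ND,ND,ND",
                   "name": "y", "paramType": "required", "shape": "all",
                   "unknownshape_format": "ND,ND,ND"},
        "needCheckSupport": {"flag": "false"},
        "opFile": {"value": "add_custom"},
        "opInterface": {"value": "add_custom"},
        "output0": {"dtype": "float16,float32,int32", "format": "ND,ND,ND",
                    "name": "z", "paramType": "required", "shape": "all",
                    "unknownshape_format": "ND,ND,ND"},
        "precision_reduce": {"flag": "true"},
    }
}
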
mindspore/lib/plugin/ascend/custom_ascendc_ops/op_impl/ai_core/tbe/config/ascend910/aic-ascend910-ops-info.json

@@ -1,5 +1,5 @@
 {
-"
+"AddCustom":{
 "dynamicCompileStatic":{
 "flag":"true"
 },
@@ -13,9 +13,17 @@
 "flag":"true"
 },
 "input0":{
-"dtype":"float16,float32,
+"dtype":"float16,float32,int32",
 "format":"ND,ND,ND",
-"name":"
+"name":"x",
+"paramType":"required",
+"shape":"all",
+"unknownshape_format":"ND,ND,ND"
+},
+"input1":{
+"dtype":"float16,float32,int32",
+"format":"ND,ND,ND",
+"name":"y",
 "paramType":"required",
 "shape":"all",
 "unknownshape_format":"ND,ND,ND"
@@ -24,15 +32,15 @@
 "flag":"false"
 },
 "opFile":{
-"value":"
+"value":"add_custom"
 },
 "opInterface":{
-"value":"
+"value":"add_custom"
 },
 "output0":{
-"dtype":"
+"dtype":"float16,float32,int32",
 "format":"ND,ND,ND",
-"name":"
+"name":"z",
 "paramType":"required",
 "shape":"all",
 "unknownshape_format":"ND,ND,ND"
mindspore/lib/plugin/ascend/custom_ascendc_ops/op_impl/ai_core/tbe/config/ascend910b/aic-ascend910b-ops-info.json

@@ -1,5 +1,5 @@
 {
-"
+"AddCustom":{
 "dynamicCompileStatic":{
 "flag":"true"
 },
@@ -13,9 +13,17 @@
 "flag":"true"
 },
 "input0":{
-"dtype":"float16,float32,
+"dtype":"float16,float32,int32",
 "format":"ND,ND,ND",
-"name":"
+"name":"x",
+"paramType":"required",
+"shape":"all",
+"unknownshape_format":"ND,ND,ND"
+},
+"input1":{
+"dtype":"float16,float32,int32",
+"format":"ND,ND,ND",
+"name":"y",
 "paramType":"required",
 "shape":"all",
 "unknownshape_format":"ND,ND,ND"
@@ -24,15 +32,15 @@
 "flag":"false"
 },
 "opFile":{
-"value":"
+"value":"add_custom"
 },
 "opInterface":{
-"value":"
+"value":"add_custom"
 },
 "output0":{
-"dtype":"
+"dtype":"float16,float32,int32",
 "format":"ND,ND,ND",
-"name":"
+"name":"z",
 "paramType":"required",
 "shape":"all",
 "unknownshape_format":"ND,ND,ND"
mindspore/lib/plugin/ascend/custom_ascendc_ops/op_impl/ai_core/tbe/custom_ascendc_ops_impl/dynamic/add_custom.cpp (new file)

@@ -0,0 +1,81 @@
+/*
+ * Copyright (c) Huawei Technologies Co., Ltd. 2022-2023. All rights reserved.
+ *
+ * Function : z = x + y
+ * This sample is a very basic sample that implements vector add on Ascend platform.
+ */
+#include "kernel_operator.h"
+using namespace AscendC;
+
+constexpr int32_t BUFFER_NUM = 2;
+
+class KernelAdd {
+public:
+    __aicore__ inline KernelAdd() {}
+    __aicore__ inline void Init(GM_ADDR x, GM_ADDR y, GM_ADDR z, uint32_t totalLength, uint32_t tileNum) {
+        ASSERT(GetBlockNum() != 0 && "block dim can not be zero!");
+        this->blockLength = totalLength / GetBlockNum();
+        this->tileNum = tileNum;
+        ASSERT(tileNum != 0 && "tile num can not be zero!");
+        this->tileLength = this->blockLength / tileNum / BUFFER_NUM;
+
+        xGm.SetGlobalBuffer((__gm__ DTYPE_X *)x + this->blockLength * GetBlockIdx(), this->blockLength);
+        yGm.SetGlobalBuffer((__gm__ DTYPE_Y *)y + this->blockLength * GetBlockIdx(), this->blockLength);
+        zGm.SetGlobalBuffer((__gm__ DTYPE_Z *)z + this->blockLength * GetBlockIdx(), this->blockLength);
+        pipe.InitBuffer(inQueueX, BUFFER_NUM, this->tileLength * sizeof(DTYPE_X));
+        pipe.InitBuffer(inQueueY, BUFFER_NUM, this->tileLength * sizeof(DTYPE_Y));
+        pipe.InitBuffer(outQueueZ, BUFFER_NUM, this->tileLength * sizeof(DTYPE_Z));
+    }
+    __aicore__ inline void Process() {
+        int32_t loopCount = this->tileNum * BUFFER_NUM;
+        for (int32_t i = 0; i < loopCount; i++) {
+            CopyIn(i);
+            Compute(i);
+            CopyOut(i);
+        }
+    }
+
+private:
+    __aicore__ inline void CopyIn(int32_t progress) {
+        LocalTensor<DTYPE_X> xLocal = inQueueX.AllocTensor<DTYPE_X>();
+        LocalTensor<DTYPE_Y> yLocal = inQueueY.AllocTensor<DTYPE_Y>();
+        DataCopy(xLocal, xGm[progress * this->tileLength], this->tileLength);
+        DataCopy(yLocal, yGm[progress * this->tileLength], this->tileLength);
+        inQueueX.EnQue(xLocal);
+        inQueueY.EnQue(yLocal);
+    }
+    __aicore__ inline void Compute(int32_t progress) {
+        LocalTensor<DTYPE_X> xLocal = inQueueX.DeQue<DTYPE_X>();
+        LocalTensor<DTYPE_Y> yLocal = inQueueY.DeQue<DTYPE_Y>();
+        LocalTensor<DTYPE_Z> zLocal = outQueueZ.AllocTensor<DTYPE_Z>();
+        Add(zLocal, xLocal, yLocal, this->tileLength);
+        outQueueZ.EnQue<DTYPE_Z>(zLocal);
+        inQueueX.FreeTensor(xLocal);
+        inQueueY.FreeTensor(yLocal);
+    }
+    __aicore__ inline void CopyOut(int32_t progress) {
+        LocalTensor<DTYPE_Z> zLocal = outQueueZ.DeQue<DTYPE_Z>();
+        DataCopy(zGm[progress * this->tileLength], zLocal, this->tileLength);
+        outQueueZ.FreeTensor(zLocal);
+    }
+
+private:
+    TPipe pipe;
+    TQue<QuePosition::VECIN, BUFFER_NUM> inQueueX, inQueueY;
+    TQue<QuePosition::VECOUT, BUFFER_NUM> outQueueZ;
+    GlobalTensor<DTYPE_X> xGm;
+    GlobalTensor<DTYPE_Y> yGm;
+    GlobalTensor<DTYPE_Z> zGm;
+    uint32_t blockLength = 0;
+    uint32_t tileNum = 0;
+    uint32_t tileLength = 0;
+};
+
+extern "C" __global__ __aicore__ void add_custom(GM_ADDR x, GM_ADDR y, GM_ADDR z, GM_ADDR workspace, GM_ADDR tiling) {
+    GET_TILING_DATA(tilingData, tiling);
+    KernelAdd op;
+    op.Init(x, y, z, tilingData.totalLength, tilingData.tileNum);
+    if (TILING_KEY_IS(1)) {
+        op.Process();
+    }
+}
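
The kernel above first splits the work across AI cores (each core handles totalLength / GetBlockNum() elements) and then into double-buffered tiles (each DataCopy moves blockLength / tileNum / BUFFER_NUM elements, for tileNum * BUFFER_NUM iterations per core). A minimal Python sketch of that index arithmetic follows; the names are illustrative only and not a MindSpore or AscendC API.

# Hedged sketch: mirrors the index arithmetic in KernelAdd::Init/Process above.
BUFFER_NUM = 2  # double buffering, as in the kernel

def plan_tiles(total_length, block_num, tile_num):
    """Return (block_offset, tile_offset, tile_length) triples, one per DataCopy."""
    assert block_num != 0 and tile_num != 0
    block_length = total_length // block_num              # elements handled per core
    tile_length = block_length // tile_num // BUFFER_NUM  # elements moved per DataCopy
    plans = []
    for block_idx in range(block_num):
        base = block_length * block_idx                   # GlobalBuffer offset for this core
        for progress in range(tile_num * BUFFER_NUM):     # loopCount in Process()
            plans.append((base, progress * tile_length, tile_length))
    return plans

# e.g. 8192 elements on 8 cores with 8 tiles each: 16 copies of 64 elements per core
assert len(plan_tiles(8192, 8, 8)) == 8 * 16
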
mindspore/lib/plugin/ascend/custom_ascendc_ops/op_impl/ai_core/tbe/custom_ascendc_ops_impl/dynamic/add_custom.py (new file)

@@ -0,0 +1,134 @@
+
+import os, sys
+import ctypes
+import json
+import shutil
+from tbe.common.platform import get_soc_spec
+from tbe.common.utils import para_check
+from tbe.tikcpp import compile_op, replay_op, check_op_cap, generalize_op_params, get_code_channel, OpInfo
+from tbe.common.buildcfg import get_default_build_config
+from impl.util.platform_adapter import tbe_register
+from tbe.common.buildcfg import get_current_build_config
+PYF_PATH = os.path.dirname(os.path.realpath(__file__))
+
+DTYPE_MAP = {"float32": ["DT_FLOAT", "float"],
+             "float16": ["DT_FLOAT16", "half"],
+             "int8": ["DT_INT8", "int8_t"],
+             "int16": ["DT_INT16", "int16_t"],
+             "int32": ["DT_INT32", "int32_t"],
+             "int64": ["DT_INT64", "int64_t"],
+             "uint1": ["DT_UINT1", "uint8_t"],
+             "uint8": ["DT_UINT8", "uint8_t"],
+             "uint16": ["DT_UINT16", "uint16_t"],
+             "uint32": ["DT_UINT32", "uint32_t"],
+             "uint64": ["DT_UINT64", "uint64_t"],
+             "bool": ["DT_BOOL", "bool"],
+             "double": ["DT_DOUBLE", "double"],
+             "dual": ["DT_DUAL", "unknown"],
+             "dual_sub_int8": ["DT_DUAL_SUB_INT8", "unknown"],
+             "dual_sub_uint8": ["DT_DUAL_SUB_UINT8", "unknown"],
+             "string": ["DT_STRING", "unknown"],
+             "complex64": ["DT_COMPLEX64", "unknown"],
+             "complex128": ["DT_COMPLEX128", "unknown"],
+             "qint8": ["DT_QINT8", "unknown"],
+             "qint16": ["DT_QINT16", "unknown"],
+             "qint32": ["DT_QINT32", "unknown"],
+             "quint8": ["DT_QUINT8", "unknown"],
+             "quint16": ["DT_QUINT16", "unknown"],
+             "resource": ["DT_RESOURCE", "unknown"],
+             "string_ref": ["DT_STRING_REF", "unknown"],
+             "int4": ["DT_INT4", "int8_t"],
+             "bfloat16": ["DT_BF16", "bfloat16_t"]}
+
+def get_dtype_fmt_options(__inputs__, __outputs__):
+    options = []
+    for x in __inputs__ + __outputs__:
+        x_n = x.get("param_name").upper()
+        x_fmt = x.get("format")
+        x_dtype = x.get("dtype")
+        options.append("-DDTYPE_{n}={t}".format(n=x_n, t=DTYPE_MAP.get(x_dtype)[1]))
+        options.append("-DORIG_DTYPE_{n}={ot}".format(n=x_n, ot=DTYPE_MAP.get(x_dtype)[0]))
+        options.append("-DFORMAT_{n}=FORMAT_{f}".format(n=x_n, f=x_fmt))
+    return options
+
+def load_dso(so_path):
+    try:
+        ctypes.CDLL(so_path)
+    except OSError as error :
+        print(error)
+        raise RuntimeError("cannot open %s" %(so_path))
+    else:
+        print("load so succ ", so_path)
+
+def _build_args(x, y, z):
+    __inputs__ = []
+    for arg in [x, y]:
+        if arg != None:
+            if type(arg) is list:
+                if len(arg) == 0:
+                    continue
+                __inputs__.append(arg[0])
+            else:
+                __inputs__.append(arg)
+    __outputs__ = []
+    for arg in [z]:
+        if arg != None:
+            if type(arg) is list:
+                if len(arg) == 0:
+                    continue
+                __outputs__.append(arg[0])
+            else:
+                __outputs__.append(arg)
+    __attrs__ = []
+    return __inputs__, __outputs__, __attrs__
+
+@tbe_register.register_operator("AddCustom")
+@para_check.check_op_params(para_check.REQUIRED_INPUT, para_check.REQUIRED_INPUT, para_check.REQUIRED_OUTPUT, para_check.KERNEL_NAME)
+def add_custom(x, y, z, kernel_name="add_custom", impl_mode=""):
+    if get_current_build_config("enable_op_prebuild"):
+        return
+    __inputs__, __outputs__, __attrs__ = _build_args(x, y, z)
+    options = get_dtype_fmt_options(__inputs__, __outputs__)
+    options += ["-x", "cce"]
+    ccec = os.environ.get('CCEC_REAL_PATH')
+    if ccec is None:
+        ccec = shutil.which("ccec")
+    if ccec != None:
+        ccec_path = os.path.dirname(ccec)
+        tikcpp_path = os.path.realpath(os.path.join(ccec_path, "..", "..", "tikcpp"))
+    else:
+        tikcpp_path = os.path.realpath("/usr/local/Ascend/latest/compiler/tikcpp")
+    options.append("-I" + tikcpp_path)
+    options.append("-I" + os.path.join(tikcpp_path, "tikcfw"))
+    options.append("-I" + os.path.join(tikcpp_path, "tikcfw", "impl"))
+    options.append("-I" + os.path.join(tikcpp_path, "tikcfw", "interface"))
+    options.append("-I" + os.path.join(PYF_PATH, "..", "ascendc", "common"))
+    if impl_mode == "high_performance":
+        options.append("-DHIGH_PERFORMANCE=1")
+    elif impl_mode == "high_precision":
+        options.append("-DHIGH_PRECISION=1")
+    if get_default_build_config("enable_deterministic_mode") == 1:
+        options.append("-DDETEMINISTIC_MODE=1")
+    origin_func_name = "add_custom"
+    ascendc_src_dir = "add_custom"
+    ascendc_src_file = "add_custom.cpp"
+    src = os.path.join(PYF_PATH, "..", "ascendc", ascendc_src_dir, ascendc_src_file)
+    if not os.path.exists(src):
+        src = os.path.join(PYF_PATH, ascendc_src_file)
+
+    print("start compile Ascend C operator AddCustom. kernel name is add_custom")
+    op_type = "AddCustom"
+    code_channel = get_code_channel(src, kernel_name, op_type, options)
+    op_info = OpInfo(kernel_name = kernel_name, op_type = op_type, inputs = __inputs__, outputs = __outputs__,\
+        attrs = __attrs__, impl_mode = impl_mode, origin_inputs=[x, y], origin_outputs = [z])
+    compile_op(src, origin_func_name, op_info, options, code_channel, '{}')
+
+def op_select_format(x, y, z, impl_mode=""):
+    __inputs__, __outputs__, __attrs__ = _build_args(x, y, z)
+    result = check_op_cap("op_select_format", "AddCustom", __inputs__, __outputs__, __attrs__)
+    return result.decode("utf-8")
+
+def get_op_specific_info(x, y, z, impl_mode=""):
+    __inputs__, __outputs__, __attrs__ = _build_args(x, y, z)
+    result = check_op_cap("get_op_specific_info", "AddCustom", __inputs__, __outputs__, __attrs__)
+    return result.decode("utf-8")