mindspore-2.3.0rc1-cp38-none-any.whl → mindspore-2.3.0rc2-cp38-none-any.whl
This diff covers publicly available package versions released to a supported registry. It is provided for informational purposes only and reflects the changes between the two versions as they appear in that registry.
Potentially problematic release: this version of mindspore might be problematic.
- mindspore/.commit_id +1 -1
- mindspore/__init__.py +1 -1
- mindspore/_akg/akg/utils/tbe_codegen_utils.py +13 -3
- mindspore/_c_dataengine.cpython-38-aarch64-linux-gnu.so +0 -0
- mindspore/_c_expression.cpython-38-aarch64-linux-gnu.so +0 -0
- mindspore/_checkparam.py +20 -0
- mindspore/_extends/parse/parser.py +1 -1
- mindspore/_extends/parse/standard_method.py +6 -5
- mindspore/_mindspore_offline_debug.cpython-38-aarch64-linux-gnu.so +0 -0
- mindspore/amp.py +5 -5
- mindspore/bin/cache_admin +0 -0
- mindspore/bin/cache_server +0 -0
- mindspore/boost/boost_cell_wrapper.py +1 -1
- mindspore/boost/group_loss_scale_manager.py +1 -1
- mindspore/common/__init__.py +4 -2
- mindspore/common/_register_for_recompute.py +48 -0
- mindspore/common/_stub_tensor.py +1 -0
- mindspore/common/api.py +56 -4
- mindspore/common/dtype.py +5 -3
- mindspore/common/dump.py +2 -2
- mindspore/common/hook_handle.py +51 -4
- mindspore/common/initializer.py +1 -1
- mindspore/common/jit_config.py +17 -6
- mindspore/common/parameter.py +7 -2
- mindspore/common/recompute.py +247 -0
- mindspore/common/sparse_tensor.py +2 -2
- mindspore/common/symbol.py +1 -1
- mindspore/common/tensor.py +74 -36
- mindspore/communication/__init__.py +3 -3
- mindspore/communication/management.py +30 -30
- mindspore/context.py +28 -15
- mindspore/dataset/__init__.py +5 -5
- mindspore/dataset/audio/__init__.py +2 -2
- mindspore/dataset/audio/transforms.py +51 -51
- mindspore/dataset/callback/ds_callback.py +2 -2
- mindspore/dataset/engine/cache_client.py +1 -1
- mindspore/dataset/engine/datasets.py +3 -3
- mindspore/dataset/engine/datasets_audio.py +14 -14
- mindspore/dataset/engine/datasets_standard_format.py +3 -3
- mindspore/dataset/engine/datasets_text.py +38 -38
- mindspore/dataset/engine/datasets_user_defined.py +3 -3
- mindspore/dataset/engine/datasets_vision.py +68 -68
- mindspore/dataset/text/__init__.py +3 -3
- mindspore/dataset/text/transforms.py +26 -26
- mindspore/dataset/transforms/__init__.py +1 -1
- mindspore/dataset/vision/__init__.py +3 -3
- mindspore/dataset/vision/transforms.py +92 -92
- mindspore/dataset/vision/utils.py +1 -1
- mindspore/experimental/optim/adadelta.py +2 -2
- mindspore/experimental/optim/adagrad.py +2 -2
- mindspore/experimental/optim/adam.py +2 -2
- mindspore/experimental/optim/adamax.py +2 -2
- mindspore/experimental/optim/adamw.py +2 -2
- mindspore/experimental/optim/asgd.py +2 -2
- mindspore/experimental/optim/lr_scheduler.py +24 -20
- mindspore/experimental/optim/nadam.py +2 -2
- mindspore/experimental/optim/optimizer.py +1 -1
- mindspore/experimental/optim/radam.py +2 -2
- mindspore/experimental/optim/rmsprop.py +2 -2
- mindspore/experimental/optim/rprop.py +2 -2
- mindspore/experimental/optim/sgd.py +2 -2
- mindspore/hal/stream.py +2 -0
- mindspore/include/mindapi/base/types.h +5 -0
- mindspore/lib/libdnnl.so.2 +0 -0
- mindspore/lib/libmindspore.so +0 -0
- mindspore/lib/libmindspore_backend.so +0 -0
- mindspore/lib/libmindspore_common.so +0 -0
- mindspore/lib/libmindspore_core.so +0 -0
- mindspore/lib/libmindspore_glog.so.0 +0 -0
- mindspore/lib/libmindspore_gpr.so.15 +0 -0
- mindspore/lib/libmindspore_grpc.so.15 +0 -0
- mindspore/lib/libmindspore_shared_lib.so +0 -0
- mindspore/lib/libopencv_core.so.4.5 +0 -0
- mindspore/lib/libopencv_imgcodecs.so.4.5 +0 -0
- mindspore/lib/libopencv_imgproc.so.4.5 +0 -0
- mindspore/lib/plugin/ascend/custom_aicpu_ops/op_impl/cpu/aicpu_kernel/impl/libcust_cpu_kernels.so +0 -0
- mindspore/lib/plugin/ascend/custom_aicpu_ops/op_impl/cpu/config/cust_aicpu_kernel.json +6 -6
- mindspore/lib/plugin/ascend/custom_aicpu_ops/op_proto/libcust_op_proto.so +0 -0
- mindspore/lib/plugin/ascend/libdvpp_utils.so +0 -0
- mindspore/lib/plugin/ascend/liblowlatency_collective.so +0 -0
- mindspore/lib/plugin/ascend/libmindspore_cpu_kernels.so +0 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/asdops/bin/DeviceBin +0 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/asdops/bin/PkgInspect +0 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/asdops/bin/op_man +0 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/asdops/device/ascend910b/bin/ascend910b.bin +101787 -98559
- mindspore/lib/plugin/ascend/ms_kernels_internal/asdops/host/libasdops_cann_host.so +0 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/asdops/host/libasdops_host.so +0 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/asdops/include/asdops/base/op_register.h +2 -2
- mindspore/lib/plugin/ascend/ms_kernels_internal/asdops/include/asdops/params/mix.h +8 -1
- mindspore/lib/plugin/ascend/ms_kernels_internal/asdops/include/asdops/params/norm.h +5 -3
- mindspore/lib/plugin/ascend/ms_kernels_internal/asdops/include/asdops/params/reduce.h +2 -2
- mindspore/lib/plugin/ascend/ms_kernels_internal/asdops/include/asdops/utils/rt/backend/backend.h +3 -3
- mindspore/lib/plugin/ascend/ms_kernels_internal/asdops/include/asdops/utils/rt/backend/rtbackend.h +3 -3
- mindspore/lib/plugin/ascend/ms_kernels_internal/asdops/include/asdops/utils/rt/base/types.h +0 -1
- mindspore/lib/plugin/ascend/ms_kernels_internal/asdops/include/asdops/utils/rt/module/module.h +3 -3
- mindspore/lib/plugin/ascend/ms_kernels_internal/asdops/include/asdops/utils/svector/svector.h +3 -2
- mindspore/lib/plugin/ascend/ms_kernels_internal/asdops/lib/libasdops.so +0 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/asdops/lib/libasdops_static.a +0 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/add/tiling/add_tiling.h +9 -9
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/apply_rotary_pos_emb/apply_rotary_pos_emb_impl.h +2 -6
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/apply_rotary_pos_emb/kernel/apply_rotary_pos_emb.h +2 -2
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/apply_rotary_pos_emb/kernel/apply_rotary_pos_emb_base.h +460 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/apply_rotary_pos_emb/kernel/apply_rotary_pos_emb_bf16.h +217 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/apply_rotary_pos_emb/kernel/apply_rotary_pos_emb_fp16.h +116 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/apply_rotary_pos_emb/kernel/apply_rotary_pos_emb_tiling.h +16 -24
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/apply_rotary_pos_emb/kernel/apply_rotary_pos_emb_value.h +27 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/asdop/asd_op_impl.h +0 -4
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/{attention/FlashAttentionScore_impl.h → flash_attention_score/flash_attention_score_impl.h} +2 -1
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/{attention/bs_attention_tiling.h → flash_attention_score/flash_attention_score_tiling.h} +15 -19
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/gelu/tiling/gelu_tiling.h +7 -9
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/lccl/lccl_wrapper.h +58 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/matmul/matmul_impl.h +19 -8
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/{matmul → matmul_common}/pp_matmul_common_tiling.h +18 -8
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/{matmul → matmul_common}/pp_matmul_info.h +7 -4
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/{matmul → matmul_common}/tiling_data.h +44 -6
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/matmul_common/tiling_utils.h +65 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/matmul_stridedslice/matmul_stridedslice_fusion_impl.h +10 -6
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/op_param.h +4 -1
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/paged_attention/kernel/paged_attention_mix_hwsync.h +41 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/{attention/PagedAttention_impl.h → paged_attention/paged_attention_impl.h} +1 -1
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/paged_attention/paged_attention_tiling.h +63 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/param/add_param.h +2 -2
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/{attention_param.h → param/attention_param.h} +11 -2
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/param/matmul_ext_param.h +37 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/param/sub_param.h +45 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/reshape_and_cache/reshape_and_cache_tiling.h +1 -2
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/rms_norm/kernel/rms_norm.h +23 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/rms_norm/kernel/rms_norm_base.h +175 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/rms_norm/kernel/rms_norm_normal.h +276 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/rms_norm/kernel/rms_norm_split_d.h +280 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/rms_norm/kernel/tiling_data.h +35 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/rms_norm/rms_norm_impl.h +45 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/sub/kernel/sub_kernel.h +20 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/sub/sub_impl.h +47 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/sub/sub_tiling.h +25 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/tune_repo/matmul_table.h +323 -23
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/types.h +15 -4
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/utils/log/log_tiling.h +8 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/lib/libAdd_impl.so +0 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/lib/libSub_impl.so +0 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/lib/libadd_layernorm_impl.so +0 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/lib/libadd_rms_norm_impl.so +0 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/lib/libapply_rotary_pos_emb_impl.so +0 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/lib/libcast_impl.so +0 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/lib/libgelu_impl.so +0 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/lib/libmatmul_impl.so +0 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/lib/libmatmul_stridedslice_fusion_impl.so +0 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/lib/libms_kernels_internal.so +0 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/lib/libnot_equal_impl.so +0 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/lib/libreshape_and_cache_impl.so +0 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/lib/librms_norm_impl.so +0 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/op_kernels/ascend910b/BSAttention/flash_attention_score_bf16_bnsd_full_mix.o +0 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/op_kernels/ascend910b/BSAttention/flash_attention_score_bf16_bnsd_tri_mix.o +0 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/op_kernels/ascend910b/BSAttention/flash_attention_score_bf16_bsh_full_mix.o +0 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/op_kernels/ascend910b/BSAttention/flash_attention_score_bf16_bsh_tri_mix.o +0 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/op_kernels/ascend910b/BSAttention/flash_attention_score_fp16_bnsd_full_mix.o +0 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/op_kernels/ascend910b/BSAttention/flash_attention_score_fp16_bnsd_tri_mix.o +0 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/op_kernels/ascend910b/BSAttention/flash_attention_score_fp16_bsh_full_mix.o +0 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/op_kernels/ascend910b/BSAttention/flash_attention_score_fp16_bsh_tri_mix.o +0 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/op_kernels/ascend910b/BSAttention/paged_attention_bf16_bnsd_full_mix.o +0 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/op_kernels/ascend910b/BSAttention/paged_attention_bf16_bsh_full_mix.o +0 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/op_kernels/ascend910b/BSAttention/paged_attention_fp16_bnsd_full_mix.o +0 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/op_kernels/ascend910b/BSAttention/paged_attention_fp16_bsh_full_mix.o +0 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/lccl/include/lcal.h +22 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/lccl/include/lcal_comm.h +70 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/lccl/include/lcal_types.h +103 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/lccl/include/lccl.h +47 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/lccl/include/lccl_wrapper.h +58 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/lccl/include/lcoc.h +154 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/lccl/lib/liblcal.so +0 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/lccl/lib/liblccl_wrapper.so +0 -0
- mindspore/lib/plugin/libmindspore_ascend.so.2 +0 -0
- mindspore/log.py +2 -2
- mindspore/mint/__init__.py +457 -0
- mindspore/mint/nn/__init__.py +430 -0
- mindspore/mint/nn/functional.py +424 -0
- mindspore/mint/optim/__init__.py +24 -0
- mindspore/mint/optim/adamw.py +186 -0
- mindspore/multiprocessing/__init__.py +4 -0
- mindspore/nn/__init__.py +3 -0
- mindspore/nn/cell.py +51 -47
- mindspore/nn/extend/__init__.py +29 -0
- mindspore/nn/extend/basic.py +140 -0
- mindspore/nn/extend/embedding.py +143 -0
- mindspore/nn/extend/layer/__init__.py +27 -0
- mindspore/nn/extend/layer/normalization.py +107 -0
- mindspore/nn/extend/pooling.py +117 -0
- mindspore/nn/generator.py +297 -0
- mindspore/nn/layer/basic.py +109 -1
- mindspore/nn/layer/container.py +2 -2
- mindspore/nn/layer/conv.py +6 -6
- mindspore/nn/layer/embedding.py +1 -1
- mindspore/nn/layer/normalization.py +21 -43
- mindspore/nn/layer/padding.py +4 -0
- mindspore/nn/optim/ada_grad.py +2 -2
- mindspore/nn/optim/adadelta.py +1 -1
- mindspore/nn/optim/adafactor.py +1 -1
- mindspore/nn/optim/adam.py +7 -7
- mindspore/nn/optim/adamax.py +2 -2
- mindspore/nn/optim/adasum.py +2 -2
- mindspore/nn/optim/asgd.py +2 -2
- mindspore/nn/optim/ftrl.py +1 -1
- mindspore/nn/optim/lamb.py +3 -3
- mindspore/nn/optim/lars.py +1 -1
- mindspore/nn/optim/lazyadam.py +2 -2
- mindspore/nn/optim/momentum.py +2 -2
- mindspore/nn/optim/optimizer.py +2 -2
- mindspore/nn/optim/proximal_ada_grad.py +2 -2
- mindspore/nn/optim/rmsprop.py +2 -2
- mindspore/nn/optim/rprop.py +2 -2
- mindspore/nn/optim/sgd.py +2 -2
- mindspore/nn/optim/thor.py +2 -2
- mindspore/nn/wrap/cell_wrapper.py +9 -9
- mindspore/nn/wrap/grad_reducer.py +5 -5
- mindspore/ops/_grad_experimental/grad_comm_ops.py +4 -2
- mindspore/ops/_vmap/vmap_grad_nn_ops.py +41 -2
- mindspore/ops/_vmap/vmap_math_ops.py +27 -8
- mindspore/ops/_vmap/vmap_nn_ops.py +66 -8
- mindspore/ops/auto_generate/cpp_create_prim_instance_helper.py +73 -1
- mindspore/ops/auto_generate/gen_arg_dtype_cast.py +12 -3
- mindspore/ops/auto_generate/gen_arg_handler.py +24 -0
- mindspore/ops/auto_generate/gen_extend_func.py +274 -0
- mindspore/ops/auto_generate/gen_ops_def.py +889 -22
- mindspore/ops/auto_generate/gen_ops_prim.py +3541 -253
- mindspore/ops/auto_generate/pyboost_inner_prim.py +282 -0
- mindspore/ops/composite/multitype_ops/_compile_utils.py +2 -1
- mindspore/ops/composite/multitype_ops/_constexpr_utils.py +9 -0
- mindspore/ops/extend/__init__.py +9 -1
- mindspore/ops/extend/array_func.py +134 -27
- mindspore/ops/extend/math_func.py +3 -3
- mindspore/ops/extend/nn_func.py +363 -2
- mindspore/ops/function/__init__.py +19 -2
- mindspore/ops/function/array_func.py +463 -439
- mindspore/ops/function/clip_func.py +7 -18
- mindspore/ops/function/grad/grad_func.py +5 -5
- mindspore/ops/function/linalg_func.py +4 -4
- mindspore/ops/function/math_func.py +260 -243
- mindspore/ops/function/nn_func.py +825 -62
- mindspore/ops/function/random_func.py +73 -4
- mindspore/ops/function/sparse_unary_func.py +1 -1
- mindspore/ops/function/vmap_func.py +1 -1
- mindspore/ops/functional.py +2 -2
- mindspore/ops/op_info_register.py +1 -31
- mindspore/ops/operations/__init__.py +2 -3
- mindspore/ops/operations/_grad_ops.py +2 -107
- mindspore/ops/operations/_inner_ops.py +5 -5
- mindspore/ops/operations/_sequence_ops.py +2 -2
- mindspore/ops/operations/array_ops.py +11 -233
- mindspore/ops/operations/comm_ops.py +32 -32
- mindspore/ops/operations/custom_ops.py +7 -89
- mindspore/ops/operations/manually_defined/ops_def.py +329 -4
- mindspore/ops/operations/math_ops.py +13 -163
- mindspore/ops/operations/nn_ops.py +9 -316
- mindspore/ops/operations/random_ops.py +1 -1
- mindspore/ops/operations/sparse_ops.py +3 -3
- mindspore/ops/primitive.py +2 -2
- mindspore/ops_generate/arg_dtype_cast.py +12 -3
- mindspore/ops_generate/arg_handler.py +24 -0
- mindspore/ops_generate/gen_ops_inner_prim.py +2 -0
- mindspore/ops_generate/gen_pyboost_func.py +13 -6
- mindspore/ops_generate/pyboost_utils.py +2 -17
- mindspore/parallel/__init__.py +3 -2
- mindspore/parallel/_auto_parallel_context.py +106 -1
- mindspore/parallel/_parallel_serialization.py +34 -2
- mindspore/parallel/_utils.py +16 -0
- mindspore/parallel/algo_parameter_config.py +4 -4
- mindspore/parallel/checkpoint_transform.py +249 -77
- mindspore/parallel/cluster/process_entity/_api.py +1 -1
- mindspore/parallel/parameter_broadcast.py +1 -1
- mindspore/parallel/shard.py +1 -1
- mindspore/profiler/parser/ascend_analysis/fwk_cann_parser.py +1 -0
- mindspore/profiler/parser/ascend_analysis/profiler_info_parser.py +17 -5
- mindspore/profiler/parser/ascend_msprof_exporter.py +3 -3
- mindspore/profiler/parser/ascend_msprof_generator.py +10 -3
- mindspore/profiler/parser/ascend_op_generator.py +26 -9
- mindspore/profiler/parser/ascend_timeline_generator.py +7 -4
- mindspore/profiler/parser/profiler_info.py +11 -1
- mindspore/profiler/profiling.py +13 -5
- mindspore/rewrite/api/node.py +12 -12
- mindspore/rewrite/api/symbol_tree.py +11 -11
- mindspore/run_check/_check_version.py +1 -1
- mindspore/safeguard/rewrite_obfuscation.py +2 -2
- mindspore/train/amp.py +4 -4
- mindspore/train/anf_ir_pb2.py +8 -2
- mindspore/train/callback/_backup_and_restore.py +2 -2
- mindspore/train/callback/_callback.py +4 -4
- mindspore/train/callback/_checkpoint.py +2 -2
- mindspore/train/callback/_early_stop.py +2 -2
- mindspore/train/callback/_landscape.py +4 -4
- mindspore/train/callback/_loss_monitor.py +2 -2
- mindspore/train/callback/_on_request_exit.py +2 -2
- mindspore/train/callback/_reduce_lr_on_plateau.py +2 -2
- mindspore/train/callback/_summary_collector.py +2 -2
- mindspore/train/callback/_time_monitor.py +2 -2
- mindspore/train/dataset_helper.py +8 -3
- mindspore/train/loss_scale_manager.py +2 -2
- mindspore/train/metrics/metric.py +3 -3
- mindspore/train/mind_ir_pb2.py +22 -17
- mindspore/train/model.py +15 -15
- mindspore/train/serialization.py +18 -18
- mindspore/train/summary/summary_record.py +7 -7
- mindspore/train/train_thor/convert_utils.py +3 -3
- mindspore/version.py +1 -1
- {mindspore-2.3.0rc1.dist-info → mindspore-2.3.0rc2.dist-info}/METADATA +1 -1
- {mindspore-2.3.0rc1.dist-info → mindspore-2.3.0rc2.dist-info}/RECORD +309 -262
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/matmul_stridedslice/tiling_data.h +0 -59
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/op_kernels/ascend910b/BSAttention/FlashAttentionScore_bf16_BNSD_mix.o +0 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/op_kernels/ascend910b/BSAttention/FlashAttentionScore_bf16_BSH_mix.o +0 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/op_kernels/ascend910b/BSAttention/FlashAttentionScore_fp16_BNSD_mix.o +0 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/op_kernels/ascend910b/BSAttention/FlashAttentionScore_fp16_BSH_mix.o +0 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/op_kernels/ascend910b/BSAttention/PagedAttention_bf16_BNSD_mix.o +0 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/op_kernels/ascend910b/BSAttention/PagedAttention_bf16_BSH_mix.o +0 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/op_kernels/ascend910b/BSAttention/PagedAttention_fp16_BNSD_mix.o +0 -0
- mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/op_kernels/ascend910b/BSAttention/PagedAttention_fp16_BSH_mix.o +0 -0
- /mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/{attention/bs_attention_mix_hwsync.h → flash_attention_score/kernel/flash_attention_score_mix_hwsync.h} +0 -0
- {mindspore-2.3.0rc1.dist-info → mindspore-2.3.0rc2.dist-info}/WHEEL +0 -0
- {mindspore-2.3.0rc1.dist-info → mindspore-2.3.0rc2.dist-info}/entry_points.txt +0 -0
- {mindspore-2.3.0rc1.dist-info → mindspore-2.3.0rc2.dist-info}/top_level.txt +0 -0
mindspore/train/callback/_checkpoint.py CHANGED

@@ -114,13 +114,13 @@ class CheckpointConfig:
  >>> from mindspore.train import Model, CheckpointConfig, ModelCheckpoint
  >>>
  >>> # Define the network structure of LeNet5. Refer to
- >>> # https://gitee.com/mindspore/docs/blob/
+ >>> # https://gitee.com/mindspore/docs/blob/master/docs/mindspore/code/lenet.py
  >>> net = LeNet5()
  >>> loss = nn.SoftmaxCrossEntropyWithLogits(sparse=True, reduction='mean')
  >>> optim = nn.Momentum(net.trainable_params(), 0.01, 0.9)
  >>> model = Model(net, loss_fn=loss, optimizer=optim)
  >>> # Create the dataset taking MNIST as an example. Refer to
- >>> # https://gitee.com/mindspore/docs/blob/
+ >>> # https://gitee.com/mindspore/docs/blob/master/docs/mindspore/code/mnist.py
  >>> dataset = create_dataset()
  >>> config = CheckpointConfig(save_checkpoint_seconds=100, keep_checkpoint_per_n_minutes=5, saved_network=net)
  >>> config.save_checkpoint_steps
mindspore/train/callback/_early_stop.py CHANGED

@@ -85,13 +85,13 @@ class EarlyStopping(Callback):
  >>> from mindspore import nn
  >>> from mindspore.train import Model, EarlyStopping
  >>> # Define the network structure of LeNet5. Refer to
- >>> # https://gitee.com/mindspore/docs/blob/
+ >>> # https://gitee.com/mindspore/docs/blob/master/docs/mindspore/code/lenet.py
  >>> net = LeNet5()
  >>> loss = nn.SoftmaxCrossEntropyWithLogits(sparse=True, reduction='mean')
  >>> optim = nn.Momentum(net.trainable_params(), 0.01, 0.9)
  >>> model = Model(net, loss_fn=loss, optimizer=optim, metrics={"acc"})
  >>> # Create the dataset taking MNIST as an example. Refer to
- >>> # https://gitee.com/mindspore/docs/blob/
+ >>> # https://gitee.com/mindspore/docs/blob/master/docs/mindspore/code/mnist.py
  >>> dataset = create_dataset()
  >>> cb = EarlyStopping(monitor="acc", patience=3, verbose=True)
  >>> model.fit(10, dataset, callbacks=cb)
mindspore/train/callback/_landscape.py CHANGED

@@ -186,10 +186,10 @@ class SummaryLandscape:
  ... # If the device_target is Ascend, set the device_target to "Ascend"
  ... ms.set_context(mode=ms.GRAPH_MODE, device_target="GPU")
  ... # Create the dataset taking MNIST as an example. Refer to
- ... # https://gitee.com/mindspore/docs/blob/
+ ... # https://gitee.com/mindspore/docs/blob/master/docs/mindspore/code/mnist.py
  ... ds_train = create_dataset()
  ... # Define the network structure of LeNet5. Refer to
- ... # https://gitee.com/mindspore/docs/blob/
+ ... # https://gitee.com/mindspore/docs/blob/master/docs/mindspore/code/lenet.py
  ... network = LeNet5()
  ... net_loss = nn.SoftmaxCrossEntropyWithLogits(sparse=True, reduction="mean")
  ... net_opt = nn.Momentum(network.trainable_params(), 0.01, 0.9)

@@ -209,13 +209,13 @@ class SummaryLandscape:
  ... # Simple usage for visualization landscape:
  ... def callback_fn():
  ... # Define the network structure of LeNet5. Refer to
- ... # https://gitee.com/mindspore/docs/blob/
+ ... # https://gitee.com/mindspore/docs/blob/master/docs/mindspore/code/lenet.py
  ... network = LeNet5()
  ... net_loss = nn.SoftmaxCrossEntropyWithLogits(sparse=True, reduction="mean")
  ... metrics = {"Loss": Loss()}
  ... model = Model(network, net_loss, metrics=metrics)
  ... # Create the dataset taking MNIST as an example. Refer to
- ... # https://gitee.com/mindspore/docs/blob/
+ ... # https://gitee.com/mindspore/docs/blob/master/docs/mindspore/code/mnist.py
  ... ds_eval = create_dataset()
  ... return model, network, ds_eval, metrics
  ...
mindspore/train/callback/_loss_monitor.py CHANGED

@@ -43,13 +43,13 @@ class LossMonitor(Callback):
  >>> from mindspore.train import Model, LossMonitor
  >>>
  >>> # Define the network structure of LeNet5. Refer to
- >>> # https://gitee.com/mindspore/docs/blob/
+ >>> # https://gitee.com/mindspore/docs/blob/master/docs/mindspore/code/lenet.py
  >>> net = LeNet5()
  >>> loss = nn.SoftmaxCrossEntropyWithLogits(sparse=True, reduction='mean')
  >>> optim = nn.Momentum(net.trainable_params(), 0.01, 0.9)
  >>> model = Model(net, loss_fn=loss, optimizer=optim)
  >>> # Create the dataset taking MNIST as an example. Refer to
- >>> # https://gitee.com/mindspore/docs/blob/
+ >>> # https://gitee.com/mindspore/docs/blob/master/docs/mindspore/code/mnist.py
  >>> dataset = create_dataset()
  >>> loss_monitor = LossMonitor()
  >>> model.train(10, dataset, callbacks=loss_monitor)
mindspore/train/callback/_on_request_exit.py CHANGED

@@ -55,13 +55,13 @@ class OnRequestExit(Callback):
  >>> import mindspore as ms
  >>>
  >>> # Define the network structure of LeNet5. Refer to
- >>> # https://gitee.com/mindspore/docs/blob/
+ >>> # https://gitee.com/mindspore/docs/blob/master/docs/mindspore/code/lenet.py
  >>> net = LeNet5()
  >>> loss = nn.SoftmaxCrossEntropyWithLogits(sparse=True, reduction='mean')
  >>> optim = nn.Momentum(net.trainable_params(), 0.01, 0.9)
  >>> model = Model(net, loss_fn=loss, optimizer=optim)
  >>> # Create the dataset taking MNIST as an example. Refer to
- >>> # https://gitee.com/mindspore/docs/blob/
+ >>> # https://gitee.com/mindspore/docs/blob/master/docs/mindspore/code/mnist.py
  >>> dataset = create_dataset()
  >>> on_request_exit = ms.train.OnRequestExit(file_name='LeNet5')
  >>> model.train(10, dataset, callbacks=on_request_exit)
mindspore/train/callback/_reduce_lr_on_plateau.py CHANGED

@@ -84,13 +84,13 @@ class ReduceLROnPlateau(Callback):
  >>> from mindspore import nn
  >>> from mindspore.train import Model, ReduceLROnPlateau
  >>> # Define the network structure of LeNet5. Refer to
- >>> # https://gitee.com/mindspore/docs/blob/
+ >>> # https://gitee.com/mindspore/docs/blob/master/docs/mindspore/code/lenet.py
  >>> net = LeNet5()
  >>> loss = nn.SoftmaxCrossEntropyWithLogits(sparse=True, reduction='mean')
  >>> optim = nn.Momentum(net.trainable_params(), 0.01, 0.9)
  >>> model = Model(net, loss_fn=loss, optimizer=optim, metrics={"acc"})
  >>> # Create the dataset taking MNIST as an example. Refer to
- >>> # https://gitee.com/mindspore/docs/blob/
+ >>> # https://gitee.com/mindspore/docs/blob/master/docs/mindspore/code/mnist.py
  >>> dataset = create_dataset()
  >>> cb = ReduceLROnPlateau(monitor="acc", patience=3, verbose=True)
  >>> model.fit(10, dataset, callbacks=cb)
mindspore/train/callback/_summary_collector.py CHANGED

@@ -190,10 +190,10 @@ class SummaryCollector(Callback):
  ... ms.set_context(mode=ms.GRAPH_MODE, device_target="Ascend")
  ... mnist_dataset_dir = '/path/to/mnist_dataset_directory'
  ... # Create the dataset taking MNIST as an example. Refer to
- ... # https://gitee.com/mindspore/docs/blob/
+ ... # https://gitee.com/mindspore/docs/blob/master/docs/mindspore/code/mnist.py
  ... ds_train = create_dataset()
  ... # Define the network structure of LeNet5. Refer to
- ... # https://gitee.com/mindspore/docs/blob/
+ ... # https://gitee.com/mindspore/docs/blob/master/docs/mindspore/code/lenet.py
  ... network = LeNet5(10)
  ... net_loss = nn.SoftmaxCrossEntropyWithLogits(sparse=True, reduction="mean")
  ... net_opt = nn.Momentum(network.trainable_params(), 0.01, 0.9)
mindspore/train/callback/_time_monitor.py CHANGED

@@ -43,13 +43,13 @@ class TimeMonitor(Callback):
  >>> from mindspore.train import Model, TimeMonitor
  >>>
  >>> # Define the network structure of LeNet5. Refer to
- >>> # https://gitee.com/mindspore/docs/blob/
+ >>> # https://gitee.com/mindspore/docs/blob/master/docs/mindspore/code/lenet.py
  >>> net = LeNet5()
  >>> loss = nn.SoftmaxCrossEntropyWithLogits(sparse=True, reduction='mean')
  >>> optim = nn.Momentum(net.trainable_params(), 0.01, 0.9)
  >>> model = Model(net, loss_fn=loss, optimizer=optim)
  >>> # Create the dataset taking MNIST as an example. Refer to
- >>> # https://gitee.com/mindspore/docs/blob/
+ >>> # https://gitee.com/mindspore/docs/blob/master/docs/mindspore/code/mnist.py
  >>> dataset = create_dataset()
  >>> time_monitor = TimeMonitor()
  >>> model.train(10, dataset, callbacks=time_monitor)
mindspore/train/dataset_helper.py CHANGED

@@ -28,7 +28,7 @@ from mindspore import context, nn
  from mindspore.train._utils import _exec_datagraph, _get_types_and_shapes, _construct_tensor_list
  from mindspore.parallel._utils import _get_device_num, _get_global_rank, _need_to_full, \
      _to_full_shapes, _get_pipeline_stages, _change_symbols_for_parallel, _is_in_auto_parallel_mode, \
-     _origin_shapes
+     _origin_shapes, _dynamic_shape_for_dataset
  from mindspore.parallel._ps_context import _is_role_sched
  from mindspore.ops import operations as P
  from mindspore.common.auto_dynamic_shape import _auto_dynamic_shape

@@ -136,7 +136,12 @@ def _generate_network_with_dataset(network, dataset_helper, queue_name):

  if network.get_inputs() and None not in network.get_inputs():
      if _is_in_auto_parallel_mode():
-
+         # here, the dataset shapes has been processed by full_shape(), so need to resume it to original shape
+         # the _check_inputs() will change static origin_shape to dynamic shape
+         # after _check_inputs(), convert dataset_shapes to dynamic shape
+         origin_shape = _origin_shapes(dataset_shapes)
+         _check_inputs(network.get_inputs(), origin_shape, dataset_types)
+         dataset_shapes = _dynamic_shape_for_dataset(dataset_shapes, origin_shape)
      else:
          _check_inputs(network.get_inputs(), dataset_shapes, dataset_types)
  elif context.get_context("mode") == context.PYNATIVE_MODE:

@@ -191,7 +196,7 @@ def _get_dataset_aux(dataset):
  def connect_network_with_dataset(network, dataset_helper):
      """
      Connect the `network` with dataset in `dataset_helper`. Only supported in `sink mode
-     <https://mindspore.cn/tutorials/experts/en/
+     <https://mindspore.cn/tutorials/experts/en/master/optimize/execution_opt.html>`_, (dataset_sink_mode=True).

      Args:
          network (Cell): The training network for dataset.
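A self-contained toy sketch of the shape bookkeeping that the added comments above describe. The helper names only mimic the MindSpore internals named in the diff (_to_full_shapes, _origin_shapes, _dynamic_shape_for_dataset); the real signatures and details may differ, so treat this purely as an illustration of the idea: full-batch sink shapes are restored to per-device shapes for validation, and any dimension that validation marks as dynamic is mirrored back onto the sink shapes.

```python
# Toy illustration only; these helpers mimic, but are not, the MindSpore
# internals referenced in the diff above.
from typing import List

def to_full_shapes(shapes: List[List[int]], device_num: int) -> List[List[int]]:
    # In sink mode under auto/semi-auto parallel the batch dim is scaled
    # up to the full (global) batch (assumption based on _to_full_shapes).
    return [[s[0] * device_num] + s[1:] for s in shapes]

def origin_shapes(full_shapes: List[List[int]], device_num: int) -> List[List[int]]:
    # Restore per-device shapes before validating against network.get_inputs().
    return [[s[0] // device_num] + s[1:] for s in full_shapes]

def dynamic_shape_for_dataset(full_shapes, checked_shapes):
    # Mirror any dimension that validation marked as dynamic (-1) back onto
    # the full sink shapes.
    return [[-1 if c == -1 else f for f, c in zip(fs, cs)]
            for fs, cs in zip(full_shapes, checked_shapes)]

full = to_full_shapes([[32, 3, 224, 224]], device_num=8)      # [[256, 3, 224, 224]]
origin = origin_shapes(full, device_num=8)                    # [[32, 3, 224, 224]]
print(dynamic_shape_for_dataset(full, [[-1, 3, 224, 224]]))   # [[-1, 3, 224, 224]]
```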
mindspore/train/loss_scale_manager.py CHANGED

@@ -62,7 +62,7 @@ class FixedLossScaleManager(LossScaleManager):
  >>> from mindspore import amp, nn
  >>>
  >>> # Define the network structure of LeNet5. Refer to
- >>> # https://gitee.com/mindspore/docs/blob/
+ >>> # https://gitee.com/mindspore/docs/blob/master/docs/mindspore/code/lenet.py
  >>> net = LeNet5()
  >>> loss_scale = 1024.0
  >>> loss_scale_manager = amp.FixedLossScaleManager(loss_scale, False)

@@ -136,7 +136,7 @@ class DynamicLossScaleManager(LossScaleManager):
  >>> from mindspore import amp, nn
  >>>
  >>> # Define the network structure of LeNet5. Refer to
- >>> # https://gitee.com/mindspore/docs/blob/
+ >>> # https://gitee.com/mindspore/docs/blob/master/docs/mindspore/code/lenet.py
  >>> net = LeNet5()
  >>> loss_scale_manager = amp.DynamicLossScaleManager()
  >>> optim = nn.Momentum(params=net.trainable_params(), learning_rate=0.1, momentum=0.9)
mindspore/train/metrics/metric.py CHANGED

@@ -200,7 +200,7 @@ class Metric(metaclass=ABCMeta):

  Tutorial Examples:
      - `Evaluation Metrics - Customized Metrics
-       <https://mindspore.cn/tutorials/en/
+       <https://mindspore.cn/tutorials/en/master/advanced/model/metric.html#customized-metrics>`_
  """
  raise NotImplementedError('Must define clear function to use this base class')

@@ -214,7 +214,7 @@ class Metric(metaclass=ABCMeta):

  Tutorial Examples:
      - `Evaluation Metrics - Customized Metrics
-       <https://mindspore.cn/tutorials/en/
+       <https://mindspore.cn/tutorials/en/master/advanced/model/metric.html#customized-metrics>`_
  """
  raise NotImplementedError('Must define eval function to use this base class')

@@ -231,7 +231,7 @@ class Metric(metaclass=ABCMeta):

  Tutorial Examples:
      - `Evaluation Metrics - Customized Metrics
-       <https://mindspore.cn/tutorials/en/
+       <https://mindspore.cn/tutorials/en/master/advanced/model/metric.html#customized-metrics>`_
  """
  raise NotImplementedError('Must define update function to use this base class')
mindspore/train/mind_ir_pb2.py CHANGED

@@ -20,7 +20,7 @@ DESCRIPTOR = _descriptor.FileDescriptor(
  syntax='proto2',
  serialized_options=None,
  create_key=_descriptor._internal_create_key,
-
serialized_pb=b'\n\rmind_ir.proto\x12\x07mind_ir\"\x88\t\n\x0e\x41ttributeProto\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\t\n\x01\x66\x18\x02 \x01(\x02\x12\t\n\x01i\x18\x03 \x01(\x03\x12\t\n\x01\x64\x18\x04 \x01(\x01\x12\t\n\x01s\x18\x05 \x01(\x0c\x12\x1f\n\x01t\x18\x06 \x01(\x0b\x32\x14.mind_ir.TensorProto\x12\x1e\n\x01g\x18\x07 \x01(\x0b\x32\x13.mind_ir.GraphProto\x12\x0e\n\x06\x66loats\x18\x08 \x03(\x02\x12\x0f\n\x07\x64oubles\x18\t \x03(\x01\x12\x0c\n\x04ints\x18\n \x03(\x03\x12\x0f\n\x07strings\x18\x0b \x03(\x0c\x12%\n\x07tensors\x18\x0c \x03(\x0b\x32\x14.mind_ir.TensorProto\x12#\n\x06graphs\x18\r \x03(\x0b\x32\x13.mind_ir.GraphProto\x12\x12\n\ndoc_string\x18\x0e \x01(\t\x12\x15\n\rref_attr_name\x18\x0f \x01(\t\x12\x33\n\x04type\x18\x10 \x01(\x0e\x32%.mind_ir.AttributeProto.AttributeType\x12\'\n\x06values\x18\x11 \x03(\x0b\x32\x17.mind_ir.AttributeProto\x12\x36\n\x08seq_info\x18\x12 \x01(\x0b\x32$.mind_ir.AttributeProto.SeqInfoProto\x12&\n\x07\x66unctor\x18\x13 \x01(\x0b\x32\x15.mind_ir.FunctorProto\x1aT\n\x0cSeqInfoProto\x12\x12\n\nis_dyn_len\x18\x01 \x01(\x08\x12\x30\n\x0ftuple_elem_item\x18\x02 \x01(\x0b\x32\x17.mind_ir.AttributeProto\"\xaf\x04\n\rAttributeType\x12\r\n\tUNDEFINED\x10\x00\x12\t\n\x05\x46LOAT\x10\x01\x12\t\n\x05UINT8\x10\x02\x12\x08\n\x04INT8\x10\x03\x12\n\n\x06UINT16\x10\x04\x12\t\n\x05INT16\x10\x05\x12\t\n\x05INT32\x10\x06\x12\t\n\x05INT64\x10\x07\x12\n\n\x06STRING\x10\x08\x12\x08\n\x04\x42OOL\x10\t\x12\x0b\n\x07\x46LOAT16\x10\n\x12\n\n\x06\x44OUBLE\x10\x0b\x12\n\n\x06UINT32\x10\x0c\x12\n\n\x06UINT64\x10\r\x12\r\n\tCOMPLEX64\x10\x0e\x12\x0e\n\nCOMPLEX128\x10\x0f\x12\x0c\n\x08\x42\x46LOAT16\x10\x10\x12\n\n\x06TENSOR\x10\x11\x12\t\n\x05GRAPH\x10\x12\x12\x0b\n\x07TENSORS\x10\x13\x12\t\n\x05TUPLE\x10\x14\x12\x08\n\x04LIST\x10\x15\x12\x08\n\x04\x44ICT\x10\x16\x12\n\n\x06UMONAD\x10\x17\x12\x0b\n\x07IOMONAD\x10\x18\x12\x08\n\x04NONE\x10\x19\x12\x14\n\x10PRIMITIVECLOSURE\x10\x1a\x12\x14\n\x10\x46UNCGRAPHCLOSURE\x10\x1b\x12\x12\n\x0ePARTIALCLOSURE\x10\x1c\x12\x14\n\x10UNIONFUNCCLOSURE\x10\x1d\x12\x0e\n\nCSR_TENSOR\x10\x1e\x12\x0e\n\nCOO_TENSOR\x10\x1f\x12\x0e\n\nROW_TENSOR\x10 \x12\x0e\n\nCLASS_TYPE\x10!\x12\x0e\n\nNAME_SPACE\x10\"\x12\n\n\x06SYMBOL\x10#\x12\r\n\tTYPE_NULL\x10$\x12\x0e\n\nMAP_TENSOR\x10%\x12\x0b\n\x07\x46UNCTOR\x10&\x12\n\n\x06SCALAR\x10\'\"\x9d\x01\n\x0c\x46unctorProto\x12/\n\x04type\x18\x01 \x01(\x0e\x32!.mind_ir.FunctorProto.FunctorType\x12\x0c\n\x04name\x18\x02 \x01(\t\x12\'\n\x06values\x18\x03 \x03(\x0b\x32\x17.mind_ir.AttributeProto\"%\n\x0b\x46unctorType\x12\x16\n\x12SHAPE_CALC_FUNCTOR\x10\x01\"\x98\x01\n\x0eValueInfoProto\x12\x0c\n\x04name\x18\x01 \x01(\t\x12$\n\x06tensor\x18\x02 \x03(\x0b\x32\x14.mind_ir.TensorProto\x12\x12\n\ndoc_string\x18\x03 \x01(\t\x12\x12\n\ndenotation\x18\x04 \x01(\t\x12*\n\tattr_info\x18\x05 \x01(\x0b\x32\x17.mind_ir.AttributeProto\"\xf3\x01\n\tNodeProto\x12\r\n\x05input\x18\x01 \x03(\t\x12\x0e\n\x06output\x18\x02 \x03(\t\x12\x0c\n\x04name\x18\x03 \x01(\t\x12\x0f\n\x07op_type\x18\x04 \x01(\t\x12*\n\tattribute\x18\x05 \x03(\x0b\x32\x17.mind_ir.AttributeProto\x12\x12\n\ndoc_string\x18\x06 \x01(\t\x12\x0e\n\x06\x64omain\x18\x07 \x01(\t\x12*\n\tnode_attr\x18\x08 \x03(\x0b\x32\x17.mind_ir.AttributeProto\x12,\n\x0bprimal_attr\x18\t \x03(\x0b\x32\x17.mind_ir.AttributeProto\"\xf8\x03\n\nModelProto\x12\x12\n\nir_version\x18\x01 \x01(\t\x12\x15\n\rproducer_name\x18\x02 \x01(\t\x12\x18\n\x10producer_version\x18\x03 \x01(\t\x12\x0e\n\x06\x64omain\x18\x04 \x01(\t\x12\x15\n\rmodel_version\x18\x05 \x01(\t\x12\x12\n\ndoc_string\x18\x06 
\x01(\t\x12\"\n\x05graph\x18\x07 \x01(\x0b\x32\x13.mind_ir.GraphProto\x12&\n\tfunctions\x18\x08 \x03(\x0b\x32\x13.mind_ir.GraphProto\x12\x30\n\x0cpreprocessor\x18\t \x01(\x0b\x32\x1a.mind_ir.PreprocessorProto\x12\x15\n\rlittle_endian\x18\n \x01(\x08\x12(\n\x08parallel\x18\x0b \x01(\x0b\x32\x16.mind_ir.ParallelProto\x12+\n\nprimitives\x18\x0c \x03(\x0b\x32\x17.mind_ir.PrimitiveProto\x12\x17\n\x0fmind_ir_version\x18\r \x01(\x03\x12\x34\n\tuser_info\x18\x0e \x03(\x0b\x32!.mind_ir.ModelProto.UserInfoEntry\x1a/\n\rUserInfoEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\";\n\x11PreprocessorProto\x12&\n\x02op\x18\x01 \x03(\x0b\x32\x1a.mind_ir.PreprocessOpProto\"\x91\x01\n\x11PreprocessOpProto\x12\x15\n\rinput_columns\x18\x01 \x01(\t\x12\x16\n\x0eoutput_columns\x18\x02 \x01(\t\x12\x17\n\x0fproject_columns\x18\x03 \x01(\t\x12\x0f\n\x07op_type\x18\x04 \x01(\t\x12\x12\n\noperations\x18\x05 \x01(\t\x12\x0f\n\x07offload\x18\x06 \x01(\x08\"\xd2\x02\n\nGraphProto\x12 \n\x04node\x18\x01 \x03(\x0b\x32\x12.mind_ir.NodeProto\x12\x0c\n\x04name\x18\x02 \x01(\t\x12\'\n\tparameter\x18\x03 \x03(\x0b\x32\x14.mind_ir.TensorProto\x12\x12\n\ndoc_string\x18\x04 \x01(\t\x12&\n\x05input\x18\x05 \x03(\x0b\x32\x17.mind_ir.ValueInfoProto\x12\'\n\x06output\x18\x06 \x03(\x0b\x32\x17.mind_ir.ValueInfoProto\x12\x12\n\nbprop_hash\x18\x07 \x01(\t\x12*\n\tattribute\x18\x08 \x03(\x0b\x32\x17.mind_ir.AttributeProto\x12\x16\n\x0e\x62prop_filepath\x18\t \x01(\t\x12.\n\rmap_parameter\x18\n \x03(\x0b\x32\x17.mind_ir.MapTensorProto\"\
+
serialized_pb=b'\n\rmind_ir.proto\x12\x07mind_ir\"\x88\t\n\x0e\x41ttributeProto\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\t\n\x01\x66\x18\x02 \x01(\x02\x12\t\n\x01i\x18\x03 \x01(\x03\x12\t\n\x01\x64\x18\x04 \x01(\x01\x12\t\n\x01s\x18\x05 \x01(\x0c\x12\x1f\n\x01t\x18\x06 \x01(\x0b\x32\x14.mind_ir.TensorProto\x12\x1e\n\x01g\x18\x07 \x01(\x0b\x32\x13.mind_ir.GraphProto\x12\x0e\n\x06\x66loats\x18\x08 \x03(\x02\x12\x0f\n\x07\x64oubles\x18\t \x03(\x01\x12\x0c\n\x04ints\x18\n \x03(\x03\x12\x0f\n\x07strings\x18\x0b \x03(\x0c\x12%\n\x07tensors\x18\x0c \x03(\x0b\x32\x14.mind_ir.TensorProto\x12#\n\x06graphs\x18\r \x03(\x0b\x32\x13.mind_ir.GraphProto\x12\x12\n\ndoc_string\x18\x0e \x01(\t\x12\x15\n\rref_attr_name\x18\x0f \x01(\t\x12\x33\n\x04type\x18\x10 \x01(\x0e\x32%.mind_ir.AttributeProto.AttributeType\x12\'\n\x06values\x18\x11 \x03(\x0b\x32\x17.mind_ir.AttributeProto\x12\x36\n\x08seq_info\x18\x12 \x01(\x0b\x32$.mind_ir.AttributeProto.SeqInfoProto\x12&\n\x07\x66unctor\x18\x13 \x01(\x0b\x32\x15.mind_ir.FunctorProto\x1aT\n\x0cSeqInfoProto\x12\x12\n\nis_dyn_len\x18\x01 \x01(\x08\x12\x30\n\x0ftuple_elem_item\x18\x02 \x01(\x0b\x32\x17.mind_ir.AttributeProto\"\xaf\x04\n\rAttributeType\x12\r\n\tUNDEFINED\x10\x00\x12\t\n\x05\x46LOAT\x10\x01\x12\t\n\x05UINT8\x10\x02\x12\x08\n\x04INT8\x10\x03\x12\n\n\x06UINT16\x10\x04\x12\t\n\x05INT16\x10\x05\x12\t\n\x05INT32\x10\x06\x12\t\n\x05INT64\x10\x07\x12\n\n\x06STRING\x10\x08\x12\x08\n\x04\x42OOL\x10\t\x12\x0b\n\x07\x46LOAT16\x10\n\x12\n\n\x06\x44OUBLE\x10\x0b\x12\n\n\x06UINT32\x10\x0c\x12\n\n\x06UINT64\x10\r\x12\r\n\tCOMPLEX64\x10\x0e\x12\x0e\n\nCOMPLEX128\x10\x0f\x12\x0c\n\x08\x42\x46LOAT16\x10\x10\x12\n\n\x06TENSOR\x10\x11\x12\t\n\x05GRAPH\x10\x12\x12\x0b\n\x07TENSORS\x10\x13\x12\t\n\x05TUPLE\x10\x14\x12\x08\n\x04LIST\x10\x15\x12\x08\n\x04\x44ICT\x10\x16\x12\n\n\x06UMONAD\x10\x17\x12\x0b\n\x07IOMONAD\x10\x18\x12\x08\n\x04NONE\x10\x19\x12\x14\n\x10PRIMITIVECLOSURE\x10\x1a\x12\x14\n\x10\x46UNCGRAPHCLOSURE\x10\x1b\x12\x12\n\x0ePARTIALCLOSURE\x10\x1c\x12\x14\n\x10UNIONFUNCCLOSURE\x10\x1d\x12\x0e\n\nCSR_TENSOR\x10\x1e\x12\x0e\n\nCOO_TENSOR\x10\x1f\x12\x0e\n\nROW_TENSOR\x10 \x12\x0e\n\nCLASS_TYPE\x10!\x12\x0e\n\nNAME_SPACE\x10\"\x12\n\n\x06SYMBOL\x10#\x12\r\n\tTYPE_NULL\x10$\x12\x0e\n\nMAP_TENSOR\x10%\x12\x0b\n\x07\x46UNCTOR\x10&\x12\n\n\x06SCALAR\x10\'\"\x9d\x01\n\x0c\x46unctorProto\x12/\n\x04type\x18\x01 \x01(\x0e\x32!.mind_ir.FunctorProto.FunctorType\x12\x0c\n\x04name\x18\x02 \x01(\t\x12\'\n\x06values\x18\x03 \x03(\x0b\x32\x17.mind_ir.AttributeProto\"%\n\x0b\x46unctorType\x12\x16\n\x12SHAPE_CALC_FUNCTOR\x10\x01\"\x98\x01\n\x0eValueInfoProto\x12\x0c\n\x04name\x18\x01 \x01(\t\x12$\n\x06tensor\x18\x02 \x03(\x0b\x32\x14.mind_ir.TensorProto\x12\x12\n\ndoc_string\x18\x03 \x01(\t\x12\x12\n\ndenotation\x18\x04 \x01(\t\x12*\n\tattr_info\x18\x05 \x01(\x0b\x32\x17.mind_ir.AttributeProto\"\xf3\x01\n\tNodeProto\x12\r\n\x05input\x18\x01 \x03(\t\x12\x0e\n\x06output\x18\x02 \x03(\t\x12\x0c\n\x04name\x18\x03 \x01(\t\x12\x0f\n\x07op_type\x18\x04 \x01(\t\x12*\n\tattribute\x18\x05 \x03(\x0b\x32\x17.mind_ir.AttributeProto\x12\x12\n\ndoc_string\x18\x06 \x01(\t\x12\x0e\n\x06\x64omain\x18\x07 \x01(\t\x12*\n\tnode_attr\x18\x08 \x03(\x0b\x32\x17.mind_ir.AttributeProto\x12,\n\x0bprimal_attr\x18\t \x03(\x0b\x32\x17.mind_ir.AttributeProto\"\xf8\x03\n\nModelProto\x12\x12\n\nir_version\x18\x01 \x01(\t\x12\x15\n\rproducer_name\x18\x02 \x01(\t\x12\x18\n\x10producer_version\x18\x03 \x01(\t\x12\x0e\n\x06\x64omain\x18\x04 \x01(\t\x12\x15\n\rmodel_version\x18\x05 \x01(\t\x12\x12\n\ndoc_string\x18\x06 
\x01(\t\x12\"\n\x05graph\x18\x07 \x01(\x0b\x32\x13.mind_ir.GraphProto\x12&\n\tfunctions\x18\x08 \x03(\x0b\x32\x13.mind_ir.GraphProto\x12\x30\n\x0cpreprocessor\x18\t \x01(\x0b\x32\x1a.mind_ir.PreprocessorProto\x12\x15\n\rlittle_endian\x18\n \x01(\x08\x12(\n\x08parallel\x18\x0b \x01(\x0b\x32\x16.mind_ir.ParallelProto\x12+\n\nprimitives\x18\x0c \x03(\x0b\x32\x17.mind_ir.PrimitiveProto\x12\x17\n\x0fmind_ir_version\x18\r \x01(\x03\x12\x34\n\tuser_info\x18\x0e \x03(\x0b\x32!.mind_ir.ModelProto.UserInfoEntry\x1a/\n\rUserInfoEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\";\n\x11PreprocessorProto\x12&\n\x02op\x18\x01 \x03(\x0b\x32\x1a.mind_ir.PreprocessOpProto\"\x91\x01\n\x11PreprocessOpProto\x12\x15\n\rinput_columns\x18\x01 \x01(\t\x12\x16\n\x0eoutput_columns\x18\x02 \x01(\t\x12\x17\n\x0fproject_columns\x18\x03 \x01(\t\x12\x0f\n\x07op_type\x18\x04 \x01(\t\x12\x12\n\noperations\x18\x05 \x01(\t\x12\x0f\n\x07offload\x18\x06 \x01(\x08\"\xd2\x02\n\nGraphProto\x12 \n\x04node\x18\x01 \x03(\x0b\x32\x12.mind_ir.NodeProto\x12\x0c\n\x04name\x18\x02 \x01(\t\x12\'\n\tparameter\x18\x03 \x03(\x0b\x32\x14.mind_ir.TensorProto\x12\x12\n\ndoc_string\x18\x04 \x01(\t\x12&\n\x05input\x18\x05 \x03(\x0b\x32\x17.mind_ir.ValueInfoProto\x12\'\n\x06output\x18\x06 \x03(\x0b\x32\x17.mind_ir.ValueInfoProto\x12\x12\n\nbprop_hash\x18\x07 \x01(\t\x12*\n\tattribute\x18\x08 \x03(\x0b\x32\x17.mind_ir.AttributeProto\x12\x16\n\x0e\x62prop_filepath\x18\t \x01(\t\x12.\n\rmap_parameter\x18\n \x03(\x0b\x32\x17.mind_ir.MapTensorProto\"\xda\x07\n\x0bTensorProto\x12\x0c\n\x04\x64ims\x18\x01 \x03(\x03\x12\x11\n\tdata_type\x18\x02 \x01(\x05\x12\x12\n\nfloat_data\x18\x03 \x03(\x02\x12\x12\n\nint32_data\x18\x04 \x03(\x05\x12\x13\n\x0bstring_data\x18\x05 \x03(\x0c\x12\x12\n\nint64_data\x18\x06 \x03(\x03\x12\x0c\n\x04name\x18\x07 \x01(\t\x12\x12\n\ndoc_string\x18\x08 \x01(\t\x12\x10\n\x08raw_data\x18\t \x01(\x0c\x12\x13\n\x0b\x64ouble_data\x18\n \x03(\x01\x12\x13\n\x0buint64_data\x18\x0b \x03(\x04\x12=\n\rexternal_data\x18\x0c \x01(\x0b\x32&.mind_ir.TensorProto.ExternalDataProto\x12\x0f\n\x07ref_key\x18\r \x01(\t\x12\x10\n\x08min_dims\x18\x0e \x03(\x03\x12\x10\n\x08max_dims\x18\x0f \x03(\x03\x12>\n\x10\x63ompression_type\x18\x10 \x01(\x0e\x32$.mind_ir.TensorProto.CompressionType\x12:\n\x0cquant_params\x18\x11 \x03(\x0b\x32$.mind_ir.TensorProto.QuantParamProto\x1a\x45\n\x11\x45xternalDataProto\x12\x10\n\x08location\x18\x01 \x01(\t\x12\x0e\n\x06offset\x18\x02 \x01(\x03\x12\x0e\n\x06length\x18\x03 \x01(\x03\x1aV\n\x0fQuantParamProto\x12\x17\n\x0fquant_algo_name\x18\x01 \x02(\t\x12*\n\tattribute\x18\x02 \x03(\x0b\x32\x17.mind_ir.AttributeProto\"\xf4\x01\n\x08\x44\x61taType\x12\r\n\tUNDEFINED\x10\x00\x12\t\n\x05\x46LOAT\x10\x01\x12\t\n\x05UINT8\x10\x02\x12\x08\n\x04INT8\x10\x03\x12\n\n\x06UINT16\x10\x04\x12\t\n\x05INT16\x10\x05\x12\t\n\x05INT32\x10\x06\x12\t\n\x05INT64\x10\x07\x12\n\n\x06STRING\x10\x08\x12\x08\n\x04\x42OOL\x10\t\x12\x0b\n\x07\x46LOAT16\x10\n\x12\n\n\x06\x44OUBLE\x10\x0b\x12\n\n\x06UINT32\x10\x0c\x12\n\n\x06UINT64\x10\r\x12\r\n\tCOMPLEX64\x10\x0e\x12\x0e\n\nCOMPLEX128\x10\x0f\x12\x0c\n\x08\x42\x46LOAT16\x10\x10\x12\x0b\n\x07\x46LOAT64\x10\x11\x12\x0b\n\x07QINT4X2\x10\x12\"u\n\x0f\x43ompressionType\x12\x12\n\x0eNO_COMPRESSION\x10\x00\x12\x0c\n\x08INDEXING\x10\x01\x12\n\n\x06SPARSE\x10\x02\x12\x07\n\x03\x46SE\x10\x03\x12\x0f\n\x0b\x42IT_PACKING\x10\x04\x12\x0b\n\x07\x46SE_INT\x10\x05\x12\r\n\tFSE_INFER\x10\x06\"\xd1\x01\n\x0eMapTensorProto\x12\x0c\n\x04name\x18\x01 
\x02(\t\x12.\n\rdefault_value\x18\x02 \x02(\x0b\x32\x17.mind_ir.AttributeProto\x12(\n\nkey_tensor\x18\x03 \x02(\x0b\x32\x14.mind_ir.TensorProto\x12*\n\x0cvalue_tensor\x18\x04 \x02(\x0b\x32\x14.mind_ir.TensorProto\x12+\n\rstatus_tensor\x18\x05 \x02(\x0b\x32\x14.mind_ir.TensorProto\"5\n\rParallelProto\x12$\n\x06layout\x18\x01 \x03(\x0b\x32\x14.mind_ir.LayoutProto\"\xfd\x01\n\x0bLayoutProto\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x1e\n\x16\x64\x65vice_arrangement_int\x18\x02 \x03(\x03\x12\x16\n\x0etensor_map_int\x18\x03 \x03(\x03\x12\x17\n\x0fslice_shape_int\x18\x04 \x03(\x03\x12\x12\n\nfield_size\x18\x05 \x01(\x03\x12\x15\n\runiform_split\x18\x06 \x01(\x08\x12\x17\n\x0fopt_shard_group\x18\x07 \x01(\t\x12\x17\n\x0fpipeline_shared\x18\x08 \x01(\x08\x12\x0f\n\x07is_send\x18\t \x01(\x08\x12\x11\n\tpeer_rank\x18\n \x01(\x03\x12\x0e\n\x06sr_tag\x18\x0b \x01(\x03\"\xda\x01\n\x0ePrimitiveProto\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x0f\n\x07op_type\x18\x02 \x01(\t\x12*\n\tattribute\x18\x03 \x03(\x0b\x32\x17.mind_ir.AttributeProto\x12\x15\n\rinstance_name\x18\x04 \x01(\t\x12\x33\n\tprim_type\x18\x05 \x01(\x0e\x32 .mind_ir.PrimitiveProto.PrimType\"1\n\x08PrimType\x12\r\n\tPRIMITIVE\x10\x01\x12\x16\n\x12PRIMITIVE_FUNCTION\x10\x02*R\n\x07Version\x12\x14\n\x10IR_VERSION_START\x10\x00\x12\x0e\n\nIR_VERSION\x10\x01\x12!\n\x1dIR_VERSION_WITH_PRIM_FUNCTION\x10\x02'
  )

  _VERSION = _descriptor.EnumDescriptor(

@@ -48,8 +48,8 @@ _VERSION = _descriptor.EnumDescriptor(
  ],
  containing_type=None,
  serialized_options=None,
- serialized_start=
- serialized_end=
+ serialized_start=4540,
+ serialized_end=4622,
  )
  _sym_db.RegisterEnumDescriptor(_VERSION)

@@ -391,11 +391,16 @@ _TENSORPROTO_DATATYPE = _descriptor.EnumDescriptor(
  serialized_options=None,
  type=None,
  create_key=_descriptor._internal_create_key),
+ _descriptor.EnumValueDescriptor(
+     name='QINT4X2', index=18, number=18,
+     serialized_options=None,
+     type=None,
+     create_key=_descriptor._internal_create_key),
  ],
  containing_type=None,
  serialized_options=None,
  serialized_start=3431,
- serialized_end=
+ serialized_end=3675,
  )
  _sym_db.RegisterEnumDescriptor(_TENSORPROTO_DATATYPE)

@@ -444,8 +449,8 @@ _TENSORPROTO_COMPRESSIONTYPE = _descriptor.EnumDescriptor(
  ],
  containing_type=None,
  serialized_options=None,
- serialized_start=
- serialized_end=
+ serialized_start=3677,
+ serialized_end=3794,
  )
  _sym_db.RegisterEnumDescriptor(_TENSORPROTO_COMPRESSIONTYPE)

@@ -469,8 +474,8 @@ _PRIMITIVEPROTO_PRIMTYPE = _descriptor.EnumDescriptor(
  ],
  containing_type=None,
  serialized_options=None,
- serialized_start=
- serialized_end=
+ serialized_start=4489,
+ serialized_end=4538,
  )
  _sym_db.RegisterEnumDescriptor(_PRIMITIVEPROTO_PRIMTYPE)

@@ -1447,7 +1452,7 @@ _TENSORPROTO = _descriptor.Descriptor(
  oneofs=[
  ],
  serialized_start=2808,
- serialized_end=
+ serialized_end=3794,
  )

@@ -1506,8 +1511,8 @@ _MAPTENSORPROTO = _descriptor.Descriptor(
  extension_ranges=[],
  oneofs=[
  ],
- serialized_start=
- serialized_end=
+ serialized_start=3797,
+ serialized_end=4006,
  )

@@ -1538,8 +1543,8 @@ _PARALLELPROTO = _descriptor.Descriptor(
  extension_ranges=[],
  oneofs=[
  ],
- serialized_start=
- serialized_end=
+ serialized_start=4008,
+ serialized_end=4061,
  )

@@ -1640,8 +1645,8 @@ _LAYOUTPROTO = _descriptor.Descriptor(
  extension_ranges=[],
  oneofs=[
  ],
- serialized_start=
- serialized_end=
+ serialized_start=4064,
+ serialized_end=4317,
  )

@@ -1701,8 +1706,8 @@ _PRIMITIVEPROTO = _descriptor.Descriptor(
  extension_ranges=[],
  oneofs=[
  ],
- serialized_start=
- serialized_end=
+ serialized_start=4320,
+ serialized_end=4538,
  )

  _ATTRIBUTEPROTO_SEQINFOPROTO.fields_by_name['tuple_elem_item'].message_type = _ATTRIBUTEPROTO
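The regenerated descriptor above adds a QINT4X2 value (number 18) to TensorProto.DataType and shifts the serialized offsets that follow it. A hedged sketch of how the new enum value could be checked from Python, assuming the generated module is importable from the installed wheel as mindspore.train.mind_ir_pb2:

```python
# Illustrative check only; the enum name and number are taken from the diff above.
from mindspore.train import mind_ir_pb2

value = mind_ir_pb2.TensorProto.DataType.Value("QINT4X2")
print(value)                                          # expected: 18
print(mind_ir_pb2.TensorProto.DataType.Name(value))   # expected: QINT4X2
```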
mindspore/train/model.py CHANGED

@@ -190,7 +190,7 @@ class Model:
  >>> from mindspore.train import Model
  >>>
  >>> # Define the network structure of LeNet5. Refer to
- >>> # https://gitee.com/mindspore/docs/blob/
+ >>> # https://gitee.com/mindspore/docs/blob/master/docs/mindspore/code/lenet.py
  >>> net = LeNet5()
  >>> loss = nn.SoftmaxCrossEntropyWithLogits(sparse=True)
  >>> optim = nn.Momentum(params=net.trainable_params(), learning_rate=0.1, momentum=0.9)

@@ -199,7 +199,7 @@ class Model:
  >>> model.predict_network
  >>> model.eval_network
  >>> # Create the dataset taking MNIST as an example. Refer to
- >>> # https://gitee.com/mindspore/docs/blob/
+ >>> # https://gitee.com/mindspore/docs/blob/master/docs/mindspore/code/mnist.py
  >>> dataset = create_dataset()
  >>> model.train(2, dataset)
  """

@@ -1022,10 +1022,10 @@ class Model:
  >>> from mindspore.train import Model
  >>>
  >>> # Create the dataset taking MNIST as an example. Refer to
- >>> # https://gitee.com/mindspore/docs/blob/
+ >>> # https://gitee.com/mindspore/docs/blob/master/docs/mindspore/code/mnist.py
  >>> dataset = create_dataset()
  >>> # Define the network structure of LeNet5. Refer to
- >>> # https://gitee.com/mindspore/docs/blob/
+ >>> # https://gitee.com/mindspore/docs/blob/master/docs/mindspore/code/lenet.py
  >>> net = LeNet5()
  >>> loss = nn.SoftmaxCrossEntropyWithLogits(sparse=True)
  >>> loss_scale_manager = ms.FixedLossScaleManager(1024., False)

@@ -1175,11 +1175,11 @@ class Model:
  >>> from mindspore.train import Model
  >>>
  >>> # Create the dataset taking MNIST as an example. Refer to
- >>> # https://gitee.com/mindspore/docs/blob/
+ >>> # https://gitee.com/mindspore/docs/blob/master/docs/mindspore/code/mnist.py
  >>> train_dataset = create_dataset("train")
  >>> valid_dataset = create_dataset("test")
  >>> # Define the network structure of LeNet5. Refer to
- >>> # https://gitee.com/mindspore/docs/blob/
+ >>> # https://gitee.com/mindspore/docs/blob/master/docs/mindspore/code/lenet.py
  >>> net = LeNet5()
  >>> loss = nn.SoftmaxCrossEntropyWithLogits(sparse=True)
  >>> optim = nn.Momentum(params=net.trainable_params(), learning_rate=0.1, momentum=0.9)

@@ -1188,7 +1188,7 @@ class Model:

  Tutorial Examples:
      - `Advanced Encapsulation: Model - Train and Save Model
-       <https://www.mindspore.cn/tutorials/en/
+       <https://www.mindspore.cn/tutorials/en/master/advanced/model.html#training-and-saving-model>`_
  """
  device_target = context.get_context("device_target")
  if _is_ps_mode() and not _cache_enable() and (device_target in ["Ascend", "CPU"]) and dataset_sink_mode:
@@ -1268,10 +1268,10 @@ class Model:
  >>> from mindspore.amp import FixedLossScaleManager
  >>>
  >>> # Create the dataset taking MNIST as an example. Refer to
- >>> # https://gitee.com/mindspore/docs/blob/
+ >>> # https://gitee.com/mindspore/docs/blob/master/docs/mindspore/code/mnist.py
  >>> dataset = create_dataset()
  >>> # Define the network structure of LeNet5. Refer to
- >>> # https://gitee.com/mindspore/docs/blob/
+ >>> # https://gitee.com/mindspore/docs/blob/master/docs/mindspore/code/lenet.py
  >>> net = LeNet5()
  >>> loss = nn.SoftmaxCrossEntropyWithLogits()
  >>> loss_scale_manager = FixedLossScaleManager()

@@ -1444,10 +1444,10 @@ class Model:
  >>> from mindspore.train import Model
  >>>
  >>> # Create the dataset taking MNIST as an example. Refer to
- >>> # https://gitee.com/mindspore/docs/blob/
+ >>> # https://gitee.com/mindspore/docs/blob/master/docs/mindspore/code/mnist.py
  >>> dataset = create_dataset()
  >>> # Define the network structure of LeNet5. Refer to
- >>> # https://gitee.com/mindspore/docs/blob/
+ >>> # https://gitee.com/mindspore/docs/blob/master/docs/mindspore/code/lenet.py
  >>> net = LeNet5()
  >>> loss = nn.SoftmaxCrossEntropyWithLogits(sparse=True)
  >>> model = Model(net, loss_fn=loss, optimizer=None, metrics={'acc'})

@@ -1455,7 +1455,7 @@ class Model:

  Tutorial Examples:
      - `Advanced Encapsulation: Model - Train and Save Model
-       <https://www.mindspore.cn/tutorials/en/
+       <https://www.mindspore.cn/tutorials/en/master/advanced/model.html#training-and-saving-model>`_
  """
  valid_dataset = self._prepare_obf_dataset(valid_dataset)
  dataset_sink_mode = Validator.check_bool(dataset_sink_mode)

@@ -1701,7 +1701,7 @@ class Model:
  >>>
  >>> input_data = Tensor(np.random.randint(0, 255, [1, 1, 32, 32]), mindspore.float32)
  >>> # Define the network structure of LeNet5. Refer to
- >>> # https://gitee.com/mindspore/docs/blob/
+ >>> # https://gitee.com/mindspore/docs/blob/master/docs/mindspore/code/lenet.py
  >>> model = Model(LeNet5())
  >>> result = model.predict(input_data)
  """

@@ -1809,10 +1809,10 @@ class Model:
  >>> ms.set_auto_parallel_context(parallel_mode=ms.ParallelMode.SEMI_AUTO_PARALLEL)
  >>>
  >>> # Create the dataset taking MNIST as an example. Refer to
- >>> # https://gitee.com/mindspore/docs/blob/
+ >>> # https://gitee.com/mindspore/docs/blob/master/docs/mindspore/code/mnist.py
  >>> dataset = create_dataset()
  >>> # Define the network structure of LeNet5. Refer to
- >>> # https://gitee.com/mindspore/docs/blob/
+ >>> # https://gitee.com/mindspore/docs/blob/master/docs/mindspore/code/lenet.py
  >>> net = LeNet5()
  >>> loss = nn.SoftmaxCrossEntropyWithLogits()
  >>> loss_scale_manager = ms.FixedLossScaleManager()