mindspore-2.0.0rc1-cp38-cp38-manylinux1_x86_64.whl → mindspore-2.2.0-cp38-cp38-manylinux1_x86_64.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of mindspore might be problematic; see the advisory details on the package registry's release page for more information.
- mindspore/.commit_id +1 -1
- mindspore/Third_Party_Open_Source_Software_Notice +2 -2
- mindspore/__init__.py +5 -2
- mindspore/_akg/akg/build_module.py +5 -6
- mindspore/_akg/akg/composite/build_module.py +49 -16
- mindspore/_akg/akg/composite/split_stitch.py +10 -11
- mindspore/_akg/akg/config/repository.json +195 -0
- mindspore/_akg/akg/global_configs.py +5 -1
- mindspore/_akg/akg/ms/info_version_adapt.py +67 -1
- mindspore/_akg/akg/tvm/api.py +4 -3
- mindspore/_akg/akg/tvm/autotvm/__init__.py +1 -2
- mindspore/_akg/akg/tvm/autotvm/graph_tuner/base_graph_tuner.py +1 -5
- mindspore/_akg/akg/tvm/autotvm/measure/__init__.py +1 -1
- mindspore/_akg/akg/tvm/autotvm/measure/measure.py +1 -10
- mindspore/_akg/akg/tvm/autotvm/measure/measure_methods.py +1 -372
- mindspore/_akg/akg/tvm/build_module.py +16 -1
- mindspore/_akg/akg/tvm/contrib/graph_runtime.py +0 -53
- mindspore/_akg/akg/tvm/hybrid/parser.py +7 -6
- mindspore/_akg/akg/tvm/ir_builder.py +1 -1
- mindspore/_akg/akg/tvm/module.py +1 -2
- mindspore/_akg/akg/tvm/stmt.py +2 -2
- mindspore/_akg/akg/utils/composite_op_helper.py +9 -10
- mindspore/_akg/akg/utils/kernel_exec.py +58 -260
- mindspore/_akg/akg/utils/op_dsl.py +17 -1
- mindspore/_akg/akg/utils/result_analysis.py +4 -24
- mindspore/_akg/akg/utils/tbe_codegen_utils.py +198 -0
- mindspore/_c_dataengine.cpython-38-x86_64-linux-gnu.so +0 -0
- mindspore/_c_expression.cpython-38-x86_64-linux-gnu.so +0 -0
- mindspore/_c_mindrecord.cpython-38-x86_64-linux-gnu.so +0 -0
- mindspore/_check_jit_forbidden_api.py +5 -1
- mindspore/_checkparam.py +79 -62
- mindspore/_extends/graph_kernel/__init__.py +0 -1
- mindspore/_extends/graph_kernel/model/graph_split.py +2 -0
- mindspore/_extends/graph_kernel/model/model_builder.py +9 -50
- mindspore/_extends/graph_kernel/splitter.py +1 -9
- mindspore/_extends/parallel_compile/akg_compiler/akg_process.py +128 -21
- mindspore/_extends/parallel_compile/akg_compiler/build_tbe_kernel.py +2 -2
- mindspore/_extends/parallel_compile/akg_compiler/tbe_topi.py +4 -2
- mindspore/_extends/parallel_compile/tbe_compiler/tbe_adapter.py +18 -13
- mindspore/_extends/parallel_compile/tbe_compiler/tbe_helper.py +13 -9
- mindspore/_extends/parallel_compile/tbe_compiler/tbe_job.py +1 -1
- mindspore/_extends/parallel_compile/tbe_compiler/tbe_job_manager.py +1 -1
- mindspore/_extends/parse/__init__.py +19 -17
- mindspore/_extends/parse/namespace.py +7 -36
- mindspore/_extends/parse/parser.py +375 -189
- mindspore/_extends/parse/resources.py +36 -41
- mindspore/_extends/parse/standard_method.py +350 -245
- mindspore/_extends/parse/trope.py +2 -12
- mindspore/_extends/remote/kernel_build_server.py +24 -7
- mindspore/_extends/remote/kernel_build_server_akg_v2.py +55 -0
- mindspore/_install_custom.py +43 -0
- mindspore/_mindspore_offline_debug.cpython-38-x86_64-linux-gnu.so +0 -0
- mindspore/amp.py +85 -19
- mindspore/bin/cache_admin +0 -0
- mindspore/bin/cache_server +0 -0
- mindspore/boost/base.py +2 -2
- mindspore/boost/boost.py +27 -32
- mindspore/boost/boost_cell_wrapper.py +37 -13
- mindspore/boost/grad_accumulation.py +1 -1
- mindspore/boost/grad_freeze.py +34 -6
- mindspore/boost/group_loss_scale_manager.py +15 -14
- mindspore/boost/less_batch_normalization.py +28 -3
- mindspore/common/__init__.py +15 -11
- mindspore/common/_auto_dynamic.py +68 -0
- mindspore/common/_jit_fallback_utils.py +111 -0
- mindspore/common/_register_for_adapter.py +17 -5
- mindspore/common/_register_for_tensor.py +2 -2
- mindspore/common/_stub_tensor.py +18 -15
- mindspore/common/_utils.py +31 -7
- mindspore/common/api.py +269 -101
- mindspore/common/auto_dynamic_shape.py +498 -0
- mindspore/common/dtype.py +61 -21
- mindspore/common/dump.py +9 -7
- mindspore/common/initializer.py +106 -76
- mindspore/common/jit_config.py +35 -14
- mindspore/common/lazy_inline.py +187 -0
- mindspore/common/mindir_util.py +101 -0
- mindspore/common/mutable.py +10 -13
- mindspore/common/parameter.py +246 -55
- mindspore/common/seed.py +13 -7
- mindspore/common/sparse_tensor.py +29 -33
- mindspore/common/tensor.py +907 -251
- mindspore/communication/__init__.py +7 -4
- mindspore/communication/_comm_helper.py +84 -4
- mindspore/communication/management.py +160 -88
- mindspore/config/op_info.config +99 -75
- mindspore/config/super_bar_config.json +36 -4
- mindspore/context.py +526 -219
- mindspore/dataset/__init__.py +9 -46
- mindspore/dataset/audio/__init__.py +4 -19
- mindspore/dataset/audio/transforms.py +545 -233
- mindspore/dataset/audio/utils.py +21 -18
- mindspore/dataset/callback/ds_callback.py +42 -13
- mindspore/dataset/core/config.py +158 -100
- mindspore/dataset/core/validator_helpers.py +1 -63
- mindspore/dataset/debug/debug_hook.py +45 -13
- mindspore/dataset/debug/pre_defined_hook.py +5 -5
- mindspore/dataset/engine/__init__.py +0 -5
- mindspore/dataset/engine/cache_client.py +38 -15
- mindspore/dataset/engine/datasets.py +615 -278
- mindspore/dataset/engine/datasets_audio.py +154 -283
- mindspore/dataset/engine/datasets_standard_format.py +104 -116
- mindspore/dataset/engine/datasets_text.py +443 -326
- mindspore/dataset/engine/datasets_user_defined.py +251 -164
- mindspore/dataset/engine/datasets_vision.py +839 -1443
- mindspore/dataset/engine/iterators.py +11 -4
- mindspore/dataset/engine/obs/obs_mindrecord_dataset.py +7 -3
- mindspore/dataset/engine/obs/util.py +3 -0
- mindspore/dataset/engine/offload.py +6 -6
- mindspore/dataset/engine/queue.py +15 -14
- mindspore/dataset/engine/samplers.py +39 -23
- mindspore/dataset/engine/serializer_deserializer.py +22 -6
- mindspore/dataset/engine/validators.py +21 -331
- mindspore/dataset/text/__init__.py +5 -33
- mindspore/dataset/text/transforms.py +334 -165
- mindspore/dataset/text/utils.py +215 -145
- mindspore/dataset/transforms/__init__.py +1 -1
- mindspore/dataset/transforms/c_transforms.py +3 -2
- mindspore/dataset/transforms/py_transforms_util.py +40 -12
- mindspore/dataset/transforms/transforms.py +174 -71
- mindspore/dataset/utils/browse_dataset.py +25 -17
- mindspore/dataset/utils/line_reader.py +24 -21
- mindspore/dataset/vision/__init__.py +5 -26
- mindspore/dataset/vision/c_transforms.py +177 -165
- mindspore/dataset/vision/py_transforms.py +114 -119
- mindspore/dataset/vision/py_transforms_util.py +54 -51
- mindspore/dataset/vision/transforms.py +1127 -381
- mindspore/dataset/vision/utils.py +54 -38
- mindspore/dataset/vision/validators.py +12 -2
- mindspore/experimental/map_parameter.py +38 -4
- mindspore/{dataset/datapreprocess → experimental/optim}/__init__.py +14 -4
- mindspore/experimental/optim/adam.py +192 -0
- mindspore/experimental/optim/adamw.py +181 -0
- mindspore/experimental/optim/lr_scheduler.py +1427 -0
- mindspore/experimental/optim/optimizer.py +252 -0
- mindspore/experimental/optim/sgd.py +147 -0
- mindspore/gen_ops.py +273 -0
- mindspore/include/OWNERS +1 -2
- mindspore/include/api/context.h +21 -1
- mindspore/include/api/data_type.h +2 -1
- mindspore/include/api/graph.h +0 -15
- mindspore/include/api/kernel.h +2 -0
- mindspore/include/api/kernel_api.h +37 -12
- mindspore/include/api/model.h +29 -42
- mindspore/include/api/model_group.h +14 -3
- mindspore/include/api/model_parallel_runner.h +18 -2
- mindspore/include/api/serialization.h +26 -0
- mindspore/include/api/status.h +1 -0
- mindspore/include/api/types.h +38 -4
- mindspore/include/c_api/ms/abstract.h +67 -0
- mindspore/include/c_api/ms/attribute.h +197 -0
- mindspore/include/c_api/ms/base/handle_types.h +43 -0
- mindspore/include/c_api/ms/base/macros.h +32 -0
- mindspore/include/c_api/ms/base/status.h +33 -0
- mindspore/include/c_api/ms/base/types.h +282 -0
- mindspore/include/c_api/ms/context.h +102 -0
- mindspore/include/c_api/ms/graph.h +160 -0
- mindspore/include/c_api/ms/node.h +606 -0
- mindspore/include/c_api/ms/tensor.h +161 -0
- mindspore/include/c_api/ms/value.h +84 -0
- mindspore/include/c_api/status_c.h +3 -0
- mindspore/include/dataset/constants.h +6 -12
- mindspore/include/dataset/execute.h +23 -13
- mindspore/include/dataset/text.h +26 -26
- mindspore/include/dataset/transforms.h +25 -31
- mindspore/include/dataset/vision.h +60 -60
- mindspore/include/dataset/vision_ascend.h +5 -6
- mindspore/include/dataset/vision_lite.h +17 -17
- mindspore/include/mindapi/base/format.h +0 -1
- mindspore/include/mindapi/base/type_id.h +2 -1
- mindspore/include/mindapi/base/types.h +5 -1
- mindspore/lib/libdnnl.so.2 +0 -0
- mindspore/lib/libjemalloc.so.2 +0 -0
- mindspore/lib/libmindspore.so +0 -0
- mindspore/lib/libmindspore_backend.so +0 -0
- mindspore/lib/libmindspore_common.so +0 -0
- mindspore/lib/libmindspore_core.so +0 -0
- mindspore/lib/libmindspore_glog.so.0 +0 -0
- mindspore/lib/libmindspore_gpr.so.15 +0 -0
- mindspore/lib/libmindspore_grpc++.so.1 +0 -0
- mindspore/lib/libmindspore_grpc.so.15 +0 -0
- mindspore/lib/libmindspore_shared_lib.so +0 -0
- mindspore/lib/libmpi_adapter.so +0 -0
- mindspore/lib/libnnacl.so +0 -0
- mindspore/lib/libopencv_core.so.4.5 +0 -0
- mindspore/lib/libopencv_imgcodecs.so.4.5 +0 -0
- mindspore/lib/libopencv_imgproc.so.4.5 +0 -0
- mindspore/lib/libps_cache.so +0 -0
- mindspore/lib/plugin/ascend/custom_aicpu_ops/op_impl/cpu/aicpu_kernel/impl/libcust_aicpu_kernels.so +0 -0
- mindspore/lib/plugin/ascend/custom_aicpu_ops/op_impl/cpu/aicpu_kernel/impl/libcust_cpu_kernels.so +0 -0
- mindspore/lib/plugin/ascend/custom_aicpu_ops/op_impl/cpu/config/cust_aicpu_kernel.json +9000 -0
- mindspore/lib/plugin/ascend/custom_aicpu_ops/op_proto/libcust_op_proto.so +0 -0
- mindspore/lib/plugin/ascend/libakg.so +0 -0
- mindspore/lib/plugin/ascend/libascend_collective.so +0 -0
- mindspore/lib/plugin/ascend/libdvpp_utils.so +0 -0
- mindspore/lib/plugin/ascend/libhccl_plugin.so +0 -0
- mindspore/lib/plugin/ascend/libmindspore_aicpu_kernels.so +0 -0
- mindspore/lib/plugin/ascend/libmindspore_cpu_kernels.so +0 -0
- mindspore/lib/plugin/cpu/libakg.so +0 -0
- mindspore/lib/plugin/gpu/libcuda_ops.so.10 +0 -0
- mindspore/lib/plugin/gpu/libcuda_ops.so.11 +0 -0
- mindspore/lib/plugin/gpu10.1/libakg.so +0 -0
- mindspore/lib/plugin/gpu10.1/libnccl.so.2 +0 -0
- mindspore/lib/plugin/gpu10.1/libnvidia_collective.so +0 -0
- mindspore/lib/plugin/gpu11.1/libakg.so +0 -0
- mindspore/lib/plugin/gpu11.1/libnccl.so.2 +0 -0
- mindspore/lib/plugin/gpu11.1/libnvidia_collective.so +0 -0
- mindspore/lib/plugin/gpu11.6/libakg.so +0 -0
- mindspore/lib/plugin/gpu11.6/libnccl.so.2 +0 -0
- mindspore/lib/plugin/gpu11.6/libnvidia_collective.so +0 -0
- mindspore/lib/plugin/libmindspore_ascend.so.1 +0 -0
- mindspore/lib/plugin/libmindspore_ascend.so.2 +0 -0
- mindspore/lib/plugin/libmindspore_gpu.so.10.1 +0 -0
- mindspore/lib/plugin/libmindspore_gpu.so.11.1 +0 -0
- mindspore/lib/plugin/libmindspore_gpu.so.11.6 +0 -0
- mindspore/log.py +9 -6
- mindspore/mindrecord/filereader.py +33 -4
- mindspore/mindrecord/filewriter.py +70 -35
- mindspore/mindrecord/mindpage.py +40 -34
- mindspore/mindrecord/shardreader.py +1 -1
- mindspore/mindrecord/shardsegment.py +1 -1
- mindspore/mindrecord/tools/cifar100_to_mr.py +25 -18
- mindspore/mindrecord/tools/cifar10_to_mr.py +25 -18
- mindspore/mindrecord/tools/csv_to_mr.py +29 -13
- mindspore/mindrecord/tools/imagenet_to_mr.py +24 -10
- mindspore/mindrecord/tools/mnist_to_mr.py +24 -11
- mindspore/mindrecord/tools/tfrecord_to_mr.py +31 -26
- mindspore/nn/cell.py +463 -169
- mindspore/nn/dynamic_lr.py +47 -43
- mindspore/nn/layer/activation.py +225 -82
- mindspore/nn/layer/basic.py +121 -79
- mindspore/nn/layer/channel_shuffle.py +21 -21
- mindspore/nn/layer/combined.py +33 -26
- mindspore/nn/layer/container.py +277 -22
- mindspore/nn/layer/conv.py +441 -304
- mindspore/nn/layer/dense.py +19 -13
- mindspore/nn/layer/embedding.py +62 -49
- mindspore/nn/layer/flash_attention.py +264 -0
- mindspore/nn/layer/image.py +50 -39
- mindspore/nn/layer/math.py +62 -51
- mindspore/nn/layer/normalization.py +219 -167
- mindspore/nn/layer/padding.py +58 -70
- mindspore/nn/layer/pooling.py +334 -287
- mindspore/nn/layer/rnn_cells.py +53 -38
- mindspore/nn/layer/rnns.py +59 -56
- mindspore/nn/layer/thor_layer.py +52 -44
- mindspore/nn/layer/timedistributed.py +6 -4
- mindspore/nn/layer/transformer.py +284 -164
- mindspore/nn/learning_rate_schedule.py +34 -25
- mindspore/nn/loss/__init__.py +3 -2
- mindspore/nn/loss/loss.py +554 -311
- mindspore/nn/optim/ada_grad.py +12 -9
- mindspore/nn/optim/adadelta.py +14 -11
- mindspore/nn/optim/adafactor.py +19 -16
- mindspore/nn/optim/adam.py +62 -47
- mindspore/nn/optim/adamax.py +13 -10
- mindspore/nn/optim/adasum.py +12 -8
- mindspore/nn/optim/asgd.py +10 -9
- mindspore/nn/optim/ftrl.py +20 -17
- mindspore/nn/optim/lamb.py +16 -12
- mindspore/nn/optim/lars.py +8 -6
- mindspore/nn/optim/lazyadam.py +25 -20
- mindspore/nn/optim/momentum.py +10 -7
- mindspore/nn/optim/optimizer.py +61 -9
- mindspore/nn/optim/proximal_ada_grad.py +14 -13
- mindspore/nn/optim/rmsprop.py +17 -13
- mindspore/nn/optim/rprop.py +30 -17
- mindspore/nn/optim/sgd.py +40 -23
- mindspore/nn/optim/thor.py +24 -26
- mindspore/nn/probability/bijector/bijector.py +11 -11
- mindspore/nn/probability/bijector/exp.py +1 -1
- mindspore/nn/probability/bijector/gumbel_cdf.py +3 -3
- mindspore/nn/probability/bijector/invert.py +1 -1
- mindspore/nn/probability/bijector/power_transform.py +29 -29
- mindspore/nn/probability/bijector/scalar_affine.py +3 -3
- mindspore/nn/probability/bijector/softplus.py +5 -5
- mindspore/nn/probability/bnn_layers/bnn_cell_wrapper.py +4 -2
- mindspore/nn/probability/bnn_layers/conv_variational.py +13 -13
- mindspore/nn/probability/bnn_layers/dense_variational.py +12 -12
- mindspore/nn/probability/bnn_layers/layer_distribution.py +9 -8
- mindspore/nn/probability/distribution/_utils/custom_ops.py +19 -3
- mindspore/nn/probability/distribution/_utils/utils.py +1 -1
- mindspore/nn/probability/distribution/bernoulli.py +9 -9
- mindspore/nn/probability/distribution/beta.py +8 -8
- mindspore/nn/probability/distribution/categorical.py +23 -15
- mindspore/nn/probability/distribution/cauchy.py +5 -6
- mindspore/nn/probability/distribution/distribution.py +3 -3
- mindspore/nn/probability/distribution/exponential.py +4 -4
- mindspore/nn/probability/distribution/gamma.py +10 -10
- mindspore/nn/probability/distribution/geometric.py +8 -8
- mindspore/nn/probability/distribution/gumbel.py +8 -9
- mindspore/nn/probability/distribution/half_normal.py +5 -5
- mindspore/nn/probability/distribution/laplace.py +5 -5
- mindspore/nn/probability/distribution/log_normal.py +12 -11
- mindspore/nn/probability/distribution/logistic.py +8 -8
- mindspore/nn/probability/distribution/normal.py +6 -5
- mindspore/nn/probability/distribution/poisson.py +10 -11
- mindspore/nn/probability/distribution/student_t.py +8 -9
- mindspore/nn/probability/distribution/transformed_distribution.py +5 -5
- mindspore/nn/probability/distribution/uniform.py +11 -11
- mindspore/nn/reinforcement/tensor_array.py +2 -2
- mindspore/nn/sparse/sparse.py +9 -9
- mindspore/nn/wrap/cell_wrapper.py +188 -63
- mindspore/nn/wrap/grad_reducer.py +21 -12
- mindspore/nn/wrap/loss_scale.py +136 -49
- mindspore/numpy/__init__.py +4 -4
- mindspore/numpy/array_creations.py +55 -56
- mindspore/numpy/array_ops.py +134 -35
- mindspore/numpy/logic_ops.py +66 -20
- mindspore/numpy/math_ops.py +142 -139
- mindspore/numpy/utils_const.py +2 -2
- mindspore/offline_debug/convert_async.py +2 -2
- mindspore/ops/_grad_experimental/__init__.py +7 -5
- mindspore/ops/_grad_experimental/grad_array_ops.py +231 -348
- mindspore/ops/{_grad → _grad_experimental}/grad_base.py +1 -33
- mindspore/ops/{_grad → _grad_experimental}/grad_comm_ops.py +25 -13
- mindspore/ops/{_grad/__init__.py → _grad_experimental/grad_debug_ops.py} +15 -7
- mindspore/ops/{_grad → _grad_experimental}/grad_implementations.py +17 -11
- mindspore/ops/_grad_experimental/grad_inner_ops.py +33 -52
- mindspore/ops/_grad_experimental/grad_math_ops.py +151 -1224
- mindspore/ops/_grad_experimental/grad_nn_ops.py +141 -414
- mindspore/ops/{_grad → _grad_experimental}/grad_quant_ops.py +10 -6
- mindspore/ops/_grad_experimental/grad_sparse.py +317 -2
- mindspore/ops/_grad_experimental/grad_sparse_ops.py +3 -13
- mindspore/ops/{_grad → _grad_experimental}/taylor_rule.py +1 -1
- mindspore/ops/_op_impl/_custom_op/dsd_back_impl.py +1 -1
- mindspore/ops/_op_impl/_custom_op/flash_attention/__init__.py +0 -0
- mindspore/ops/_op_impl/_custom_op/flash_attention/attention.py +406 -0
- mindspore/{_extends/graph_kernel/expanders/complex/__init__.py → ops/_op_impl/_custom_op/flash_attention/constants.py} +27 -8
- mindspore/ops/_op_impl/_custom_op/flash_attention/flash_attention_bwd.py +467 -0
- mindspore/ops/_op_impl/_custom_op/flash_attention/flash_attention_fwd.py +563 -0
- mindspore/ops/_op_impl/_custom_op/flash_attention/flash_attention_impl.py +193 -0
- mindspore/ops/_op_impl/_custom_op/flash_attention/tik_ops_utils.py +435 -0
- mindspore/ops/_op_impl/_custom_op/flash_attention/tiling_strategy/__init__.py +0 -0
- mindspore/ops/_op_impl/_custom_op/flash_attention/tiling_strategy/sparse_tiling.py +45 -0
- mindspore/ops/_op_impl/_custom_op/flash_attention/tiling_strategy/strategy.py +67 -0
- mindspore/ops/_op_impl/_custom_op/flash_attention/tiling_strategy/wukong_tiling.py +62 -0
- mindspore/ops/_op_impl/_custom_op/matmul_cube_dense_left_impl.py +2 -2
- mindspore/ops/_op_impl/aicpu/__init__.py +41 -1
- mindspore/ops/_op_impl/aicpu/adaptive_max_pool_2d.py +37 -0
- mindspore/ops/_op_impl/aicpu/bias_add_grad.py +0 -1
- mindspore/ops/_op_impl/aicpu/cast.py +52 -0
- mindspore/ops/_op_impl/aicpu/coalesce.py +2 -0
- mindspore/ops/_op_impl/aicpu/col2im.py +3 -1
- mindspore/ops/_op_impl/aicpu/count_nonzero.py +43 -0
- mindspore/ops/_op_impl/aicpu/dropout_genmask.py +6 -0
- mindspore/ops/_op_impl/aicpu/eps.py +32 -0
- mindspore/ops/_op_impl/aicpu/eye.py +4 -4
- mindspore/ops/_op_impl/aicpu/fft_with_size.py +6 -0
- mindspore/ops/_op_impl/aicpu/fill_diagonal.py +5 -0
- mindspore/ops/_op_impl/aicpu/gamma.py +2 -2
- mindspore/ops/_op_impl/aicpu/im2col.py +3 -5
- mindspore/ops/_op_impl/aicpu/lgamma.py +1 -0
- mindspore/ops/_op_impl/aicpu/log_uniform_candidate_sampler.py +6 -3
- mindspore/ops/_op_impl/aicpu/lu.py +39 -0
- mindspore/ops/_op_impl/aicpu/lu_unpack_grad.py +0 -1
- mindspore/ops/_op_impl/aicpu/masked_scatter.py +1 -0
- mindspore/ops/_op_impl/aicpu/masked_select_grad.py +3 -0
- mindspore/ops/_op_impl/aicpu/matrix_band_part.py +59 -0
- mindspore/ops/_op_impl/aicpu/matrix_power.py +6 -1
- mindspore/ops/_op_impl/aicpu/median.py +1 -0
- mindspore/ops/_op_impl/aicpu/multinomial.py +9 -9
- mindspore/ops/_op_impl/aicpu/not_equal.py +0 -5
- mindspore/ops/_op_impl/aicpu/pad_v3.py +3 -1
- mindspore/ops/_op_impl/aicpu/pad_v3_grad.py +2 -0
- mindspore/ops/_op_impl/aicpu/parameterized_truncated_normal.py +15 -7
- mindspore/ops/_op_impl/aicpu/random_categorical.py +39 -19
- mindspore/ops/_op_impl/aicpu/random_choice_with_mask.py +5 -2
- mindspore/ops/_op_impl/aicpu/random_poisson.py +103 -52
- mindspore/ops/_op_impl/aicpu/random_shuffle.py +17 -15
- mindspore/ops/_op_impl/aicpu/resize_bilinear_grad.py +0 -1
- mindspore/ops/_op_impl/aicpu/resize_nearest_neighbor_v2.py +0 -6
- mindspore/ops/_op_impl/aicpu/resize_nearest_neighbor_v2_grad.py +0 -7
- mindspore/ops/_op_impl/aicpu/scatter_nd.py +2 -0
- mindspore/ops/_op_impl/aicpu/sequence_concat.py +40 -0
- mindspore/ops/_op_impl/aicpu/sequence_stack.py +40 -0
- mindspore/ops/_op_impl/aicpu/{sparseaddmm.py → sparse_addmm.py} +2 -2
- mindspore/ops/_op_impl/aicpu/{sparsesparsemaximum.py → sparse_sparse_maximum.py} +4 -4
- mindspore/ops/_op_impl/aicpu/standard_laplace.py +5 -4
- mindspore/ops/_op_impl/aicpu/standard_normal.py +5 -4
- mindspore/ops/_op_impl/aicpu/truncated_normal.py +9 -7
- mindspore/ops/_op_impl/aicpu/uniform.py +5 -3
- mindspore/ops/_op_impl/aicpu/uniform_candidate_sampler.py +8 -4
- mindspore/ops/_op_impl/aicpu/uniform_int.py +5 -5
- mindspore/ops/_op_impl/aicpu/uniform_real.py +4 -4
- mindspore/ops/_op_impl/aicpu/upsample_nearest_3d.py +14 -6
- mindspore/ops/_op_impl/aicpu/upsample_nearest_3d_grad.py +22 -8
- mindspore/ops/_op_impl/aicpu/upsample_trilinear_3d.py +11 -6
- mindspore/ops/_op_impl/aicpu/upsample_trilinear_3d_grad.py +21 -10
- mindspore/ops/_op_impl/tbe/__init__.py +6 -4
- mindspore/ops/_op_impl/tbe/atomic_addr_clean.py +1 -1
- mindspore/ops/_op_impl/tbe/avg_pool.py +2 -2
- mindspore/ops/_op_impl/tbe/avg_pool_3d.py +3 -3
- mindspore/ops/_op_impl/tbe/avg_pool_3d_grad.py +4 -4
- mindspore/ops/_op_impl/tbe/avg_pool_ds.py +2 -2
- mindspore/ops/_op_impl/tbe/avg_pool_grad.py +3 -3
- mindspore/ops/_op_impl/tbe/avg_pool_grad_vm.py +3 -3
- mindspore/ops/_op_impl/tbe/batch_to_space.py +1 -1
- mindspore/ops/_op_impl/tbe/batch_to_space_nd.py +2 -2
- mindspore/ops/_op_impl/tbe/bn_infer.py +2 -2
- mindspore/ops/_op_impl/tbe/bn_infer_ds.py +3 -2
- mindspore/ops/_op_impl/tbe/broadcast_to.py +1 -1
- mindspore/ops/_op_impl/tbe/depthwise_conv2d.py +3 -3
- mindspore/ops/_op_impl/tbe/expand_dims.py +1 -1
- mindspore/ops/_op_impl/tbe/gather_v2.py +56 -0
- mindspore/ops/_op_impl/tbe/im2col.py +4 -4
- mindspore/ops/_op_impl/tbe/inplace_index_add.py +7 -3
- mindspore/ops/_op_impl/tbe/mem_set.py +38 -0
- mindspore/ops/_op_impl/tbe/scatter_nd_add.py +3 -0
- mindspore/ops/_op_impl/tbe/scatter_nd_d.py +1 -1
- mindspore/ops/_op_impl/tbe/space_to_batch.py +1 -1
- mindspore/ops/_op_impl/tbe/space_to_batch_nd.py +2 -2
- mindspore/ops/_op_impl/tbe/trans_data_ds.py +2 -0
- mindspore/ops/_primitive_cache.py +1 -1
- mindspore/ops/_tracefunc.py +241 -0
- mindspore/ops/_utils/utils.py +10 -2
- mindspore/ops/_vmap/vmap_array_ops.py +5 -3
- mindspore/ops/_vmap/vmap_base.py +5 -4
- mindspore/ops/_vmap/vmap_convolution_ops.py +1 -1
- mindspore/ops/_vmap/vmap_grad_math_ops.py +6 -4
- mindspore/ops/_vmap/vmap_grad_nn_ops.py +11 -6
- mindspore/ops/_vmap/vmap_math_ops.py +5 -2
- mindspore/ops/_vmap/vmap_nn_ops.py +135 -11
- mindspore/ops/arg_dtype_cast.py +54 -0
- mindspore/ops/composite/__init__.py +7 -5
- mindspore/ops/composite/base.py +78 -34
- mindspore/ops/composite/math_ops.py +5 -695
- mindspore/ops/composite/multitype_ops/_compile_utils.py +403 -97
- mindspore/ops/composite/multitype_ops/_constexpr_utils.py +28 -22
- mindspore/ops/composite/multitype_ops/add_impl.py +69 -7
- mindspore/ops/composite/multitype_ops/bitwise_and_impl.py +2 -1
- mindspore/ops/composite/multitype_ops/bitwise_or_impl.py +2 -1
- mindspore/ops/composite/multitype_ops/bitwise_xor_impl.py +2 -0
- mindspore/ops/composite/multitype_ops/div_impl.py +1 -0
- mindspore/ops/composite/multitype_ops/floordiv_impl.py +1 -0
- mindspore/ops/composite/multitype_ops/getitem_impl.py +48 -10
- mindspore/ops/composite/multitype_ops/greater_equal_impl.py +2 -0
- mindspore/ops/composite/multitype_ops/greater_impl.py +2 -0
- mindspore/ops/composite/multitype_ops/left_shift_impl.py +2 -0
- mindspore/ops/composite/multitype_ops/less_equal_impl.py +2 -0
- mindspore/ops/composite/multitype_ops/less_impl.py +2 -0
- mindspore/ops/composite/multitype_ops/logic_not_impl.py +2 -2
- mindspore/ops/composite/multitype_ops/mod_impl.py +1 -0
- mindspore/ops/composite/multitype_ops/mul_impl.py +1 -0
- mindspore/ops/composite/multitype_ops/negative_impl.py +1 -0
- mindspore/ops/composite/multitype_ops/not_in_impl.py +1 -0
- mindspore/ops/composite/multitype_ops/ones_like_impl.py +6 -0
- mindspore/ops/composite/multitype_ops/pow_impl.py +1 -0
- mindspore/ops/composite/multitype_ops/right_shift_impl.py +2 -0
- mindspore/ops/composite/multitype_ops/setitem_impl.py +10 -7
- mindspore/ops/composite/multitype_ops/sub_impl.py +1 -0
- mindspore/ops/composite/multitype_ops/uadd_impl.py +2 -0
- mindspore/ops/composite/multitype_ops/zeros_like_impl.py +9 -0
- mindspore/ops/deprecated.py +304 -0
- mindspore/ops/function/__init__.py +41 -4
- mindspore/ops/function/array_func.py +1108 -467
- mindspore/ops/function/clip_func.py +94 -27
- mindspore/ops/function/debug_func.py +3 -1
- mindspore/ops/function/grad/grad_func.py +82 -73
- mindspore/ops/function/image_func.py +28 -12
- mindspore/ops/function/linalg_func.py +135 -39
- mindspore/ops/function/math_func.py +3779 -894
- mindspore/ops/function/nn_func.py +1584 -657
- mindspore/ops/function/parameter_func.py +13 -3
- mindspore/ops/function/random_func.py +247 -153
- mindspore/ops/function/sparse_func.py +14 -11
- mindspore/ops/function/sparse_unary_func.py +173 -47
- mindspore/ops/function/spectral_func.py +8 -4
- mindspore/ops/function/vmap_func.py +8 -7
- mindspore/ops/functional.py +47 -16
- mindspore/ops/op_info_register.py +346 -86
- mindspore/ops/operations/__init__.py +38 -22
- mindspore/ops/operations/_grad_ops.py +145 -149
- mindspore/ops/operations/_inner_ops.py +298 -56
- mindspore/ops/operations/_ms_kernel.py +3 -3
- mindspore/ops/operations/_quant_ops.py +24 -28
- mindspore/ops/operations/_rl_inner_ops.py +9 -7
- mindspore/ops/operations/_scalar_ops.py +115 -0
- mindspore/ops/operations/_sequence_ops.py +148 -10
- mindspore/ops/operations/_tensor_array.py +1 -1
- mindspore/ops/operations/_thor_ops.py +2 -2
- mindspore/ops/operations/array_ops.py +1239 -561
- mindspore/ops/operations/comm_ops.py +166 -90
- mindspore/ops/operations/control_ops.py +3 -3
- mindspore/ops/operations/custom_ops.py +124 -102
- mindspore/ops/operations/debug_ops.py +24 -11
- mindspore/ops/operations/image_ops.py +86 -71
- mindspore/ops/operations/inner_ops.py +18 -13
- mindspore/ops/operations/linalg_ops.py +30 -11
- mindspore/ops/operations/math_ops.py +1730 -435
- mindspore/ops/operations/nn_ops.py +1953 -943
- mindspore/ops/operations/other_ops.py +65 -43
- mindspore/ops/operations/random_ops.py +258 -98
- mindspore/ops/operations/rl_ops.py +4 -36
- mindspore/ops/operations/sparse_ops.py +38 -33
- mindspore/ops/operations/spectral_ops.py +8 -4
- mindspore/ops/primitive.py +66 -44
- mindspore/ops/signature.py +5 -5
- mindspore/parallel/_auto_parallel_context.py +80 -19
- mindspore/parallel/_cost_model_context.py +42 -0
- mindspore/parallel/_offload_context.py +162 -72
- mindspore/parallel/_parallel_serialization.py +2 -2
- mindspore/parallel/_ps_context.py +16 -4
- mindspore/parallel/_recovery_context.py +2 -1
- mindspore/parallel/_tensor.py +15 -13
- mindspore/parallel/_transformer/layers.py +8 -6
- mindspore/parallel/_transformer/loss.py +1 -0
- mindspore/parallel/_transformer/moe.py +7 -7
- mindspore/parallel/_transformer/op_parallel_config.py +12 -1
- mindspore/parallel/_transformer/transformer.py +34 -14
- mindspore/parallel/_utils.py +36 -14
- mindspore/parallel/algo_parameter_config.py +114 -20
- mindspore/parallel/checkpoint_transform.py +16 -18
- mindspore/parallel/shard.py +16 -13
- mindspore/profiler/__init__.py +1 -1
- mindspore/profiler/common/struct_type.py +3 -3
- mindspore/profiler/common/util.py +3 -2
- mindspore/profiler/envprofiling.py +11 -4
- mindspore/profiler/parser/aicpu_data_parser.py +5 -3
- mindspore/profiler/parser/ascend_flops_generator.py +94 -0
- mindspore/profiler/parser/ascend_fpbp_generator.py +76 -0
- mindspore/profiler/parser/ascend_hccl_generator.py +288 -0
- mindspore/profiler/parser/ascend_msprof_exporter.py +213 -0
- mindspore/profiler/parser/ascend_msprof_generator.py +199 -0
- mindspore/profiler/parser/ascend_op_generator.py +276 -0
- mindspore/profiler/parser/ascend_steptrace_generator.py +94 -0
- mindspore/profiler/parser/ascend_timeline_generator.py +110 -54
- mindspore/profiler/parser/base_timeline_generator.py +11 -7
- mindspore/profiler/parser/cpu_gpu_timeline_generator.py +45 -46
- mindspore/profiler/parser/flops_parser.py +15 -11
- mindspore/profiler/parser/framework_parser.py +92 -73
- mindspore/profiler/parser/hccl_parser.py +16 -12
- mindspore/profiler/parser/integrator.py +22 -11
- mindspore/profiler/parser/memory_usage_parser.py +36 -11
- mindspore/profiler/parser/minddata_analyzer.py +12 -14
- mindspore/profiler/parser/minddata_pipeline_parser.py +1 -1
- mindspore/profiler/parser/msadvisor_parser.py +8 -4
- mindspore/profiler/parser/op_intermediate_parser.py +5 -2
- mindspore/profiler/parser/optime_parser.py +1 -1
- mindspore/profiler/parser/profiler_info.py +4 -5
- mindspore/profiler/parser/step_trace_parser.py +11 -14
- mindspore/profiler/profiling.py +678 -377
- mindspore/rewrite/api/node.py +211 -54
- mindspore/rewrite/api/node_type.py +5 -0
- mindspore/rewrite/api/pattern_engine.py +22 -23
- mindspore/rewrite/api/scoped_value.py +20 -17
- mindspore/rewrite/api/symbol_tree.py +252 -106
- mindspore/rewrite/api/tree_node_helper.py +3 -0
- mindspore/rewrite/ast_helpers/__init__.py +2 -1
- mindspore/rewrite/ast_helpers/ast_finder.py +129 -0
- mindspore/rewrite/ast_helpers/ast_modifier.py +116 -104
- mindspore/rewrite/ast_transformers/flatten_recursive_stmt.py +97 -46
- mindspore/rewrite/common/rewrite_elog.py +5 -1
- mindspore/rewrite/namer.py +51 -51
- mindspore/rewrite/namespace.py +14 -5
- mindspore/{ops/bprop_mindir → rewrite/node}/__init__.py +9 -4
- mindspore/rewrite/node/call_function.py +79 -0
- mindspore/rewrite/node/cell_container.py +135 -0
- mindspore/rewrite/node/control_flow.py +88 -0
- mindspore/rewrite/{node.py → node/node.py} +313 -247
- mindspore/rewrite/node/node_manager.py +254 -0
- mindspore/rewrite/node/node_topological_manager.py +243 -0
- mindspore/rewrite/parsers/arguments_parser.py +22 -21
- mindspore/rewrite/parsers/assign_parser.py +225 -239
- mindspore/rewrite/parsers/attribute_parser.py +9 -7
- mindspore/rewrite/parsers/class_def_parser.py +179 -218
- mindspore/rewrite/parsers/constant_parser.py +9 -6
- mindspore/rewrite/parsers/container_parser.py +9 -7
- mindspore/rewrite/parsers/for_parser.py +36 -15
- mindspore/rewrite/parsers/function_def_parser.py +23 -20
- mindspore/rewrite/parsers/if_parser.py +28 -24
- mindspore/rewrite/parsers/module_parser.py +202 -25
- mindspore/rewrite/{parser.py → parsers/parser.py} +4 -2
- mindspore/rewrite/{parser_register.py → parsers/parser_register.py} +1 -1
- mindspore/rewrite/parsers/return_parser.py +6 -6
- mindspore/rewrite/sparsify/sparse_transformer.py +12 -3
- mindspore/rewrite/sparsify/sparsify.py +4 -1
- mindspore/rewrite/sparsify/utils.py +11 -5
- mindspore/rewrite/symbol_tree.py +577 -732
- mindspore/rewrite/symbol_tree_builder.py +9 -175
- mindspore/rewrite/symbol_tree_dumper.py +2 -2
- mindspore/run_check/_check_version.py +46 -39
- mindspore/run_check/run_check.py +3 -2
- mindspore/{scipy/sparse → safeguard}/__init__.py +4 -5
- mindspore/safeguard/rewrite_obfuscation.py +517 -0
- mindspore/scipy/__init__.py +1 -1
- mindspore/scipy/linalg.py +67 -61
- mindspore/scipy/ops.py +5 -41
- mindspore/scipy/ops_grad.py +3 -2
- mindspore/scipy/ops_wrapper.py +5 -5
- mindspore/scipy/optimize/line_search.py +8 -8
- mindspore/scipy/optimize/linear_sum_assignment.py +4 -4
- mindspore/scipy/optimize/minimize.py +16 -12
- mindspore/scipy/utils.py +1 -52
- mindspore/scipy/utils_const.py +4 -4
- mindspore/train/__init__.py +4 -4
- mindspore/train/_utils.py +13 -5
- mindspore/train/amp.py +410 -148
- mindspore/train/anf_ir_pb2.py +16 -4
- mindspore/train/callback/_backup_and_restore.py +8 -11
- mindspore/train/callback/_callback.py +80 -3
- mindspore/train/callback/_checkpoint.py +82 -51
- mindspore/train/callback/_early_stop.py +12 -15
- mindspore/train/callback/_history.py +1 -1
- mindspore/train/callback/_lambda_callback.py +13 -13
- mindspore/train/callback/_landscape.py +21 -17
- mindspore/train/callback/_loss_monitor.py +9 -10
- mindspore/train/callback/_on_request_exit.py +16 -33
- mindspore/train/callback/_reduce_lr_on_plateau.py +21 -24
- mindspore/train/callback/_summary_collector.py +44 -30
- mindspore/train/callback/_time_monitor.py +62 -12
- mindspore/train/data_sink.py +10 -16
- mindspore/train/dataset_helper.py +154 -86
- mindspore/train/loss_scale_manager.py +14 -9
- mindspore/train/metrics/__init__.py +10 -2
- mindspore/train/metrics/accuracy.py +1 -1
- mindspore/train/metrics/auc.py +1 -1
- mindspore/train/metrics/bleu_score.py +2 -2
- mindspore/train/metrics/confusion_matrix.py +14 -14
- mindspore/train/metrics/cosine_similarity.py +3 -3
- mindspore/train/metrics/dice.py +1 -1
- mindspore/train/metrics/fbeta.py +1 -1
- mindspore/train/metrics/hausdorff_distance.py +8 -6
- mindspore/train/metrics/mean_surface_distance.py +5 -4
- mindspore/train/metrics/metric.py +49 -17
- mindspore/train/metrics/occlusion_sensitivity.py +4 -4
- mindspore/train/metrics/perplexity.py +1 -1
- mindspore/train/metrics/precision.py +2 -2
- mindspore/train/metrics/recall.py +2 -3
- mindspore/train/metrics/roc.py +7 -7
- mindspore/train/metrics/root_mean_square_surface_distance.py +5 -4
- mindspore/train/metrics/topk.py +7 -4
- mindspore/train/mind_ir_pb2.py +193 -48
- mindspore/train/model.py +377 -133
- mindspore/train/serialization.py +697 -245
- mindspore/train/summary/_summary_adapter.py +5 -2
- mindspore/train/summary/_writer_pool.py +4 -3
- mindspore/train/summary/summary_record.py +25 -23
- mindspore/train/train_thor/convert_utils.py +39 -23
- mindspore/train/train_thor/dataset_helper.py +4 -3
- mindspore/train/train_thor/model_thor.py +8 -8
- mindspore/version.py +1 -1
- {mindspore-2.0.0rc1.dist-info → mindspore-2.2.0.dist-info}/METADATA +7 -8
- {mindspore-2.0.0rc1.dist-info → mindspore-2.2.0.dist-info}/RECORD +647 -818
- {mindspore-2.0.0rc1.dist-info → mindspore-2.2.0.dist-info}/entry_points.txt +0 -1
- mindspore/_akg/akg/tvm/contrib/debugger/__init__.py +0 -16
- mindspore/_akg/akg/tvm/contrib/debugger/debug_result.py +0 -274
- mindspore/_akg/akg/tvm/contrib/debugger/debug_runtime.py +0 -259
- mindspore/_akg/akg/tvm/contrib/peak.py +0 -341
- mindspore/_akg/akg/tvm/contrib/rpc.py +0 -25
- mindspore/_akg/akg/tvm/contrib/xcode.py +0 -257
- mindspore/_akg/akg/tvm/exec/__init__.py +0 -17
- mindspore/_akg/akg/tvm/exec/autotvm_log_editor.py +0 -60
- mindspore/_akg/akg/tvm/exec/measure_peak.py +0 -48
- mindspore/_akg/akg/tvm/exec/query_rpc_tracker.py +0 -48
- mindspore/_akg/akg/tvm/exec/rpc_proxy.py +0 -98
- mindspore/_akg/akg/tvm/exec/rpc_server.py +0 -88
- mindspore/_akg/akg/tvm/exec/rpc_tracker.py +0 -62
- mindspore/_akg/akg/tvm/rpc/__init__.py +0 -29
- mindspore/_akg/akg/tvm/rpc/base.py +0 -182
- mindspore/_akg/akg/tvm/rpc/client.py +0 -436
- mindspore/_akg/akg/tvm/rpc/proxy.py +0 -595
- mindspore/_akg/akg/tvm/rpc/server.py +0 -413
- mindspore/_akg/akg/tvm/rpc/tornado_util.py +0 -121
- mindspore/_akg/akg/tvm/rpc/tracker.py +0 -431
- mindspore/_extends/graph_kernel/expander.py +0 -80
- mindspore/_extends/graph_kernel/expanders/__init__.py +0 -57
- mindspore/_extends/graph_kernel/expanders/_utils.py +0 -269
- mindspore/_extends/graph_kernel/expanders/addn.py +0 -33
- mindspore/_extends/graph_kernel/expanders/batchnorm.py +0 -152
- mindspore/_extends/graph_kernel/expanders/batchnorm_grad.py +0 -105
- mindspore/_extends/graph_kernel/expanders/bias_add_grad.py +0 -49
- mindspore/_extends/graph_kernel/expanders/clip_by_norm_no_div_sum.py +0 -33
- mindspore/_extends/graph_kernel/expanders/complex/abs.py +0 -30
- mindspore/_extends/graph_kernel/expanders/complex/add.py +0 -44
- mindspore/_extends/graph_kernel/expanders/complex/div.py +0 -62
- mindspore/_extends/graph_kernel/expanders/complex/mul.py +0 -52
- mindspore/_extends/graph_kernel/expanders/complex/real_div.py +0 -62
- mindspore/_extends/graph_kernel/expanders/complex/sub.py +0 -45
- mindspore/_extends/graph_kernel/expanders/conv2d.py +0 -200
- mindspore/_extends/graph_kernel/expanders/dropout_grad.py +0 -30
- mindspore/_extends/graph_kernel/expanders/equal_count.py +0 -50
- mindspore/_extends/graph_kernel/expanders/erfc.py +0 -35
- mindspore/_extends/graph_kernel/expanders/expand_dims.py +0 -50
- mindspore/_extends/graph_kernel/expanders/fused_adam.py +0 -44
- mindspore/_extends/graph_kernel/expanders/fused_adam_weight_decay.py +0 -47
- mindspore/_extends/graph_kernel/expanders/fused_mul_add.py +0 -28
- mindspore/_extends/graph_kernel/expanders/gather.py +0 -43
- mindspore/_extends/graph_kernel/expanders/gelu_grad.py +0 -70
- mindspore/_extends/graph_kernel/expanders/gkdropout.py +0 -40
- mindspore/_extends/graph_kernel/expanders/identity.py +0 -25
- mindspore/_extends/graph_kernel/expanders/layernorm.py +0 -93
- mindspore/_extends/graph_kernel/expanders/layernorm_grad.py +0 -113
- mindspore/_extends/graph_kernel/expanders/logsoftmax.py +0 -46
- mindspore/_extends/graph_kernel/expanders/logsoftmax_grad.py +0 -36
- mindspore/_extends/graph_kernel/expanders/matmul.py +0 -80
- mindspore/_extends/graph_kernel/expanders/maximum_grad.py +0 -59
- mindspore/_extends/graph_kernel/expanders/minimum_grad.py +0 -80
- mindspore/_extends/graph_kernel/expanders/oneslike.py +0 -26
- mindspore/_extends/graph_kernel/expanders/reduce_mean.py +0 -43
- mindspore/_extends/graph_kernel/expanders/relu_grad.py +0 -32
- mindspore/_extends/graph_kernel/expanders/sigmoid_cross_entropy_with_logits.py +0 -41
- mindspore/_extends/graph_kernel/expanders/sigmoid_cross_entropy_with_logits_grad.py +0 -35
- mindspore/_extends/graph_kernel/expanders/sigmoid_grad.py +0 -31
- mindspore/_extends/graph_kernel/expanders/slice.py +0 -35
- mindspore/_extends/graph_kernel/expanders/softmax_cross_entropy_with_logits.py +0 -42
- mindspore/_extends/graph_kernel/expanders/softmax_grad_ext.py +0 -41
- mindspore/_extends/graph_kernel/expanders/softsign.py +0 -28
- mindspore/_extends/graph_kernel/expanders/sqrt_grad.py +0 -29
- mindspore/_extends/graph_kernel/expanders/square_sum_all.py +0 -44
- mindspore/_extends/graph_kernel/expanders/square_sum_v1.py +0 -37
- mindspore/_extends/graph_kernel/expanders/squared_difference.py +0 -43
- mindspore/_extends/graph_kernel/expanders/tanh_grad.py +0 -31
- mindspore/_extends/graph_kernel/expanders/tile.py +0 -54
- mindspore/_extends/graph_kernel/model/op_infer.py +0 -506
- mindspore/_extends/parse/jit_fallback_modules.py +0 -51
- mindspore/dataset/datapreprocess/preprocess_imagenet_validate_dataset.py +0 -54
- mindspore/dataset/engine/graphdata.py +0 -1586
- mindspore/include/api/net.h +0 -142
- mindspore/ops/_grad/grad_array_ops.py +0 -1347
- mindspore/ops/_grad/grad_clip_ops.py +0 -84
- mindspore/ops/_grad/grad_debug_ops.py +0 -68
- mindspore/ops/_grad/grad_inner_ops.py +0 -235
- mindspore/ops/_grad/grad_math_ops.py +0 -1684
- mindspore/ops/_grad/grad_nn_ops.py +0 -1529
- mindspore/ops/_grad/grad_other_ops.py +0 -89
- mindspore/ops/_grad/grad_sequence_ops.py +0 -296
- mindspore/ops/_grad/grad_sparse.py +0 -323
- mindspore/ops/_grad_experimental/grad_image_ops.py +0 -249
- mindspore/ops/_grad_experimental/grad_linalg_ops.py +0 -195
- mindspore/ops/_grad_experimental/grad_scalar_ops.py +0 -112
- mindspore/ops/bprop_mindir/AdaptiveAvgPool2D_bprop.mindir +0 -0
- mindspore/ops/bprop_mindir/AdaptiveMaxPool2D_bprop.mindir +0 -0
- mindspore/ops/bprop_mindir/ApproximateEqual_bprop.mindir +0 -19
- mindspore/ops/bprop_mindir/Argmax_bprop.mindir +0 -15
- mindspore/ops/bprop_mindir/Argmin_bprop.mindir +0 -15
- mindspore/ops/bprop_mindir/AssignSub_bprop.mindir +0 -19
- mindspore/ops/bprop_mindir/Assign_bprop.mindir +0 -17
- mindspore/ops/bprop_mindir/AvgPool3D_bprop.mindir +0 -150
- mindspore/ops/bprop_mindir/AvgPool_bprop.mindir +0 -66
- mindspore/ops/bprop_mindir/BCEWithLogitsLoss_bprop.mindir +0 -0
- mindspore/ops/bprop_mindir/BNTrainingReduce_bprop.mindir +0 -15
- mindspore/ops/bprop_mindir/BatchNormGrad_bprop.mindir +0 -0
- mindspore/ops/bprop_mindir/BatchToSpaceND_bprop.mindir +0 -28
- mindspore/ops/bprop_mindir/BiasAddGrad_bprop.mindir +0 -0
- mindspore/ops/bprop_mindir/BinaryCrossEntropy_bprop.mindir +0 -33
- mindspore/ops/bprop_mindir/BroadcastTo_bprop.mindir +0 -306
- mindspore/ops/bprop_mindir/Broadcast_bprop.mindir +0 -13
- mindspore/ops/bprop_mindir/CTCLoss_bprop.mindir +0 -0
- mindspore/ops/bprop_mindir/Concat_bprop.mindir +0 -0
- mindspore/ops/bprop_mindir/Conv2DBackpropFilter_bprop.mindir +0 -240
- mindspore/ops/bprop_mindir/Conv2DBackpropInput_bprop.mindir +0 -247
- mindspore/ops/bprop_mindir/Conv2DTranspose_bprop.mindir +0 -247
- mindspore/ops/bprop_mindir/Conv3DTranspose_bprop.mindir +0 -315
- mindspore/ops/bprop_mindir/Conv3D_bprop.mindir +0 -278
- mindspore/ops/bprop_mindir/DType_bprop.mindir +0 -14
- mindspore/ops/bprop_mindir/DeformableOffsets_bprop.mindir +0 -58
- mindspore/ops/bprop_mindir/Depend_bprop.mindir +0 -13
- mindspore/ops/bprop_mindir/DepthToSpace_bprop.mindir +0 -23
- mindspore/ops/bprop_mindir/DepthwiseConv2dNative_bprop.mindir +0 -138
- mindspore/ops/bprop_mindir/DiagPart_bprop.mindir +0 -15
- mindspore/ops/bprop_mindir/Dropout2D_bprop.mindir +0 -0
- mindspore/ops/bprop_mindir/Dropout3D_bprop.mindir +0 -0
- mindspore/ops/bprop_mindir/DropoutDoMask_bprop.mindir +0 -25
- mindspore/ops/bprop_mindir/DropoutGenMask_bprop.mindir +0 -18
- mindspore/ops/bprop_mindir/DropoutGrad_bprop.mindir +0 -27
- mindspore/ops/bprop_mindir/Dropout_bprop.mindir +0 -0
- mindspore/ops/bprop_mindir/DynamicGRUV2_bprop.mindir +0 -0
- mindspore/ops/bprop_mindir/DynamicRNN_bprop.mindir +0 -0
- mindspore/ops/bprop_mindir/DynamicShape_bprop.mindir +0 -14
- mindspore/ops/bprop_mindir/Elu_bprop.mindir +0 -16
- mindspore/ops/bprop_mindir/EmbeddingLookup_bprop.mindir +0 -0
- mindspore/ops/bprop_mindir/Equal_bprop.mindir +0 -19
- mindspore/ops/bprop_mindir/ExpandDims_bprop.mindir +0 -58
- mindspore/ops/bprop_mindir/FastGeLU_bprop.mindir +0 -16
- mindspore/ops/bprop_mindir/Flatten_bprop.mindir +0 -54
- mindspore/ops/bprop_mindir/FloorDiv_bprop.mindir +0 -19
- mindspore/ops/bprop_mindir/GatherD_bprop.mindir +0 -26
- mindspore/ops/bprop_mindir/GatherNd_bprop.mindir +0 -57
- mindspore/ops/bprop_mindir/Gather_bprop.mindir +0 -0
- mindspore/ops/bprop_mindir/GreaterEqual_bprop.mindir +0 -19
- mindspore/ops/bprop_mindir/Greater_bprop.mindir +0 -19
- mindspore/ops/bprop_mindir/HSigmoid_bprop.mindir +0 -16
- mindspore/ops/bprop_mindir/HSwish_bprop.mindir +0 -16
- mindspore/ops/bprop_mindir/IOU_bprop.mindir +0 -19
- mindspore/ops/bprop_mindir/InstanceNorm_bprop.mindir +0 -0
- mindspore/ops/bprop_mindir/IsFinite_bprop.mindir +0 -15
- mindspore/ops/bprop_mindir/IsInf_bprop.mindir +0 -15
- mindspore/ops/bprop_mindir/IsNan_bprop.mindir +0 -15
- mindspore/ops/bprop_mindir/KLDivLoss_bprop.mindir +0 -126
- mindspore/ops/bprop_mindir/L2Loss_bprop.mindir +0 -15
- mindspore/ops/bprop_mindir/L2Normalize_bprop.mindir +0 -30
- mindspore/ops/bprop_mindir/LRN_bprop.mindir +0 -43
- mindspore/ops/bprop_mindir/LayerNormGrad_bprop.mindir +0 -0
- mindspore/ops/bprop_mindir/LessEqual_bprop.mindir +0 -19
- mindspore/ops/bprop_mindir/Less_bprop.mindir +0 -19
- mindspore/ops/bprop_mindir/LinSpace_bprop.mindir +0 -23
- mindspore/ops/bprop_mindir/Load_bprop.mindir +0 -13
- mindspore/ops/bprop_mindir/LogSoftmax_bprop.mindir +0 -23
- mindspore/ops/bprop_mindir/LogicalAnd_bprop.mindir +0 -19
- mindspore/ops/bprop_mindir/LogicalNot_bprop.mindir +0 -15
- mindspore/ops/bprop_mindir/MaskedSelect_bprop.mindir +0 -21
- mindspore/ops/bprop_mindir/MaxPool3DGradGrad_bprop.mindir +0 -74
- mindspore/ops/bprop_mindir/MaxPool3DGrad_bprop.mindir +0 -74
- mindspore/ops/bprop_mindir/MaxPool3D_bprop.mindir +0 -75
- mindspore/ops/bprop_mindir/MaxPoolGradGrad_bprop.mindir +0 -65
- mindspore/ops/bprop_mindir/MaxPoolWithArgmax_bprop.mindir +0 -0
- mindspore/ops/bprop_mindir/Maximum_bprop.mindir +0 -0
- mindspore/ops/bprop_mindir/Minimum_bprop.mindir +0 -0
- mindspore/ops/bprop_mindir/MirrorPad_bprop.mindir +0 -27
- mindspore/ops/bprop_mindir/Mish_bprop.mindir +0 -35
- mindspore/ops/bprop_mindir/MulNoNan_bprop.mindir +0 -0
- mindspore/ops/bprop_mindir/NLLLoss_bprop.mindir +0 -0
- mindspore/ops/bprop_mindir/NonZero_bprop.mindir +0 -14
- mindspore/ops/bprop_mindir/NotEqual_bprop.mindir +0 -19
- mindspore/ops/bprop_mindir/OneHot_bprop.mindir +0 -26
- mindspore/ops/bprop_mindir/OnesLike_bprop.mindir +0 -14
- mindspore/ops/bprop_mindir/PReLU_bprop.mindir +0 -0
- mindspore/ops/bprop_mindir/Pad_bprop.mindir +0 -0
- mindspore/ops/bprop_mindir/Padding_bprop.mindir +0 -0
- mindspore/ops/bprop_mindir/RNNTLoss_bprop.mindir +0 -29
- mindspore/ops/bprop_mindir/ROIAlign_bprop.mindir +0 -82
- mindspore/ops/bprop_mindir/Range_bprop.mindir +0 -22
- mindspore/ops/bprop_mindir/Rank_bprop.mindir +0 -14
- mindspore/ops/bprop_mindir/ReLU6_bprop.mindir +0 -16
- mindspore/ops/bprop_mindir/ReLUV2_bprop.mindir +0 -0
- mindspore/ops/bprop_mindir/ReduceAll_bprop.mindir +0 -19
- mindspore/ops/bprop_mindir/ReduceAny_bprop.mindir +0 -19
- mindspore/ops/bprop_mindir/ReluGrad_bprop.mindir +0 -20
- mindspore/ops/bprop_mindir/Reshape_bprop.mindir +0 -60
- mindspore/ops/bprop_mindir/ResizeBilinear_bprop.mindir +0 -29
- mindspore/ops/bprop_mindir/ResizeNearestNeighbor_bprop.mindir +0 -89
- mindspore/ops/bprop_mindir/ReverseSequence_bprop.mindir +0 -52
- mindspore/ops/bprop_mindir/ReverseV2_bprop.mindir +0 -22
- mindspore/ops/bprop_mindir/Round_bprop.mindir +0 -15
- mindspore/ops/bprop_mindir/ScatterMax_bprop.mindir +0 -0
- mindspore/ops/bprop_mindir/ScatterMin_bprop.mindir +0 -0
- mindspore/ops/bprop_mindir/ScatterNdUpdate_bprop.mindir +0 -22
- mindspore/ops/bprop_mindir/ScatterNd_bprop.mindir +0 -24
- mindspore/ops/bprop_mindir/ScatterNonAliasingAdd_bprop.mindir +0 -22
- mindspore/ops/bprop_mindir/ScatterUpdate_bprop.mindir +0 -0
- mindspore/ops/bprop_mindir/SeLU_bprop.mindir +0 -21
- mindspore/ops/bprop_mindir/Select_bprop.mindir +0 -31
- mindspore/ops/bprop_mindir/Shape_bprop.mindir +0 -14
- mindspore/ops/bprop_mindir/SigmoidCrossEntropyWithLogits_bprop.mindir +0 -21
- mindspore/ops/bprop_mindir/SigmoidGrad_bprop.mindir +0 -0
- mindspore/ops/bprop_mindir/Sigmoid_bprop.mindir +0 -16
- mindspore/ops/bprop_mindir/Sign_bprop.mindir +0 -15
- mindspore/ops/bprop_mindir/Slice_bprop.mindir +0 -26
- mindspore/ops/bprop_mindir/SmoothL1Loss_bprop.mindir +0 -36
- mindspore/ops/bprop_mindir/SoftmaxCrossEntropyWithLogits_bprop.mindir +0 -0
- mindspore/ops/bprop_mindir/Softplus_bprop.mindir +0 -16
- mindspore/ops/bprop_mindir/Softsign_bprop.mindir +0 -33
- mindspore/ops/bprop_mindir/Sort_bprop.mindir +0 -0
- mindspore/ops/bprop_mindir/SpaceToBatchND_bprop.mindir +0 -28
- mindspore/ops/bprop_mindir/SpaceToDepth_bprop.mindir +0 -23
- mindspore/ops/bprop_mindir/SparseGatherV2_bprop.mindir +0 -0
- mindspore/ops/bprop_mindir/SparseSoftmaxCrossEntropyWithLogits_bprop.mindir +0 -0
- mindspore/ops/bprop_mindir/Split_bprop.mindir +0 -22
- mindspore/ops/bprop_mindir/Squeeze_bprop.mindir +0 -54
- mindspore/ops/bprop_mindir/StridedSliceGrad_bprop.mindir +0 -95
- mindspore/ops/bprop_mindir/StridedSlice_bprop.mindir +0 -98
- mindspore/ops/bprop_mindir/Switch_bprop.mindir +0 -29
- mindspore/ops/bprop_mindir/TanhGrad_bprop.mindir +0 -0
- mindspore/ops/bprop_mindir/Tanh_bprop.mindir +0 -66
- mindspore/ops/bprop_mindir/TensorScatterAdd_bprop.mindir +0 -22
- mindspore/ops/bprop_mindir/TensorScatterUpdate_bprop.mindir +0 -29
- mindspore/ops/bprop_mindir/TensorShape_bprop.mindir +0 -14
- mindspore/ops/bprop_mindir/Tile_bprop.mindir +0 -0
- mindspore/ops/bprop_mindir/TopK_bprop.mindir +0 -0
- mindspore/ops/bprop_mindir/TransShape_bprop.mindir +0 -23
- mindspore/ops/bprop_mindir/TruncateDiv_bprop.mindir +0 -19
- mindspore/ops/bprop_mindir/TupleGetItem_bprop.mindir +0 -20
- mindspore/ops/bprop_mindir/Unique_bprop.mindir +0 -16
- mindspore/ops/bprop_mindir/Unstack_bprop.mindir +0 -22
- mindspore/ops/bprop_mindir/UpsampleNearest3D_bprop.mindir +0 -32
- mindspore/ops/bprop_mindir/UpsampleTrilinear3D_bprop.mindir +0 -38
- mindspore/ops/bprop_mindir/ZerosLike_bprop.mindir +0 -15
- mindspore/ops/bprop_mindir/generate_mindir.py +0 -114
- mindspore/rewrite/node_visitor.py +0 -44
- mindspore/rewrite/topological_manager.py +0 -203
- mindspore/scipy/sparse/linalg.py +0 -192
- {mindspore-2.0.0rc1.dist-info → mindspore-2.2.0.dist-info}/WHEEL +0 -0
- {mindspore-2.0.0rc1.dist-info → mindspore-2.2.0.dist-info}/top_level.txt +0 -0
|
@@ -30,18 +30,18 @@ class LambdaCallback(Callback):
|
|
|
30
30
|
This is an experimental API that is subject to change or deletion.
|
|
31
31
|
|
|
32
32
|
Args:
|
|
33
|
-
on_train_epoch_begin (Function): called at each train epoch begin.
|
|
34
|
-
on_train_epoch_end (Function): called at each train epoch end.
|
|
35
|
-
on_train_step_begin (Function): called at each train step begin.
|
|
36
|
-
on_train_step_end (Function): called at each train step end.
|
|
37
|
-
on_train_begin (Function): called at the beginning of model train.
|
|
38
|
-
on_train_end (Function): called at the end of model train.
|
|
39
|
-
on_eval_epoch_begin (Function): called at eval epoch begin.
|
|
40
|
-
on_eval_epoch_end (Function): called at eval epoch end.
|
|
41
|
-
on_eval_step_begin (Function): called at each eval step begin.
|
|
42
|
-
on_eval_step_end (Function): called at each eval step end.
|
|
43
|
-
on_eval_begin (Function): called at the beginning of model eval.
|
|
44
|
-
on_eval_end (Function): called at the end of model eval.
|
|
33
|
+
on_train_epoch_begin (Function): called at each train epoch begin. Default: ``None`` .
|
|
34
|
+
on_train_epoch_end (Function): called at each train epoch end. Default: ``None`` .
|
|
35
|
+
on_train_step_begin (Function): called at each train step begin. Default: ``None`` .
|
|
36
|
+
on_train_step_end (Function): called at each train step end. Default: ``None`` .
|
|
37
|
+
on_train_begin (Function): called at the beginning of model train. Default: ``None`` .
|
|
38
|
+
on_train_end (Function): called at the end of model train. Default: ``None`` .
|
|
39
|
+
on_eval_epoch_begin (Function): called at eval epoch begin. Default: ``None`` .
|
|
40
|
+
on_eval_epoch_end (Function): called at eval epoch end. Default: ``None`` .
|
|
41
|
+
on_eval_step_begin (Function): called at each eval step begin. Default: ``None`` .
|
|
42
|
+
on_eval_step_end (Function): called at each eval step end. Default: ``None`` .
|
|
43
|
+
on_eval_begin (Function): called at the beginning of model eval. Default: ``None`` .
|
|
44
|
+
on_eval_end (Function): called at the end of model eval. Default: ``None`` .
|
|
45
45
|
|
|
46
46
|
Examples:
|
|
47
47
|
>>> import numpy as np
|
|
@@ -54,7 +54,7 @@ class LambdaCallback(Callback):
|
|
|
54
54
|
>>> crit = nn.SoftmaxCrossEntropyWithLogits(sparse=True, reduction='mean')
|
|
55
55
|
>>> opt = nn.Momentum(net.trainable_params(), 0.01, 0.9)
|
|
56
56
|
>>> lambda_callback = LambdaCallback(on_train_epoch_end=
|
|
57
|
-
... lambda run_context: print("loss:
|
|
57
|
+
... lambda run_context: print("loss:", run_context.original_args().net_outputs))
|
|
58
58
|
>>> model = Model(network=net, optimizer=opt, loss_fn=crit, metrics={"recall"})
|
|
59
59
|
>>> model.train(2, train_dataset, callbacks=[lambda_callback])
|
|
60
60
|
loss: 1.6127687
|
|
@@ -180,17 +180,18 @@ class SummaryLandscape:
|
|
|
180
180
|
Examples:
|
|
181
181
|
>>> import mindspore as ms
|
|
182
182
|
>>> import mindspore.nn as nn
|
|
183
|
-
>>> from mindspore.
|
|
184
|
-
>>> from mindspore
|
|
183
|
+
>>> from mindspore.train import Model, Accuracy, Loss
|
|
184
|
+
>>> from mindspore import SummaryCollector, SummaryLandscape
|
|
185
185
|
>>>
|
|
186
186
|
>>> if __name__ == '__main__':
|
|
187
187
|
... # If the device_target is Ascend, set the device_target to "Ascend"
|
|
188
188
|
... ms.set_context(mode=ms.GRAPH_MODE, device_target="GPU")
|
|
189
|
-
...
|
|
190
|
-
... #
|
|
191
|
-
... ds_train = create_dataset(
|
|
192
|
-
... #
|
|
193
|
-
...
|
|
189
|
+
... # Create the dataset taking MNIST as an example. Refer to
|
|
190
|
+
... # https://gitee.com/mindspore/docs/blob/r2.2/docs/mindspore/code/mnist.py
|
|
191
|
+
... ds_train = create_dataset()
|
|
192
|
+
... # Define the network structure of LeNet5. Refer to
|
|
193
|
+
... # https://gitee.com/mindspore/docs/blob/r2.2/docs/mindspore/code/lenet.py
|
|
194
|
+
... network = LeNet5()
|
|
194
195
|
... net_loss = nn.SoftmaxCrossEntropyWithLogits(sparse=True, reduction="mean")
|
|
195
196
|
... net_opt = nn.Momentum(network.trainable_params(), 0.01, 0.9)
|
|
196
197
|
... model = Model(network, net_loss, net_opt, metrics={"Accuracy": Accuracy()})
|
|
@@ -208,12 +209,15 @@ class SummaryLandscape:
|
|
|
208
209
|
...
|
|
209
210
|
... # Simple usage for visualization landscape:
|
|
210
211
|
... def callback_fn():
|
|
211
|
-
... network
|
|
212
|
+
... # Define the network structure of LeNet5. Refer to
|
|
213
|
+
... # https://gitee.com/mindspore/docs/blob/r2.2/docs/mindspore/code/lenet.py
|
|
214
|
+
... network = LeNet5()
|
|
212
215
|
... net_loss = nn.SoftmaxCrossEntropyWithLogits(sparse=True, reduction="mean")
|
|
213
216
|
... metrics = {"Loss": Loss()}
|
|
214
217
|
... model = Model(network, net_loss, metrics=metrics)
|
|
215
|
-
...
|
|
216
|
-
...
|
|
218
|
+
... # Create the dataset taking MNIST as an example. Refer to
|
|
219
|
+
... # https://gitee.com/mindspore/docs/blob/r2.2/docs/mindspore/code/mnist.py
|
|
220
|
+
... ds_eval = create_dataset()
|
|
217
221
|
... return model, network, ds_eval, metrics
|
|
218
222
|
...
|
|
219
223
|
... summary_landscape = SummaryLandscape('./summary/lenet_interval_1')
|
|
@@ -269,19 +273,19 @@ class SummaryLandscape:
|
|
|
269
273
|
collect_landscape (Union[dict, None]): The meaning of the parameters
|
|
270
274
|
when creating loss landscape is consistent with the fields
|
|
271
275
|
with the same name in SummaryCollector. The purpose of setting here
|
|
272
|
-
is to allow users to freely modify creating parameters. Default: None.
|
|
276
|
+
is to allow users to freely modify creating parameters. Default: ``None`` .
|
|
273
277
|
|
|
274
278
|
- landscape_size (int): Specify the image resolution of the generated loss landscape.
|
|
275
|
-
For example, if it is set to 128, the resolution of the landscape is 128 * 128.
|
|
279
|
+
For example, if it is set to ``128`` , the resolution of the landscape is 128 * 128.
|
|
276
280
|
The calculation time increases with the increase of resolution.
|
|
277
|
-
Default: 40. Optional values: between 3 and 256.
|
|
281
|
+
Default: ``40`` . Optional values: between 3 and 256.
|
|
278
282
|
- create_landscape (dict): Select how to create loss landscape.
|
|
279
283
|
Training process loss landscape(train) and training result loss landscape(result).
|
|
280
|
-
Default: {"train": True, "result": True}. Optional: True/False.
|
|
284
|
+
Default: {"train": True, "result": True}. Optional: ``True`` / ``False`` .
|
|
281
285
|
- num_samples (int): The size of the dataset used to create the loss landscape.
|
|
282
286
|
For example, in image dataset, You can set num_samples is 2048,
|
|
283
287
|
which means that 2048 images are used to create loss landscape.
|
|
284
|
-
Default: 2048.
|
|
288
|
+
Default: ``2048`` .
|
|
285
289
|
- intervals (List[List[int]]): Specifies the interval
|
|
286
290
|
in which the loss landscape. For example: If the user wants to
|
|
287
291
|
create loss landscape of two training processes, they are 1-5 epoch
|
|
@@ -289,9 +293,9 @@ class SummaryLandscape:
|
|
|
289
293
|
Note: Each interval have at least three epochs.
|
|
290
294
|
device_ids (List(int)): Specifies which devices are used to create loss landscape.
|
|
291
295
|
For example: [0, 1] refers to creating loss landscape with device 0 and device 1.
|
|
292
|
-
Default: None.
|
|
296
|
+
Default: ``None`` .
|
|
293
297
|
output (str): Specifies the path to save the loss landscape.
|
|
294
|
-
Default: None. The default save path is the same as the summary file.
|
|
298
|
+
Default: ``None`` . The default save path is the same as the summary file.
|
|
295
299
|
"""
|
|
296
300
|
|
|
297
301
|
executor = None
|
|
@@ -19,6 +19,7 @@ import numpy as np
|
|
|
19
19
|
|
|
20
20
|
from mindspore import _checkparam as Validator
|
|
21
21
|
from mindspore.train.callback._callback import Callback, _handle_loss
|
|
22
|
+
from mindspore._c_expression import _collect_host_info
|
|
22
23
|
|
|
23
24
|
|
|
24
25
|
class LossMonitor(Callback):
|
|
@@ -32,27 +33,24 @@ class LossMonitor(Callback):
|
|
|
32
33
|
|
|
33
34
|
Args:
|
|
34
35
|
per_print_times (int): How many steps to print once loss. During sink mode, it will print loss in the
|
|
35
|
-
nearest step. Default: 1.
|
|
36
|
+
nearest step. Default: ``1`` .
|
|
36
37
|
|
|
37
38
|
Raises:
|
|
38
39
|
ValueError: If per_print_times is not an integer or less than zero.
|
|
39
40
|
|
|
40
41
|
Examples:
|
|
41
|
-
.. note::
|
|
42
|
-
Before running the following example, you need to customize the network LeNet5 and
|
|
43
|
-
dataset preparation function create_dataset. Refer to
|
|
44
|
-
`Building a Network <https://www.mindspore.cn/tutorials/en/r2.0/beginner/model.html>`_
|
|
45
|
-
and `Dataset <https://www.mindspore.cn/tutorials/en/r2.0/beginner/dataset.html>`_ .
|
|
46
|
-
|
|
47
42
|
>>> from mindspore import nn
|
|
48
43
|
>>> from mindspore.train import Model, LossMonitor
|
|
49
44
|
>>>
|
|
45
|
+
>>> # Define the network structure of LeNet5. Refer to
|
|
46
|
+
>>> # https://gitee.com/mindspore/docs/blob/r2.2/docs/mindspore/code/lenet.py
|
|
50
47
|
>>> net = LeNet5()
|
|
51
48
|
>>> loss = nn.SoftmaxCrossEntropyWithLogits(sparse=True, reduction='mean')
|
|
52
49
|
>>> optim = nn.Momentum(net.trainable_params(), 0.01, 0.9)
|
|
53
50
|
>>> model = Model(net, loss_fn=loss, optimizer=optim)
|
|
54
|
-
>>>
|
|
55
|
-
>>>
|
|
51
|
+
>>> # Create the dataset taking MNIST as an example. Refer to
|
|
52
|
+
>>> # https://gitee.com/mindspore/docs/blob/r2.2/docs/mindspore/code/mnist.py
|
|
53
|
+
>>> dataset = create_dataset()
|
|
56
54
|
>>> loss_monitor = LossMonitor()
|
|
57
55
|
>>> model.train(10, dataset, callbacks=loss_monitor)
|
|
58
56
|
"""
|
|
@@ -72,7 +70,7 @@ class LossMonitor(Callback):
|
|
|
72
70
|
please refer to :class:`mindspore.train.RunContext`.
|
|
73
71
|
"""
|
|
74
72
|
cb_params = run_context.original_args()
|
|
75
|
-
|
|
73
|
+
_collect_host_info("Callback", "LossMonitor", "step_end", level=1)
|
|
76
74
|
cur_epoch_num = cb_params.get("cur_epoch_num", 1)
|
|
77
75
|
loss = _handle_loss(cb_params.net_outputs)
|
|
78
76
|
|
|
@@ -103,6 +101,7 @@ class LossMonitor(Callback):
|
|
|
103
101
|
please refer to :class:`mindspore.train.RunContext`.
|
|
104
102
|
"""
|
|
105
103
|
cb_params = run_context.original_args()
|
|
104
|
+
_collect_host_info("Callback", "LossMonitor", "train_epoch_end", level=1)
|
|
106
105
|
metrics = cb_params.get("metrics")
|
|
107
106
|
if metrics:
|
|
108
107
|
print("Eval result: epoch %d, metrics: %s" % (cb_params.cur_epoch_num, metrics))
|
|
@@ -34,13 +34,13 @@ class OnRequestExit(Callback):
|
|
|
34
34
|
including checkpoint and mindir, and then exit the training process.
|
|
35
35
|
|
|
36
36
|
Args:
|
|
37
|
-
save_ckpt (bool): Whether save the checkpoint before the training process exit. Default: True.
|
|
38
|
-
save_mindir (bool): Whether save the mindir before the training process exit. Default: True.
|
|
37
|
+
save_ckpt (bool): Whether save the checkpoint before the training process exit. Default: ``True`` .
|
|
38
|
+
save_mindir (bool): Whether save the mindir before the training process exit. Default: ``True`` .
|
|
39
39
|
file_name (str): The saved checkpoint and mindir file name,
|
|
40
|
-
the checkpoint file add suffix '.ckpt', the mindir file add suffix '.mindir'. Default: 'Net'.
|
|
41
|
-
directory (str): The directory save checkpoint and mindir. Default: './'.
|
|
40
|
+
the checkpoint file add suffix '.ckpt', the mindir file add suffix '.mindir'. Default: ``'Net'`` .
|
|
41
|
+
directory (str): The directory save checkpoint and mindir. Default: ``'./'`` .
|
|
42
42
|
sig (int): The user registered exit signal, it must be a captureable and negligible signal.
|
|
43
|
-
When the process receives the signal, exits the training or eval process. Default: signal.SIGTERM.
|
|
43
|
+
When the process receives the signal, exits the training or eval process. Default: ``signal.SIGTERM`` .
|
|
44
44
|
|
|
45
45
|
Raises:
|
|
46
46
|
ValueError: If the 'save_ckpt' is not a bool.
|
|
@@ -50,38 +50,21 @@ class OnRequestExit(Callback):
|
|
|
50
50
|
ValueError: If the 'sig' is not an int or the 'sig' is signal.SIGKILL.
|
|
51
51
|
|
|
52
52
|
Examples:
|
|
53
|
-
>>> import numpy as np
|
|
54
|
-
>>> import mindspore as ms
|
|
55
|
-
>>> from mindspore import dataset as ds
|
|
56
53
|
>>> from mindspore import nn
|
|
54
|
+
>>> from mindspore.train import Model, TimeMonitor
|
|
55
|
+
>>> import mindspore as ms
|
|
57
56
|
>>>
|
|
58
|
-
>>> # Define the
|
|
59
|
-
>>>
|
|
60
|
-
>>>
|
|
61
|
-
>>>
|
|
62
|
-
>>>
|
|
63
|
-
>>>
|
|
64
|
-
>>>
|
|
65
|
-
>>>
|
|
66
|
-
>>>
|
|
67
|
-
>>> forward_net = ForwardNet()
|
|
68
|
-
>>> loss = nn.MAELoss()
|
|
69
|
-
>>> opt = nn.Momentum(forward_net.trainable_params(), 0.01, 0.9)
|
|
70
|
-
>>> model = ms.Model(forward_net, loss_fn=loss, optimizer=opt)\
|
|
71
|
-
>>>
|
|
72
|
-
>>> # Create dataset
|
|
73
|
-
>>> def generator_multi_column():
|
|
74
|
-
>>> i = 0
|
|
75
|
-
>>> while i < 1000:
|
|
76
|
-
>>> i += 1
|
|
77
|
-
>>> yield np.ones((1, 32, 32)).astype(np.float32) * 0.01, np.array(1).astype(np.int32)
|
|
78
|
-
>>> dataset = ds.GeneratorDataset(source=generator_multi_column, column_names=["data", "label"])
|
|
79
|
-
>>> dataset = dataset.batch(32, drop_remainder=True)
|
|
80
|
-
>>>
|
|
57
|
+
>>> # Define the network structure of LeNet5. Refer to
|
|
58
|
+
>>> # https://gitee.com/mindspore/docs/blob/r2.2/docs/mindspore/code/lenet.py
|
|
59
|
+
>>> net = LeNet5()
|
|
60
|
+
>>> loss = nn.SoftmaxCrossEntropyWithLogits(sparse=True, reduction='mean')
|
|
61
|
+
>>> optim = nn.Momentum(net.trainable_params(), 0.01, 0.9)
|
|
62
|
+
>>> model = Model(net, loss_fn=loss, optimizer=optim)
|
|
63
|
+
>>> # Create the dataset taking MNIST as an example. Refer to
|
|
64
|
+
>>> # https://gitee.com/mindspore/docs/blob/r2.2/docs/mindspore/code/mnist.py
|
|
65
|
+
>>> dataset = create_dataset()
|
|
81
66
|
>>> on_request_exit = ms.train.OnRequestExit(file_name='LeNet5')
|
|
82
67
|
>>> model.train(10, dataset, callbacks=on_request_exit)
|
|
83
|
-
>>> # The user send the signal SIGTERM to the training process,
|
|
84
|
-
>>> # the process would save the checkpoint and mindir, and then exit the training process.
|
|
85
68
|
"""
|
|
86
69
|
|
|
87
70
|
def __init__(self, save_ckpt=True, save_mindir=True, file_name='Net', directory='./', sig=signal.SIGTERM):
|
|
@@ -20,6 +20,7 @@ import numpy as np
|
|
|
20
20
|
|
|
21
21
|
from mindspore.common.tensor import Tensor
|
|
22
22
|
from mindspore.common.parameter import Parameter
|
|
23
|
+
from mindspore.common import dtype as mstype
|
|
23
24
|
from mindspore import _checkparam as Validator
|
|
24
25
|
from mindspore import log as logger
|
|
25
26
|
from mindspore.ops import functional as F, ReduceOp
|
|
@@ -48,53 +49,49 @@ class ReduceLROnPlateau(Callback):
|
|
|
48
49
|
|
|
49
50
|
Args:
|
|
50
51
|
monitor (str): quantity to be monitored. If evaluation is performed on
|
|
51
|
-
the end of train epochs, the valid monitors can be "loss"
|
|
52
|
-
"eval_loss" or metric names passed when instantiate the `Model`;
|
|
53
|
-
otherwise the valid monitor is "loss"
|
|
54
|
-
When monitor is "loss"
|
|
55
|
-
the first element will be returned as training loss.
|
|
56
|
-
|
|
52
|
+
the end of train epochs, the valid monitors can be ``"loss"``,
|
|
53
|
+
``"eval_loss"`` or metric names passed when instantiate the `Model`;
|
|
54
|
+
otherwise the valid monitor is ``"loss"``.
|
|
55
|
+
When `monitor` is ``"loss"``, if train network has multiple outputs,
|
|
56
|
+
the first element will be returned as training loss. Default: ``'eval_loss'``.
|
|
57
57
|
factor (float): factor by which the learning rate will be reduced.
|
|
58
|
-
`new_lr = lr * factor`. Default: 0.1.
|
|
58
|
+
`new_lr = lr * factor`. Default: ``0.1`` .
|
|
59
59
|
patience (int): `monitor` value is better than history best value over
|
|
60
60
|
`min_delta` is seen as improvement, `patience` is number of epochs
|
|
61
61
|
with no improvement that would be waited. When the waiting
|
|
62
62
|
counter `self.wait` is larger than or equal to `patience`, the lr
|
|
63
|
-
will be reduced. Default: 10.
|
|
63
|
+
will be reduced. Default: ``10`` .
|
|
64
64
|
verbose (bool): If False: quiet, if True: print related information.
|
|
65
|
-
Default: False.
|
|
65
|
+
Default: ``False`` .
|
|
66
66
|
mode (str): one of `{'auto', 'min', 'max'}`. In "min" mode,
|
|
67
67
|
the learning rate will be reduced when the
|
|
68
68
|
quantity monitored has stopped decreasing; in "max" mode it will be
|
|
69
69
|
reduced when the quantity monitored has stopped increasing; in "auto"
|
|
70
70
|
mode, the direction is automatically inferred from the name of the
|
|
71
|
-
monitored quantity. Default:
|
|
71
|
+
monitored quantity. Default: ``'auto'`` .
|
|
72
72
|
min_delta (float): threshold for measuring the new optimum, to only focus on
|
|
73
|
-
significant changes. Default: 1e-4.
|
|
73
|
+
significant changes. Default: ``1e-4`` .
|
|
74
74
|
cooldown (int): number of epochs to wait before resuming normal operation after
|
|
75
|
-
lr has been reduced. Default: 0.
|
|
76
|
-
min_lr (float): lower bound on the learning rate. Default: 0.
|
|
75
|
+
lr has been reduced. Default: ``0`` .
|
|
76
|
+
min_lr (float): lower bound on the learning rate. Default: ``0`` .
|
|
77
77
|
|
|
78
78
|
Raises:
|
|
79
|
-
ValueError: `mode` not in 'auto'
|
|
79
|
+
ValueError: `mode` not in ``'auto'``, ``'min'`` or ``'max'``.
|
|
80
80
|
ValueError: The monitor value is not a scalar.
|
|
81
81
|
ValueError: The learning rate is not a Parameter.
|
|
82
82
|
|
|
83
83
|
Examples:
|
|
84
|
-
.. note::
|
|
85
|
-
Before running the following example, you need to customize the network LeNet5 and
|
|
86
|
-
dataset preparation function create_dataset. Refer to
|
|
87
|
-
`Building a Network <https://www.mindspore.cn/tutorials/en/r2.0/beginner/model.html>`_
|
|
88
|
-
and `Dataset <https://www.mindspore.cn/tutorials/en/r2.0/beginner/dataset.html>`_ .
|
|
89
|
-
|
|
90
84
|
>>> from mindspore import nn
|
|
91
85
|
>>> from mindspore.train import Model, ReduceLROnPlateau
|
|
86
|
+
>>> # Define the network structure of LeNet5. Refer to
|
|
87
|
+
>>> # https://gitee.com/mindspore/docs/blob/r2.2/docs/mindspore/code/lenet.py
|
|
92
88
|
>>> net = LeNet5()
|
|
93
89
|
>>> loss = nn.SoftmaxCrossEntropyWithLogits(sparse=True, reduction='mean')
|
|
94
90
|
>>> optim = nn.Momentum(net.trainable_params(), 0.01, 0.9)
|
|
95
91
|
>>> model = Model(net, loss_fn=loss, optimizer=optim, metrics={"acc"})
|
|
96
|
-
>>>
|
|
97
|
-
>>>
|
|
92
|
+
>>> # Create the dataset taking MNIST as an example. Refer to
|
|
93
|
+
>>> # https://gitee.com/mindspore/docs/blob/r2.2/docs/mindspore/code/mnist.py
|
|
94
|
+
>>> dataset = create_dataset()
|
|
98
95
|
>>> cb = ReduceLROnPlateau(monitor="acc", patience=3, verbose=True)
|
|
99
96
|
>>> model.fit(10, dataset, callbacks=cb)
|
|
100
97
|
"""
|
|
@@ -160,7 +157,7 @@ class ReduceLROnPlateau(Callback):
|
|
|
160
157
|
if rank_size == 1:
|
|
161
158
|
reduce_monitor_value = current_monitor_value
|
|
162
159
|
else:
|
|
163
|
-
reduce_monitor_value = self._reduce(Tensor(current_monitor_value.
|
|
160
|
+
reduce_monitor_value = self._reduce(Tensor(current_monitor_value, mstype.float32)).asnumpy() / rank_size
|
|
164
161
|
|
|
165
162
|
if reduce_monitor_value is None:
|
|
166
163
|
return
|
|
@@ -226,4 +223,4 @@ class ValueReduce(nn.Cell):
|
|
|
226
223
|
self.allreduce = ops.AllReduce(ReduceOp.SUM)
|
|
227
224
|
|
|
228
225
|
def construct(self, x):
|
|
229
|
-
return self.allreduce(x)
|
|
226
|
+
return self.allreduce(x)
|
|
@@ -41,6 +41,7 @@ from mindspore.nn.optim.optimizer import Optimizer
|
|
|
41
41
|
from mindspore.nn.loss.loss import LossBase
|
|
42
42
|
from mindspore.train._utils import check_value_type, _make_directory
|
|
43
43
|
from mindspore._c_expression import security
|
|
44
|
+
from mindspore._c_expression import _collect_host_info
|
|
44
45
|
|
|
45
46
|
HYPER_CONFIG_ENV_NAME = "MINDINSIGHT_HYPER_CONFIG"
|
|
46
47
|
HYPER_CONFIG_LEN_LIMIT = 100000
|
|
@@ -70,9 +71,9 @@ class LineageMetadata:
|
|
|
70
71
|
|
|
71
72
|
class SummaryCollector(Callback):
|
|
72
73
|
"""
|
|
73
|
-
SummaryCollector can help you to collect some common information
|
|
74
|
+
SummaryCollector can help you to collect some common information,
|
|
75
|
+
such as loss, learning late, computational graph and so on.
|
|
74
76
|
|
|
75
|
-
It can help you to collect loss, learning late, computational graph and so on.
|
|
76
77
|
SummaryCollector also enables the summary operator to collect data to summary files.
|
|
77
78
|
|
|
78
79
|
Note:
|
|
@@ -90,50 +91,55 @@ class SummaryCollector(Callback):
|
|
|
90
91
|
and the unit is `step`. If a frequency is set, we will collect data
|
|
91
92
|
when (current steps % freq) equals to 0, and the first step will be collected at any time.
|
|
92
93
|
It is important to note that if the data sink mode is used, the unit will become the `epoch`.
|
|
93
|
-
It is not recommended to collect data too frequently, which can affect performance. Default: 10.
|
|
94
|
+
It is not recommended to collect data too frequently, which can affect performance. Default: ``10`` .
|
|
95
|
+
num_process (int): Number of processes saving summary data. The more processes there are, the better the
|
|
96
|
+
performance, but there may be host memory overflow issues. Default: ``32`` .
|
|
94
97
|
collect_specified_data (Union[None, dict]): Perform custom operations on the collected data.
|
|
95
98
|
By default, if set to None, all data is collected as the default behavior.
|
|
96
99
|
You can customize the collected data with a dictionary.
|
|
97
100
|
For example, you can set {'collect_metric': False} to control not collecting metrics.
|
|
98
|
-
The data that supports control is shown below. Default: None.
|
|
101
|
+
The data that supports control is shown below. Default: ``None`` .
|
|
99
102
|
|
|
100
103
|
- collect_metric (bool): Whether to collect training metrics, currently only the loss is collected.
|
|
101
|
-
The first output will be treated as the loss and it will be averaged. Default: True.
|
|
104
|
+
The first output will be treated as the loss and it will be averaged. Default: ``True`` .
|
|
102
105
|
- collect_graph (bool): Whether to collect the computational graph. Currently, only
|
|
103
|
-
training computational graph is collected. Default: True.
|
|
106
|
+
training computational graph is collected. Default: ``True`` .
|
|
104
107
|
- collect_train_lineage (bool): Whether to collect lineage data for the training phase,
|
|
105
108
|
this field will be displayed on the `lineage page \
|
|
106
|
-
<https://www.mindspore.cn/mindinsight/docs/en/r2.
|
|
107
|
-
of MindInsight. Default: True.
|
|
109
|
+
<https://www.mindspore.cn/mindinsight/docs/en/r2.2/lineage_and_scalars_comparison.html>`_
|
|
110
|
+
of MindInsight. Default: ``True`` .
|
|
108
111
|
- collect_eval_lineage (bool): Whether to collect lineage data for the evaluation phase,
|
|
109
|
-
this field will be displayed on the lineage page
|
|
112
|
+
this field will be displayed on the `lineage page
|
|
113
|
+
<https://www.mindspore.cn/mindinsight/docs/en/r2.2/lineage_and_scalars_comparison.html>`_
|
|
114
|
+
of MindInsight. Default: ``True`` .
|
|
110
115
|
- collect_input_data (bool): Whether to collect dataset for each training.
|
|
111
116
|
Currently only image data is supported.
|
|
112
117
|
If there are multiple columns of data in the dataset, the first column should be image data.
|
|
113
|
-
Default: True.
|
|
114
|
-
- collect_dataset_graph (bool): Whether to collect dataset graph for the training phase.
|
|
118
|
+
Default: ``True`` .
|
|
119
|
+
- collect_dataset_graph (bool): Whether to collect dataset graph for the training phase.
|
|
120
|
+
Default: ``True`` .
|
|
115
121
|
- histogram_regular (Union[str, None]): Collect weight and bias for parameter distribution page
|
|
116
122
|
and displayed in MindInsight. This field allows regular strings to control which parameters to collect.
|
|
117
123
|
It is not recommended to collect too many parameters at once, as it can affect performance.
|
|
118
124
|
Note that if you collect too many parameters and run out of memory, the training will fail.
|
|
119
|
-
Default: None, it means only the first five parameters are collected.
|
|
125
|
+
Default: ``None`` , it means only the first five parameters are collected.
|
|
120
126
|
- collect_landscape (Union[dict,None]): Whether to collect the parameters needed to create the
|
|
121
127
|
loss landscape. If set to None, collect_landscape parameters will not be collected. All parameter
|
|
122
128
|
information is collected by default and stored in file `{summary_dir}/ckpt_dir/train_metadata.json`.
|
|
123
129
|
|
|
124
130
|
- landscape_size (int): Specify the image resolution of the generated loss landscape.
|
|
125
|
-
For example, if it is set to 128, the resolution of the landscape is 128 * 128.
|
|
131
|
+
For example, if it is set to ``128`` , the resolution of the landscape is 128 * 128.
|
|
126
132
|
The calculation time increases with the increase of resolution.
|
|
127
|
-
Default: 40. Optional values: between 3 and 256.
|
|
128
|
-
- unit (str): Specify the interval strength of the training process. Default: "step".
|
|
133
|
+
Default: ``40`` . Optional values: between 3 and 256.
|
|
134
|
+
- unit (str): Specify the interval strength of the training process. Default: ``"step"`` .
|
|
129
135
|
Optional: epoch/step.
|
|
130
136
|
- create_landscape (dict): Select how to create loss landscape.
|
|
131
137
|
Training process loss landscape(train) and training result loss landscape(result).
|
|
132
|
-
Default: {"train": True, "result": True}. Optional: True/False.
|
|
138
|
+
Default: {"train": True, "result": True}. Optional: ``True`` / ``False`` .
|
|
133
139
|
- num_samples (int): The size of the dataset used to create the loss landscape.
|
|
134
140
|
For example, in image dataset, You can set num_samples is 128,
|
|
135
141
|
which means that 128 images are used to create loss landscape.
|
|
136
|
-
Default: 128.
|
|
142
|
+
Default: ``128`` .
|
|
137
143
|
- intervals (List[List[int]]): Specifies the interval
|
|
138
144
|
in which the loss landscape. For example: If the user wants to
|
|
139
145
|
create loss landscape of two training processes, they are 1-5 epoch
|
|
@@ -143,10 +149,11 @@ class SummaryCollector(Callback):
|
|
|
143
149
|
keep_default_action (bool): This field affects the collection behavior of the 'collect_specified_data' field.
|
|
144
150
|
True: it means that after specified data is set, non-specified data is collected as the default behavior.
|
|
145
151
|
False: it means that after specified data is set, only the specified data is collected,
|
|
146
|
-
and the others are not collected. Default: True.
|
|
152
|
+
and the others are not collected. Default: ``True`` .
|
|
147
153
|
custom_lineage_data (Union[dict, None]): Allows you to customize the data and present it on the MingInsight
|
|
148
|
-
lineage page.
|
|
149
|
-
|
|
154
|
+
`lineage page <https://www.mindspore.cn/mindinsight/docs/en/r2.2/lineage_and_scalars_comparison.html>`_ .
|
|
155
|
+
In the custom data, the type of the key supports str, and the type of value supports str, int
|
|
156
|
+
and float. Default: ``None`` , it means there is no custom data.
|
|
150
157
|
collect_tensor_freq (Optional[int]): The same semantics as the `collect_freq`, but controls TensorSummary only.
|
|
151
158
|
Because TensorSummary data is too large to be compared with other summary data, this parameter is used to
|
|
152
159
|
reduce its collection. By default, The maximum number of steps for collecting TensorSummary data is 20,
|
|
@@ -156,18 +163,18 @@ class SummaryCollector(Callback):
|
|
|
156
163
|
but when the total steps is 20, both TensorSummary and other summary will be collected 3 steps.
|
|
157
164
|
Also note that when in parallel mode, the total steps will be split evenly, which will
|
|
158
165
|
affect the number of steps TensorSummary will be collected.
|
|
159
|
-
Default: None, which means to follow the behavior as described above.
|
|
166
|
+
Default: ``None`` , which means to follow the behavior as described above.
|
|
160
167
|
max_file_size (Optional[int]): The maximum size in bytes of each file that can be written to the disk.
|
|
161
168
|
For example, to write not larger than 4GB, specify `max_file_size=4*1024**3`.
|
|
162
|
-
Default: None, which means no limit.
|
|
169
|
+
Default: ``None`` , which means no limit.
|
|
163
170
|
export_options (Union[None, dict]): Perform custom operations on the export data.
|
|
164
171
|
Note that the size of export files is not limited by the max_file_size.
|
|
165
172
|
You can customize the export data with a dictionary. For example, you can set {'tensor_format': 'npy'}
|
|
166
173
|
to export tensor as npy file. The data that supports control is shown below.
|
|
167
|
-
Default: None, it means that the data is not exported.
|
|
174
|
+
Default: ``None`` , it means that the data is not exported.
|
|
168
175
|
|
|
169
176
|
- tensor_format (Union[str, None]): Customize the export tensor format. Supports ["npy", None].
|
|
170
|
-
Default: None, it means that the tensor is not exported.
|
|
177
|
+
Default: ``None`` , it means that the tensor is not exported.
|
|
171
178
|
|
|
172
179
|
- npy: export tensor as npy file.
|
|
173
180
|
|
|
@@ -176,17 +183,18 @@ class SummaryCollector(Callback):
|
|
|
176
183
|
|
|
177
184
|
Examples:
|
|
178
185
|
>>> import mindspore as ms
|
|
179
|
-
>>>
|
|
180
|
-
>>> from mindspore.train import Model,
|
|
181
|
-
>>> from mindspore.nn import Accuracy
|
|
186
|
+
>>> from mindspore import nn, SummaryCollector
|
|
187
|
+
>>> from mindspore.train import Model, Accuracy
|
|
182
188
|
>>>
|
|
183
189
|
>>> if __name__ == '__main__':
|
|
184
190
|
... # If the device_target is GPU, set the device_target to "GPU"
|
|
185
191
|
... ms.set_context(mode=ms.GRAPH_MODE, device_target="Ascend")
|
|
186
192
|
... mnist_dataset_dir = '/path/to/mnist_dataset_directory'
|
|
187
|
-
... #
|
|
188
|
-
...
|
|
189
|
-
...
|
|
193
|
+
... # Create the dataset taking MNIST as an example. Refer to
|
|
194
|
+
... # https://gitee.com/mindspore/docs/blob/r2.2/docs/mindspore/code/mnist.py
|
|
195
|
+
... ds_train = create_dataset()
|
|
196
|
+
... # Define the network structure of LeNet5. Refer to
|
|
197
|
+
... # https://gitee.com/mindspore/docs/blob/r2.2/docs/mindspore/code/lenet.py
|
|
190
198
|
... network = LeNet5(10)
|
|
191
199
|
... net_loss = nn.SoftmaxCrossEntropyWithLogits(sparse=True, reduction="mean")
|
|
192
200
|
... net_opt = nn.Momentum(network.trainable_params(), 0.01, 0.9)
|
|
@@ -222,6 +230,7 @@ class SummaryCollector(Callback):
|
|
|
222
230
|
def __init__(self,
|
|
223
231
|
summary_dir,
|
|
224
232
|
collect_freq=10,
|
|
233
|
+
num_process=32,
|
|
225
234
|
collect_specified_data=None,
|
|
226
235
|
keep_default_action=True,
|
|
227
236
|
custom_lineage_data=None,
|
|
@@ -275,6 +284,7 @@ class SummaryCollector(Callback):
|
|
|
275
284
|
self._is_parse_loss_success = True
|
|
276
285
|
self._first_step = True
|
|
277
286
|
self._dataset_sink_mode = True
|
|
287
|
+
self._num_process = num_process
|
|
278
288
|
|
|
279
289
|
def __enter__(self):
|
|
280
290
|
self._record = SummaryRecord(log_dir=self._summary_dir,
|
|
@@ -463,6 +473,7 @@ class SummaryCollector(Callback):
|
|
|
463
473
|
|
|
464
474
|
def begin(self, run_context):
|
|
465
475
|
cb_params = run_context.original_args()
|
|
476
|
+
_collect_host_info("Callback", "SummaryCollector", "begin", level=1)
|
|
466
477
|
self._check_callbacks(cb_params)
|
|
467
478
|
|
|
468
479
|
if cb_params.mode not in ModeEnum.to_list():
|
|
@@ -474,6 +485,7 @@ class SummaryCollector(Callback):
|
|
|
474
485
|
|
|
475
486
|
def step_end(self, run_context):
|
|
476
487
|
cb_params = run_context.original_args()
|
|
488
|
+
_collect_host_info("Callback", "SummaryCollector", "step_end", level=1)
|
|
477
489
|
if cb_params.mode != ModeEnum.TRAIN.value:
|
|
478
490
|
return
|
|
479
491
|
|
|
@@ -548,6 +560,7 @@ class SummaryCollector(Callback):
|
|
|
548
560
|
|
|
549
561
|
def epoch_end(self, run_context):
|
|
550
562
|
cb_params = run_context.original_args()
|
|
563
|
+
_collect_host_info("Callback", "SummaryCollector", "epoch_end", level=1)
|
|
551
564
|
self._collect_tensor_data(cb_params)
|
|
552
565
|
collect_landscape = self._collect_specified_data.get('collect_landscape')
|
|
553
566
|
if collect_landscape is not None:
|
|
@@ -564,6 +577,7 @@ class SummaryCollector(Callback):
|
|
|
564
577
|
|
|
565
578
|
def end(self, run_context):
|
|
566
579
|
cb_params = run_context.original_args()
|
|
580
|
+
_collect_host_info("Callback", "SummaryCollector", "end", level=1)
|
|
567
581
|
if cb_params.mode == ModeEnum.TRAIN.value:
|
|
568
582
|
self._collect_train_lineage(cb_params)
|
|
569
583
|
else:
|