mindspore-2.0.0rc1-cp38-cp38-manylinux1_x86_64.whl → mindspore-2.2.0-cp38-cp38-manylinux1_x86_64.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of mindspore might be problematic. Click here for more details.
- mindspore/.commit_id +1 -1
- mindspore/Third_Party_Open_Source_Software_Notice +2 -2
- mindspore/__init__.py +5 -2
- mindspore/_akg/akg/build_module.py +5 -6
- mindspore/_akg/akg/composite/build_module.py +49 -16
- mindspore/_akg/akg/composite/split_stitch.py +10 -11
- mindspore/_akg/akg/config/repository.json +195 -0
- mindspore/_akg/akg/global_configs.py +5 -1
- mindspore/_akg/akg/ms/info_version_adapt.py +67 -1
- mindspore/_akg/akg/tvm/api.py +4 -3
- mindspore/_akg/akg/tvm/autotvm/__init__.py +1 -2
- mindspore/_akg/akg/tvm/autotvm/graph_tuner/base_graph_tuner.py +1 -5
- mindspore/_akg/akg/tvm/autotvm/measure/__init__.py +1 -1
- mindspore/_akg/akg/tvm/autotvm/measure/measure.py +1 -10
- mindspore/_akg/akg/tvm/autotvm/measure/measure_methods.py +1 -372
- mindspore/_akg/akg/tvm/build_module.py +16 -1
- mindspore/_akg/akg/tvm/contrib/graph_runtime.py +0 -53
- mindspore/_akg/akg/tvm/hybrid/parser.py +7 -6
- mindspore/_akg/akg/tvm/ir_builder.py +1 -1
- mindspore/_akg/akg/tvm/module.py +1 -2
- mindspore/_akg/akg/tvm/stmt.py +2 -2
- mindspore/_akg/akg/utils/composite_op_helper.py +9 -10
- mindspore/_akg/akg/utils/kernel_exec.py +58 -260
- mindspore/_akg/akg/utils/op_dsl.py +17 -1
- mindspore/_akg/akg/utils/result_analysis.py +4 -24
- mindspore/_akg/akg/utils/tbe_codegen_utils.py +198 -0
- mindspore/_c_dataengine.cpython-38-x86_64-linux-gnu.so +0 -0
- mindspore/_c_expression.cpython-38-x86_64-linux-gnu.so +0 -0
- mindspore/_c_mindrecord.cpython-38-x86_64-linux-gnu.so +0 -0
- mindspore/_check_jit_forbidden_api.py +5 -1
- mindspore/_checkparam.py +79 -62
- mindspore/_extends/graph_kernel/__init__.py +0 -1
- mindspore/_extends/graph_kernel/model/graph_split.py +2 -0
- mindspore/_extends/graph_kernel/model/model_builder.py +9 -50
- mindspore/_extends/graph_kernel/splitter.py +1 -9
- mindspore/_extends/parallel_compile/akg_compiler/akg_process.py +128 -21
- mindspore/_extends/parallel_compile/akg_compiler/build_tbe_kernel.py +2 -2
- mindspore/_extends/parallel_compile/akg_compiler/tbe_topi.py +4 -2
- mindspore/_extends/parallel_compile/tbe_compiler/tbe_adapter.py +18 -13
- mindspore/_extends/parallel_compile/tbe_compiler/tbe_helper.py +13 -9
- mindspore/_extends/parallel_compile/tbe_compiler/tbe_job.py +1 -1
- mindspore/_extends/parallel_compile/tbe_compiler/tbe_job_manager.py +1 -1
- mindspore/_extends/parse/__init__.py +19 -17
- mindspore/_extends/parse/namespace.py +7 -36
- mindspore/_extends/parse/parser.py +375 -189
- mindspore/_extends/parse/resources.py +36 -41
- mindspore/_extends/parse/standard_method.py +350 -245
- mindspore/_extends/parse/trope.py +2 -12
- mindspore/_extends/remote/kernel_build_server.py +24 -7
- mindspore/_extends/remote/kernel_build_server_akg_v2.py +55 -0
- mindspore/_install_custom.py +43 -0
- mindspore/_mindspore_offline_debug.cpython-38-x86_64-linux-gnu.so +0 -0
- mindspore/amp.py +85 -19
- mindspore/bin/cache_admin +0 -0
- mindspore/bin/cache_server +0 -0
- mindspore/boost/base.py +2 -2
- mindspore/boost/boost.py +27 -32
- mindspore/boost/boost_cell_wrapper.py +37 -13
- mindspore/boost/grad_accumulation.py +1 -1
- mindspore/boost/grad_freeze.py +34 -6
- mindspore/boost/group_loss_scale_manager.py +15 -14
- mindspore/boost/less_batch_normalization.py +28 -3
- mindspore/common/__init__.py +15 -11
- mindspore/common/_auto_dynamic.py +68 -0
- mindspore/common/_jit_fallback_utils.py +111 -0
- mindspore/common/_register_for_adapter.py +17 -5
- mindspore/common/_register_for_tensor.py +2 -2
- mindspore/common/_stub_tensor.py +18 -15
- mindspore/common/_utils.py +31 -7
- mindspore/common/api.py +269 -101
- mindspore/common/auto_dynamic_shape.py +498 -0
- mindspore/common/dtype.py +61 -21
- mindspore/common/dump.py +9 -7
- mindspore/common/initializer.py +106 -76
- mindspore/common/jit_config.py +35 -14
- mindspore/common/lazy_inline.py +187 -0
- mindspore/common/mindir_util.py +101 -0
- mindspore/common/mutable.py +10 -13
- mindspore/common/parameter.py +246 -55
- mindspore/common/seed.py +13 -7
- mindspore/common/sparse_tensor.py +29 -33
- mindspore/common/tensor.py +907 -251
- mindspore/communication/__init__.py +7 -4
- mindspore/communication/_comm_helper.py +84 -4
- mindspore/communication/management.py +160 -88
- mindspore/config/op_info.config +99 -75
- mindspore/config/super_bar_config.json +36 -4
- mindspore/context.py +526 -219
- mindspore/dataset/__init__.py +9 -46
- mindspore/dataset/audio/__init__.py +4 -19
- mindspore/dataset/audio/transforms.py +545 -233
- mindspore/dataset/audio/utils.py +21 -18
- mindspore/dataset/callback/ds_callback.py +42 -13
- mindspore/dataset/core/config.py +158 -100
- mindspore/dataset/core/validator_helpers.py +1 -63
- mindspore/dataset/debug/debug_hook.py +45 -13
- mindspore/dataset/debug/pre_defined_hook.py +5 -5
- mindspore/dataset/engine/__init__.py +0 -5
- mindspore/dataset/engine/cache_client.py +38 -15
- mindspore/dataset/engine/datasets.py +615 -278
- mindspore/dataset/engine/datasets_audio.py +154 -283
- mindspore/dataset/engine/datasets_standard_format.py +104 -116
- mindspore/dataset/engine/datasets_text.py +443 -326
- mindspore/dataset/engine/datasets_user_defined.py +251 -164
- mindspore/dataset/engine/datasets_vision.py +839 -1443
- mindspore/dataset/engine/iterators.py +11 -4
- mindspore/dataset/engine/obs/obs_mindrecord_dataset.py +7 -3
- mindspore/dataset/engine/obs/util.py +3 -0
- mindspore/dataset/engine/offload.py +6 -6
- mindspore/dataset/engine/queue.py +15 -14
- mindspore/dataset/engine/samplers.py +39 -23
- mindspore/dataset/engine/serializer_deserializer.py +22 -6
- mindspore/dataset/engine/validators.py +21 -331
- mindspore/dataset/text/__init__.py +5 -33
- mindspore/dataset/text/transforms.py +334 -165
- mindspore/dataset/text/utils.py +215 -145
- mindspore/dataset/transforms/__init__.py +1 -1
- mindspore/dataset/transforms/c_transforms.py +3 -2
- mindspore/dataset/transforms/py_transforms_util.py +40 -12
- mindspore/dataset/transforms/transforms.py +174 -71
- mindspore/dataset/utils/browse_dataset.py +25 -17
- mindspore/dataset/utils/line_reader.py +24 -21
- mindspore/dataset/vision/__init__.py +5 -26
- mindspore/dataset/vision/c_transforms.py +177 -165
- mindspore/dataset/vision/py_transforms.py +114 -119
- mindspore/dataset/vision/py_transforms_util.py +54 -51
- mindspore/dataset/vision/transforms.py +1127 -381
- mindspore/dataset/vision/utils.py +54 -38
- mindspore/dataset/vision/validators.py +12 -2
- mindspore/experimental/map_parameter.py +38 -4
- mindspore/{dataset/datapreprocess → experimental/optim}/__init__.py +14 -4
- mindspore/experimental/optim/adam.py +192 -0
- mindspore/experimental/optim/adamw.py +181 -0
- mindspore/experimental/optim/lr_scheduler.py +1427 -0
- mindspore/experimental/optim/optimizer.py +252 -0
- mindspore/experimental/optim/sgd.py +147 -0
- mindspore/gen_ops.py +273 -0
- mindspore/include/OWNERS +1 -2
- mindspore/include/api/context.h +21 -1
- mindspore/include/api/data_type.h +2 -1
- mindspore/include/api/graph.h +0 -15
- mindspore/include/api/kernel.h +2 -0
- mindspore/include/api/kernel_api.h +37 -12
- mindspore/include/api/model.h +29 -42
- mindspore/include/api/model_group.h +14 -3
- mindspore/include/api/model_parallel_runner.h +18 -2
- mindspore/include/api/serialization.h +26 -0
- mindspore/include/api/status.h +1 -0
- mindspore/include/api/types.h +38 -4
- mindspore/include/c_api/ms/abstract.h +67 -0
- mindspore/include/c_api/ms/attribute.h +197 -0
- mindspore/include/c_api/ms/base/handle_types.h +43 -0
- mindspore/include/c_api/ms/base/macros.h +32 -0
- mindspore/include/c_api/ms/base/status.h +33 -0
- mindspore/include/c_api/ms/base/types.h +282 -0
- mindspore/include/c_api/ms/context.h +102 -0
- mindspore/include/c_api/ms/graph.h +160 -0
- mindspore/include/c_api/ms/node.h +606 -0
- mindspore/include/c_api/ms/tensor.h +161 -0
- mindspore/include/c_api/ms/value.h +84 -0
- mindspore/include/c_api/status_c.h +3 -0
- mindspore/include/dataset/constants.h +6 -12
- mindspore/include/dataset/execute.h +23 -13
- mindspore/include/dataset/text.h +26 -26
- mindspore/include/dataset/transforms.h +25 -31
- mindspore/include/dataset/vision.h +60 -60
- mindspore/include/dataset/vision_ascend.h +5 -6
- mindspore/include/dataset/vision_lite.h +17 -17
- mindspore/include/mindapi/base/format.h +0 -1
- mindspore/include/mindapi/base/type_id.h +2 -1
- mindspore/include/mindapi/base/types.h +5 -1
- mindspore/lib/libdnnl.so.2 +0 -0
- mindspore/lib/libjemalloc.so.2 +0 -0
- mindspore/lib/libmindspore.so +0 -0
- mindspore/lib/libmindspore_backend.so +0 -0
- mindspore/lib/libmindspore_common.so +0 -0
- mindspore/lib/libmindspore_core.so +0 -0
- mindspore/lib/libmindspore_glog.so.0 +0 -0
- mindspore/lib/libmindspore_gpr.so.15 +0 -0
- mindspore/lib/libmindspore_grpc++.so.1 +0 -0
- mindspore/lib/libmindspore_grpc.so.15 +0 -0
- mindspore/lib/libmindspore_shared_lib.so +0 -0
- mindspore/lib/libmpi_adapter.so +0 -0
- mindspore/lib/libnnacl.so +0 -0
- mindspore/lib/libopencv_core.so.4.5 +0 -0
- mindspore/lib/libopencv_imgcodecs.so.4.5 +0 -0
- mindspore/lib/libopencv_imgproc.so.4.5 +0 -0
- mindspore/lib/libps_cache.so +0 -0
- mindspore/lib/plugin/ascend/custom_aicpu_ops/op_impl/cpu/aicpu_kernel/impl/libcust_aicpu_kernels.so +0 -0
- mindspore/lib/plugin/ascend/custom_aicpu_ops/op_impl/cpu/aicpu_kernel/impl/libcust_cpu_kernels.so +0 -0
- mindspore/lib/plugin/ascend/custom_aicpu_ops/op_impl/cpu/config/cust_aicpu_kernel.json +9000 -0
- mindspore/lib/plugin/ascend/custom_aicpu_ops/op_proto/libcust_op_proto.so +0 -0
- mindspore/lib/plugin/ascend/libakg.so +0 -0
- mindspore/lib/plugin/ascend/libascend_collective.so +0 -0
- mindspore/lib/plugin/ascend/libdvpp_utils.so +0 -0
- mindspore/lib/plugin/ascend/libhccl_plugin.so +0 -0
- mindspore/lib/plugin/ascend/libmindspore_aicpu_kernels.so +0 -0
- mindspore/lib/plugin/ascend/libmindspore_cpu_kernels.so +0 -0
- mindspore/lib/plugin/cpu/libakg.so +0 -0
- mindspore/lib/plugin/gpu/libcuda_ops.so.10 +0 -0
- mindspore/lib/plugin/gpu/libcuda_ops.so.11 +0 -0
- mindspore/lib/plugin/gpu10.1/libakg.so +0 -0
- mindspore/lib/plugin/gpu10.1/libnccl.so.2 +0 -0
- mindspore/lib/plugin/gpu10.1/libnvidia_collective.so +0 -0
- mindspore/lib/plugin/gpu11.1/libakg.so +0 -0
- mindspore/lib/plugin/gpu11.1/libnccl.so.2 +0 -0
- mindspore/lib/plugin/gpu11.1/libnvidia_collective.so +0 -0
- mindspore/lib/plugin/gpu11.6/libakg.so +0 -0
- mindspore/lib/plugin/gpu11.6/libnccl.so.2 +0 -0
- mindspore/lib/plugin/gpu11.6/libnvidia_collective.so +0 -0
- mindspore/lib/plugin/libmindspore_ascend.so.1 +0 -0
- mindspore/lib/plugin/libmindspore_ascend.so.2 +0 -0
- mindspore/lib/plugin/libmindspore_gpu.so.10.1 +0 -0
- mindspore/lib/plugin/libmindspore_gpu.so.11.1 +0 -0
- mindspore/lib/plugin/libmindspore_gpu.so.11.6 +0 -0
- mindspore/log.py +9 -6
- mindspore/mindrecord/filereader.py +33 -4
- mindspore/mindrecord/filewriter.py +70 -35
- mindspore/mindrecord/mindpage.py +40 -34
- mindspore/mindrecord/shardreader.py +1 -1
- mindspore/mindrecord/shardsegment.py +1 -1
- mindspore/mindrecord/tools/cifar100_to_mr.py +25 -18
- mindspore/mindrecord/tools/cifar10_to_mr.py +25 -18
- mindspore/mindrecord/tools/csv_to_mr.py +29 -13
- mindspore/mindrecord/tools/imagenet_to_mr.py +24 -10
- mindspore/mindrecord/tools/mnist_to_mr.py +24 -11
- mindspore/mindrecord/tools/tfrecord_to_mr.py +31 -26
- mindspore/nn/cell.py +463 -169
- mindspore/nn/dynamic_lr.py +47 -43
- mindspore/nn/layer/activation.py +225 -82
- mindspore/nn/layer/basic.py +121 -79
- mindspore/nn/layer/channel_shuffle.py +21 -21
- mindspore/nn/layer/combined.py +33 -26
- mindspore/nn/layer/container.py +277 -22
- mindspore/nn/layer/conv.py +441 -304
- mindspore/nn/layer/dense.py +19 -13
- mindspore/nn/layer/embedding.py +62 -49
- mindspore/nn/layer/flash_attention.py +264 -0
- mindspore/nn/layer/image.py +50 -39
- mindspore/nn/layer/math.py +62 -51
- mindspore/nn/layer/normalization.py +219 -167
- mindspore/nn/layer/padding.py +58 -70
- mindspore/nn/layer/pooling.py +334 -287
- mindspore/nn/layer/rnn_cells.py +53 -38
- mindspore/nn/layer/rnns.py +59 -56
- mindspore/nn/layer/thor_layer.py +52 -44
- mindspore/nn/layer/timedistributed.py +6 -4
- mindspore/nn/layer/transformer.py +284 -164
- mindspore/nn/learning_rate_schedule.py +34 -25
- mindspore/nn/loss/__init__.py +3 -2
- mindspore/nn/loss/loss.py +554 -311
- mindspore/nn/optim/ada_grad.py +12 -9
- mindspore/nn/optim/adadelta.py +14 -11
- mindspore/nn/optim/adafactor.py +19 -16
- mindspore/nn/optim/adam.py +62 -47
- mindspore/nn/optim/adamax.py +13 -10
- mindspore/nn/optim/adasum.py +12 -8
- mindspore/nn/optim/asgd.py +10 -9
- mindspore/nn/optim/ftrl.py +20 -17
- mindspore/nn/optim/lamb.py +16 -12
- mindspore/nn/optim/lars.py +8 -6
- mindspore/nn/optim/lazyadam.py +25 -20
- mindspore/nn/optim/momentum.py +10 -7
- mindspore/nn/optim/optimizer.py +61 -9
- mindspore/nn/optim/proximal_ada_grad.py +14 -13
- mindspore/nn/optim/rmsprop.py +17 -13
- mindspore/nn/optim/rprop.py +30 -17
- mindspore/nn/optim/sgd.py +40 -23
- mindspore/nn/optim/thor.py +24 -26
- mindspore/nn/probability/bijector/bijector.py +11 -11
- mindspore/nn/probability/bijector/exp.py +1 -1
- mindspore/nn/probability/bijector/gumbel_cdf.py +3 -3
- mindspore/nn/probability/bijector/invert.py +1 -1
- mindspore/nn/probability/bijector/power_transform.py +29 -29
- mindspore/nn/probability/bijector/scalar_affine.py +3 -3
- mindspore/nn/probability/bijector/softplus.py +5 -5
- mindspore/nn/probability/bnn_layers/bnn_cell_wrapper.py +4 -2
- mindspore/nn/probability/bnn_layers/conv_variational.py +13 -13
- mindspore/nn/probability/bnn_layers/dense_variational.py +12 -12
- mindspore/nn/probability/bnn_layers/layer_distribution.py +9 -8
- mindspore/nn/probability/distribution/_utils/custom_ops.py +19 -3
- mindspore/nn/probability/distribution/_utils/utils.py +1 -1
- mindspore/nn/probability/distribution/bernoulli.py +9 -9
- mindspore/nn/probability/distribution/beta.py +8 -8
- mindspore/nn/probability/distribution/categorical.py +23 -15
- mindspore/nn/probability/distribution/cauchy.py +5 -6
- mindspore/nn/probability/distribution/distribution.py +3 -3
- mindspore/nn/probability/distribution/exponential.py +4 -4
- mindspore/nn/probability/distribution/gamma.py +10 -10
- mindspore/nn/probability/distribution/geometric.py +8 -8
- mindspore/nn/probability/distribution/gumbel.py +8 -9
- mindspore/nn/probability/distribution/half_normal.py +5 -5
- mindspore/nn/probability/distribution/laplace.py +5 -5
- mindspore/nn/probability/distribution/log_normal.py +12 -11
- mindspore/nn/probability/distribution/logistic.py +8 -8
- mindspore/nn/probability/distribution/normal.py +6 -5
- mindspore/nn/probability/distribution/poisson.py +10 -11
- mindspore/nn/probability/distribution/student_t.py +8 -9
- mindspore/nn/probability/distribution/transformed_distribution.py +5 -5
- mindspore/nn/probability/distribution/uniform.py +11 -11
- mindspore/nn/reinforcement/tensor_array.py +2 -2
- mindspore/nn/sparse/sparse.py +9 -9
- mindspore/nn/wrap/cell_wrapper.py +188 -63
- mindspore/nn/wrap/grad_reducer.py +21 -12
- mindspore/nn/wrap/loss_scale.py +136 -49
- mindspore/numpy/__init__.py +4 -4
- mindspore/numpy/array_creations.py +55 -56
- mindspore/numpy/array_ops.py +134 -35
- mindspore/numpy/logic_ops.py +66 -20
- mindspore/numpy/math_ops.py +142 -139
- mindspore/numpy/utils_const.py +2 -2
- mindspore/offline_debug/convert_async.py +2 -2
- mindspore/ops/_grad_experimental/__init__.py +7 -5
- mindspore/ops/_grad_experimental/grad_array_ops.py +231 -348
- mindspore/ops/{_grad → _grad_experimental}/grad_base.py +1 -33
- mindspore/ops/{_grad → _grad_experimental}/grad_comm_ops.py +25 -13
- mindspore/ops/{_grad/__init__.py → _grad_experimental/grad_debug_ops.py} +15 -7
- mindspore/ops/{_grad → _grad_experimental}/grad_implementations.py +17 -11
- mindspore/ops/_grad_experimental/grad_inner_ops.py +33 -52
- mindspore/ops/_grad_experimental/grad_math_ops.py +151 -1224
- mindspore/ops/_grad_experimental/grad_nn_ops.py +141 -414
- mindspore/ops/{_grad → _grad_experimental}/grad_quant_ops.py +10 -6
- mindspore/ops/_grad_experimental/grad_sparse.py +317 -2
- mindspore/ops/_grad_experimental/grad_sparse_ops.py +3 -13
- mindspore/ops/{_grad → _grad_experimental}/taylor_rule.py +1 -1
- mindspore/ops/_op_impl/_custom_op/dsd_back_impl.py +1 -1
- mindspore/ops/_op_impl/_custom_op/flash_attention/__init__.py +0 -0
- mindspore/ops/_op_impl/_custom_op/flash_attention/attention.py +406 -0
- mindspore/{_extends/graph_kernel/expanders/complex/__init__.py → ops/_op_impl/_custom_op/flash_attention/constants.py} +27 -8
- mindspore/ops/_op_impl/_custom_op/flash_attention/flash_attention_bwd.py +467 -0
- mindspore/ops/_op_impl/_custom_op/flash_attention/flash_attention_fwd.py +563 -0
- mindspore/ops/_op_impl/_custom_op/flash_attention/flash_attention_impl.py +193 -0
- mindspore/ops/_op_impl/_custom_op/flash_attention/tik_ops_utils.py +435 -0
- mindspore/ops/_op_impl/_custom_op/flash_attention/tiling_strategy/__init__.py +0 -0
- mindspore/ops/_op_impl/_custom_op/flash_attention/tiling_strategy/sparse_tiling.py +45 -0
- mindspore/ops/_op_impl/_custom_op/flash_attention/tiling_strategy/strategy.py +67 -0
- mindspore/ops/_op_impl/_custom_op/flash_attention/tiling_strategy/wukong_tiling.py +62 -0
- mindspore/ops/_op_impl/_custom_op/matmul_cube_dense_left_impl.py +2 -2
- mindspore/ops/_op_impl/aicpu/__init__.py +41 -1
- mindspore/ops/_op_impl/aicpu/adaptive_max_pool_2d.py +37 -0
- mindspore/ops/_op_impl/aicpu/bias_add_grad.py +0 -1
- mindspore/ops/_op_impl/aicpu/cast.py +52 -0
- mindspore/ops/_op_impl/aicpu/coalesce.py +2 -0
- mindspore/ops/_op_impl/aicpu/col2im.py +3 -1
- mindspore/ops/_op_impl/aicpu/count_nonzero.py +43 -0
- mindspore/ops/_op_impl/aicpu/dropout_genmask.py +6 -0
- mindspore/ops/_op_impl/aicpu/eps.py +32 -0
- mindspore/ops/_op_impl/aicpu/eye.py +4 -4
- mindspore/ops/_op_impl/aicpu/fft_with_size.py +6 -0
- mindspore/ops/_op_impl/aicpu/fill_diagonal.py +5 -0
- mindspore/ops/_op_impl/aicpu/gamma.py +2 -2
- mindspore/ops/_op_impl/aicpu/im2col.py +3 -5
- mindspore/ops/_op_impl/aicpu/lgamma.py +1 -0
- mindspore/ops/_op_impl/aicpu/log_uniform_candidate_sampler.py +6 -3
- mindspore/ops/_op_impl/aicpu/lu.py +39 -0
- mindspore/ops/_op_impl/aicpu/lu_unpack_grad.py +0 -1
- mindspore/ops/_op_impl/aicpu/masked_scatter.py +1 -0
- mindspore/ops/_op_impl/aicpu/masked_select_grad.py +3 -0
- mindspore/ops/_op_impl/aicpu/matrix_band_part.py +59 -0
- mindspore/ops/_op_impl/aicpu/matrix_power.py +6 -1
- mindspore/ops/_op_impl/aicpu/median.py +1 -0
- mindspore/ops/_op_impl/aicpu/multinomial.py +9 -9
- mindspore/ops/_op_impl/aicpu/not_equal.py +0 -5
- mindspore/ops/_op_impl/aicpu/pad_v3.py +3 -1
- mindspore/ops/_op_impl/aicpu/pad_v3_grad.py +2 -0
- mindspore/ops/_op_impl/aicpu/parameterized_truncated_normal.py +15 -7
- mindspore/ops/_op_impl/aicpu/random_categorical.py +39 -19
- mindspore/ops/_op_impl/aicpu/random_choice_with_mask.py +5 -2
- mindspore/ops/_op_impl/aicpu/random_poisson.py +103 -52
- mindspore/ops/_op_impl/aicpu/random_shuffle.py +17 -15
- mindspore/ops/_op_impl/aicpu/resize_bilinear_grad.py +0 -1
- mindspore/ops/_op_impl/aicpu/resize_nearest_neighbor_v2.py +0 -6
- mindspore/ops/_op_impl/aicpu/resize_nearest_neighbor_v2_grad.py +0 -7
- mindspore/ops/_op_impl/aicpu/scatter_nd.py +2 -0
- mindspore/ops/_op_impl/aicpu/sequence_concat.py +40 -0
- mindspore/ops/_op_impl/aicpu/sequence_stack.py +40 -0
- mindspore/ops/_op_impl/aicpu/{sparseaddmm.py → sparse_addmm.py} +2 -2
- mindspore/ops/_op_impl/aicpu/{sparsesparsemaximum.py → sparse_sparse_maximum.py} +4 -4
- mindspore/ops/_op_impl/aicpu/standard_laplace.py +5 -4
- mindspore/ops/_op_impl/aicpu/standard_normal.py +5 -4
- mindspore/ops/_op_impl/aicpu/truncated_normal.py +9 -7
- mindspore/ops/_op_impl/aicpu/uniform.py +5 -3
- mindspore/ops/_op_impl/aicpu/uniform_candidate_sampler.py +8 -4
- mindspore/ops/_op_impl/aicpu/uniform_int.py +5 -5
- mindspore/ops/_op_impl/aicpu/uniform_real.py +4 -4
- mindspore/ops/_op_impl/aicpu/upsample_nearest_3d.py +14 -6
- mindspore/ops/_op_impl/aicpu/upsample_nearest_3d_grad.py +22 -8
- mindspore/ops/_op_impl/aicpu/upsample_trilinear_3d.py +11 -6
- mindspore/ops/_op_impl/aicpu/upsample_trilinear_3d_grad.py +21 -10
- mindspore/ops/_op_impl/tbe/__init__.py +6 -4
- mindspore/ops/_op_impl/tbe/atomic_addr_clean.py +1 -1
- mindspore/ops/_op_impl/tbe/avg_pool.py +2 -2
- mindspore/ops/_op_impl/tbe/avg_pool_3d.py +3 -3
- mindspore/ops/_op_impl/tbe/avg_pool_3d_grad.py +4 -4
- mindspore/ops/_op_impl/tbe/avg_pool_ds.py +2 -2
- mindspore/ops/_op_impl/tbe/avg_pool_grad.py +3 -3
- mindspore/ops/_op_impl/tbe/avg_pool_grad_vm.py +3 -3
- mindspore/ops/_op_impl/tbe/batch_to_space.py +1 -1
- mindspore/ops/_op_impl/tbe/batch_to_space_nd.py +2 -2
- mindspore/ops/_op_impl/tbe/bn_infer.py +2 -2
- mindspore/ops/_op_impl/tbe/bn_infer_ds.py +3 -2
- mindspore/ops/_op_impl/tbe/broadcast_to.py +1 -1
- mindspore/ops/_op_impl/tbe/depthwise_conv2d.py +3 -3
- mindspore/ops/_op_impl/tbe/expand_dims.py +1 -1
- mindspore/ops/_op_impl/tbe/gather_v2.py +56 -0
- mindspore/ops/_op_impl/tbe/im2col.py +4 -4
- mindspore/ops/_op_impl/tbe/inplace_index_add.py +7 -3
- mindspore/ops/_op_impl/tbe/mem_set.py +38 -0
- mindspore/ops/_op_impl/tbe/scatter_nd_add.py +3 -0
- mindspore/ops/_op_impl/tbe/scatter_nd_d.py +1 -1
- mindspore/ops/_op_impl/tbe/space_to_batch.py +1 -1
- mindspore/ops/_op_impl/tbe/space_to_batch_nd.py +2 -2
- mindspore/ops/_op_impl/tbe/trans_data_ds.py +2 -0
- mindspore/ops/_primitive_cache.py +1 -1
- mindspore/ops/_tracefunc.py +241 -0
- mindspore/ops/_utils/utils.py +10 -2
- mindspore/ops/_vmap/vmap_array_ops.py +5 -3
- mindspore/ops/_vmap/vmap_base.py +5 -4
- mindspore/ops/_vmap/vmap_convolution_ops.py +1 -1
- mindspore/ops/_vmap/vmap_grad_math_ops.py +6 -4
- mindspore/ops/_vmap/vmap_grad_nn_ops.py +11 -6
- mindspore/ops/_vmap/vmap_math_ops.py +5 -2
- mindspore/ops/_vmap/vmap_nn_ops.py +135 -11
- mindspore/ops/arg_dtype_cast.py +54 -0
- mindspore/ops/composite/__init__.py +7 -5
- mindspore/ops/composite/base.py +78 -34
- mindspore/ops/composite/math_ops.py +5 -695
- mindspore/ops/composite/multitype_ops/_compile_utils.py +403 -97
- mindspore/ops/composite/multitype_ops/_constexpr_utils.py +28 -22
- mindspore/ops/composite/multitype_ops/add_impl.py +69 -7
- mindspore/ops/composite/multitype_ops/bitwise_and_impl.py +2 -1
- mindspore/ops/composite/multitype_ops/bitwise_or_impl.py +2 -1
- mindspore/ops/composite/multitype_ops/bitwise_xor_impl.py +2 -0
- mindspore/ops/composite/multitype_ops/div_impl.py +1 -0
- mindspore/ops/composite/multitype_ops/floordiv_impl.py +1 -0
- mindspore/ops/composite/multitype_ops/getitem_impl.py +48 -10
- mindspore/ops/composite/multitype_ops/greater_equal_impl.py +2 -0
- mindspore/ops/composite/multitype_ops/greater_impl.py +2 -0
- mindspore/ops/composite/multitype_ops/left_shift_impl.py +2 -0
- mindspore/ops/composite/multitype_ops/less_equal_impl.py +2 -0
- mindspore/ops/composite/multitype_ops/less_impl.py +2 -0
- mindspore/ops/composite/multitype_ops/logic_not_impl.py +2 -2
- mindspore/ops/composite/multitype_ops/mod_impl.py +1 -0
- mindspore/ops/composite/multitype_ops/mul_impl.py +1 -0
- mindspore/ops/composite/multitype_ops/negative_impl.py +1 -0
- mindspore/ops/composite/multitype_ops/not_in_impl.py +1 -0
- mindspore/ops/composite/multitype_ops/ones_like_impl.py +6 -0
- mindspore/ops/composite/multitype_ops/pow_impl.py +1 -0
- mindspore/ops/composite/multitype_ops/right_shift_impl.py +2 -0
- mindspore/ops/composite/multitype_ops/setitem_impl.py +10 -7
- mindspore/ops/composite/multitype_ops/sub_impl.py +1 -0
- mindspore/ops/composite/multitype_ops/uadd_impl.py +2 -0
- mindspore/ops/composite/multitype_ops/zeros_like_impl.py +9 -0
- mindspore/ops/deprecated.py +304 -0
- mindspore/ops/function/__init__.py +41 -4
- mindspore/ops/function/array_func.py +1108 -467
- mindspore/ops/function/clip_func.py +94 -27
- mindspore/ops/function/debug_func.py +3 -1
- mindspore/ops/function/grad/grad_func.py +82 -73
- mindspore/ops/function/image_func.py +28 -12
- mindspore/ops/function/linalg_func.py +135 -39
- mindspore/ops/function/math_func.py +3779 -894
- mindspore/ops/function/nn_func.py +1584 -657
- mindspore/ops/function/parameter_func.py +13 -3
- mindspore/ops/function/random_func.py +247 -153
- mindspore/ops/function/sparse_func.py +14 -11
- mindspore/ops/function/sparse_unary_func.py +173 -47
- mindspore/ops/function/spectral_func.py +8 -4
- mindspore/ops/function/vmap_func.py +8 -7
- mindspore/ops/functional.py +47 -16
- mindspore/ops/op_info_register.py +346 -86
- mindspore/ops/operations/__init__.py +38 -22
- mindspore/ops/operations/_grad_ops.py +145 -149
- mindspore/ops/operations/_inner_ops.py +298 -56
- mindspore/ops/operations/_ms_kernel.py +3 -3
- mindspore/ops/operations/_quant_ops.py +24 -28
- mindspore/ops/operations/_rl_inner_ops.py +9 -7
- mindspore/ops/operations/_scalar_ops.py +115 -0
- mindspore/ops/operations/_sequence_ops.py +148 -10
- mindspore/ops/operations/_tensor_array.py +1 -1
- mindspore/ops/operations/_thor_ops.py +2 -2
- mindspore/ops/operations/array_ops.py +1239 -561
- mindspore/ops/operations/comm_ops.py +166 -90
- mindspore/ops/operations/control_ops.py +3 -3
- mindspore/ops/operations/custom_ops.py +124 -102
- mindspore/ops/operations/debug_ops.py +24 -11
- mindspore/ops/operations/image_ops.py +86 -71
- mindspore/ops/operations/inner_ops.py +18 -13
- mindspore/ops/operations/linalg_ops.py +30 -11
- mindspore/ops/operations/math_ops.py +1730 -435
- mindspore/ops/operations/nn_ops.py +1953 -943
- mindspore/ops/operations/other_ops.py +65 -43
- mindspore/ops/operations/random_ops.py +258 -98
- mindspore/ops/operations/rl_ops.py +4 -36
- mindspore/ops/operations/sparse_ops.py +38 -33
- mindspore/ops/operations/spectral_ops.py +8 -4
- mindspore/ops/primitive.py +66 -44
- mindspore/ops/signature.py +5 -5
- mindspore/parallel/_auto_parallel_context.py +80 -19
- mindspore/parallel/_cost_model_context.py +42 -0
- mindspore/parallel/_offload_context.py +162 -72
- mindspore/parallel/_parallel_serialization.py +2 -2
- mindspore/parallel/_ps_context.py +16 -4
- mindspore/parallel/_recovery_context.py +2 -1
- mindspore/parallel/_tensor.py +15 -13
- mindspore/parallel/_transformer/layers.py +8 -6
- mindspore/parallel/_transformer/loss.py +1 -0
- mindspore/parallel/_transformer/moe.py +7 -7
- mindspore/parallel/_transformer/op_parallel_config.py +12 -1
- mindspore/parallel/_transformer/transformer.py +34 -14
- mindspore/parallel/_utils.py +36 -14
- mindspore/parallel/algo_parameter_config.py +114 -20
- mindspore/parallel/checkpoint_transform.py +16 -18
- mindspore/parallel/shard.py +16 -13
- mindspore/profiler/__init__.py +1 -1
- mindspore/profiler/common/struct_type.py +3 -3
- mindspore/profiler/common/util.py +3 -2
- mindspore/profiler/envprofiling.py +11 -4
- mindspore/profiler/parser/aicpu_data_parser.py +5 -3
- mindspore/profiler/parser/ascend_flops_generator.py +94 -0
- mindspore/profiler/parser/ascend_fpbp_generator.py +76 -0
- mindspore/profiler/parser/ascend_hccl_generator.py +288 -0
- mindspore/profiler/parser/ascend_msprof_exporter.py +213 -0
- mindspore/profiler/parser/ascend_msprof_generator.py +199 -0
- mindspore/profiler/parser/ascend_op_generator.py +276 -0
- mindspore/profiler/parser/ascend_steptrace_generator.py +94 -0
- mindspore/profiler/parser/ascend_timeline_generator.py +110 -54
- mindspore/profiler/parser/base_timeline_generator.py +11 -7
- mindspore/profiler/parser/cpu_gpu_timeline_generator.py +45 -46
- mindspore/profiler/parser/flops_parser.py +15 -11
- mindspore/profiler/parser/framework_parser.py +92 -73
- mindspore/profiler/parser/hccl_parser.py +16 -12
- mindspore/profiler/parser/integrator.py +22 -11
- mindspore/profiler/parser/memory_usage_parser.py +36 -11
- mindspore/profiler/parser/minddata_analyzer.py +12 -14
- mindspore/profiler/parser/minddata_pipeline_parser.py +1 -1
- mindspore/profiler/parser/msadvisor_parser.py +8 -4
- mindspore/profiler/parser/op_intermediate_parser.py +5 -2
- mindspore/profiler/parser/optime_parser.py +1 -1
- mindspore/profiler/parser/profiler_info.py +4 -5
- mindspore/profiler/parser/step_trace_parser.py +11 -14
- mindspore/profiler/profiling.py +678 -377
- mindspore/rewrite/api/node.py +211 -54
- mindspore/rewrite/api/node_type.py +5 -0
- mindspore/rewrite/api/pattern_engine.py +22 -23
- mindspore/rewrite/api/scoped_value.py +20 -17
- mindspore/rewrite/api/symbol_tree.py +252 -106
- mindspore/rewrite/api/tree_node_helper.py +3 -0
- mindspore/rewrite/ast_helpers/__init__.py +2 -1
- mindspore/rewrite/ast_helpers/ast_finder.py +129 -0
- mindspore/rewrite/ast_helpers/ast_modifier.py +116 -104
- mindspore/rewrite/ast_transformers/flatten_recursive_stmt.py +97 -46
- mindspore/rewrite/common/rewrite_elog.py +5 -1
- mindspore/rewrite/namer.py +51 -51
- mindspore/rewrite/namespace.py +14 -5
- mindspore/{ops/bprop_mindir → rewrite/node}/__init__.py +9 -4
- mindspore/rewrite/node/call_function.py +79 -0
- mindspore/rewrite/node/cell_container.py +135 -0
- mindspore/rewrite/node/control_flow.py +88 -0
- mindspore/rewrite/{node.py → node/node.py} +313 -247
- mindspore/rewrite/node/node_manager.py +254 -0
- mindspore/rewrite/node/node_topological_manager.py +243 -0
- mindspore/rewrite/parsers/arguments_parser.py +22 -21
- mindspore/rewrite/parsers/assign_parser.py +225 -239
- mindspore/rewrite/parsers/attribute_parser.py +9 -7
- mindspore/rewrite/parsers/class_def_parser.py +179 -218
- mindspore/rewrite/parsers/constant_parser.py +9 -6
- mindspore/rewrite/parsers/container_parser.py +9 -7
- mindspore/rewrite/parsers/for_parser.py +36 -15
- mindspore/rewrite/parsers/function_def_parser.py +23 -20
- mindspore/rewrite/parsers/if_parser.py +28 -24
- mindspore/rewrite/parsers/module_parser.py +202 -25
- mindspore/rewrite/{parser.py → parsers/parser.py} +4 -2
- mindspore/rewrite/{parser_register.py → parsers/parser_register.py} +1 -1
- mindspore/rewrite/parsers/return_parser.py +6 -6
- mindspore/rewrite/sparsify/sparse_transformer.py +12 -3
- mindspore/rewrite/sparsify/sparsify.py +4 -1
- mindspore/rewrite/sparsify/utils.py +11 -5
- mindspore/rewrite/symbol_tree.py +577 -732
- mindspore/rewrite/symbol_tree_builder.py +9 -175
- mindspore/rewrite/symbol_tree_dumper.py +2 -2
- mindspore/run_check/_check_version.py +46 -39
- mindspore/run_check/run_check.py +3 -2
- mindspore/{scipy/sparse → safeguard}/__init__.py +4 -5
- mindspore/safeguard/rewrite_obfuscation.py +517 -0
- mindspore/scipy/__init__.py +1 -1
- mindspore/scipy/linalg.py +67 -61
- mindspore/scipy/ops.py +5 -41
- mindspore/scipy/ops_grad.py +3 -2
- mindspore/scipy/ops_wrapper.py +5 -5
- mindspore/scipy/optimize/line_search.py +8 -8
- mindspore/scipy/optimize/linear_sum_assignment.py +4 -4
- mindspore/scipy/optimize/minimize.py +16 -12
- mindspore/scipy/utils.py +1 -52
- mindspore/scipy/utils_const.py +4 -4
- mindspore/train/__init__.py +4 -4
- mindspore/train/_utils.py +13 -5
- mindspore/train/amp.py +410 -148
- mindspore/train/anf_ir_pb2.py +16 -4
- mindspore/train/callback/_backup_and_restore.py +8 -11
- mindspore/train/callback/_callback.py +80 -3
- mindspore/train/callback/_checkpoint.py +82 -51
- mindspore/train/callback/_early_stop.py +12 -15
- mindspore/train/callback/_history.py +1 -1
- mindspore/train/callback/_lambda_callback.py +13 -13
- mindspore/train/callback/_landscape.py +21 -17
- mindspore/train/callback/_loss_monitor.py +9 -10
- mindspore/train/callback/_on_request_exit.py +16 -33
- mindspore/train/callback/_reduce_lr_on_plateau.py +21 -24
- mindspore/train/callback/_summary_collector.py +44 -30
- mindspore/train/callback/_time_monitor.py +62 -12
- mindspore/train/data_sink.py +10 -16
- mindspore/train/dataset_helper.py +154 -86
- mindspore/train/loss_scale_manager.py +14 -9
- mindspore/train/metrics/__init__.py +10 -2
- mindspore/train/metrics/accuracy.py +1 -1
- mindspore/train/metrics/auc.py +1 -1
- mindspore/train/metrics/bleu_score.py +2 -2
- mindspore/train/metrics/confusion_matrix.py +14 -14
- mindspore/train/metrics/cosine_similarity.py +3 -3
- mindspore/train/metrics/dice.py +1 -1
- mindspore/train/metrics/fbeta.py +1 -1
- mindspore/train/metrics/hausdorff_distance.py +8 -6
- mindspore/train/metrics/mean_surface_distance.py +5 -4
- mindspore/train/metrics/metric.py +49 -17
- mindspore/train/metrics/occlusion_sensitivity.py +4 -4
- mindspore/train/metrics/perplexity.py +1 -1
- mindspore/train/metrics/precision.py +2 -2
- mindspore/train/metrics/recall.py +2 -3
- mindspore/train/metrics/roc.py +7 -7
- mindspore/train/metrics/root_mean_square_surface_distance.py +5 -4
- mindspore/train/metrics/topk.py +7 -4
- mindspore/train/mind_ir_pb2.py +193 -48
- mindspore/train/model.py +377 -133
- mindspore/train/serialization.py +697 -245
- mindspore/train/summary/_summary_adapter.py +5 -2
- mindspore/train/summary/_writer_pool.py +4 -3
- mindspore/train/summary/summary_record.py +25 -23
- mindspore/train/train_thor/convert_utils.py +39 -23
- mindspore/train/train_thor/dataset_helper.py +4 -3
- mindspore/train/train_thor/model_thor.py +8 -8
- mindspore/version.py +1 -1
- {mindspore-2.0.0rc1.dist-info → mindspore-2.2.0.dist-info}/METADATA +7 -8
- {mindspore-2.0.0rc1.dist-info → mindspore-2.2.0.dist-info}/RECORD +647 -818
- {mindspore-2.0.0rc1.dist-info → mindspore-2.2.0.dist-info}/entry_points.txt +0 -1
- mindspore/_akg/akg/tvm/contrib/debugger/__init__.py +0 -16
- mindspore/_akg/akg/tvm/contrib/debugger/debug_result.py +0 -274
- mindspore/_akg/akg/tvm/contrib/debugger/debug_runtime.py +0 -259
- mindspore/_akg/akg/tvm/contrib/peak.py +0 -341
- mindspore/_akg/akg/tvm/contrib/rpc.py +0 -25
- mindspore/_akg/akg/tvm/contrib/xcode.py +0 -257
- mindspore/_akg/akg/tvm/exec/__init__.py +0 -17
- mindspore/_akg/akg/tvm/exec/autotvm_log_editor.py +0 -60
- mindspore/_akg/akg/tvm/exec/measure_peak.py +0 -48
- mindspore/_akg/akg/tvm/exec/query_rpc_tracker.py +0 -48
- mindspore/_akg/akg/tvm/exec/rpc_proxy.py +0 -98
- mindspore/_akg/akg/tvm/exec/rpc_server.py +0 -88
- mindspore/_akg/akg/tvm/exec/rpc_tracker.py +0 -62
- mindspore/_akg/akg/tvm/rpc/__init__.py +0 -29
- mindspore/_akg/akg/tvm/rpc/base.py +0 -182
- mindspore/_akg/akg/tvm/rpc/client.py +0 -436
- mindspore/_akg/akg/tvm/rpc/proxy.py +0 -595
- mindspore/_akg/akg/tvm/rpc/server.py +0 -413
- mindspore/_akg/akg/tvm/rpc/tornado_util.py +0 -121
- mindspore/_akg/akg/tvm/rpc/tracker.py +0 -431
- mindspore/_extends/graph_kernel/expander.py +0 -80
- mindspore/_extends/graph_kernel/expanders/__init__.py +0 -57
- mindspore/_extends/graph_kernel/expanders/_utils.py +0 -269
- mindspore/_extends/graph_kernel/expanders/addn.py +0 -33
- mindspore/_extends/graph_kernel/expanders/batchnorm.py +0 -152
- mindspore/_extends/graph_kernel/expanders/batchnorm_grad.py +0 -105
- mindspore/_extends/graph_kernel/expanders/bias_add_grad.py +0 -49
- mindspore/_extends/graph_kernel/expanders/clip_by_norm_no_div_sum.py +0 -33
- mindspore/_extends/graph_kernel/expanders/complex/abs.py +0 -30
- mindspore/_extends/graph_kernel/expanders/complex/add.py +0 -44
- mindspore/_extends/graph_kernel/expanders/complex/div.py +0 -62
- mindspore/_extends/graph_kernel/expanders/complex/mul.py +0 -52
- mindspore/_extends/graph_kernel/expanders/complex/real_div.py +0 -62
- mindspore/_extends/graph_kernel/expanders/complex/sub.py +0 -45
- mindspore/_extends/graph_kernel/expanders/conv2d.py +0 -200
- mindspore/_extends/graph_kernel/expanders/dropout_grad.py +0 -30
- mindspore/_extends/graph_kernel/expanders/equal_count.py +0 -50
- mindspore/_extends/graph_kernel/expanders/erfc.py +0 -35
- mindspore/_extends/graph_kernel/expanders/expand_dims.py +0 -50
- mindspore/_extends/graph_kernel/expanders/fused_adam.py +0 -44
- mindspore/_extends/graph_kernel/expanders/fused_adam_weight_decay.py +0 -47
- mindspore/_extends/graph_kernel/expanders/fused_mul_add.py +0 -28
- mindspore/_extends/graph_kernel/expanders/gather.py +0 -43
- mindspore/_extends/graph_kernel/expanders/gelu_grad.py +0 -70
- mindspore/_extends/graph_kernel/expanders/gkdropout.py +0 -40
- mindspore/_extends/graph_kernel/expanders/identity.py +0 -25
- mindspore/_extends/graph_kernel/expanders/layernorm.py +0 -93
- mindspore/_extends/graph_kernel/expanders/layernorm_grad.py +0 -113
- mindspore/_extends/graph_kernel/expanders/logsoftmax.py +0 -46
- mindspore/_extends/graph_kernel/expanders/logsoftmax_grad.py +0 -36
- mindspore/_extends/graph_kernel/expanders/matmul.py +0 -80
- mindspore/_extends/graph_kernel/expanders/maximum_grad.py +0 -59
- mindspore/_extends/graph_kernel/expanders/minimum_grad.py +0 -80
- mindspore/_extends/graph_kernel/expanders/oneslike.py +0 -26
- mindspore/_extends/graph_kernel/expanders/reduce_mean.py +0 -43
- mindspore/_extends/graph_kernel/expanders/relu_grad.py +0 -32
- mindspore/_extends/graph_kernel/expanders/sigmoid_cross_entropy_with_logits.py +0 -41
- mindspore/_extends/graph_kernel/expanders/sigmoid_cross_entropy_with_logits_grad.py +0 -35
- mindspore/_extends/graph_kernel/expanders/sigmoid_grad.py +0 -31
- mindspore/_extends/graph_kernel/expanders/slice.py +0 -35
- mindspore/_extends/graph_kernel/expanders/softmax_cross_entropy_with_logits.py +0 -42
- mindspore/_extends/graph_kernel/expanders/softmax_grad_ext.py +0 -41
- mindspore/_extends/graph_kernel/expanders/softsign.py +0 -28
- mindspore/_extends/graph_kernel/expanders/sqrt_grad.py +0 -29
- mindspore/_extends/graph_kernel/expanders/square_sum_all.py +0 -44
- mindspore/_extends/graph_kernel/expanders/square_sum_v1.py +0 -37
- mindspore/_extends/graph_kernel/expanders/squared_difference.py +0 -43
- mindspore/_extends/graph_kernel/expanders/tanh_grad.py +0 -31
- mindspore/_extends/graph_kernel/expanders/tile.py +0 -54
- mindspore/_extends/graph_kernel/model/op_infer.py +0 -506
- mindspore/_extends/parse/jit_fallback_modules.py +0 -51
- mindspore/dataset/datapreprocess/preprocess_imagenet_validate_dataset.py +0 -54
- mindspore/dataset/engine/graphdata.py +0 -1586
- mindspore/include/api/net.h +0 -142
- mindspore/ops/_grad/grad_array_ops.py +0 -1347
- mindspore/ops/_grad/grad_clip_ops.py +0 -84
- mindspore/ops/_grad/grad_debug_ops.py +0 -68
- mindspore/ops/_grad/grad_inner_ops.py +0 -235
- mindspore/ops/_grad/grad_math_ops.py +0 -1684
- mindspore/ops/_grad/grad_nn_ops.py +0 -1529
- mindspore/ops/_grad/grad_other_ops.py +0 -89
- mindspore/ops/_grad/grad_sequence_ops.py +0 -296
- mindspore/ops/_grad/grad_sparse.py +0 -323
- mindspore/ops/_grad_experimental/grad_image_ops.py +0 -249
- mindspore/ops/_grad_experimental/grad_linalg_ops.py +0 -195
- mindspore/ops/_grad_experimental/grad_scalar_ops.py +0 -112
- mindspore/ops/bprop_mindir/AdaptiveAvgPool2D_bprop.mindir +0 -0
- mindspore/ops/bprop_mindir/AdaptiveMaxPool2D_bprop.mindir +0 -0
- mindspore/ops/bprop_mindir/ApproximateEqual_bprop.mindir +0 -19
- mindspore/ops/bprop_mindir/Argmax_bprop.mindir +0 -15
- mindspore/ops/bprop_mindir/Argmin_bprop.mindir +0 -15
- mindspore/ops/bprop_mindir/AssignSub_bprop.mindir +0 -19
- mindspore/ops/bprop_mindir/Assign_bprop.mindir +0 -17
- mindspore/ops/bprop_mindir/AvgPool3D_bprop.mindir +0 -150
- mindspore/ops/bprop_mindir/AvgPool_bprop.mindir +0 -66
- mindspore/ops/bprop_mindir/BCEWithLogitsLoss_bprop.mindir +0 -0
- mindspore/ops/bprop_mindir/BNTrainingReduce_bprop.mindir +0 -15
- mindspore/ops/bprop_mindir/BatchNormGrad_bprop.mindir +0 -0
- mindspore/ops/bprop_mindir/BatchToSpaceND_bprop.mindir +0 -28
- mindspore/ops/bprop_mindir/BiasAddGrad_bprop.mindir +0 -0
- mindspore/ops/bprop_mindir/BinaryCrossEntropy_bprop.mindir +0 -33
- mindspore/ops/bprop_mindir/BroadcastTo_bprop.mindir +0 -306
- mindspore/ops/bprop_mindir/Broadcast_bprop.mindir +0 -13
- mindspore/ops/bprop_mindir/CTCLoss_bprop.mindir +0 -0
- mindspore/ops/bprop_mindir/Concat_bprop.mindir +0 -0
- mindspore/ops/bprop_mindir/Conv2DBackpropFilter_bprop.mindir +0 -240
- mindspore/ops/bprop_mindir/Conv2DBackpropInput_bprop.mindir +0 -247
- mindspore/ops/bprop_mindir/Conv2DTranspose_bprop.mindir +0 -247
- mindspore/ops/bprop_mindir/Conv3DTranspose_bprop.mindir +0 -315
- mindspore/ops/bprop_mindir/Conv3D_bprop.mindir +0 -278
- mindspore/ops/bprop_mindir/DType_bprop.mindir +0 -14
- mindspore/ops/bprop_mindir/DeformableOffsets_bprop.mindir +0 -58
- mindspore/ops/bprop_mindir/Depend_bprop.mindir +0 -13
- mindspore/ops/bprop_mindir/DepthToSpace_bprop.mindir +0 -23
- mindspore/ops/bprop_mindir/DepthwiseConv2dNative_bprop.mindir +0 -138
- mindspore/ops/bprop_mindir/DiagPart_bprop.mindir +0 -15
- mindspore/ops/bprop_mindir/Dropout2D_bprop.mindir +0 -0
- mindspore/ops/bprop_mindir/Dropout3D_bprop.mindir +0 -0
- mindspore/ops/bprop_mindir/DropoutDoMask_bprop.mindir +0 -25
- mindspore/ops/bprop_mindir/DropoutGenMask_bprop.mindir +0 -18
- mindspore/ops/bprop_mindir/DropoutGrad_bprop.mindir +0 -27
- mindspore/ops/bprop_mindir/Dropout_bprop.mindir +0 -0
- mindspore/ops/bprop_mindir/DynamicGRUV2_bprop.mindir +0 -0
- mindspore/ops/bprop_mindir/DynamicRNN_bprop.mindir +0 -0
- mindspore/ops/bprop_mindir/DynamicShape_bprop.mindir +0 -14
- mindspore/ops/bprop_mindir/Elu_bprop.mindir +0 -16
- mindspore/ops/bprop_mindir/EmbeddingLookup_bprop.mindir +0 -0
- mindspore/ops/bprop_mindir/Equal_bprop.mindir +0 -19
- mindspore/ops/bprop_mindir/ExpandDims_bprop.mindir +0 -58
- mindspore/ops/bprop_mindir/FastGeLU_bprop.mindir +0 -16
- mindspore/ops/bprop_mindir/Flatten_bprop.mindir +0 -54
- mindspore/ops/bprop_mindir/FloorDiv_bprop.mindir +0 -19
- mindspore/ops/bprop_mindir/GatherD_bprop.mindir +0 -26
- mindspore/ops/bprop_mindir/GatherNd_bprop.mindir +0 -57
- mindspore/ops/bprop_mindir/Gather_bprop.mindir +0 -0
- mindspore/ops/bprop_mindir/GreaterEqual_bprop.mindir +0 -19
- mindspore/ops/bprop_mindir/Greater_bprop.mindir +0 -19
- mindspore/ops/bprop_mindir/HSigmoid_bprop.mindir +0 -16
- mindspore/ops/bprop_mindir/HSwish_bprop.mindir +0 -16
- mindspore/ops/bprop_mindir/IOU_bprop.mindir +0 -19
- mindspore/ops/bprop_mindir/InstanceNorm_bprop.mindir +0 -0
- mindspore/ops/bprop_mindir/IsFinite_bprop.mindir +0 -15
- mindspore/ops/bprop_mindir/IsInf_bprop.mindir +0 -15
- mindspore/ops/bprop_mindir/IsNan_bprop.mindir +0 -15
- mindspore/ops/bprop_mindir/KLDivLoss_bprop.mindir +0 -126
- mindspore/ops/bprop_mindir/L2Loss_bprop.mindir +0 -15
- mindspore/ops/bprop_mindir/L2Normalize_bprop.mindir +0 -30
- mindspore/ops/bprop_mindir/LRN_bprop.mindir +0 -43
- mindspore/ops/bprop_mindir/LayerNormGrad_bprop.mindir +0 -0
- mindspore/ops/bprop_mindir/LessEqual_bprop.mindir +0 -19
- mindspore/ops/bprop_mindir/Less_bprop.mindir +0 -19
- mindspore/ops/bprop_mindir/LinSpace_bprop.mindir +0 -23
- mindspore/ops/bprop_mindir/Load_bprop.mindir +0 -13
- mindspore/ops/bprop_mindir/LogSoftmax_bprop.mindir +0 -23
- mindspore/ops/bprop_mindir/LogicalAnd_bprop.mindir +0 -19
- mindspore/ops/bprop_mindir/LogicalNot_bprop.mindir +0 -15
- mindspore/ops/bprop_mindir/MaskedSelect_bprop.mindir +0 -21
- mindspore/ops/bprop_mindir/MaxPool3DGradGrad_bprop.mindir +0 -74
- mindspore/ops/bprop_mindir/MaxPool3DGrad_bprop.mindir +0 -74
- mindspore/ops/bprop_mindir/MaxPool3D_bprop.mindir +0 -75
- mindspore/ops/bprop_mindir/MaxPoolGradGrad_bprop.mindir +0 -65
- mindspore/ops/bprop_mindir/MaxPoolWithArgmax_bprop.mindir +0 -0
- mindspore/ops/bprop_mindir/Maximum_bprop.mindir +0 -0
- mindspore/ops/bprop_mindir/Minimum_bprop.mindir +0 -0
- mindspore/ops/bprop_mindir/MirrorPad_bprop.mindir +0 -27
- mindspore/ops/bprop_mindir/Mish_bprop.mindir +0 -35
- mindspore/ops/bprop_mindir/MulNoNan_bprop.mindir +0 -0
- mindspore/ops/bprop_mindir/NLLLoss_bprop.mindir +0 -0
- mindspore/ops/bprop_mindir/NonZero_bprop.mindir +0 -14
- mindspore/ops/bprop_mindir/NotEqual_bprop.mindir +0 -19
- mindspore/ops/bprop_mindir/OneHot_bprop.mindir +0 -26
- mindspore/ops/bprop_mindir/OnesLike_bprop.mindir +0 -14
- mindspore/ops/bprop_mindir/PReLU_bprop.mindir +0 -0
- mindspore/ops/bprop_mindir/Pad_bprop.mindir +0 -0
- mindspore/ops/bprop_mindir/Padding_bprop.mindir +0 -0
- mindspore/ops/bprop_mindir/RNNTLoss_bprop.mindir +0 -29
- mindspore/ops/bprop_mindir/ROIAlign_bprop.mindir +0 -82
- mindspore/ops/bprop_mindir/Range_bprop.mindir +0 -22
- mindspore/ops/bprop_mindir/Rank_bprop.mindir +0 -14
- mindspore/ops/bprop_mindir/ReLU6_bprop.mindir +0 -16
- mindspore/ops/bprop_mindir/ReLUV2_bprop.mindir +0 -0
- mindspore/ops/bprop_mindir/ReduceAll_bprop.mindir +0 -19
- mindspore/ops/bprop_mindir/ReduceAny_bprop.mindir +0 -19
- mindspore/ops/bprop_mindir/ReluGrad_bprop.mindir +0 -20
- mindspore/ops/bprop_mindir/Reshape_bprop.mindir +0 -60
- mindspore/ops/bprop_mindir/ResizeBilinear_bprop.mindir +0 -29
- mindspore/ops/bprop_mindir/ResizeNearestNeighbor_bprop.mindir +0 -89
- mindspore/ops/bprop_mindir/ReverseSequence_bprop.mindir +0 -52
- mindspore/ops/bprop_mindir/ReverseV2_bprop.mindir +0 -22
- mindspore/ops/bprop_mindir/Round_bprop.mindir +0 -15
- mindspore/ops/bprop_mindir/ScatterMax_bprop.mindir +0 -0
- mindspore/ops/bprop_mindir/ScatterMin_bprop.mindir +0 -0
- mindspore/ops/bprop_mindir/ScatterNdUpdate_bprop.mindir +0 -22
- mindspore/ops/bprop_mindir/ScatterNd_bprop.mindir +0 -24
- mindspore/ops/bprop_mindir/ScatterNonAliasingAdd_bprop.mindir +0 -22
- mindspore/ops/bprop_mindir/ScatterUpdate_bprop.mindir +0 -0
- mindspore/ops/bprop_mindir/SeLU_bprop.mindir +0 -21
- mindspore/ops/bprop_mindir/Select_bprop.mindir +0 -31
- mindspore/ops/bprop_mindir/Shape_bprop.mindir +0 -14
- mindspore/ops/bprop_mindir/SigmoidCrossEntropyWithLogits_bprop.mindir +0 -21
- mindspore/ops/bprop_mindir/SigmoidGrad_bprop.mindir +0 -0
- mindspore/ops/bprop_mindir/Sigmoid_bprop.mindir +0 -16
- mindspore/ops/bprop_mindir/Sign_bprop.mindir +0 -15
- mindspore/ops/bprop_mindir/Slice_bprop.mindir +0 -26
- mindspore/ops/bprop_mindir/SmoothL1Loss_bprop.mindir +0 -36
- mindspore/ops/bprop_mindir/SoftmaxCrossEntropyWithLogits_bprop.mindir +0 -0
- mindspore/ops/bprop_mindir/Softplus_bprop.mindir +0 -16
- mindspore/ops/bprop_mindir/Softsign_bprop.mindir +0 -33
- mindspore/ops/bprop_mindir/Sort_bprop.mindir +0 -0
- mindspore/ops/bprop_mindir/SpaceToBatchND_bprop.mindir +0 -28
- mindspore/ops/bprop_mindir/SpaceToDepth_bprop.mindir +0 -23
- mindspore/ops/bprop_mindir/SparseGatherV2_bprop.mindir +0 -0
- mindspore/ops/bprop_mindir/SparseSoftmaxCrossEntropyWithLogits_bprop.mindir +0 -0
- mindspore/ops/bprop_mindir/Split_bprop.mindir +0 -22
- mindspore/ops/bprop_mindir/Squeeze_bprop.mindir +0 -54
- mindspore/ops/bprop_mindir/StridedSliceGrad_bprop.mindir +0 -95
- mindspore/ops/bprop_mindir/StridedSlice_bprop.mindir +0 -98
- mindspore/ops/bprop_mindir/Switch_bprop.mindir +0 -29
- mindspore/ops/bprop_mindir/TanhGrad_bprop.mindir +0 -0
- mindspore/ops/bprop_mindir/Tanh_bprop.mindir +0 -66
- mindspore/ops/bprop_mindir/TensorScatterAdd_bprop.mindir +0 -22
- mindspore/ops/bprop_mindir/TensorScatterUpdate_bprop.mindir +0 -29
- mindspore/ops/bprop_mindir/TensorShape_bprop.mindir +0 -14
- mindspore/ops/bprop_mindir/Tile_bprop.mindir +0 -0
- mindspore/ops/bprop_mindir/TopK_bprop.mindir +0 -0
- mindspore/ops/bprop_mindir/TransShape_bprop.mindir +0 -23
- mindspore/ops/bprop_mindir/TruncateDiv_bprop.mindir +0 -19
- mindspore/ops/bprop_mindir/TupleGetItem_bprop.mindir +0 -20
- mindspore/ops/bprop_mindir/Unique_bprop.mindir +0 -16
- mindspore/ops/bprop_mindir/Unstack_bprop.mindir +0 -22
- mindspore/ops/bprop_mindir/UpsampleNearest3D_bprop.mindir +0 -32
- mindspore/ops/bprop_mindir/UpsampleTrilinear3D_bprop.mindir +0 -38
- mindspore/ops/bprop_mindir/ZerosLike_bprop.mindir +0 -15
- mindspore/ops/bprop_mindir/generate_mindir.py +0 -114
- mindspore/rewrite/node_visitor.py +0 -44
- mindspore/rewrite/topological_manager.py +0 -203
- mindspore/scipy/sparse/linalg.py +0 -192
- {mindspore-2.0.0rc1.dist-info → mindspore-2.2.0.dist-info}/WHEEL +0 -0
- {mindspore-2.0.0rc1.dist-info → mindspore-2.2.0.dist-info}/top_level.txt +0 -0
|
@@ -28,35 +28,65 @@ class TimeMonitor(Callback):
|
|
|
28
28
|
Args:
|
|
29
29
|
data_size (int): How many steps are the intervals between print information each time.
|
|
30
30
|
if the program get `batch_num` during training, `data_size` will be set to `batch_num`,
|
|
31
|
-
otherwise `data_size` will be used. Default: None.
|
|
31
|
+
otherwise `data_size` will be used. Default: ``None`` .
|
|
32
|
+
|
|
33
|
+
data_time (bool): Whether to sow the average time of fetching data in Host.
|
|
34
|
+
Note that data fetch and network compute are processed sequentially in non dataset sink mode, while
|
|
35
|
+
they are asynchronous in dataset sink mode. Default: ``False`` .
|
|
32
36
|
|
|
33
37
|
Raises:
|
|
34
38
|
ValueError: If data_size is not positive int.
|
|
39
|
+
TypeError: If data_time is not bool.
|
|
35
40
|
|
|
36
41
|
Examples:
|
|
37
|
-
.. note::
|
|
38
|
-
Before running the following example, you need to customize the network LeNet5 and
|
|
39
|
-
dataset preparation function create_dataset. Refer to
|
|
40
|
-
`Building a Network <https://www.mindspore.cn/tutorials/en/r2.0/beginner/model.html>`_
|
|
41
|
-
and `Dataset <https://www.mindspore.cn/tutorials/en/r2.0/beginner/dataset.html>`_ .
|
|
42
|
-
|
|
43
42
|
>>> from mindspore import nn
|
|
44
43
|
>>> from mindspore.train import Model, TimeMonitor
|
|
45
44
|
>>>
|
|
45
|
+
>>> # Define the network structure of LeNet5. Refer to
|
|
46
|
+
>>> # https://gitee.com/mindspore/docs/blob/r2.2/docs/mindspore/code/lenet.py
|
|
46
47
|
>>> net = LeNet5()
|
|
47
48
|
>>> loss = nn.SoftmaxCrossEntropyWithLogits(sparse=True, reduction='mean')
|
|
48
49
|
>>> optim = nn.Momentum(net.trainable_params(), 0.01, 0.9)
|
|
49
50
|
>>> model = Model(net, loss_fn=loss, optimizer=optim)
|
|
50
|
-
>>>
|
|
51
|
-
>>>
|
|
51
|
+
>>> # Create the dataset taking MNIST as an example. Refer to
|
|
52
|
+
>>> # https://gitee.com/mindspore/docs/blob/r2.2/docs/mindspore/code/mnist.py
|
|
53
|
+
>>> dataset = create_dataset()
|
|
52
54
|
>>> time_monitor = TimeMonitor()
|
|
53
55
|
>>> model.train(10, dataset, callbacks=time_monitor)
|
|
54
56
|
"""
|
|
55
57
|
|
|
56
|
-
def __init__(self, data_size=None):
|
|
58
|
+
def __init__(self, data_size=None, data_time=False):
|
|
57
59
|
super(TimeMonitor, self).__init__()
|
|
58
60
|
self.data_size = data_size
|
|
59
61
|
self.epoch_time = time.time()
|
|
62
|
+
self.data_time = data_time
|
|
63
|
+
self.data_time_sum = 0.0
|
|
64
|
+
self.data_time_start = 0.0
|
|
65
|
+
self.data_sink = lambda c: c.original_args()["dataset_sink_mode"]
|
|
66
|
+
Validator.check_bool(data_time, "data_time")
|
|
67
|
+
|
|
68
|
+
def on_train_step_begin(self, run_context):
|
|
69
|
+
"""
|
|
70
|
+
Record time at the beginning of step.
|
|
71
|
+
|
|
72
|
+
Args:
|
|
73
|
+
run_context (RunContext): Context of the process running. For more details,
|
|
74
|
+
please refer to :class:`mindspore.train.RunContext`.
|
|
75
|
+
"""
|
|
76
|
+
if self.data_time and not self.data_sink(run_context):
|
|
77
|
+
interval = time.time() - self.data_time_start
|
|
78
|
+
self.data_time_sum = self.data_time_sum + interval
|
|
79
|
+
|
|
80
|
+
def on_train_step_end(self, run_context):
|
|
81
|
+
"""
|
|
82
|
+
Record time at the end of step.
|
|
83
|
+
|
|
84
|
+
Args:
|
|
85
|
+
run_context (RunContext): Context of the process running. For more details,
|
|
86
|
+
please refer to :class:`mindspore.train.RunContext`.
|
|
87
|
+
"""
|
|
88
|
+
if self.data_time and not self.data_sink(run_context):
|
|
89
|
+
self.data_time_start = time.time()
|
|
60
90
|
|
|
61
91
|
def epoch_begin(self, run_context):
|
|
62
92
|
"""
|
|
@@ -67,6 +97,9 @@ class TimeMonitor(Callback):
|
|
|
67
97
|
please refer to :class:`mindspore.train.RunContext`.
|
|
68
98
|
"""
|
|
69
99
|
self.epoch_time = time.time()
|
|
100
|
+
if self.data_time and not self.data_sink(run_context):
|
|
101
|
+
self.data_time_sum = 0.0
|
|
102
|
+
self.data_time_start = time.time()
|
|
70
103
|
|
|
71
104
|
def epoch_end(self, run_context):
|
|
72
105
|
"""
|
|
@@ -87,5 +120,22 @@ class TimeMonitor(Callback):
|
|
|
87
120
|
Validator.check_positive_int(step_size)
|
|
88
121
|
|
|
89
122
|
step_seconds = epoch_seconds / step_size
|
|
90
|
-
|
|
91
|
-
|
|
123
|
+
|
|
124
|
+
train_log = "{} epoch time: {:5.3f} ms, per step time: {:5.3f} ms".format(
|
|
125
|
+
mode.title(), epoch_seconds, step_seconds)
|
|
126
|
+
|
|
127
|
+
if self.data_time and not self.data_sink(run_context):
|
|
128
|
+
data_step_seconds = self.data_time_sum * 1000 / step_size
|
|
129
|
+
data_log = " (data time: {:5.3f} ms)".format(data_step_seconds)
|
|
130
|
+
train_log += data_log
|
|
131
|
+
elif self.data_time and self.data_sink(run_context):
|
|
132
|
+
# send info viewer to query epoch message of cur_epoch_num
|
|
133
|
+
send_info = cb_params["dataset_helper"].get_send_info(run_context)
|
|
134
|
+
epoch = cb_params["cur_epoch_num"]
|
|
135
|
+
epoch_send_info = send_info.epoch(epoch)
|
|
136
|
+
# show average time of fetching data time
|
|
137
|
+
fetch_data_time = epoch_send_info["fetch_data_time"]
|
|
138
|
+
data_log = " (data time: {:5.3f} ms)".format(fetch_data_time)
|
|
139
|
+
train_log += data_log
|
|
140
|
+
|
|
141
|
+
print(train_log, flush=True)
|
mindspore/train/data_sink.py
CHANGED
|
@@ -40,7 +40,7 @@ def _init_sink_dataset(dataset, sink_size, input_signature, create_info):
|
|
|
40
40
|
# create transfer_dataset
|
|
41
41
|
is_info_queue = (create_info and sink_size == 1 and dataset_size != 1 and
|
|
42
42
|
input_signature is None and not dynamic_shape and
|
|
43
|
-
context.get_context('device_target') == 'Ascend'
|
|
43
|
+
context.get_context('device_target') == 'Ascend')
|
|
44
44
|
transfer_dataset = _exec_datagraph(dataset, sink_size, create_data_info_queue=is_info_queue)
|
|
45
45
|
dataset.__transfer_dataset__ = transfer_dataset
|
|
46
46
|
|
|
@@ -130,29 +130,23 @@ def data_sink(fn, dataset, sink_size=1, jit_config=None, input_signature=None):
|
|
|
130
130
|
A wrapper function to generate a function for the input function.
|
|
131
131
|
|
|
132
132
|
Note:
|
|
133
|
-
When using data sinking, the dataset will automatically
|
|
134
|
-
|
|
135
|
-
|
|
136
|
-
|
|
137
|
-
|
|
138
|
-
total_step = epochs * dataset_size
|
|
139
|
-
|
|
140
|
-
train_sink_step = total_step / sink_size
|
|
141
|
-
|
|
142
|
-
After transforming from `mindspore.data_sink`, you need to execute `train_sink_step` step for training.
|
|
133
|
+
When using data sinking, the dataset will be automatically sent in a loop, and only the step size of sinking
|
|
134
|
+
`sink_size` needs to be considered. The default value of `sink_size` is ``1``, which means that all data will
|
|
135
|
+
be sunk every epoch. If `sink_size` is greater than 1, the amount of data sunk per epoch will be the dataset
|
|
136
|
+
with a size of `sink_size`.
|
|
143
137
|
|
|
144
138
|
Args:
|
|
145
139
|
fn (Function): The Python function that will be run with dataset.
|
|
146
140
|
dataset (Dataset): The dataset iterator. The dataset can be generated by dataset generator API in
|
|
147
141
|
:class:`mindspore.dataset`, such as :class:`mindspore.dataset.ImageFolderDataset`.
|
|
148
|
-
sink_size (int): Control the amount of data in each sink. `sink_size` must be positive integer. Default: 1.
|
|
142
|
+
sink_size (int): Control the amount of data in each sink. `sink_size` must be positive integer. Default: ``1`` .
|
|
149
143
|
jit_config (JitConfig): Controls the execution mode(Graph mode/PyNative mode) of the generated function, and Jit
|
|
150
|
-
config for compile. Default: None, means running in PyNative mode.
|
|
144
|
+
config for compile. Default: ``None`` , means running in PyNative mode.
|
|
151
145
|
input_signature (Union[Tensor, List or Tuple of Tensors]): The Tensor which describes the input arguments.
|
|
152
146
|
The shape and dtype of the Tensor will be supplied to this function. If input_signature is specified,
|
|
153
147
|
each input to `fn` must be a `Tensor`. And the input parameters of `fn` cannot accept `**kwargs`. The shape
|
|
154
148
|
and dtype of actual inputs should keep the same as input_signature. Otherwise, TypeError will be raised.
|
|
155
|
-
Default: None.
|
|
149
|
+
Default: ``None`` .
|
|
156
150
|
|
|
157
151
|
Returns:
|
|
158
152
|
Function, the generated function will be executed in data sinking mode.
|
|
@@ -175,7 +169,7 @@ def data_sink(fn, dataset, sink_size=1, jit_config=None, input_signature=None):
|
|
|
175
169
|
... out = x + y
|
|
176
170
|
... return out
|
|
177
171
|
>>>
|
|
178
|
-
>>> sink_process = ms.
|
|
172
|
+
>>> sink_process = ms.data_sink(func_net, dataset, sink_size=1)
|
|
179
173
|
>>> for _ in range(2):
|
|
180
174
|
... out = sink_process()
|
|
181
175
|
... print(out)
|
|
@@ -212,7 +206,7 @@ def data_sink(fn, dataset, sink_size=1, jit_config=None, input_signature=None):
|
|
|
212
206
|
real_sink_fun = _get_sink_fun(sink_fun, key_info, is_info_queue, dataset, jit_config)
|
|
213
207
|
|
|
214
208
|
loop = sink_size
|
|
215
|
-
if jit_config is not None:
|
|
209
|
+
if jit_config is not None and context.get_context('mode') == context.GRAPH_MODE:
|
|
216
210
|
loop = 1
|
|
217
211
|
|
|
218
212
|
out = None
|
|
@@ -18,17 +18,19 @@ from __future__ import absolute_import
|
|
|
18
18
|
import math
|
|
19
19
|
|
|
20
20
|
from mindspore import _checkparam as Validator
|
|
21
|
+
from mindspore import log as logger
|
|
22
|
+
from mindspore.common._auto_dynamic import is_auto_dynamic, convert_new_shapes
|
|
21
23
|
from mindspore.common.dtype import pytype_to_dtype
|
|
22
24
|
from mindspore.common.api import _cell_graph_executor
|
|
23
25
|
from mindspore.common._utils import is_shape_unknown
|
|
24
26
|
from mindspore.dataset.engine import offload
|
|
25
|
-
import mindspore.dataset as ds
|
|
26
27
|
from mindspore import context, nn
|
|
27
28
|
from mindspore.train._utils import _exec_datagraph, _get_types_and_shapes, _construct_tensor_list
|
|
28
29
|
from mindspore.parallel._utils import _get_device_num, _get_global_rank, _need_to_full, \
|
|
29
30
|
_to_full_shapes, _get_pipeline_stages
|
|
30
31
|
from mindspore.parallel._ps_context import _is_role_sched
|
|
31
32
|
from mindspore.ops import operations as P
|
|
33
|
+
from mindspore.common.auto_dynamic_shape import _auto_dynamic_shape
|
|
32
34
|
|
|
33
35
|
|
|
34
36
|
def _send_data(dataset, epoch_num):
|
|
@@ -47,31 +49,31 @@ def _send_data_no_flag(dataset, epoch_num):
|
|
|
47
49
|
|
|
48
50
|
def _dynamic_sink_data(dataset, dataset_iter):
|
|
49
51
|
"""Special scenario for dataset with sink_size=1."""
|
|
50
|
-
_, dataset_shapes = dataset_iter.types_shapes()
|
|
51
52
|
if hasattr(dataset_iter, "sink_size") and \
|
|
52
53
|
dataset_iter.sink_size == 1 and \
|
|
53
54
|
dataset.get_dataset_size() != 1 and \
|
|
54
55
|
hasattr(dataset_iter, "sink_count") and \
|
|
55
|
-
dataset_iter.sink_count == 1
|
|
56
|
-
not _has_dynamic_shape(dataset_shapes) and \
|
|
57
|
-
not ds.config.get_dynamic_shape():
|
|
56
|
+
dataset_iter.sink_count == 1:
|
|
58
57
|
return True
|
|
59
58
|
return False
|
|
60
59
|
|
|
61
60
|
|
|
62
|
-
def _dynamic_sink_exception_scenario(dataset_iter):
|
|
61
|
+
def _dynamic_sink_exception_scenario(dataset_iter, is_dynamic):
|
|
63
62
|
"""The exception scenario for dynamic data is not applicable."""
|
|
64
|
-
|
|
65
|
-
|
|
66
|
-
if _has_dynamic_shape(dataset_shapes) or context.get_context("mode") != context.GRAPH_MODE:
|
|
63
|
+
if context.get_context("mode") != context.GRAPH_MODE or is_dynamic:
|
|
67
64
|
return True
|
|
68
65
|
return False
|
|
69
66
|
|
|
70
67
|
|
|
71
|
-
def _dynamic_sink_scenario(dataset, dataset_iter):
|
|
68
|
+
def _dynamic_sink_scenario(dataset, dataset_iter, is_dynamic):
|
|
72
69
|
"""Special scenario with dynamic shape and sink_size=1."""
|
|
73
70
|
flag = False
|
|
74
|
-
|
|
71
|
+
|
|
72
|
+
# This is used only for test
|
|
73
|
+
if is_auto_dynamic():
|
|
74
|
+
return False
|
|
75
|
+
|
|
76
|
+
if _dynamic_sink_data(dataset, dataset_iter) and not _dynamic_sink_exception_scenario(dataset_iter, is_dynamic):
|
|
75
77
|
flag = True
|
|
76
78
|
|
|
77
79
|
return flag
|
|
@@ -93,8 +95,7 @@ class _DataWrapper(nn.Cell):
|
|
|
93
95
|
self.get_next = P.GetNext(
|
|
94
96
|
dataset_types, dataset_shapes, len(dataset_types), queue_name)
|
|
95
97
|
self.network = network
|
|
96
|
-
|
|
97
|
-
self._jit_config_dict = network.jit_config_dict
|
|
98
|
+
self._get_attr_from_cell(network)
|
|
98
99
|
|
|
99
100
|
def construct(self):
|
|
100
101
|
outputs = self.get_next()
|
|
@@ -120,11 +121,18 @@ def _generate_network_with_dataset(network, dataset_helper, queue_name):
|
|
|
120
121
|
Generate new network with network and dataset info.
|
|
121
122
|
"""
|
|
122
123
|
dataset_types, dataset_shapes = dataset_helper.types_shapes()
|
|
124
|
+
|
|
125
|
+
# This is used only for test
|
|
126
|
+
if is_auto_dynamic():
|
|
127
|
+
new_shapes = convert_new_shapes(dataset_shapes)
|
|
128
|
+
return _generate_dataset_sink_mode_net(network, new_shapes, dataset_types, queue_name)
|
|
129
|
+
|
|
123
130
|
if network.get_inputs() and None not in network.get_inputs():
|
|
124
131
|
_check_inputs(network.get_inputs(), dataset_shapes, dataset_types)
|
|
125
132
|
elif context.get_context("mode") == context.PYNATIVE_MODE:
|
|
126
133
|
dataset_shapes = tuple([(-2,)] * len(dataset_shapes))
|
|
127
|
-
network = _generate_dataset_sink_mode_net(
|
|
134
|
+
network = _generate_dataset_sink_mode_net(
|
|
135
|
+
network, dataset_shapes, dataset_types, queue_name)
|
|
128
136
|
return network
|
|
129
137
|
|
|
130
138
|
|
|
@@ -172,29 +180,20 @@ def _get_dataset_aux(dataset):
|
|
|
172
180
|
|
|
173
181
|
def connect_network_with_dataset(network, dataset_helper):
|
|
174
182
|
"""
|
|
175
|
-
Connect the `network` with dataset in `dataset_helper`.
|
|
176
|
-
|
|
177
|
-
This function wraps the input network with 'GetNext' so that the data can be fetched automatically from the
|
|
178
|
-
data channel corresponding to the 'queue_name' and passed to the input network during forward computation.
|
|
179
|
-
|
|
180
|
-
Note:
|
|
181
|
-
In the case of running the network on Ascend/GPU in graph mode, this function will wrap the input network with
|
|
182
|
-
:class:`mindspore.ops.GetNext`. In other cases, the input network will be returned with no change.
|
|
183
|
-
The :class:`mindspore.ops.GetNext` is required to get data only in sink mode,
|
|
184
|
-
so this function is not applicable to no-sink mode.
|
|
185
|
-
when dataset_helper's dataset_sink_mode is True, it can only be connected to one network.
|
|
183
|
+
Connect the `network` with dataset in `dataset_helper`. Only supported in `sink mode
|
|
184
|
+
<https://mindspore.cn/tutorials/experts/en/r2.2/optimize/execution_opt.html>`_, (dataset_sink_mode=True).
|
|
186
185
|
|
|
187
186
|
Args:
|
|
188
187
|
network (Cell): The training network for dataset.
|
|
189
188
|
dataset_helper (DatasetHelper): A class to process the MindData dataset, it provides the type, shape and queue
|
|
190
|
-
name of the dataset
|
|
189
|
+
name of the dataset.
|
|
191
190
|
|
|
192
191
|
Returns:
|
|
193
|
-
Cell, a new network
|
|
194
|
-
it is the input network.
|
|
192
|
+
Cell, a new network containing the type, shape and queue name of the dataset info.
|
|
195
193
|
|
|
196
194
|
Raises:
|
|
197
195
|
RuntimeError: If the API was not called in dataset sink mode.
|
|
196
|
+
|
|
198
197
|
Supported Platforms:
|
|
199
198
|
``Ascend`` ``GPU``
|
|
200
199
|
|
|
@@ -208,7 +207,7 @@ def connect_network_with_dataset(network, dataset_helper):
|
|
|
208
207
|
>>> train_dataset = ds.NumpySlicesDataset(data=data).batch(32)
|
|
209
208
|
>>> dataset_helper = ms.DatasetHelper(train_dataset, dataset_sink_mode=True)
|
|
210
209
|
>>> net = nn.Dense(10, 5)
|
|
211
|
-
>>>
|
|
210
|
+
>>> net_with_dataset = ms.connect_network_with_dataset(net, dataset_helper)
|
|
212
211
|
"""
|
|
213
212
|
dataset_iter = dataset_helper.iter
|
|
214
213
|
dataset = dataset_iter.dataset
|
|
@@ -219,6 +218,7 @@ def connect_network_with_dataset(network, dataset_helper):
|
|
|
219
218
|
"The API 'connect_network_with_dataset' should be called in dataset sink mode.")
|
|
220
219
|
|
|
221
220
|
if _is_role_sched():
|
|
221
|
+
network.add_flags(sink_mode=True)
|
|
222
222
|
return network
|
|
223
223
|
|
|
224
224
|
if not hasattr(aux, '__network__'):
|
|
@@ -227,13 +227,15 @@ def connect_network_with_dataset(network, dataset_helper):
|
|
|
227
227
|
if aux.__network__ is not network:
|
|
228
228
|
raise ValueError(
|
|
229
229
|
"The dataset has been connected to other network, please check the code.")
|
|
230
|
-
|
|
230
|
+
is_dynamic = bool(network.get_inputs())
|
|
231
231
|
queue_name = dataset.__transfer_dataset__.queue_name
|
|
232
|
-
if _dynamic_sink_scenario(dataset, dataset_iter
|
|
232
|
+
if _dynamic_sink_scenario(dataset, dataset_iter, is_dynamic):
|
|
233
233
|
dataset_types, dataset_shapes = dataset_helper.get_data_info()
|
|
234
234
|
dataset_types = [pytype_to_dtype(x) for x in dataset_types]
|
|
235
|
-
|
|
235
|
+
if not is_dynamic:
|
|
236
|
+
dataset_shapes = _auto_dynamic_shape.auto_dynamic_generate_compile_args(dataset_shapes, True)
|
|
236
237
|
key = str(dataset_types) + str(dataset_shapes)
|
|
238
|
+
_auto_dynamic_shape.update_phase_and_compile_args(dataset_shapes, key, True, aux)
|
|
237
239
|
if hasattr(aux, '__network_manage__') and key in aux.__network_manage__:
|
|
238
240
|
network = aux.__network_manage__[key]
|
|
239
241
|
else:
|
|
@@ -248,21 +250,22 @@ def connect_network_with_dataset(network, dataset_helper):
|
|
|
248
250
|
else:
|
|
249
251
|
aux.__network_manage__ = dict()
|
|
250
252
|
aux.__network_manage__[key] = network
|
|
253
|
+
network.add_flags(sink_mode=True)
|
|
251
254
|
return network
|
|
252
255
|
|
|
253
256
|
if hasattr(aux, '__sink_network__'):
|
|
254
257
|
network = aux.__sink_network__
|
|
255
258
|
else:
|
|
256
|
-
if
|
|
259
|
+
if context.get_context("device_target") in ("Ascend", "GPU"):
|
|
257
260
|
network = offload.check_add_offload_sink_mode(
|
|
258
261
|
dataset, dataset_helper, network)
|
|
259
262
|
network = _generate_network_with_dataset(
|
|
260
263
|
network, dataset_helper, queue_name)
|
|
261
264
|
aux.__sink_network__ = network
|
|
262
265
|
|
|
263
|
-
if _dynamic_sink_data(dataset, dataset_iter) and _dynamic_sink_exception_scenario(dataset_iter):
|
|
266
|
+
if _dynamic_sink_data(dataset, dataset_iter) and _dynamic_sink_exception_scenario(dataset_iter, is_dynamic):
|
|
264
267
|
dataset_helper.get_data_info()
|
|
265
|
-
|
|
268
|
+
network.add_flags(sink_mode=True)
|
|
266
269
|
return network
|
|
267
270
|
|
|
268
271
|
|
|
@@ -281,7 +284,7 @@ class DatasetHelper:
|
|
|
281
284
|
:class:`mindspore.dataset`, such as :class:`mindspore.dataset.ImageFolderDataset`.
|
|
282
285
|
dataset_sink_mode (bool): If the value is True, GetNext is employed to fetch the data at device through the
|
|
283
286
|
dataset pipeline, otherwise fetch the data at host by iterating through the dataset.
|
|
284
|
-
Default: True
|
|
287
|
+
Default: ``True``.
|
|
285
288
|
sink_size (int): Control the amount of data in each sink.
|
|
286
289
|
If sink_size=-1, sink the complete dataset for each epoch.
|
|
287
290
|
If sink_size>0, sink sink_size data for each epoch.
|
|
@@ -303,7 +306,7 @@ class DatasetHelper:
|
|
|
303
306
|
>>> for next_element in set_helper:
|
|
304
307
|
... # `next_element` includes data and label, using data to run the net
|
|
305
308
|
... data = next_element[0]
|
|
306
|
-
... net(data)
|
|
309
|
+
... result = net(data)
|
|
307
310
|
"""
|
|
308
311
|
|
|
309
312
|
def __init__(self, dataset, dataset_sink_mode=True, sink_size=-1, epoch_num=1):
|
|
@@ -316,22 +319,19 @@ class DatasetHelper:
|
|
|
316
319
|
sink_size = dataset.get_dataset_size()
|
|
317
320
|
|
|
318
321
|
if dataset_sink_mode:
|
|
319
|
-
if context.get_context("
|
|
320
|
-
|
|
321
|
-
|
|
322
|
-
|
|
323
|
-
|
|
324
|
-
|
|
325
|
-
elif (context.get_context("device_target") == "Ascend") or \
|
|
326
|
-
(context.get_context("device_target") == "GPU"):
|
|
327
|
-
iterclass = _DatasetIterMSLoopSink
|
|
328
|
-
else:
|
|
329
|
-
target = context.get_context("device_target")
|
|
330
|
-
raise RuntimeError("Currently dataset sink mode is not supported when the device "
|
|
331
|
-
"target is {}, please set dataset_sink_mode to False "
|
|
332
|
-
"in Model.train()".format(target))
|
|
322
|
+
if context.get_context("mode") == context.GRAPH_MODE:
|
|
323
|
+
if _is_role_sched():
|
|
324
|
+
iterclass = _DatasetIterPSServer
|
|
325
|
+
elif (context.get_context("device_target") == "Ascend") or \
|
|
326
|
+
(context.get_context("device_target") == "GPU"):
|
|
327
|
+
iterclass = _DatasetIterMSLoopSink
|
|
333
328
|
else:
|
|
334
|
-
|
|
329
|
+
target = context.get_context("device_target")
|
|
330
|
+
raise RuntimeError("Currently dataset sink mode is not supported when the device "
|
|
331
|
+
"target is {}, please set dataset_sink_mode to False "
|
|
332
|
+
"in Model.train()".format(target))
|
|
333
|
+
else:
|
|
334
|
+
iterclass = _DatasetIterPyNative
|
|
335
335
|
self.iter = iterclass(dataset, sink_size, epoch_num)
|
|
336
336
|
else:
|
|
337
337
|
iterclass = _DatasetIterNormal
|
|
@@ -383,28 +383,108 @@ class DatasetHelper:
|
|
|
383
383
|
return self.iter.get_sink_size()
|
|
384
384
|
|
|
385
385
|
def stop_send(self):
|
|
386
|
-
"""
|
|
386
|
+
"""
|
|
387
|
+
Stop send data about data sink.
|
|
388
|
+
|
|
389
|
+
Examples:
|
|
390
|
+
>>> import mindspore as ms
|
|
391
|
+
>>> import numpy as np
|
|
392
|
+
>>> # Define a dataset pipeline
|
|
393
|
+
>>> def generator():
|
|
394
|
+
... for i in range(5):
|
|
395
|
+
... yield (np.ones((32, 10)),)
|
|
396
|
+
>>> train_dataset = ms.dataset.GeneratorDataset(generator, ["data"])
|
|
397
|
+
>>> dataset_helper = ms.DatasetHelper(train_dataset, dataset_sink_mode=True, sink_size=-1)
|
|
398
|
+
>>> dataset_helper.stop_send()
|
|
399
|
+
"""
|
|
387
400
|
self.iter.stop_send()
|
|
388
401
|
|
|
389
402
|
def release(self):
|
|
390
|
-
"""
|
|
403
|
+
"""
|
|
404
|
+
Free up resources about data sink.
|
|
405
|
+
|
|
406
|
+
Examples:
|
|
407
|
+
>>> import numpy as np
|
|
408
|
+
>>> import mindspore as ms
|
|
409
|
+
>>> from mindspore import nn
|
|
410
|
+
>>> from mindspore import dataset as ds
|
|
411
|
+
>>>
|
|
412
|
+
>>> data = {"x": np.float32(np.random.rand(64, 10)), "y": np.random.randint(0, 5, (64,))}
|
|
413
|
+
>>> train_dataset = ds.NumpySlicesDataset(data=data).batch(32)
|
|
414
|
+
>>> dataset_helper = ms.DatasetHelper(train_dataset, dataset_sink_mode=True)
|
|
415
|
+
>>> dataset_helper.release()
|
|
416
|
+
"""
|
|
391
417
|
self.iter.release()
|
|
392
418
|
|
|
393
419
|
def continue_send(self):
|
|
394
|
-
"""
|
|
420
|
+
"""
|
|
421
|
+
Continue to send data to device at the beginning of epoch.
|
|
422
|
+
|
|
423
|
+
Examples:
|
|
424
|
+
>>> import numpy as np
|
|
425
|
+
>>> import mindspore as ms
|
|
426
|
+
>>> from mindspore import nn
|
|
427
|
+
>>> from mindspore import dataset as ds
|
|
428
|
+
>>>
|
|
429
|
+
>>> data = {"x": np.float32(np.random.rand(64, 10)), "y": np.random.randint(0, 5, (64,))}
|
|
430
|
+
>>> train_dataset = ds.NumpySlicesDataset(data=data).batch(32)
|
|
431
|
+
>>> dataset_helper = ms.DatasetHelper(train_dataset, dataset_sink_mode=True)
|
|
432
|
+
>>> dataset_helper.continue_send()
|
|
433
|
+
"""
|
|
395
434
|
self.iter.continue_send()
|
|
396
435
|
|
|
397
|
-
def _reset(self, step,
|
|
436
|
+
def _reset(self, step, dataset_size):
|
|
398
437
|
"""Reset the dataset to the provided step and epoch."""
|
|
399
|
-
self.iter._reset(step,
|
|
438
|
+
self.iter._reset(step, dataset_size) # pylint: disable=protected-access
|
|
400
439
|
|
|
440
|
+
# pylint: disable=missing-docstring
|
|
401
441
|
def get_data_info(self):
|
|
402
|
-
|
|
403
|
-
|
|
404
|
-
Generally, it works in dynamic shape scenarios.
|
|
405
|
-
"""
|
|
442
|
+
# In sink mode, it returns the types and shapes of the current data.
|
|
443
|
+
# Generally, it works in dynamic shape scenarios.
|
|
406
444
|
return self.iter.get_data_info()
|
|
407
445
|
|
|
446
|
+
# pylint: disable=missing-docstring
|
|
447
|
+
def get_send_info(self, run_context):
|
|
448
|
+
# In sink mode, it returns the send information of dataset at this moment.
|
|
449
|
+
# Send information includes number of send batches, time summary of fetching data on host
|
|
450
|
+
# and time summary of sending data.
|
|
451
|
+
class InfoViewer:
|
|
452
|
+
'''
|
|
453
|
+
Inner class for parsing send info.
|
|
454
|
+
'''
|
|
455
|
+
def __init__(self, send_info, run_context):
|
|
456
|
+
self.info_ = {}
|
|
457
|
+
self.sink_size = run_context.original_args()["batch_num"]
|
|
458
|
+
if run_context.original_args().get("train_dataset", None) is not None:
|
|
459
|
+
self.dataset_size = run_context.original_args()["train_dataset"].get_dataset_size()
|
|
460
|
+
elif run_context.original_args().get("valid_dataset", None) is not None:
|
|
461
|
+
self.dataset_size = run_context.original_args()["valid_dataset"].get_dataset_size()
|
|
462
|
+
else:
|
|
463
|
+
raise RuntimeError("Could not find a proper dataset to estimate dataset size.")
|
|
464
|
+
if not send_info:
|
|
465
|
+
epoch = 1
|
|
466
|
+
self.info_[epoch] = {'fetch_data_num': 0, 'fetch_data_time': 0, 'first_data_time': 0}
|
|
467
|
+
else:
|
|
468
|
+
for info_per_epoch in send_info:
|
|
469
|
+
epoch, fetch_data_num, first_data_time, fetch_data_time = info_per_epoch
|
|
470
|
+
if fetch_data_num > 1:
|
|
471
|
+
fetch_data_time = (fetch_data_time - first_data_time) / (fetch_data_num - 1) * 1000.
|
|
472
|
+
self.info_[epoch] = {'fetch_data_num': fetch_data_num,
|
|
473
|
+
'fetch_data_time': fetch_data_time,
|
|
474
|
+
'first_data_time': first_data_time}
|
|
475
|
+
|
|
476
|
+
def epoch(self, epoch):
|
|
477
|
+
if self.sink_size == self.dataset_size:
|
|
478
|
+
return self.info_[epoch]
|
|
479
|
+
global_step = epoch * self.sink_size
|
|
480
|
+
data_epoch = math.ceil(global_step / self.dataset_size)
|
|
481
|
+
return self.info_[data_epoch]
|
|
482
|
+
|
|
483
|
+
# send info struct:[epoch, data_num_per_epoch, first_data_time, accumulate_data_time]
|
|
484
|
+
# for example [1, 1875, 0.421, 0.362]
|
|
485
|
+
send_info = self.iter.get_send_info()
|
|
486
|
+
return InfoViewer(send_info, run_context)
|
|
487
|
+
|
|
408
488
|
|
|
409
489
|
class _DatasetIter:
|
|
410
490
|
"""Base iter for dataset helper"""
|
|
@@ -415,14 +495,20 @@ class _DatasetIter:
|
|
|
415
495
|
self.sink_count = self.get_sink_count(dataset)
|
|
416
496
|
self.dataset_types, self.dataset_shapes = _get_types_and_shapes(
|
|
417
497
|
dataset)
|
|
418
|
-
|
|
419
|
-
if
|
|
420
|
-
|
|
498
|
+
|
|
499
|
+
if dataset.get_init_step() % sink_size != 0:
|
|
500
|
+
init_epoch = dataset.get_init_step() // sink_size
|
|
501
|
+
init_step = init_epoch * sink_size
|
|
502
|
+
logger.warning("Init global step must be the end of the epoch in sink mode, "
|
|
503
|
+
"but got: {0}. Reset it to the end of epoch {1} at step {2}."
|
|
504
|
+
.format(dataset.get_init_step(), init_epoch, init_step))
|
|
505
|
+
dataset.set_init_step(init_step)
|
|
506
|
+
|
|
421
507
|
if not hasattr(dataset, '__transfer_dataset__'):
|
|
422
508
|
if hasattr(dataset, '__loop_size__'):
|
|
423
509
|
self.sink_size = dataset.__loop_size__
|
|
424
|
-
create_data_info_queue = (
|
|
425
|
-
|
|
510
|
+
create_data_info_queue = (
|
|
511
|
+
sink_size == 1 and self.sink_count == 1 and dataset.get_dataset_size() != 1)
|
|
426
512
|
dataset.__transfer_dataset__ = _exec_datagraph(dataset, self.sink_size,
|
|
427
513
|
create_data_info_queue=create_data_info_queue)
|
|
428
514
|
|
|
@@ -439,6 +525,7 @@ class _DatasetIter:
|
|
|
439
525
|
self.release = dataset.__transfer_dataset__.release
|
|
440
526
|
self.continue_send = dataset.__transfer_dataset__.continue_send
|
|
441
527
|
self.get_data_info = dataset.__transfer_dataset__.get_data_info
|
|
528
|
+
self.get_send_info = dataset.__transfer_dataset__.get_send_info
|
|
442
529
|
if hasattr(dataset.__transfer_dataset__, "_reset"):
|
|
443
530
|
self._reset = dataset.__transfer_dataset__._reset # pylint: disable=protected-access
|
|
444
531
|
|
|
@@ -475,8 +562,7 @@ class _DatasetIter:
|
|
|
475
562
|
if hasattr(self.dataset, '__loop_size__'):
|
|
476
563
|
sink_size = self.dataset.__loop_size__
|
|
477
564
|
else:
|
|
478
|
-
if context.get_context("
|
|
479
|
-
or context.get_context("device_target") == "GPU":
|
|
565
|
+
if context.get_context("device_target") == "Ascend" or context.get_context("device_target") == "GPU":
|
|
480
566
|
if self.sink_size > 0:
|
|
481
567
|
sink_size = self.sink_size
|
|
482
568
|
else:
|
|
@@ -484,24 +570,6 @@ class _DatasetIter:
|
|
|
484
570
|
return sink_size
|
|
485
571
|
|
|
486
572
|
|
|
487
|
-
class _DatasetIterGE(_DatasetIter):
|
|
488
|
-
"""Iter for GE."""
|
|
489
|
-
|
|
490
|
-
def __init__(self, dataset, sink_size, epoch_num):
|
|
491
|
-
super().__init__(dataset, sink_size, epoch_num)
|
|
492
|
-
self.sink_count = self.get_sink_count(dataset)
|
|
493
|
-
batch_expand_num = 1
|
|
494
|
-
if _need_to_full():
|
|
495
|
-
batch_expand_num = _get_device_num() // _get_pipeline_stages()
|
|
496
|
-
tensor_list_run = _construct_tensor_list(
|
|
497
|
-
self.dataset_types, self.dataset_shapes, batch_expand_num)
|
|
498
|
-
|
|
499
|
-
def op():
|
|
500
|
-
return tensor_list_run
|
|
501
|
-
|
|
502
|
-
self.op = op
|
|
503
|
-
|
|
504
|
-
|
|
505
573
|
class _DatasetIterPyNative(_DatasetIter):
|
|
506
574
|
"""Iter for context (mode=PYNATIVE_MODE)."""
|
|
507
575
|
|
|
@@ -51,16 +51,19 @@ class FixedLossScaleManager(LossScaleManager):
|
|
|
51
51
|
inherits from :class:`mindspore.amp.LossScaleManager`.
|
|
52
52
|
|
|
53
53
|
Args:
|
|
54
|
-
loss_scale (float): Magnification factor of gradients. Note that if `drop_overflow_update` is set to False,
|
|
55
|
-
the value of `loss_scale` in optimizer should be set to the same as here. Default: 128.0.
|
|
56
|
-
drop_overflow_update (bool): Whether to execute optimizer if there is an overflow.
|
|
57
|
-
|
|
54
|
+
loss_scale (float): Magnification factor of gradients. Note that if `drop_overflow_update` is set to ``False`` ,
|
|
55
|
+
the value of `loss_scale` in optimizer should be set to the same as here. Default: ``128.0`` .
|
|
56
|
+
drop_overflow_update (bool): Whether to execute optimizer if there is an overflow.
|
|
57
|
+
If ``True`` , the optimizer will
|
|
58
|
+
not executed when overflow occurs. Default: ``True`` .
|
|
58
59
|
|
|
59
60
|
Examples:
|
|
60
61
|
>>> import mindspore as ms
|
|
61
62
|
>>> from mindspore import amp, nn
|
|
62
63
|
>>>
|
|
63
|
-
>>>
|
|
64
|
+
>>> # Define the network structure of LeNet5. Refer to
|
|
65
|
+
>>> # https://gitee.com/mindspore/docs/blob/r2.2/docs/mindspore/code/lenet.py
|
|
66
|
+
>>> net = LeNet5()
|
|
64
67
|
>>> loss_scale = 1024.0
|
|
65
68
|
>>> loss_scale_manager = amp.FixedLossScaleManager(loss_scale, False)
|
|
66
69
|
>>> optim = nn.Momentum(params=net.trainable_params(), learning_rate=0.1, momentum=0.9, loss_scale=loss_scale)
|
|
@@ -121,9 +124,9 @@ class DynamicLossScaleManager(LossScaleManager):
|
|
|
121
124
|
adjusted, inherits from :class:`mindspore.amp.LossScaleManager`.
|
|
122
125
|
|
|
123
126
|
Args:
|
|
124
|
-
init_loss_scale (float): Initialize loss scale. Default: 2**24.
|
|
125
|
-
scale_factor (int): Coefficient of increase and decrease. Default: 2.
|
|
126
|
-
scale_window (int): Maximum continuous normal steps when there is no overflow. Default: 2000.
|
|
127
|
+
init_loss_scale (float): Initialize loss scale. Default: ``2 ** 24`` .
|
|
128
|
+
scale_factor (int): Coefficient of increase and decrease. Default: ``2`` .
|
|
129
|
+
scale_window (int): Maximum continuous normal steps when there is no overflow. Default: ``2000`` .
|
|
127
130
|
|
|
128
131
|
Supported Platforms:
|
|
129
132
|
``Ascend`` ``GPU``
|
|
@@ -132,7 +135,9 @@ class DynamicLossScaleManager(LossScaleManager):
|
|
|
132
135
|
>>> import mindspore as ms
|
|
133
136
|
>>> from mindspore import amp, nn
|
|
134
137
|
>>>
|
|
135
|
-
>>>
|
|
138
|
+
>>> # Define the network structure of LeNet5. Refer to
|
|
139
|
+
>>> # https://gitee.com/mindspore/docs/blob/r2.2/docs/mindspore/code/lenet.py
|
|
140
|
+
>>> net = LeNet5()
|
|
136
141
|
>>> loss_scale_manager = amp.DynamicLossScaleManager()
|
|
137
142
|
>>> optim = nn.Momentum(params=net.trainable_params(), learning_rate=0.1, momentum=0.9)
|
|
138
143
|
>>> model = ms.Model(net, loss_scale_manager=loss_scale_manager, optimizer=optim)
|