mindspore 2.0.0rc1-cp38-cp38-manylinux1_x86_64.whl → 2.2.0-cp38-cp38-manylinux1_x86_64.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of mindspore might be problematic.
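The file-level comparison below can be reproduced locally by downloading both wheels and diffing their archive listings. A minimal sketch, assuming the wheel filenames from the title above and local copies of both files (per-file +/- line counts like those listed below would additionally require diffing the extracted file contents):

import zipfile

def wheel_names(path):
    """Return the set of file paths contained in a wheel archive."""
    with zipfile.ZipFile(path) as whl:
        return set(whl.namelist())

old = wheel_names("mindspore-2.0.0rc1-cp38-cp38-manylinux1_x86_64.whl")
new = wheel_names("mindspore-2.2.0-cp38-cp38-manylinux1_x86_64.whl")

# Files present in only one of the two releases.
print("added:  ", sorted(new - old)[:20])
print("removed:", sorted(old - new)[:20])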
- mindspore/.commit_id +1 -1
- mindspore/Third_Party_Open_Source_Software_Notice +2 -2
- mindspore/__init__.py +5 -2
- mindspore/_akg/akg/build_module.py +5 -6
- mindspore/_akg/akg/composite/build_module.py +49 -16
- mindspore/_akg/akg/composite/split_stitch.py +10 -11
- mindspore/_akg/akg/config/repository.json +195 -0
- mindspore/_akg/akg/global_configs.py +5 -1
- mindspore/_akg/akg/ms/info_version_adapt.py +67 -1
- mindspore/_akg/akg/tvm/api.py +4 -3
- mindspore/_akg/akg/tvm/autotvm/__init__.py +1 -2
- mindspore/_akg/akg/tvm/autotvm/graph_tuner/base_graph_tuner.py +1 -5
- mindspore/_akg/akg/tvm/autotvm/measure/__init__.py +1 -1
- mindspore/_akg/akg/tvm/autotvm/measure/measure.py +1 -10
- mindspore/_akg/akg/tvm/autotvm/measure/measure_methods.py +1 -372
- mindspore/_akg/akg/tvm/build_module.py +16 -1
- mindspore/_akg/akg/tvm/contrib/graph_runtime.py +0 -53
- mindspore/_akg/akg/tvm/hybrid/parser.py +7 -6
- mindspore/_akg/akg/tvm/ir_builder.py +1 -1
- mindspore/_akg/akg/tvm/module.py +1 -2
- mindspore/_akg/akg/tvm/stmt.py +2 -2
- mindspore/_akg/akg/utils/composite_op_helper.py +9 -10
- mindspore/_akg/akg/utils/kernel_exec.py +58 -260
- mindspore/_akg/akg/utils/op_dsl.py +17 -1
- mindspore/_akg/akg/utils/result_analysis.py +4 -24
- mindspore/_akg/akg/utils/tbe_codegen_utils.py +198 -0
- mindspore/_c_dataengine.cpython-38-x86_64-linux-gnu.so +0 -0
- mindspore/_c_expression.cpython-38-x86_64-linux-gnu.so +0 -0
- mindspore/_c_mindrecord.cpython-38-x86_64-linux-gnu.so +0 -0
- mindspore/_check_jit_forbidden_api.py +5 -1
- mindspore/_checkparam.py +79 -62
- mindspore/_extends/graph_kernel/__init__.py +0 -1
- mindspore/_extends/graph_kernel/model/graph_split.py +2 -0
- mindspore/_extends/graph_kernel/model/model_builder.py +9 -50
- mindspore/_extends/graph_kernel/splitter.py +1 -9
- mindspore/_extends/parallel_compile/akg_compiler/akg_process.py +128 -21
- mindspore/_extends/parallel_compile/akg_compiler/build_tbe_kernel.py +2 -2
- mindspore/_extends/parallel_compile/akg_compiler/tbe_topi.py +4 -2
- mindspore/_extends/parallel_compile/tbe_compiler/tbe_adapter.py +18 -13
- mindspore/_extends/parallel_compile/tbe_compiler/tbe_helper.py +13 -9
- mindspore/_extends/parallel_compile/tbe_compiler/tbe_job.py +1 -1
- mindspore/_extends/parallel_compile/tbe_compiler/tbe_job_manager.py +1 -1
- mindspore/_extends/parse/__init__.py +19 -17
- mindspore/_extends/parse/namespace.py +7 -36
- mindspore/_extends/parse/parser.py +375 -189
- mindspore/_extends/parse/resources.py +36 -41
- mindspore/_extends/parse/standard_method.py +350 -245
- mindspore/_extends/parse/trope.py +2 -12
- mindspore/_extends/remote/kernel_build_server.py +24 -7
- mindspore/_extends/remote/kernel_build_server_akg_v2.py +55 -0
- mindspore/_install_custom.py +43 -0
- mindspore/_mindspore_offline_debug.cpython-38-x86_64-linux-gnu.so +0 -0
- mindspore/amp.py +85 -19
- mindspore/bin/cache_admin +0 -0
- mindspore/bin/cache_server +0 -0
- mindspore/boost/base.py +2 -2
- mindspore/boost/boost.py +27 -32
- mindspore/boost/boost_cell_wrapper.py +37 -13
- mindspore/boost/grad_accumulation.py +1 -1
- mindspore/boost/grad_freeze.py +34 -6
- mindspore/boost/group_loss_scale_manager.py +15 -14
- mindspore/boost/less_batch_normalization.py +28 -3
- mindspore/common/__init__.py +15 -11
- mindspore/common/_auto_dynamic.py +68 -0
- mindspore/common/_jit_fallback_utils.py +111 -0
- mindspore/common/_register_for_adapter.py +17 -5
- mindspore/common/_register_for_tensor.py +2 -2
- mindspore/common/_stub_tensor.py +18 -15
- mindspore/common/_utils.py +31 -7
- mindspore/common/api.py +269 -101
- mindspore/common/auto_dynamic_shape.py +498 -0
- mindspore/common/dtype.py +61 -21
- mindspore/common/dump.py +9 -7
- mindspore/common/initializer.py +106 -76
- mindspore/common/jit_config.py +35 -14
- mindspore/common/lazy_inline.py +187 -0
- mindspore/common/mindir_util.py +101 -0
- mindspore/common/mutable.py +10 -13
- mindspore/common/parameter.py +246 -55
- mindspore/common/seed.py +13 -7
- mindspore/common/sparse_tensor.py +29 -33
- mindspore/common/tensor.py +907 -251
- mindspore/communication/__init__.py +7 -4
- mindspore/communication/_comm_helper.py +84 -4
- mindspore/communication/management.py +160 -88
- mindspore/config/op_info.config +99 -75
- mindspore/config/super_bar_config.json +36 -4
- mindspore/context.py +526 -219
- mindspore/dataset/__init__.py +9 -46
- mindspore/dataset/audio/__init__.py +4 -19
- mindspore/dataset/audio/transforms.py +545 -233
- mindspore/dataset/audio/utils.py +21 -18
- mindspore/dataset/callback/ds_callback.py +42 -13
- mindspore/dataset/core/config.py +158 -100
- mindspore/dataset/core/validator_helpers.py +1 -63
- mindspore/dataset/debug/debug_hook.py +45 -13
- mindspore/dataset/debug/pre_defined_hook.py +5 -5
- mindspore/dataset/engine/__init__.py +0 -5
- mindspore/dataset/engine/cache_client.py +38 -15
- mindspore/dataset/engine/datasets.py +615 -278
- mindspore/dataset/engine/datasets_audio.py +154 -283
- mindspore/dataset/engine/datasets_standard_format.py +104 -116
- mindspore/dataset/engine/datasets_text.py +443 -326
- mindspore/dataset/engine/datasets_user_defined.py +251 -164
- mindspore/dataset/engine/datasets_vision.py +839 -1443
- mindspore/dataset/engine/iterators.py +11 -4
- mindspore/dataset/engine/obs/obs_mindrecord_dataset.py +7 -3
- mindspore/dataset/engine/obs/util.py +3 -0
- mindspore/dataset/engine/offload.py +6 -6
- mindspore/dataset/engine/queue.py +15 -14
- mindspore/dataset/engine/samplers.py +39 -23
- mindspore/dataset/engine/serializer_deserializer.py +22 -6
- mindspore/dataset/engine/validators.py +21 -331
- mindspore/dataset/text/__init__.py +5 -33
- mindspore/dataset/text/transforms.py +334 -165
- mindspore/dataset/text/utils.py +215 -145
- mindspore/dataset/transforms/__init__.py +1 -1
- mindspore/dataset/transforms/c_transforms.py +3 -2
- mindspore/dataset/transforms/py_transforms_util.py +40 -12
- mindspore/dataset/transforms/transforms.py +174 -71
- mindspore/dataset/utils/browse_dataset.py +25 -17
- mindspore/dataset/utils/line_reader.py +24 -21
- mindspore/dataset/vision/__init__.py +5 -26
- mindspore/dataset/vision/c_transforms.py +177 -165
- mindspore/dataset/vision/py_transforms.py +114 -119
- mindspore/dataset/vision/py_transforms_util.py +54 -51
- mindspore/dataset/vision/transforms.py +1127 -381
- mindspore/dataset/vision/utils.py +54 -38
- mindspore/dataset/vision/validators.py +12 -2
- mindspore/experimental/map_parameter.py +38 -4
- mindspore/{dataset/datapreprocess → experimental/optim}/__init__.py +14 -4
- mindspore/experimental/optim/adam.py +192 -0
- mindspore/experimental/optim/adamw.py +181 -0
- mindspore/experimental/optim/lr_scheduler.py +1427 -0
- mindspore/experimental/optim/optimizer.py +252 -0
- mindspore/experimental/optim/sgd.py +147 -0
- mindspore/gen_ops.py +273 -0
- mindspore/include/OWNERS +1 -2
- mindspore/include/api/context.h +21 -1
- mindspore/include/api/data_type.h +2 -1
- mindspore/include/api/graph.h +0 -15
- mindspore/include/api/kernel.h +2 -0
- mindspore/include/api/kernel_api.h +37 -12
- mindspore/include/api/model.h +29 -42
- mindspore/include/api/model_group.h +14 -3
- mindspore/include/api/model_parallel_runner.h +18 -2
- mindspore/include/api/serialization.h +26 -0
- mindspore/include/api/status.h +1 -0
- mindspore/include/api/types.h +38 -4
- mindspore/include/c_api/ms/abstract.h +67 -0
- mindspore/include/c_api/ms/attribute.h +197 -0
- mindspore/include/c_api/ms/base/handle_types.h +43 -0
- mindspore/include/c_api/ms/base/macros.h +32 -0
- mindspore/include/c_api/ms/base/status.h +33 -0
- mindspore/include/c_api/ms/base/types.h +282 -0
- mindspore/include/c_api/ms/context.h +102 -0
- mindspore/include/c_api/ms/graph.h +160 -0
- mindspore/include/c_api/ms/node.h +606 -0
- mindspore/include/c_api/ms/tensor.h +161 -0
- mindspore/include/c_api/ms/value.h +84 -0
- mindspore/include/c_api/status_c.h +3 -0
- mindspore/include/dataset/constants.h +6 -12
- mindspore/include/dataset/execute.h +23 -13
- mindspore/include/dataset/text.h +26 -26
- mindspore/include/dataset/transforms.h +25 -31
- mindspore/include/dataset/vision.h +60 -60
- mindspore/include/dataset/vision_ascend.h +5 -6
- mindspore/include/dataset/vision_lite.h +17 -17
- mindspore/include/mindapi/base/format.h +0 -1
- mindspore/include/mindapi/base/type_id.h +2 -1
- mindspore/include/mindapi/base/types.h +5 -1
- mindspore/lib/libdnnl.so.2 +0 -0
- mindspore/lib/libjemalloc.so.2 +0 -0
- mindspore/lib/libmindspore.so +0 -0
- mindspore/lib/libmindspore_backend.so +0 -0
- mindspore/lib/libmindspore_common.so +0 -0
- mindspore/lib/libmindspore_core.so +0 -0
- mindspore/lib/libmindspore_glog.so.0 +0 -0
- mindspore/lib/libmindspore_gpr.so.15 +0 -0
- mindspore/lib/libmindspore_grpc++.so.1 +0 -0
- mindspore/lib/libmindspore_grpc.so.15 +0 -0
- mindspore/lib/libmindspore_shared_lib.so +0 -0
- mindspore/lib/libmpi_adapter.so +0 -0
- mindspore/lib/libnnacl.so +0 -0
- mindspore/lib/libopencv_core.so.4.5 +0 -0
- mindspore/lib/libopencv_imgcodecs.so.4.5 +0 -0
- mindspore/lib/libopencv_imgproc.so.4.5 +0 -0
- mindspore/lib/libps_cache.so +0 -0
- mindspore/lib/plugin/ascend/custom_aicpu_ops/op_impl/cpu/aicpu_kernel/impl/libcust_aicpu_kernels.so +0 -0
- mindspore/lib/plugin/ascend/custom_aicpu_ops/op_impl/cpu/aicpu_kernel/impl/libcust_cpu_kernels.so +0 -0
- mindspore/lib/plugin/ascend/custom_aicpu_ops/op_impl/cpu/config/cust_aicpu_kernel.json +9000 -0
- mindspore/lib/plugin/ascend/custom_aicpu_ops/op_proto/libcust_op_proto.so +0 -0
- mindspore/lib/plugin/ascend/libakg.so +0 -0
- mindspore/lib/plugin/ascend/libascend_collective.so +0 -0
- mindspore/lib/plugin/ascend/libdvpp_utils.so +0 -0
- mindspore/lib/plugin/ascend/libhccl_plugin.so +0 -0
- mindspore/lib/plugin/ascend/libmindspore_aicpu_kernels.so +0 -0
- mindspore/lib/plugin/ascend/libmindspore_cpu_kernels.so +0 -0
- mindspore/lib/plugin/cpu/libakg.so +0 -0
- mindspore/lib/plugin/gpu/libcuda_ops.so.10 +0 -0
- mindspore/lib/plugin/gpu/libcuda_ops.so.11 +0 -0
- mindspore/lib/plugin/gpu10.1/libakg.so +0 -0
- mindspore/lib/plugin/gpu10.1/libnccl.so.2 +0 -0
- mindspore/lib/plugin/gpu10.1/libnvidia_collective.so +0 -0
- mindspore/lib/plugin/gpu11.1/libakg.so +0 -0
- mindspore/lib/plugin/gpu11.1/libnccl.so.2 +0 -0
- mindspore/lib/plugin/gpu11.1/libnvidia_collective.so +0 -0
- mindspore/lib/plugin/gpu11.6/libakg.so +0 -0
- mindspore/lib/plugin/gpu11.6/libnccl.so.2 +0 -0
- mindspore/lib/plugin/gpu11.6/libnvidia_collective.so +0 -0
- mindspore/lib/plugin/libmindspore_ascend.so.1 +0 -0
- mindspore/lib/plugin/libmindspore_ascend.so.2 +0 -0
- mindspore/lib/plugin/libmindspore_gpu.so.10.1 +0 -0
- mindspore/lib/plugin/libmindspore_gpu.so.11.1 +0 -0
- mindspore/lib/plugin/libmindspore_gpu.so.11.6 +0 -0
- mindspore/log.py +9 -6
- mindspore/mindrecord/filereader.py +33 -4
- mindspore/mindrecord/filewriter.py +70 -35
- mindspore/mindrecord/mindpage.py +40 -34
- mindspore/mindrecord/shardreader.py +1 -1
- mindspore/mindrecord/shardsegment.py +1 -1
- mindspore/mindrecord/tools/cifar100_to_mr.py +25 -18
- mindspore/mindrecord/tools/cifar10_to_mr.py +25 -18
- mindspore/mindrecord/tools/csv_to_mr.py +29 -13
- mindspore/mindrecord/tools/imagenet_to_mr.py +24 -10
- mindspore/mindrecord/tools/mnist_to_mr.py +24 -11
- mindspore/mindrecord/tools/tfrecord_to_mr.py +31 -26
- mindspore/nn/cell.py +463 -169
- mindspore/nn/dynamic_lr.py +47 -43
- mindspore/nn/layer/activation.py +225 -82
- mindspore/nn/layer/basic.py +121 -79
- mindspore/nn/layer/channel_shuffle.py +21 -21
- mindspore/nn/layer/combined.py +33 -26
- mindspore/nn/layer/container.py +277 -22
- mindspore/nn/layer/conv.py +441 -304
- mindspore/nn/layer/dense.py +19 -13
- mindspore/nn/layer/embedding.py +62 -49
- mindspore/nn/layer/flash_attention.py +264 -0
- mindspore/nn/layer/image.py +50 -39
- mindspore/nn/layer/math.py +62 -51
- mindspore/nn/layer/normalization.py +219 -167
- mindspore/nn/layer/padding.py +58 -70
- mindspore/nn/layer/pooling.py +334 -287
- mindspore/nn/layer/rnn_cells.py +53 -38
- mindspore/nn/layer/rnns.py +59 -56
- mindspore/nn/layer/thor_layer.py +52 -44
- mindspore/nn/layer/timedistributed.py +6 -4
- mindspore/nn/layer/transformer.py +284 -164
- mindspore/nn/learning_rate_schedule.py +34 -25
- mindspore/nn/loss/__init__.py +3 -2
- mindspore/nn/loss/loss.py +554 -311
- mindspore/nn/optim/ada_grad.py +12 -9
- mindspore/nn/optim/adadelta.py +14 -11
- mindspore/nn/optim/adafactor.py +19 -16
- mindspore/nn/optim/adam.py +62 -47
- mindspore/nn/optim/adamax.py +13 -10
- mindspore/nn/optim/adasum.py +12 -8
- mindspore/nn/optim/asgd.py +10 -9
- mindspore/nn/optim/ftrl.py +20 -17
- mindspore/nn/optim/lamb.py +16 -12
- mindspore/nn/optim/lars.py +8 -6
- mindspore/nn/optim/lazyadam.py +25 -20
- mindspore/nn/optim/momentum.py +10 -7
- mindspore/nn/optim/optimizer.py +61 -9
- mindspore/nn/optim/proximal_ada_grad.py +14 -13
- mindspore/nn/optim/rmsprop.py +17 -13
- mindspore/nn/optim/rprop.py +30 -17
- mindspore/nn/optim/sgd.py +40 -23
- mindspore/nn/optim/thor.py +24 -26
- mindspore/nn/probability/bijector/bijector.py +11 -11
- mindspore/nn/probability/bijector/exp.py +1 -1
- mindspore/nn/probability/bijector/gumbel_cdf.py +3 -3
- mindspore/nn/probability/bijector/invert.py +1 -1
- mindspore/nn/probability/bijector/power_transform.py +29 -29
- mindspore/nn/probability/bijector/scalar_affine.py +3 -3
- mindspore/nn/probability/bijector/softplus.py +5 -5
- mindspore/nn/probability/bnn_layers/bnn_cell_wrapper.py +4 -2
- mindspore/nn/probability/bnn_layers/conv_variational.py +13 -13
- mindspore/nn/probability/bnn_layers/dense_variational.py +12 -12
- mindspore/nn/probability/bnn_layers/layer_distribution.py +9 -8
- mindspore/nn/probability/distribution/_utils/custom_ops.py +19 -3
- mindspore/nn/probability/distribution/_utils/utils.py +1 -1
- mindspore/nn/probability/distribution/bernoulli.py +9 -9
- mindspore/nn/probability/distribution/beta.py +8 -8
- mindspore/nn/probability/distribution/categorical.py +23 -15
- mindspore/nn/probability/distribution/cauchy.py +5 -6
- mindspore/nn/probability/distribution/distribution.py +3 -3
- mindspore/nn/probability/distribution/exponential.py +4 -4
- mindspore/nn/probability/distribution/gamma.py +10 -10
- mindspore/nn/probability/distribution/geometric.py +8 -8
- mindspore/nn/probability/distribution/gumbel.py +8 -9
- mindspore/nn/probability/distribution/half_normal.py +5 -5
- mindspore/nn/probability/distribution/laplace.py +5 -5
- mindspore/nn/probability/distribution/log_normal.py +12 -11
- mindspore/nn/probability/distribution/logistic.py +8 -8
- mindspore/nn/probability/distribution/normal.py +6 -5
- mindspore/nn/probability/distribution/poisson.py +10 -11
- mindspore/nn/probability/distribution/student_t.py +8 -9
- mindspore/nn/probability/distribution/transformed_distribution.py +5 -5
- mindspore/nn/probability/distribution/uniform.py +11 -11
- mindspore/nn/reinforcement/tensor_array.py +2 -2
- mindspore/nn/sparse/sparse.py +9 -9
- mindspore/nn/wrap/cell_wrapper.py +188 -63
- mindspore/nn/wrap/grad_reducer.py +21 -12
- mindspore/nn/wrap/loss_scale.py +136 -49
- mindspore/numpy/__init__.py +4 -4
- mindspore/numpy/array_creations.py +55 -56
- mindspore/numpy/array_ops.py +134 -35
- mindspore/numpy/logic_ops.py +66 -20
- mindspore/numpy/math_ops.py +142 -139
- mindspore/numpy/utils_const.py +2 -2
- mindspore/offline_debug/convert_async.py +2 -2
- mindspore/ops/_grad_experimental/__init__.py +7 -5
- mindspore/ops/_grad_experimental/grad_array_ops.py +231 -348
- mindspore/ops/{_grad → _grad_experimental}/grad_base.py +1 -33
- mindspore/ops/{_grad → _grad_experimental}/grad_comm_ops.py +25 -13
- mindspore/ops/{_grad/__init__.py → _grad_experimental/grad_debug_ops.py} +15 -7
- mindspore/ops/{_grad → _grad_experimental}/grad_implementations.py +17 -11
- mindspore/ops/_grad_experimental/grad_inner_ops.py +33 -52
- mindspore/ops/_grad_experimental/grad_math_ops.py +151 -1224
- mindspore/ops/_grad_experimental/grad_nn_ops.py +141 -414
- mindspore/ops/{_grad → _grad_experimental}/grad_quant_ops.py +10 -6
- mindspore/ops/_grad_experimental/grad_sparse.py +317 -2
- mindspore/ops/_grad_experimental/grad_sparse_ops.py +3 -13
- mindspore/ops/{_grad → _grad_experimental}/taylor_rule.py +1 -1
- mindspore/ops/_op_impl/_custom_op/dsd_back_impl.py +1 -1
- mindspore/ops/_op_impl/_custom_op/flash_attention/__init__.py +0 -0
- mindspore/ops/_op_impl/_custom_op/flash_attention/attention.py +406 -0
- mindspore/{_extends/graph_kernel/expanders/complex/__init__.py → ops/_op_impl/_custom_op/flash_attention/constants.py} +27 -8
- mindspore/ops/_op_impl/_custom_op/flash_attention/flash_attention_bwd.py +467 -0
- mindspore/ops/_op_impl/_custom_op/flash_attention/flash_attention_fwd.py +563 -0
- mindspore/ops/_op_impl/_custom_op/flash_attention/flash_attention_impl.py +193 -0
- mindspore/ops/_op_impl/_custom_op/flash_attention/tik_ops_utils.py +435 -0
- mindspore/ops/_op_impl/_custom_op/flash_attention/tiling_strategy/__init__.py +0 -0
- mindspore/ops/_op_impl/_custom_op/flash_attention/tiling_strategy/sparse_tiling.py +45 -0
- mindspore/ops/_op_impl/_custom_op/flash_attention/tiling_strategy/strategy.py +67 -0
- mindspore/ops/_op_impl/_custom_op/flash_attention/tiling_strategy/wukong_tiling.py +62 -0
- mindspore/ops/_op_impl/_custom_op/matmul_cube_dense_left_impl.py +2 -2
- mindspore/ops/_op_impl/aicpu/__init__.py +41 -1
- mindspore/ops/_op_impl/aicpu/adaptive_max_pool_2d.py +37 -0
- mindspore/ops/_op_impl/aicpu/bias_add_grad.py +0 -1
- mindspore/ops/_op_impl/aicpu/cast.py +52 -0
- mindspore/ops/_op_impl/aicpu/coalesce.py +2 -0
- mindspore/ops/_op_impl/aicpu/col2im.py +3 -1
- mindspore/ops/_op_impl/aicpu/count_nonzero.py +43 -0
- mindspore/ops/_op_impl/aicpu/dropout_genmask.py +6 -0
- mindspore/ops/_op_impl/aicpu/eps.py +32 -0
- mindspore/ops/_op_impl/aicpu/eye.py +4 -4
- mindspore/ops/_op_impl/aicpu/fft_with_size.py +6 -0
- mindspore/ops/_op_impl/aicpu/fill_diagonal.py +5 -0
- mindspore/ops/_op_impl/aicpu/gamma.py +2 -2
- mindspore/ops/_op_impl/aicpu/im2col.py +3 -5
- mindspore/ops/_op_impl/aicpu/lgamma.py +1 -0
- mindspore/ops/_op_impl/aicpu/log_uniform_candidate_sampler.py +6 -3
- mindspore/ops/_op_impl/aicpu/lu.py +39 -0
- mindspore/ops/_op_impl/aicpu/lu_unpack_grad.py +0 -1
- mindspore/ops/_op_impl/aicpu/masked_scatter.py +1 -0
- mindspore/ops/_op_impl/aicpu/masked_select_grad.py +3 -0
- mindspore/ops/_op_impl/aicpu/matrix_band_part.py +59 -0
- mindspore/ops/_op_impl/aicpu/matrix_power.py +6 -1
- mindspore/ops/_op_impl/aicpu/median.py +1 -0
- mindspore/ops/_op_impl/aicpu/multinomial.py +9 -9
- mindspore/ops/_op_impl/aicpu/not_equal.py +0 -5
- mindspore/ops/_op_impl/aicpu/pad_v3.py +3 -1
- mindspore/ops/_op_impl/aicpu/pad_v3_grad.py +2 -0
- mindspore/ops/_op_impl/aicpu/parameterized_truncated_normal.py +15 -7
- mindspore/ops/_op_impl/aicpu/random_categorical.py +39 -19
- mindspore/ops/_op_impl/aicpu/random_choice_with_mask.py +5 -2
- mindspore/ops/_op_impl/aicpu/random_poisson.py +103 -52
- mindspore/ops/_op_impl/aicpu/random_shuffle.py +17 -15
- mindspore/ops/_op_impl/aicpu/resize_bilinear_grad.py +0 -1
- mindspore/ops/_op_impl/aicpu/resize_nearest_neighbor_v2.py +0 -6
- mindspore/ops/_op_impl/aicpu/resize_nearest_neighbor_v2_grad.py +0 -7
- mindspore/ops/_op_impl/aicpu/scatter_nd.py +2 -0
- mindspore/ops/_op_impl/aicpu/sequence_concat.py +40 -0
- mindspore/ops/_op_impl/aicpu/sequence_stack.py +40 -0
- mindspore/ops/_op_impl/aicpu/{sparseaddmm.py → sparse_addmm.py} +2 -2
- mindspore/ops/_op_impl/aicpu/{sparsesparsemaximum.py → sparse_sparse_maximum.py} +4 -4
- mindspore/ops/_op_impl/aicpu/standard_laplace.py +5 -4
- mindspore/ops/_op_impl/aicpu/standard_normal.py +5 -4
- mindspore/ops/_op_impl/aicpu/truncated_normal.py +9 -7
- mindspore/ops/_op_impl/aicpu/uniform.py +5 -3
- mindspore/ops/_op_impl/aicpu/uniform_candidate_sampler.py +8 -4
- mindspore/ops/_op_impl/aicpu/uniform_int.py +5 -5
- mindspore/ops/_op_impl/aicpu/uniform_real.py +4 -4
- mindspore/ops/_op_impl/aicpu/upsample_nearest_3d.py +14 -6
- mindspore/ops/_op_impl/aicpu/upsample_nearest_3d_grad.py +22 -8
- mindspore/ops/_op_impl/aicpu/upsample_trilinear_3d.py +11 -6
- mindspore/ops/_op_impl/aicpu/upsample_trilinear_3d_grad.py +21 -10
- mindspore/ops/_op_impl/tbe/__init__.py +6 -4
- mindspore/ops/_op_impl/tbe/atomic_addr_clean.py +1 -1
- mindspore/ops/_op_impl/tbe/avg_pool.py +2 -2
- mindspore/ops/_op_impl/tbe/avg_pool_3d.py +3 -3
- mindspore/ops/_op_impl/tbe/avg_pool_3d_grad.py +4 -4
- mindspore/ops/_op_impl/tbe/avg_pool_ds.py +2 -2
- mindspore/ops/_op_impl/tbe/avg_pool_grad.py +3 -3
- mindspore/ops/_op_impl/tbe/avg_pool_grad_vm.py +3 -3
- mindspore/ops/_op_impl/tbe/batch_to_space.py +1 -1
- mindspore/ops/_op_impl/tbe/batch_to_space_nd.py +2 -2
- mindspore/ops/_op_impl/tbe/bn_infer.py +2 -2
- mindspore/ops/_op_impl/tbe/bn_infer_ds.py +3 -2
- mindspore/ops/_op_impl/tbe/broadcast_to.py +1 -1
- mindspore/ops/_op_impl/tbe/depthwise_conv2d.py +3 -3
- mindspore/ops/_op_impl/tbe/expand_dims.py +1 -1
- mindspore/ops/_op_impl/tbe/gather_v2.py +56 -0
- mindspore/ops/_op_impl/tbe/im2col.py +4 -4
- mindspore/ops/_op_impl/tbe/inplace_index_add.py +7 -3
- mindspore/ops/_op_impl/tbe/mem_set.py +38 -0
- mindspore/ops/_op_impl/tbe/scatter_nd_add.py +3 -0
- mindspore/ops/_op_impl/tbe/scatter_nd_d.py +1 -1
- mindspore/ops/_op_impl/tbe/space_to_batch.py +1 -1
- mindspore/ops/_op_impl/tbe/space_to_batch_nd.py +2 -2
- mindspore/ops/_op_impl/tbe/trans_data_ds.py +2 -0
- mindspore/ops/_primitive_cache.py +1 -1
- mindspore/ops/_tracefunc.py +241 -0
- mindspore/ops/_utils/utils.py +10 -2
- mindspore/ops/_vmap/vmap_array_ops.py +5 -3
- mindspore/ops/_vmap/vmap_base.py +5 -4
- mindspore/ops/_vmap/vmap_convolution_ops.py +1 -1
- mindspore/ops/_vmap/vmap_grad_math_ops.py +6 -4
- mindspore/ops/_vmap/vmap_grad_nn_ops.py +11 -6
- mindspore/ops/_vmap/vmap_math_ops.py +5 -2
- mindspore/ops/_vmap/vmap_nn_ops.py +135 -11
- mindspore/ops/arg_dtype_cast.py +54 -0
- mindspore/ops/composite/__init__.py +7 -5
- mindspore/ops/composite/base.py +78 -34
- mindspore/ops/composite/math_ops.py +5 -695
- mindspore/ops/composite/multitype_ops/_compile_utils.py +403 -97
- mindspore/ops/composite/multitype_ops/_constexpr_utils.py +28 -22
- mindspore/ops/composite/multitype_ops/add_impl.py +69 -7
- mindspore/ops/composite/multitype_ops/bitwise_and_impl.py +2 -1
- mindspore/ops/composite/multitype_ops/bitwise_or_impl.py +2 -1
- mindspore/ops/composite/multitype_ops/bitwise_xor_impl.py +2 -0
- mindspore/ops/composite/multitype_ops/div_impl.py +1 -0
- mindspore/ops/composite/multitype_ops/floordiv_impl.py +1 -0
- mindspore/ops/composite/multitype_ops/getitem_impl.py +48 -10
- mindspore/ops/composite/multitype_ops/greater_equal_impl.py +2 -0
- mindspore/ops/composite/multitype_ops/greater_impl.py +2 -0
- mindspore/ops/composite/multitype_ops/left_shift_impl.py +2 -0
- mindspore/ops/composite/multitype_ops/less_equal_impl.py +2 -0
- mindspore/ops/composite/multitype_ops/less_impl.py +2 -0
- mindspore/ops/composite/multitype_ops/logic_not_impl.py +2 -2
- mindspore/ops/composite/multitype_ops/mod_impl.py +1 -0
- mindspore/ops/composite/multitype_ops/mul_impl.py +1 -0
- mindspore/ops/composite/multitype_ops/negative_impl.py +1 -0
- mindspore/ops/composite/multitype_ops/not_in_impl.py +1 -0
- mindspore/ops/composite/multitype_ops/ones_like_impl.py +6 -0
- mindspore/ops/composite/multitype_ops/pow_impl.py +1 -0
- mindspore/ops/composite/multitype_ops/right_shift_impl.py +2 -0
- mindspore/ops/composite/multitype_ops/setitem_impl.py +10 -7
- mindspore/ops/composite/multitype_ops/sub_impl.py +1 -0
- mindspore/ops/composite/multitype_ops/uadd_impl.py +2 -0
- mindspore/ops/composite/multitype_ops/zeros_like_impl.py +9 -0
- mindspore/ops/deprecated.py +304 -0
- mindspore/ops/function/__init__.py +41 -4
- mindspore/ops/function/array_func.py +1108 -467
- mindspore/ops/function/clip_func.py +94 -27
- mindspore/ops/function/debug_func.py +3 -1
- mindspore/ops/function/grad/grad_func.py +82 -73
- mindspore/ops/function/image_func.py +28 -12
- mindspore/ops/function/linalg_func.py +135 -39
- mindspore/ops/function/math_func.py +3779 -894
- mindspore/ops/function/nn_func.py +1584 -657
- mindspore/ops/function/parameter_func.py +13 -3
- mindspore/ops/function/random_func.py +247 -153
- mindspore/ops/function/sparse_func.py +14 -11
- mindspore/ops/function/sparse_unary_func.py +173 -47
- mindspore/ops/function/spectral_func.py +8 -4
- mindspore/ops/function/vmap_func.py +8 -7
- mindspore/ops/functional.py +47 -16
- mindspore/ops/op_info_register.py +346 -86
- mindspore/ops/operations/__init__.py +38 -22
- mindspore/ops/operations/_grad_ops.py +145 -149
- mindspore/ops/operations/_inner_ops.py +298 -56
- mindspore/ops/operations/_ms_kernel.py +3 -3
- mindspore/ops/operations/_quant_ops.py +24 -28
- mindspore/ops/operations/_rl_inner_ops.py +9 -7
- mindspore/ops/operations/_scalar_ops.py +115 -0
- mindspore/ops/operations/_sequence_ops.py +148 -10
- mindspore/ops/operations/_tensor_array.py +1 -1
- mindspore/ops/operations/_thor_ops.py +2 -2
- mindspore/ops/operations/array_ops.py +1239 -561
- mindspore/ops/operations/comm_ops.py +166 -90
- mindspore/ops/operations/control_ops.py +3 -3
- mindspore/ops/operations/custom_ops.py +124 -102
- mindspore/ops/operations/debug_ops.py +24 -11
- mindspore/ops/operations/image_ops.py +86 -71
- mindspore/ops/operations/inner_ops.py +18 -13
- mindspore/ops/operations/linalg_ops.py +30 -11
- mindspore/ops/operations/math_ops.py +1730 -435
- mindspore/ops/operations/nn_ops.py +1953 -943
- mindspore/ops/operations/other_ops.py +65 -43
- mindspore/ops/operations/random_ops.py +258 -98
- mindspore/ops/operations/rl_ops.py +4 -36
- mindspore/ops/operations/sparse_ops.py +38 -33
- mindspore/ops/operations/spectral_ops.py +8 -4
- mindspore/ops/primitive.py +66 -44
- mindspore/ops/signature.py +5 -5
- mindspore/parallel/_auto_parallel_context.py +80 -19
- mindspore/parallel/_cost_model_context.py +42 -0
- mindspore/parallel/_offload_context.py +162 -72
- mindspore/parallel/_parallel_serialization.py +2 -2
- mindspore/parallel/_ps_context.py +16 -4
- mindspore/parallel/_recovery_context.py +2 -1
- mindspore/parallel/_tensor.py +15 -13
- mindspore/parallel/_transformer/layers.py +8 -6
- mindspore/parallel/_transformer/loss.py +1 -0
- mindspore/parallel/_transformer/moe.py +7 -7
- mindspore/parallel/_transformer/op_parallel_config.py +12 -1
- mindspore/parallel/_transformer/transformer.py +34 -14
- mindspore/parallel/_utils.py +36 -14
- mindspore/parallel/algo_parameter_config.py +114 -20
- mindspore/parallel/checkpoint_transform.py +16 -18
- mindspore/parallel/shard.py +16 -13
- mindspore/profiler/__init__.py +1 -1
- mindspore/profiler/common/struct_type.py +3 -3
- mindspore/profiler/common/util.py +3 -2
- mindspore/profiler/envprofiling.py +11 -4
- mindspore/profiler/parser/aicpu_data_parser.py +5 -3
- mindspore/profiler/parser/ascend_flops_generator.py +94 -0
- mindspore/profiler/parser/ascend_fpbp_generator.py +76 -0
- mindspore/profiler/parser/ascend_hccl_generator.py +288 -0
- mindspore/profiler/parser/ascend_msprof_exporter.py +213 -0
- mindspore/profiler/parser/ascend_msprof_generator.py +199 -0
- mindspore/profiler/parser/ascend_op_generator.py +276 -0
- mindspore/profiler/parser/ascend_steptrace_generator.py +94 -0
- mindspore/profiler/parser/ascend_timeline_generator.py +110 -54
- mindspore/profiler/parser/base_timeline_generator.py +11 -7
- mindspore/profiler/parser/cpu_gpu_timeline_generator.py +45 -46
- mindspore/profiler/parser/flops_parser.py +15 -11
- mindspore/profiler/parser/framework_parser.py +92 -73
- mindspore/profiler/parser/hccl_parser.py +16 -12
- mindspore/profiler/parser/integrator.py +22 -11
- mindspore/profiler/parser/memory_usage_parser.py +36 -11
- mindspore/profiler/parser/minddata_analyzer.py +12 -14
- mindspore/profiler/parser/minddata_pipeline_parser.py +1 -1
- mindspore/profiler/parser/msadvisor_parser.py +8 -4
- mindspore/profiler/parser/op_intermediate_parser.py +5 -2
- mindspore/profiler/parser/optime_parser.py +1 -1
- mindspore/profiler/parser/profiler_info.py +4 -5
- mindspore/profiler/parser/step_trace_parser.py +11 -14
- mindspore/profiler/profiling.py +678 -377
- mindspore/rewrite/api/node.py +211 -54
- mindspore/rewrite/api/node_type.py +5 -0
- mindspore/rewrite/api/pattern_engine.py +22 -23
- mindspore/rewrite/api/scoped_value.py +20 -17
- mindspore/rewrite/api/symbol_tree.py +252 -106
- mindspore/rewrite/api/tree_node_helper.py +3 -0
- mindspore/rewrite/ast_helpers/__init__.py +2 -1
- mindspore/rewrite/ast_helpers/ast_finder.py +129 -0
- mindspore/rewrite/ast_helpers/ast_modifier.py +116 -104
- mindspore/rewrite/ast_transformers/flatten_recursive_stmt.py +97 -46
- mindspore/rewrite/common/rewrite_elog.py +5 -1
- mindspore/rewrite/namer.py +51 -51
- mindspore/rewrite/namespace.py +14 -5
- mindspore/{ops/bprop_mindir → rewrite/node}/__init__.py +9 -4
- mindspore/rewrite/node/call_function.py +79 -0
- mindspore/rewrite/node/cell_container.py +135 -0
- mindspore/rewrite/node/control_flow.py +88 -0
- mindspore/rewrite/{node.py → node/node.py} +313 -247
- mindspore/rewrite/node/node_manager.py +254 -0
- mindspore/rewrite/node/node_topological_manager.py +243 -0
- mindspore/rewrite/parsers/arguments_parser.py +22 -21
- mindspore/rewrite/parsers/assign_parser.py +225 -239
- mindspore/rewrite/parsers/attribute_parser.py +9 -7
- mindspore/rewrite/parsers/class_def_parser.py +179 -218
- mindspore/rewrite/parsers/constant_parser.py +9 -6
- mindspore/rewrite/parsers/container_parser.py +9 -7
- mindspore/rewrite/parsers/for_parser.py +36 -15
- mindspore/rewrite/parsers/function_def_parser.py +23 -20
- mindspore/rewrite/parsers/if_parser.py +28 -24
- mindspore/rewrite/parsers/module_parser.py +202 -25
- mindspore/rewrite/{parser.py → parsers/parser.py} +4 -2
- mindspore/rewrite/{parser_register.py → parsers/parser_register.py} +1 -1
- mindspore/rewrite/parsers/return_parser.py +6 -6
- mindspore/rewrite/sparsify/sparse_transformer.py +12 -3
- mindspore/rewrite/sparsify/sparsify.py +4 -1
- mindspore/rewrite/sparsify/utils.py +11 -5
- mindspore/rewrite/symbol_tree.py +577 -732
- mindspore/rewrite/symbol_tree_builder.py +9 -175
- mindspore/rewrite/symbol_tree_dumper.py +2 -2
- mindspore/run_check/_check_version.py +46 -39
- mindspore/run_check/run_check.py +3 -2
- mindspore/{scipy/sparse → safeguard}/__init__.py +4 -5
- mindspore/safeguard/rewrite_obfuscation.py +517 -0
- mindspore/scipy/__init__.py +1 -1
- mindspore/scipy/linalg.py +67 -61
- mindspore/scipy/ops.py +5 -41
- mindspore/scipy/ops_grad.py +3 -2
- mindspore/scipy/ops_wrapper.py +5 -5
- mindspore/scipy/optimize/line_search.py +8 -8
- mindspore/scipy/optimize/linear_sum_assignment.py +4 -4
- mindspore/scipy/optimize/minimize.py +16 -12
- mindspore/scipy/utils.py +1 -52
- mindspore/scipy/utils_const.py +4 -4
- mindspore/train/__init__.py +4 -4
- mindspore/train/_utils.py +13 -5
- mindspore/train/amp.py +410 -148
- mindspore/train/anf_ir_pb2.py +16 -4
- mindspore/train/callback/_backup_and_restore.py +8 -11
- mindspore/train/callback/_callback.py +80 -3
- mindspore/train/callback/_checkpoint.py +82 -51
- mindspore/train/callback/_early_stop.py +12 -15
- mindspore/train/callback/_history.py +1 -1
- mindspore/train/callback/_lambda_callback.py +13 -13
- mindspore/train/callback/_landscape.py +21 -17
- mindspore/train/callback/_loss_monitor.py +9 -10
- mindspore/train/callback/_on_request_exit.py +16 -33
- mindspore/train/callback/_reduce_lr_on_plateau.py +21 -24
- mindspore/train/callback/_summary_collector.py +44 -30
- mindspore/train/callback/_time_monitor.py +62 -12
- mindspore/train/data_sink.py +10 -16
- mindspore/train/dataset_helper.py +154 -86
- mindspore/train/loss_scale_manager.py +14 -9
- mindspore/train/metrics/__init__.py +10 -2
- mindspore/train/metrics/accuracy.py +1 -1
- mindspore/train/metrics/auc.py +1 -1
- mindspore/train/metrics/bleu_score.py +2 -2
- mindspore/train/metrics/confusion_matrix.py +14 -14
- mindspore/train/metrics/cosine_similarity.py +3 -3
- mindspore/train/metrics/dice.py +1 -1
- mindspore/train/metrics/fbeta.py +1 -1
- mindspore/train/metrics/hausdorff_distance.py +8 -6
- mindspore/train/metrics/mean_surface_distance.py +5 -4
- mindspore/train/metrics/metric.py +49 -17
- mindspore/train/metrics/occlusion_sensitivity.py +4 -4
- mindspore/train/metrics/perplexity.py +1 -1
- mindspore/train/metrics/precision.py +2 -2
- mindspore/train/metrics/recall.py +2 -3
- mindspore/train/metrics/roc.py +7 -7
- mindspore/train/metrics/root_mean_square_surface_distance.py +5 -4
- mindspore/train/metrics/topk.py +7 -4
- mindspore/train/mind_ir_pb2.py +193 -48
- mindspore/train/model.py +377 -133
- mindspore/train/serialization.py +697 -245
- mindspore/train/summary/_summary_adapter.py +5 -2
- mindspore/train/summary/_writer_pool.py +4 -3
- mindspore/train/summary/summary_record.py +25 -23
- mindspore/train/train_thor/convert_utils.py +39 -23
- mindspore/train/train_thor/dataset_helper.py +4 -3
- mindspore/train/train_thor/model_thor.py +8 -8
- mindspore/version.py +1 -1
- {mindspore-2.0.0rc1.dist-info → mindspore-2.2.0.dist-info}/METADATA +7 -8
- {mindspore-2.0.0rc1.dist-info → mindspore-2.2.0.dist-info}/RECORD +647 -818
- {mindspore-2.0.0rc1.dist-info → mindspore-2.2.0.dist-info}/entry_points.txt +0 -1
- mindspore/_akg/akg/tvm/contrib/debugger/__init__.py +0 -16
- mindspore/_akg/akg/tvm/contrib/debugger/debug_result.py +0 -274
- mindspore/_akg/akg/tvm/contrib/debugger/debug_runtime.py +0 -259
- mindspore/_akg/akg/tvm/contrib/peak.py +0 -341
- mindspore/_akg/akg/tvm/contrib/rpc.py +0 -25
- mindspore/_akg/akg/tvm/contrib/xcode.py +0 -257
- mindspore/_akg/akg/tvm/exec/__init__.py +0 -17
- mindspore/_akg/akg/tvm/exec/autotvm_log_editor.py +0 -60
- mindspore/_akg/akg/tvm/exec/measure_peak.py +0 -48
- mindspore/_akg/akg/tvm/exec/query_rpc_tracker.py +0 -48
- mindspore/_akg/akg/tvm/exec/rpc_proxy.py +0 -98
- mindspore/_akg/akg/tvm/exec/rpc_server.py +0 -88
- mindspore/_akg/akg/tvm/exec/rpc_tracker.py +0 -62
- mindspore/_akg/akg/tvm/rpc/__init__.py +0 -29
- mindspore/_akg/akg/tvm/rpc/base.py +0 -182
- mindspore/_akg/akg/tvm/rpc/client.py +0 -436
- mindspore/_akg/akg/tvm/rpc/proxy.py +0 -595
- mindspore/_akg/akg/tvm/rpc/server.py +0 -413
- mindspore/_akg/akg/tvm/rpc/tornado_util.py +0 -121
- mindspore/_akg/akg/tvm/rpc/tracker.py +0 -431
- mindspore/_extends/graph_kernel/expander.py +0 -80
- mindspore/_extends/graph_kernel/expanders/__init__.py +0 -57
- mindspore/_extends/graph_kernel/expanders/_utils.py +0 -269
- mindspore/_extends/graph_kernel/expanders/addn.py +0 -33
- mindspore/_extends/graph_kernel/expanders/batchnorm.py +0 -152
- mindspore/_extends/graph_kernel/expanders/batchnorm_grad.py +0 -105
- mindspore/_extends/graph_kernel/expanders/bias_add_grad.py +0 -49
- mindspore/_extends/graph_kernel/expanders/clip_by_norm_no_div_sum.py +0 -33
- mindspore/_extends/graph_kernel/expanders/complex/abs.py +0 -30
- mindspore/_extends/graph_kernel/expanders/complex/add.py +0 -44
- mindspore/_extends/graph_kernel/expanders/complex/div.py +0 -62
- mindspore/_extends/graph_kernel/expanders/complex/mul.py +0 -52
- mindspore/_extends/graph_kernel/expanders/complex/real_div.py +0 -62
- mindspore/_extends/graph_kernel/expanders/complex/sub.py +0 -45
- mindspore/_extends/graph_kernel/expanders/conv2d.py +0 -200
- mindspore/_extends/graph_kernel/expanders/dropout_grad.py +0 -30
- mindspore/_extends/graph_kernel/expanders/equal_count.py +0 -50
- mindspore/_extends/graph_kernel/expanders/erfc.py +0 -35
- mindspore/_extends/graph_kernel/expanders/expand_dims.py +0 -50
- mindspore/_extends/graph_kernel/expanders/fused_adam.py +0 -44
- mindspore/_extends/graph_kernel/expanders/fused_adam_weight_decay.py +0 -47
- mindspore/_extends/graph_kernel/expanders/fused_mul_add.py +0 -28
- mindspore/_extends/graph_kernel/expanders/gather.py +0 -43
- mindspore/_extends/graph_kernel/expanders/gelu_grad.py +0 -70
- mindspore/_extends/graph_kernel/expanders/gkdropout.py +0 -40
- mindspore/_extends/graph_kernel/expanders/identity.py +0 -25
- mindspore/_extends/graph_kernel/expanders/layernorm.py +0 -93
- mindspore/_extends/graph_kernel/expanders/layernorm_grad.py +0 -113
- mindspore/_extends/graph_kernel/expanders/logsoftmax.py +0 -46
- mindspore/_extends/graph_kernel/expanders/logsoftmax_grad.py +0 -36
- mindspore/_extends/graph_kernel/expanders/matmul.py +0 -80
- mindspore/_extends/graph_kernel/expanders/maximum_grad.py +0 -59
- mindspore/_extends/graph_kernel/expanders/minimum_grad.py +0 -80
- mindspore/_extends/graph_kernel/expanders/oneslike.py +0 -26
- mindspore/_extends/graph_kernel/expanders/reduce_mean.py +0 -43
- mindspore/_extends/graph_kernel/expanders/relu_grad.py +0 -32
- mindspore/_extends/graph_kernel/expanders/sigmoid_cross_entropy_with_logits.py +0 -41
- mindspore/_extends/graph_kernel/expanders/sigmoid_cross_entropy_with_logits_grad.py +0 -35
- mindspore/_extends/graph_kernel/expanders/sigmoid_grad.py +0 -31
- mindspore/_extends/graph_kernel/expanders/slice.py +0 -35
- mindspore/_extends/graph_kernel/expanders/softmax_cross_entropy_with_logits.py +0 -42
- mindspore/_extends/graph_kernel/expanders/softmax_grad_ext.py +0 -41
- mindspore/_extends/graph_kernel/expanders/softsign.py +0 -28
- mindspore/_extends/graph_kernel/expanders/sqrt_grad.py +0 -29
- mindspore/_extends/graph_kernel/expanders/square_sum_all.py +0 -44
- mindspore/_extends/graph_kernel/expanders/square_sum_v1.py +0 -37
- mindspore/_extends/graph_kernel/expanders/squared_difference.py +0 -43
- mindspore/_extends/graph_kernel/expanders/tanh_grad.py +0 -31
- mindspore/_extends/graph_kernel/expanders/tile.py +0 -54
- mindspore/_extends/graph_kernel/model/op_infer.py +0 -506
- mindspore/_extends/parse/jit_fallback_modules.py +0 -51
- mindspore/dataset/datapreprocess/preprocess_imagenet_validate_dataset.py +0 -54
- mindspore/dataset/engine/graphdata.py +0 -1586
- mindspore/include/api/net.h +0 -142
- mindspore/ops/_grad/grad_array_ops.py +0 -1347
- mindspore/ops/_grad/grad_clip_ops.py +0 -84
- mindspore/ops/_grad/grad_debug_ops.py +0 -68
- mindspore/ops/_grad/grad_inner_ops.py +0 -235
- mindspore/ops/_grad/grad_math_ops.py +0 -1684
- mindspore/ops/_grad/grad_nn_ops.py +0 -1529
- mindspore/ops/_grad/grad_other_ops.py +0 -89
- mindspore/ops/_grad/grad_sequence_ops.py +0 -296
- mindspore/ops/_grad/grad_sparse.py +0 -323
- mindspore/ops/_grad_experimental/grad_image_ops.py +0 -249
- mindspore/ops/_grad_experimental/grad_linalg_ops.py +0 -195
- mindspore/ops/_grad_experimental/grad_scalar_ops.py +0 -112
- mindspore/ops/bprop_mindir/AdaptiveAvgPool2D_bprop.mindir +0 -0
- mindspore/ops/bprop_mindir/AdaptiveMaxPool2D_bprop.mindir +0 -0
- mindspore/ops/bprop_mindir/ApproximateEqual_bprop.mindir +0 -19
- mindspore/ops/bprop_mindir/Argmax_bprop.mindir +0 -15
- mindspore/ops/bprop_mindir/Argmin_bprop.mindir +0 -15
- mindspore/ops/bprop_mindir/AssignSub_bprop.mindir +0 -19
- mindspore/ops/bprop_mindir/Assign_bprop.mindir +0 -17
- mindspore/ops/bprop_mindir/AvgPool3D_bprop.mindir +0 -150
- mindspore/ops/bprop_mindir/AvgPool_bprop.mindir +0 -66
- mindspore/ops/bprop_mindir/BCEWithLogitsLoss_bprop.mindir +0 -0
- mindspore/ops/bprop_mindir/BNTrainingReduce_bprop.mindir +0 -15
- mindspore/ops/bprop_mindir/BatchNormGrad_bprop.mindir +0 -0
- mindspore/ops/bprop_mindir/BatchToSpaceND_bprop.mindir +0 -28
- mindspore/ops/bprop_mindir/BiasAddGrad_bprop.mindir +0 -0
- mindspore/ops/bprop_mindir/BinaryCrossEntropy_bprop.mindir +0 -33
- mindspore/ops/bprop_mindir/BroadcastTo_bprop.mindir +0 -306
- mindspore/ops/bprop_mindir/Broadcast_bprop.mindir +0 -13
- mindspore/ops/bprop_mindir/CTCLoss_bprop.mindir +0 -0
- mindspore/ops/bprop_mindir/Concat_bprop.mindir +0 -0
- mindspore/ops/bprop_mindir/Conv2DBackpropFilter_bprop.mindir +0 -240
- mindspore/ops/bprop_mindir/Conv2DBackpropInput_bprop.mindir +0 -247
- mindspore/ops/bprop_mindir/Conv2DTranspose_bprop.mindir +0 -247
- mindspore/ops/bprop_mindir/Conv3DTranspose_bprop.mindir +0 -315
- mindspore/ops/bprop_mindir/Conv3D_bprop.mindir +0 -278
- mindspore/ops/bprop_mindir/DType_bprop.mindir +0 -14
- mindspore/ops/bprop_mindir/DeformableOffsets_bprop.mindir +0 -58
- mindspore/ops/bprop_mindir/Depend_bprop.mindir +0 -13
- mindspore/ops/bprop_mindir/DepthToSpace_bprop.mindir +0 -23
- mindspore/ops/bprop_mindir/DepthwiseConv2dNative_bprop.mindir +0 -138
- mindspore/ops/bprop_mindir/DiagPart_bprop.mindir +0 -15
- mindspore/ops/bprop_mindir/Dropout2D_bprop.mindir +0 -0
- mindspore/ops/bprop_mindir/Dropout3D_bprop.mindir +0 -0
- mindspore/ops/bprop_mindir/DropoutDoMask_bprop.mindir +0 -25
- mindspore/ops/bprop_mindir/DropoutGenMask_bprop.mindir +0 -18
- mindspore/ops/bprop_mindir/DropoutGrad_bprop.mindir +0 -27
- mindspore/ops/bprop_mindir/Dropout_bprop.mindir +0 -0
- mindspore/ops/bprop_mindir/DynamicGRUV2_bprop.mindir +0 -0
- mindspore/ops/bprop_mindir/DynamicRNN_bprop.mindir +0 -0
- mindspore/ops/bprop_mindir/DynamicShape_bprop.mindir +0 -14
- mindspore/ops/bprop_mindir/Elu_bprop.mindir +0 -16
- mindspore/ops/bprop_mindir/EmbeddingLookup_bprop.mindir +0 -0
- mindspore/ops/bprop_mindir/Equal_bprop.mindir +0 -19
- mindspore/ops/bprop_mindir/ExpandDims_bprop.mindir +0 -58
- mindspore/ops/bprop_mindir/FastGeLU_bprop.mindir +0 -16
- mindspore/ops/bprop_mindir/Flatten_bprop.mindir +0 -54
- mindspore/ops/bprop_mindir/FloorDiv_bprop.mindir +0 -19
- mindspore/ops/bprop_mindir/GatherD_bprop.mindir +0 -26
- mindspore/ops/bprop_mindir/GatherNd_bprop.mindir +0 -57
- mindspore/ops/bprop_mindir/Gather_bprop.mindir +0 -0
- mindspore/ops/bprop_mindir/GreaterEqual_bprop.mindir +0 -19
- mindspore/ops/bprop_mindir/Greater_bprop.mindir +0 -19
- mindspore/ops/bprop_mindir/HSigmoid_bprop.mindir +0 -16
- mindspore/ops/bprop_mindir/HSwish_bprop.mindir +0 -16
- mindspore/ops/bprop_mindir/IOU_bprop.mindir +0 -19
- mindspore/ops/bprop_mindir/InstanceNorm_bprop.mindir +0 -0
- mindspore/ops/bprop_mindir/IsFinite_bprop.mindir +0 -15
- mindspore/ops/bprop_mindir/IsInf_bprop.mindir +0 -15
- mindspore/ops/bprop_mindir/IsNan_bprop.mindir +0 -15
- mindspore/ops/bprop_mindir/KLDivLoss_bprop.mindir +0 -126
- mindspore/ops/bprop_mindir/L2Loss_bprop.mindir +0 -15
- mindspore/ops/bprop_mindir/L2Normalize_bprop.mindir +0 -30
- mindspore/ops/bprop_mindir/LRN_bprop.mindir +0 -43
- mindspore/ops/bprop_mindir/LayerNormGrad_bprop.mindir +0 -0
- mindspore/ops/bprop_mindir/LessEqual_bprop.mindir +0 -19
- mindspore/ops/bprop_mindir/Less_bprop.mindir +0 -19
- mindspore/ops/bprop_mindir/LinSpace_bprop.mindir +0 -23
- mindspore/ops/bprop_mindir/Load_bprop.mindir +0 -13
- mindspore/ops/bprop_mindir/LogSoftmax_bprop.mindir +0 -23
- mindspore/ops/bprop_mindir/LogicalAnd_bprop.mindir +0 -19
- mindspore/ops/bprop_mindir/LogicalNot_bprop.mindir +0 -15
- mindspore/ops/bprop_mindir/MaskedSelect_bprop.mindir +0 -21
- mindspore/ops/bprop_mindir/MaxPool3DGradGrad_bprop.mindir +0 -74
- mindspore/ops/bprop_mindir/MaxPool3DGrad_bprop.mindir +0 -74
- mindspore/ops/bprop_mindir/MaxPool3D_bprop.mindir +0 -75
- mindspore/ops/bprop_mindir/MaxPoolGradGrad_bprop.mindir +0 -65
- mindspore/ops/bprop_mindir/MaxPoolWithArgmax_bprop.mindir +0 -0
- mindspore/ops/bprop_mindir/Maximum_bprop.mindir +0 -0
- mindspore/ops/bprop_mindir/Minimum_bprop.mindir +0 -0
- mindspore/ops/bprop_mindir/MirrorPad_bprop.mindir +0 -27
- mindspore/ops/bprop_mindir/Mish_bprop.mindir +0 -35
- mindspore/ops/bprop_mindir/MulNoNan_bprop.mindir +0 -0
- mindspore/ops/bprop_mindir/NLLLoss_bprop.mindir +0 -0
- mindspore/ops/bprop_mindir/NonZero_bprop.mindir +0 -14
- mindspore/ops/bprop_mindir/NotEqual_bprop.mindir +0 -19
- mindspore/ops/bprop_mindir/OneHot_bprop.mindir +0 -26
- mindspore/ops/bprop_mindir/OnesLike_bprop.mindir +0 -14
- mindspore/ops/bprop_mindir/PReLU_bprop.mindir +0 -0
- mindspore/ops/bprop_mindir/Pad_bprop.mindir +0 -0
- mindspore/ops/bprop_mindir/Padding_bprop.mindir +0 -0
- mindspore/ops/bprop_mindir/RNNTLoss_bprop.mindir +0 -29
- mindspore/ops/bprop_mindir/ROIAlign_bprop.mindir +0 -82
- mindspore/ops/bprop_mindir/Range_bprop.mindir +0 -22
- mindspore/ops/bprop_mindir/Rank_bprop.mindir +0 -14
- mindspore/ops/bprop_mindir/ReLU6_bprop.mindir +0 -16
- mindspore/ops/bprop_mindir/ReLUV2_bprop.mindir +0 -0
- mindspore/ops/bprop_mindir/ReduceAll_bprop.mindir +0 -19
- mindspore/ops/bprop_mindir/ReduceAny_bprop.mindir +0 -19
- mindspore/ops/bprop_mindir/ReluGrad_bprop.mindir +0 -20
- mindspore/ops/bprop_mindir/Reshape_bprop.mindir +0 -60
- mindspore/ops/bprop_mindir/ResizeBilinear_bprop.mindir +0 -29
- mindspore/ops/bprop_mindir/ResizeNearestNeighbor_bprop.mindir +0 -89
- mindspore/ops/bprop_mindir/ReverseSequence_bprop.mindir +0 -52
- mindspore/ops/bprop_mindir/ReverseV2_bprop.mindir +0 -22
- mindspore/ops/bprop_mindir/Round_bprop.mindir +0 -15
- mindspore/ops/bprop_mindir/ScatterMax_bprop.mindir +0 -0
- mindspore/ops/bprop_mindir/ScatterMin_bprop.mindir +0 -0
- mindspore/ops/bprop_mindir/ScatterNdUpdate_bprop.mindir +0 -22
- mindspore/ops/bprop_mindir/ScatterNd_bprop.mindir +0 -24
- mindspore/ops/bprop_mindir/ScatterNonAliasingAdd_bprop.mindir +0 -22
- mindspore/ops/bprop_mindir/ScatterUpdate_bprop.mindir +0 -0
- mindspore/ops/bprop_mindir/SeLU_bprop.mindir +0 -21
- mindspore/ops/bprop_mindir/Select_bprop.mindir +0 -31
- mindspore/ops/bprop_mindir/Shape_bprop.mindir +0 -14
- mindspore/ops/bprop_mindir/SigmoidCrossEntropyWithLogits_bprop.mindir +0 -21
- mindspore/ops/bprop_mindir/SigmoidGrad_bprop.mindir +0 -0
- mindspore/ops/bprop_mindir/Sigmoid_bprop.mindir +0 -16
- mindspore/ops/bprop_mindir/Sign_bprop.mindir +0 -15
- mindspore/ops/bprop_mindir/Slice_bprop.mindir +0 -26
- mindspore/ops/bprop_mindir/SmoothL1Loss_bprop.mindir +0 -36
- mindspore/ops/bprop_mindir/SoftmaxCrossEntropyWithLogits_bprop.mindir +0 -0
- mindspore/ops/bprop_mindir/Softplus_bprop.mindir +0 -16
- mindspore/ops/bprop_mindir/Softsign_bprop.mindir +0 -33
- mindspore/ops/bprop_mindir/Sort_bprop.mindir +0 -0
- mindspore/ops/bprop_mindir/SpaceToBatchND_bprop.mindir +0 -28
- mindspore/ops/bprop_mindir/SpaceToDepth_bprop.mindir +0 -23
- mindspore/ops/bprop_mindir/SparseGatherV2_bprop.mindir +0 -0
- mindspore/ops/bprop_mindir/SparseSoftmaxCrossEntropyWithLogits_bprop.mindir +0 -0
- mindspore/ops/bprop_mindir/Split_bprop.mindir +0 -22
- mindspore/ops/bprop_mindir/Squeeze_bprop.mindir +0 -54
- mindspore/ops/bprop_mindir/StridedSliceGrad_bprop.mindir +0 -95
- mindspore/ops/bprop_mindir/StridedSlice_bprop.mindir +0 -98
- mindspore/ops/bprop_mindir/Switch_bprop.mindir +0 -29
- mindspore/ops/bprop_mindir/TanhGrad_bprop.mindir +0 -0
- mindspore/ops/bprop_mindir/Tanh_bprop.mindir +0 -66
- mindspore/ops/bprop_mindir/TensorScatterAdd_bprop.mindir +0 -22
- mindspore/ops/bprop_mindir/TensorScatterUpdate_bprop.mindir +0 -29
- mindspore/ops/bprop_mindir/TensorShape_bprop.mindir +0 -14
- mindspore/ops/bprop_mindir/Tile_bprop.mindir +0 -0
- mindspore/ops/bprop_mindir/TopK_bprop.mindir +0 -0
- mindspore/ops/bprop_mindir/TransShape_bprop.mindir +0 -23
- mindspore/ops/bprop_mindir/TruncateDiv_bprop.mindir +0 -19
- mindspore/ops/bprop_mindir/TupleGetItem_bprop.mindir +0 -20
- mindspore/ops/bprop_mindir/Unique_bprop.mindir +0 -16
- mindspore/ops/bprop_mindir/Unstack_bprop.mindir +0 -22
- mindspore/ops/bprop_mindir/UpsampleNearest3D_bprop.mindir +0 -32
- mindspore/ops/bprop_mindir/UpsampleTrilinear3D_bprop.mindir +0 -38
- mindspore/ops/bprop_mindir/ZerosLike_bprop.mindir +0 -15
- mindspore/ops/bprop_mindir/generate_mindir.py +0 -114
- mindspore/rewrite/node_visitor.py +0 -44
- mindspore/rewrite/topological_manager.py +0 -203
- mindspore/scipy/sparse/linalg.py +0 -192
- {mindspore-2.0.0rc1.dist-info → mindspore-2.2.0.dist-info}/WHEEL +0 -0
- {mindspore-2.0.0rc1.dist-info → mindspore-2.2.0.dist-info}/top_level.txt +0 -0
|
@@ -1,4 +1,4 @@
|
|
|
1
|
-
# Copyright 2019-
|
|
1
|
+
# Copyright 2019-2023 Huawei Technologies Co., Ltd
|
|
2
2
|
#
|
|
3
3
|
# Licensed under the Apache License, Version 2.0 (the "License");
|
|
4
4
|
# you may not use this file except in compliance with the License.
|
|
@@ -19,6 +19,7 @@ After declaring the dataset object, you can further apply dataset operations
|
|
|
19
19
|
(e.g. filter, skip, concat, map, batch) on it.
|
|
20
20
|
"""
|
|
21
21
|
import builtins
|
|
22
|
+
import errno
|
|
22
23
|
import math
|
|
23
24
|
import os
|
|
24
25
|
import signal
|
|
@@ -47,6 +48,7 @@ from ..core.config import get_enable_shared_mem, get_prefetch_size, get_multipro
|
|
|
47
48
|
get_enable_watchdog, get_debug_mode
|
|
48
49
|
from ..core.datatypes import mstypelist_to_detypelist
|
|
49
50
|
from ..core.py_util_helpers import ExceptionHandler
|
|
51
|
+
from ..transforms import transforms
|
|
50
52
|
|
|
51
53
|
|
|
52
54
|
def _iter_fn(dataset, num_samples):
|
|
@@ -127,15 +129,30 @@ def _fill_worker_indices(workers, indices, idx):
|
|
|
127
129
|
return idx
|
|
128
130
|
|
|
129
131
|
|
|
132
|
+
def _fill_worker_quit_flag(workers, worker_to_quit):
|
|
133
|
+
"""
|
|
134
|
+
Worker index queue filler, fill worker index queue with QUIT flag.
|
|
135
|
+
"""
|
|
136
|
+
num_worker = len(workers)
|
|
137
|
+
for i in range(num_worker):
|
|
138
|
+
# just put only one QUIT flag to the sub-thread / sub-process
|
|
139
|
+
if str(i) not in worker_to_quit:
|
|
140
|
+
try:
|
|
141
|
+
workers[i].put("QUIT")
|
|
142
|
+
worker_to_quit[str(i)] = "QUIT"
|
|
143
|
+
except queue.Full:
|
|
144
|
+
continue
|
|
145
|
+
|
|
146
|
+
|
|
130
147
|
def _convert_row(row):
|
|
131
148
|
"""
|
|
132
149
|
Convert Op return value to numpy, or keep as a dict (if already a dict)
|
|
133
150
|
"""
|
|
134
151
|
|
|
135
152
|
# convert single item to np.array
|
|
136
|
-
prim_type = (int, float, str, bytes, np.ndarray, Tensor)
|
|
153
|
+
prim_type = (int, float, str, bytes, np.ndarray, Tensor, np.number, np.bool_)
|
|
137
154
|
if isinstance(row, prim_type):
|
|
138
|
-
if isinstance(row, Tensor):
|
|
155
|
+
if isinstance(row, Tensor): # mindspore.Tensor
|
|
139
156
|
item = row.asnumpy()
|
|
140
157
|
else:
|
|
141
158
|
item = np.array(row, copy=False)
|
|
@@ -152,7 +169,7 @@ def _convert_row(row):
|
|
|
152
169
|
idx = 0
|
|
153
170
|
for x in row:
|
|
154
171
|
idx += 1
|
|
155
|
-
if isinstance(x, Tensor):
|
|
172
|
+
if isinstance(x, Tensor): # mindspore.Tensor
|
|
156
173
|
value.append(x.asnumpy())
|
|
157
174
|
elif isinstance(x, dict):
|
|
158
175
|
value.append(x)
|
|
@@ -199,20 +216,30 @@ class SamplerFn:
|
|
|
199
216
|
queue_size = max(2, queue_size)
|
|
200
217
|
|
|
201
218
|
if multi_process and get_enable_shared_mem():
|
|
202
|
-
|
|
203
|
-
|
|
219
|
+
# generator dataset use idx_queue and res_queue to transfer data between main and subprocess
|
|
220
|
+
# idx_queue is used multiprocess.Queue which is not shared memory, so it's size is 0.
|
|
221
|
+
# res_queue is used shared memory, so it' size is max_rowsize which is defined by user.
|
|
222
|
+
_check_shm_usage(num_worker, queue_size, 0, max_rowsize)
|
|
223
|
+
self.count = multiprocessing.Value('i', 0)
|
|
204
224
|
for _ in range(num_worker):
|
|
205
225
|
if multi_process is True:
|
|
206
226
|
try:
|
|
207
|
-
worker = _GeneratorWorkerMp(dataset, self.eof, max_rowsize, queue_size, self.ppid, count)
|
|
208
|
-
|
|
209
|
-
|
|
210
|
-
|
|
211
|
-
|
|
212
|
-
|
|
213
|
-
|
|
214
|
-
|
|
215
|
-
|
|
227
|
+
worker = _GeneratorWorkerMp(dataset, self.eof, max_rowsize, queue_size, self.ppid, self.count)
|
|
228
|
+
worker.daemon = True
|
|
229
|
+
# When multi processes fork a subprocess, the lock of the main process is copied to the subprocess,
|
|
230
|
+
# which may cause deadlock. Therefore, the subprocess startup is performed in the initialization
|
|
231
|
+
# phase. In this phase, the main process is not locked.
|
|
232
|
+
worker.start()
|
|
233
|
+
except OSError as e:
|
|
234
|
+
if e.errno == errno.EMFILE:
|
|
235
|
+
raise RuntimeError("Failed to launch multiprocessing of GeneratorDataset: "
|
|
236
|
+
"Too many open files. Please check if `num_parallel_workers` "
|
|
237
|
+
"is set too large, or you are creating iterators multiple times. "
|
|
238
|
+
"You can also increase the limit using `ulimit -n` in the shell "
|
|
239
|
+
"to avoid this error.")
|
|
240
|
+
raise
|
|
241
|
+
except Exception as e:
|
|
242
|
+
raise RuntimeError("Failed to launch multiprocessing of GeneratorDataset: {0}".format(e))
|
|
216
243
|
self.pids.append(worker.pid)
|
|
217
244
|
self.need_join = True
|
|
218
245
|
else:
|
|
@@ -229,7 +256,11 @@ class SamplerFn:
|
|
|
229
256
|
for w in self.workers:
|
|
230
257
|
# Check whether the queue of the subprocess is empty.
|
|
231
258
|
if not w.queue_empty():
|
|
232
|
-
|
|
259
|
+
# in failover reset scenario the QUIT flag should be pop first
|
|
260
|
+
while w.idx_queue.qsize() > 0:
|
|
261
|
+
result = w.idx_queue.get()
|
|
262
|
+
if result != "QUIT":
|
|
263
|
+
raise Exception("The queue of the subprocess is not empty.")
|
|
233
264
|
# Start all workers
|
|
234
265
|
             if not w.is_alive():
                 w.start()
@@ -238,6 +269,9 @@ class SamplerFn:
         idx_cursor = 0
         idx_cursor = _fill_worker_indices(self.workers, indices, idx_cursor)

+        # worker to quit
+        worker_to_quit = {}
+
         # Fetch results
         for i in range(len(indices)):
             if self.eof.is_set():
@@ -261,21 +295,7 @@ class SamplerFn:
                     cost_time = int(time.time()) - start_time
                     if cost_time / self.check_interval >= wait_count:
                         wait_count += 1
-
-                                       "thread/process of the generator generates data had been hung by gil lock. "
-                                       "Check whether the source of generator has an infinite loop operation or the "
-                                       "output data is too large. You can also set the timeout interval by "
-                                       "ds.config.set_multiprocessing_interval to adjust the output frequency of this "
-                                       "log.")
-                        pid = self.workers[i % self.num_worker].pid
-                        logger.warning("Generator subprocess ID {} is stuck.".format(pid))
-                        install_status, _ = subprocess.getstatusoutput("py-spy --version")
-                        if install_status == 0:
-                            stack = subprocess.getoutput("py-spy dump -p {} -l -s".format(pid))
-                            logger.warning("Generator subprocess stack:\n{}".format(stack))
-                        else:
-                            logger.warning("Please `pip install py-spy` to get the stacks of the stuck process.")
-
+                        self._log_stuck_warning(self.workers[i % self.num_worker], cost_time)
                 result = self.workers[i % self.num_worker].get()
                 if isinstance(result, ExceptionHandler):
                     result.reraise()
@@ -290,8 +310,47 @@ class SamplerFn:
                 return
             if idx_cursor < len(indices):
                 idx_cursor = _fill_worker_indices(self.workers, indices, idx_cursor)
+            else:
+                # send QUIT flag to workers
+                _fill_worker_quit_flag(self.workers, worker_to_quit)
             yield _convert_row(result)

+    def _log_stuck_warning(self, worker, waiting_time):
+        """
+        Log warning of the stuck worker, containing the worker ID, waiting time and
+        the current stack (if py-spy installed).
+
+        Args:
+            worker (Union[threading.Thread, multiprocessing.Process]): The worker instance.
+            waiting_time (int): The waiting time for getting data from the worker.
+        """
+        if self.multi_process:
+            stuck_worker_id = worker.pid
+            worker_type = "process"
+            stuck_pid = stuck_worker_id
+        else:
+            if hasattr(worker, "native_id"):
+                # only supported since Python 3.8
+                stuck_worker_id = worker.native_id
+            else:
+                stuck_worker_id = worker.ident
+            worker_type = "thread"
+            stuck_pid = os.getpid()  # get the process ID of the stuck thread
+        warning_message = "Has been waiting for data from Generator worker {0} ID '{1}' " \
+                          "for more than {2} seconds. Please check if the user defined " \
+                          "dataset of GeneratorDataset has a dead loop, or is processing " \
+                          "too slowly. ".format(worker_type, stuck_worker_id, waiting_time)
+        install_status, _ = subprocess.getstatusoutput("py-spy --version")
+        if install_status == 0:
+            stack = subprocess.getoutput("py-spy dump -p {}".format(stuck_pid))
+            warning_message += "Below is the stack of this worker:\n{0}\n".format(stack)
+        else:
+            warning_message += "You can install py-spy via `pip install py-spy`, then " \
+                               "stop and rerun your script to get the current stack. "
+        warning_message += "If it is not a problem, you can adjust the printing frequency of this log via " \
+                           "the `mindspore.dataset.config.set_multiprocessing_timeout_interval` interface."
+        logger.warning(warning_message)
+
     def _launch_cleanup_worker(self, multi_process):
         """
         We need a extra thread and process if main process or subprocess was killed.
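The hunk above replaces the inline stuck-worker logging with the new `_log_stuck_warning` helper, which dumps the worker's stack through the external `py-spy` tool when it is installed. As a rough, standalone illustration of that behaviour outside MindSpore, the sketch below (standard library plus the optional py-spy CLI; the helper name `dump_stack_if_possible` is ours, not part of the package) checks whether py-spy is available and falls back to an install hint otherwise:

import os
import subprocess

def dump_stack_if_possible(pid):
    """Return a stack dump for `pid` if the py-spy CLI is available, else an install hint."""
    install_status, _ = subprocess.getstatusoutput("py-spy --version")
    if install_status == 0:
        # Same invocation style the new helper uses for stuck Generator workers.
        return subprocess.getoutput("py-spy dump -p {}".format(pid))
    return "py-spy not installed; run `pip install py-spy` to capture worker stacks."

if __name__ == "__main__":
    print(dump_stack_if_possible(os.getpid()))

The new warning text also points users at `mindspore.dataset.config.set_multiprocessing_timeout_interval` to control how often the message is printed.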
@@ -322,23 +381,61 @@ class SamplerFn:
     def _stop_subprocess(self):
         """Only the main process can call join."""
         if self.need_join is True and self.ppid == os.getpid():
+            # close the watch dog first
+            self._abort_watchdog()
+
             if hasattr(self, 'eof') and self.eof is not None and not self.eof.is_set():
                 self.eof.set()
             self.need_join = False
             for w in self.workers:
                 if self.multi_process is True and hasattr(w, '_closed') and w._closed is False:  # pylint: disable=W0212
                     try:
+                        # del the queue first
+                        del w.res_queue
+                        del w.idx_queue
+
+                        # close all the subprocess workers
+                        w.terminate()
                         w.join()
+                        w.close()
                     except Exception:  # pylint: disable=W0703
                         # Block all errors when join
                         continue
-
+
+            # release the file descriptor handle
+            check_interval = get_multiprocessing_timeout_interval()
+            for w in self.workers:
+                try:
+                    subprocess_file_descriptor = w.sentinel
+                    st = time.time()
+                    while _PythonMultiprocessing.is_process_alive(w.pid):
+                        time.sleep(0.01)  # sleep 10ms, waiting for the subprocess exit
+                        if time.time() - st > check_interval:
+                            logger.warning("Waiting for the subprocess worker [{}] to exit.".format(w.pid))
+                            st += check_interval
+                except ValueError as e:
+                    if "process object is closed" in str(e):
+                        continue
+                    raise e
+                try:
+                    if w.is_alive():
+                        os.close(subprocess_file_descriptor)
+                except OSError as e:
+                    # Maybe the file descriptor had been released, so ignore the 'Bad file descriptor'
+                    if "Bad file descriptor" not in str(e):
+                        raise e
+
+            self.workers.clear()
+            self.workers = None

     def _abort_watchdog(self):
         if hasattr(self, 'eot') and self.eot is not None and not self.eot.is_set():
             self.eot.set()
         if hasattr(self, 'cleaning_process') and self.cleaning_process is not None:
             _PythonMultiprocessing._terminate_processes([self.cleaning_process])  # pylint: disable=W0212
+            del self.cleaning_process
+        if hasattr(self, 'count'):
+            del self.count

     @classmethod
     def _finalize_join(cls, twr, eot):
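The reworked `_stop_subprocess` above deletes the worker queues, terminates and closes each subprocess, and then polls until the child has really exited before releasing its sentinel descriptor; once a `multiprocessing.Process` has been closed, touching its attributes raises the "process object is closed" `ValueError` that the new code tolerates. A minimal standard-library sketch of that shutdown pattern (the timings and names here are illustrative only):

import multiprocessing as mp
import time

def _work():
    time.sleep(0.2)

if __name__ == "__main__":
    p = mp.Process(target=_work, name="GeneratorWorkerProcess")
    p.start()
    deadline = time.time() + 5
    while p.is_alive() and time.time() < deadline:
        time.sleep(0.01)          # poll in small steps, as the new cleanup loop does
    p.join()
    p.close()                     # after close(), attribute access raises ValueError
    try:
        p.is_alive()
    except ValueError as e:
        print("process object is closed:", e)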
@@ -388,6 +485,7 @@ def _generator_worker_loop(dataset, idx_queue, result_queue, eof, is_multiproces
     Multithread or multiprocess generator worker process loop.
     """
     if is_multiprocessing:
+        result_queue.cancel_join_thread()  # Ensure that the process does not hung when exiting
         signal.signal(signal.SIGTERM, partial(_subprocess_handle, eof))
     while True:
         _ignore_sigint(is_multiprocessing=is_multiprocessing)
@@ -397,19 +495,26 @@ def _generator_worker_loop(dataset, idx_queue, result_queue, eof, is_multiproces
             idx = idx_queue.get(timeout=1)
         except queue.Empty:
             if _main_process_already_exit(eof, is_multiprocessing, idx_queue, result_queue, ppid) is True:
+                del idx_queue
+                del result_queue
                 return
             # If end-of-file (eof) is not set, continue to get data from idx_queue
             continue
+        if idx == "QUIT":
+            # all the data had been processed, so we release the executor which is used by the current thread/process
+            transforms.clean_unused_executors()
+            continue
         if idx is None:
             # When the queue is out of scope from master process, a None item can be fetched from the queue.
             # Upon receiving None, worker process should check if eof is set.
             if not eof.is_set():
                 raise Exception("")
+            del idx_queue
+            del result_queue
             return
         if eof.is_set():
-
-
-            result_queue.cancel_join_thread()
+            del idx_queue
+            del result_queue
             return
         # Fetch data, any exception from __getitem__ will terminate worker and timeout master process
         try:
@@ -422,6 +527,8 @@ def _generator_worker_loop(dataset, idx_queue, result_queue, eof, is_multiproces
                 result_queue.put(result, timeout=5)
             except queue.Full:
                 if _main_process_already_exit(eof, is_multiprocessing, idx_queue, result_queue, ppid) is True:
+                    del idx_queue
+                    del result_queue
                     return
                 # If eof is not set, continue to put data to result_queue
                 continue
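These hunks make the worker loop tolerant of shutdown: the result queue's feeder thread is cancelled up front, the queues are explicitly deleted on every exit path, and a new "QUIT" index tells an idle worker to release its cached executors without exiting. A stripped-down sketch of the same sentinel-in-queue pattern, with no MindSpore dependencies (the `worker` function and the squaring payload are our own illustration):

import multiprocessing as mp
import queue

def worker(idx_queue, result_queue):
    result_queue.cancel_join_thread()   # do not block process exit on buffered results
    while True:
        try:
            idx = idx_queue.get(timeout=1)
        except queue.Empty:
            continue
        if idx == "QUIT":
            # Sentinel: all indices consumed; stay alive but drop per-epoch state.
            continue
        if idx is None:                 # second sentinel: exit for good
            return
        result_queue.put(idx * idx)

if __name__ == "__main__":
    idx_q, res_q = mp.Queue(), mp.Queue()
    p = mp.Process(target=worker, args=(idx_q, res_q))
    p.start()
    for i in range(3):
        idx_q.put(i)
    idx_q.put("QUIT")
    print(sorted(res_q.get() for _ in range(3)))
    idx_q.put(None)
    p.join()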
@@ -437,7 +544,8 @@ class _GeneratorWorkerMt(threading.Thread):
     def __init__(self, dataset, eof):
         self.idx_queue = queue.Queue(16)
         self.res_queue = queue.Queue(16)
-        super().__init__(target=_generator_worker_loop, args=(dataset, self.idx_queue, self.res_queue, eof, False)
+        super().__init__(target=_generator_worker_loop, args=(dataset, self.idx_queue, self.res_queue, eof, False),
+                         name="GeneratorWorkerThread")

     def put(self, item):
         """
@@ -472,9 +580,9 @@ class _GeneratorWorkerMp(multiprocessing.Process):
             self.res_queue = _SharedQueue(queue_size, count, max_rowsize=max_rowsize)
         else:
             self.res_queue = multiprocessing.Queue(queue_size)
-        self.idx_queue.
-        self.res_queue
-
+        self.idx_queue.cancel_join_thread()  # Ensure that the process does not hung when exiting
+        super().__init__(target=_generator_worker_loop, args=(dataset, self.idx_queue, self.res_queue, eof, True, ppid),
+                         name="GeneratorWorkerProcess")

     def put(self, item):
         """
@@ -501,8 +609,11 @@ class _GeneratorWorkerMp(multiprocessing.Process):

     def __del__(self):
         # del all the Queue & SharedQueue when the iter had been deleted from ITERATORS_LIST
-
-
+        if hasattr(self, 'idx_queue'):
+            del self.idx_queue
+        if hasattr(self, 'res_queue'):
+            # del the queue when has
+            del self.res_queue


 class GeneratorDataset(MappableDataset, UnionBaseDataset):
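Both worker wrappers now pass an explicit `name` to the thread/process constructor, which makes the generator workers easy to identify in `py-spy dump` output, process listings, and log lines. A tiny illustration of the effect (worker names copied from the diff; the `loop` function is ours):

import threading

def loop():
    print("running in", threading.current_thread().name)

t = threading.Thread(target=loop, name="GeneratorWorkerThread")
t.start()
t.join()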
@@ -519,30 +630,33 @@ class GeneratorDataset(MappableDataset, UnionBaseDataset):
             iter(source).next().
             Random accessible source is required to return a tuple of NumPy arrays as a row of the dataset on
             source[idx].
-        column_names (Union[str, list[str]], optional): List of column names of the dataset. Default: None.
-            required to provide either column_names or schema.
-        column_types (list[mindspore.dtype], optional): List of column data types of the dataset. Default: None.
+        column_names (Union[str, list[str]], optional): List of column names of the dataset. Default: ``None`` .
+            Users are required to provide either column_names or schema.
+        column_types (list[mindspore.dtype], optional): List of column data types of the dataset. Default: ``None`` .
             If provided, sanity check will be performed on generator output.
         schema (Union[str, Schema], optional): Data format policy, which specifies the data types and shapes of the data
-            column to be read. Both JSON file path and objects constructed by mindspore.dataset.Schema are
-            Default: None.
+            column to be read. Both JSON file path and objects constructed by :class:`mindspore.dataset.Schema` are
+            acceptable. Default: ``None`` .
         num_samples (int, optional): The number of samples to be included in the dataset.
-            Default: None, all images.
+            Default: ``None`` , all images.
         num_parallel_workers (int, optional): Number of worker threads/subprocesses used to
-            fetch the dataset in parallel. Default: 1
+            fetch the dataset in parallel. Default: ``1``.
         shuffle (bool, optional): Whether or not to perform shuffle on the dataset. Random accessible input is required.
-            Default: None, expected order behavior shown in the table below.
+            Default: ``None`` , expected order behavior shown in the table below.
         sampler (Union[Sampler, Iterable], optional): Object used to choose samples from the dataset. Random accessible
-            input is required. Default: None, expected order behavior shown in the table below.
-        num_shards (int, optional): Number of shards that the dataset will be divided into. Default: None.
+            input is required. Default: ``None`` , expected order behavior shown in the table below.
+        num_shards (int, optional): Number of shards that the dataset will be divided into. Default: ``None`` .
             Random accessible input is required. When this argument is specified, `num_samples` reflects the maximum
             sample number of per shard.
-        shard_id (int, optional): The shard ID within `num_shards` . Default: None.
-            when `num_shards` is also specified.
+        shard_id (int, optional): The shard ID within `num_shards` . Default: ``None`` .
+            This argument must be specified only when `num_shards` is also specified.
+            Random accessible input is required.
         python_multiprocessing (bool, optional): Parallelize Python operations with multiple worker process. This
-            option could be beneficial if the Python operation is computational heavy. Default: True
-        max_rowsize(int, optional): Maximum size of row in MB that is used for shared memory
-            data between processes
+            option could be beneficial if the Python operation is computational heavy. Default: ``True``.
+        max_rowsize(int, optional): Maximum size of row in MB that is used for shared memory
+            allocation to copy data between processes, the total occupied shared memory will increase as
+            ``num_parallel_workers`` and :func:`mindspore.dataset.config.set_prefetch_size` increase. This is only
+            used if python_multiprocessing is set to True. Default: 16.

     Raises:
         RuntimeError: If source raises an exception during execution.
@@ -554,11 +668,16 @@ class GeneratorDataset(MappableDataset, UnionBaseDataset):
         ValueError: If shard_id is specified but `num_shards` is None.
         ValueError: If `shard_id` is not in range of [0, `num_shards` ).

+    Tutorial Examples:
+        - `Load & Process Data With Dataset Pipeline
+          <https://www.mindspore.cn/docs/en/r2.2/api_python/samples/dataset/dataset_gallery.html>`_
+
     Note:
-        - If you configure `python_multiprocessing=True (
-          indicates that the multi-process mode is started for data load acceleration.
-
-          of the user-defined dataset obtains the member variables from the main
+        - If you configure `python_multiprocessing=True` (Default: ``True`` ) and `num_parallel_workers>1`
+          (default: ``1`` ) indicates that the multi-process mode is started for data load acceleration.
+          At this time, as the datasetiterates, the memory consumption of the subprocess will gradually increase,
+          mainly because the subprocess of the user-defined dataset obtains the member variables from the main
+          process in the Copy On Write way.
           Example: If you define a dataset with `__ init__` function which contains a large number of member variable
           data (for example, a very large file name list is loaded during the dataset construction) and uses the
           multi-process mode, which may cause the problem of OOM (the estimated total memory usage is:
@@ -566,38 +685,23 @@ class GeneratorDataset(MappableDataset, UnionBaseDataset):
          (such as list/dict/int/float/string) with non referenced data types
          (such as Pandas, Numpy or PyArrow objects) for member variables, or load less meta data in member variables,
          or configure `python_multiprocessing=False` to use multi-threading mode.
+
+          There are several classes/functions that can help you reduce the size of member variables, and you can choose
+          to use them:
+
+          1. :class:`mindspore.dataset.utils.LineReader`: Use this class to initialize your text file object in the
+          `__init__` function. Then read the file content based on the line number of the object with the `__getitem__`
+          function.
+
         - Input `source` accepts user-defined Python functions (PyFuncs), Do not add network computing operators from
           mindspore.nn and mindspore.ops or others into this `source` .
-        -
-
-
-    ..
-       :widths: 25 25 50
-       :header-rows: 1
-
-       * - Parameter `sampler`
-         - Parameter `shuffle`
-         - Expected Order Behavior
-       * - None
-         - None
-         - random order
-       * - None
-         - True
-         - random order
-       * - None
-         - False
-         - sequential order
-       * - Sampler object
-         - None
-         - order defined by sampler
-       * - Sampler object
-         - True
-         - not allowed
-       * - Sampler object
-         - False
-         - not allowed
+        - The parameters `num_samples` , `shuffle` , `num_shards` , `shard_id` can be used to control the sampler
+          used in the dataset, and their effects when combined with parameter `sampler` are as follows.
+
+    .. include:: mindspore.dataset.sampler.txt

     Examples:
+        >>> import mindspore.dataset as ds
         >>> import numpy as np
         >>>
         >>> # 1) Multidimensional generator function as callable input.
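The updated docstring spells out the copy-on-write caveat of `python_multiprocessing=True`: large Python containers held by the user-defined dataset get duplicated into every worker as the dataset iterates. A hedged sketch of the recommended shape of a random-access source, keeping member data in a NumPy array instead of a Python list (the class name and sizes are illustrative, not from the package; the constructor keywords follow the docstring above):

import numpy as np
import mindspore.dataset as ds

class ArrayBackedSource:
    """Random-access source whose only member data is a NumPy array (fork friendly)."""

    def __init__(self, n=1000):
        self._values = np.arange(n, dtype=np.int64)

    def __getitem__(self, index):
        # Return a tuple of NumPy arrays, as required for source[idx].
        return (self._values[index:index + 1],)

    def __len__(self):
        return len(self._values)

dataset = ds.GeneratorDataset(ArrayBackedSource(), column_names=["value"],
                              num_parallel_workers=2, python_multiprocessing=True)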
@@ -712,42 +816,7 @@ class GeneratorDataset(MappableDataset, UnionBaseDataset):
     def __deepcopy__(self, memodict):
         if id(self) in memodict:
             return memodict[id(self)]
-
-
-        sample_fn = None
-        if new_op.sampler is not None and hasattr(self.source, "__getitem__"):
-            # The reason why there is a try catch here is because when the new op is being constructed with shared
-            # memory enabled, there will be an exception thrown if there is not enough shared memory available
-            if self.source_len == -1:
-                raise RuntimeError("Attempt to construct a random access dataset, '__len__' method is required!")
-            try:
-                if new_op.num_parallel_workers > 1:
-                    self.__validate_memory_usage()
-
-                    sample_fn = SamplerFn(self.source, new_op.num_parallel_workers, self.python_multiprocessing,
-                                          self.max_rowsize)
-                    new_op.prepared_source = (lambda sample_ids: _cpp_sampler_fn_mp(sample_ids, sample_fn))
-                else:
-                    new_op.prepared_source = (lambda sample_ids: _cpp_sampler_fn(sample_ids, self.source))
-                new_op.sample_fn = sample_fn
-            except RuntimeError as e:
-                raise Exception(str(e))
-        else:
-            try:
-                new_op.sampler = None
-                new_op.sample_fn = sample_fn
-                new_op.source_len = min(new_op.source_len,
-                                        new_op.num_samples) if new_op.num_samples != 0 else new_op.source_len
-                iter(self.source)
-            except TypeError:
-                # Use generator function if input callable
-                new_op.prepared_source = (lambda: _generator_fn(self.source, new_op.num_samples))
-            else:
-                # Use iterator function if input is iterable
-                # Random accessible input is also iterable
-                new_op.prepared_source = (lambda: _iter_fn(self.source, new_op.num_samples))
-
-        return new_op
+        return self.__safe_deepcopy__(memodict, exclude=("source", "__transfer_dataset__"))

     def is_shuffled(self):
         if self.sampler:
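`__deepcopy__` now delegates entirely to `__safe_deepcopy__`, copying the op while excluding the user `source` and the transfer dataset. The general idiom, sketched here with plain `copy` (the `Pipeline` class and the exclusion handling are our own illustration, not MindSpore's helper):

import copy

class Pipeline:
    def __init__(self, source):
        self.source = source        # user object that should be shared, not copied
        self.batch_size = 32

    def __deepcopy__(self, memodict):
        new = self.__class__.__new__(self.__class__)
        memodict[id(self)] = new
        for key, value in self.__dict__.items():
            # Excluded attributes are carried over by reference.
            setattr(new, key, value if key == "source" else copy.deepcopy(value, memodict))
        return new

p = Pipeline(source=object())
q = copy.deepcopy(p)
assert q.source is p.source and q.batch_size == p.batch_size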
@@ -766,7 +835,38 @@ class GeneratorDataset(MappableDataset, UnionBaseDataset):
             return super().split(sizes, randomize)
         return super(MappableDataset, self).split(sizes, randomize)

+    def prepare_multiprocessing(self):
+        """Preprocessing of prepared_source."""
+        sample_fn = None
+        if self.sampler is not None and hasattr(self.source, "__getitem__"):
+            # The reason why there is a try catch here is because when the new op is being constructed with shared
+            # memory enabled, there will be an exception thrown if there is not enough shared memory available
+            if self.source_len == -1:
+                raise RuntimeError("Attempt to construct a random access dataset, '__len__' method is required!")
+
+            if self.num_parallel_workers > 1:
+                self.__validate_memory_usage()
+
+                sample_fn = SamplerFn(self.source, self.num_parallel_workers, self.python_multiprocessing,
+                                      self.max_rowsize)
+                self.prepared_source = (lambda sample_ids: _cpp_sampler_fn_mp(sample_ids, sample_fn))
+            else:
+                self.prepared_source = (lambda sample_ids: _cpp_sampler_fn(sample_ids, self.source))
+            self.sample_fn = sample_fn
+        else:
+            self.sampler = None
+            self.sample_fn = sample_fn
+            self.source_len = min(self.source_len, self.num_samples) if self.num_samples != 0 else self.source_len
+            if not hasattr(self.source, "__iter__"):
+                # Use generator function if input callable
+                self.prepared_source = (lambda: _generator_fn(self.source, self.num_samples))
+            else:
+                # Use iterator function if input is iterable
+                # Random accessible input is also iterable
+                self.prepared_source = (lambda: _iter_fn(self.source, self.num_samples))
+
     def parse(self, children=None):
+        self.prepare_multiprocessing()
         if self.schema is None:
             return cde.GeneratorNode(self.prepared_source, self.column_names, self.column_types, self.source_len,
                                      self.sampler, self.num_parallel_workers)
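`prepare_multiprocessing` centralises the source dispatch that `__deepcopy__` used to perform: random-access sources (with `__getitem__`) go through `SamplerFn`, otherwise the op falls back to a callable generator function or a plain iterable. A tiny sketch of just that dispatch order (the `pick_access_mode` helper is ours, not part of the package):

def pick_access_mode(source):
    if hasattr(source, "__getitem__"):
        return "random access, driven by the sampler"
    if not hasattr(source, "__iter__"):
        return "callable generator function"
    return "iterable"

print(pick_access_mode([1, 2, 3]))         # random access, driven by the sampler
print(pick_access_mode(iter(range(3))))    # iterable
print(pick_access_mode(lambda: iter(())))  # callable generator function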
@@ -792,11 +892,11 @@ class GeneratorDataset(MappableDataset, UnionBaseDataset):
             # get process memory usage
             process = psutil.Process(os.getpid())
             process_memory = process.memory_info().rss
-
+            sys_memory_available = psutil.virtual_memory().available

             total_memory_maybe_used = process_memory * self.num_parallel_workers * valid_num_shards
-            if total_memory_maybe_used /
-                valid_num_worker = math.floor(
+            if total_memory_maybe_used / sys_memory_available > 0.85:
+                valid_num_worker = math.floor(sys_memory_available * 0.85 / valid_num_shards / process_memory)
                 valid_num_worker = 1 if valid_num_worker <= 0 else valid_num_worker
                 info = "GeneratorDataset's num_parallel_workers: {} is too large which may cause a lot of memory " \
                        "occupation (>85%) or out of memory(OOM) during multiprocessing. Therefore, it is recommended " \
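The memory check now measures against `psutil.virtual_memory().available` and warns when the estimated footprint of all workers would exceed 85% of it, suggesting a smaller worker count. A standalone sketch of the same arithmetic (assuming `psutil` is installed; the worker and shard counts here are arbitrary):

import math
import os
import psutil

process_memory = psutil.Process(os.getpid()).memory_info().rss
sys_memory_available = psutil.virtual_memory().available

num_parallel_workers, valid_num_shards = 8, 1
total_memory_maybe_used = process_memory * num_parallel_workers * valid_num_shards
if total_memory_maybe_used / sys_memory_available > 0.85:
    # Same heuristic as __validate_memory_usage: stay under 85% of available memory.
    suggested = max(1, math.floor(sys_memory_available * 0.85 / valid_num_shards / process_memory))
    print("num_parallel_workers is too large; suggested value:", suggested)
else:
    print("estimated usage is within the 85% budget")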
@@ -879,50 +979,27 @@ class NumpySlicesDataset(GeneratorDataset):
             NumPy formats. Input data will be sliced along the first dimension and generate additional rows, if input is
             list, there will be one column in each row, otherwise there tends to be multi columns. Large data is not
             recommended to be loaded in this way as data is loading into memory.
-        column_names (list[str], optional): List of column names of the dataset. Default: None. If column_names
-            provided, the output column names will be named as the keys of dict when the input data is a dict,
+        column_names (list[str], optional): List of column names of the dataset. Default: ``None`` . If `column_names`
+            is not provided, the output column names will be named as the keys of dict when the input data is a dict,
             otherwise they will be named like column_0, column_1 ...
-        num_samples (int, optional): The number of samples to be included in the dataset. Default: None,
+        num_samples (int, optional): The number of samples to be included in the dataset. Default: ``None`` ,
+            all samples.
         num_parallel_workers (int, optional): Number of worker subprocesses used to
-            fetch the dataset in parallel. Default: 1
+            fetch the dataset in parallel. Default: ``1``.
         shuffle (bool, optional): Whether or not to perform shuffle on the dataset.
-            Default: None, expected order behavior shown in the table below.
+            Default: ``None`` , expected order behavior shown in the table below.
         sampler (Union[Sampler, Iterable], optional): Object used to choose samples from the dataset.
-            Default: None, expected order behavior shown in the table below.
-        num_shards (int, optional): Number of shards that the dataset will be divided into. Default: None.
+            Default: ``None`` , expected order behavior shown in the table below.
+        num_shards (int, optional): Number of shards that the dataset will be divided into. Default: ``None`` .
             When this argument is specified, `num_samples` reflects the max sample number of per shard.
-        shard_id (int, optional): The shard ID within `num_shards` . Default: None. This argument must be
-            when `num_shards` is also specified.
+        shard_id (int, optional): The shard ID within `num_shards` . Default: ``None`` . This argument must be
+            specified only when `num_shards` is also specified.

     Note:
-        -
-
-
-    ..
-       :widths: 25 25 50
-       :header-rows: 1
-
-       * - Parameter `sampler`
-         - Parameter `shuffle`
-         - Expected Order Behavior
-       * - None
-         - None
-         - random order
-       * - None
-         - True
-         - random order
-       * - None
-         - False
-         - sequential order
-       * - Sampler object
-         - None
-         - order defined by sampler
-       * - Sampler object
-         - True
-         - not allowed
-       * - Sampler object
-         - False
-         - not allowed
+        - The parameters `num_samples` , `shuffle` , `num_shards` , `shard_id` can be used to control the sampler
+          used in the dataset, and their effects when combined with parameter `sampler` are as follows.
+
+    .. include:: mindspore.dataset.sampler.txt

     Raises:
         RuntimeError: If len of column_names does not match output len of data.
@@ -933,7 +1010,12 @@ class NumpySlicesDataset(GeneratorDataset):
         ValueError: If shard_id is specified but `num_shards` is None.
         ValueError: If `shard_id` is not in range of [0, `num_shards` ).

+    Tutorial Examples:
+        - `Load & Process Data With Dataset Pipeline
+          <https://www.mindspore.cn/docs/en/r2.2/api_python/samples/dataset/dataset_gallery.html>`_
+
     Examples:
+        >>> import mindspore.dataset as ds
         >>> # 1) Input data can be a list
         >>> data = [1, 2, 3]
         >>> dataset = ds.NumpySlicesDataset(data=data, column_names=["column_1"])
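Per the revised `column_names` description, a dict source yields its keys as column names when `column_names` is omitted. A short hedged usage sketch (the data values are arbitrary, and `create_tuple_iterator` is assumed to be the usual way of pulling rows out):

import mindspore.dataset as ds

data = {"a": [1, 2, 3], "b": [4.0, 5.0, 6.0]}
dataset = ds.NumpySlicesDataset(data=data, shuffle=False)   # columns become "a" and "b"
for row in dataset.create_tuple_iterator(output_numpy=True):
    print(row)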
@@ -994,7 +1076,12 @@ class PaddedDataset(GeneratorDataset):
         TypeError: If the element of padded_samples is not an instance of dict.
         ValueError: If the padded_samples is empty.

+    Tutorial Examples:
+        - `Load & Process Data With Dataset Pipeline
+          <https://www.mindspore.cn/docs/en/r2.2/api_python/samples/dataset/dataset_gallery.html>`_
+
     Examples:
+        >>> import mindspore.dataset as ds
         >>> import numpy as np
         >>> data = [{'image': np.zeros(1, np.uint8)}, {'image': np.zeros(2, np.uint8)}]
         >>> dataset = ds.PaddedDataset(padded_samples=data)