mindspore 2.3.0__cp39-none-any.whl → 2.3.0rc2__cp39-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release: this version of mindspore might be problematic.

Files changed (423)
  1. mindspore/.commit_id +1 -1
  2. mindspore/Third_Party_Open_Source_Software_Notice +0 -1512
  3. mindspore/__init__.py +1 -2
  4. mindspore/_c_dataengine.cpython-39-aarch64-linux-gnu.so +0 -0
  5. mindspore/_c_expression.cpython-39-aarch64-linux-gnu.so +0 -0
  6. mindspore/_c_mindrecord.cpython-39-aarch64-linux-gnu.so +0 -0
  7. mindspore/_checkparam.py +25 -5
  8. mindspore/_extends/graph_kernel/model/graph_parallel.py +1 -1
  9. mindspore/_extends/parse/__init__.py +2 -2
  10. mindspore/_extends/parse/compile_config.py +0 -29
  11. mindspore/_extends/parse/namespace.py +2 -2
  12. mindspore/_extends/parse/parser.py +5 -21
  13. mindspore/_extends/parse/resources.py +7 -5
  14. mindspore/_extends/parse/standard_method.py +59 -40
  15. mindspore/_mindspore_offline_debug.cpython-39-aarch64-linux-gnu.so +0 -0
  16. mindspore/amp.py +5 -26
  17. mindspore/bin/cache_admin +0 -0
  18. mindspore/bin/cache_server +0 -0
  19. mindspore/boost/adasum.py +1 -1
  20. mindspore/boost/base.py +1 -1
  21. mindspore/boost/boost_cell_wrapper.py +1 -1
  22. mindspore/boost/grad_freeze.py +2 -2
  23. mindspore/boost/less_batch_normalization.py +6 -9
  24. mindspore/common/__init__.py +1 -8
  25. mindspore/common/_register_for_tensor.py +9 -8
  26. mindspore/common/api.py +65 -275
  27. mindspore/common/dtype.py +4 -8
  28. mindspore/common/dump.py +5 -2
  29. mindspore/common/jit_config.py +1 -1
  30. mindspore/common/lazy_inline.py +2 -14
  31. mindspore/common/parameter.py +15 -14
  32. mindspore/common/recompute.py +5 -20
  33. mindspore/common/sparse_tensor.py +6 -21
  34. mindspore/common/tensor.py +52 -100
  35. mindspore/communication/__init__.py +11 -6
  36. mindspore/communication/management.py +94 -92
  37. mindspore/context.py +18 -180
  38. mindspore/dataset/engine/datasets.py +46 -69
  39. mindspore/dataset/engine/datasets_user_defined.py +53 -72
  40. mindspore/dataset/engine/datasets_vision.py +2 -2
  41. mindspore/dataset/engine/queue.py +38 -56
  42. mindspore/dataset/engine/validators.py +5 -11
  43. mindspore/dataset/vision/__init__.py +5 -5
  44. mindspore/dataset/vision/c_transforms.py +5 -5
  45. mindspore/dataset/vision/py_transforms_util.py +1 -1
  46. mindspore/dataset/vision/transforms.py +46 -591
  47. mindspore/dataset/vision/utils.py +1 -121
  48. mindspore/dataset/vision/validators.py +3 -9
  49. mindspore/hal/__init__.py +1 -7
  50. mindspore/hal/device.py +1 -1
  51. mindspore/include/api/model.h +0 -3
  52. mindspore/include/dataset/vision.h +2 -54
  53. mindspore/include/mindapi/base/types.h +0 -1
  54. mindspore/lib/libdnnl.so.2 +0 -0
  55. mindspore/lib/libmindspore.so +0 -0
  56. mindspore/lib/libmindspore_backend.so +0 -0
  57. mindspore/lib/libmindspore_common.so +0 -0
  58. mindspore/lib/libmindspore_core.so +0 -0
  59. mindspore/lib/libmindspore_glog.so.0 +0 -0
  60. mindspore/lib/libmindspore_gpr.so.15 +0 -0
  61. mindspore/lib/libmindspore_grpc++.so.1 +0 -0
  62. mindspore/lib/libmindspore_grpc.so.15 +0 -0
  63. mindspore/lib/libmindspore_shared_lib.so +0 -0
  64. mindspore/lib/libmpi_adapter.so +0 -0
  65. mindspore/lib/libmpi_collective.so +0 -0
  66. mindspore/lib/libnnacl.so +0 -0
  67. mindspore/lib/libopencv_core.so.4.5 +0 -0
  68. mindspore/lib/libps_cache.so +0 -0
  69. mindspore/lib/plugin/ascend/custom_aicore_ops/op_impl/ai_core/tbe/config/ascend310p/aic-ascend310p-ops-info.json +0 -35
  70. mindspore/lib/plugin/ascend/custom_aicore_ops/op_impl/ai_core/tbe/custom_aicore_ops_impl/kv_cache_mgr.py +0 -2
  71. mindspore/lib/plugin/ascend/custom_aicore_ops/op_impl/vector_core/tbe/custom_aicore_ops_impl/kv_cache_mgr.py +0 -2
  72. mindspore/lib/plugin/ascend/custom_aicpu_ops/op_impl/cpu/aicpu_kernel/impl/libcust_cpu_kernels.so +0 -0
  73. mindspore/lib/plugin/ascend/custom_aicpu_ops/op_impl/cpu/config/cust_aicpu_kernel.json +0 -72
  74. mindspore/lib/plugin/ascend/custom_aicpu_ops/op_proto/libcust_op_proto.so +0 -0
  75. mindspore/lib/plugin/ascend/custom_ascendc_ops/op_api/include/{aclnn_all_finite.h → aclnn_add_custom.h} +11 -9
  76. mindspore/lib/plugin/ascend/custom_ascendc_ops/op_api/include/aclnn_decoder_kv_cache.h +1 -1
  77. mindspore/lib/plugin/ascend/custom_ascendc_ops/op_api/include/aclnn_prompt_kv_cache.h +1 -1
  78. mindspore/lib/plugin/ascend/custom_ascendc_ops/op_api/lib/libcust_opapi.so +0 -0
  79. mindspore/lib/plugin/ascend/custom_ascendc_ops/op_impl/ai_core/tbe/config/ascend310p/aic-ascend310p-ops-info.json +12 -184
  80. mindspore/lib/plugin/ascend/custom_ascendc_ops/op_impl/ai_core/tbe/config/ascend910/aic-ascend910-ops-info.json +15 -7
  81. mindspore/lib/plugin/ascend/custom_ascendc_ops/op_impl/ai_core/tbe/config/ascend910b/aic-ascend910b-ops-info.json +15 -7
  82. mindspore/lib/plugin/ascend/custom_ascendc_ops/op_impl/ai_core/tbe/custom_ascendc_ops_impl/dynamic/add_custom.cpp +81 -0
  83. mindspore/lib/plugin/ascend/custom_ascendc_ops/op_impl/ai_core/tbe/custom_ascendc_ops_impl/dynamic/add_custom.py +134 -0
  84. mindspore/lib/plugin/ascend/custom_ascendc_ops/op_impl/ai_core/tbe/custom_ascendc_ops_impl/dynamic/decoder_kv_cache.py +31 -77
  85. mindspore/lib/plugin/ascend/custom_ascendc_ops/op_impl/ai_core/tbe/custom_ascendc_ops_impl/dynamic/prompt_kv_cache.py +31 -77
  86. mindspore/lib/plugin/ascend/custom_ascendc_ops/op_impl/ai_core/tbe/op_tiling/lib/linux/aarch64/libcust_opmaster_rt2.0.so +0 -0
  87. mindspore/lib/plugin/ascend/custom_ascendc_ops/op_impl/ai_core/tbe/op_tiling/liboptiling.so +0 -0
  88. mindspore/lib/plugin/ascend/custom_ascendc_ops/op_proto/inc/op_proto.h +5 -4
  89. mindspore/lib/plugin/ascend/custom_ascendc_ops/op_proto/lib/linux/aarch64/libcust_opsproto_rt2.0.so +0 -0
  90. mindspore/lib/plugin/ascend/libascend_collective.so +0 -0
  91. mindspore/lib/plugin/ascend/libdvpp_utils.so +0 -0
  92. mindspore/lib/plugin/ascend/libhccl_plugin.so +0 -0
  93. mindspore/lib/plugin/ascend/liblowlatency_collective.so +0 -0
  94. mindspore/lib/plugin/ascend/libmindspore_cpu_kernels.so +0 -0
  95. mindspore/lib/plugin/ascend/ms_kernels_internal/asdops/bin/DeviceBin +0 -0
  96. mindspore/lib/plugin/ascend/ms_kernels_internal/asdops/bin/PkgInspect +0 -0
  97. mindspore/lib/plugin/ascend/ms_kernels_internal/asdops/bin/op_man +0 -0
  98. mindspore/lib/plugin/ascend/ms_kernels_internal/asdops/device/ascend910b/bin/ascend910b.bin +286 -275
  99. mindspore/lib/plugin/ascend/ms_kernels_internal/asdops/host/libasdops_cann_host.so +0 -0
  100. mindspore/lib/plugin/ascend/ms_kernels_internal/asdops/host/libasdops_host.so +0 -0
  101. mindspore/lib/plugin/ascend/ms_kernels_internal/asdops/lib/libasdops.so +0 -0
  102. mindspore/lib/plugin/ascend/ms_kernels_internal/asdops/lib/libasdops_static.a +0 -0
  103. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/add/add_impl.h +0 -1
  104. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/apply_rotary_pos_emb/apply_rotary_pos_emb_impl.h +0 -1
  105. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/asdop/asd_op_impl.h +0 -3
  106. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/backend_param.h +0 -5
  107. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/cast/cast_tiling.h +45 -1
  108. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/compare/compare_impl.h +0 -1
  109. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/flash_attention_score/flash_attention_score_impl.h +4 -8
  110. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/flash_attention_score/flash_attention_score_tiling.h +4 -11
  111. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/flash_attention_score/kernel/flash_attention_score_mix_hwsync.h +0 -18
  112. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/internal_kernel.h +0 -6
  113. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/internal_rtbackend.h +75 -1
  114. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/matmul/kernel/matmul.h +5 -5
  115. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/matmul/matmul_impl.h +3 -18
  116. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/matmul_common/pp_matmul_common_tiling.h +5 -5
  117. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/matmul_common/pp_matmul_info.h +2 -2
  118. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/matmul_common/tiling_data.h +3 -36
  119. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/matmul_stridedslice/kernel/matmul_stridedslice_fusion.h +2 -2
  120. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/matmul_stridedslice/matmul_stridedslice_fusion_impl.h +4 -22
  121. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/op_param.h +2 -16
  122. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/paged_attention/kernel/paged_attention_mix_hwsync.h +3 -1
  123. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/paged_attention/paged_attention_impl.h +4 -5
  124. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/paged_attention/paged_attention_tiling.h +4 -9
  125. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/param/attention_param.h +2 -5
  126. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/param/matmul_ext_param.h +0 -1
  127. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/param/matmul_qkv_param.h +4 -10
  128. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/param/sub_param.h +12 -0
  129. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/rms_norm/rms_norm_impl.h +0 -1
  130. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/sub/sub_impl.h +0 -1
  131. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/tune_repo/matmul_table.h +1 -1
  132. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/utils/backend.h +2 -10
  133. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/utils/elewise_utils.h +1 -5
  134. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/utils/log/log.h +0 -1
  135. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/utils/log/log_tiling.h +0 -17
  136. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/utils/math.h +7 -2
  137. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/lib/libAdd_impl.so +0 -0
  138. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/lib/libSub_impl.so +0 -0
  139. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/lib/libadd_layernorm_impl.so +0 -0
  140. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/lib/libadd_rms_norm_impl.so +0 -0
  141. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/lib/libapply_rotary_pos_emb_impl.so +0 -0
  142. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/lib/libcast_impl.so +0 -0
  143. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/lib/libgelu_impl.so +0 -0
  144. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/lib/libmatmul_impl.so +0 -0
  145. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/lib/libmatmul_stridedslice_fusion_impl.so +0 -0
  146. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/lib/libms_kernels_internal.so +0 -0
  147. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/lib/libnot_equal_impl.so +0 -0
  148. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/lib/libreshape_and_cache_impl.so +0 -0
  149. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/lib/librms_norm_impl.so +0 -0
  150. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/op_kernels/ascend910b/BSAttention/flash_attention_score_bf16_bnsd_full_mix.o +0 -0
  151. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/op_kernels/ascend910b/BSAttention/flash_attention_score_bf16_bnsd_tri_mix.o +0 -0
  152. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/op_kernels/ascend910b/BSAttention/flash_attention_score_bf16_bsh_full_mix.o +0 -0
  153. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/op_kernels/ascend910b/BSAttention/flash_attention_score_bf16_bsh_tri_mix.o +0 -0
  154. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/op_kernels/ascend910b/BSAttention/flash_attention_score_fp16_bnsd_full_mix.o +0 -0
  155. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/op_kernels/ascend910b/BSAttention/flash_attention_score_fp16_bnsd_tri_mix.o +0 -0
  156. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/op_kernels/ascend910b/BSAttention/flash_attention_score_fp16_bsh_full_mix.o +0 -0
  157. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/op_kernels/ascend910b/BSAttention/flash_attention_score_fp16_bsh_tri_mix.o +0 -0
  158. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/op_kernels/ascend910b/BSAttention/paged_attention_bf16_bnsd_full_mix.o +0 -0
  159. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/op_kernels/ascend910b/BSAttention/paged_attention_bf16_bsh_full_mix.o +0 -0
  160. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/op_kernels/ascend910b/BSAttention/paged_attention_fp16_bnsd_full_mix.o +0 -0
  161. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/op_kernels/ascend910b/BSAttention/paged_attention_fp16_bsh_full_mix.o +0 -0
  162. mindspore/lib/plugin/ascend/ms_kernels_internal/lccl/lib/liblcal.so +0 -0
  163. mindspore/lib/plugin/ascend/ms_kernels_internal/lccl/lib/liblccl_wrapper.so +0 -0
  164. mindspore/lib/plugin/libmindspore_ascend.so.2 +0 -0
  165. mindspore/mindrecord/filewriter.py +2 -2
  166. mindspore/mint/__init__.py +40 -720
  167. mindspore/mint/nn/__init__.py +7 -89
  168. mindspore/mint/nn/functional.py +16 -165
  169. mindspore/mint/optim/adamw.py +16 -15
  170. mindspore/nn/__init__.py +2 -0
  171. mindspore/nn/cell.py +98 -97
  172. mindspore/nn/extend/basic.py +2 -2
  173. mindspore/nn/extend/embedding.py +1 -1
  174. mindspore/nn/extend/layer/normalization.py +5 -7
  175. mindspore/nn/generator.py +297 -0
  176. mindspore/nn/layer/activation.py +3 -4
  177. mindspore/nn/layer/basic.py +16 -79
  178. mindspore/nn/layer/conv.py +8 -17
  179. mindspore/nn/layer/embedding.py +4 -1
  180. mindspore/nn/layer/math.py +1 -1
  181. mindspore/nn/layer/normalization.py +1 -1
  182. mindspore/nn/layer/pooling.py +0 -5
  183. mindspore/nn/layer/rnn_cells.py +2 -2
  184. mindspore/nn/loss/loss.py +19 -19
  185. mindspore/nn/optim/adasum.py +1 -1
  186. mindspore/nn/optim/sgd.py +2 -3
  187. mindspore/nn/probability/distribution/exponential.py +1 -1
  188. mindspore/nn/probability/distribution/geometric.py +1 -1
  189. mindspore/nn/probability/distribution/logistic.py +1 -1
  190. mindspore/nn/wrap/cell_wrapper.py +1 -25
  191. mindspore/nn/wrap/loss_scale.py +1 -24
  192. mindspore/numpy/array_ops.py +1 -5
  193. mindspore/numpy/dtypes.py +3 -3
  194. mindspore/numpy/math_ops.py +8 -8
  195. mindspore/ops/__init__.py +1 -1
  196. mindspore/ops/_grad_experimental/grad_comm_ops.py +16 -75
  197. mindspore/ops/_vmap/vmap_array_ops.py +0 -27
  198. mindspore/ops/_vmap/vmap_math_ops.py +1 -29
  199. mindspore/ops/_vmap/vmap_nn_ops.py +18 -19
  200. mindspore/ops/auto_generate/cpp_create_prim_instance_helper.py +8 -34
  201. mindspore/ops/auto_generate/gen_arg_dtype_cast.py +9 -2
  202. mindspore/ops/auto_generate/gen_arg_handler.py +0 -26
  203. mindspore/ops/auto_generate/gen_extend_func.py +27 -603
  204. mindspore/ops/auto_generate/gen_ops_def.py +203 -993
  205. mindspore/ops/auto_generate/gen_ops_prim.py +402 -1946
  206. mindspore/ops/auto_generate/pyboost_inner_prim.py +20 -90
  207. mindspore/ops/composite/base.py +6 -3
  208. mindspore/ops/composite/math_ops.py +1 -1
  209. mindspore/ops/composite/multitype_ops/_compile_utils.py +17 -24
  210. mindspore/ops/composite/multitype_ops/_constexpr_utils.py +1 -1
  211. mindspore/ops/extend/__init__.py +3 -2
  212. mindspore/ops/extend/array_func.py +51 -10
  213. mindspore/ops/extend/nn_func.py +78 -2
  214. mindspore/ops/function/__init__.py +13 -8
  215. mindspore/ops/function/array_func.py +179 -455
  216. mindspore/ops/function/clip_func.py +1 -1
  217. mindspore/ops/function/grad/grad_func.py +3 -3
  218. mindspore/ops/function/math_func.py +103 -117
  219. mindspore/ops/function/nn_func.py +163 -275
  220. mindspore/ops/function/other_func.py +2 -2
  221. mindspore/ops/function/random_func.py +69 -202
  222. mindspore/ops/function/sparse_func.py +4 -4
  223. mindspore/ops/functional.py +327 -332
  224. mindspore/ops/operations/__init__.py +3 -13
  225. mindspore/ops/operations/_grad_ops.py +27 -3
  226. mindspore/ops/operations/_inner_ops.py +356 -53
  227. mindspore/ops/operations/_rl_inner_ops.py +2 -2
  228. mindspore/ops/operations/_tensor_array.py +8 -8
  229. mindspore/ops/operations/array_ops.py +65 -82
  230. mindspore/ops/operations/comm_ops.py +93 -784
  231. mindspore/ops/operations/custom_ops.py +28 -51
  232. mindspore/ops/operations/debug_ops.py +4 -4
  233. mindspore/ops/operations/inner_ops.py +2 -2
  234. mindspore/ops/operations/manually_defined/ops_def.py +4 -304
  235. mindspore/ops/operations/math_ops.py +50 -3
  236. mindspore/ops/operations/nn_ops.py +247 -14
  237. mindspore/ops/operations/other_ops.py +3 -3
  238. mindspore/ops/operations/random_ops.py +1 -1
  239. mindspore/ops/operations/sparse_ops.py +1 -1
  240. mindspore/ops/primitive.py +8 -9
  241. mindspore/ops/silent_check.py +5 -5
  242. mindspore/ops_generate/arg_dtype_cast.py +9 -2
  243. mindspore/ops_generate/arg_handler.py +0 -26
  244. mindspore/ops_generate/gen_aclnn_implement.py +4 -1
  245. mindspore/ops_generate/gen_ops.py +4 -26
  246. mindspore/ops_generate/gen_pyboost_func.py +12 -41
  247. mindspore/ops_generate/gen_utils.py +0 -21
  248. mindspore/ops_generate/pyboost_utils.py +2 -7
  249. mindspore/ops_generate/template.py +0 -1
  250. mindspore/parallel/_auto_parallel_context.py +1 -21
  251. mindspore/parallel/_tensor.py +5 -0
  252. mindspore/parallel/_transformer/transformer.py +1 -1
  253. mindspore/parallel/_utils.py +1 -15
  254. mindspore/parallel/algo_parameter_config.py +3 -1
  255. mindspore/parallel/checkpoint_transform.py +9 -12
  256. mindspore/parallel/cluster/process_entity/_api.py +29 -28
  257. mindspore/parallel/cluster/process_entity/_utils.py +3 -13
  258. mindspore/parallel/cluster/run.py +16 -13
  259. mindspore/parallel/parameter_broadcast.py +2 -2
  260. mindspore/parallel/shard.py +17 -31
  261. mindspore/profiler/__init__.py +2 -3
  262. mindspore/profiler/common/util.py +2 -107
  263. mindspore/profiler/envprofiling.py +1 -1
  264. mindspore/profiler/parser/ascend_analysis/constant.py +21 -8
  265. mindspore/profiler/parser/ascend_analysis/file_manager.py +0 -82
  266. mindspore/profiler/parser/ascend_analysis/function_event.py +28 -43
  267. mindspore/profiler/parser/ascend_analysis/fwk_cann_parser.py +27 -49
  268. mindspore/profiler/parser/ascend_analysis/fwk_file_parser.py +10 -15
  269. mindspore/profiler/parser/ascend_analysis/msprof_timeline_parser.py +20 -25
  270. mindspore/profiler/parser/ascend_analysis/profiler_info_parser.py +5 -5
  271. mindspore/profiler/parser/ascend_analysis/trace_event_manager.py +1 -10
  272. mindspore/profiler/parser/ascend_hccl_generator.py +1 -4
  273. mindspore/profiler/parser/ascend_msprof_exporter.py +22 -43
  274. mindspore/profiler/parser/ascend_timeline_generator.py +5 -7
  275. mindspore/profiler/parser/minddata_parser.py +3 -72
  276. mindspore/profiler/profiling.py +59 -176
  277. mindspore/rewrite/api/node.py +1 -1
  278. mindspore/rewrite/common/namespace.py +5 -5
  279. mindspore/rewrite/parsers/assign_parser.py +0 -2
  280. mindspore/rewrite/parsers/class_def_parser.py +4 -8
  281. mindspore/run_check/_check_version.py +1 -1
  282. mindspore/scipy/fft.py +3 -1
  283. mindspore/scipy/linalg.py +3 -2
  284. mindspore/scipy/ops.py +3 -5
  285. mindspore/scipy/optimize/__init__.py +2 -2
  286. mindspore/train/__init__.py +4 -4
  287. mindspore/train/anf_ir_pb2.py +2 -8
  288. mindspore/train/callback/__init__.py +2 -5
  289. mindspore/train/callback/_backup_and_restore.py +2 -2
  290. mindspore/train/callback/_checkpoint.py +16 -104
  291. mindspore/train/callback/_landscape.py +1 -1
  292. mindspore/train/callback/_time_monitor.py +1 -1
  293. mindspore/train/data_sink.py +4 -5
  294. mindspore/train/dataset_helper.py +20 -45
  295. mindspore/train/model.py +38 -266
  296. mindspore/train/serialization.py +105 -256
  297. mindspore/train/summary/_summary_adapter.py +1 -1
  298. mindspore/version.py +1 -1
  299. {mindspore-2.3.0.dist-info → mindspore-2.3.0rc2.dist-info}/METADATA +2 -2
  300. {mindspore-2.3.0.dist-info → mindspore-2.3.0rc2.dist-info}/RECORD +303 -420
  301. mindspore/_extends/pijit/__init__.py +0 -23
  302. mindspore/_extends/pijit/pijit_func_white_list.py +0 -343
  303. mindspore/common/file_system.py +0 -48
  304. mindspore/common/generator.py +0 -260
  305. mindspore/common/no_inline.py +0 -54
  306. mindspore/common/np_dtype.py +0 -25
  307. mindspore/communication/comm_func.py +0 -1140
  308. mindspore/hal/memory.py +0 -326
  309. mindspore/lib/libavcodec.so.59 +0 -0
  310. mindspore/lib/libavdevice.so.59 +0 -0
  311. mindspore/lib/libavfilter.so.8 +0 -0
  312. mindspore/lib/libavformat.so.59 +0 -0
  313. mindspore/lib/libavutil.so.57 +0 -0
  314. mindspore/lib/libmindspore_np_dtype.so +0 -0
  315. mindspore/lib/libswresample.so.4 +0 -0
  316. mindspore/lib/libswscale.so.6 +0 -0
  317. mindspore/lib/plugin/ascend/custom_ascendc_ops/op_impl/ai_core/tbe/custom_ascendc_ops_impl/dynamic/all_finite.cpp +0 -326
  318. mindspore/lib/plugin/ascend/custom_ascendc_ops/op_impl/ai_core/tbe/custom_ascendc_ops_impl/dynamic/all_finite.py +0 -180
  319. mindspore/lib/plugin/ascend/custom_ascendc_ops/op_impl/ai_core/tbe/kernel/ascend910b/all_finite/AllFinite_576ceaeef5870c451cab59af55ea46ad.json +0 -58
  320. mindspore/lib/plugin/ascend/custom_ascendc_ops/op_impl/ai_core/tbe/kernel/ascend910b/all_finite/AllFinite_576ceaeef5870c451cab59af55ea46ad.o +0 -0
  321. mindspore/lib/plugin/ascend/custom_ascendc_ops/op_impl/ai_core/tbe/kernel/ascend910b/all_finite/AllFinite_86a73ff6e28d734c96bb8d3054f7dd18.json +0 -58
  322. mindspore/lib/plugin/ascend/custom_ascendc_ops/op_impl/ai_core/tbe/kernel/ascend910b/all_finite/AllFinite_86a73ff6e28d734c96bb8d3054f7dd18.o +0 -0
  323. mindspore/lib/plugin/ascend/custom_ascendc_ops/op_impl/ai_core/tbe/kernel/ascend910b/all_finite/AllFinite_f55e0ebaad1f2f572e43677336992fa0.json +0 -58
  324. mindspore/lib/plugin/ascend/custom_ascendc_ops/op_impl/ai_core/tbe/kernel/ascend910b/all_finite/AllFinite_f55e0ebaad1f2f572e43677336992fa0.o +0 -0
  325. mindspore/lib/plugin/ascend/custom_ascendc_ops/op_impl/ai_core/tbe/kernel/config/ascend910b/all_finite.json +0 -109
  326. mindspore/lib/plugin/ascend/custom_ascendc_ops/op_impl/ai_core/tbe/kernel/config/ascend910b/binary_info_config.json +0 -38
  327. mindspore/lib/plugin/ascend/custom_compiler/OWNERS +0 -12
  328. mindspore/lib/plugin/ascend/custom_compiler/setup.py +0 -255
  329. mindspore/lib/plugin/ascend/custom_compiler/start.sh +0 -26
  330. mindspore/lib/plugin/ascend/custom_compiler/template.json +0 -40
  331. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/acme/include/acme.h +0 -24
  332. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/acme/include/acme_op.h +0 -69
  333. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/acme/include/base_type.h +0 -133
  334. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/acme/include/op_creator.h +0 -32
  335. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/acme/include/op_param.h +0 -35
  336. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/acme/include/tiling_info.h +0 -60
  337. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/acme/src/core/kernel_register.h +0 -37
  338. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/acme/src/core/platform/platform_configs.h +0 -89
  339. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/acme/src/core/platform/rt_funcs.h +0 -135
  340. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/acme/src/ops/host_src/add_op.h +0 -34
  341. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/acme/src/ops/host_src/asd_backoff_base.h +0 -62
  342. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/acme/src/ops/host_src/asd_elewise_op.h +0 -33
  343. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/acme/src/ops/host_src/asd_ops.h +0 -88
  344. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/acme/src/ops/host_src/asd_pa_op.h +0 -45
  345. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/acme/src/ops/host_src/cast_op.h +0 -52
  346. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/acme/src/ops/host_src/matmul_op.h +0 -95
  347. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/acme/src/utils/asd_utils.h +0 -84
  348. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/acme/src/utils/comm_utils.h +0 -61
  349. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/apply_rotary_pos_emb/kernel/apply_rotary_pos_emb_fp32.h +0 -224
  350. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/elewise_binary/and_impl.h +0 -29
  351. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/elewise_binary/div_impl.h +0 -29
  352. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/elewise_binary/elewise_binary_impl.h +0 -48
  353. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/elewise_binary/elewise_binary_tiling.h +0 -25
  354. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/elewise_binary/kernel/and_kernel.h +0 -46
  355. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/elewise_binary/kernel/div_kernel.h +0 -46
  356. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/elewise_binary/kernel/elewise_binary_base.h +0 -260
  357. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/elewise_binary/kernel/elewise_binary_kernel.h +0 -35
  358. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/elewise_binary/kernel/max_kernel.h +0 -66
  359. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/elewise_binary/kernel/min_kernel.h +0 -66
  360. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/elewise_binary/kernel/mul_kernel.h +0 -66
  361. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/elewise_binary/kernel/or_kernel.h +0 -46
  362. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/elewise_binary/max_impl.h +0 -29
  363. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/elewise_binary/min_impl.h +0 -29
  364. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/elewise_binary/mul_impl.h +0 -29
  365. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/elewise_binary/or_impl.h +0 -29
  366. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/elewise_unary/abs_impl.h +0 -29
  367. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/elewise_unary/elewise_unary_impl.h +0 -47
  368. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/elewise_unary/elewise_unary_tiling.h +0 -24
  369. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/elewise_unary/exp_impl.h +0 -29
  370. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/elewise_unary/kernel/abs_kernel.h +0 -45
  371. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/elewise_unary/kernel/elewise_unary_base.h +0 -148
  372. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/elewise_unary/kernel/elewise_unary_kernel.h +0 -31
  373. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/elewise_unary/kernel/exp_kernel.h +0 -45
  374. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/elewise_unary/kernel/ln_kernel.h +0 -45
  375. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/elewise_unary/kernel/not_kernel.h +0 -45
  376. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/elewise_unary/kernel/reciprocal_kernel.h +0 -45
  377. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/elewise_unary/kernel/relu_kernel.h +0 -55
  378. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/elewise_unary/kernel/rsqrt_kernel.h +0 -45
  379. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/elewise_unary/kernel/sqrt_kernel.h +0 -45
  380. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/elewise_unary/ln_impl.h +0 -29
  381. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/elewise_unary/not_impl.h +0 -29
  382. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/elewise_unary/reciprocal_impl.h +0 -29
  383. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/elewise_unary/relu_impl.h +0 -29
  384. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/elewise_unary/rsqrt_impl.h +0 -29
  385. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/elewise_unary/sqrt_impl.h +0 -29
  386. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/grouped_matmul/grouped_matmul_impl.h +0 -45
  387. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/grouped_matmul/grouped_matmul_tiling.h +0 -187
  388. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/grouped_matmul/kernel/grouped_matmul.h +0 -245
  389. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/grouped_matmul/kernel/grouped_matmul_interface.h +0 -24
  390. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/grouped_matmul/kernel/grouped_matmul_utils.h +0 -111
  391. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/grouped_matmul/tiling_data.h +0 -54
  392. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/param/compare_param.h +0 -31
  393. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/param/elewise_param.h +0 -41
  394. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/param/grouped_matmul_param.h +0 -40
  395. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/profiling_util.h +0 -364
  396. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/utils/log/log_utils.h +0 -69
  397. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/utils/register/kernel_creator.h +0 -39
  398. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/utils/register/kernel_registry.h +0 -114
  399. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/utils/utils.h +0 -98
  400. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/op_kernels/ascend910b/MatMulPostFusionMixTactic/matmul_postfusion_mix.json +0 -19
  401. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/op_kernels/ascend910b/MatMulPostFusionMixTactic/matmul_postfusion_mix.o +0 -0
  402. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/op_kernels/ascend910b/MatMulPostFusionMixTactic/matmul_postfusion_mix_mix_aic_0.o +0 -0
  403. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/op_kernels/ascend910b/MatMulPostFusionMixTactic/matmul_postfusion_mix_mix_aiv_0.o +0 -0
  404. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/op_kernels/ascend910b/MultiMatMulPostFusionMixTactic/multi_matmul_postfusion_mix.json +0 -19
  405. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/op_kernels/ascend910b/MultiMatMulPostFusionMixTactic/multi_matmul_postfusion_mix.o +0 -0
  406. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/op_kernels/ascend910b/MultiMatMulPostFusionMixTactic/multi_matmul_postfusion_mix_mix_aic_0.o +0 -0
  407. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/op_kernels/ascend910b/MultiMatMulPostFusionMixTactic/multi_matmul_postfusion_mix_mix_aiv_0.o +0 -0
  408. mindspore/mint/linalg/__init__.py +0 -22
  409. mindspore/nn/layer/embedding_service.py +0 -531
  410. mindspore/nn/layer/embedding_service_layer.py +0 -393
  411. mindspore/ops/function/reshard_func.py +0 -102
  412. mindspore/ops/operations/_infer_ops.py +0 -19
  413. mindspore/ops/operations/reshard_ops.py +0 -53
  414. mindspore/profiler/common/process_pool.py +0 -41
  415. mindspore/profiler/common/singleton.py +0 -28
  416. mindspore/profiler/parser/ascend_integrate_generator.py +0 -42
  417. mindspore/profiler/parser/ascend_memory_generator.py +0 -185
  418. mindspore/train/callback/_cluster_monitor.py +0 -201
  419. mindspore/train/callback/_flops_collector.py +0 -238
  420. mindspore/train/callback/_mindio_ttp.py +0 -443
  421. {mindspore-2.3.0.dist-info → mindspore-2.3.0rc2.dist-info}/WHEEL +0 -0
  422. {mindspore-2.3.0.dist-info → mindspore-2.3.0rc2.dist-info}/entry_points.txt +0 -0
  423. {mindspore-2.3.0.dist-info → mindspore-2.3.0rc2.dist-info}/top_level.txt +0 -0
mindspore/nn/cell.py CHANGED
@@ -34,7 +34,6 @@ from mindspore import _checkparam as Validator
  from mindspore.common import dtype as mstype
  from mindspore.common.api import _cell_graph_executor, _pynative_executor, _get_args_for_run, cells_compile_cache
  from mindspore.common.api import _generate_branch_control_input, _convert_python_data, _get_args_for_run_predict
- from mindspore.common.api import _process_dyn_args, _generate_dyn_compile_args
  from mindspore.common.parameter import Parameter, ParameterTuple
  from mindspore.common.tensor import Tensor
  from mindspore.ops.operations import Cast
@@ -81,7 +80,7 @@ class Cell(Cell_):

  Examples:
  >>> import mindspore.nn as nn
- >>> from mindspore import ops
+ >>> import mindspore.ops as ops
  >>> class MyCell(nn.Cell):
  ... def __init__(self, forward_net):
  ... super(MyCell, self).__init__(auto_prefix=False)
@@ -105,12 +104,9 @@ class Cell(Cell_):
  '_forward_pre_hook', '_forward_hook', '_enable_forward_pre_hook', '_enable_forward_hook',
  '_bprop_debug', '_enable_backward_hook', '_cell_backward_hook', '_is_run', '_param_prefix',
  '_attr_synced', 'pynative', 'requires_grad', 'cell_type']
- total_instance_count = 0

  def __init__(self, auto_prefix=True, flags=None):
  Cell_.__init__(self, self._cell_tag)
- Cell.total_instance_count += 1
- self.instance_count = Cell.total_instance_count
  self._params = OrderedDict()
  self._cells = OrderedDict()
  self._params_list = OrderedDict()
@@ -136,7 +132,6 @@ class Cell(Cell_):
  self.exist_names = set("")
  self.exist_objs = set()
  self.recompute_cell = None
- self.sig = inspect.signature(self.construct)
  init_pipeline()

  # call gc to release GE session resources used by non-used cell objects
@@ -401,9 +396,6 @@ class Cell(Cell_):
  cells_compile_cache.pop(id(self), None)
  if hasattr(self, "compile_cache") and self.compile_cache:
  _cell_graph_executor.del_net_res(self, self.compile_cache)
- if isinstance(self, GraphCell):
- _cell_graph_executor.dec_graph_cell_count()
- Cell.total_instance_count -= 1

  def __delattr__(self, name):
  if name in self._params:
@@ -574,9 +566,8 @@ class Cell(Cell_):
  def shard(self, in_strategy, out_strategy=None, parameter_plan=None, device="Ascend", level=0):
  """
  Defining the input and output layouts of this cell and the parallel strategies of remaining ops will be
- generated by sharding propagation. In PyNative mode, use this method to specify a Cell for distributed
- execution in graph mode. In Graph mode, use this method to specify distribution strategy for a Cell,
- strategy for others will be set by sharding propagation.
+ generated by sharding propagation. In PyNative mode, use this method
+ to specify a Cell for distributed execution in graph mode.
  in_strategy and out_strategy define the input and output layout respectively.
  in_strategy/out_strategy should be a tuple, each element of which corresponds to the desired layout of
  this input/output, and None represents data_parallel,
@@ -584,8 +575,8 @@ class Cell(Cell_):
  The parallel strategies of remaining operators are derived from the strategy specified by the input and output.

  Note:
- If Cell.shard is called, the parallel mode in `set_auto_parallel_context` (parallel_mode) will be set to
- "auto_parallel" and the search mode (search_mode) to "sharding_propagation".
+ Only effective in PYNATIVE_MODE and in either ParallelMode.AUTO_PARALLEL with
+ search_mode in auto_parallel_context set as sharding_propagation.
  If the input contain Parameter, its strategy should be set in `in_strategy`.

  Args:
@@ -607,7 +598,7 @@ class Cell(Cell_):
  use right now. Support [ ``"0"`` , ``"1"`` , ``"2"`` ]. Default: ``0`` .

  Returns:
- Function, return the cell construct function that will be executed under auto parallel process.
+ Cell, the cell itself.

  Examples:
  >>> import mindspore.nn as nn
@@ -625,21 +616,22 @@ class Cell(Cell_):
  ... def __init__(self):
  ... self.block1 = Block()
  ... self.block2 = Block()
- ... self.block2_shard = self.block2.shard(in_strategy=((2, 1),), out_strategy=(None,),
- ... parameter_plan={'self.block2.shard.dense1.weight': (4, 1)})
+ ... self.block2.shard(in_strategy=((2, 1),), out_strategy=(None,),
+ ... parameter_plan={'self.block2.shard.dense1.weight': (4, 1)})
  ... def construct(self, x):
  ... x = self.block1(x)
- ... x = self.block2_shard(x)
+ ... x = self.block2(x)
  ... return x
  """
- if context.get_auto_parallel_context("parallel_mode") not in ["auto_parallel", "semi_auto_parallel"]:
- raise AssertionError(f"Cell shard only supports auto parallel or semi_auto_parallel "
- f"Please check the parallel mode in parallel context.")
+ if context.get_context("mode") != context.PYNATIVE_MODE or \
+ context.get_auto_parallel_context("parallel_mode") not in ["auto_parallel"]:
+ raise AssertionError(f"Cell shard only supports auto parallel under PyNative mode. "
+ f"Please check if you call Cell.shard in the script.")

  shard_fn = Shard()
  fn = shard_fn(self, in_strategy, out_strategy, parameter_plan, device, level)
  object.__setattr__(self, "_shard_fn", fn)
- return fn
+ return self
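
The hunk above changes both the return value and the precondition of Cell.shard: 2.3.0 returns a wrapped construct function and accepts auto_parallel or semi_auto_parallel, while 2.3.0rc2 returns the cell itself and asserts PyNative mode with auto_parallel only. A minimal sketch of the rc2-style call pattern, written for this diff rather than taken from the package, and assuming a properly launched multi-device Ascend job with communication already initialized:

import numpy as np
import mindspore as ms
from mindspore import nn, Tensor

ms.set_context(mode=ms.PYNATIVE_MODE)
ms.set_auto_parallel_context(parallel_mode="auto_parallel", search_mode="sharding_propagation")

class Block(nn.Cell):
    def __init__(self):
        super().__init__()
        self.dense1 = nn.Dense(8, 8)

    def construct(self, x):
        return self.dense1(x)

net = Block()
# rc2: shard() records the strategy on the cell and returns the cell, so it is called directly
net.shard(in_strategy=((2, 1),), out_strategy=(None,))
out = net(Tensor(np.ones((4, 8)), ms.float32))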

  def auto_cast_inputs(self, inputs):
  """
@@ -686,7 +678,7 @@ class Cell(Cell_):
  # Run in Graph mode.
  if os.getenv("MS_JIT") != '0' and context._get_mode() == context.GRAPH_MODE:
  if kwargs:
- bound_arguments = self.sig.bind(*args, **kwargs)
+ bound_arguments = inspect.signature(self.construct).bind(*args, **kwargs)
  bound_arguments.apply_defaults()
  args = bound_arguments.args
  kwargs = bound_arguments.kwargs
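
For reference, the "+" line above re-derives the signature of construct on every call instead of reading the self.sig cached in __init__ (which this side of the diff also removes). A small standalone illustration of what the bind/apply_defaults normalization does, using a hypothetical free function rather than a real Cell:

import inspect

def construct(x, scale=1.0):
    return x * scale

# keyword arguments are folded back into an ordered positional form before graph compilation
bound = inspect.signature(construct).bind(3.0, scale=2.0)
bound.apply_defaults()
print(bound.args, bound.kwargs)  # prints: (3.0, 2.0) {}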
@@ -907,25 +899,14 @@ class Cell(Cell_):
  """
  logger.warning("'set_parallel_input_with_inputs' function is deprecated.")

- def set_inputs(self, *inputs, **kwargs):
+ def set_inputs(self, *inputs):
  """
  Save set inputs for computation graph. The number of inputs should be the same with that of the datasets. When
  using Model for dynamic shape, please make sure that all networks and loss functions passed to the Model are
- configured with set_inputs. The shape of input Tensor can be either dynamic or static.
-
- .. note::
- There are two mode:
-
- - Full mode: arguments will be used as all compile inputs for graph-compiling.
- - Incremental mode: arguments will set to some of the Cell inputs, which will be substituted into the input
- at the corresponding position for graph-compiling.
-
- Only one of inputs or kwargs can be set. Inputs for full mode and kwargs for incremental mode.
+ configured with set_inputs. The inputs can be Tensor of either dynamic or static shape.

  Args:
- inputs (tuple): Full mode arguments.
- kwargs (dict): Incremental mode arguments. The acceptable key is the name of parameter defined
- in `self.construct`.
+ inputs (tuple): Inputs of the Cell object.

  .. warning::
  This is an experimental API that is subject to change or deletion.
@@ -945,27 +926,16 @@ class Cell(Cell_):
  >>> net = ReluNet()
  >>> input_dyn = Tensor(shape=[3, None], dtype=ms.float32)
  >>> net.set_inputs(input_dyn)
- >>> input = Tensor(np.random.random([3, 10]), dtype=ms.float32)
- >>> output = net(input)
- >>>
- >>> net2 = ReluNet()
- >>> net2.set_inputs(x=input_dyn)
- >>> output = net2(input)
+ >>> input1 = Tensor(np.random.random([3, 10]), dtype=ms.float32)
+ >>> output = net(input1)
  """
  if self.grad_ops_label:
  logger.warning(f'For Cell, set_inputs must be set before the gradient function of the network is '
  f'generated.')
- if kwargs and inputs:
- raise ValueError('For Cell, set_inputs should only set inputs or kwargs(inputs: %s, kwargs: %s)!'
- % (inputs, kwargs))
-
- if not kwargs:
- self._dynamic_shape_inputs = inputs
- self._check_construct_args(*inputs)
- if context._get_mode() == context.PYNATIVE_MODE:
- _pynative_executor.set_dynamic_input(self, *self._dynamic_shape_inputs)
- else:
- self._dynamic_shape_inputs = _process_dyn_args(self.construct, kwargs)
+ self._dynamic_shape_inputs = inputs
+ self._check_construct_args(*inputs)
+ if context._get_mode() == context.PYNATIVE_MODE:
+ _pynative_executor.set_dynamic_input(self, *self._dynamic_shape_inputs)
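
The two hunks above drop the keyword ("incremental") form of set_inputs on the rc2 side; only positional inputs are accepted and they are forwarded to _check_construct_args unconditionally. A short sketch of the rc2 usage, mirroring the docstring example rather than copied from the package:

import numpy as np
import mindspore as ms
from mindspore import nn, Tensor

class ReluNet(nn.Cell):
    def __init__(self):
        super().__init__()
        self.relu = nn.ReLU()

    def construct(self, x):
        return self.relu(x)

net = ReluNet()
# a None dimension marks that axis as dynamic; rc2 accepts positional inputs only
input_dyn = Tensor(shape=[3, None], dtype=ms.float32)
net.set_inputs(input_dyn)
output = net(Tensor(np.random.random([3, 10]), ms.float32))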

  def get_inputs(self):
  """
@@ -1000,46 +970,18 @@ class Cell(Cell_):

  return self._dynamic_shape_inputs

- def _check_parameter_consistency(self, set_inputs, net_inputs):
- """Check consistency for parameter."""
- for index, (set_input, net_input) in enumerate(zip(set_inputs, net_inputs)):
- if isinstance(set_input, Tensor):
- if not isinstance(net_input, Tensor):
- raise TypeError(
- f"For 'set_inputs' and tuple(list) in 'set_inputs',the type of {index + 1}th input must "
- f"be Tensor, but got {type(net_input)}.")
- if isinstance(set_input, Parameter) != isinstance(net_input, Parameter):
- raise TypeError(
- f"For 'set_inputs' and tuple(list) in 'set_inputs', the {index + 1}th input must be the same "
- f"as expected, but got expected: {type(set_input)} and input: {type(net_input)}.")
- elif isinstance(set_input, (tuple, list)):
- if not isinstance(net_input, (tuple, list)):
- raise TypeError(
- f"The {index + 1}th input type of 'set_inputs' or tuple(list) in "
- f"'set_inputs' must be tuple or list, but got {type(net_input)}.")
- self._check_parameter_consistency(set_input, net_input)
-
  def _get_compile_args(self, args):
  """Get compile arguments."""
  # this is used only for test
- set_by_auto_dynamic = False
- if is_auto_dynamic():
- if self._dynamic_shape_inputs is None:
- set_by_auto_dynamic = True
- else:
- if isinstance(self._dynamic_shape_inputs, (list, tuple)) and self._dynamic_shape_inputs[0] is None:
- set_by_auto_dynamic = True
- if set_by_auto_dynamic:
+ if is_auto_dynamic() and (self._dynamic_shape_inputs is None or self._dynamic_shape_inputs[0] is None):
  self._dynamic_shape_inputs = convert_inputs_to_dynamic(*args)

  if self._dynamic_shape_inputs is not None:
  logger.debug("Compiled Graph with dynamic shape")
- compile_args = _generate_dyn_compile_args(args, self._dynamic_shape_inputs)
- _cell_graph_executor._graph_executor.check_argument_consistency(compile_args, args, "set_inputs")
- self._check_parameter_consistency(compile_args, args)
- Validator.check_symbolic_shape(compile_args, args)
- self.saved_dynamic_shape = compile_args
- return compile_args
+ self._check_compile_dynamic_shape(self._dynamic_shape_inputs, args)
+ Validator.check_symbolic_shape(self._dynamic_shape_inputs, args)
+ self.saved_dynamic_shape = self._dynamic_shape_inputs
+ return self._dynamic_shape_inputs
  return args

  def compile(self, *args, **kwargs):
@@ -1084,7 +1026,6 @@ class Cell(Cell_):

  def exec_checkpoint_graph(self):
  """Executes GE saving checkpoint graph operation."""
- logger.warning("'exec_checkpoint_graph' function is deprecated.")
  self.add_flags(ge_sync_data=True)
  _cell_graph_executor(self, phase='save')

@@ -2000,11 +1941,11 @@ class Cell(Cell_):
  Note:
  - The `register_forward_pre_hook(hook_fn)` does not work in graph mode or functions decorated with 'jit'.
  - 'hook_fn' must be defined as the following code.
- `cell` is the object of registered Cell. `inputs` is the forward
+ `cell_id` is the information of registered Cell object, including name and ID. `inputs` is the forward
  input objects passed to the Cell. The 'hook_fn' can modify the forward input objects by returning new
  forward input objects.
  - It should have the following signature:
- hook_fn(cell, inputs) -> new input objects or none.
+ hook_fn(cell_id, inputs) -> new input objects or none.
  - In order to prevent running failed when switching to graph mode, it is not recommended to write it in the
  `construct` function of Cell object. In the pynative mode, if the `register_forward_pre_hook` function is
  called in the `construct` function of the Cell object, a hook function will be added at each run time of
@@ -2028,7 +1969,7 @@ class Cell(Cell_):
  >>> import mindspore as ms
  >>> from mindspore import Tensor, nn, ops
  >>> ms.set_context(mode=ms.PYNATIVE_MODE)
- >>> def forward_pre_hook_fn(cell, inputs):
+ >>> def forward_pre_hook_fn(cell_id, inputs):
  ... print("forward inputs: ", inputs)
  ...
  >>> class Net(nn.Cell):
@@ -2074,8 +2015,9 @@ class Cell(Cell_):
  Supported Platforms:
  ``Ascend`` ``GPU`` ``CPU``
  """
+ cell_id = self.cls_name + "(" + str(id(self)) + ")"
  for fn in self._forward_pre_hook.values():
- ret = fn(self, inputs)
+ ret = fn(cell_id, inputs)
  if ret is not None:
  if not isinstance(ret, tuple):
  inputs = (ret,)
@@ -2090,11 +2032,11 @@ class Cell(Cell_):
  Note:
  - The `register_forward_hook(hook_fn)` does not work in graph mode or functions decorated with 'jit'.
  - 'hook_fn' must be defined as the following code.
- `cell` is the object of registered Cell. `inputs` is the forward
+ `cell_id` is the information of registered Cell object, including name and ID. `inputs` is the forward
  input objects passed to the Cell. `output` is the forward output object of the Cell. The 'hook_fn' can
  modify the forward output object by returning new forward output object.
  - It should have the following signature:
- hook_fn(cell, inputs, output) -> new output object or none.
+ hook_fn(cell_id, inputs, output) -> new output object or none.
  - In order to prevent running failed when switching to graph mode, it is not recommended to write it in the
  `construct` function of Cell object. In the pynative mode, if the `register_forward_hook` function is
  called in the `construct` function of the Cell object, a hook function will be added at each run time of
@@ -2118,7 +2060,7 @@ class Cell(Cell_):
  >>> import mindspore as ms
  >>> from mindspore import Tensor, nn, ops
  >>> ms.set_context(mode=ms.PYNATIVE_MODE)
- >>> def forward_hook_fn(cell, inputs, output):
+ >>> def forward_hook_fn(cell_id, inputs, output):
  ... print("forward inputs: ", inputs)
  ... print("forward output: ", output)
  ...
@@ -2167,8 +2109,9 @@ class Cell(Cell_):
  Supported Platforms:
  ``Ascend`` ``GPU`` ``CPU``
  """
+ cell_id = self.cls_name + "(" + str(id(self)) + ")"
  for fn in self._forward_hook.values():
- ret = fn(self, inputs, output)
+ ret = fn(cell_id, inputs, output)
  if ret is not None:
  output = ret
  return output
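
On the rc2 side both forward hooks receive a cell_id string built as cls_name + "(" + str(id(self)) + ")" instead of the Cell object that 2.3.0 passes as the first argument. A hedged sketch of a hook written against the rc2 signature, not part of the package itself:

import numpy as np
import mindspore as ms
from mindspore import nn, Tensor

ms.set_context(mode=ms.PYNATIVE_MODE)

def forward_hook_fn(cell_id, inputs, output):
    # rc2 passes a string such as "ReLU(140335...)", not the Cell instance
    print("hooked cell:", cell_id)
    print("forward inputs:", inputs)
    print("forward output:", output)

net = nn.ReLU()
handle = net.register_forward_hook(forward_hook_fn)
out = net(Tensor(np.array([-1.0, 2.0]), ms.float32))
handle.remove()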
@@ -2483,6 +2426,65 @@ class Cell(Cell_):
  for op in all_ops:
  op.place(role, rank_id)

+ def _check_dynamic_tensor(self, set_input, net_input, index):
+ """
+ Check if tensor is correctly set for dynamic shape.
+
+ Args:
+ set_input (Tensor): Tensor set for dynamic shape.
+ net_input (Tensor): Input tensor of the Cell object.
+ index (int): Tensor index for set inputs.
+ """
+ if not isinstance(net_input, Tensor):
+ raise TypeError(
+ f"For 'set_inputs' and tuple(list) in 'set_inputs',the type of {index + 1}th input must be Tensor, "
+ f"but got {type(net_input)}.")
+ is_param_set_input = isinstance(set_input, Parameter)
+ is_param_net_input = isinstance(net_input, Parameter)
+ if (is_param_set_input and not is_param_net_input) or (is_param_net_input and not is_param_set_input):
+ raise TypeError(
+ f"For 'set_inputs' and tuple(list) in 'set_inputs', the {index + 1}th input must be the same "
+ f"as network's input, but got 'set_inputs': {type(set_input)} and network's input: {type(net_input)}.")
+ if set_input.dtype != net_input.dtype:
+ raise TypeError(
+ f"For 'set_inputs' and tuple(list) in 'set_inputs',the dtype of {index + 1}th input must be the same "
+ f"as network's input, but got 'set_inputs': {set_input.dtype} and network's input: {net_input.dtype}.")
+ if -2 not in set_input.shape:
+ if net_input.dim() != 0 and set_input.dim() != net_input.dim():
+ raise ValueError(
+ f"For 'set_inputs' and tuple(list) in 'set_inputs',the dims of {index + 1}th input must be the "
+ f"same as network's input, but got 'set_inputs': {set_input.dim()} and network's input: "
+ f"{net_input.dim()}.")
+ if not all([ele1 in (-1, ele2) for ele1, ele2 in zip(set_input.shape, net_input.shape)]):
+ raise ValueError(
+ f"For 'set_inputs' and tuple(list) in 'set_inputs',the shape of {index + 1}th input must be the "
+ f"same as network's input, but got 'set_inputs': {set_input.shape} and network's input: "
+ f"{net_input.shape}.")
+
+ def _check_compile_dynamic_shape(self, set_inputs, net_inputs):
+ """
+ Check if graph has been compiled with dynamic shape.
+
+ Args:
+ net_inputs (tuple): Inputs of the Cell object.
+ """
+ if not getattr(set_inputs, '__ms_dynamic_len__', False):
+ set_inputs_len = len(set_inputs)
+ net_inputs_len = len(net_inputs)
+ if set_inputs_len != net_inputs_len:
+ raise ValueError(f"The length of 'set_inputs' or tuple(list) in 'set_inputs' "
+ f"must be equal to network's inputs, but got 'set_inputs': "
+ f"{set_inputs_len} and network's input: {net_inputs_len}.")
+ for index, (set_input, net_input) in enumerate(zip(set_inputs, net_inputs)):
+ if isinstance(set_input, Tensor):
+ self._check_dynamic_tensor(set_input, net_input, index)
+ elif isinstance(set_input, (tuple, list)):
+ if not isinstance(net_input, (tuple, list)):
+ raise TypeError(
+ f"The {index + 1}th input type of 'set_inputs' or tuple(list) in "
+ f"'set_inputs' must be tuple or list, but got {type(net_input)}.")
+ self._check_compile_dynamic_shape(set_input, net_input)
+
  def _mixed_precision_cast(self, inputs):
  mixed_type = self.get_mixed_precision_type()
  if mixed_type == MixedPrecisionType.NOTSET:
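
The rc2-only _check_compile_dynamic_shape above enforces that each Tensor given to set_inputs matches the corresponding network input in dtype and rank, and that every fixed axis length agrees (an axis declared as None/-1 accepts any length, and a -2 in the shape skips the rank and per-axis checks). An illustration of inputs that would pass or trip these checks, written as a sketch rather than a test from the package:

import numpy as np
import mindspore as ms
from mindspore import nn, Tensor

net = nn.ReLU()
net.set_inputs(Tensor(shape=[3, None], dtype=ms.float32))

# passes: same dtype, same rank, fixed first axis matches, dynamic second axis accepts 7
out = net(Tensor(np.ones((3, 7)), ms.float32))

# would be rejected by the dtype check:
# net(Tensor(np.ones((3, 7)), ms.float16))

# would be rejected by the rank check:
# net(Tensor(np.ones((3, 7, 2)), ms.float32))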
@@ -2579,7 +2581,6 @@ class GraphCell(Cell):
  params_dict = update_func_graph_hyper_params(self.graph, params_init)
  for name, param in params_dict.items():
  self._params[name] = param
- _cell_graph_executor.inc_graph_cell_count()

  def construct(self, *inputs):
  return self.graph(*inputs)
mindspore/nn/extend/basic.py CHANGED
@@ -76,10 +76,10 @@ class Linear(Cell):
  Examples:
  >>> import mindspore
  >>> from mindspore import Tensor
- >>> from mindspore import nn
+ >>> from mindspore.nn.extend import Linear
  >>> import numpy as np
  >>> x = Tensor(np.array([[180, 234, 154], [244, 48, 247]]), mindspore.float32)
- >>> net = nn.extend.Linear(3, 4)
+ >>> net = Linear(3, 4)
  >>> output = net(x)
  >>> print(output.shape)
  (2, 4)
mindspore/nn/extend/embedding.py CHANGED
@@ -57,7 +57,7 @@ class Embedding(Cell):
  mindspore.int32 or mindspore.int64, and the value should be in range `[0, num_embeddings)`.

  Outputs:
- Tensor, has the same data type as weight, the shape is :math:`(*input.shape, embedding\_dim)`.
+ Tensor, has the same data type as weight, the shape is :math:`(*input.shape, embedding_dim)`.

  Raises:
  TypeError: If `num_embeddings` is not an int.
mindspore/nn/extend/layer/normalization.py CHANGED
@@ -48,8 +48,7 @@ class LayerNorm(Cell):
  beta_init (Union[Tensor, str, Initializer, numbers.Number]): Initializer for the :math:`\beta` weight.
  The values of str refer to the function `initializer` including ``'zeros'`` , ``'ones'`` ,
  ``'xavier_uniform'`` , ``'he_uniform'`` , etc. Default: ``'zeros'`` .
- eps (float): A value added to the denominator for numerical stability(:math:`\epsilon`). Default: ``1e-5`` .
- elementwise_affine (bool): A bool value, When set to True, gamma and beta can be learned. Default: True.
+ epsilon (float): A value added to the denominator for numerical stability(:math:`\epsilon`). Default: ``1e-5`` .
  dtype (:class:`mindspore.dtype`): Dtype of Parameters. Default: ``mstype.float32`` .

  Inputs:
@@ -79,8 +78,7 @@
  normalized_shape,
  gamma_init='ones',
  beta_init='zeros',
- eps=1e-5,
- elementwise_affine=True,
+ epsilon=1e-5,
  dtype=mstype.float32
  ):
  """Initialize LayerNorm."""
@@ -94,11 +92,11 @@
  f"least one element, but got normalized_shape = {normalized_shape}"
  )
  self.normalized_shape = normalized_shape
- self.epsilon = eps
+ self.epsilon = epsilon
  self.gamma = Parameter(initializer(
- gamma_init, normalized_shape, dtype=dtype), name="gamma", requires_grad=elementwise_affine)
+ gamma_init, normalized_shape, dtype=dtype), name="gamma")
  self.beta = Parameter(initializer(
- beta_init, normalized_shape, dtype=dtype), name="beta", requires_grad=elementwise_affine)
+ beta_init, normalized_shape, dtype=dtype), name="beta")

  def construct(self, input_x):
  y = F.layer_norm(input_x, self.normalized_shape, self.gamma.astype(input_x.dtype),
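
The three hunks above show that the extend LayerNorm keyword is epsilon on the rc2 side (2.3.0 renames it to eps and adds elementwise_affine), and that rc2 always creates learnable gamma/beta parameters. A hedged construction sketch; the import path is an assumption based on the Linear example earlier in this diff and may need adjusting to the actual module layout:

import numpy as np
import mindspore as ms
from mindspore import Tensor
from mindspore.nn.extend import LayerNorm  # assumed export, mirroring the Linear example

x = Tensor(np.ones((2, 4)), ms.float32)

# 2.3.0rc2 signature (the "+" side of this diff)
ln = LayerNorm(normalized_shape=(4,), epsilon=1e-5)

# the 2.3.0 signature would instead be:
# ln = LayerNorm(normalized_shape=(4,), eps=1e-5, elementwise_affine=True)

print(ln(x).shape)  # expected: (2, 4)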