mindspore 2.3.0__cp39-none-any.whl → 2.3.0rc2__cp39-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of mindspore might be problematic.

Files changed (423)
  1. mindspore/.commit_id +1 -1
  2. mindspore/Third_Party_Open_Source_Software_Notice +0 -1512
  3. mindspore/__init__.py +1 -2
  4. mindspore/_c_dataengine.cpython-39-aarch64-linux-gnu.so +0 -0
  5. mindspore/_c_expression.cpython-39-aarch64-linux-gnu.so +0 -0
  6. mindspore/_c_mindrecord.cpython-39-aarch64-linux-gnu.so +0 -0
  7. mindspore/_checkparam.py +25 -5
  8. mindspore/_extends/graph_kernel/model/graph_parallel.py +1 -1
  9. mindspore/_extends/parse/__init__.py +2 -2
  10. mindspore/_extends/parse/compile_config.py +0 -29
  11. mindspore/_extends/parse/namespace.py +2 -2
  12. mindspore/_extends/parse/parser.py +5 -21
  13. mindspore/_extends/parse/resources.py +7 -5
  14. mindspore/_extends/parse/standard_method.py +59 -40
  15. mindspore/_mindspore_offline_debug.cpython-39-aarch64-linux-gnu.so +0 -0
  16. mindspore/amp.py +5 -26
  17. mindspore/bin/cache_admin +0 -0
  18. mindspore/bin/cache_server +0 -0
  19. mindspore/boost/adasum.py +1 -1
  20. mindspore/boost/base.py +1 -1
  21. mindspore/boost/boost_cell_wrapper.py +1 -1
  22. mindspore/boost/grad_freeze.py +2 -2
  23. mindspore/boost/less_batch_normalization.py +6 -9
  24. mindspore/common/__init__.py +1 -8
  25. mindspore/common/_register_for_tensor.py +9 -8
  26. mindspore/common/api.py +65 -275
  27. mindspore/common/dtype.py +4 -8
  28. mindspore/common/dump.py +5 -2
  29. mindspore/common/jit_config.py +1 -1
  30. mindspore/common/lazy_inline.py +2 -14
  31. mindspore/common/parameter.py +15 -14
  32. mindspore/common/recompute.py +5 -20
  33. mindspore/common/sparse_tensor.py +6 -21
  34. mindspore/common/tensor.py +52 -100
  35. mindspore/communication/__init__.py +11 -6
  36. mindspore/communication/management.py +94 -92
  37. mindspore/context.py +18 -180
  38. mindspore/dataset/engine/datasets.py +46 -69
  39. mindspore/dataset/engine/datasets_user_defined.py +53 -72
  40. mindspore/dataset/engine/datasets_vision.py +2 -2
  41. mindspore/dataset/engine/queue.py +38 -56
  42. mindspore/dataset/engine/validators.py +5 -11
  43. mindspore/dataset/vision/__init__.py +5 -5
  44. mindspore/dataset/vision/c_transforms.py +5 -5
  45. mindspore/dataset/vision/py_transforms_util.py +1 -1
  46. mindspore/dataset/vision/transforms.py +46 -591
  47. mindspore/dataset/vision/utils.py +1 -121
  48. mindspore/dataset/vision/validators.py +3 -9
  49. mindspore/hal/__init__.py +1 -7
  50. mindspore/hal/device.py +1 -1
  51. mindspore/include/api/model.h +0 -3
  52. mindspore/include/dataset/vision.h +2 -54
  53. mindspore/include/mindapi/base/types.h +0 -1
  54. mindspore/lib/libdnnl.so.2 +0 -0
  55. mindspore/lib/libmindspore.so +0 -0
  56. mindspore/lib/libmindspore_backend.so +0 -0
  57. mindspore/lib/libmindspore_common.so +0 -0
  58. mindspore/lib/libmindspore_core.so +0 -0
  59. mindspore/lib/libmindspore_glog.so.0 +0 -0
  60. mindspore/lib/libmindspore_gpr.so.15 +0 -0
  61. mindspore/lib/libmindspore_grpc++.so.1 +0 -0
  62. mindspore/lib/libmindspore_grpc.so.15 +0 -0
  63. mindspore/lib/libmindspore_shared_lib.so +0 -0
  64. mindspore/lib/libmpi_adapter.so +0 -0
  65. mindspore/lib/libmpi_collective.so +0 -0
  66. mindspore/lib/libnnacl.so +0 -0
  67. mindspore/lib/libopencv_core.so.4.5 +0 -0
  68. mindspore/lib/libps_cache.so +0 -0
  69. mindspore/lib/plugin/ascend/custom_aicore_ops/op_impl/ai_core/tbe/config/ascend310p/aic-ascend310p-ops-info.json +0 -35
  70. mindspore/lib/plugin/ascend/custom_aicore_ops/op_impl/ai_core/tbe/custom_aicore_ops_impl/kv_cache_mgr.py +0 -2
  71. mindspore/lib/plugin/ascend/custom_aicore_ops/op_impl/vector_core/tbe/custom_aicore_ops_impl/kv_cache_mgr.py +0 -2
  72. mindspore/lib/plugin/ascend/custom_aicpu_ops/op_impl/cpu/aicpu_kernel/impl/libcust_cpu_kernels.so +0 -0
  73. mindspore/lib/plugin/ascend/custom_aicpu_ops/op_impl/cpu/config/cust_aicpu_kernel.json +0 -72
  74. mindspore/lib/plugin/ascend/custom_aicpu_ops/op_proto/libcust_op_proto.so +0 -0
  75. mindspore/lib/plugin/ascend/custom_ascendc_ops/op_api/include/{aclnn_all_finite.h → aclnn_add_custom.h} +11 -9
  76. mindspore/lib/plugin/ascend/custom_ascendc_ops/op_api/include/aclnn_decoder_kv_cache.h +1 -1
  77. mindspore/lib/plugin/ascend/custom_ascendc_ops/op_api/include/aclnn_prompt_kv_cache.h +1 -1
  78. mindspore/lib/plugin/ascend/custom_ascendc_ops/op_api/lib/libcust_opapi.so +0 -0
  79. mindspore/lib/plugin/ascend/custom_ascendc_ops/op_impl/ai_core/tbe/config/ascend310p/aic-ascend310p-ops-info.json +12 -184
  80. mindspore/lib/plugin/ascend/custom_ascendc_ops/op_impl/ai_core/tbe/config/ascend910/aic-ascend910-ops-info.json +15 -7
  81. mindspore/lib/plugin/ascend/custom_ascendc_ops/op_impl/ai_core/tbe/config/ascend910b/aic-ascend910b-ops-info.json +15 -7
  82. mindspore/lib/plugin/ascend/custom_ascendc_ops/op_impl/ai_core/tbe/custom_ascendc_ops_impl/dynamic/add_custom.cpp +81 -0
  83. mindspore/lib/plugin/ascend/custom_ascendc_ops/op_impl/ai_core/tbe/custom_ascendc_ops_impl/dynamic/add_custom.py +134 -0
  84. mindspore/lib/plugin/ascend/custom_ascendc_ops/op_impl/ai_core/tbe/custom_ascendc_ops_impl/dynamic/decoder_kv_cache.py +31 -77
  85. mindspore/lib/plugin/ascend/custom_ascendc_ops/op_impl/ai_core/tbe/custom_ascendc_ops_impl/dynamic/prompt_kv_cache.py +31 -77
  86. mindspore/lib/plugin/ascend/custom_ascendc_ops/op_impl/ai_core/tbe/op_tiling/lib/linux/aarch64/libcust_opmaster_rt2.0.so +0 -0
  87. mindspore/lib/plugin/ascend/custom_ascendc_ops/op_impl/ai_core/tbe/op_tiling/liboptiling.so +0 -0
  88. mindspore/lib/plugin/ascend/custom_ascendc_ops/op_proto/inc/op_proto.h +5 -4
  89. mindspore/lib/plugin/ascend/custom_ascendc_ops/op_proto/lib/linux/aarch64/libcust_opsproto_rt2.0.so +0 -0
  90. mindspore/lib/plugin/ascend/libascend_collective.so +0 -0
  91. mindspore/lib/plugin/ascend/libdvpp_utils.so +0 -0
  92. mindspore/lib/plugin/ascend/libhccl_plugin.so +0 -0
  93. mindspore/lib/plugin/ascend/liblowlatency_collective.so +0 -0
  94. mindspore/lib/plugin/ascend/libmindspore_cpu_kernels.so +0 -0
  95. mindspore/lib/plugin/ascend/ms_kernels_internal/asdops/bin/DeviceBin +0 -0
  96. mindspore/lib/plugin/ascend/ms_kernels_internal/asdops/bin/PkgInspect +0 -0
  97. mindspore/lib/plugin/ascend/ms_kernels_internal/asdops/bin/op_man +0 -0
  98. mindspore/lib/plugin/ascend/ms_kernels_internal/asdops/device/ascend910b/bin/ascend910b.bin +286 -275
  99. mindspore/lib/plugin/ascend/ms_kernels_internal/asdops/host/libasdops_cann_host.so +0 -0
  100. mindspore/lib/plugin/ascend/ms_kernels_internal/asdops/host/libasdops_host.so +0 -0
  101. mindspore/lib/plugin/ascend/ms_kernels_internal/asdops/lib/libasdops.so +0 -0
  102. mindspore/lib/plugin/ascend/ms_kernels_internal/asdops/lib/libasdops_static.a +0 -0
  103. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/add/add_impl.h +0 -1
  104. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/apply_rotary_pos_emb/apply_rotary_pos_emb_impl.h +0 -1
  105. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/asdop/asd_op_impl.h +0 -3
  106. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/backend_param.h +0 -5
  107. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/cast/cast_tiling.h +45 -1
  108. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/compare/compare_impl.h +0 -1
  109. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/flash_attention_score/flash_attention_score_impl.h +4 -8
  110. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/flash_attention_score/flash_attention_score_tiling.h +4 -11
  111. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/flash_attention_score/kernel/flash_attention_score_mix_hwsync.h +0 -18
  112. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/internal_kernel.h +0 -6
  113. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/internal_rtbackend.h +75 -1
  114. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/matmul/kernel/matmul.h +5 -5
  115. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/matmul/matmul_impl.h +3 -18
  116. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/matmul_common/pp_matmul_common_tiling.h +5 -5
  117. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/matmul_common/pp_matmul_info.h +2 -2
  118. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/matmul_common/tiling_data.h +3 -36
  119. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/matmul_stridedslice/kernel/matmul_stridedslice_fusion.h +2 -2
  120. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/matmul_stridedslice/matmul_stridedslice_fusion_impl.h +4 -22
  121. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/op_param.h +2 -16
  122. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/paged_attention/kernel/paged_attention_mix_hwsync.h +3 -1
  123. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/paged_attention/paged_attention_impl.h +4 -5
  124. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/paged_attention/paged_attention_tiling.h +4 -9
  125. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/param/attention_param.h +2 -5
  126. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/param/matmul_ext_param.h +0 -1
  127. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/param/matmul_qkv_param.h +4 -10
  128. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/param/sub_param.h +12 -0
  129. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/rms_norm/rms_norm_impl.h +0 -1
  130. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/sub/sub_impl.h +0 -1
  131. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/tune_repo/matmul_table.h +1 -1
  132. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/utils/backend.h +2 -10
  133. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/utils/elewise_utils.h +1 -5
  134. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/utils/log/log.h +0 -1
  135. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/utils/log/log_tiling.h +0 -17
  136. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/utils/math.h +7 -2
  137. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/lib/libAdd_impl.so +0 -0
  138. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/lib/libSub_impl.so +0 -0
  139. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/lib/libadd_layernorm_impl.so +0 -0
  140. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/lib/libadd_rms_norm_impl.so +0 -0
  141. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/lib/libapply_rotary_pos_emb_impl.so +0 -0
  142. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/lib/libcast_impl.so +0 -0
  143. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/lib/libgelu_impl.so +0 -0
  144. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/lib/libmatmul_impl.so +0 -0
  145. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/lib/libmatmul_stridedslice_fusion_impl.so +0 -0
  146. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/lib/libms_kernels_internal.so +0 -0
  147. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/lib/libnot_equal_impl.so +0 -0
  148. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/lib/libreshape_and_cache_impl.so +0 -0
  149. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/lib/librms_norm_impl.so +0 -0
  150. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/op_kernels/ascend910b/BSAttention/flash_attention_score_bf16_bnsd_full_mix.o +0 -0
  151. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/op_kernels/ascend910b/BSAttention/flash_attention_score_bf16_bnsd_tri_mix.o +0 -0
  152. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/op_kernels/ascend910b/BSAttention/flash_attention_score_bf16_bsh_full_mix.o +0 -0
  153. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/op_kernels/ascend910b/BSAttention/flash_attention_score_bf16_bsh_tri_mix.o +0 -0
  154. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/op_kernels/ascend910b/BSAttention/flash_attention_score_fp16_bnsd_full_mix.o +0 -0
  155. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/op_kernels/ascend910b/BSAttention/flash_attention_score_fp16_bnsd_tri_mix.o +0 -0
  156. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/op_kernels/ascend910b/BSAttention/flash_attention_score_fp16_bsh_full_mix.o +0 -0
  157. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/op_kernels/ascend910b/BSAttention/flash_attention_score_fp16_bsh_tri_mix.o +0 -0
  158. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/op_kernels/ascend910b/BSAttention/paged_attention_bf16_bnsd_full_mix.o +0 -0
  159. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/op_kernels/ascend910b/BSAttention/paged_attention_bf16_bsh_full_mix.o +0 -0
  160. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/op_kernels/ascend910b/BSAttention/paged_attention_fp16_bnsd_full_mix.o +0 -0
  161. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/op_kernels/ascend910b/BSAttention/paged_attention_fp16_bsh_full_mix.o +0 -0
  162. mindspore/lib/plugin/ascend/ms_kernels_internal/lccl/lib/liblcal.so +0 -0
  163. mindspore/lib/plugin/ascend/ms_kernels_internal/lccl/lib/liblccl_wrapper.so +0 -0
  164. mindspore/lib/plugin/libmindspore_ascend.so.2 +0 -0
  165. mindspore/mindrecord/filewriter.py +2 -2
  166. mindspore/mint/__init__.py +40 -720
  167. mindspore/mint/nn/__init__.py +7 -89
  168. mindspore/mint/nn/functional.py +16 -165
  169. mindspore/mint/optim/adamw.py +16 -15
  170. mindspore/nn/__init__.py +2 -0
  171. mindspore/nn/cell.py +98 -97
  172. mindspore/nn/extend/basic.py +2 -2
  173. mindspore/nn/extend/embedding.py +1 -1
  174. mindspore/nn/extend/layer/normalization.py +5 -7
  175. mindspore/nn/generator.py +297 -0
  176. mindspore/nn/layer/activation.py +3 -4
  177. mindspore/nn/layer/basic.py +16 -79
  178. mindspore/nn/layer/conv.py +8 -17
  179. mindspore/nn/layer/embedding.py +4 -1
  180. mindspore/nn/layer/math.py +1 -1
  181. mindspore/nn/layer/normalization.py +1 -1
  182. mindspore/nn/layer/pooling.py +0 -5
  183. mindspore/nn/layer/rnn_cells.py +2 -2
  184. mindspore/nn/loss/loss.py +19 -19
  185. mindspore/nn/optim/adasum.py +1 -1
  186. mindspore/nn/optim/sgd.py +2 -3
  187. mindspore/nn/probability/distribution/exponential.py +1 -1
  188. mindspore/nn/probability/distribution/geometric.py +1 -1
  189. mindspore/nn/probability/distribution/logistic.py +1 -1
  190. mindspore/nn/wrap/cell_wrapper.py +1 -25
  191. mindspore/nn/wrap/loss_scale.py +1 -24
  192. mindspore/numpy/array_ops.py +1 -5
  193. mindspore/numpy/dtypes.py +3 -3
  194. mindspore/numpy/math_ops.py +8 -8
  195. mindspore/ops/__init__.py +1 -1
  196. mindspore/ops/_grad_experimental/grad_comm_ops.py +16 -75
  197. mindspore/ops/_vmap/vmap_array_ops.py +0 -27
  198. mindspore/ops/_vmap/vmap_math_ops.py +1 -29
  199. mindspore/ops/_vmap/vmap_nn_ops.py +18 -19
  200. mindspore/ops/auto_generate/cpp_create_prim_instance_helper.py +8 -34
  201. mindspore/ops/auto_generate/gen_arg_dtype_cast.py +9 -2
  202. mindspore/ops/auto_generate/gen_arg_handler.py +0 -26
  203. mindspore/ops/auto_generate/gen_extend_func.py +27 -603
  204. mindspore/ops/auto_generate/gen_ops_def.py +203 -993
  205. mindspore/ops/auto_generate/gen_ops_prim.py +402 -1946
  206. mindspore/ops/auto_generate/pyboost_inner_prim.py +20 -90
  207. mindspore/ops/composite/base.py +6 -3
  208. mindspore/ops/composite/math_ops.py +1 -1
  209. mindspore/ops/composite/multitype_ops/_compile_utils.py +17 -24
  210. mindspore/ops/composite/multitype_ops/_constexpr_utils.py +1 -1
  211. mindspore/ops/extend/__init__.py +3 -2
  212. mindspore/ops/extend/array_func.py +51 -10
  213. mindspore/ops/extend/nn_func.py +78 -2
  214. mindspore/ops/function/__init__.py +13 -8
  215. mindspore/ops/function/array_func.py +179 -455
  216. mindspore/ops/function/clip_func.py +1 -1
  217. mindspore/ops/function/grad/grad_func.py +3 -3
  218. mindspore/ops/function/math_func.py +103 -117
  219. mindspore/ops/function/nn_func.py +163 -275
  220. mindspore/ops/function/other_func.py +2 -2
  221. mindspore/ops/function/random_func.py +69 -202
  222. mindspore/ops/function/sparse_func.py +4 -4
  223. mindspore/ops/functional.py +327 -332
  224. mindspore/ops/operations/__init__.py +3 -13
  225. mindspore/ops/operations/_grad_ops.py +27 -3
  226. mindspore/ops/operations/_inner_ops.py +356 -53
  227. mindspore/ops/operations/_rl_inner_ops.py +2 -2
  228. mindspore/ops/operations/_tensor_array.py +8 -8
  229. mindspore/ops/operations/array_ops.py +65 -82
  230. mindspore/ops/operations/comm_ops.py +93 -784
  231. mindspore/ops/operations/custom_ops.py +28 -51
  232. mindspore/ops/operations/debug_ops.py +4 -4
  233. mindspore/ops/operations/inner_ops.py +2 -2
  234. mindspore/ops/operations/manually_defined/ops_def.py +4 -304
  235. mindspore/ops/operations/math_ops.py +50 -3
  236. mindspore/ops/operations/nn_ops.py +247 -14
  237. mindspore/ops/operations/other_ops.py +3 -3
  238. mindspore/ops/operations/random_ops.py +1 -1
  239. mindspore/ops/operations/sparse_ops.py +1 -1
  240. mindspore/ops/primitive.py +8 -9
  241. mindspore/ops/silent_check.py +5 -5
  242. mindspore/ops_generate/arg_dtype_cast.py +9 -2
  243. mindspore/ops_generate/arg_handler.py +0 -26
  244. mindspore/ops_generate/gen_aclnn_implement.py +4 -1
  245. mindspore/ops_generate/gen_ops.py +4 -26
  246. mindspore/ops_generate/gen_pyboost_func.py +12 -41
  247. mindspore/ops_generate/gen_utils.py +0 -21
  248. mindspore/ops_generate/pyboost_utils.py +2 -7
  249. mindspore/ops_generate/template.py +0 -1
  250. mindspore/parallel/_auto_parallel_context.py +1 -21
  251. mindspore/parallel/_tensor.py +5 -0
  252. mindspore/parallel/_transformer/transformer.py +1 -1
  253. mindspore/parallel/_utils.py +1 -15
  254. mindspore/parallel/algo_parameter_config.py +3 -1
  255. mindspore/parallel/checkpoint_transform.py +9 -12
  256. mindspore/parallel/cluster/process_entity/_api.py +29 -28
  257. mindspore/parallel/cluster/process_entity/_utils.py +3 -13
  258. mindspore/parallel/cluster/run.py +16 -13
  259. mindspore/parallel/parameter_broadcast.py +2 -2
  260. mindspore/parallel/shard.py +17 -31
  261. mindspore/profiler/__init__.py +2 -3
  262. mindspore/profiler/common/util.py +2 -107
  263. mindspore/profiler/envprofiling.py +1 -1
  264. mindspore/profiler/parser/ascend_analysis/constant.py +21 -8
  265. mindspore/profiler/parser/ascend_analysis/file_manager.py +0 -82
  266. mindspore/profiler/parser/ascend_analysis/function_event.py +28 -43
  267. mindspore/profiler/parser/ascend_analysis/fwk_cann_parser.py +27 -49
  268. mindspore/profiler/parser/ascend_analysis/fwk_file_parser.py +10 -15
  269. mindspore/profiler/parser/ascend_analysis/msprof_timeline_parser.py +20 -25
  270. mindspore/profiler/parser/ascend_analysis/profiler_info_parser.py +5 -5
  271. mindspore/profiler/parser/ascend_analysis/trace_event_manager.py +1 -10
  272. mindspore/profiler/parser/ascend_hccl_generator.py +1 -4
  273. mindspore/profiler/parser/ascend_msprof_exporter.py +22 -43
  274. mindspore/profiler/parser/ascend_timeline_generator.py +5 -7
  275. mindspore/profiler/parser/minddata_parser.py +3 -72
  276. mindspore/profiler/profiling.py +59 -176
  277. mindspore/rewrite/api/node.py +1 -1
  278. mindspore/rewrite/common/namespace.py +5 -5
  279. mindspore/rewrite/parsers/assign_parser.py +0 -2
  280. mindspore/rewrite/parsers/class_def_parser.py +4 -8
  281. mindspore/run_check/_check_version.py +1 -1
  282. mindspore/scipy/fft.py +3 -1
  283. mindspore/scipy/linalg.py +3 -2
  284. mindspore/scipy/ops.py +3 -5
  285. mindspore/scipy/optimize/__init__.py +2 -2
  286. mindspore/train/__init__.py +4 -4
  287. mindspore/train/anf_ir_pb2.py +2 -8
  288. mindspore/train/callback/__init__.py +2 -5
  289. mindspore/train/callback/_backup_and_restore.py +2 -2
  290. mindspore/train/callback/_checkpoint.py +16 -104
  291. mindspore/train/callback/_landscape.py +1 -1
  292. mindspore/train/callback/_time_monitor.py +1 -1
  293. mindspore/train/data_sink.py +4 -5
  294. mindspore/train/dataset_helper.py +20 -45
  295. mindspore/train/model.py +38 -266
  296. mindspore/train/serialization.py +105 -256
  297. mindspore/train/summary/_summary_adapter.py +1 -1
  298. mindspore/version.py +1 -1
  299. {mindspore-2.3.0.dist-info → mindspore-2.3.0rc2.dist-info}/METADATA +2 -2
  300. {mindspore-2.3.0.dist-info → mindspore-2.3.0rc2.dist-info}/RECORD +303 -420
  301. mindspore/_extends/pijit/__init__.py +0 -23
  302. mindspore/_extends/pijit/pijit_func_white_list.py +0 -343
  303. mindspore/common/file_system.py +0 -48
  304. mindspore/common/generator.py +0 -260
  305. mindspore/common/no_inline.py +0 -54
  306. mindspore/common/np_dtype.py +0 -25
  307. mindspore/communication/comm_func.py +0 -1140
  308. mindspore/hal/memory.py +0 -326
  309. mindspore/lib/libavcodec.so.59 +0 -0
  310. mindspore/lib/libavdevice.so.59 +0 -0
  311. mindspore/lib/libavfilter.so.8 +0 -0
  312. mindspore/lib/libavformat.so.59 +0 -0
  313. mindspore/lib/libavutil.so.57 +0 -0
  314. mindspore/lib/libmindspore_np_dtype.so +0 -0
  315. mindspore/lib/libswresample.so.4 +0 -0
  316. mindspore/lib/libswscale.so.6 +0 -0
  317. mindspore/lib/plugin/ascend/custom_ascendc_ops/op_impl/ai_core/tbe/custom_ascendc_ops_impl/dynamic/all_finite.cpp +0 -326
  318. mindspore/lib/plugin/ascend/custom_ascendc_ops/op_impl/ai_core/tbe/custom_ascendc_ops_impl/dynamic/all_finite.py +0 -180
  319. mindspore/lib/plugin/ascend/custom_ascendc_ops/op_impl/ai_core/tbe/kernel/ascend910b/all_finite/AllFinite_576ceaeef5870c451cab59af55ea46ad.json +0 -58
  320. mindspore/lib/plugin/ascend/custom_ascendc_ops/op_impl/ai_core/tbe/kernel/ascend910b/all_finite/AllFinite_576ceaeef5870c451cab59af55ea46ad.o +0 -0
  321. mindspore/lib/plugin/ascend/custom_ascendc_ops/op_impl/ai_core/tbe/kernel/ascend910b/all_finite/AllFinite_86a73ff6e28d734c96bb8d3054f7dd18.json +0 -58
  322. mindspore/lib/plugin/ascend/custom_ascendc_ops/op_impl/ai_core/tbe/kernel/ascend910b/all_finite/AllFinite_86a73ff6e28d734c96bb8d3054f7dd18.o +0 -0
  323. mindspore/lib/plugin/ascend/custom_ascendc_ops/op_impl/ai_core/tbe/kernel/ascend910b/all_finite/AllFinite_f55e0ebaad1f2f572e43677336992fa0.json +0 -58
  324. mindspore/lib/plugin/ascend/custom_ascendc_ops/op_impl/ai_core/tbe/kernel/ascend910b/all_finite/AllFinite_f55e0ebaad1f2f572e43677336992fa0.o +0 -0
  325. mindspore/lib/plugin/ascend/custom_ascendc_ops/op_impl/ai_core/tbe/kernel/config/ascend910b/all_finite.json +0 -109
  326. mindspore/lib/plugin/ascend/custom_ascendc_ops/op_impl/ai_core/tbe/kernel/config/ascend910b/binary_info_config.json +0 -38
  327. mindspore/lib/plugin/ascend/custom_compiler/OWNERS +0 -12
  328. mindspore/lib/plugin/ascend/custom_compiler/setup.py +0 -255
  329. mindspore/lib/plugin/ascend/custom_compiler/start.sh +0 -26
  330. mindspore/lib/plugin/ascend/custom_compiler/template.json +0 -40
  331. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/acme/include/acme.h +0 -24
  332. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/acme/include/acme_op.h +0 -69
  333. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/acme/include/base_type.h +0 -133
  334. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/acme/include/op_creator.h +0 -32
  335. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/acme/include/op_param.h +0 -35
  336. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/acme/include/tiling_info.h +0 -60
  337. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/acme/src/core/kernel_register.h +0 -37
  338. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/acme/src/core/platform/platform_configs.h +0 -89
  339. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/acme/src/core/platform/rt_funcs.h +0 -135
  340. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/acme/src/ops/host_src/add_op.h +0 -34
  341. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/acme/src/ops/host_src/asd_backoff_base.h +0 -62
  342. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/acme/src/ops/host_src/asd_elewise_op.h +0 -33
  343. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/acme/src/ops/host_src/asd_ops.h +0 -88
  344. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/acme/src/ops/host_src/asd_pa_op.h +0 -45
  345. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/acme/src/ops/host_src/cast_op.h +0 -52
  346. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/acme/src/ops/host_src/matmul_op.h +0 -95
  347. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/acme/src/utils/asd_utils.h +0 -84
  348. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/acme/src/utils/comm_utils.h +0 -61
  349. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/apply_rotary_pos_emb/kernel/apply_rotary_pos_emb_fp32.h +0 -224
  350. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/elewise_binary/and_impl.h +0 -29
  351. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/elewise_binary/div_impl.h +0 -29
  352. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/elewise_binary/elewise_binary_impl.h +0 -48
  353. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/elewise_binary/elewise_binary_tiling.h +0 -25
  354. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/elewise_binary/kernel/and_kernel.h +0 -46
  355. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/elewise_binary/kernel/div_kernel.h +0 -46
  356. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/elewise_binary/kernel/elewise_binary_base.h +0 -260
  357. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/elewise_binary/kernel/elewise_binary_kernel.h +0 -35
  358. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/elewise_binary/kernel/max_kernel.h +0 -66
  359. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/elewise_binary/kernel/min_kernel.h +0 -66
  360. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/elewise_binary/kernel/mul_kernel.h +0 -66
  361. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/elewise_binary/kernel/or_kernel.h +0 -46
  362. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/elewise_binary/max_impl.h +0 -29
  363. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/elewise_binary/min_impl.h +0 -29
  364. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/elewise_binary/mul_impl.h +0 -29
  365. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/elewise_binary/or_impl.h +0 -29
  366. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/elewise_unary/abs_impl.h +0 -29
  367. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/elewise_unary/elewise_unary_impl.h +0 -47
  368. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/elewise_unary/elewise_unary_tiling.h +0 -24
  369. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/elewise_unary/exp_impl.h +0 -29
  370. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/elewise_unary/kernel/abs_kernel.h +0 -45
  371. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/elewise_unary/kernel/elewise_unary_base.h +0 -148
  372. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/elewise_unary/kernel/elewise_unary_kernel.h +0 -31
  373. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/elewise_unary/kernel/exp_kernel.h +0 -45
  374. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/elewise_unary/kernel/ln_kernel.h +0 -45
  375. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/elewise_unary/kernel/not_kernel.h +0 -45
  376. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/elewise_unary/kernel/reciprocal_kernel.h +0 -45
  377. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/elewise_unary/kernel/relu_kernel.h +0 -55
  378. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/elewise_unary/kernel/rsqrt_kernel.h +0 -45
  379. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/elewise_unary/kernel/sqrt_kernel.h +0 -45
  380. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/elewise_unary/ln_impl.h +0 -29
  381. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/elewise_unary/not_impl.h +0 -29
  382. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/elewise_unary/reciprocal_impl.h +0 -29
  383. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/elewise_unary/relu_impl.h +0 -29
  384. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/elewise_unary/rsqrt_impl.h +0 -29
  385. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/elewise_unary/sqrt_impl.h +0 -29
  386. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/grouped_matmul/grouped_matmul_impl.h +0 -45
  387. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/grouped_matmul/grouped_matmul_tiling.h +0 -187
  388. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/grouped_matmul/kernel/grouped_matmul.h +0 -245
  389. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/grouped_matmul/kernel/grouped_matmul_interface.h +0 -24
  390. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/grouped_matmul/kernel/grouped_matmul_utils.h +0 -111
  391. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/grouped_matmul/tiling_data.h +0 -54
  392. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/param/compare_param.h +0 -31
  393. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/param/elewise_param.h +0 -41
  394. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/param/grouped_matmul_param.h +0 -40
  395. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/profiling_util.h +0 -364
  396. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/utils/log/log_utils.h +0 -69
  397. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/utils/register/kernel_creator.h +0 -39
  398. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/utils/register/kernel_registry.h +0 -114
  399. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/utils/utils.h +0 -98
  400. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/op_kernels/ascend910b/MatMulPostFusionMixTactic/matmul_postfusion_mix.json +0 -19
  401. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/op_kernels/ascend910b/MatMulPostFusionMixTactic/matmul_postfusion_mix.o +0 -0
  402. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/op_kernels/ascend910b/MatMulPostFusionMixTactic/matmul_postfusion_mix_mix_aic_0.o +0 -0
  403. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/op_kernels/ascend910b/MatMulPostFusionMixTactic/matmul_postfusion_mix_mix_aiv_0.o +0 -0
  404. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/op_kernels/ascend910b/MultiMatMulPostFusionMixTactic/multi_matmul_postfusion_mix.json +0 -19
  405. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/op_kernels/ascend910b/MultiMatMulPostFusionMixTactic/multi_matmul_postfusion_mix.o +0 -0
  406. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/op_kernels/ascend910b/MultiMatMulPostFusionMixTactic/multi_matmul_postfusion_mix_mix_aic_0.o +0 -0
  407. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/op_kernels/ascend910b/MultiMatMulPostFusionMixTactic/multi_matmul_postfusion_mix_mix_aiv_0.o +0 -0
  408. mindspore/mint/linalg/__init__.py +0 -22
  409. mindspore/nn/layer/embedding_service.py +0 -531
  410. mindspore/nn/layer/embedding_service_layer.py +0 -393
  411. mindspore/ops/function/reshard_func.py +0 -102
  412. mindspore/ops/operations/_infer_ops.py +0 -19
  413. mindspore/ops/operations/reshard_ops.py +0 -53
  414. mindspore/profiler/common/process_pool.py +0 -41
  415. mindspore/profiler/common/singleton.py +0 -28
  416. mindspore/profiler/parser/ascend_integrate_generator.py +0 -42
  417. mindspore/profiler/parser/ascend_memory_generator.py +0 -185
  418. mindspore/train/callback/_cluster_monitor.py +0 -201
  419. mindspore/train/callback/_flops_collector.py +0 -238
  420. mindspore/train/callback/_mindio_ttp.py +0 -443
  421. {mindspore-2.3.0.dist-info → mindspore-2.3.0rc2.dist-info}/WHEEL +0 -0
  422. {mindspore-2.3.0.dist-info → mindspore-2.3.0rc2.dist-info}/entry_points.txt +0 -0
  423. {mindspore-2.3.0.dist-info → mindspore-2.3.0rc2.dist-info}/top_level.txt +0 -0
mindspore/train/model.py CHANGED
@@ -18,7 +18,6 @@ from __future__ import absolute_import
 from collections.abc import Iterable
 from functools import wraps
 
-import sys
 import os
 import math
 import copy
@@ -35,10 +34,8 @@ from mindspore.common.tensor import Tensor
 from mindspore.train.metrics import get_metrics, get_metric_fn
 from mindspore._checkparam import check_input_data, check_output_data
 from mindspore import _checkparam as Validator
-from mindspore.train.callback import _InternalCallbackParam, RunContext, _CallbackManager, Callback, TimeMonitor,\
-    FlopsUtilizationCollector, MindIOTTPAdapter
+from mindspore.train.callback import _InternalCallbackParam, RunContext, _CallbackManager, Callback, TimeMonitor
 from mindspore.train.callback import __all__ as internal_cb_names
-from mindspore.train.callback._cluster_monitor import ClusterMonitor
 from mindspore import context
 from mindspore.parallel._utils import _get_parallel_mode, _get_device_num, _get_parameter_broadcast, \
     _device_number_check, _parameter_broadcast_check, _parallel_predict_check, \
@@ -51,7 +48,7 @@ from mindspore.boost import AutoBoost
 from mindspore.context import ParallelMode
 from mindspore.parallel._recovery_context import _set_recovery_context, _get_recovery_context
 from mindspore.train.dataset_helper import DatasetHelper, connect_network_with_dataset
-from mindspore.common.api import _pynative_executor, ARG_SPECIFIED, TOTAL_ARG_LEN
+from mindspore.common.api import _pynative_executor
 from mindspore.dataset.core.config import get_debug_mode
 from mindspore.dataset.engine.datasets import _set_training_dataset, _reset_training_dataset
 from mindspore.train import amp
@@ -78,7 +75,6 @@ class _FrameworkProfilerCallback(Callback):
     """
     Profiler callback of framework for training.
     """
-
     def step_begin(self, run_context):
         _framework_profiler_step_start()
 
@@ -120,141 +116,6 @@ def _save_final_ckpt(func):
     return wrapper
 
 
-def _append_ccae(callbacks):
-    """Add cluster monitoring when CCAE is enabled."""
-    perf_config = os.getenv("PERF_DUMP_CONFIG")
-    if perf_config is None:
-        return callbacks
-    pairs = perf_config.split(',')
-    perf_config_dict = {}
-    for pair in pairs:
-        key, value = pair.split(':')
-        if value.lower() == 'true':
-            perf_config_dict[key] = True
-        elif value.lower() == 'false':
-            perf_config_dict[key] = False
-        elif value.isdigit():
-            perf_config_dict[key] = int(value)
-        else:
-            perf_config_dict[key] = value
-    if perf_config_dict.get("enable", False):
-        if callbacks is None:
-            callbacks = ClusterMonitor()
-        elif isinstance(callbacks, list):
-            callbacks.append(ClusterMonitor())
-        else:
-            callbacks = [callbacks, ClusterMonitor()]
-    return callbacks
-
-
-def _get_arg_infos(inputs):
-    """Get compile argument information from inputs.
-
-    Args:
-        inputs (Union[list, tuple, dict]): Argument got from cell which is set by `set_inputs`.
-
-    Raises:
-        RuntimeError: inputs is not a list, tuple or dict.
-        RuntimeError: inputs is a dict without necessary keys and values.
-
-    Returns:
-        _type_: _description_
-    """
-    if isinstance(inputs, (list, tuple)):
-        arg_specified = [[idx, arg] for idx, arg in enumerate(inputs)]
-        arg_len = len(inputs)
-    elif isinstance(inputs, dict):
-        arg_specified = inputs.get(ARG_SPECIFIED, None)
-        arg_len = inputs.get(TOTAL_ARG_LEN, None)
-        if arg_specified is None or arg_len is None:
-            raise RuntimeError(
-                "The incremental inputs should be processed(with \"%s\" and \"%s\"), but got %s." %
-                (ARG_SPECIFIED, TOTAL_ARG_LEN, str(inputs)))
-    else:
-        raise RuntimeError("inputs should be a list/tuple or a dict, but got %s!" % str(inputs))
-
-    return arg_len, arg_specified
-
-
-def _merge_inputs(inputs1, inputs2):
-    """Merge two processed inputs to a new inputs for latter setting cell's inputs."""
-    is_fullmode1 = isinstance(inputs1, (list, tuple))
-    is_fullmode2 = isinstance(inputs2, (list, tuple))
-
-    if is_fullmode1 and is_fullmode2:
-        return [*inputs1, *inputs2]
-
-    arg_len1, arg_specified1 = _get_arg_infos(inputs1)
-    arg_len2, arg_specified2 = _get_arg_infos(inputs2)
-
-    res_arg_len = arg_len1 + arg_len2
-    res_arg_specified = []
-    res_arg_specified.extend(arg_specified1)
-    # The second inputs should add offset before merging.
-    for idx, arg in arg_specified2:
-        res_arg_specified.append([idx + arg_len1, arg])
-
-    return {ARG_SPECIFIED: res_arg_specified, TOTAL_ARG_LEN: res_arg_len}
-
-
-def _process_loss_inputs(loss_inputs):
-    """Process loss's inputs whose first input should be dropped for train or eval.
-
-    Args:
-        loss_inputs (Union[list, tuple, dict]): Arguments save by `set_inputs` or `jit`.
-
-    Raises:
-        RuntimeError: inputs is not a list, tuple or dict.
-        RuntimeError: inputs is a dict without necessary keys and values.
-
-    Returns:
-        list, tuple or dict: Arguments for latter setting.
-    """
-    # For train or eval, the first input of loss is the inner-tensor, so drop it.
-    res = None
-    if isinstance(loss_inputs, (list, tuple)):
-        res = [*loss_inputs]
-        res.pop(0)
-    elif isinstance(loss_inputs, dict):
-        loss_arg_specified = loss_inputs.get(ARG_SPECIFIED, None)
-        loss_arg_len = loss_inputs.get(TOTAL_ARG_LEN, None)
-        if loss_arg_specified is None or loss_arg_len is None:
-            raise RuntimeError(
-                "The loss incremental inputs should be processed(with \"%s\" and \"%s\"), but got %s." %
-                (ARG_SPECIFIED, TOTAL_ARG_LEN, str(loss_inputs)))
-        res_loss_arg_specified = []
-        for idx, arg in loss_arg_specified:
-            if idx == 0:
-                continue
-            res_loss_arg_specified.append([idx, arg])
-        res = {ARG_SPECIFIED: res_loss_arg_specified, TOTAL_ARG_LEN: loss_arg_len - 1}
-    else:
-        raise RuntimeError("loss_inputs should be a list/tuple or a dict, but got %s!" % str(loss_inputs))
-
-    return res
-
-
-def _set_with_processed_inputs(network, inputs):
-    """Save set inputs for computation graph with processed inputs.
-
-    Args:
-        network (nn.Cell): Target cell.
-        inputs (Union[list, tuple, dict]): Inputs argument got from other cell.
-
-    Raises:
-        RuntimeError: network is not a nn.Cell.
-        RuntimeError: inputs is not a list, tuple or dict.
-    """
-    Validator.check_value_type('network', network, nn.Cell)
-    if isinstance(inputs, (list, tuple)):
-        network.set_inputs(*inputs)
-    elif isinstance(inputs, dict):
-        network.set_inputs(**inputs)
-    else:
-        raise RuntimeError(
-            "Reset inputs from a process inputs, should be a list/tuple or a dict, but got %s!" % str(inputs))
-
-
 class Model:
     """
     High-Level API for training or inference.
@@ -380,6 +241,7 @@ class Model:
         self._lite_infer = True  # if backend lite infer fails, set False
         self._mindspore_lite_model_group_id = id(self) & 0xFFFF
 
+
     def _check_for_graph_cell(self, kwargs):
         """Check for graph cell"""
         if not isinstance(self._network, nn.GraphCell):
@@ -450,10 +312,13 @@ class Model:
             raise ValueError("The argument 'optimizer' can not be None when set 'loss_scale_manager'.")
 
         net_inputs = network.get_inputs()
+        loss_inputs = [None]
         if self._loss_fn:
-            if self._loss_fn.get_inputs() and net_inputs:
-                loss_inputs = _process_loss_inputs(self._loss_fn.get_inputs())
-                net_inputs = _merge_inputs(net_inputs, loss_inputs)
+            if self._loss_fn.get_inputs():
+                loss_inputs = [*self._loss_fn.get_inputs()]
+                loss_inputs.pop(0)
+            if net_inputs:
+                net_inputs = [*net_inputs, *loss_inputs]
         if self._optimizer:
             amp_config = {}
             if self._loss_scale_manager_set:
@@ -471,7 +336,7 @@ class Model:
         # If need to check if loss_fn is not None, but optimizer is None
 
         if net_inputs is not None:
-            _set_with_processed_inputs(network, net_inputs)
+            network.set_inputs(*net_inputs)
         return network
 
     def _build_eval_network(self, metrics, eval_network, eval_indexes):
@@ -497,13 +362,17 @@ class Model:
                                  f" optional, and then you can set `eval_network` or `loss_fn`. For the latter case,"
                                  f" framework will automatically build an evaluation network with `network` and"
                                  f" `loss_fn`.")
+
             net_inputs = self._network.get_inputs()
-            if self._loss_fn.get_inputs() and net_inputs:
-                loss_inputs = _process_loss_inputs(self._loss_fn.get_inputs())
-                net_inputs = _merge_inputs(net_inputs, loss_inputs)
+            loss_inputs = [None]
+            if self._loss_fn.get_inputs():
+                loss_inputs = [*self._loss_fn.get_inputs()]
+                loss_inputs.pop(0)
+            if net_inputs:
+                net_inputs = [*net_inputs, *loss_inputs]
             self._eval_network = nn.WithEvalCell(self._network, self._loss_fn, self._amp_level in ["O2", "O3", "auto"])
             if net_inputs is not None:
-                _set_with_processed_inputs(self._eval_network, net_inputs)
+                self._eval_network.set_inputs(*net_inputs)
             self._eval_indexes = [0, 1, 2]
 
     def _build_predict_network(self):
@@ -576,6 +445,7 @@ class Model:
         if _get_recovery_context("enable_recovery") and is_train:
             _set_training_dataset(dataset_helper)
 
+
         network.set_train(is_train)
         network.phase = phase
         self._backbone_is_train = is_train
@@ -591,40 +461,6 @@ class Model:
             self._backbone_is_train = is_train
         return network
 
-    def _check_need_ckpt(self, callbacks):
-        """Check callback list contain ckpt"""
-        need_ckpt = False
-        save_ckpt_steps = 1
-        last_triggered_step = 0
-        for cb in callbacks:
-            if isinstance(cb, ModelCheckpoint):
-                need_ckpt = True
-                cfg_size = cb._get_save_checkpoint_steps
-                save_ckpt_steps = save_ckpt_steps if (cfg_size is None or cfg_size >= sys.maxsize) else cfg_size
-                last_triggered_step = cb._get_last_trigger_step
-                break
-        return need_ckpt, save_ckpt_steps, last_triggered_step
-
-    def _store_training_step_info(self, cb_params):
-        """
-        cache train step info
-        :param cb_params: callback params
-        :return: none
-        """
-        if os.environ.get("MS_ENABLE_CKPT_D2H_ASYNC") != "1":
-            return
-        if (context.get_context("mode") == context.GRAPH_MODE) and (context.get_context("device_target") == "Ascend"):
-            cb_params.need_ckpt, cb_params.save_checkpoint_steps, \
-                cb_params.last_triggered_step = self._check_need_ckpt(cb_params.list_callback)
-            logger.info(f"need_ckpt:{cb_params.need_ckpt},"
-                        f"save_checkpoint_steps:{cb_params.save_checkpoint_steps},"
-                        f"cur_step_num:{cb_params.cur_step_num},"
-                        f"last_triggered_step:{cb_params.last_triggered_step}")
-            context.set_context(ascend_config={"need_ckpt": cb_params.need_ckpt,
-                                               "save_checkpoint_steps": cb_params.save_checkpoint_steps,
-                                               "cur_step_num": cb_params.cur_step_num,
-                                               "last_triggered_step": cb_params.last_triggered_step})
-
     def _warmup_dataset(self, epoch, train_dataset, sink_size=-1):
         """
         Trigger dataset pipeline running before graph compiling.
@@ -650,22 +486,6 @@ class Model:
         train_dataset._dataset_helper = dataset_helper
         train_dataset._warmup_epoch = epoch
 
-    def _waiting_for_dataset_warmup_ready(self, train_dataset):
-        """
-        Wait for the dataset to warmup until there is a batch of data available for training on the device side.
-
-        Args:
-            train_dataset (Dataset): A training dataset iterator. If `train_dataset` is defined, training graphs will be
-                initialized. Default: ``None``.
-        """
-        mbuf_size = train_dataset.__transfer_dataset__.get_mbuf_queue_size()
-        while mbuf_size == 0:
-            time.sleep(10)
-            mbuf_size = train_dataset.__transfer_dataset__.get_mbuf_queue_size()
-            if mbuf_size != 0:
-                break
-            logger.warning(f"Waiting for the dataset warmup, current device queue size: {mbuf_size}")
-
     def _init(self, train_dataset=None, valid_dataset=None, sink_size=-1, epoch=1):
         """
         Initialize compute graphs and data graphs with the sink mode.
@@ -704,12 +524,17 @@ class Model:
                                                                         dataset_sink_mode=True,
                                                                         sink_size=sink_size)
             self._warmup_dataset(epoch, train_dataset, sink_size)
-
             # Since dataset pipeline has been triggered, delete flag
             delattr(train_dataset, "__no_send__")
-
-            # Waiting for the dataset warmup ready
-            self._waiting_for_dataset_warmup_ready(train_dataset)
+            if train_dataset.get_init_step() > 0:
+                mbuf_size = train_dataset.__transfer_dataset__.get_mbuf_queue_size()
+                while mbuf_size == 0:
+                    time.sleep(10)
+                    mbuf_size = train_dataset.__transfer_dataset__.get_mbuf_queue_size()
+                    if mbuf_size != 0:
+                        break
+                    logger.warning(f"Failover mode, waiting for dataset recover to specify step, "
+                                   f"current device queue size: {mbuf_size}")
 
         if context.get_auto_parallel_context("pipeline_stages") > 1 and valid_dataset:
             train_network.add_flags_recursive(is_first_iteration=True)
@@ -787,10 +612,6 @@ class Model:
         cb_params.list_callback = self._transform_callbacks(callbacks)
         valid_infos = (valid_dataset, valid_frequency, valid_dataset_sink_mode)
         cb_params.list_callback.insert(0, _FrameworkProfilerCallback())
-        if os.environ.get("ENABLE_FLOPS_UTILIZATION_COLLECTOR") == "1" and \
-                FlopsUtilizationCollector not in cb_params.list_callback:
-            cb_params.list_callback.insert(0, FlopsUtilizationCollector(
-                cb_params.batch_num, full_flops=False))
         if context.get_context("mode") == context.PYNATIVE_MODE:
             cb_params.list_callback.insert(0, _StepSync())
         callbacks = cb_params.list_callback
@@ -849,7 +670,6 @@ class Model:
             dataset_sink_num = math.ceil(epoch * sink_size / dataset_size)
             train_dataset.__total_batch__ = epoch * sink_size
 
-        cb_params.sink_size = sink_size
         cb_params.cur_step_num = 0
         cb_params.dataset_sink_mode = True
 
@@ -895,7 +715,6 @@ class Model:
                 else:
                     cb_params.cur_step_num += 1
                 self._current_step_num = int((cb_params.cur_step_num - 1) % cb_params.batch_num + 1)
-                self._store_training_step_info(cb_params)
                 cb_params.train_dataset_element = inputs
                 list_callback.on_train_step_begin(run_context)
                 train_network = self._check_network_mode(train_network, True)
@@ -1150,31 +969,6 @@ class Model:
 
         list_callback.on_train_end(run_context)
 
-    def _wrapper_train(self, callbacks):
-        """
-        This method used to wrap train function with ttp wrapper which will do event notify when
-        exceptions throw.
-
-        Args:
-            callbacks (function): Callbacks passed by train method.
-        """
-
-        if not callbacks:
-            return self._train
-        cbs = callbacks if isinstance(callbacks, list) else [callbacks]
-        obj = None
-        _train_wrapper = None
-        for item in cbs:
-            if isinstance(item, MindIOTTPAdapter):
-                obj = item
-
-        if (obj is not None) and (obj.enable is True):
-            logger.info("MindIO TTP is enable, so we wrapper ttp exception handdler for self train method.")
-            _train_wrapper = obj.wrapper_ttp_persist(self._train)
-
-        return self._train if not _train_wrapper else _train_wrapper
-
-
     def train(self, epoch, train_dataset, callbacks=None, dataset_sink_mode=False, sink_size=-1, initial_epoch=0):
         """
         Training API.
@@ -1282,17 +1076,15 @@ class Model:
 
         _device_number_check(self._parallel_mode, self._device_number)
 
-        callbacks = _append_ccae(callbacks)
-        _train_wrapper = None
         if callbacks:
             self._check_methods_for_custom_callbacks(callbacks, "train")
-        _train_wrapper = self._wrapper_train(callbacks)
-        _train_wrapper(epoch,
-                       train_dataset,
-                       callbacks=callbacks,
-                       dataset_sink_mode=dataset_sink_mode,
-                       sink_size=sink_size,
-                       initial_epoch=initial_epoch)
+
+        self._train(epoch,
+                    train_dataset,
+                    callbacks=callbacks,
+                    dataset_sink_mode=dataset_sink_mode,
+                    sink_size=sink_size,
+                    initial_epoch=initial_epoch)
 
         # When it's distributed training and using MindRT,
         # the node id should be reset to start from 0.
@@ -1320,7 +1112,7 @@ class Model:
             callbacks = [callbacks]
         for cb in callbacks:
            cb_name = cb.__class__.__name__
-            if cb_name not in internal_cb_names:
+            if cb_name not in internal_cb_names:
                 cb_methods_names = set(cb.__class__.__dict__.keys())
                 invalid_methods_names = cb_methods_names & old_version_methods_names
                 if invalid_methods_names:
@@ -1683,10 +1475,6 @@ class Model:
         cb_params.mode = "eval"
         cb_params.cur_step_num = 0
         cb_params.list_callback = self._transform_callbacks(callbacks)
-        if os.environ.get("ENABLE_FLOPS_UTILIZATION_COLLECTOR") == "1" and \
-                FlopsUtilizationCollector not in cb_params.list_callback:
-            cb_params.list_callback.insert(0, FlopsUtilizationCollector(
-                cb_params.batch_num, full_flops=False))
         cb_params.network = self._network
 
         self._clear_metrics()
@@ -1933,25 +1721,8 @@ class Model:
                 self._lite_infer = False
                 logger.warning(f"Lite inference failed, {e.__str__()}, fallback to original inference!")
 
-        def _check_input_data():
-            """Input data check."""
-            for item in predict_data:
-                if item is None:
-                    continue
-                if isinstance(item, Tensor):
-                    if item.size == 0:
-                        msg = "The input data can not be empty."
-                        logger.critical(msg)
-                        raise ValueError(msg)
-                    continue
-                if not isinstance(item, (int, float, str)):
-                    data_class_str = "Tensor, None, int, float, str"
-                    raise TypeError(f'The types of input data must be in the Union({data_class_str}, ' \
-                                    f'tuple[{data_class_str}], list[{data_class_str}], dict[{data_class_str}]), ' \
-                                    f'but got type {item if item is None else type(item).__name__}.')
-
         self._check_network_mode(self._predict_network, False)
-        _check_input_data()
+        check_input_data(*predict_data, data_class=(int, float, str, None, Tensor))
         _parallel_predict_check()
         result = self._predict_network(*predict_data)
 
@@ -2063,6 +1834,7 @@ class Model:
         train_dataset.__model_hash__ = hash(self)
         return train_network.parameter_layout_dict
 
+
     def infer_predict_layout(self, *predict_data, skip_backend_compile=False):
         """
         Generate parameter layout for the predict network in 'AUTO_PARALLEL' or 'SEMI_AUTO_PARALLEL' mode.