mindspore 2.3.0__cp39-none-any.whl → 2.3.0rc2__cp39-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release: this version of mindspore might be problematic.
Files changed (423)
  1. mindspore/.commit_id +1 -1
  2. mindspore/Third_Party_Open_Source_Software_Notice +0 -1512
  3. mindspore/__init__.py +1 -2
  4. mindspore/_c_dataengine.cpython-39-aarch64-linux-gnu.so +0 -0
  5. mindspore/_c_expression.cpython-39-aarch64-linux-gnu.so +0 -0
  6. mindspore/_c_mindrecord.cpython-39-aarch64-linux-gnu.so +0 -0
  7. mindspore/_checkparam.py +25 -5
  8. mindspore/_extends/graph_kernel/model/graph_parallel.py +1 -1
  9. mindspore/_extends/parse/__init__.py +2 -2
  10. mindspore/_extends/parse/compile_config.py +0 -29
  11. mindspore/_extends/parse/namespace.py +2 -2
  12. mindspore/_extends/parse/parser.py +5 -21
  13. mindspore/_extends/parse/resources.py +7 -5
  14. mindspore/_extends/parse/standard_method.py +59 -40
  15. mindspore/_mindspore_offline_debug.cpython-39-aarch64-linux-gnu.so +0 -0
  16. mindspore/amp.py +5 -26
  17. mindspore/bin/cache_admin +0 -0
  18. mindspore/bin/cache_server +0 -0
  19. mindspore/boost/adasum.py +1 -1
  20. mindspore/boost/base.py +1 -1
  21. mindspore/boost/boost_cell_wrapper.py +1 -1
  22. mindspore/boost/grad_freeze.py +2 -2
  23. mindspore/boost/less_batch_normalization.py +6 -9
  24. mindspore/common/__init__.py +1 -8
  25. mindspore/common/_register_for_tensor.py +9 -8
  26. mindspore/common/api.py +65 -275
  27. mindspore/common/dtype.py +4 -8
  28. mindspore/common/dump.py +5 -2
  29. mindspore/common/jit_config.py +1 -1
  30. mindspore/common/lazy_inline.py +2 -14
  31. mindspore/common/parameter.py +15 -14
  32. mindspore/common/recompute.py +5 -20
  33. mindspore/common/sparse_tensor.py +6 -21
  34. mindspore/common/tensor.py +52 -100
  35. mindspore/communication/__init__.py +11 -6
  36. mindspore/communication/management.py +94 -92
  37. mindspore/context.py +18 -180
  38. mindspore/dataset/engine/datasets.py +46 -69
  39. mindspore/dataset/engine/datasets_user_defined.py +53 -72
  40. mindspore/dataset/engine/datasets_vision.py +2 -2
  41. mindspore/dataset/engine/queue.py +38 -56
  42. mindspore/dataset/engine/validators.py +5 -11
  43. mindspore/dataset/vision/__init__.py +5 -5
  44. mindspore/dataset/vision/c_transforms.py +5 -5
  45. mindspore/dataset/vision/py_transforms_util.py +1 -1
  46. mindspore/dataset/vision/transforms.py +46 -591
  47. mindspore/dataset/vision/utils.py +1 -121
  48. mindspore/dataset/vision/validators.py +3 -9
  49. mindspore/hal/__init__.py +1 -7
  50. mindspore/hal/device.py +1 -1
  51. mindspore/include/api/model.h +0 -3
  52. mindspore/include/dataset/vision.h +2 -54
  53. mindspore/include/mindapi/base/types.h +0 -1
  54. mindspore/lib/libdnnl.so.2 +0 -0
  55. mindspore/lib/libmindspore.so +0 -0
  56. mindspore/lib/libmindspore_backend.so +0 -0
  57. mindspore/lib/libmindspore_common.so +0 -0
  58. mindspore/lib/libmindspore_core.so +0 -0
  59. mindspore/lib/libmindspore_glog.so.0 +0 -0
  60. mindspore/lib/libmindspore_gpr.so.15 +0 -0
  61. mindspore/lib/libmindspore_grpc++.so.1 +0 -0
  62. mindspore/lib/libmindspore_grpc.so.15 +0 -0
  63. mindspore/lib/libmindspore_shared_lib.so +0 -0
  64. mindspore/lib/libmpi_adapter.so +0 -0
  65. mindspore/lib/libmpi_collective.so +0 -0
  66. mindspore/lib/libnnacl.so +0 -0
  67. mindspore/lib/libopencv_core.so.4.5 +0 -0
  68. mindspore/lib/libps_cache.so +0 -0
  69. mindspore/lib/plugin/ascend/custom_aicore_ops/op_impl/ai_core/tbe/config/ascend310p/aic-ascend310p-ops-info.json +0 -35
  70. mindspore/lib/plugin/ascend/custom_aicore_ops/op_impl/ai_core/tbe/custom_aicore_ops_impl/kv_cache_mgr.py +0 -2
  71. mindspore/lib/plugin/ascend/custom_aicore_ops/op_impl/vector_core/tbe/custom_aicore_ops_impl/kv_cache_mgr.py +0 -2
  72. mindspore/lib/plugin/ascend/custom_aicpu_ops/op_impl/cpu/aicpu_kernel/impl/libcust_cpu_kernels.so +0 -0
  73. mindspore/lib/plugin/ascend/custom_aicpu_ops/op_impl/cpu/config/cust_aicpu_kernel.json +0 -72
  74. mindspore/lib/plugin/ascend/custom_aicpu_ops/op_proto/libcust_op_proto.so +0 -0
  75. mindspore/lib/plugin/ascend/custom_ascendc_ops/op_api/include/{aclnn_all_finite.h → aclnn_add_custom.h} +11 -9
  76. mindspore/lib/plugin/ascend/custom_ascendc_ops/op_api/include/aclnn_decoder_kv_cache.h +1 -1
  77. mindspore/lib/plugin/ascend/custom_ascendc_ops/op_api/include/aclnn_prompt_kv_cache.h +1 -1
  78. mindspore/lib/plugin/ascend/custom_ascendc_ops/op_api/lib/libcust_opapi.so +0 -0
  79. mindspore/lib/plugin/ascend/custom_ascendc_ops/op_impl/ai_core/tbe/config/ascend310p/aic-ascend310p-ops-info.json +12 -184
  80. mindspore/lib/plugin/ascend/custom_ascendc_ops/op_impl/ai_core/tbe/config/ascend910/aic-ascend910-ops-info.json +15 -7
  81. mindspore/lib/plugin/ascend/custom_ascendc_ops/op_impl/ai_core/tbe/config/ascend910b/aic-ascend910b-ops-info.json +15 -7
  82. mindspore/lib/plugin/ascend/custom_ascendc_ops/op_impl/ai_core/tbe/custom_ascendc_ops_impl/dynamic/add_custom.cpp +81 -0
  83. mindspore/lib/plugin/ascend/custom_ascendc_ops/op_impl/ai_core/tbe/custom_ascendc_ops_impl/dynamic/add_custom.py +134 -0
  84. mindspore/lib/plugin/ascend/custom_ascendc_ops/op_impl/ai_core/tbe/custom_ascendc_ops_impl/dynamic/decoder_kv_cache.py +31 -77
  85. mindspore/lib/plugin/ascend/custom_ascendc_ops/op_impl/ai_core/tbe/custom_ascendc_ops_impl/dynamic/prompt_kv_cache.py +31 -77
  86. mindspore/lib/plugin/ascend/custom_ascendc_ops/op_impl/ai_core/tbe/op_tiling/lib/linux/aarch64/libcust_opmaster_rt2.0.so +0 -0
  87. mindspore/lib/plugin/ascend/custom_ascendc_ops/op_impl/ai_core/tbe/op_tiling/liboptiling.so +0 -0
  88. mindspore/lib/plugin/ascend/custom_ascendc_ops/op_proto/inc/op_proto.h +5 -4
  89. mindspore/lib/plugin/ascend/custom_ascendc_ops/op_proto/lib/linux/aarch64/libcust_opsproto_rt2.0.so +0 -0
  90. mindspore/lib/plugin/ascend/libascend_collective.so +0 -0
  91. mindspore/lib/plugin/ascend/libdvpp_utils.so +0 -0
  92. mindspore/lib/plugin/ascend/libhccl_plugin.so +0 -0
  93. mindspore/lib/plugin/ascend/liblowlatency_collective.so +0 -0
  94. mindspore/lib/plugin/ascend/libmindspore_cpu_kernels.so +0 -0
  95. mindspore/lib/plugin/ascend/ms_kernels_internal/asdops/bin/DeviceBin +0 -0
  96. mindspore/lib/plugin/ascend/ms_kernels_internal/asdops/bin/PkgInspect +0 -0
  97. mindspore/lib/plugin/ascend/ms_kernels_internal/asdops/bin/op_man +0 -0
  98. mindspore/lib/plugin/ascend/ms_kernels_internal/asdops/device/ascend910b/bin/ascend910b.bin +286 -275
  99. mindspore/lib/plugin/ascend/ms_kernels_internal/asdops/host/libasdops_cann_host.so +0 -0
  100. mindspore/lib/plugin/ascend/ms_kernels_internal/asdops/host/libasdops_host.so +0 -0
  101. mindspore/lib/plugin/ascend/ms_kernels_internal/asdops/lib/libasdops.so +0 -0
  102. mindspore/lib/plugin/ascend/ms_kernels_internal/asdops/lib/libasdops_static.a +0 -0
  103. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/add/add_impl.h +0 -1
  104. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/apply_rotary_pos_emb/apply_rotary_pos_emb_impl.h +0 -1
  105. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/asdop/asd_op_impl.h +0 -3
  106. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/backend_param.h +0 -5
  107. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/cast/cast_tiling.h +45 -1
  108. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/compare/compare_impl.h +0 -1
  109. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/flash_attention_score/flash_attention_score_impl.h +4 -8
  110. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/flash_attention_score/flash_attention_score_tiling.h +4 -11
  111. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/flash_attention_score/kernel/flash_attention_score_mix_hwsync.h +0 -18
  112. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/internal_kernel.h +0 -6
  113. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/internal_rtbackend.h +75 -1
  114. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/matmul/kernel/matmul.h +5 -5
  115. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/matmul/matmul_impl.h +3 -18
  116. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/matmul_common/pp_matmul_common_tiling.h +5 -5
  117. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/matmul_common/pp_matmul_info.h +2 -2
  118. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/matmul_common/tiling_data.h +3 -36
  119. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/matmul_stridedslice/kernel/matmul_stridedslice_fusion.h +2 -2
  120. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/matmul_stridedslice/matmul_stridedslice_fusion_impl.h +4 -22
  121. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/op_param.h +2 -16
  122. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/paged_attention/kernel/paged_attention_mix_hwsync.h +3 -1
  123. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/paged_attention/paged_attention_impl.h +4 -5
  124. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/paged_attention/paged_attention_tiling.h +4 -9
  125. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/param/attention_param.h +2 -5
  126. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/param/matmul_ext_param.h +0 -1
  127. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/param/matmul_qkv_param.h +4 -10
  128. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/param/sub_param.h +12 -0
  129. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/rms_norm/rms_norm_impl.h +0 -1
  130. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/sub/sub_impl.h +0 -1
  131. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/tune_repo/matmul_table.h +1 -1
  132. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/utils/backend.h +2 -10
  133. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/utils/elewise_utils.h +1 -5
  134. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/utils/log/log.h +0 -1
  135. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/utils/log/log_tiling.h +0 -17
  136. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/utils/math.h +7 -2
  137. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/lib/libAdd_impl.so +0 -0
  138. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/lib/libSub_impl.so +0 -0
  139. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/lib/libadd_layernorm_impl.so +0 -0
  140. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/lib/libadd_rms_norm_impl.so +0 -0
  141. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/lib/libapply_rotary_pos_emb_impl.so +0 -0
  142. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/lib/libcast_impl.so +0 -0
  143. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/lib/libgelu_impl.so +0 -0
  144. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/lib/libmatmul_impl.so +0 -0
  145. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/lib/libmatmul_stridedslice_fusion_impl.so +0 -0
  146. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/lib/libms_kernels_internal.so +0 -0
  147. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/lib/libnot_equal_impl.so +0 -0
  148. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/lib/libreshape_and_cache_impl.so +0 -0
  149. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/lib/librms_norm_impl.so +0 -0
  150. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/op_kernels/ascend910b/BSAttention/flash_attention_score_bf16_bnsd_full_mix.o +0 -0
  151. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/op_kernels/ascend910b/BSAttention/flash_attention_score_bf16_bnsd_tri_mix.o +0 -0
  152. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/op_kernels/ascend910b/BSAttention/flash_attention_score_bf16_bsh_full_mix.o +0 -0
  153. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/op_kernels/ascend910b/BSAttention/flash_attention_score_bf16_bsh_tri_mix.o +0 -0
  154. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/op_kernels/ascend910b/BSAttention/flash_attention_score_fp16_bnsd_full_mix.o +0 -0
  155. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/op_kernels/ascend910b/BSAttention/flash_attention_score_fp16_bnsd_tri_mix.o +0 -0
  156. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/op_kernels/ascend910b/BSAttention/flash_attention_score_fp16_bsh_full_mix.o +0 -0
  157. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/op_kernels/ascend910b/BSAttention/flash_attention_score_fp16_bsh_tri_mix.o +0 -0
  158. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/op_kernels/ascend910b/BSAttention/paged_attention_bf16_bnsd_full_mix.o +0 -0
  159. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/op_kernels/ascend910b/BSAttention/paged_attention_bf16_bsh_full_mix.o +0 -0
  160. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/op_kernels/ascend910b/BSAttention/paged_attention_fp16_bnsd_full_mix.o +0 -0
  161. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/op_kernels/ascend910b/BSAttention/paged_attention_fp16_bsh_full_mix.o +0 -0
  162. mindspore/lib/plugin/ascend/ms_kernels_internal/lccl/lib/liblcal.so +0 -0
  163. mindspore/lib/plugin/ascend/ms_kernels_internal/lccl/lib/liblccl_wrapper.so +0 -0
  164. mindspore/lib/plugin/libmindspore_ascend.so.2 +0 -0
  165. mindspore/mindrecord/filewriter.py +2 -2
  166. mindspore/mint/__init__.py +40 -720
  167. mindspore/mint/nn/__init__.py +7 -89
  168. mindspore/mint/nn/functional.py +16 -165
  169. mindspore/mint/optim/adamw.py +16 -15
  170. mindspore/nn/__init__.py +2 -0
  171. mindspore/nn/cell.py +98 -97
  172. mindspore/nn/extend/basic.py +2 -2
  173. mindspore/nn/extend/embedding.py +1 -1
  174. mindspore/nn/extend/layer/normalization.py +5 -7
  175. mindspore/nn/generator.py +297 -0
  176. mindspore/nn/layer/activation.py +3 -4
  177. mindspore/nn/layer/basic.py +16 -79
  178. mindspore/nn/layer/conv.py +8 -17
  179. mindspore/nn/layer/embedding.py +4 -1
  180. mindspore/nn/layer/math.py +1 -1
  181. mindspore/nn/layer/normalization.py +1 -1
  182. mindspore/nn/layer/pooling.py +0 -5
  183. mindspore/nn/layer/rnn_cells.py +2 -2
  184. mindspore/nn/loss/loss.py +19 -19
  185. mindspore/nn/optim/adasum.py +1 -1
  186. mindspore/nn/optim/sgd.py +2 -3
  187. mindspore/nn/probability/distribution/exponential.py +1 -1
  188. mindspore/nn/probability/distribution/geometric.py +1 -1
  189. mindspore/nn/probability/distribution/logistic.py +1 -1
  190. mindspore/nn/wrap/cell_wrapper.py +1 -25
  191. mindspore/nn/wrap/loss_scale.py +1 -24
  192. mindspore/numpy/array_ops.py +1 -5
  193. mindspore/numpy/dtypes.py +3 -3
  194. mindspore/numpy/math_ops.py +8 -8
  195. mindspore/ops/__init__.py +1 -1
  196. mindspore/ops/_grad_experimental/grad_comm_ops.py +16 -75
  197. mindspore/ops/_vmap/vmap_array_ops.py +0 -27
  198. mindspore/ops/_vmap/vmap_math_ops.py +1 -29
  199. mindspore/ops/_vmap/vmap_nn_ops.py +18 -19
  200. mindspore/ops/auto_generate/cpp_create_prim_instance_helper.py +8 -34
  201. mindspore/ops/auto_generate/gen_arg_dtype_cast.py +9 -2
  202. mindspore/ops/auto_generate/gen_arg_handler.py +0 -26
  203. mindspore/ops/auto_generate/gen_extend_func.py +27 -603
  204. mindspore/ops/auto_generate/gen_ops_def.py +203 -993
  205. mindspore/ops/auto_generate/gen_ops_prim.py +402 -1946
  206. mindspore/ops/auto_generate/pyboost_inner_prim.py +20 -90
  207. mindspore/ops/composite/base.py +6 -3
  208. mindspore/ops/composite/math_ops.py +1 -1
  209. mindspore/ops/composite/multitype_ops/_compile_utils.py +17 -24
  210. mindspore/ops/composite/multitype_ops/_constexpr_utils.py +1 -1
  211. mindspore/ops/extend/__init__.py +3 -2
  212. mindspore/ops/extend/array_func.py +51 -10
  213. mindspore/ops/extend/nn_func.py +78 -2
  214. mindspore/ops/function/__init__.py +13 -8
  215. mindspore/ops/function/array_func.py +179 -455
  216. mindspore/ops/function/clip_func.py +1 -1
  217. mindspore/ops/function/grad/grad_func.py +3 -3
  218. mindspore/ops/function/math_func.py +103 -117
  219. mindspore/ops/function/nn_func.py +163 -275
  220. mindspore/ops/function/other_func.py +2 -2
  221. mindspore/ops/function/random_func.py +69 -202
  222. mindspore/ops/function/sparse_func.py +4 -4
  223. mindspore/ops/functional.py +327 -332
  224. mindspore/ops/operations/__init__.py +3 -13
  225. mindspore/ops/operations/_grad_ops.py +27 -3
  226. mindspore/ops/operations/_inner_ops.py +356 -53
  227. mindspore/ops/operations/_rl_inner_ops.py +2 -2
  228. mindspore/ops/operations/_tensor_array.py +8 -8
  229. mindspore/ops/operations/array_ops.py +65 -82
  230. mindspore/ops/operations/comm_ops.py +93 -784
  231. mindspore/ops/operations/custom_ops.py +28 -51
  232. mindspore/ops/operations/debug_ops.py +4 -4
  233. mindspore/ops/operations/inner_ops.py +2 -2
  234. mindspore/ops/operations/manually_defined/ops_def.py +4 -304
  235. mindspore/ops/operations/math_ops.py +50 -3
  236. mindspore/ops/operations/nn_ops.py +247 -14
  237. mindspore/ops/operations/other_ops.py +3 -3
  238. mindspore/ops/operations/random_ops.py +1 -1
  239. mindspore/ops/operations/sparse_ops.py +1 -1
  240. mindspore/ops/primitive.py +8 -9
  241. mindspore/ops/silent_check.py +5 -5
  242. mindspore/ops_generate/arg_dtype_cast.py +9 -2
  243. mindspore/ops_generate/arg_handler.py +0 -26
  244. mindspore/ops_generate/gen_aclnn_implement.py +4 -1
  245. mindspore/ops_generate/gen_ops.py +4 -26
  246. mindspore/ops_generate/gen_pyboost_func.py +12 -41
  247. mindspore/ops_generate/gen_utils.py +0 -21
  248. mindspore/ops_generate/pyboost_utils.py +2 -7
  249. mindspore/ops_generate/template.py +0 -1
  250. mindspore/parallel/_auto_parallel_context.py +1 -21
  251. mindspore/parallel/_tensor.py +5 -0
  252. mindspore/parallel/_transformer/transformer.py +1 -1
  253. mindspore/parallel/_utils.py +1 -15
  254. mindspore/parallel/algo_parameter_config.py +3 -1
  255. mindspore/parallel/checkpoint_transform.py +9 -12
  256. mindspore/parallel/cluster/process_entity/_api.py +29 -28
  257. mindspore/parallel/cluster/process_entity/_utils.py +3 -13
  258. mindspore/parallel/cluster/run.py +16 -13
  259. mindspore/parallel/parameter_broadcast.py +2 -2
  260. mindspore/parallel/shard.py +17 -31
  261. mindspore/profiler/__init__.py +2 -3
  262. mindspore/profiler/common/util.py +2 -107
  263. mindspore/profiler/envprofiling.py +1 -1
  264. mindspore/profiler/parser/ascend_analysis/constant.py +21 -8
  265. mindspore/profiler/parser/ascend_analysis/file_manager.py +0 -82
  266. mindspore/profiler/parser/ascend_analysis/function_event.py +28 -43
  267. mindspore/profiler/parser/ascend_analysis/fwk_cann_parser.py +27 -49
  268. mindspore/profiler/parser/ascend_analysis/fwk_file_parser.py +10 -15
  269. mindspore/profiler/parser/ascend_analysis/msprof_timeline_parser.py +20 -25
  270. mindspore/profiler/parser/ascend_analysis/profiler_info_parser.py +5 -5
  271. mindspore/profiler/parser/ascend_analysis/trace_event_manager.py +1 -10
  272. mindspore/profiler/parser/ascend_hccl_generator.py +1 -4
  273. mindspore/profiler/parser/ascend_msprof_exporter.py +22 -43
  274. mindspore/profiler/parser/ascend_timeline_generator.py +5 -7
  275. mindspore/profiler/parser/minddata_parser.py +3 -72
  276. mindspore/profiler/profiling.py +59 -176
  277. mindspore/rewrite/api/node.py +1 -1
  278. mindspore/rewrite/common/namespace.py +5 -5
  279. mindspore/rewrite/parsers/assign_parser.py +0 -2
  280. mindspore/rewrite/parsers/class_def_parser.py +4 -8
  281. mindspore/run_check/_check_version.py +1 -1
  282. mindspore/scipy/fft.py +3 -1
  283. mindspore/scipy/linalg.py +3 -2
  284. mindspore/scipy/ops.py +3 -5
  285. mindspore/scipy/optimize/__init__.py +2 -2
  286. mindspore/train/__init__.py +4 -4
  287. mindspore/train/anf_ir_pb2.py +2 -8
  288. mindspore/train/callback/__init__.py +2 -5
  289. mindspore/train/callback/_backup_and_restore.py +2 -2
  290. mindspore/train/callback/_checkpoint.py +16 -104
  291. mindspore/train/callback/_landscape.py +1 -1
  292. mindspore/train/callback/_time_monitor.py +1 -1
  293. mindspore/train/data_sink.py +4 -5
  294. mindspore/train/dataset_helper.py +20 -45
  295. mindspore/train/model.py +38 -266
  296. mindspore/train/serialization.py +105 -256
  297. mindspore/train/summary/_summary_adapter.py +1 -1
  298. mindspore/version.py +1 -1
  299. {mindspore-2.3.0.dist-info → mindspore-2.3.0rc2.dist-info}/METADATA +2 -2
  300. {mindspore-2.3.0.dist-info → mindspore-2.3.0rc2.dist-info}/RECORD +303 -420
  301. mindspore/_extends/pijit/__init__.py +0 -23
  302. mindspore/_extends/pijit/pijit_func_white_list.py +0 -343
  303. mindspore/common/file_system.py +0 -48
  304. mindspore/common/generator.py +0 -260
  305. mindspore/common/no_inline.py +0 -54
  306. mindspore/common/np_dtype.py +0 -25
  307. mindspore/communication/comm_func.py +0 -1140
  308. mindspore/hal/memory.py +0 -326
  309. mindspore/lib/libavcodec.so.59 +0 -0
  310. mindspore/lib/libavdevice.so.59 +0 -0
  311. mindspore/lib/libavfilter.so.8 +0 -0
  312. mindspore/lib/libavformat.so.59 +0 -0
  313. mindspore/lib/libavutil.so.57 +0 -0
  314. mindspore/lib/libmindspore_np_dtype.so +0 -0
  315. mindspore/lib/libswresample.so.4 +0 -0
  316. mindspore/lib/libswscale.so.6 +0 -0
  317. mindspore/lib/plugin/ascend/custom_ascendc_ops/op_impl/ai_core/tbe/custom_ascendc_ops_impl/dynamic/all_finite.cpp +0 -326
  318. mindspore/lib/plugin/ascend/custom_ascendc_ops/op_impl/ai_core/tbe/custom_ascendc_ops_impl/dynamic/all_finite.py +0 -180
  319. mindspore/lib/plugin/ascend/custom_ascendc_ops/op_impl/ai_core/tbe/kernel/ascend910b/all_finite/AllFinite_576ceaeef5870c451cab59af55ea46ad.json +0 -58
  320. mindspore/lib/plugin/ascend/custom_ascendc_ops/op_impl/ai_core/tbe/kernel/ascend910b/all_finite/AllFinite_576ceaeef5870c451cab59af55ea46ad.o +0 -0
  321. mindspore/lib/plugin/ascend/custom_ascendc_ops/op_impl/ai_core/tbe/kernel/ascend910b/all_finite/AllFinite_86a73ff6e28d734c96bb8d3054f7dd18.json +0 -58
  322. mindspore/lib/plugin/ascend/custom_ascendc_ops/op_impl/ai_core/tbe/kernel/ascend910b/all_finite/AllFinite_86a73ff6e28d734c96bb8d3054f7dd18.o +0 -0
  323. mindspore/lib/plugin/ascend/custom_ascendc_ops/op_impl/ai_core/tbe/kernel/ascend910b/all_finite/AllFinite_f55e0ebaad1f2f572e43677336992fa0.json +0 -58
  324. mindspore/lib/plugin/ascend/custom_ascendc_ops/op_impl/ai_core/tbe/kernel/ascend910b/all_finite/AllFinite_f55e0ebaad1f2f572e43677336992fa0.o +0 -0
  325. mindspore/lib/plugin/ascend/custom_ascendc_ops/op_impl/ai_core/tbe/kernel/config/ascend910b/all_finite.json +0 -109
  326. mindspore/lib/plugin/ascend/custom_ascendc_ops/op_impl/ai_core/tbe/kernel/config/ascend910b/binary_info_config.json +0 -38
  327. mindspore/lib/plugin/ascend/custom_compiler/OWNERS +0 -12
  328. mindspore/lib/plugin/ascend/custom_compiler/setup.py +0 -255
  329. mindspore/lib/plugin/ascend/custom_compiler/start.sh +0 -26
  330. mindspore/lib/plugin/ascend/custom_compiler/template.json +0 -40
  331. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/acme/include/acme.h +0 -24
  332. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/acme/include/acme_op.h +0 -69
  333. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/acme/include/base_type.h +0 -133
  334. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/acme/include/op_creator.h +0 -32
  335. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/acme/include/op_param.h +0 -35
  336. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/acme/include/tiling_info.h +0 -60
  337. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/acme/src/core/kernel_register.h +0 -37
  338. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/acme/src/core/platform/platform_configs.h +0 -89
  339. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/acme/src/core/platform/rt_funcs.h +0 -135
  340. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/acme/src/ops/host_src/add_op.h +0 -34
  341. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/acme/src/ops/host_src/asd_backoff_base.h +0 -62
  342. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/acme/src/ops/host_src/asd_elewise_op.h +0 -33
  343. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/acme/src/ops/host_src/asd_ops.h +0 -88
  344. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/acme/src/ops/host_src/asd_pa_op.h +0 -45
  345. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/acme/src/ops/host_src/cast_op.h +0 -52
  346. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/acme/src/ops/host_src/matmul_op.h +0 -95
  347. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/acme/src/utils/asd_utils.h +0 -84
  348. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/acme/src/utils/comm_utils.h +0 -61
  349. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/apply_rotary_pos_emb/kernel/apply_rotary_pos_emb_fp32.h +0 -224
  350. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/elewise_binary/and_impl.h +0 -29
  351. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/elewise_binary/div_impl.h +0 -29
  352. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/elewise_binary/elewise_binary_impl.h +0 -48
  353. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/elewise_binary/elewise_binary_tiling.h +0 -25
  354. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/elewise_binary/kernel/and_kernel.h +0 -46
  355. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/elewise_binary/kernel/div_kernel.h +0 -46
  356. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/elewise_binary/kernel/elewise_binary_base.h +0 -260
  357. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/elewise_binary/kernel/elewise_binary_kernel.h +0 -35
  358. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/elewise_binary/kernel/max_kernel.h +0 -66
  359. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/elewise_binary/kernel/min_kernel.h +0 -66
  360. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/elewise_binary/kernel/mul_kernel.h +0 -66
  361. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/elewise_binary/kernel/or_kernel.h +0 -46
  362. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/elewise_binary/max_impl.h +0 -29
  363. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/elewise_binary/min_impl.h +0 -29
  364. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/elewise_binary/mul_impl.h +0 -29
  365. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/elewise_binary/or_impl.h +0 -29
  366. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/elewise_unary/abs_impl.h +0 -29
  367. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/elewise_unary/elewise_unary_impl.h +0 -47
  368. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/elewise_unary/elewise_unary_tiling.h +0 -24
  369. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/elewise_unary/exp_impl.h +0 -29
  370. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/elewise_unary/kernel/abs_kernel.h +0 -45
  371. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/elewise_unary/kernel/elewise_unary_base.h +0 -148
  372. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/elewise_unary/kernel/elewise_unary_kernel.h +0 -31
  373. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/elewise_unary/kernel/exp_kernel.h +0 -45
  374. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/elewise_unary/kernel/ln_kernel.h +0 -45
  375. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/elewise_unary/kernel/not_kernel.h +0 -45
  376. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/elewise_unary/kernel/reciprocal_kernel.h +0 -45
  377. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/elewise_unary/kernel/relu_kernel.h +0 -55
  378. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/elewise_unary/kernel/rsqrt_kernel.h +0 -45
  379. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/elewise_unary/kernel/sqrt_kernel.h +0 -45
  380. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/elewise_unary/ln_impl.h +0 -29
  381. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/elewise_unary/not_impl.h +0 -29
  382. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/elewise_unary/reciprocal_impl.h +0 -29
  383. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/elewise_unary/relu_impl.h +0 -29
  384. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/elewise_unary/rsqrt_impl.h +0 -29
  385. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/elewise_unary/sqrt_impl.h +0 -29
  386. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/grouped_matmul/grouped_matmul_impl.h +0 -45
  387. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/grouped_matmul/grouped_matmul_tiling.h +0 -187
  388. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/grouped_matmul/kernel/grouped_matmul.h +0 -245
  389. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/grouped_matmul/kernel/grouped_matmul_interface.h +0 -24
  390. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/grouped_matmul/kernel/grouped_matmul_utils.h +0 -111
  391. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/grouped_matmul/tiling_data.h +0 -54
  392. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/param/compare_param.h +0 -31
  393. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/param/elewise_param.h +0 -41
  394. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/param/grouped_matmul_param.h +0 -40
  395. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/profiling_util.h +0 -364
  396. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/utils/log/log_utils.h +0 -69
  397. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/utils/register/kernel_creator.h +0 -39
  398. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/utils/register/kernel_registry.h +0 -114
  399. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/utils/utils.h +0 -98
  400. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/op_kernels/ascend910b/MatMulPostFusionMixTactic/matmul_postfusion_mix.json +0 -19
  401. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/op_kernels/ascend910b/MatMulPostFusionMixTactic/matmul_postfusion_mix.o +0 -0
  402. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/op_kernels/ascend910b/MatMulPostFusionMixTactic/matmul_postfusion_mix_mix_aic_0.o +0 -0
  403. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/op_kernels/ascend910b/MatMulPostFusionMixTactic/matmul_postfusion_mix_mix_aiv_0.o +0 -0
  404. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/op_kernels/ascend910b/MultiMatMulPostFusionMixTactic/multi_matmul_postfusion_mix.json +0 -19
  405. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/op_kernels/ascend910b/MultiMatMulPostFusionMixTactic/multi_matmul_postfusion_mix.o +0 -0
  406. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/op_kernels/ascend910b/MultiMatMulPostFusionMixTactic/multi_matmul_postfusion_mix_mix_aic_0.o +0 -0
  407. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/op_kernels/ascend910b/MultiMatMulPostFusionMixTactic/multi_matmul_postfusion_mix_mix_aiv_0.o +0 -0
  408. mindspore/mint/linalg/__init__.py +0 -22
  409. mindspore/nn/layer/embedding_service.py +0 -531
  410. mindspore/nn/layer/embedding_service_layer.py +0 -393
  411. mindspore/ops/function/reshard_func.py +0 -102
  412. mindspore/ops/operations/_infer_ops.py +0 -19
  413. mindspore/ops/operations/reshard_ops.py +0 -53
  414. mindspore/profiler/common/process_pool.py +0 -41
  415. mindspore/profiler/common/singleton.py +0 -28
  416. mindspore/profiler/parser/ascend_integrate_generator.py +0 -42
  417. mindspore/profiler/parser/ascend_memory_generator.py +0 -185
  418. mindspore/train/callback/_cluster_monitor.py +0 -201
  419. mindspore/train/callback/_flops_collector.py +0 -238
  420. mindspore/train/callback/_mindio_ttp.py +0 -443
  421. {mindspore-2.3.0.dist-info → mindspore-2.3.0rc2.dist-info}/WHEEL +0 -0
  422. {mindspore-2.3.0.dist-info → mindspore-2.3.0rc2.dist-info}/entry_points.txt +0 -0
  423. {mindspore-2.3.0.dist-info → mindspore-2.3.0rc2.dist-info}/top_level.txt +0 -0
mindspore/scipy/ops.py CHANGED
@@ -20,11 +20,6 @@ from ..common import dtype as mstype
 
 
 class SolveTriangular():
-    """
-    Solve linear system,(triangular matrix)
-    a * x = b
-    """
-
     def __init__(self, lower: bool = False, unit_diagonal: bool = False, trans: str = 'N'):
         self.lower = lower
         self.unit_diagonal = unit_diagonal
@@ -163,3 +158,6 @@ class LinearSumAssignment(Primitive)
     def __init__(self):
         super().__init__(name="LinearSumAssignment")
         self.init_prim_io_names(inputs=['cost_matrix', 'dimension_limit', 'maximize'], outputs=['row_ind', 'col_ind'])
+
+# pylint: disable=C0413,W0611
+from .ops_grad import get_bprpo_eigh
mindspore/scipy/optimize/__init__.py CHANGED
@@ -13,8 +13,8 @@
 # limitations under the License.
 # ============================================================================
 """Optimize submodule"""
-__all__ = ["minimize", "line_search", "linear_sum_assignment"]
-
 from .minimize import minimize
 from .line_search import line_search
 from .linear_sum_assignment import linear_sum_assignment
+
+__all__ = ["minimize", "line_search", "linear_sum_assignment"]
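Aside on the hunk above: moving __all__ below the imports is behavior-preserving, since __all__ is an ordinary module attribute that Python consults only when another module performs a star-import. A self-contained sketch of that rule (module and function names here are illustrative, not MindSpore code):

    import sys
    import types

    # Fabricate a tiny module shaped like the one above: two top-level functions,
    # with __all__ defined after them and listing only one of the two.
    mod = types.ModuleType("fake_optimize")
    exec(
        "def minimize():\n"
        "    pass\n"
        "def line_search():\n"
        "    pass\n"
        "__all__ = ['minimize']\n",
        mod.__dict__,
    )
    sys.modules["fake_optimize"] = mod

    ns = {}
    exec("from fake_optimize import *", ns)
    print([name for name in ns if not name.startswith("__")])  # ['minimize']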
mindspore/train/__init__.py CHANGED
@@ -27,17 +27,17 @@ from mindspore.train.loss_scale_manager import LossScaleManager, FixedLossScaleM
 from mindspore.train.serialization import save_checkpoint, load_checkpoint, load_param_into_net, export, \
     load, parse_print, build_searched_strategy, merge_sliced_parameter, load_distributed_checkpoint, \
     async_ckpt_thread_status, restore_group_info_list, convert_model, obfuscate_model, export_split_mindir, \
-    load_checkpoint_async, check_checkpoint
+    load_checkpoint_async
 from mindspore.train.callback import Callback, LossMonitor, TimeMonitor, ModelCheckpoint, SummaryCollector, \
-    CheckpointConfig, RunContext, LearningRateScheduler, SummaryLandscape, FlopsUtilizationCollector, \
-    History, LambdaCallback, ReduceLROnPlateau, EarlyStopping, OnRequestExit, BackupAndRestore, MindIOTTPAdapter
+    CheckpointConfig, RunContext, LearningRateScheduler, SummaryLandscape, \
+    History, LambdaCallback, ReduceLROnPlateau, EarlyStopping, OnRequestExit, BackupAndRestore
 from mindspore.train.summary import SummaryRecord
 from mindspore.train.train_thor import ConvertNetUtils, ConvertModelUtils
 from mindspore.train.metrics import *
 from mindspore.train.data_sink import data_sink
 
 __all__ = ["Model", "DatasetHelper", "connect_network_with_dataset", "build_train_network", "LossScaleManager",
-           "FixedLossScaleManager", "DynamicLossScaleManager", "save_checkpoint", "load_checkpoint", "check_checkpoint",
+           "FixedLossScaleManager", "DynamicLossScaleManager", "save_checkpoint", "load_checkpoint",
            "load_param_into_net", "export", "load", "export_split_mindir", "parse_print", "build_searched_strategy",
            "merge_sliced_parameter", "load_distributed_checkpoint", "async_ckpt_thread_status",
            "restore_group_info_list", "convert_model", "data_sink", "obfuscate_model", "load_checkpoint_async"]
mindspore/train/anf_ir_pb2.py CHANGED
@@ -20,7 +20,7 @@ DESCRIPTOR = _descriptor.FileDescriptor(
   syntax='proto2',
   serialized_options=None,
   create_key=_descriptor._internal_create_key,
- serialized_pb=b'\n\x0c\x61nf_ir.proto\x12\x0emindspore.irpb\"\xdb\x04\n\nValueProto\x12\'\n\x05\x64type\x18\x01 \x01(\x0e\x32\x18.mindspore.irpb.DataType\x12\x10\n\x08\x62ool_val\x18\x02 \x01(\x08\x12\x0f\n\x07int_val\x18\x03 \x01(\x03\x12\x10\n\x08uint_val\x18\x04 \x01(\x04\x12\x11\n\tfloat_val\x18\x05 \x01(\x02\x12\x12\n\ndouble_val\x18\x06 \x01(\x01\x12\x0f\n\x07str_val\x18\x07 \x01(\t\x12/\n\ntensor_val\x18\x08 \x01(\x0b\x32\x1b.mindspore.irpb.TensorProto\x12)\n\x05graph\x18\t \x01(\x0b\x32\x1a.mindspore.irpb.GraphProto\x12\x11\n\tbool_vals\x18\n \x03(\x08\x12\x10\n\x08int_vals\x18\x0b \x03(\x03\x12\x11\n\tuint_vals\x18\x0c \x03(\x04\x12\x12\n\nfloat_vals\x18\r \x03(\x02\x12\x13\n\x0b\x64ouble_vals\x18\x0e \x03(\x01\x12\x10\n\x08str_vals\x18\x0f \x03(\t\x12\x30\n\x0btensor_vals\x18\x10 \x03(\x0b\x32\x1b.mindspore.irpb.TensorProto\x12*\n\x06graphs\x18\x11 \x03(\x0b\x32\x1a.mindspore.irpb.GraphProto\x12*\n\x06values\x18\x12 \x03(\x0b\x32\x1a.mindspore.irpb.ValueProto\x12\x31\n\x08\x64ict_val\x18\x13 \x03(\x0b\x32\x1f.mindspore.irpb.NamedValueProto\x12+\n\x08type_val\x18\x14 \x01(\x0b\x32\x19.mindspore.irpb.TypeProto\"I\n\x0e\x41ttributeProto\x12\x0c\n\x04name\x18\x01 \x01(\t\x12)\n\x05value\x18\x02 \x01(\x0b\x32\x1a.mindspore.irpb.ValueProto\"I\n\x0fNamedValueProto\x12\x0b\n\x03key\x18\x01 \x01(\t\x12)\n\x05value\x18\x02 \x01(\x0b\x32\x1a.mindspore.irpb.ValueProto\"t\n\x10TensorShapeProto\x12\x37\n\x03\x64im\x18\x01 \x03(\x0b\x32*.mindspore.irpb.TensorShapeProto.Dimension\x1a\'\n\tDimension\x12\x0c\n\x04size\x18\x01 \x01(\x03\x12\x0c\n\x04name\x18\x02 \x01(\t\"\xda\x02\n\tTypeProto\x12+\n\tdata_type\x18\x01 \x01(\x0e\x32\x18.mindspore.irpb.DataType\x12\x37\n\x0btensor_type\x18\x02 \x01(\x0b\x32 .mindspore.irpb.TypeProto.TensorH\x00\x12;\n\rsequence_type\x18\x03 \x01(\x0b\x32\".mindspore.irpb.TypeProto.SequenceH\x00\x1a\x66\n\x06Tensor\x12+\n\telem_type\x18\x01 \x01(\x0e\x32\x18.mindspore.irpb.DataType\x12/\n\x05shape\x18\x02 \x01(\x0b\x32 .mindspore.irpb.TensorShapeProto\x1a\x39\n\x08Sequence\x12-\n\nelem_types\x18\x01 \x03(\x0b\x32\x19.mindspore.irpb.TypeProtoB\x07\n\x05value\"x\n\x0eParameterProto\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\'\n\x04type\x18\x02 \x01(\x0b\x32\x19.mindspore.irpb.TypeProto\x12/\n\x0b\x64\x65\x66\x61ult_val\x18\x03 \x01(\x0b\x32\x1a.mindspore.irpb.ValueProto\"D\n\x0bOutputProto\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\'\n\x04type\x18\x02 \x01(\x0b\x32\x19.mindspore.irpb.TypeProto\"z\n\nInputProto\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x31\n\x04type\x18\x02 \x01(\x0e\x32#.mindspore.irpb.InputProto.EdgeType\"+\n\x08\x45\x64geType\x12\r\n\tDATA_EDGE\x10\x00\x12\x10\n\x0c\x43ONTROL_EDGE\x10\x01\"\x83\x02\n\tNodeProto\x12)\n\x05input\x18\x01 \x03(\x0b\x32\x1a.mindspore.irpb.InputProto\x12\x0c\n\x04name\x18\x02 \x01(\t\x12\x0f\n\x07op_type\x18\x03 \x01(\t\x12\r\n\x05scope\x18\x04 \x01(\t\x12\x31\n\tattribute\x18\x05 \x03(\x0b\x32\x1e.mindspore.irpb.AttributeProto\x12.\n\x0boutput_type\x18\x06 \x01(\x0b\x32\x19.mindspore.irpb.TypeProto\x12\x10\n\x08output_i\x18\x07 \x01(\x04\x12\x11\n\tfull_name\x18\x08 \x01(\t\x12\x15\n\rinstance_name\x18\n \x01(\t\"\xb0\x01\n\nModelProto\x12\x12\n\nir_version\x18\x01 \x01(\x03\x12\x0e\n\x06\x64omain\x18\x02 \x01(\t\x12\x15\n\rmodel_version\x18\x03 \x01(\x03\x12)\n\x05graph\x18\x04 \x01(\x0b\x32\x1a.mindspore.irpb.GraphProto\x12<\n\x12metadata_operators\x18\x05 \x01(\x0b\x32 .mindspore.irpb.OperatorSetProto\"?\n\rOperatorProto\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x0e\n\x06\x63onfig\x18\x02 \x01(\x0c\x12\x10\n\x08obj_info\x18\x03 
\x01(\x0c\"U\n\x10OperatorSetProto\x12\x30\n\toperators\x18\x01 \x03(\x0b\x32\x1d.mindspore.irpb.OperatorProto\x12\x0f\n\x07summary\x18\x02 \x01(\t\"\xda\x01\n\nGraphProto\x12\'\n\x04node\x18\x01 \x03(\x0b\x32\x19.mindspore.irpb.NodeProto\x12\x0c\n\x04name\x18\x02 \x01(\t\x12\x32\n\nparameters\x18\x03 \x03(\x0b\x32\x1e.mindspore.irpb.ParameterProto\x12,\n\x07outputs\x18\x04 \x03(\x0b\x32\x1b.mindspore.irpb.OutputProto\x12\x33\n\nconst_vals\x18\x05 \x03(\x0b\x32\x1f.mindspore.irpb.NamedValueProto\"\xd4\x01\n\x0bTensorProto\x12\x0c\n\x04\x64ims\x18\x01 \x03(\x03\x12+\n\tdata_type\x18\x02 \x01(\x0e\x32\x18.mindspore.irpb.DataType\x12\x16\n\nfloat_data\x18\x03 \x03(\x02\x42\x02\x10\x01\x12\x16\n\nint32_data\x18\x04 \x03(\x05\x42\x02\x10\x01\x12\x16\n\nint64_data\x18\x05 \x03(\x03\x42\x02\x10\x01\x12\x17\n\x0b\x64ouble_data\x18\x06 \x03(\x01\x42\x02\x10\x01\x12\x17\n\x0buint64_data\x18\x07 \x03(\x04\x42\x02\x10\x01\x12\x10\n\x08raw_data\x18\x08 \x01(\x0c*/\n\x07Version\x12\x14\n\x10UNKNOWWN_VERSION\x10\x00\x12\x0e\n\nIR_VERSION\x10\x01*\x89\x06\n\x08\x44\x61taType\x12\x10\n\x0c\x44T_UNDEFINED\x10\x00\x12\x0b\n\x07\x44T_BOOL\x10\x01\x12\x0b\n\x07\x44T_INT8\x10\x02\x12\x0c\n\x08\x44T_INT16\x10\x03\x12\x0c\n\x08\x44T_INT32\x10\x04\x12\x0c\n\x08\x44T_INT64\x10\x05\x12\x0c\n\x08\x44T_UINT8\x10\x06\x12\r\n\tDT_UINT16\x10\x07\x12\r\n\tDT_UINT32\x10\x08\x12\r\n\tDT_UINT64\x10\t\x12\x0e\n\nDT_FLOAT16\x10\n\x12\x0e\n\nDT_FLOAT32\x10\x0b\x12\x0e\n\nDT_FLOAT64\x10\x0c\x12\r\n\tDT_STRING\x10\r\x12\r\n\tDT_TENSOR\x10\x0e\x12\x0c\n\x08\x44T_GRAPH\x10\x0f\x12\x0c\n\x08\x44T_BOOLS\x10\x10\x12\x0c\n\x08\x44T_INTS8\x10\x11\x12\r\n\tDT_INTS16\x10\x12\x12\r\n\tDT_INTS32\x10\x13\x12\r\n\tDT_INTS64\x10\x14\x12\r\n\tDT_UINTS8\x10\x15\x12\x0e\n\nDT_UINTS16\x10\x16\x12\x0e\n\nDT_UINTS32\x10\x17\x12\x0e\n\nDT_UINTS64\x10\x18\x12\x0f\n\x0b\x44T_FLOATS16\x10\x19\x12\x0f\n\x0b\x44T_FLOATS32\x10\x1a\x12\x0f\n\x0b\x44T_FLOATS64\x10\x1b\x12\x0e\n\nDT_STRINGS\x10\x1c\x12\x0e\n\nDT_TENSORS\x10\x1d\x12\r\n\tDT_GRAPHS\x10\x1e\x12\x0c\n\x08\x44T_TUPLE\x10\x1f\x12\x0b\n\x07\x44T_LIST\x10 \x12\x0b\n\x07\x44T_DICT\x10!\x12\x0b\n\x07\x44T_NONE\x10\"\x12\x0f\n\x0b\x44T_SYM_INST\x10#\x12\x0f\n\x0b\x44T_BASE_INT\x10$\x12\x10\n\x0c\x44T_BASE_UINT\x10%\x12\x11\n\rDT_BASE_FLOAT\x10&\x12\x0b\n\x07\x44T_TYPE\x10\'\x12\n\n\x06\x44T_ANY\x10(\x12\r\n\tDT_REFKEY\x10)\x12\n\n\x06\x44T_REF\x10*\x12\x10\n\x0c\x44T_COMPLEX64\x10+\x12\x11\n\rDT_COMPLEX128\x10,\x12\x13\n\x0f\x44T_BASE_COMPLEX\x10-\x12\x0f\n\x0b\x44T_BFLOAT16\x10.\x12\x10\n\x0c\x44T_BFLOATS16\x10/\x12\x0b\n\x07\x44T_INT4\x10\x30\x12\x0c\n\x08\x44T_SLICE\x10\x31'
+ serialized_pb=b'\n\x0c\x61nf_ir.proto\x12\x0emindspore.irpb\"\xdb\x04\n\nValueProto\x12\'\n\x05\x64type\x18\x01 \x01(\x0e\x32\x18.mindspore.irpb.DataType\x12\x10\n\x08\x62ool_val\x18\x02 \x01(\x08\x12\x0f\n\x07int_val\x18\x03 \x01(\x03\x12\x10\n\x08uint_val\x18\x04 \x01(\x04\x12\x11\n\tfloat_val\x18\x05 \x01(\x02\x12\x12\n\ndouble_val\x18\x06 \x01(\x01\x12\x0f\n\x07str_val\x18\x07 \x01(\t\x12/\n\ntensor_val\x18\x08 \x01(\x0b\x32\x1b.mindspore.irpb.TensorProto\x12)\n\x05graph\x18\t \x01(\x0b\x32\x1a.mindspore.irpb.GraphProto\x12\x11\n\tbool_vals\x18\n \x03(\x08\x12\x10\n\x08int_vals\x18\x0b \x03(\x03\x12\x11\n\tuint_vals\x18\x0c \x03(\x04\x12\x12\n\nfloat_vals\x18\r \x03(\x02\x12\x13\n\x0b\x64ouble_vals\x18\x0e \x03(\x01\x12\x10\n\x08str_vals\x18\x0f \x03(\t\x12\x30\n\x0btensor_vals\x18\x10 \x03(\x0b\x32\x1b.mindspore.irpb.TensorProto\x12*\n\x06graphs\x18\x11 \x03(\x0b\x32\x1a.mindspore.irpb.GraphProto\x12*\n\x06values\x18\x12 \x03(\x0b\x32\x1a.mindspore.irpb.ValueProto\x12\x31\n\x08\x64ict_val\x18\x13 \x03(\x0b\x32\x1f.mindspore.irpb.NamedValueProto\x12+\n\x08type_val\x18\x14 \x01(\x0b\x32\x19.mindspore.irpb.TypeProto\"I\n\x0e\x41ttributeProto\x12\x0c\n\x04name\x18\x01 \x01(\t\x12)\n\x05value\x18\x02 \x01(\x0b\x32\x1a.mindspore.irpb.ValueProto\"I\n\x0fNamedValueProto\x12\x0b\n\x03key\x18\x01 \x01(\t\x12)\n\x05value\x18\x02 \x01(\x0b\x32\x1a.mindspore.irpb.ValueProto\"t\n\x10TensorShapeProto\x12\x37\n\x03\x64im\x18\x01 \x03(\x0b\x32*.mindspore.irpb.TensorShapeProto.Dimension\x1a\'\n\tDimension\x12\x0c\n\x04size\x18\x01 \x01(\x03\x12\x0c\n\x04name\x18\x02 \x01(\t\"\xda\x02\n\tTypeProto\x12+\n\tdata_type\x18\x01 \x01(\x0e\x32\x18.mindspore.irpb.DataType\x12\x37\n\x0btensor_type\x18\x02 \x01(\x0b\x32 .mindspore.irpb.TypeProto.TensorH\x00\x12;\n\rsequence_type\x18\x03 \x01(\x0b\x32\".mindspore.irpb.TypeProto.SequenceH\x00\x1a\x66\n\x06Tensor\x12+\n\telem_type\x18\x01 \x01(\x0e\x32\x18.mindspore.irpb.DataType\x12/\n\x05shape\x18\x02 \x01(\x0b\x32 .mindspore.irpb.TensorShapeProto\x1a\x39\n\x08Sequence\x12-\n\nelem_types\x18\x01 \x03(\x0b\x32\x19.mindspore.irpb.TypeProtoB\x07\n\x05value\"x\n\x0eParameterProto\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\'\n\x04type\x18\x02 \x01(\x0b\x32\x19.mindspore.irpb.TypeProto\x12/\n\x0b\x64\x65\x66\x61ult_val\x18\x03 \x01(\x0b\x32\x1a.mindspore.irpb.ValueProto\"D\n\x0bOutputProto\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\'\n\x04type\x18\x02 \x01(\x0b\x32\x19.mindspore.irpb.TypeProto\"z\n\nInputProto\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x31\n\x04type\x18\x02 \x01(\x0e\x32#.mindspore.irpb.InputProto.EdgeType\"+\n\x08\x45\x64geType\x12\r\n\tDATA_EDGE\x10\x00\x12\x10\n\x0c\x43ONTROL_EDGE\x10\x01\"\x83\x02\n\tNodeProto\x12)\n\x05input\x18\x01 \x03(\x0b\x32\x1a.mindspore.irpb.InputProto\x12\x0c\n\x04name\x18\x02 \x01(\t\x12\x0f\n\x07op_type\x18\x03 \x01(\t\x12\r\n\x05scope\x18\x04 \x01(\t\x12\x31\n\tattribute\x18\x05 \x03(\x0b\x32\x1e.mindspore.irpb.AttributeProto\x12.\n\x0boutput_type\x18\x06 \x01(\x0b\x32\x19.mindspore.irpb.TypeProto\x12\x10\n\x08output_i\x18\x07 \x01(\x04\x12\x11\n\tfull_name\x18\x08 \x01(\t\x12\x15\n\rinstance_name\x18\n \x01(\t\"\xb0\x01\n\nModelProto\x12\x12\n\nir_version\x18\x01 \x01(\x03\x12\x0e\n\x06\x64omain\x18\x02 \x01(\t\x12\x15\n\rmodel_version\x18\x03 \x01(\x03\x12)\n\x05graph\x18\x04 \x01(\x0b\x32\x1a.mindspore.irpb.GraphProto\x12<\n\x12metadata_operators\x18\x05 \x01(\x0b\x32 .mindspore.irpb.OperatorSetProto\"?\n\rOperatorProto\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x0e\n\x06\x63onfig\x18\x02 \x01(\x0c\x12\x10\n\x08obj_info\x18\x03 
\x01(\x0c\"U\n\x10OperatorSetProto\x12\x30\n\toperators\x18\x01 \x03(\x0b\x32\x1d.mindspore.irpb.OperatorProto\x12\x0f\n\x07summary\x18\x02 \x01(\t\"\xda\x01\n\nGraphProto\x12\'\n\x04node\x18\x01 \x03(\x0b\x32\x19.mindspore.irpb.NodeProto\x12\x0c\n\x04name\x18\x02 \x01(\t\x12\x32\n\nparameters\x18\x03 \x03(\x0b\x32\x1e.mindspore.irpb.ParameterProto\x12,\n\x07outputs\x18\x04 \x03(\x0b\x32\x1b.mindspore.irpb.OutputProto\x12\x33\n\nconst_vals\x18\x05 \x03(\x0b\x32\x1f.mindspore.irpb.NamedValueProto\"\xd4\x01\n\x0bTensorProto\x12\x0c\n\x04\x64ims\x18\x01 \x03(\x03\x12+\n\tdata_type\x18\x02 \x01(\x0e\x32\x18.mindspore.irpb.DataType\x12\x16\n\nfloat_data\x18\x03 \x03(\x02\x42\x02\x10\x01\x12\x16\n\nint32_data\x18\x04 \x03(\x05\x42\x02\x10\x01\x12\x16\n\nint64_data\x18\x05 \x03(\x03\x42\x02\x10\x01\x12\x17\n\x0b\x64ouble_data\x18\x06 \x03(\x01\x42\x02\x10\x01\x12\x17\n\x0buint64_data\x18\x07 \x03(\x04\x42\x02\x10\x01\x12\x10\n\x08raw_data\x18\x08 \x01(\x0c*/\n\x07Version\x12\x14\n\x10UNKNOWWN_VERSION\x10\x00\x12\x0e\n\nIR_VERSION\x10\x01*\xfb\x05\n\x08\x44\x61taType\x12\x10\n\x0c\x44T_UNDEFINED\x10\x00\x12\x0b\n\x07\x44T_BOOL\x10\x01\x12\x0b\n\x07\x44T_INT8\x10\x02\x12\x0c\n\x08\x44T_INT16\x10\x03\x12\x0c\n\x08\x44T_INT32\x10\x04\x12\x0c\n\x08\x44T_INT64\x10\x05\x12\x0c\n\x08\x44T_UINT8\x10\x06\x12\r\n\tDT_UINT16\x10\x07\x12\r\n\tDT_UINT32\x10\x08\x12\r\n\tDT_UINT64\x10\t\x12\x0e\n\nDT_FLOAT16\x10\n\x12\x0e\n\nDT_FLOAT32\x10\x0b\x12\x0e\n\nDT_FLOAT64\x10\x0c\x12\r\n\tDT_STRING\x10\r\x12\r\n\tDT_TENSOR\x10\x0e\x12\x0c\n\x08\x44T_GRAPH\x10\x0f\x12\x0c\n\x08\x44T_BOOLS\x10\x10\x12\x0c\n\x08\x44T_INTS8\x10\x11\x12\r\n\tDT_INTS16\x10\x12\x12\r\n\tDT_INTS32\x10\x13\x12\r\n\tDT_INTS64\x10\x14\x12\r\n\tDT_UINTS8\x10\x15\x12\x0e\n\nDT_UINTS16\x10\x16\x12\x0e\n\nDT_UINTS32\x10\x17\x12\x0e\n\nDT_UINTS64\x10\x18\x12\x0f\n\x0b\x44T_FLOATS16\x10\x19\x12\x0f\n\x0b\x44T_FLOATS32\x10\x1a\x12\x0f\n\x0b\x44T_FLOATS64\x10\x1b\x12\x0e\n\nDT_STRINGS\x10\x1c\x12\x0e\n\nDT_TENSORS\x10\x1d\x12\r\n\tDT_GRAPHS\x10\x1e\x12\x0c\n\x08\x44T_TUPLE\x10\x1f\x12\x0b\n\x07\x44T_LIST\x10 \x12\x0b\n\x07\x44T_DICT\x10!\x12\x0b\n\x07\x44T_NONE\x10\"\x12\x0f\n\x0b\x44T_SYM_INST\x10#\x12\x0f\n\x0b\x44T_BASE_INT\x10$\x12\x10\n\x0c\x44T_BASE_UINT\x10%\x12\x11\n\rDT_BASE_FLOAT\x10&\x12\x0b\n\x07\x44T_TYPE\x10\'\x12\n\n\x06\x44T_ANY\x10(\x12\r\n\tDT_REFKEY\x10)\x12\n\n\x06\x44T_REF\x10*\x12\x10\n\x0c\x44T_COMPLEX64\x10+\x12\x11\n\rDT_COMPLEX128\x10,\x12\x13\n\x0f\x44T_BASE_COMPLEX\x10-\x12\x0f\n\x0b\x44T_BFLOAT16\x10.\x12\x10\n\x0c\x44T_BFLOATS16\x10/\x12\x0b\n\x07\x44T_INT4\x10\x30'
 )
 
 _VERSION = _descriptor.EnumDescriptor(
@@ -301,16 +301,11 @@ _DATATYPE = _descriptor.EnumDescriptor(
       serialized_options=None,
       type=None,
       create_key=_descriptor._internal_create_key),
-    _descriptor.EnumValueDescriptor(
-      name='DT_SLICE', index=49, number=49,
-      serialized_options=None,
-      type=None,
-      create_key=_descriptor._internal_create_key),
   ],
   containing_type=None,
   serialized_options=None,
   serialized_start=2650,
-  serialized_end=3427,
+  serialized_end=3413,
 )
 _sym_db.RegisterEnumDescriptor(_DATATYPE)
 
@@ -366,7 +361,6 @@ DT_BASE_COMPLEX = 45
 DT_BFLOAT16 = 46
 DT_BFLOATS16 = 47
 DT_INT4 = 48
-DT_SLICE = 49
 
 
 _INPUTPROTO_EDGETYPE = _descriptor.EnumDescriptor(
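Worked check of the descriptor arithmetic above: dropping DT_SLICE = 49 removes 14 bytes from the serialized DataType enum (a 2-byte value-field header, a 2-byte name header, the 8-byte name "DT_SLICE", and a 2-byte number field encoding 49). That matches both the enum's varint length prefix shrinking from \x89\x06 (777) to \xfb\x05 (763) inside serialized_pb and serialized_end dropping from 3427 to 3413.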
mindspore/train/callback/__init__.py CHANGED
@@ -35,10 +35,7 @@ from mindspore.train.callback._early_stop import EarlyStopping
 from mindspore.train.callback._reduce_lr_on_plateau import ReduceLROnPlateau
 from mindspore.train.callback._on_request_exit import OnRequestExit
 from mindspore.train.callback._backup_and_restore import BackupAndRestore
-from mindspore.train.callback._flops_collector import FlopsUtilizationCollector
-from mindspore.train.callback._mindio_ttp import MindIOTTPAdapter
 
-__all__ = ["Callback", "LossMonitor", "TimeMonitor", "ModelCheckpoint", "FlopsUtilizationCollector",
+__all__ = ["Callback", "LossMonitor", "TimeMonitor", "ModelCheckpoint",
            "SummaryCollector", "CheckpointConfig", "RunContext", "LearningRateScheduler", "SummaryLandscape",
-           "History", "LambdaCallback", "ReduceLROnPlateau", "EarlyStopping", "OnRequestExit", "BackupAndRestore",
-           "MindIOTTPAdapter"]
+           "History", "LambdaCallback", "ReduceLROnPlateau", "EarlyStopping", "OnRequestExit", "BackupAndRestore"]
mindspore/train/callback/_backup_and_restore.py CHANGED
@@ -34,10 +34,10 @@ class BackupAndRestore(Callback):
 
     Args:
         backup_dir (str): Path to store and load the checkpoint file.
-        save_freq (Union["epoch", int]): When set to ``"epoch"`` the callback saves the checkpoint at the end of
+        save_freq(Union["epoch", int]): When set to ``"epoch"`` the callback saves the checkpoint at the end of
             each epoch. When set to an integer, the callback saves the checkpoint
             every `save_freq` epoch. Default: ``"epoch"`` .
-        delete_checkpoint (bool): If `delete_checkpoint=True`, the checkpoint will be deleted after
+        delete_checkpoint(bool): If `delete_checkpoint=True`, the checkpoint will be deleted after
             training is finished. Default: ``True`` .
 
     Raises:
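For context, a minimal usage sketch built only from the arguments documented in the hunk above; the commented-out training call is a placeholder, since the surrounding model and dataset are not part of this diff:

    from mindspore.train.callback import BackupAndRestore

    # Save a checkpoint at the end of every epoch and delete it once training
    # finishes, matching the documented defaults for save_freq and delete_checkpoint.
    backup_cb = BackupAndRestore(backup_dir="./backup", save_freq="epoch", delete_checkpoint=True)
    # model.train(epoch=3, train_dataset=dataset, callbacks=[backup_cb])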
mindspore/train/callback/_checkpoint.py CHANGED
@@ -28,15 +28,9 @@ from mindspore.train._utils import _make_directory
 from mindspore.train.serialization import save_checkpoint, _save_graph
 from mindspore.parallel._cell_wrapper import destroy_allgather_cell
 from mindspore.parallel._recovery_context import _set_recovery_context, _get_recovery_context
-from mindspore.parallel._auto_parallel_context import _get_auto_parallel_context
-from mindspore.parallel._utils import _get_device_num
-from mindspore.communication.management import get_rank
-from mindspore.train._utils import get_parameter_redundancy, remove_param_redundancy
 from mindspore.train.callback._callback import Callback, set_cur_net
 from mindspore.common.tensor import Tensor
 from mindspore.common.parameter import Parameter
-from mindspore.common.generator import Generator
-from mindspore.common.api import _cell_graph_executor
 from mindspore._c_expression import _collect_host_info
 
 
@@ -45,29 +39,6 @@ SAVE_DIR = _cur_dir
 _info_list = ["epoch_num", "step_num"]
 
 
-def _get_dp_tp_from_redundancy(redundancy_tuple):
-    """From redundancy get dp and tp"""
-    dp = []
-    tp = []
-    for dp_value in redundancy_tuple:
-        dp.append(list(dp_value))
-    for i in range(len(redundancy_tuple[0])):
-        tp.append([v[i] for v in redundancy_tuple])
-    return dp, tp
-
-
-def _get_dp_tp_from_layout(parameter_redundancy_dict):
-    """From layout dict get dp and tp"""
-    tp = []
-    dp = []
-    value_len = 0
-    for _, value in parameter_redundancy_dict.items():
-        if len(value) > value_len:
-            value_len = len(value)
-            dp, tp = _get_dp_tp_from_redundancy(value)
-    return dp, tp
-
-
 def _chg_ckpt_file_name_if_same_exist(directory, prefix, exception=False):
     """Check if there is a file with the same name."""
     if callable(prefix) or callable(directory):
@@ -103,15 +74,14 @@ class CheckpointConfig:
103
74
  The configuration of model checkpoint.
104
75
 
105
76
  Note:
106
- - During the training process, if dataset is transmitted through the data channel,
107
- it is suggested to set 'save_checkpoint_steps' to an integer multiple of loop_size.
108
- Otherwise, the time to save the checkpoint may be biased.
109
- It is recommended to set only one save strategy and one keep strategy at the same time.
110
- If both `save_checkpoint_steps` and `save_checkpoint_seconds` are set,
111
- `save_checkpoint_seconds` will be invalid.
112
- If both `keep_checkpoint_max` and `keep_checkpoint_per_n_minutes` are set,
113
- `keep_checkpoint_per_n_minutes` will be invalid.
114
- - The `enc_mode` and `crc_check` parameters are mutually exclusive and cannot be configured simultaneously.
77
+ During the training process, if dataset is transmitted through the data channel,
78
+ it is suggested to set 'save_checkpoint_steps' to an integer multiple of loop_size.
79
+ Otherwise, the time to save the checkpoint may be biased.
80
+ It is recommended to set only one save strategy and one keep strategy at the same time.
81
+ If both `save_checkpoint_steps` and `save_checkpoint_seconds` are set,
82
+ `save_checkpoint_seconds` will be invalid.
83
+ If both `keep_checkpoint_max` and `keep_checkpoint_per_n_minutes` are set,
84
+ `keep_checkpoint_per_n_minutes` will be invalid.
115
85
 
116
86
  Args:
117
87
  save_checkpoint_steps (int): Steps to save checkpoint. Default: ``1`` .
@@ -134,8 +104,6 @@ class CheckpointConfig:
          enc_mode (str): This parameter is valid only when enc_key is not set to None. Specifies the encryption
              mode, currently supports 'AES-GCM', 'AES-CBC' and 'SM4-CBC'. Default: ``'AES-GCM'`` .
          exception_save (bool): Whether to save the current checkpoint when an exception occurs. Default: ``False`` .
-         crc_check (bool): Whether to perform crc32 calculation when saving checkpoint and save the calculation
-             result to the end of ckpt. Default: ``False`` .
          kwargs (dict): Configuration options dictionary.

      Raises:
@@ -158,9 +126,11 @@ class CheckpointConfig:
          >>> config.save_checkpoint_steps
          1
          >>> config.save_checkpoint_seconds
+         100
          >>> config.keep_checkpoint_max
          5
          >>> config.keep_checkpoint_per_n_minutes
+         5
          >>> config.integrated_save
          True
          >>> config.async_save
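As editorial context for the docstring hunks above, a hedged usage sketch of the public API that is shared by both versions (apart from the removed `crc_check` option): configure one save strategy and one keep strategy, since a step-based setting overrides a second-based one.

    from mindspore.train import CheckpointConfig, ModelCheckpoint

    # Sketch only: step-based saving with a bounded number of retained files.
    config = CheckpointConfig(save_checkpoint_steps=160, keep_checkpoint_max=5)
    ckpt_cb = ModelCheckpoint(prefix="lenet", directory="./checkpoints", config=config)
    # ckpt_cb is then passed in the callbacks list of Model.train().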
@@ -187,7 +157,6 @@ class CheckpointConfig:
                   enc_key=None,
                   enc_mode='AES-GCM',
                   exception_save=False,
-                  crc_check=False,
                   **kwargs):

          if save_checkpoint_steps is not None:
@@ -230,9 +199,7 @@ class CheckpointConfig:
          self._append_dict = self._handle_append_info(append_info)
          self._enc_key = Validator.check_isinstance('enc_key', enc_key, (type(None), bytes))
          self._enc_mode = Validator.check_isinstance('enc_mode', enc_mode, str)
-         self._crc_check = Validator.check_isinstance('crc_check', crc_check, bool)
          self._map_param_inc = kwargs.get('incremental', False)
-         self.enable_redundance = kwargs.get('enable_redundance', False)

      @property
      def save_checkpoint_steps(self):
@@ -323,16 +290,6 @@ class CheckpointConfig:
          """
          return self._enc_mode

-     @property
-     def crc_check(self):
-         """
-         Get the value of the whether to enable crc check.
-
-         Returns:
-             bool, whether to enable crc check.
-         """
-         return self._crc_check
-
      @property
      def append_dict(self):
          """
@@ -398,8 +355,7 @@ class CheckpointConfig:
                  raise TypeError(f"For 'CheckpointConfig', the element of 'append_info' must has only one dict, "
                                  "but got {dict_num}")
              for key, value in element.items():
-                 if isinstance(key, str) and isinstance(value,
-                                                        (int, float, bool, str, Parameter, Tensor, Generator)):
+                 if isinstance(key, str) and isinstance(value, (int, float, bool, str, Parameter, Tensor)):
                      handle_append_info[key] = value
                  else:
                      raise TypeError(f"For 'CheckpointConfig', the key type of the dict 'append_info' "
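Editorial sketch of the `append_info` contract checked above (hedged, not quoted from either version's docs): list items are either reserved strings or single-key dicts whose values are int, float, bool, str, Parameter or Tensor; only the 2.3.0 side additionally accepts Generator.

    from mindspore.train import CheckpointConfig

    # Sketch only: record epoch/step plus a custom scalar in every checkpoint.
    config = CheckpointConfig(save_checkpoint_steps=100,
                              append_info=["epoch_num", "step_num", {"learning_rate": 0.01}])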
@@ -492,13 +448,8 @@ class ModelCheckpoint(Callback):
                              "but got {}.".format(type(config)))
          self._config = config

-         self._aiturbo_init_flag = os.getenv("AITURBO") == "1"
          # get existing checkpoint files
-         if self._aiturbo_init_flag:
-             import aiturbo
-             self._manager = aiturbo.CheckpointShmManager()
-         else:
-             self._manager = CheckpointManager()
+         self._manager = CheckpointManager()
          if not callable(directory) and not callable(prefix):
              self._prefix = _chg_ckpt_file_name_if_same_exist(self._directory, self._prefix)
          self._append_dict = self._config.append_dict or {}
@@ -516,28 +467,6 @@ class ModelCheckpoint(Callback):
              run_context (RunContext): Context of the train running.
          """
          cb_params = run_context.original_args()
-         if self._aiturbo_init_flag:
-             import aiturbo
-             ckpt_storage_path = self._directory
-             rank_id = get_rank()
-             stage_num = _get_auto_parallel_context("pipeline_stages")
-             stage_rank_num = _get_device_num() // stage_num
-             param_layout = cb_params.train_network.parameter_layout_dict
-             if not param_layout:
-                 layout = {"stage_num": stage_num, "stage_rank_num": stage_rank_num, "stage_layout": None}
-                 aiturbo.init(ckpt_storage_path, rank_id, layout, None, False, None)
-             else:
-                 device_num = _get_device_num()
-                 chunk_size = device_num // stage_num
-                 initial_rank = (rank_id // chunk_size) * chunk_size
-                 param_redundancy_dict = get_parameter_redundancy(param_layout, initial_rank)
-                 dp, _ = _get_dp_tp_from_layout(param_redundancy_dict)
-                 layout = {"stage_num": stage_num, "stage_rank_num": stage_rank_num,
-                           "stage_layout": param_redundancy_dict}
-                 single_params = remove_param_redundancy(param_redundancy_dict)
-                 single_params = {device_id: list(params) for device_id, params in single_params.items()}
-                 aiturbo.init(ckpt_storage_path, rank_id, layout, single_params, self._config.enable_redundance, dp)
-             self._aiturbo_init_flag = False
          if self._prefix_func:
              self._prefix = self._prefix_func(cb_params)
              if not isinstance(self._prefix, str) or self._prefix.find('/') >= 0:
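A worked example (editorial, pure Python) of the rank bucketing in the removed AITURBO branch above: ranks are grouped per pipeline stage before parameter redundancy is analysed.

    # Illustrative arithmetic only, mirroring the removed lines.
    device_num, stage_num, rank_id = 16, 2, 11
    chunk_size = device_num // stage_num            # 8 ranks per pipeline stage
    initial_rank = (rank_id // chunk_size) * chunk_size
    print(chunk_size, initial_rank)                 # 8 8 -> rank 11 belongs to the stage starting at rank 8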
@@ -642,22 +571,15 @@ class ModelCheckpoint(Callback):
              if context.get_context("enable_ge") and os.getenv('MS_DISABLE_REF_MODE') \
                      and context.get_context("mode") == context.GRAPH_MODE:
                  set_cur_net(cb_params.train_network)
-                 cb_params.train_network.add_flags(ge_sync_data=True)
-                 _cell_graph_executor(cb_params.train_network, phase='save')
+                 cb_params.train_network.exec_checkpoint_graph()
              if "epoch_num" in self._append_dict:
                  self._append_dict["epoch_num"] = self._append_epoch_num + cb_params.cur_epoch_num
              if "step_num" in self._append_dict:
                  self._append_dict["step_num"] = self._append_step_num + cb_params.cur_step_num
              network = self._config.saved_network if self._config.saved_network is not None else cb_params.train_network
-             if os.getenv("AITURBO") == "1":
-                 save_checkpoint(network, cur_file, self._config.integrated_save, self._config.async_save,
-                                 self._append_dict, self._config.enc_key, self._config.enc_mode,
-                                 crc_check=self._config.crc_check, incremental=self._map_param_inc,
-                                 global_step_num=cb_params.cur_step_num)
-             else:
-                 save_checkpoint(network, cur_file, self._config.integrated_save, self._config.async_save,
-                                 self._append_dict, self._config.enc_key, self._config.enc_mode,
-                                 crc_check=self._config.crc_check, incremental=self._map_param_inc)
+             save_checkpoint(network, cur_file, self._config.integrated_save, self._config.async_save,
+                             self._append_dict, self._config.enc_key, self._config.enc_mode,
+                             incremental=self._map_param_inc)

              self._latest_ckpt_file_name = cur_file
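As editorial context, a hedged sketch of the public `save_checkpoint` call that this callback reduces to on the rc2 side (no AITURBO branch, no `crc_check` argument); the network and the appended values here are toy placeholders.

    import mindspore as ms
    from mindspore import nn

    # Sketch only: persist a small network together with bookkeeping values.
    net = nn.Dense(2, 1)
    ms.save_checkpoint(net, "./demo-1_1.ckpt", append_dict={"epoch_num": 1, "step_num": 1})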
 
@@ -677,16 +599,6 @@ class ModelCheckpoint(Callback):
          """Return the latest checkpoint path and file name."""
          return self._latest_ckpt_file_name

-     @property
-     def _get_save_checkpoint_steps(self):
-         """Return save ckpt steps"""
-         return self._config.save_checkpoint_steps
-
-     @property
-     def _get_last_trigger_step(self):
-         """Return last triggered steps"""
-         return self._last_triggered_step
-

  class CheckpointManager:
      """Manage checkpoint files according to train_config of checkpoint."""
@@ -66,7 +66,7 @@ def nptype_to_prototype(np_value):
          np.uint32: 'DT_UINT32',
          np.uint64: 'DT_UINT64',
          np.float16: 'DT_FLOAT16',
-         np.float_: 'DT_FLOAT64',
+         np.float: 'DT_FLOAT64',
          np.float32: 'DT_FLOAT32',
          np.float64: 'DT_FLOAT64',
          None: 'DT_UNDEFINED'
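Editorial note on this hunk: `np.float_` is an alias of `np.float64`, while the bare `np.float` alias was deprecated in NumPy 1.20 and removed in 1.24, so the rc2 spelling only imports on older NumPy releases. A hedged, version-independent lookup might look like this:

    import numpy as np

    # Sketch only: resolve the proto tag through the concrete dtype instead of the removed alias.
    np_to_proto = {np.float16: 'DT_FLOAT16', np.float32: 'DT_FLOAT32', np.float64: 'DT_FLOAT64'}
    print(np_to_proto[np.dtype('float64').type])   # DT_FLOAT64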
@@ -30,7 +30,7 @@ class TimeMonitor(Callback):
              if the program get `batch_num` during training, `data_size` will be set to `batch_num`,
              otherwise `data_size` will be used. Default: ``None`` .

-         data_time (bool): Whether to show the average time of fetching data in Host.
+         data_time (bool): Whether to sow the average time of fetching data in Host.
              Note that data fetch and network compute are processed sequentially in non dataset sink mode, while
              they are asynchronous in dataset sink mode. Default: ``False`` .
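For orientation, a hedged usage sketch of the `data_time` option documented above (the API is shared by both versions); `train_net`, `loss_fn`, `optimizer` and `train_dataset` are placeholders assumed to exist.

    from mindspore.train import Model, TimeMonitor

    # Sketch only: report per-step time plus the average host-side data fetch time.
    model = Model(train_net, loss_fn=loss_fn, optimizer=optimizer)
    time_cb = TimeMonitor(data_size=train_dataset.get_dataset_size(), data_time=True)
    model.train(1, train_dataset, callbacks=[time_cb])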
 
@@ -130,11 +130,10 @@ def data_sink(fn, dataset, sink_size=1, jit_config=None, input_signature=None):
      A wrapper function to generate a function for the input function.

      Note:
-         When using data sinking, the dataset will be automatically looped to the device. The device side can cache up
-         to 100 batches of data and occupy no more than 2GB of memory. At this time, only the number of steps for each
-         sinking `sink_size` needs to be considered. `sink_size` defaults to ``1``, indicating that each epoch only
-         takes one batch of data from the cache for training and outputs a loss. If `sink_size` is greater than 1, each
-         epoch takes out `sink_size` batches of data from the cache for training and outputs a loss.
+         When using data sinking, the dataset will be automatically sent in a loop, and only the step size of sinking
+         `sink_size` needs to be considered. The default value of `sink_size` is ``1``, which means that all data will
+         be sunk every epoch. If `sink_size` is greater than 1, the amount of data sunk per epoch will be the dataset
+         with a size of `sink_size`.

      Args:
          fn (Function): The Python function that will be run with dataset.
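An editorial, hedged sketch of calling `data_sink` with a small in-memory dataset; each call of the wrapped function consumes `sink_size` batches and returns the last output (device-side sinking is assumed to be available).

    import numpy as np
    import mindspore as ms
    from mindspore import dataset as ds
    from mindspore.train import data_sink

    loader = ds.NumpySlicesDataset({"x": np.ones((8, 2), np.float32)}, shuffle=False).batch(2)

    @ms.jit
    def step(x):
        return x.sum()          # stand-in for a real train step returning a loss

    sink_step = data_sink(step, loader, sink_size=2)
    out = sink_step()           # runs 2 sunk steps and returns the last result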
@@ -16,13 +16,12 @@
  from __future__ import absolute_import

  import math
- import copy

  from mindspore import _checkparam as Validator
  from mindspore import log as logger
  from mindspore.common._auto_dynamic import is_auto_dynamic, convert_new_shapes
  from mindspore.common.dtype import pytype_to_dtype
- from mindspore.common.api import _cell_graph_executor, _is_args_fullmode, ARG_SPECIFIED
+ from mindspore.common.api import _cell_graph_executor
  from mindspore.common._utils import is_shape_unknown
  from mindspore.dataset.engine import offload
  from mindspore import context, nn
@@ -98,19 +97,10 @@ class _DataWrapper(nn.Cell):
          self.get_next = P.GetNext(
              dataset_types, dataset_shapes, len(dataset_types), queue_name)
          if network.get_inputs() is not None:
-             network_inputs = network.get_inputs()
-             is_fullmode = _is_args_fullmode(network_inputs, False)
-             if is_fullmode:
-                 symbol_inputs = [getattr(inp, "symbolic_shape", None) for inp in network.get_inputs()]
-             else:
-                 symbol_inputs = [None for _ in dataset_shapes]
-                 arg_specified = network_inputs.get(ARG_SPECIFIED, [])
-                 for idx, inp in arg_specified:
-                     symbol_inputs[idx] = getattr(inp, "symbolic_shape", None)
-             symbols_for_parallel = _change_symbols_for_parallel(dataset_shapes, copy.deepcopy(symbol_inputs))
-             if any((s is not None for s in symbols_for_parallel)):
+             symbol_inputs = [getattr(inp, "symbolic_shape", None) for inp in network.get_inputs()]
+             symbol_inputs = _change_symbols_for_parallel(dataset_shapes, symbol_inputs)
+             if any((s is not None for s in symbol_inputs)):
                  self.get_next.add_prim_attr("symbols", symbol_inputs)
-                 self.get_next.add_prim_attr("symbols_for_parallel", symbols_for_parallel)
          self.network = network
          self._get_attr_from_cell(network)
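For context on where `symbolic_shape` comes from, a heavily hedged editorial sketch (exact Symbol arguments may differ between versions) of declaring symbolic input dimensions on a network before sinking:

    import mindspore as ms
    from mindspore import Symbol, Tensor

    # Sketch only: MyNet is a placeholder nn.Cell defined elsewhere; the first dimension is
    # declared symbolic, so the wrapper above can forward its symbolic_shape to GetNext.
    net = MyNet()
    batch = Symbol(divisor=8)
    net.set_inputs(Tensor(shape=[batch, 3, 224, 224], dtype=ms.float32))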
 
@@ -165,13 +155,6 @@ def _check_inputs(network_shapes, dataset_shapes, dataset_types):
      """
      Check if set inputs are correct.
      """
-     if not _is_args_fullmode(network_shapes, False):
-         temp_network_shapes = [None for _ in dataset_shapes]
-         arg_specified = network_shapes.get(ARG_SPECIFIED, [])
-         for idx, inp in arg_specified:
-             temp_network_shapes[idx] = inp
-         network_shapes = temp_network_shapes
-
      for tensor_index, ele_dataset_shape in enumerate(dataset_shapes):
          if network_shapes[tensor_index] is None:
              continue
@@ -263,32 +246,27 @@ def connect_network_with_dataset(network, dataset_helper):
      queue_name = dataset.__transfer_dataset__.queue_name
      if _dynamic_sink_scenario(dataset, dataset_iter, is_dynamic):
          dataset_types, dataset_shapes = dataset_helper.get_data_info()
-         # Need to do full_batch for shapes which also do in the _DatasetIterMSLoopSink
-         if _need_to_full():
-             dataset_shapes = _to_full_shapes(dataset_shapes, _get_device_num() // _get_pipeline_stages())
          dataset_types = [pytype_to_dtype(x) for x in dataset_types]
          if not is_dynamic:
              dataset_shapes = _auto_dynamic_shape.auto_dynamic_generate_compile_args(dataset_shapes, True)
          key = str(dataset_types) + str(dataset_shapes)
-
-         if hasattr(aux, "__shape_type__") and aux.__shape_type__ != key:
-             _auto_dynamic_shape.update_phase_and_compile_args(dataset_shapes, key, True, aux)
-         if hasattr(aux, '__network_manage__') and key in aux.__network_manage__:
-             network = aux.__network_manage__[key]
+         _auto_dynamic_shape.update_phase_and_compile_args(dataset_shapes, key, True, aux)
+         if hasattr(aux, '__network_manage__') and key in aux.__network_manage__:
+             network = aux.__network_manage__[key]
+         else:
+             if _need_to_full():
+                 device_num = _get_device_num() // _get_pipeline_stages()
+                 dataset_shapes = _to_full_shapes(dataset_shapes, device_num)
+
+             network = _generate_dataset_sink_mode_net(
+                 network, dataset_shapes, dataset_types, queue_name)
+             if hasattr(aux, '__network_manage__'):
+                 aux.__network_manage__ = aux.__network_manage__
              else:
-             if _need_to_full():
-                 device_num = _get_device_num() // _get_pipeline_stages()
-                 dataset_shapes = _to_full_shapes(dataset_shapes, device_num)
-
-             network = _generate_dataset_sink_mode_net(
-                 network, dataset_shapes, dataset_types, queue_name)
-             if hasattr(aux, '__network_manage__'):
-                 aux.__network_manage__ = aux.__network_manage__
-             else:
-                 aux.__network_manage__ = dict()
-             aux.__network_manage__[key] = network
-         network.add_flags(sink_mode=True)
-         return network
+                 aux.__network_manage__ = dict()
+             aux.__network_manage__[key] = network
+         network.add_flags(sink_mode=True)
+         return network

      if hasattr(aux, '__sink_network__'):
          network = aux.__sink_network__
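Editorial sketch of the caching idea used in this hunk (the names below are placeholders, not the library API): a compiled sink network is reused per data signature, so recompilation happens only when the dtype/shape key changes.

    from typing import Any, Callable, Dict

    _network_cache: Dict[str, Any] = {}

    def get_or_build_sink_net(types, shapes, build_fn: Callable[[], Any]) -> Any:
        key = str(types) + str(shapes)        # same keying scheme as the hunk above
        if key not in _network_cache:
            _network_cache[key] = build_fn()  # compile only for unseen signatures
        return _network_cache[key]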
@@ -299,8 +277,6 @@ def connect_network_with_dataset(network, dataset_helper):
          network = _generate_network_with_dataset(
              network, dataset_helper, queue_name)
          aux.__sink_network__ = network
-         dataset_types, dataset_shapes = dataset_helper.types_shapes()
-         aux.__shape_type__ = str(dataset_types) + str(dataset_shapes)

      if _dynamic_sink_data(dataset, dataset_iter) and _dynamic_sink_exception_scenario(dataset_iter, is_dynamic):
          dataset_helper.get_data_info()
@@ -496,7 +472,6 @@ class DatasetHelper:
          '''
          Inner class for parsing send info.
          '''
-
          def __init__(self, send_info, run_context):
              self.info_ = {}
              self.sink_size = run_context.original_args()["batch_num"]